void texture::do_create(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, D3D11_SUBRESOURCE_DATA* subresource)
{
    // TODO: MSAA in desc needs to be evaluated!
    size_ = DirectX::XMFLOAT2(static_cast<FLOAT>(desc.Width), static_cast<FLOAT>(desc.Height));

    desc.BindFlags |= D3D11_BIND_SHADER_RESOURCE;

    assert_hr(device->CreateTexture2D(&desc, subresource, &texture_));

    D3D11_SHADER_RESOURCE_VIEW_DESC desc_srv;

    if (desc.Format == DXGI_FORMAT_R32_TYPELESS)
        desc_srv.Format = DXGI_FORMAT_R32_FLOAT;
    else
        desc_srv.Format = desc.Format;

    if (desc.ArraySize > 1)
    {
        desc_srv.Texture2DArray.ArraySize = desc.ArraySize;
        desc_srv.Texture2DArray.MipLevels = desc.MipLevels;
        desc_srv.Texture2DArray.FirstArraySlice = 0;
        desc_srv.Texture2DArray.MostDetailedMip = 0;
        desc_srv.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DARRAY;
    }
    else
    {
        desc_srv.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
        desc_srv.Texture2D.MostDetailedMip = 0;
        desc_srv.Texture2D.MipLevels = (desc.MipLevels == 0) ? 6 : desc.MipLevels;
    }

    assert_hr(device->CreateShaderResourceView(texture_, &desc_srv, &srview_));
}
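// Hedged usage sketch (not part of the original source): how a caller might fill a
// D3D11_TEXTURE2D_DESC so that do_create() above takes the DXGI_FORMAT_R32_TYPELESS
// branch and builds an R32_FLOAT shader resource view, e.g. for a shadow map that is
// also bound as a depth-stencil target. The texture instance and the way do_create is
// reached (directly or through a public create() wrapper) are assumptions.
D3D11_TEXTURE2D_DESC shadow_desc;
ZeroMemory(&shadow_desc, sizeof(shadow_desc));
shadow_desc.Width = 1024;
shadow_desc.Height = 1024;
shadow_desc.MipLevels = 1;
shadow_desc.ArraySize = 1;
shadow_desc.SampleDesc.Count = 1;                   // no MSAA (see TODO above)
shadow_desc.Usage = D3D11_USAGE_DEFAULT;
shadow_desc.Format = DXGI_FORMAT_R32_TYPELESS;      // typeless so DSV and SRV formats can differ
shadow_desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;   // do_create ORs in D3D11_BIND_SHADER_RESOURCE
// texture shadow_map_;
// shadow_map_.do_create(device, shadow_desc, nullptr); // no initial data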
void kinect_gbuffer::init()
{
    int device_id_ = 0;
    assert_hr(NuiCreateSensorByIndex(device_id_, &sensor_));

    DWORD options = NUI_INITIALIZE_FLAG_USES_COLOR; // | NUI_INITIALIZE_FLAG_USES_DEPTH;
    assert_hr(sensor_->NuiInitialize(options));

    NUI_IMAGE_RESOLUTION eResolution = NUI_IMAGE_RESOLUTION_640x480;

    if (width_ == 640 && height_ == 480)
        eResolution = NUI_IMAGE_RESOLUTION_640x480;
    else if (width_ == 1280 && height_ == 960)
        eResolution = NUI_IMAGE_RESOLUTION_1280x960;
    else
    {
        tcout << L"Invalid resolution, setting back to 640x480" << std::endl;
        width_ = 640;
        height_ = 480;
    }

    color_event_ = CreateEvent(nullptr, TRUE, FALSE, nullptr);
    depth_event_ = CreateEvent(nullptr, TRUE, FALSE, nullptr);
    kill_event_  = CreateEvent(nullptr, TRUE, FALSE, nullptr);

    assert_hr(sensor_->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, eResolution, 0, 2, color_event_, &color_stream_));

    if ((options & NUI_INITIALIZE_FLAG_USES_DEPTH) != 0)
    {
        // fixed size
        assert_hr(sensor_->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480, 0, 2, depth_event_, &depth_stream_));
        assert_hr(sensor_->NuiImageStreamSetImageFrameFlags(depth_stream_, NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE));
    }

    has_new_color_ = has_new_depth_ = false;
}
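// Hedged sketch (not part of the original source): init() above creates manual-reset
// events, so a capture thread would typically block on the kill and color events and
// reset the color event after consuming a frame. The function below is illustrative
// only; how the color frame is actually fetched from the NUI stream and stored is
// left out.
void wait_for_kinect_frames(HANDLE kill_event, HANDLE color_event)
{
    HANDLE handles[] = { kill_event, color_event };

    for (;;)
    {
        DWORD which = WaitForMultipleObjects(2, handles, FALSE, INFINITE);

        if (which == WAIT_OBJECT_0)         // kill event signaled -> shut down
            break;

        if (which == WAIT_OBJECT_0 + 1)     // color event signaled -> new frame available
        {
            // ... fetch the color frame via the NUI stream handle here ...
            ResetEvent(color_event);        // manual-reset event: clear it after handling
        }
    }
}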
void video_gbuffer::create(ID3D11Device* device, UINT32 index, const tstring& name, UINT w, UINT h)
{
    async_ = false;
    context_ = nullptr;
    running_ = false;

    gbuffer::create(device, name);

    InitializeCriticalSection(&cs_);

    D3D11_TEXTURE2D_DESC desc;
    ZeroMemory(&desc, sizeof(D3D11_TEXTURE2D_DESC));

    desc.Width = w;
    desc.Height = h;
    desc.MipLevels = desc.ArraySize = 1;
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_DYNAMIC;
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // needed for multithreading
    //desc.MiscFlags = D3D11_BIND_UNORDERED_ACCESS;
    desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;

    add_target(RT_VIDEO_COLOR, desc);

    temp_buffer_.resize(w * h * 4);

    // TODO: make public, independent of create for video file input?
    assert_hr(init_device(index, w, h, true));
}
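// Hedged sketch (not part of the original source): the D3D11_USAGE_DYNAMIC /
// D3D11_CPU_ACCESS_WRITE combination above exists so the CPU can stream camera frames
// into the RT_VIDEO_COLOR texture. A minimal upload of temp_buffer_-style BGRA data via
// Map with WRITE_DISCARD could look like this; the function name and parameters are
// illustrative, and how the target texture is retrieved from the gbuffer is assumed.
void upload_bgra_frame(ID3D11DeviceContext* context, ID3D11Texture2D* target,
                       const unsigned char* bgra, UINT w, UINT h)
{
    D3D11_MAPPED_SUBRESOURCE mapped;
    if (FAILED(context->Map(target, 0, D3D11_MAP_WRITE_DISCARD, 0, &mapped)))
        return;

    // copy row by row: the driver's RowPitch may be larger than w * 4
    unsigned char* dst = static_cast<unsigned char*>(mapped.pData);
    for (UINT y = 0; y < h; ++y)
        memcpy(dst + y * mapped.RowPitch, bgra + y * w * 4, w * 4);

    context->Unmap(target, 0);
}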
void delta_sparse_voxel_octree::create(ID3D11Device* device, UINT volume_size)
{
    // create regular stuff first
    sparse_voxel_octree::create(device, volume_size);

    // create delta volume
    D3D11_TEXTURE3D_DESC desc;
    v_rho_->GetDesc(&desc);
    assert_hr(device->CreateTexture3D(&desc, nullptr, &v_delta_));

    // create delta uav
    D3D11_UNORDERED_ACCESS_VIEW_DESC uav_desc;
    uav_v_rho_->GetDesc(&uav_desc);
    assert_hr(device->CreateUnorderedAccessView(v_delta_, &uav_desc, &uav_v_delta_));

    // create delta srv
    D3D11_SHADER_RESOURCE_VIEW_DESC srv_desc;
    srv_v_rho_->GetDesc(&srv_desc);
    assert_hr(device->CreateShaderResourceView(v_delta_, &srv_desc, &srv_v_delta_));
}
HRESULT video_gbuffer::start(ID3D11DeviceContext* context)
{
    context_ = context;
    running_ = true;
    has_new_buffer_ = false;

    // connect
#ifdef SHOW_DEBUG_RENDERER
    assert_hr(capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, current->filter, samplegrabberfilter, nullptr));
#else
    assert_hr(capture_->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, current_device_, sgf_, nullf_));
#endif

    // start the riot
    LONGLONG v = MAXLONGLONG;
    assert_hr(capture_->ControlStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, current_device_, &v, &v, 1, 2));
    assert_hr(control_->Run());

    return S_OK;
}
HRESULT create(IFilterGraph2* graph)
{
    // FIXME: do not assert here -> result is RPC_E_CHANGED_MODE, which can be ignored
    (CoInitialize(nullptr));

    VARIANT name;
    //LONGLONG start=MAXLONGLONG, stop=MAXLONGLONG;

    devices.clear();

    // create an enumerator for video input devices
    ICreateDevEnum* dev_enum;
    assert_hr(CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&dev_enum));

    IEnumMoniker* enum_moniker;
    HRESULT hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &enum_moniker, 0);

    // no devices
    if (hr == S_FALSE)
    {
        safe_release(dev_enum);
        return S_OK;
    }

    // get devices: Next() expects an array with one moniker pointer slot per device
    const int MAX_DEVICES = 8;
    IMoniker* moniker[MAX_DEVICES];
    unsigned long dev_count;

    assert_hr(enum_moniker->Next(MAX_DEVICES, moniker, &dev_count));

    for (size_t i = 0; i < dev_count; ++i)
    {
        // get properties
        IPropertyBag* pbag;
        hr = moniker[i]->BindToStorage(nullptr, nullptr, IID_IPropertyBag, (void**)&pbag);

        if (hr >= 0)
        {
            VariantInit(&name);

            // get the description
            if (FAILED(pbag->Read(L"Description", &name, 0)))
                hr = pbag->Read(L"FriendlyName", &name, 0);

            if (SUCCEEDED(hr))
            {
                // initialize the video_device struct
                video_device dev;
                dev.name = name.bstrVal;

                // add a filter for the device
                if (SUCCEEDED(graph->AddSourceFilterForMoniker(moniker[i], 0, dev.name.c_str(), &dev.filter)))
                    devices.push_back(dev);
            }

            VariantClear(&name);
            safe_release(pbag);
        }

        moniker[i]->Release();
    }

    safe_release(enum_moniker);
    safe_release(dev_enum);

    return S_OK;
}
HRESULT video_gbuffer::init_device(UINT32 index, size_t w, size_t h, bool async)
{
    // init com
    // FIXME: do not assert here -> result is RPC_E_CHANGED_MODE, which can be ignored
    (CoInitialize(nullptr));

    // create graph
    assert_hr(CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER, IID_IFilterGraph2, (void**)&graph_));
    assert_hr(CoCreateInstance(CLSID_CaptureGraphBuilder2, nullptr, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&capture_));
    assert_hr(graph_->QueryInterface(IID_IMediaControl, (void**)&control_));
    assert_hr(capture_->SetFiltergraph(graph_));

    // create null renderer to suppress window output
    assert_hr(CoCreateInstance(CLSID_NullRenderer, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&nullf_));
    assert_hr(graph_->AddFilter(nullf_, L"Null Renderer"));

    // enumerate devices
    detail::video_device_list list;
    assert_hr(list.create(graph_));

    for (size_t i = 0; i < list.devices.size(); ++i)
        tclog << "Camera found: " << list.devices[i].name << std::endl;

    // create samplegrabber
    assert_hr(CoCreateInstance(CLSID_SampleGrabber, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&sgf_));
    assert_hr(graph_->AddFilter(sgf_, L"Sample Grabber"));
    assert_hr(sgf_->QueryInterface(IID_ISampleGrabber, (void**)&sg_));

    // set media type to RGB32
    AM_MEDIA_TYPE mt;
    ZeroMemory(&mt, sizeof(mt));
    mt.majortype = MEDIATYPE_Video;
    mt.subtype = MEDIASUBTYPE_RGB32;
    assert_hr(sg_->SetMediaType(&mt));

    // init callback
    callback_.caller = this;
    assert_hr(sg_->SetCallback(dynamic_cast<ISampleGrabberCB*>(&callback_), 0));

    // select device (guard against an out-of-range index)
    if (index >= list.devices.size())
        return E_INVALIDARG;

    detail::video_device* dev = &list.devices[index];

    if (!dev->filter)
        return E_INVALIDARG;

    if (running_)
        stop();

    // remove and add the filters to force disconnect of pins
    if (current_device_)
    {
        graph_->RemoveFilter(current_device_);
        graph_->RemoveFilter(sgf_);
        //graph_->AddFilter(sgf_, L"Sample Grabber");
        //graph_->AddFilter(current->filter, current->filtername);
    }

    current_device_ = dev->filter;

    // set size
    /*
    IAMStreamConfig *config = nullptr;
    assert_hr(capture_->FindInterface(&PIN_CATEGORY_PREVIEW, 0, current_device_, IID_IAMStreamConfig, (void**)&config));

    int resolutions, size;
    VIDEO_STREAM_CONFIG_CAPS caps;

    AM_MEDIA_TYPE *mt1;
    config->GetFormat(&mt1);

    VIDEOINFOHEADER *info = reinterpret_cast<VIDEOINFOHEADER*>(mt1->pbFormat);
    info->bmiHeader.biWidth = w;
    info->bmiHeader.biHeight = h;
    info->bmiHeader.biSizeImage = DIBSIZE(info->bmiHeader);

    assert_hr(config->SetFormat(mt1));
    */

    external_callback_ = nullptr;

    return S_OK;
}
void sparse_voxel_octree::create(ID3D11Device* device, UINT volume_size)
{
    profiler_.create(device);

    volume_size_ = volume_size;
    UINT voxel_count = volume_size_ * volume_size_ * volume_size_;
    UINT mips = static_cast<UINT>(std::log2(volume_size_));

    // create volume(s)
    D3D11_TEXTURE3D_DESC desc;
    ZeroMemory(&desc, sizeof(desc));
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS | D3D11_BIND_RENDER_TARGET;
    desc.Width = volume_size;
    desc.Height = volume_size;
    desc.Depth = volume_size;
    desc.Usage = D3D11_USAGE_DEFAULT;
    desc.Format = DXGI_FORMAT_R16G16B16A16_FLOAT;
    desc.MiscFlags = D3D11_RESOURCE_MISC_GENERATE_MIPS;
    desc.MipLevels = mips;

    assert_hr(device->CreateTexture3D(&desc, nullptr, &v_normal_));
    assert_hr(device->CreateTexture3D(&desc, nullptr, &v_rho_));

    // create unordered access view for volume(s)
    D3D11_UNORDERED_ACCESS_VIEW_DESC uav_desc;
    ZeroMemory(&uav_desc, sizeof(uav_desc));
    uav_desc.Format = DXGI_FORMAT_R16G16B16A16_FLOAT;
    uav_desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE3D;
    uav_desc.Texture3D.FirstWSlice = 0;
    uav_desc.Texture3D.MipSlice = 0;
    uav_desc.Texture3D.WSize = volume_size;

    assert_hr(device->CreateUnorderedAccessView(v_normal_, &uav_desc, &uav_v_normal_));
    assert_hr(device->CreateUnorderedAccessView(v_rho_, &uav_desc, &uav_v_rho_));

    // create shader resource view for volume(s)
    D3D11_SHADER_RESOURCE_VIEW_DESC srv_desc;
    srv_desc.Format = DXGI_FORMAT_R16G16B16A16_FLOAT;
    srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE3D;
    srv_desc.Texture3D.MostDetailedMip = 0;
    srv_desc.Texture3D.MipLevels = mips;

    assert_hr(device->CreateShaderResourceView(v_normal_, &srv_desc, &srv_v_normal_));
    assert_hr(device->CreateShaderResourceView(v_rho_, &srv_desc, &srv_v_rho_));

    D3D11_BLEND_DESC bld;
    ZeroMemory(&bld, sizeof(D3D11_BLEND_DESC));
    bld.IndependentBlendEnable = TRUE;

    for (size_t i = 0; i < 6; ++i)
    {
        bld.RenderTarget[i].BlendEnable = TRUE;
        bld.RenderTarget[i].SrcBlend = D3D11_BLEND_ONE;
        bld.RenderTarget[i].DestBlend = D3D11_BLEND_ONE;
        bld.RenderTarget[i].SrcBlendAlpha = D3D11_BLEND_ONE;
        bld.RenderTarget[i].DestBlendAlpha = D3D11_BLEND_ONE;
        bld.RenderTarget[i].BlendOp = D3D11_BLEND_OP_MAX;
        bld.RenderTarget[i].BlendOpAlpha = D3D11_BLEND_OP_MAX;
        bld.RenderTarget[i].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
    }

    assert_hr(device->CreateBlendState(&bld, &bs_voxelize_));

    CD3D11_RASTERIZER_DESC raster_desc = CD3D11_RASTERIZER_DESC(CD3D11_DEFAULT());
    raster_desc.FrontCounterClockwise = true;
    raster_desc.CullMode = D3D11_CULL_NONE;
    assert_hr(device->CreateRasterizerState(&raster_desc, &no_culling_));

    cb_parameters_.create(device);
    cb_gi_parameters_.create(device);

    DirectX::XMStoreFloat4x4(&world_to_svo_, DirectX::XMMatrixIdentity());
}
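// Hedged note (not part of the original source): the volumes above are created with
// D3D11_RESOURCE_MISC_GENERATE_MIPS (which is why D3D11_BIND_RENDER_TARGET is also set),
// so after voxelization the mip chain would typically be filled on the device context
// through the shader resource views. Where exactly this happens in the class is an
// assumption; the calls themselves are standard D3D11:
//
//     context->GenerateMips(srv_v_rho_);
//     context->GenerateMips(srv_v_normal_);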
void stb_to_srv(ID3D11Device* device, LPCSTR filename, ID3D11ShaderResourceView** srv)
{
    if (!srv)
        throw exception(L"No SRV");

    *srv = nullptr;

    bool is_hdr = std::string(filename).find(".hdr") != std::string::npos;
    bool is_jpg = std::string(filename).find(".jpg") != std::string::npos;

    D3D11_TEXTURE2D_DESC tex_desc;
    ZeroMemory(&tex_desc, sizeof(tex_desc));

    D3D11_SUBRESOURCE_DATA subdata;
    ZeroMemory(&subdata, sizeof(subdata));

    int width, height, nc;
    unsigned char* ldr_data = nullptr;
    float* hdr_data = nullptr;

    if (!is_hdr)
    {
        ldr_data = stbi_load(filename, &width, &height, &nc, 4);

        subdata.SysMemPitch = width * 4;
        subdata.pSysMem = ldr_data;

        if (is_jpg)
            tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
        else
            // TODO: Remap to SRGB later when throwing out D3DX
            tex_desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;

        // TODO: Need to create mipmaps
    }
    else
    {
        hdr_data = stbi_loadf(filename, &width, &height, &nc, 4);

        subdata.SysMemPitch = width * 4 * 4;
        subdata.pSysMem = hdr_data;

        tex_desc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
    }

    if (!ldr_data && !hdr_data)
    {
        tstring error = L"can't load";

        if (stbi_failure_reason())
            error = to_tstring(std::string(stbi_failure_reason()));

        throw exception(tstring(L"stbi: ") + error);
    }

    tex_desc.Width = width;
    tex_desc.Height = height;
    tex_desc.MipLevels = 1;
    tex_desc.ArraySize = 1;
    tex_desc.SampleDesc.Count = 1;
    tex_desc.SampleDesc.Quality = 0;
    tex_desc.Usage = D3D11_USAGE_DEFAULT;
    tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    tex_desc.CPUAccessFlags = 0;
    tex_desc.MiscFlags = 0;

    ID3D11Texture2D* texture;
    assert_hr(device->CreateTexture2D(&tex_desc, &subdata, &texture));
    assert_hr(device->CreateShaderResourceView(texture, nullptr, srv));
    safe_release(texture);

    if (is_hdr)
        stbi_image_free(hdr_data);
    else
        stbi_image_free(ldr_data);
}
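// Hedged usage sketch (not part of the original source): stb_to_srv throws on failure and
// writes the view through its last parameter, so a call site might look like this. The
// file path and the surrounding try/catch policy are illustrative only.
ID3D11ShaderResourceView* env_srv = nullptr;
try
{
    stb_to_srv(device, "content/probe.hdr", &env_srv); // .hdr -> R32G32B32A32_FLOAT path
}
catch (const exception&)
{
    // handle/log the load failure here
}
// ... bind env_srv as needed ...
safe_release(env_srv); // release when no longer needed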