// Recursively traces a single photon through the scene for up to
// bouncesLeft surface interactions, storing it in the photon map at each
// non-mirror hit that is visible from the camera.
void RayTracer::tracePhotons(Photon p, int bouncesLeft){
    // Bounce budget exhausted: stop recursing.
    if (bouncesLeft <= 0){
        return;
    }

    // Find the next surface this photon hits.
    const shared_ptr<UniversalSurfel> surfel =
        dynamic_pointer_cast<UniversalSurfel>(
            castRay(G3D::Ray(p.origin, p.direction).bumpedRay(0.0001), finf(), 0));
    if (!surfel){
        // Photon escaped the scene.
        return;
    }

    p.origin = surfel->position;

    // Perfect mirrors (glossy with infinite exponent) never store photons.
    const bool mirrorSurface =
        surfel->glossyReflectionCoefficient.nonZero() &&
        (surfel->glossyReflectionExponent == inf());
    if (!mirrorSurface){
        const Vector3 toPhoton = p.origin - m_camera->frame().translation;
        // Store only if the hit point is visible from the camera: cast a ray
        // from the camera toward the photon, limited to the camera-photon distance.
        if (castRay(G3D::Ray(m_camera->frame().translation, toPhoton).bumpedRay(0.0001),
                    toPhoton.length(), 1)){
            photons.insert(p);
        }
    }

    shared_ptr<Random> rng = m_rnd[0];
    Vector3 scatteredDir;
    Color3 scatterWeight;
    // Let G3D choose the scattered direction and its importance weight.
    surfel->scatter(PathDirection::SOURCE_TO_EYE, -p.direction, true, *rng,
                    scatterWeight, scatteredDir);

    // A zero-length direction means the photon was absorbed.
    if (scatteredDir.magnitude() > 0){
        p.direction = scatteredDir;
        p.power *= scatterWeight;
        tracePhotons(p, bouncesLeft - 1);
    }
    rng.reset();
}
//=======================================================================// void stitch::PhotonTraceRenderer::render(RadianceMap &radianceMap, const stitch::Camera * const camera, const float frameDeltaTime) { radianceMap.clear(Colour_t()); const float halfWindowHeight = radianceMap.getHeight() * 0.5f; const float halfWindowWidth = radianceMap.getWidth() * 0.5f; const size_t imgWidth=radianceMap.getWidth(); const size_t imgHeight=radianceMap.getHeight(); const size_t numIterations=125; const float iterationTime=frameDeltaTime/numIterations; for (size_t i=0; i<numIterations; ++i) { tracePhotons(camera, iterationTime); std::vector<Photon *>::const_iterator iter=photonVector_.begin(); const std::vector<Photon *>::const_iterator iterEnd=photonVector_.end(); if (!stopRender_) { for (; iter!=iterEnd; ++iter) { Photon const * const photon=*iter; const float fx=(photon->centre_.x()*imgWidth+halfWindowWidth); const float fy=(photon->centre_.y()*imgWidth+halfWindowHeight);//Note: imgWidth is used correctly here to scale the photon location to screen space! if ((fy<imgHeight)&&(fy>=0.0f)) { if ((fx<imgWidth)&&(fx>=0.0f)) { //Note: The energy calculation leads to fractional radiance c.(dL/dt) contributed to the radiance map. radianceMap.addToMapValue(fx, fy, photon->energy_ * ((float(imgWidth) * float(imgHeight) * 0.1f)/frameDeltaTime), 0); } } } } else { break;//from for loop } } }
void RayTracer::createPhotonMap(){ int numLights = m_lighting->lightArray.size(); photons.clear(); //Compute the total power of lights Power3 totalPower(0,0,0); for (int i = 0; i < numLights; ++i){ totalPower += m_lighting->lightArray[i]->emittedPower(); } shared_ptr<Random> rnd = m_rnd[0]; //Calculate the probability that the photon is emitted from each light float prob[numLights]; prob[0] = m_lighting->lightArray[0]->emittedPower().sum() / totalPower.sum(); for (int i = 1; i < numLights; ++i){ prob[i] = prob[i-1] + m_lighting->lightArray[i] -> emittedPower().sum() / totalPower.sum(); } //Randomly emit the photons from each light source for (int i = 0; i < m_settings.numPhotons; ++i){ Photon p; float tmp = rnd->uniform(); int j = 0; while (prob[j] < tmp){ ++j; } p.origin = m_lighting->lightArray[j]->position().xyz(); p.power = totalPower/m_settings.numPhotons; //rejection sampling to ensure the photons are emitted in the right direction for spotlights //directional light not supported currently Vector3 tmpDirection; do{ tmpDirection = Vector3::random(); } while (m_lighting->lightArray[j]->inFieldOfView(tmpDirection)); p.direction = tmpDirection; //trace the created photon tracePhotons(p, m_settings.forwardDistance); } //When finish tracing all the photons, rebalance the KDTree photons.balance(); }
// Dispatches the photon-tracing OpenCL kernel over the given volume.
// Selects between two code paths: with OpenCL/OpenGL buffer sharing
// (useGLSharing_), where every shared representation must be added to the
// SyncCLGL acquire list BEFORE aquireAllObjects() and the kernel launch,
// or without sharing, using plain OpenCL representations.
// Results are written into photonOutData->photons_.
// NOTE(review): volumeDim, volumeTextureToWorld, voxelSpacing and the camera
// parameter are computed/received here but not used in the visible body —
// presumably consumed by the kernel-level overload or leftover; confirm.
void PhotonTracerCL::tracePhotons(const Volume* volume, const TransferFunction& transferFunction, const BufferCL* axisAlignedBoundingBoxCL, const AdvancedMaterialProperty& material, const Camera* camera, float stepSize, const LightSamples* lightSamples, const Buffer<unsigned int>* photonsToRecomputeIndices, int nInvalidPhotons, int photonOffset, int batch, int maxInteractions, PhotonData* photonOutData, const VECTOR_CLASS<cl::Event> *waitForEvents, cl::Event *event /*= nullptr*/)
{
    // Kernel failed to build/load: nothing to do.
    if (!photonTracerKernel_) {
        return;
    }
    // Keep one RNG state per photon; resize lazily when the photon count changes.
    if (randomState_.getSize() != photonOutData->getNumberOfPhotons()) {
        setRandomSeedSize(photonOutData->getNumberOfPhotons());
    }
    auto volumeDim = volume->getDimensions();
    // Texture space spacing
    const mat4 volumeTextureToWorld = volume->getCoordinateTransformer().getTextureToWorldMatrix();
    const mat4 textureToIndexMatrix = volume->getCoordinateTransformer().getTextureToIndexMatrix();
    vec3 voxelSpacing(1.f / glm::length(textureToIndexMatrix[0]), 1.f / glm::length(textureToIndexMatrix[1]), 1.f / glm::length(textureToIndexMatrix[2]));
    try {
        if (useGLSharing_) {
            // GL-sharing path: representations are backed by OpenGL objects and
            // must be acquired for OpenCL use before the kernel runs.
            SyncCLGL glSync;
            auto volumeCL = volume->getRepresentation<VolumeCLGL>();
            const BufferCLGL* lightSamplesCL = lightSamples->getLightSamples()->getRepresentation<BufferCLGL>();
            const BufferCLGL* intersectionPointsCL = lightSamples->getIntersectionPoints()->getRepresentation<BufferCLGL>();
            BufferCLGL* photonCL = photonOutData->photons_.getEditableRepresentation<BufferCLGL>();
            const LayerCLGL* transferFunctionCL = transferFunction.getData()->getRepresentation<LayerCLGL>();
            const ElementBufferCLGL* photonsToRecomputeIndicesCL = nullptr;
            // Acquire shared representations before using them in OpenGL
            // The SyncCLGL object will take care of synchronization between OpenGL and OpenCL
            glSync.addToAquireGLObjectList(volumeCL);
            glSync.addToAquireGLObjectList(lightSamplesCL);
            glSync.addToAquireGLObjectList(intersectionPointsCL);
            glSync.addToAquireGLObjectList(photonCL);
            glSync.addToAquireGLObjectList(transferFunctionCL);
            //{IVW_CPU_PROFILING("aquireAllObjects")
            // Optional index buffer: only a subset of photons is recomputed.
            if (photonsToRecomputeIndices) {
                photonsToRecomputeIndicesCL = photonsToRecomputeIndices->getRepresentation<ElementBufferCLGL>();
                glSync.addToAquireGLObjectList(photonsToRecomputeIndicesCL);
            }
            glSync.aquireAllObjects();
            //}
            //{IVW_CPU_PROFILING("tracePhotons")
            // Delegate to the overload that sets kernel arguments and enqueues the work.
            tracePhotons(photonOutData, volumeCL, volumeCL->getVolumeStruct(volume), axisAlignedBoundingBoxCL
                , transferFunctionCL, material, stepSize, lightSamplesCL, intersectionPointsCL, lightSamples->getSize(), photonsToRecomputeIndicesCL, nInvalidPhotons
                , photonCL, photonOffset, batch, maxInteractions
                , waitForEvents, event);
            //}
        } else {
            // Plain OpenCL path: no GL interop, so no acquire/release dance is needed.
            const VolumeCL* volumeCL = volume->getRepresentation<VolumeCL>();
            const BufferCL* lightSamplesCL = lightSamples->getLightSamples()->getRepresentation<BufferCL>();
            const BufferCL* intersectionPointsCL = lightSamples->getIntersectionPoints()->getRepresentation<BufferCL>();
            BufferCL* photonCL = photonOutData->photons_.getEditableRepresentation<BufferCL>();
            const LayerCL* transferFunctionCL = transferFunction.getData()->getRepresentation<LayerCL>();
            const BufferCL* photonsToRecomputeIndicesCL = nullptr;
            // Optional index buffer: only a subset of photons is recomputed.
            if (photonsToRecomputeIndices) {
                photonsToRecomputeIndicesCL = photonsToRecomputeIndices->getRepresentation<BufferCL>();
            }
            // Delegate to the overload that sets kernel arguments and enqueues the work.
            tracePhotons(photonOutData, volumeCL, volumeCL->getVolumeStruct(volume), axisAlignedBoundingBoxCL
                , transferFunctionCL, material, stepSize, lightSamplesCL, intersectionPointsCL, lightSamples->getSize(), photonsToRecomputeIndicesCL, nInvalidPhotons
                , photonCL, photonOffset, batch, maxInteractions
                , waitForEvents, event);
        }
    } catch (cl::Error& err) {
        // OpenCL failures are logged, not propagated.
        LogError(getCLErrorString(err));
    }
}