// Scene Methods
void Scene::Render() {
    // Allocate and initialize _sample_
    Sample *sample = new Sample(surfaceIntegrator, volumeIntegrator, this);
    // Allow integrators to do pre-processing for the scene
    surfaceIntegrator->Preprocess(this);
    volumeIntegrator->Preprocess(this);
    // Trace rays: the main loop
    ProgressReporter progress(sampler->TotalSamples(), "Rendering");
    while (sampler->GetNextSample(sample)) {
        // Find camera ray for _sample_
        RayDifferential ray;
        float rayWeight = camera->GenerateRay(*sample, &ray);
        // Generate ray differentials for camera ray
        ++(sample->imageX);
        camera->GenerateRay(*sample, &ray.rx);
        --(sample->imageX);
        ++(sample->imageY);
        camera->GenerateRay(*sample, &ray.ry);
        ray.hasDifferentials = true;
        --(sample->imageY);
        // Evaluate radiance along camera ray
        // (initialize alpha so it is defined even when rayWeight is zero)
        float alpha = 1.f;
        Spectrum Ls = 0.f;
        if (rayWeight > 0.f)
            Ls = rayWeight * Li(ray, sample, &alpha);
        // Issue warning if unexpected radiance value returned
        if (Ls.IsNaN()) {
            Error("Not-a-number radiance value returned "
                  "for image sample. Setting to black.");
            Ls = Spectrum(0.f);
        }
        else if (Ls.y() < -1e-5) {
            Error("Negative luminance value, %g, returned "
                  "for image sample. Setting to black.", Ls.y());
            Ls = Spectrum(0.f);
        }
        else if (isinf(Ls.y())) {
            Error("Infinite luminance value returned "
                  "for image sample. Setting to black.");
            Ls = Spectrum(0.f);
        }
        // Add sample contribution to image
        camera->film->AddSample(*sample, ray, Ls, alpha);
        // Free BSDF memory from computing image sample value
        BSDF::FreeAll();
        // Report rendering progress
        static StatsCounter cameraRaysTraced("Camera", "Camera Rays Traced");
        ++cameraRaysTraced;
        progress.Update();
    }
    // Clean up after rendering and store final image
    delete sample;
    progress.Done();
    camera->film->WriteImage();
}
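The two extra GenerateRay() calls above offset the sample by exactly one pixel in x and in y, so ray.rx and ray.ry bracket the pixel's footprint; integrators can use them to choose texture filter widths. A minimal sketch of that downstream use, in pbrt's style but with a hypothetical helper (ApproxFootprint is not part of pbrt's API):

// Hypothetical helper: estimate the footprint at hit point p with surface
// normal n by intersecting the differential rays with the tangent plane at
// p and measuring how far they land from p. The sketch ignores the
// degenerate grazing-angle case where a differential ray is parallel to
// the plane.
float ApproxFootprint(const RayDifferential &ray, const Point &p,
        const Normal &n) {
    if (!ray.hasDifferentials) return 0.f;
    // Plane intersection: t = ((p - o) . n) / (d . n) for each offset ray
    const float tx = Dot(p - ray.rx.o, n) / Dot(ray.rx.d, n);
    const float ty = Dot(p - ray.ry.o, n) / Dot(ray.ry.d, n);
    const Point px = ray.rx.o + tx * ray.rx.d;
    const Point py = ray.ry.o + ty * ray.ry.d;
    // The larger offset bounds the pixel's extent on the surface
    return max((px - p).Length(), (py - p).Length());
}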
void LightCPURenderThread::RenderFunc() {
    //SLG_LOG("[LightCPURenderThread::" << threadIndex << "] Rendering thread started");

    //--------------------------------------------------------------------------
    // Initialization
    //--------------------------------------------------------------------------

    LightCPURenderEngine *engine = (LightCPURenderEngine *)renderEngine;
    RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
    Scene *scene = engine->renderConfig->scene;
    PerspectiveCamera *camera = scene->camera;
    Film *film = threadFilm;

    // Set up the sampler
    double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
    Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
            &metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
    const u_int sampleBootSize = 11;
    const u_int sampleEyeStepSize = 4;
    const u_int sampleLightStepSize = 5;
    const u_int sampleSize =
            sampleBootSize + // To generate the initial setup
            engine->maxPathDepth * sampleEyeStepSize + // For each eye vertex
            engine->maxPathDepth * sampleLightStepSize; // For each light vertex
    sampler->RequestSamples(sampleSize);

    //--------------------------------------------------------------------------
    // Trace light paths
    //--------------------------------------------------------------------------

    vector<SampleResult> sampleResults;
    while (!boost::this_thread::interruption_requested()) {
        sampleResults.clear();

        // Select one light source
        float lightPickPdf;
        const LightSource *light = scene->SampleAllLights(sampler->GetSample(2),
                &lightPickPdf);

        // Initialize the light path
        float lightEmitPdfW;
        Ray nextEventRay;
        Spectrum lightPathFlux = light->Emit(scene,
                sampler->GetSample(3), sampler->GetSample(4),
                sampler->GetSample(5), sampler->GetSample(6),
                &nextEventRay.o, &nextEventRay.d, &lightEmitPdfW);
        if (lightPathFlux.Black()) {
            sampler->NextSample(sampleResults);
            continue;
        }
        lightPathFlux /= lightEmitPdfW * lightPickPdf;
        assert(!lightPathFlux.IsNaN() && !lightPathFlux.IsInf());

        // Sample a point on the camera lens
        Point lensPoint;
        if (!camera->SampleLens(sampler->GetSample(7), sampler->GetSample(8),
                &lensPoint)) {
            sampler->NextSample(sampleResults);
            continue;
        }

        //----------------------------------------------------------------------
        // I don't try to connect the light vertex directly with the eye
        // because InfiniteLight::Emit() returns a point on the scene bounding
        // sphere. Instead, I trace a ray from the camera like in BiDir.
        // This is also a good way to test the Film Per-Pixel-Normalization and
        // the Per-Screen-Normalization buffers used by BiDir.
        //----------------------------------------------------------------------
        TraceEyePath(sampler, &sampleResults);

        //----------------------------------------------------------------------
        // Trace the light path
        //----------------------------------------------------------------------
        int depth = 1;
        while (depth <= engine->maxPathDepth) {
            const u_int sampleOffset = sampleBootSize +
                    sampleEyeStepSize * engine->maxPathDepth +
                    (depth - 1) * sampleLightStepSize;

            RayHit nextEventRayHit;
            BSDF bsdf;
            Spectrum connectionThroughput;
            if (scene->Intersect(device, true, sampler->GetSample(sampleOffset),
                    &nextEventRay, &nextEventRayHit, &bsdf, &connectionThroughput)) {
                // Something was hit
                lightPathFlux *= connectionThroughput;

                //--------------------------------------------------------------
                // Try to connect the light path vertex with the eye
                //--------------------------------------------------------------
                ConnectToEye(sampler->GetSample(sampleOffset + 1), bsdf,
                        lensPoint, lightPathFlux, sampleResults);

                if (depth >= engine->maxPathDepth)
                    break;

                //--------------------------------------------------------------
                // Build the next vertex path ray
                //--------------------------------------------------------------
                float bsdfPdf;
                Vector sampledDir;
                BSDFEvent event;
                float cosSampleDir;
                const Spectrum bsdfSample = bsdf.Sample(&sampledDir,
                        sampler->GetSample(sampleOffset + 2),
                        sampler->GetSample(sampleOffset + 3),
                        &bsdfPdf, &cosSampleDir, &event);
                if (bsdfSample.Black())
                    break;

                if (depth >= engine->rrDepth) {
                    // Russian Roulette
                    const float prob = Max(bsdfSample.Filter(), engine->rrImportanceCap);
                    if (sampler->GetSample(sampleOffset + 4) < prob)
                        bsdfPdf *= prob;
                    else
                        break;
                }

                lightPathFlux *= bsdfSample * (cosSampleDir / bsdfPdf);
                assert(!lightPathFlux.IsNaN() && !lightPathFlux.IsInf());

                nextEventRay = Ray(bsdf.hitPoint, sampledDir);
                ++depth;
            } else {
                // Ray lost in space...
                break;
            }
        }

        sampler->NextSample(sampleResults);

#ifdef WIN32
        // Work around Windows bad scheduling
        renderThread->yield();
#endif
    }

    delete sampler;
    delete rndGen;

    //SLG_LOG("[LightCPURenderThread::" << threadIndex << "] Rendering thread halted");
}
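The offset arithmetic above pins down where each light vertex draws its samples in the flat sample vector: the boot block comes first, then the entire eye sub-path budget, then 5 samples per light vertex. A small standalone check of that layout (the maxPathDepth value is illustrative):

// Worked example of the light-path sample-offset layout, assuming
// maxPathDepth = 5: samples 0..10 bootstrap the path (light pick,
// emission, lens point), 11..30 belong to the eye sub-path (4 per
// vertex), and light vertex d = 1..5 starts at 11 + 4*5 + (d-1)*5,
// i.e. 31, 36, 41, 46, 51.
#include <cstdio>

int main() {
    const unsigned int sampleBootSize = 11;
    const unsigned int sampleEyeStepSize = 4;
    const unsigned int sampleLightStepSize = 5;
    const unsigned int maxPathDepth = 5; // illustrative value

    for (unsigned int depth = 1; depth <= maxPathDepth; ++depth) {
        const unsigned int offset = sampleBootSize +
                sampleEyeStepSize * maxPathDepth +
                (depth - 1) * sampleLightStepSize;
        printf("light vertex %u -> samples [%u, %u]\n",
               depth, offset, offset + sampleLightStepSize - 1);
    }
    return 0;
}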
void PathCPURenderThread::RenderFunc() {
    //SLG_LOG("[PathCPURenderEngine::" << threadIndex << "] Rendering thread started");

    //--------------------------------------------------------------------------
    // Initialization
    //--------------------------------------------------------------------------

    PathCPURenderEngine *engine = (PathCPURenderEngine *)renderEngine;
    RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
    Scene *scene = engine->renderConfig->scene;
    PerspectiveCamera *camera = scene->camera;
    Film *film = threadFilm;
    const unsigned int filmWidth = film->GetWidth();
    const unsigned int filmHeight = film->GetHeight();

    // Set up the sampler
    double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
    Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
            &metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
    const unsigned int sampleBootSize = 4;
    const unsigned int sampleStepSize = 9;
    const unsigned int sampleSize =
            sampleBootSize + // To generate the eye ray
            engine->maxPathDepth * sampleStepSize; // For each path vertex
    sampler->RequestSamples(sampleSize);

    //--------------------------------------------------------------------------
    // Trace paths
    //--------------------------------------------------------------------------

    vector<SampleResult> sampleResults(1);
    sampleResults[0].type = PER_PIXEL_NORMALIZED;
    while (!boost::this_thread::interruption_requested()) {
        float alpha = 1.f;

        Ray eyeRay;
        const float screenX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
        const float screenY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
        camera->GenerateRay(screenX, screenY, &eyeRay,
                sampler->GetSample(2), sampler->GetSample(3));

        int depth = 1;
        bool lastSpecular = true;
        float lastPdfW = 1.f;
        Spectrum radiance;
        Spectrum pathThroughput(1.f, 1.f, 1.f);
        BSDF bsdf;
        while (depth <= engine->maxPathDepth) {
            const unsigned int sampleOffset = sampleBootSize + (depth - 1) * sampleStepSize;

            RayHit eyeRayHit;
            Spectrum connectionThroughput;
            if (!scene->Intersect(device, false, sampler->GetSample(sampleOffset),
                    &eyeRay, &eyeRayHit, &bsdf, &connectionThroughput)) {
                // Nothing was hit, check the InfiniteLight
                DirectHitInfiniteLight(lastSpecular,
                        pathThroughput * connectionThroughput,
                        eyeRay.d, lastPdfW, &radiance);
                if (depth == 1)
                    alpha = 0.f;
                break;
            }
            pathThroughput *= connectionThroughput;

            // Something was hit; check if it is a light source
            if (bsdf.IsLightSource()) {
                DirectHitFiniteLight(lastSpecular, pathThroughput,
                        eyeRayHit.t, bsdf, lastPdfW, &radiance);
            }

            // Note: the pass-through check is done inside Scene::Intersect()

            //------------------------------------------------------------------
            // Direct light sampling
            //------------------------------------------------------------------
            DirectLightSampling(sampler->GetSample(sampleOffset + 1),
                    sampler->GetSample(sampleOffset + 2),
                    sampler->GetSample(sampleOffset + 3),
                    sampler->GetSample(sampleOffset + 4),
                    sampler->GetSample(sampleOffset + 5),
                    pathThroughput, bsdf, depth, &radiance);

            //------------------------------------------------------------------
            // Build the next vertex path ray
            //------------------------------------------------------------------
            Vector sampledDir;
            BSDFEvent event;
            float cosSampledDir;
            const Spectrum bsdfSample = bsdf.Sample(&sampledDir,
                    sampler->GetSample(sampleOffset + 6),
                    sampler->GetSample(sampleOffset + 7),
                    &lastPdfW, &cosSampledDir, &event);
            if (bsdfSample.Black())
                break;

            lastSpecular = ((event & SPECULAR) != 0);
            if ((depth >= engine->rrDepth) && !lastSpecular) {
                // Russian Roulette
                const float prob = Max(bsdfSample.Filter(), engine->rrImportanceCap);
                if (sampler->GetSample(sampleOffset + 8) < prob)
                    lastPdfW *= prob;
                else
                    break;
            }

            pathThroughput *= bsdfSample * (cosSampledDir / lastPdfW);
            assert(!pathThroughput.IsNaN() && !pathThroughput.IsInf());

            eyeRay = Ray(bsdf.hitPoint, sampledDir);
            ++depth;
        }
        assert(!radiance.IsNaN() && !radiance.IsInf());

        sampleResults[0].screenX = screenX;
        sampleResults[0].screenY = screenY;
        sampleResults[0].radiance = radiance;
        sampleResults[0].alpha = alpha;
        sampler->NextSample(sampleResults);

#ifdef WIN32
        // Work around Windows bad scheduling
        renderThread->yield();
#endif
    }

    delete sampler;
    delete rndGen;

    //SLG_LOG("[PathCPURenderEngine::" << threadIndex << "] Rendering thread halted");
}
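DirectHitFiniteLight() and DirectLightSampling() are not shown here; threading lastPdfW and lastSpecular through the loop only makes sense if those helpers weight BSDF-sampled light hits against direct light sampling via multiple importance sampling. A minimal standalone sketch of the usual power heuristic (Veach's beta = 2), offered as an assumption about what they apply rather than as SLG's verified code:

// Power heuristic with beta = 2: MIS weight for a sample drawn from the
// strategy with density fPdf, competing against a strategy with density
// gPdf. For specular bounces there is no competing strategy (that is what
// lastSpecular signals), so the weight is implicitly 1.
static inline float PowerHeuristic(const float fPdf, const float gPdf) {
    const float f = fPdf * fPdf;
    const float g = gPdf * gPdf;
    return f / (f + g);
}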
void NativeFilm::UpdateScreenBuffer() {
    switch (toneMapParams->GetType()) {
        case TONEMAP_LINEAR: {
            const LinearToneMapParams &tm = (LinearToneMapParams &)(*toneMapParams);
            const SamplePixel *sp = sampleFrameBuffer->GetPixels();
            Pixel *p = frameBuffer->GetPixels();
            const unsigned int pixelCount = width * height;
            const float perScreenNormalizationFactor = tm.scale / (float)statsTotalSampleCount;

            for (unsigned int i = 0; i < pixelCount; ++i) {
                const float weight = sp[i].weight;

                if (weight > 0.f) {
                    if (usePerScreenNormalization) {
                        p[i].r = Radiance2PixelFloat(sp[i].radiance.r * perScreenNormalizationFactor);
                        p[i].g = Radiance2PixelFloat(sp[i].radiance.g * perScreenNormalizationFactor);
                        p[i].b = Radiance2PixelFloat(sp[i].radiance.b * perScreenNormalizationFactor);
                    } else {
                        const float invWeight = tm.scale / weight;
                        p[i].r = Radiance2PixelFloat(sp[i].radiance.r * invWeight);
                        p[i].g = Radiance2PixelFloat(sp[i].radiance.g * invWeight);
                        p[i].b = Radiance2PixelFloat(sp[i].radiance.b * invWeight);
                    }
                } else {
                    p[i].r = 0.f;
                    p[i].g = 0.f;
                    p[i].b = 0.f;
                }
            }
            break;
        }
        case TONEMAP_REINHARD02: {
            const Reinhard02ToneMapParams &tm = (Reinhard02ToneMapParams &)(*toneMapParams);
            const float alpha = .1f;
            const float preScale = tm.preScale;
            const float postScale = tm.postScale;
            const float burn = tm.burn;
            const SamplePixel *sp = sampleFrameBuffer->GetPixels();
            Pixel *p = frameBuffer->GetPixels();
            const unsigned int pixelCount = width * height;
            const float perScreenNormalizationFactor = 1.f / (float)statsTotalSampleCount;

            // Use the frame buffer as temporary storage and calculate the
            // average luminance
            float Ywa = 0.f;
            for (unsigned int i = 0; i < pixelCount; ++i) {
                const float weight = sp[i].weight;
                Spectrum rgb = sp[i].radiance;

                if ((weight > 0.f) && !rgb.IsNaN()) {
                    if (usePerScreenNormalization)
                        rgb *= perScreenNormalizationFactor;
                    else
                        rgb /= weight;

                    // Convert to XYZ color space
                    p[i].r = 0.412453f * rgb.r + 0.357580f * rgb.g + 0.180423f * rgb.b;
                    p[i].g = 0.212671f * rgb.r + 0.715160f * rgb.g + 0.072169f * rgb.b;
                    p[i].b = 0.019334f * rgb.r + 0.119193f * rgb.g + 0.950227f * rgb.b;

                    Ywa += p[i].g;
                } else {
                    p[i].r = 0.f;
                    p[i].g = 0.f;
                    p[i].b = 0.f;
                }
            }
            Ywa /= pixelCount;

            // Avoid division by zero
            if (Ywa == 0.f)
                Ywa = 1.f;

            const float Yw = preScale * alpha * burn;
            const float invY2 = 1.f / (Yw * Yw);
            const float pScale = postScale * preScale * alpha / Ywa;

            for (unsigned int i = 0; i < pixelCount; ++i) {
                Spectrum xyz = p[i];

                const float ys = xyz.g;
                xyz *= pScale * (1.f + ys * invY2) / (1.f + ys);

                // Convert back to RGB color space
                p[i].r =  3.240479f * xyz.r - 1.537150f * xyz.g - 0.498535f * xyz.b;
                p[i].g = -0.969256f * xyz.r + 1.875991f * xyz.g + 0.041556f * xyz.b;
                p[i].b =  0.055648f * xyz.r - 0.204043f * xyz.g + 1.057311f * xyz.b;

                // Gamma correction
                p[i].r = Radiance2PixelFloat(p[i].r);
                p[i].g = Radiance2PixelFloat(p[i].g);
                p[i].b = Radiance2PixelFloat(p[i].b);
            }
            break;
        }
        default:
            assert(false);
            break;
    }
}
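Written out, the mapping the TONEMAP_REINHARD02 branch applies per pixel (read directly from the code above, with $Y$ the pixel's luminance channel and $\bar{Y}_{wa}$ the average luminance) is

\[
C_{\text{out}} = C \cdot \frac{\mathrm{postScale} \cdot \mathrm{preScale} \cdot \alpha}{\bar{Y}_{wa}} \cdot \frac{1 + Y / Y_w^{2}}{1 + Y},
\qquad Y_w = \mathrm{preScale} \cdot \alpha \cdot \mathrm{burn}.
\]

The $1/(1+Y)$ factor compresses the full dynamic range, while the $Y/Y_w^{2}$ term lets values near the burn level approach or exceed 1, which is what allows highlights to saturate.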
// Scene Methods
void Scene::Render() {
    // Allocate and initialize _sample_
    Sample *sample = new Sample(surfaceIntegrator, volumeIntegrator, this);
    // Allow integrators to do pre-processing for the scene
    surfaceIntegrator->Preprocess(this);
    volumeIntegrator->Preprocess(this);
    camera->AutoFocus(this);
    // Trace rays: the main loop
    ProgressReporter progress(sampler->TotalSamples(), "Rendering");
    while (sampler->GetNextSample(sample)) {
        // Find camera ray for _sample_
        RayDifferential ray;
        float rayWeight = camera->GenerateRay(*sample, &ray);
        // Generate ray differentials for camera ray; mark them valid only
        // if both offset rays carry positive weight
        ++(sample->imageX);
        float wt1 = camera->GenerateRay(*sample, &ray.rx);
        --(sample->imageX);
        ++(sample->imageY);
        float wt2 = camera->GenerateRay(*sample, &ray.ry);
        if (wt1 > 0 && wt2 > 0)
            ray.hasDifferentials = true;
        --(sample->imageY);
        // Evaluate radiance along camera ray
        float alpha = 1.f; // initialized so a rayWeight of 0 counts as opaque black
        Spectrum Ls = 0.f;
        if (rayWeight > 0.f)
            Ls = rayWeight * Li(ray, sample, &alpha);
        // Issue warning if unexpected radiance value returned
        if (Ls.IsNaN()) {
            Error("Not-a-number radiance value returned "
                  "for image sample. Setting to black.");
            Ls = Spectrum(0.f);
        }
        else if (Ls.y() < -1e-5) {
            Error("Negative luminance value, %g, returned "
                  "for image sample. Setting to black.", Ls.y());
            Ls = Spectrum(0.f);
        }
        else if (isinf(Ls.y())) {
            Error("Infinite luminance value returned "
                  "for image sample. Setting to black.");
            Ls = Spectrum(0.f);
        }
        // Add sample contribution to image
        camera->film->AddSample(*sample, ray, Ls, alpha);
        // Free BSDF memory from computing image sample value
        BSDF::FreeAll();
        // Report rendering progress
        static StatsCounter cameraRaysTraced("Camera", "Camera Rays Traced");
        ++cameraRaysTraced;
        progress.Update();
    }
    // Clean up after rendering and store final image
    delete sample;
    progress.Done();
    camera->film->WriteImage();
}
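The NaN/negative/infinity guards above all key off Ls.y(), the sample's luminance. A minimal sketch of such an accessor for an RGB spectrum, assuming the class stores linear RGB; the weights are the standard CIE Y row, the same ones NativeFilm::UpdateScreenBuffer() uses in its RGB-to-XYZ conversion:

// Illustrative RGB spectrum with a CIE Y (luminance) accessor; the real
// Spectrum class may store more coefficients, so treat this as a sketch.
struct RGBSpectrum {
    float c[3]; // linear RGB

    float y() const {
        return 0.212671f * c[0] + 0.715160f * c[1] + 0.072169f * c[2];
    }
};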