Example #1
	void Renderer::rayTrace(Film *film, Shape& scene, PerspectiveCamera& camera, Lights& lights) {
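		// For each pixel: map to normalized screen coordinates (y flipped),
		// trace a primary ray, accumulate unshadowed direct lighting from every
		// light and clamp the result to [0, 255].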
		int w = film->width(), h = film->height();
		IntersectResult result;
		for (int y = 0; y < h; y++) {
			float sy = 1.0f - (float)y / h;
			for (int x = 0; x < w; x++) {
				float sx = (float)x / w;
				Ray ray = camera.GenerateRay(sx, sy);
				scene.Intersect(ray, &result);
				if (result.geometry) {
					Material* pMaterial = result.geometry->material;
					Color color(0, 0, 0);
					for (int i = 0; i < lights.size(); i++) {
						Vector3dF incidence = lights[i]->incidence(result.position);
						Color c = pMaterial->Sample(ray, result.position, result.normal, incidence);
						color = color + c;
					}
					//printf("c=%f,%f,%f\n", color.r(), color.g(), color.b());
					film->set(x, y, 
						min(int(color.r() * 255), 255),
						min(int(color.g() * 255), 255),
						min(int(color.b() * 255), 255));
				}
			}
		}
	}
Example #2
void PathHybridState::Init(const PathHybridRenderThread *thread) {
	PathHybridRenderEngine *renderEngine = (PathHybridRenderEngine *)thread->renderEngine;
	Scene *scene = renderEngine->renderConfig->scene;

	depth = 1;
	lastPdfW = 1.f;
	throuput = Spectrum(1.f);

	directLightRadiance = Spectrum();

	// Initialize eye ray
	PerspectiveCamera *camera = scene->camera;
	Film *film = thread->threadFilm;
	const u_int filmWidth = film->GetWidth();
	const u_int filmHeight = film->GetHeight();

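	// Samples 0/1 select the film position; samples 2/3 are additional camera
	// samples (e.g. for depth of field).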
	sampleResults[0].screenX = std::min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
	sampleResults[0].screenY = std::min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
	camera->GenerateRay(sampleResults[0].screenX, sampleResults[0].screenY, &nextPathVertexRay,
		sampler->GetSample(2), sampler->GetSample(3));

	sampleResults[0].alpha = 1.f;
	sampleResults[0].radiance = Spectrum(0.f);
	lastSpecular = true;
}
Example #3
void testCamera() {
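	// Build a minimal film/camera/sampler setup, then generate a few stratified
	// samples and print the camera rays they produce.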

	ParamSet params;

	GaussianFilter *filter = new GaussianFilter(2, 2, 2);
	float crop[4] = { 0, 1, 0, 1 };
	ImageFilm *film = new ImageFilm(100, 100, filter, crop, "filename", false);

	Transform t = LookAt(Point(0,0,0), Point(0,0,-100), Vector(0,1,0));
	AnimatedTransform cam2world(&t, 0, &t, 0);

	//BoxFilter *filter = CreateBoxFilter(params);
	//ImageFilm *film = CreateImageFilm(params, filter);
	PerspectiveCamera *camera = CreatePerspectiveCamera(params, cam2world, film);

	bool jitter = false; //params.FindOneBool("jitter", true);
	// Initialize common sampler parameters
	int xstart, xend, ystart, yend;
	film->GetSampleExtent(&xstart, &xend, &ystart, &yend);
	int xsamp = 1;
	int ysamp = 1;
	StratifiedSampler *sampler = new StratifiedSampler(
		xstart, xend, ystart, yend,
		xsamp, ysamp,
		jitter, camera->shutterOpen, camera->shutterClose);

	RNG rng;
	Sample sample(sampler, NULL, NULL, NULL);

	int count = 0;
	while (sampler->GetMoreSamples(&sample, rng) && count < 10) {

		//sample.imageX
		printf("sample imageX: %g, imageY: %g\n", sample.imageX, sample.imageY);

		Ray ray;
		camera->GenerateRay(sample, &ray);

		print("ray.o", ray.o);
		print("ray.d", ray.d);
		printf("ray mint: %g, maxt: %g\n", ray.mint, ray.maxt);

		count++;
	}

	//CameraSample sample;

	//camera->GenerateRay(
}
Example #4
	void Renderer::renderDepth(Film *film, Shape& scene, PerspectiveCamera& camera, float maxDepth) {
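		// Visualize depth: map the hit distance to a grayscale value, white at
		// the camera and black at or beyond maxDepth.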
		int w = film->width(), h = film->height();
		IntersectResult result;
		for (int y = 0; y < h; y++) {
			float sy = 1.0f - (float)y / h;
			for (int x = 0; x < w; x++) {
				float sx = (float)x / w;
				Ray ray = camera.GenerateRay(sx, sy);
				scene.Intersect(ray, &result);
				if (result.geometry) {
					int depth = int(255.0f - min((result.distance / maxDepth) * 255.0f, 255.0f));
					film->set(x, y, depth, depth, depth);
				}
			}
		}
	}
Example #5
	void Renderer::renderNormal(Film *film, Shape& scene, PerspectiveCamera& camera, float maxDepth) {
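		// Visualize shading normals by remapping each component from [-1, 1]
		// into a color channel.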
		int w = film->width(), h = film->height();
		IntersectResult result;
		for (int y = 0; y < h; y++) {
			float sy = 1.0f - (float)y / h;
			for (int x = 0; x < w; x++) {
				float sx = (float)x / w;
				Ray ray = camera.GenerateRay(sx, sy);
				scene.Intersect(ray, &result);
				if (result.geometry) {
					Normal3dF& n = result.normal;
					film->set(x, y, 
						min(int((n.x + 1.0f) * 128.0f), 255),
						min(int((n.y + 1.0f) * 128.0f), 255),
						min(int((n.z + 1.0f) * 128.0f), 255));
				}
			}
		}
	}
Example #6
	void Renderer::rayTraceReflection(Film *film, Shape* scene, PerspectiveCamera& camera, Lights& lights, int maxReflect, int px, int py, int pw, int ph) {
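		// Render the film sub-rectangle (px, py, pw, ph); a width or height of 0
		// means the full image. Shading is delegated to rayTraceRecursive with up
		// to maxReflect bounces.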
		int w = pw, h = ph, img_width = film->width(), img_height = film->height();
		if (w == 0)
			w = img_width;
		if (h == 0)
			h = img_height;
		for (int y = py, yMax = py + h; y < yMax; y++) {
			float sy = 1.0f - (float)y / img_height;
			for (int x = px, xMax = px + w; x < xMax; x++) {
				float sx = (float)x / img_width;
				//printf("sx,sy=%f,%f\n",sx,sy);
				Ray ray = camera.GenerateRay(sx, sy);
				Color color = rayTraceRecursive(scene, ray, lights, maxReflect);
				int r = min(int(color.r() * 255), 255),
					g = min(int(color.g() * 255), 255),
					b = min(int(color.b() * 255), 255);
				//printf("[rgb] %d %d = %d %d %d\n", x, y, r, g, b);
				film->set(x, y, 
					r, g, b);
			}
		}
	}
Example #7
void LightCPURenderThread::TraceEyePath(Sampler *sampler, vector<SampleResult> *sampleResults) {
	LightCPURenderEngine *engine = (LightCPURenderEngine *)renderEngine;
	Scene *scene = engine->renderConfig->scene;
	PerspectiveCamera *camera = scene->camera;
	Film *film = threadFilm;
	const u_int filmWidth = film->GetWidth();
	const u_int filmHeight = film->GetHeight();

	// Sample offsets
	const u_int sampleBootSize = 11;
	const u_int sampleEyeStepSize = 3;
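	// Each eye path vertex consumes 3 samples: 1 for the intersection
	// (pass-through test) and 2 for BSDF sampling.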

	Ray eyeRay;
	const float screenX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
	const float screenY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
	camera->GenerateRay(screenX, screenY, &eyeRay,
		sampler->GetSample(9), sampler->GetSample(10));

	Spectrum radiance, eyePathThroughput(1.f, 1.f, 1.f);
	int depth = 1;
	while (depth <= engine->maxPathDepth) {
		const u_int sampleOffset = sampleBootSize + (depth - 1) * sampleEyeStepSize;

		RayHit eyeRayHit;
		BSDF bsdf;
		Spectrum connectionThroughput;
		const bool somethingWasHit = scene->Intersect(device, false,
				sampler->GetSample(sampleOffset), &eyeRay, &eyeRayHit, &bsdf, &connectionThroughput);
		if (!somethingWasHit) {
			// Nothing was hit, check infinite lights (including sun)
			const Spectrum throughput = eyePathThroughput * connectionThroughput;
			if (scene->envLight)
				radiance +=  throughput * scene->envLight->GetRadiance(scene, -eyeRay.d);
			if (scene->sunLight)
				radiance +=  throughput * scene->sunLight->GetRadiance(scene, -eyeRay.d);
			break;
		} else {
			// Something was hit, check if it is a light source
			if (bsdf.IsLightSource())
				radiance = eyePathThroughput * connectionThroughput * bsdf.GetEmittedRadiance(scene);
			else {
				// Check if it is a specular bounce

				float bsdfPdf;
				Vector sampledDir;
				BSDFEvent event;
				float cosSampleDir;
				const Spectrum bsdfSample = bsdf.Sample(&sampledDir,
						sampler->GetSample(sampleOffset + 1),
						sampler->GetSample(sampleOffset + 2),
						&bsdfPdf, &cosSampleDir, &event);
				if (bsdfSample.Black() || ((depth == 1) && !(event & SPECULAR)))
					break;

				// If depth = 1 and it is a specular bounce, I continue to trace the
				// eye path looking for a light source

				eyePathThroughput *= connectionThroughput * bsdfSample * (cosSampleDir / bsdfPdf);
				assert (!eyePathThroughput.IsNaN() && !eyePathThroughput.IsInf());

				eyeRay = Ray(bsdf.hitPoint, sampledDir);
			}

			++depth;
		}
	}

	// Add a sample even if it is black in order to avoid aliasing problems
	// between sampled pixel and not sampled one (in PER_PIXEL_NORMALIZED buffer)
	AddSampleResult(*sampleResults, PER_PIXEL_NORMALIZED,
			screenX, screenY, radiance, (depth == 1) ? 1.f : 0.f);
}
Example #8
void BiDirVMCPURenderThread::RenderFuncVM() {
	//SLG_LOG("[BiDirVMCPURenderThread::" << threadIndex << "] Rendering thread started");

	//--------------------------------------------------------------------------
	// Initialization
	//--------------------------------------------------------------------------

	BiDirVMCPURenderEngine *engine = (BiDirVMCPURenderEngine *)renderEngine;
	RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
	Scene *scene = engine->renderConfig->scene;
	PerspectiveCamera *camera = scene->camera;
	Film *film = threadFilm;
	const unsigned int filmWidth = film->GetWidth();
	const unsigned int filmHeight = film->GetHeight();
	pixelCount = filmWidth * filmHeight;

	// Setup the samplers
	vector<Sampler *> samplers(engine->lightPathsCount, NULL);
	const unsigned int sampleSize = 
		sampleBootSize + // To generate the initial light vertex and trace eye ray
		engine->maxLightPathDepth * sampleLightStepSize + // For each light vertex
		engine->maxEyePathDepth * sampleEyeStepSize; // For each eye vertex
	double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
	for (u_int i = 0; i < samplers.size(); ++i) {
		Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
				&metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
		sampler->RequestSamples(sampleSize);

		samplers[i] = sampler;
	}

	u_int iteration = 0;
	vector<vector<SampleResult> > samplesResults(samplers.size());
	vector<vector<PathVertexVM> > lightPathsVertices(samplers.size());
	vector<Point> lensPoints(samplers.size());
	HashGrid hashGrid;
	while (!boost::this_thread::interruption_requested()) {
		// Clear the arrays
		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			samplesResults[samplerIndex].clear();
			lightPathsVertices[samplerIndex].clear();
		}

		// Setup vertex merging
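		// The merging radius shrinks with the iteration count (radiusAlpha
		// controls the rate), following the usual progressive photon
		// mapping/VCM schedule.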
		float radius = engine->baseRadius;
		radius /= powf(float(iteration + 1), .5f * (1.f - engine->radiusAlpha));
		radius = Max(radius, DEFAULT_EPSILON_STATIC);
		const float radius2 = radius * radius;

		const float vmFactor = M_PI * radius2 * engine->lightPathsCount;
		vmNormalization = 1.f / vmFactor;

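		// etaVCM converts between vertex connection and vertex merging sampling
		// densities and drives the MIS weight factors used below.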
		const float etaVCM = vmFactor;
		misVmWeightFactor = MIS(etaVCM);
		misVcWeightFactor = MIS(1.f / etaVCM);

		//----------------------------------------------------------------------
		// Trace all light paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];

			// Sample a point on the camera lens
			if (!camera->SampleLens(sampler->GetSample(3), sampler->GetSample(4),
					&lensPoints[samplerIndex]))
				continue;

			TraceLightPath(sampler, lensPoints[samplerIndex],
					lightPathsVertices[samplerIndex], samplesResults[samplerIndex]);
		}

		//----------------------------------------------------------------------
		// Store all light path vertices in the k-NN accelerator
		//----------------------------------------------------------------------

		hashGrid.Build(lightPathsVertices, radius);

		//----------------------------------------------------------------------
		// Trace all eye paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];
			const vector<PathVertexVM> &lightPathVertices = lightPathsVertices[samplerIndex];

			PathVertexVM eyeVertex;
			SampleResult eyeSampleResult;
			eyeSampleResult.type = PER_PIXEL_NORMALIZED;
			eyeSampleResult.alpha = 1.f;

			Ray eyeRay;
			eyeSampleResult.screenX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
			eyeSampleResult.screenY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
			camera->GenerateRay(eyeSampleResult.screenX, eyeSampleResult.screenY, &eyeRay,
				sampler->GetSample(9), sampler->GetSample(10));

			eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
			eyeVertex.throughput = Spectrum(1.f, 1.f, 1.f);
			const float cosAtCamera = Dot(scene->camera->GetDir(), eyeRay.d);
			const float cameraPdfW = 1.f / (cosAtCamera * cosAtCamera * cosAtCamera *
				scene->camera->GetPixelArea());
			eyeVertex.dVCM = MIS(1.f / cameraPdfW);
			eyeVertex.dVC = 1.f;
			eyeVertex.dVM = 1.f;

			eyeVertex.depth = 1;
			while (eyeVertex.depth <= engine->maxEyePathDepth) {
				const unsigned int sampleOffset = sampleBootSize + engine->maxLightPathDepth * sampleLightStepSize +
					(eyeVertex.depth - 1) * sampleEyeStepSize;

				RayHit eyeRayHit;
				Spectrum connectionThroughput;
				if (!scene->Intersect(device, false, sampler->GetSample(sampleOffset), &eyeRay,
						&eyeRayHit, &eyeVertex.bsdf, &connectionThroughput)) {
					// Nothing was hit, look for infinitelight

					// This is a trick, you can not have a BSDF of something that has
					// not been hit. DirectHitInfiniteLight must be aware of this.
					eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
					eyeVertex.throughput *= connectionThroughput;

					DirectHitLight(false, eyeVertex, &eyeSampleResult.radiance);

					if (eyeVertex.depth == 1)
						eyeSampleResult.alpha = 0.f;
					break;
				}
				eyeVertex.throughput *= connectionThroughput;

				// Something was hit

				// Update MIS constants
				const float factor = 1.f / MIS(AbsDot(eyeVertex.bsdf.hitPoint.shadeN, eyeVertex.bsdf.hitPoint.fixedDir));
				eyeVertex.dVCM *= MIS(eyeRayHit.t * eyeRayHit.t) * factor;
				eyeVertex.dVC *= factor;
				eyeVertex.dVM *= factor;

				// Check if it is a light source
				if (eyeVertex.bsdf.IsLightSource())
					DirectHitLight(true, eyeVertex, &eyeSampleResult.radiance);

				// Note: pass-through check is done inside SceneIntersect()

				//--------------------------------------------------------------
				// Direct light sampling
				//--------------------------------------------------------------

				DirectLightSampling(sampler->GetSample(sampleOffset + 1),
						sampler->GetSample(sampleOffset + 2),
						sampler->GetSample(sampleOffset + 3),
						sampler->GetSample(sampleOffset + 4),
						sampler->GetSample(sampleOffset + 5),
						eyeVertex, &eyeSampleResult.radiance);

				if (!eyeVertex.bsdf.IsDelta()) {
					//----------------------------------------------------------
					// Connect vertex path ray with all light path vertices
					//----------------------------------------------------------

					for (vector<PathVertexVM>::const_iterator lightPathVertex = lightPathVertices.begin();
							lightPathVertex < lightPathVertices.end(); ++lightPathVertex)
						ConnectVertices(eyeVertex, *lightPathVertex, &eyeSampleResult,
								sampler->GetSample(sampleOffset + 6));

					//----------------------------------------------------------
					// Vertex Merging step
					//----------------------------------------------------------

					hashGrid.Process(this, eyeVertex, &eyeSampleResult.radiance);
				}

				//--------------------------------------------------------------
				// Build the next vertex path ray
				//--------------------------------------------------------------

				if (!Bounce(sampler, sampleOffset + 7, &eyeVertex, &eyeRay))
					break;

				++(eyeVertex.depth);
			}

			samplesResults[samplerIndex].push_back(eyeSampleResult);
		}

		//----------------------------------------------------------------------
		// Splat all samples
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
			samplers[samplerIndex]->NextSample(samplesResults[samplerIndex]);

		++iteration;

#ifdef WIN32
		// Work around Windows bad scheduling
		renderThread->yield();
#endif
	}

	for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
		delete samplers[samplerIndex];
	delete rndGen;

	//SLG_LOG("[BiDirVMCPURenderThread::" << renderThread->threadIndex << "] Rendering thread halted");
}
Example #9
int AATest() {
    printf("Starting AA Tests.\n");
	SamplerUtil::Init(false);

    
	Timer timer;	
	Image image(640, 480);
	PerspectiveCamera camera;
	camera.SetImageDimension(640, 480);
	camera.SetAspectRatio(((FLOAT)640)/480);
	camera.SetPosition(Vector3(0, 0, 10));	
	
    Sphere sphere1(Vector3(2, 0, 0), 2.5);
    Sphere sphere2(Vector3(-2, 0, 0), 2.5);

	sphere1.Init();
    sphere2.Init();

	Vector3 light(0, 10, 13);

	LinearAccel accel;
	accel.AddGeometry(&sphere1);
    accel.AddGeometry(&sphere2);
	accel.Init();
	
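    // Per-pixel counter used to record how many primary samples the
    // super-sampler generates for each pixel (visualized at the end).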
    int* superSamples = new int[640 * 480];
    memset(superSamples, 0, 640 * 480 * sizeof(int));

	timer.Start();

	Ray				ray;
	Ray             shadow;
    Intersection	intersection;
	SurfaceElement	surfel;
    
    ScanLineBucket scanLineBucket(640, 480);
    
    StochasticSuperSampler primarySampler(512);
    //GridSuperSampler primarySampler(23);
    //PrimarySampler primarySampler;
    //AdaptiveSuperSampler primarySampler;

    primarySampler.SetRenderBucket(&scanLineBucket);
    primarySampler.Reset(0, 0, 480, 640);
    primarySampler.SetPixelSize(1.8f);

    PrimarySample sample(&image);

    while (primarySampler.GetNextSample(sample)) {
        superSamples[sample.row * 640 + sample.col]++;
        camera.GenerateRay(ray, sample.xFilm, sample.yFilm);
        intersection.Reset();

		if (accel.Intersect(ray, intersection)) {
            sample.model = intersection.primitive->GetParentModel();
			surfel.Init(&intersection, &ray);

			Vector3 lightVec(light - surfel.iPoint);
			shadow.Set(surfel.iPoint, lightVec);

			if (accel.Intersect(shadow) == NO_HIT) {
                DOUBLE ln = shadow.direction.Dot(surfel.normal);
				ln = fabs(ln);
				if (ln > 0) {
                    FLOAT lnf = (FLOAT)ln;
                    if (intersection.primitive == &sphere1) {
                        sample.SetColor(Color4f(lnf, 0, 0));					    
                    } else {
                        sample.SetColor(Color4f(0, 0, lnf));
                    }
                }
			}
		} else {
            sample.model = nullptr;
            sample.SetColor(Color4f::ZERO());
        }
    }    
    printf("Time to render image: %d(ms)\n", timer.Stop());

    int minSamples = 3000;
    int maxSamples = 0;
    int totalSamples = 0;
    for (int i = 0; i < 640 * 480; i++) {
        minSamples = MIN(minSamples, superSamples[i]);
        maxSamples = MAX(maxSamples, superSamples[i]);
        totalSamples += superSamples[i];
    }
    printf("Min Samples: %d\n", minSamples);
    printf("Max Samples: %d\n", maxSamples);
    printf("Total Samples: %d\n", totalSamples);
    printf("Average samples per pixel: %f\n", (FLOAT)totalSamples / (640 * 480));
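    // Build a false-color image of the per-pixel sample density.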
    Image samples(640, 480);
    for (int row = 0; row < 480; row++) {
        for (int col = 0; col < 640; col++) {
            int s = superSamples[row * 640 + col];
            FLOAT value = (s - minSamples) / (FLOAT)(maxSamples - minSamples);
            Color4f color = UIHelper::GetColorIntensity(value);
            samples.SetPixel(color, row, col);
        }
    }

    ImageIO::Save("..\\Results\\AASamples.png", &samples);
	ImageIO::Save("..\\Results\\AARender.png", &image);

    printf("End AA Tests.\n");
    
    return 0;
}
Example #10
void PathCPURenderThread::RenderFunc() {
	//SLG_LOG("[PathCPURenderEngine::" << threadIndex << "] Rendering thread started");

	//--------------------------------------------------------------------------
	// Initialization
	//--------------------------------------------------------------------------

	PathCPURenderEngine *engine = (PathCPURenderEngine *)renderEngine;
	RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
	Scene *scene = engine->renderConfig->scene;
	PerspectiveCamera *camera = scene->camera;
	Film * film = threadFilm;
	const unsigned int filmWidth = film->GetWidth();
	const unsigned int filmHeight = film->GetHeight();

	// Setup the sampler
	double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
	Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
			&metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
	const unsigned int sampleBootSize = 4;
	const unsigned int sampleStepSize = 9;
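	// 4 bootstrap samples: film x/y plus two camera (lens) samples. Each path
	// vertex consumes 9 samples: 1 for the intersection (pass-through), 5 for
	// direct light sampling, 2 for BSDF sampling and 1 for Russian roulette.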
	const unsigned int sampleSize = 
		sampleBootSize + // To generate eye ray
		engine->maxPathDepth * sampleStepSize; // For each path vertex
	sampler->RequestSamples(sampleSize);

	//--------------------------------------------------------------------------
	// Trace paths
	//--------------------------------------------------------------------------

	vector<SampleResult> sampleResults(1);
	sampleResults[0].type = PER_PIXEL_NORMALIZED;
	while (!boost::this_thread::interruption_requested()) {
		float alpha = 1.f;

		Ray eyeRay;
		const float screenX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
		const float screenY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
		camera->GenerateRay(screenX, screenY, &eyeRay,
			sampler->GetSample(2), sampler->GetSample(3));

		int depth = 1;
		bool lastSpecular = true;
		float lastPdfW = 1.f;
		Spectrum radiance;
		Spectrum pathThrouput(1.f, 1.f, 1.f);
		BSDF bsdf;
		while (depth <= engine->maxPathDepth) {
			const unsigned int sampleOffset = sampleBootSize + (depth - 1) * sampleStepSize;

			RayHit eyeRayHit;
			Spectrum connectionThroughput;
			if (!scene->Intersect(device, false, sampler->GetSample(sampleOffset),
					&eyeRay, &eyeRayHit, &bsdf, &connectionThroughput)) {
				// Nothing was hit, look for infinitelight
				DirectHitInfiniteLight(lastSpecular, pathThrouput * connectionThroughput, eyeRay.d,
						lastPdfW, &radiance);

				if (depth == 1)
					alpha = 0.f;
				break;
			}
			pathThrouput *= connectionThroughput;

			// Something was hit

			// Check if it is a light source
			if (bsdf.IsLightSource()) {
				DirectHitFiniteLight(lastSpecular, pathThrouput,
						eyeRayHit.t, bsdf, lastPdfW, &radiance);
			}

			// Note: pass-through check is done inside SceneIntersect()

			//------------------------------------------------------------------
			// Direct light sampling
			//------------------------------------------------------------------

			DirectLightSampling(sampler->GetSample(sampleOffset + 1),
					sampler->GetSample(sampleOffset + 2),
					sampler->GetSample(sampleOffset + 3),
					sampler->GetSample(sampleOffset + 4),
					sampler->GetSample(sampleOffset + 5),
					pathThrouput, bsdf, depth, &radiance);

			//------------------------------------------------------------------
			// Build the next vertex path ray
			//------------------------------------------------------------------

			Vector sampledDir;
			BSDFEvent event;
			float cosSampledDir;
			const Spectrum bsdfSample = bsdf.Sample(&sampledDir,
					sampler->GetSample(sampleOffset + 6),
					sampler->GetSample(sampleOffset + 7),
					&lastPdfW, &cosSampledDir, &event);
			if (bsdfSample.Black())
				break;

			lastSpecular = ((event & SPECULAR) != 0);

			if ((depth >= engine->rrDepth) && !lastSpecular) {
				// Russian Roulette
				const float prob = Max(bsdfSample.Filter(), engine->rrImportanceCap);
				if (sampler->GetSample(sampleOffset + 8) < prob)
					lastPdfW *= prob;
				else
					break;
			}

			pathThrouput *= bsdfSample * (cosSampledDir / lastPdfW);
			assert (!pathThrouput.IsNaN() && !pathThrouput.IsInf());

			eyeRay = Ray(bsdf.hitPoint, sampledDir);
			++depth;
		}

		assert (!radiance.IsNaN() && !radiance.IsInf());

		sampleResults[0].screenX = screenX;
		sampleResults[0].screenY = screenY;
		sampleResults[0].radiance = radiance;
		sampleResults[0].alpha = alpha;
		sampler->NextSample(sampleResults);

#ifdef WIN32
		// Work around Windows bad scheduling
		renderThread->yield();
#endif
	}

	delete sampler;
	delete rndGen;

	//SLG_LOG("[PathCPURenderEngine::" << threadIndex << "] Rendering thread halted");
}