Example #1
void BiDirVMCPURenderThread::RenderFuncVM() {
	//SLG_LOG("[BiDirVMCPURenderThread::" << threadIndex << "] Rendering thread started");

	//--------------------------------------------------------------------------
	// Initialization
	//--------------------------------------------------------------------------

	BiDirVMCPURenderEngine *engine = (BiDirVMCPURenderEngine *)renderEngine;
	RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
	Scene *scene = engine->renderConfig->scene;
	PerspectiveCamera *camera = scene->camera;
	Film *film = threadFilm;
	const unsigned int filmWidth = film->GetWidth();
	const unsigned int filmHeight = film->GetHeight();
	pixelCount = filmWidth * filmHeight;

	// Setup the samplers
	vector<Sampler *> samplers(engine->lightPathsCount, NULL);
	const unsigned int sampleSize = 
		sampleBootSize + // To generate the initial light vertex and trace eye ray
		engine->maxLightPathDepth * sampleLightStepSize + // For each light vertex
		engine->maxEyePathDepth * sampleEyeStepSize; // For each eye vertex
	double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
	for (u_int i = 0; i < samplers.size(); ++i) {
		Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
				&metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
		sampler->RequestSamples(sampleSize);

		samplers[i] = sampler;
	}

	u_int iteration = 0;
	vector<vector<SampleResult> > samplesResults(samplers.size());
	vector<vector<PathVertexVM> > lightPathsVertices(samplers.size());
	vector<Point> lensPoints(samplers.size());
	HashGrid hashGrid;
	while (!boost::this_thread::interruption_requested()) {
		// Clear the arrays
		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			samplesResults[samplerIndex].clear();
			lightPathsVertices[samplerIndex].clear();
		}

		// Setup vertex merging
		float radius = engine->baseRadius;
		radius /= powf(float(iteration + 1), .5f * (1.f - engine->radiusAlpha));
		radius = Max(radius, DEFAULT_EPSILON_STATIC);
		const float radius2 = radius * radius;

		const float vmFactor = M_PI * radius2 * engine->lightPathsCount;
		vmNormalization = 1.f / vmFactor;

		const float etaVCM = vmFactor;
		misVmWeightFactor = MIS(etaVCM);
		misVcWeightFactor = MIS(1.f / etaVCM);
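		// etaVCM = (Pi * radius^2) * lightPathsCount is the ratio between the
		// vertex merging and vertex connection acceptance areas; MIS(etaVCM) and
		// MIS(1 / etaVCM) are the relative weights used below when the two
		// estimators are combined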

		//----------------------------------------------------------------------
		// Trace all light paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];

			// Sample a point on the camera lens
			if (!camera->SampleLens(sampler->GetSample(3), sampler->GetSample(4),
					&lensPoints[samplerIndex]))
				continue;

			TraceLightPath(sampler, lensPoints[samplerIndex],
					lightPathsVertices[samplerIndex], samplesResults[samplerIndex]);
		}

		//----------------------------------------------------------------------
		// Store all light path vertices in the k-NN accelerator
		//----------------------------------------------------------------------

		hashGrid.Build(lightPathsVertices, radius);

		//----------------------------------------------------------------------
		// Trace all eye paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];
			const vector<PathVertexVM> &lightPathVertices = lightPathsVertices[samplerIndex];

			PathVertexVM eyeVertex;
			SampleResult eyeSampleResult;
			eyeSampleResult.type = PER_PIXEL_NORMALIZED;
			eyeSampleResult.alpha = 1.f;

			Ray eyeRay;
			eyeSampleResult.screenX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
			eyeSampleResult.screenY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
			camera->GenerateRay(eyeSampleResult.screenX, eyeSampleResult.screenY, &eyeRay,
				sampler->GetSample(9), sampler->GetSample(10));

			eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
			eyeVertex.throughput = Spectrum(1.f, 1.f, 1.f);
			const float cosAtCamera = Dot(scene->camera->GetDir(), eyeRay.d);
			const float cameraPdfW = 1.f / (cosAtCamera * cosAtCamera * cosAtCamera *
				scene->camera->GetPixelArea());
			eyeVertex.dVCM = MIS(1.f / cameraPdfW);
			eyeVertex.dVC = 1.f;
			eyeVertex.dVM = 1.f;
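			// dVCM, dVC and dVM are the recursive partial MIS weights carried along
			// the eye path (VCM-style bookkeeping); dVCM starts from the camera pdf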

			eyeVertex.depth = 1;
			while (eyeVertex.depth <= engine->maxEyePathDepth) {
				const unsigned int sampleOffset = sampleBootSize + engine->maxLightPathDepth * sampleLightStepSize +
					(eyeVertex.depth - 1) * sampleEyeStepSize;

				RayHit eyeRayHit;
				Spectrum connectionThroughput;
				if (!scene->Intersect(device, false, sampler->GetSample(sampleOffset), &eyeRay,
						&eyeRayHit, &eyeVertex.bsdf, &connectionThroughput)) {
					// Nothing was hit, look for infinitelight

					// This is a trick: you cannot have a BSDF for something that has
					// not been hit. DirectHitInfiniteLight must be aware of this.
					eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
					eyeVertex.throughput *= connectionThroughput;

					DirectHitLight(false, eyeVertex, &eyeSampleResult.radiance);

					if (eyeVertex.depth == 1)
						eyeSampleResult.alpha = 0.f;
					break;
				}
				eyeVertex.throughput *= connectionThroughput;

				// Something was hit

				// Update MIS constants
				const float factor = 1.f / MIS(AbsDot(eyeVertex.bsdf.hitPoint.shadeN, eyeVertex.bsdf.hitPoint.fixedDir));
				eyeVertex.dVCM *= MIS(eyeRayHit.t * eyeRayHit.t) * factor;
				eyeVertex.dVC *= factor;
				eyeVertex.dVM *= factor;
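				// the squared ray length and the cosine at the hit point convert the
				// solid angle pdfs stored in the partial weights to the area measure
				// at the new vertex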

				// Check if it is a light source
				if (eyeVertex.bsdf.IsLightSource())
					DirectHitLight(true, eyeVertex, &eyeSampleResult.radiance);

				// Note: pass-through check is done inside SceneIntersect()

				//--------------------------------------------------------------
				// Direct light sampling
				//--------------------------------------------------------------

				DirectLightSampling(sampler->GetSample(sampleOffset + 1),
						sampler->GetSample(sampleOffset + 2),
						sampler->GetSample(sampleOffset + 3),
						sampler->GetSample(sampleOffset + 4),
						sampler->GetSample(sampleOffset + 5),
						eyeVertex, &eyeSampleResult.radiance);

				if (!eyeVertex.bsdf.IsDelta()) {
					//----------------------------------------------------------
					// Connect vertex path ray with all light path vertices
					//----------------------------------------------------------

					for (vector<PathVertexVM>::const_iterator lightPathVertex = lightPathVertices.begin();
							lightPathVertex < lightPathVertices.end(); ++lightPathVertex)
						ConnectVertices(eyeVertex, *lightPathVertex, &eyeSampleResult,
								sampler->GetSample(sampleOffset + 6));

					//----------------------------------------------------------
					// Vertex Merging step
					//----------------------------------------------------------

					hashGrid.Process(this, eyeVertex, &eyeSampleResult.radiance);
				}

				//--------------------------------------------------------------
				// Build the next vertex path ray
				//--------------------------------------------------------------

				if (!Bounce(sampler, sampleOffset + 7, &eyeVertex, &eyeRay))
					break;

				++(eyeVertex.depth);
			}

			samplesResults[samplerIndex].push_back(eyeSampleResult);
		}

		//----------------------------------------------------------------------
		// Splat all samples
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
			samplers[samplerIndex]->NextSample(samplesResults[samplerIndex]);

		++iteration;

#ifdef WIN32
		// Work around Windows bad scheduling
		renderThread->yield();
#endif
	}

	for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
		delete samplers[samplerIndex];
	delete rndGen;

	//SLG_LOG("[BiDirVMCPURenderThread::" << renderThread->threadIndex << "] Rendering thread halted");
}
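
A side note on the "Setup vertex merging" block above: the radius is shrunk every iteration with radius /= powf(float(iteration + 1), .5f * (1.f - engine->radiusAlpha)), the usual progressive reduction used by VCM-style vertex merging. The following standalone sketch (not part of the renderer; the baseRadius and radiusAlpha values are hypothetical, chosen only for illustration) just prints how that schedule behaves:

#include <cmath>
#include <cstdio>

int main() {
	const float baseRadius = 1.f;   // hypothetical value, for illustration only
	const float radiusAlpha = .7f;  // hypothetical value, for illustration only
	for (unsigned int iteration = 0; iteration < 4; ++iteration) {
		float radius = baseRadius;
		// same per-iteration reduction as in RenderFuncVM() above
		radius /= std::pow(float(iteration + 1), .5f * (1.f - radiusAlpha));
		std::printf("iteration %u: radius = %f\n", iteration, radius);
	}
	return 0;
}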
Example #2
void BiDirVMCPURenderThread::RenderFuncVM() {
	//SLG_LOG("[BiDirVMCPURenderThread::" << threadIndex << "] Rendering thread started");

	//--------------------------------------------------------------------------
	// Initialization
	//--------------------------------------------------------------------------

	BiDirVMCPURenderEngine *engine = (BiDirVMCPURenderEngine *)renderEngine;
	RandomGenerator *rndGen = new RandomGenerator(engine->seedBase + threadIndex);
	Scene *scene = engine->renderConfig->scene;
	Camera *camera = scene->camera;
	Film *film = threadFilm;
	const u_int filmWidth = film->GetWidth();
	const u_int filmHeight = film->GetHeight();
	pixelCount = filmWidth * filmHeight;

	// Setup the samplers
	vector<Sampler *> samplers(engine->lightPathsCount, NULL);
	const u_int sampleSize = 
		sampleBootSizeVM + // To generate the initial light vertex and trace eye ray
		engine->maxLightPathDepth * sampleLightStepSize + // For each light vertex
		engine->maxEyePathDepth * sampleEyeStepSize; // For each eye vertex
	// metropolisSharedTotalLuminance and metropolisSharedSampleCount are
	// initialized inside MetropolisSampler::RequestSamples()
	double metropolisSharedTotalLuminance, metropolisSharedSampleCount;
	for (u_int i = 0; i < samplers.size(); ++i) {
		Sampler *sampler = engine->renderConfig->AllocSampler(rndGen, film,
				&metropolisSharedTotalLuminance, &metropolisSharedSampleCount);
		sampler->RequestSamples(sampleSize);

		samplers[i] = sampler;
	}

	u_int iteration = 0;
	vector<vector<SampleResult> > samplesResults(samplers.size());
	vector<vector<PathVertexVM> > lightPathsVertices(samplers.size());
	vector<Point> lensPoints(samplers.size());
	HashGrid hashGrid;
	const u_int haltDebug = engine->renderConfig->GetProperty("batch.haltdebug").Get<u_int>();

	for(u_int steps = 0; !boost::this_thread::interruption_requested(); ++steps) {
		// Clear the arrays
		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			samplesResults[samplerIndex].clear();
			lightPathsVertices[samplerIndex].clear();
		}

		// Setup vertex merging
		float radius = engine->baseRadius;
		radius /= powf(float(iteration + 1), .5f * (1.f - engine->radiusAlpha));
		radius = Max(radius, DEFAULT_EPSILON_STATIC);
		const float radius2 = radius * radius;

		const float vmFactor = M_PI * radius2 * engine->lightPathsCount;
		vmNormalization = 1.f / vmFactor;

		const float etaVCM = vmFactor;
		misVmWeightFactor = MIS(etaVCM);
		misVcWeightFactor = MIS(1.f / etaVCM);

		// Using the same time for all rays in the same pass is required by the
		// current implementation (i.e. I can not mix paths with different
		// times). However this is detrimental for the Metropolis sampler.
		const float time = rndGen->floatValue();

		//----------------------------------------------------------------------
		// Trace all light paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];

			// Sample a point on the camera lens
			if (!camera->SampleLens(time, sampler->GetSample(3), sampler->GetSample(4),
					&lensPoints[samplerIndex]))
				continue;

			TraceLightPath(time, sampler, lensPoints[samplerIndex],
					lightPathsVertices[samplerIndex], samplesResults[samplerIndex]);
		}

		//----------------------------------------------------------------------
		// Store all light path vertices in the k-NN accelerator
		//----------------------------------------------------------------------

		hashGrid.Build(lightPathsVertices, radius);

		//cout << "==========================================\n";
		//cout << "Iteration: " << iteration << "  Paths: " << engine->lightPathsCount << "  Light path vertices: "<< hashGrid.GetVertexCount() <<"\n";

		//----------------------------------------------------------------------
		// Trace all eye paths
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex) {
			Sampler *sampler = samplers[samplerIndex];

			PathVertexVM eyeVertex;
			SampleResult eyeSampleResult(Film::RADIANCE_PER_PIXEL_NORMALIZED | Film::ALPHA, 1);
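			// the trailing 1 is the radiance group count: only
			// radiancePerPixelNormalized[0] is filled below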
			eyeSampleResult.alpha = 1.f;

			Ray eyeRay;
			eyeSampleResult.filmX = min(sampler->GetSample(0) * filmWidth, (float)(filmWidth - 1));
			eyeSampleResult.filmY = min(sampler->GetSample(1) * filmHeight, (float)(filmHeight - 1));
			camera->GenerateRay(eyeSampleResult.filmX, eyeSampleResult.filmY, &eyeRay,
				sampler->GetSample(9), sampler->GetSample(10), time);

			eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
			eyeVertex.throughput = Spectrum(1.f);
			const float cosAtCamera = Dot(scene->camera->GetDir(), eyeRay.d);
			const float cameraPdfW = 1.f / (cosAtCamera * cosAtCamera * cosAtCamera *
				scene->camera->GetPixelArea());
			eyeVertex.dVCM = MIS(1.f / cameraPdfW);
			eyeVertex.dVC = 1.f;
			eyeVertex.dVM = 1.f;

			eyeVertex.depth = 1;
			while (eyeVertex.depth <= engine->maxEyePathDepth) {
				const u_int sampleOffset = sampleBootSizeVM + engine->maxLightPathDepth * sampleLightStepSize +
					(eyeVertex.depth - 1) * sampleEyeStepSize;

				RayHit eyeRayHit;
				Spectrum connectionThroughput, connectEmission;
				const bool hit = scene->Intersect(device, false,
						&eyeVertex.volInfo, sampler->GetSample(sampleOffset),
						&eyeRay, &eyeRayHit, &eyeVertex.bsdf,
						&connectionThroughput, NULL, NULL, &connectEmission);
				// I account for volume emission only with path tracing (i.e. here and
				// not in any other place)
				eyeSampleResult.radiancePerPixelNormalized[0] += connectEmission;

				if (!hit) {
					// Nothing was hit, look for infinitelight

					// This is a trick: you cannot have a BSDF for something that has
					// not been hit. DirectHitInfiniteLight must be aware of this.
					eyeVertex.bsdf.hitPoint.fixedDir = -eyeRay.d;
					eyeVertex.throughput *= connectionThroughput;

					DirectHitLight(false, eyeVertex, &eyeSampleResult.radiancePerPixelNormalized[0]);

					if (eyeVertex.depth == 1)
						eyeSampleResult.alpha = 0.f;
					break;
				}
				eyeVertex.throughput *= connectionThroughput;

				// Something was hit

				// Update MIS constants
				const float factor = 1.f / MIS(AbsDot(eyeVertex.bsdf.hitPoint.shadeN, eyeVertex.bsdf.hitPoint.fixedDir));
				eyeVertex.dVCM *= MIS(eyeRayHit.t * eyeRayHit.t) * factor;
				eyeVertex.dVC *= factor;
				eyeVertex.dVM *= factor;

				// Check if it is a light source
				if (eyeVertex.bsdf.IsLightSource())
					DirectHitLight(true, eyeVertex, &eyeSampleResult.radiancePerPixelNormalized[0]);

				// Note: pass-through check is done inside Scene::Intersect()

				//--------------------------------------------------------------
				// Direct light sampling
				//--------------------------------------------------------------

				DirectLightSampling(time,
						sampler->GetSample(sampleOffset + 1),
						sampler->GetSample(sampleOffset + 2),
						sampler->GetSample(sampleOffset + 3),
						sampler->GetSample(sampleOffset + 4),
						sampler->GetSample(sampleOffset + 5),
						eyeVertex, &eyeSampleResult.radiancePerPixelNormalized[0]);

				if (!eyeVertex.bsdf.IsDelta()) {
					//----------------------------------------------------------
					// Connect vertex path ray with all light path vertices
					//----------------------------------------------------------
			
					const vector<PathVertexVM> &lightPathVertices = lightPathsVertices[samplerIndex];
					for (vector<PathVertexVM>::const_iterator lightPathVertex = lightPathVertices.begin();
							lightPathVertex < lightPathVertices.end(); ++lightPathVertex)
						ConnectVertices(time,
								eyeVertex, *lightPathVertex, &eyeSampleResult,
								sampler->GetSample(sampleOffset + 6));

					//----------------------------------------------------------
					// Vertex Merging step
					//----------------------------------------------------------

					hashGrid.Process(this, eyeVertex, &eyeSampleResult.radiancePerPixelNormalized[0]);
				}

				//--------------------------------------------------------------
				// Build the next vertex path ray
				//--------------------------------------------------------------

				if (!Bounce(time, sampler, sampleOffset + 7, &eyeVertex, &eyeRay))
					break;
			}

			samplesResults[samplerIndex].push_back(eyeSampleResult);
		}

		//----------------------------------------------------------------------
		// Splat all samples
		//----------------------------------------------------------------------

		for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
			samplers[samplerIndex]->NextSample(samplesResults[samplerIndex]);

		++iteration;

#ifdef WIN32
		// Work around Windows bad scheduling
		renderThread->yield();
#endif

		//hashGrid.PrintStatistics();

		if ((haltDebug > 0u) && (steps >= haltDebug))
			break;
	}

	for (u_int samplerIndex = 0; samplerIndex < samplers.size(); ++samplerIndex)
		delete samplers[samplerIndex];
	delete rndGen;

	//SLG_LOG("[BiDirVMCPURenderThread::" << renderThread->threadIndex << "] Rendering thread halted");
}
Example #3
// finds the loops around a connected component as polygons
void ComponentLoops(const MiscLib::Vector< int > &componentImg, size_t uextent,
	size_t vextent, int label, bool uwrap, bool vwrap,
	MiscLib::Vector< MiscLib::Vector< GfxTL::VectorXD< 2, size_t > > > *polys)
{
	typedef GfxTL::VectorXD< 2, size_t > Vec2;
	// find first point of component
	size_t firsti = 0;
	int x, y, prevx, prevy;
	// the corners of our pixels will be the vertices of our polygons
	// (x, y) is the upper left corner of the pixel y * uextent + x
	HashGrid< bool, 4 > edges;
	unsigned int edgesExtent[] = { uextent + 1, vextent + 1, 3, 3 };
	edges.Extent(edgesExtent);
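	// each entry marks an oriented edge: the first two indices are the vertex the
	// edge ends at, the last two are the offset back to its start vertex shifted
	// into {0, 1, 2} (cf. edges[x][y][prevx + 1][prevy + 1] below)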
	bool prevPixelWasWhite = true;
	do
	{
		// find the first edge in the polygon
		// edges are oriented so that the "black" pixels are on the right
		// black pixels are pixels == label
		for(; firsti < componentImg.size(); ++firsti)
		{
			if(prevPixelWasWhite && componentImg[firsti] == label)
			{
				prevPixelWasWhite = false;
				x = firsti % uextent;
				y = firsti / uextent;
				break;
			}
			prevPixelWasWhite = componentImg[firsti] != label;
		}
		if(firsti >= componentImg.size()) // unable to find a pixel -> good bye
		{
			// if there is a uwrap, then the last row could be an outer loop
			// this outer loop could be missed if all pixels in the last
			// row are black
			// to find that out we spawn another trial at the first
			// pixel in the last row (if it is black)
			// if the loop has already been detected, then this
			// edge should already be in edges
			if(!uwrap)
				break;
			if(componentImg[(vextent - 1) * uextent] == label)
			{
				x = 0;
				y = vextent - 1;
			}
		}
		MiscLib::Vector< Vec2 > poly;
		// we initialize the path with an oriented edge
		// since the black pixel is on the right the edge goes from
		// bottom to top, i.e. from (x, y + 1) to (x, y)
		if((x > 0 && (size_t)y < vextent - 1)
			|| (!uwrap && !vwrap) || (vwrap && !uwrap && y == 0))
		{
			// on the left of pixel
			// check if edge was visited already
			unsigned int edgeIndex[] = { x, y, 1, 2 };
			if(edges.find(edgeIndex))
				continue;
			prevx = 0;
			prevy = 1;
		}
		else if(uwrap && !vwrap && x == 0 && (size_t)y != vextent - 1)
		{
			size_t dx, dy;
			if(!IsEdge(componentImg, uextent, vextent, label, uwrap, vwrap,
				x, y, 1, 0, &dx, &dy))
				continue;
			// check if edge was visited already
			unsigned int edgeIndex[] = { x + 1, y, 0, 1 };
			if(edges.find(edgeIndex))
				continue;
			// on top of pixel
			prevx = -1;
			prevy = 0;
			++x;
		}
		else if(uwrap && !vwrap && x == 0 && (size_t)y == vextent - 1)
		{
			size_t dx, dy;
			if(!IsEdge(componentImg, uextent, vextent, label, uwrap, vwrap,
				x + 1, y + 1, -1, 0, &dx, &dy))
				continue;
			// on bottom of pixel
			// check if edge was visited already
			unsigned int edgeIndex[] = { x + 1, y + 1, 0, 1 };
			if(edges.find(edgeIndex))
				continue;
			prevx = -1;
			prevy = 0;
			++y;
		}
		else if(!uwrap && vwrap && (size_t)x == uextent - 1)
		{
			// on right of pixel
			size_t dx, dy;
			if(!IsEdge(componentImg, uextent, vextent, label, uwrap, vwrap,
				x + 1, y + 1, 0, -1, &dx, &dy))
				continue;
			// on bottom of pixel
			// check if edge was visited already
			unsigned int edgeIndex[] = { x + 1, y + 1, 1, 0 };
			if(edges.find(edgeIndex))
				continue;
			prevx = 0;
			prevy = 1;
			++y;
		}
		else
			continue; // we are unable to start a loop at this position
		poly.push_back(Vec2(x + prevx, y + prevy));
		edges[x][y][prevx + 1][prevy + 1] = true;
		do
		{
			poly.push_back(Vec2(x, y));
			// check the four neighbors of (x, y) from left to right
			// starting from where we came from
			// we take the first edge that we encounter
			size_t nextx, nexty;
			size_t checkEdge;
			for(checkEdge = 0; checkEdge < 3; ++checkEdge)
			{
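				// rotate (prevx, prevy) by 90 degrees to get the next candidate direction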
				std::swap(prevx, prevy);
				prevx *= -1;
				if(IsEdge(componentImg, uextent, vextent, label, uwrap, vwrap,
					x, y, prevx, prevy, &nextx, &nexty))
					break;
			}
			if(checkEdge > 3)
				return;
			x = nextx;
			y = nexty;
			prevx = -prevx;
			prevy = -prevy;
			edges[x][y][prevx + 1][prevy + 1] = true;
		}
		while(poly[0] != Vec2(x, y));
		polys->push_back(poly);
	}
	while(firsti < componentImg.size());
#ifdef _DEBUG
	static int fname_int = 0;
	std::ostringstream fn;
	fn << "ComponentLoopsInput" << fname_int << ".txt";
	std::ofstream file;
	file.open(fn.str().c_str(), std::ios::out);
	for(size_t j = 0; j < vextent; ++j)
	{
		for(size_t i = 0; i < uextent; ++i)
			file /*<< std::setw(3)*/ << componentImg[j * uextent + i]/* << " "*/;
		file << std::endl;
	}
	file.close();
	MiscLib::Vector< int > loopsImg((uextent + 1) * (vextent + 1), 0);
	std::ostringstream fn2;
	fn2 << "ComponentLoopsOutput" << fname_int++ << ".txt";
	for(size_t i = 0; i < polys->size(); ++i)
		for(size_t j = 0; j < (*polys)[i].size(); ++j)
			loopsImg[(*polys)[i][j][1] * (uextent + 1) + (*polys)[i][j][0]] = i + 1;
	file.open(fn2.str().c_str(), std::ios::out);
	for(size_t j = 0; j < vextent + 1; ++j)
	{
		for(size_t i = 0; i < uextent + 1; ++i)
			file /*<< std::setw(3)*/ << loopsImg[j * (uextent + 1) + i]/* << " "*/;
		file << std::endl;
	}
	file.close();
#endif
}
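
The HashGrid< bool, 4 > in ComponentLoops above is used only as a visited-set for oriented edges: edges[x][y][prevx + 1][prevy + 1] = true marks an edge and edges.find(edgeIndex) tests it. As a rough, hypothetical stand-in (this is not the library's HashGrid, just a sketch of the same bookkeeping), the keying scheme could be written with an ordinary std::set:

#include <array>
#include <cstddef>
#include <set>

// Hypothetical stand-in for the visited-edge bookkeeping only.
struct VisitedEdges {
	std::set< std::array< size_t, 4 > > visited;

	// Mark the oriented edge ending at vertex (x, y) and coming from the
	// neighbour offset (dx, dy) in {-1, 0, 1}, shifted into {0, 1, 2}.
	void Mark(size_t x, size_t y, int dx, int dy) {
		visited.insert({ x, y, size_t(dx + 1), size_t(dy + 1) });
	}

	// Check whether that oriented edge was already walked.
	bool Contains(size_t x, size_t y, int dx, int dy) const {
		return visited.count({ x, y, size_t(dx + 1), size_t(dy + 1) }) != 0;
	}
};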