Example #1
vector<vec3f> PhotonMap::renderPixels(const Camera& camera){
	uint width = camera.width, height = camera.height;
	std::vector<vec3f> pixelColors(width * height, vec3f(0,0,0));
		
	omp_init_lock(&surfaceHashGridLock);
	omp_init_lock(&volumeHashGridLock);
	omp_init_lock(&debugPrintLock);

	//std::vector<int> pixelMaps(pixelColors.size(), 0);

	preprocessEmissionSampler();
		
	mRadius = mBaseRadius;

	clock_t startTime = clock();

	for(uint s = 0; s < spp; s++){
		std::cout << "iteration : " << s << std::endl;
		
		std::vector<vec3f> oneIterColors(pixelColors.size(), vec3f(0,0,0));
#ifdef PPM
		//if (renderer->scene.getTotalVolume() > 1e-6f)
		if (true)	// always take the volumetric (ray marching) branch for now; see the commented condition above
		{
			rayMarching = true;
			mRadius = MAX(mBaseRadius * powf(powf(s+1 , mAlpha-1) , 1.f / 3.f) , EPSILON);
		}
		else
		{
			rayMarching = false;
			mRadius = MAX(mBaseRadius * sqrt(powf(s+1, mAlpha-1)), EPSILON);
		}
#endif
		std::vector<Path*> pixelLightPaths(mPhotonsNum, NULL);
		std::vector<LightPoint> surfaceLightVertices(0);
		std::vector<LightPoint> volumeLightVertices(0);

		surfaceHashGrid.Reserve(pixelColors.size());
		volumeHashGrid.Reserve(pixelColors.size());

		// step 1: sample light paths and build the range-search structures independently
		// for surface photons and volume photons
#pragma omp parallel for
		for(int p = 0; p < mPhotonsNum; p++){
			Ray lightRay = genEmissiveSurfaceSample(true , false);
			pixelLightPaths[p] = new Path;
			Path &lightPath = *pixelLightPaths[p];
			samplePath(lightPath, lightRay);
			for(int i = 1; i < lightPath.size(); i++){
				// stop once the path hits an emissive object: the light source itself is not treated as reflective
				if(lightPath[i].contactObject && lightPath[i].contactObject->emissive())
					break;
				// only store photons at non-specular (non-delta) vertices
				if(lightPath[i].directionSampleType == Ray::DEFINITE)
					continue;
				LightPoint lightPoint;
				lightPoint.position = lightPath[i].origin;
				lightPoint.indexInThePath = i;
				lightPoint.pathThePointIn = &lightPath;
				lightPoint.photonType = lightPath[i].photonType;
				if(lightPoint.photonType == Ray::OUTVOL){
					omp_set_lock(&surfaceHashGridLock);
					surfaceLightVertices.push_back(lightPoint);
					omp_unset_lock(&surfaceHashGridLock);
				}
				if(lightPoint.photonType == Ray::INVOL){
					omp_set_lock(&volumeHashGridLock);
					volumeLightVertices.push_back(lightPoint);
					omp_unset_lock(&volumeHashGridLock);
				}
			}
		}
		std::cout<< "vol vertices= " << volumeLightVertices.size() << " sur vertices= " << surfaceLightVertices.size() << std::endl;
			
		surfaceHashGrid.Build(surfaceLightVertices, mRadius);
		volumeHashGrid.Build(volumeLightVertices, mRadius);

		std::cout<< "finish building hashgrid" << std::endl;

		// step2: calculate pixel colors by progressive photon mapping
#pragma omp parallel for
		for(int p = 0; p < pixelColors.size(); p++){
			Path eyePath;
			if (rayMarching)
				sampleMergePath(eyePath, camera.generateRay(p), 0);
			else
				samplePath(eyePath, camera.generateRay(p));

			//fprintf(fp , "===================\n");
			//for (int i = 0; i < eyePath.size(); i++)
			//{
			//	fprintf(fp , "l=%d, bsdf=(%.8f,%.8f,%.8f), originPdf=%.8f, dirPdf=%.8f\n" , i , eyePath[i].color.x ,
			//		eyePath[i].color.y , eyePath[i].color.z , eyePath[i].originProb , eyePath[i].directionProb);
			//}

			/*if(eyePath[1].contactObj && eyePath[1].contactObj->anisotropic()){
				pixelMaps[p] = 1;
			}*/
			throughputByDensityEstimation(oneIterColors[p], eyePath, surfaceLightVertices, volumeLightVertices);
		}
		/*std::ofstream fout(engine->renderer->name + engine->scene.name+"pixelMap.txt");
		for(int p = 0; p < pixelMaps.size(); p++)
			fout << pixelMaps[p] << ' ' ;
		fout << std::endl;
		fout.close();*/

		std::cout << "calculation done" << std::endl;

		for(uint i = 0; i < pixelColors.size(); i++){
			pixelColors[i] *= s / float(s+1);
			pixelColors[i] += camera.eliminateVignetting(oneIterColors[i], i) / (s + 1);
		}

		// release this iteration's light paths; pixelLightPaths holds mPhotonsNum entries,
		// which need not equal the pixel count
		for(uint i = 0; i < pixelLightPaths.size(); i++)
			delete pixelLightPaths[i];

		unsigned nowTime = (unsigned)((clock() - startTime) / 1000);
		//if (nowTime > recordTime)
		if (s % outputIter == 0)
		{
			showCurrentResult(pixelColors , &nowTime , &s);
			//showCurrentResult(pixelColors , &lastTime , &s);
			//recordTime += timeInterval;
		}
		else
			showCurrentResult(pixelColors);
	}
	return pixelColors;
}
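The renderPixels loop above combines two update rules that are easy to get wrong: the progressive shrinking of the photon-gathering radius and the running per-iteration average of the frame buffer. Below is a minimal standalone sketch of both, assuming alpha lies in (0,1); the helper names ppmRadius and accumulateIteration are illustrative and do not appear in the original code.

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Progressive radius after iteration s (0-based): r_s = r0 * ((s+1)^(alpha-1))^(1/d),
// with d = 3 when ray marching through a volume and d = 2 for surface estimation,
// mirroring the two branches guarded by #ifdef PPM above.
inline float ppmRadius(float baseRadius, unsigned s, float alpha, bool volumetric)
{
	float shrink = std::pow(float(s + 1), alpha - 1.f);
	float r = baseRadius * std::pow(shrink, volumetric ? 1.f / 3.f : 0.5f);
	return std::max(r, 1e-6f);
}

// Running mean: after s completed iterations the buffer holds the average of all
// iterations so far, so the new estimate is blended in with weight 1/(s+1).
inline void accumulateIteration(std::vector<float>& average,
                                const std::vector<float>& current, unsigned s)
{
	for (std::size_t i = 0; i < average.size(); ++i)
		average[i] = average[i] * (s / float(s + 1)) + current[i] / float(s + 1);
}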
Example #2
	std::vector<vec3f> PathTracer::renderPixels(){
		Camera &camera = getCamera();
		uint width = camera.mResolution.x, height = camera.mResolution.y;
		std::vector<vec3f> pixelColors(width * height, vec3f(0,0,0));
		
		if(useNextEventEstimation)
			prepareForLightSampling();

		for(uint s = 0; s < spp; s++){
			std::cout << "iteration : " << s << std::endl;
			
			engine->scene.updateSceneForMotionBlur();

#pragma omp parallel for
			for(int p = 0; p < pixelColors.size(); p++){
				Path eyePath;
				samplePath(eyePath, camera.generateRay(p));
				pixelColors[p] *= s / float(s+1);
				vec3f color = vec3f(1,1,1);

				bool hasToConnect = eyePath[eyePath.size()-1].radiance.length() <= 0;	// the eye path did not end on a light, so an explicit light connection is needed

				if(!useNextEventEstimation || !hasToConnect){
					for(int i = 0; i < eyePath.size(); i++){
						if(i != eyePath.size() - 1){
							color *= eyePath[i].cosineTerm();
							float dist = (eyePath[i+1].origin - eyePath[i].origin).length();
							color *= eyePath[i].radianceDecay(dist);
						}
						color *= eyePath[i].radiance / eyePath[i].originProb / eyePath[i].directionProb;
					}
				}
				else{
					int endIndex = (eyePath.back().contactObj || eyePath.back().insideObj) ? -1 : eyePath.size()-2;
					if(endIndex <= 0)											continue;

					Ray &endRay = eyePath[endIndex];
					Ray lightRay = genLightSample();
					if(endRay.contactObj && endRay.contactObj->isEmissive())	continue;
					if(endRay.contactObj && !endRay.contactObj->nonSpecular())	continue;

					endRay.direction = lightRay.origin - endRay.origin;
					endRay.direction.normalize();
					lightRay.direction = -endRay.direction;

					float connectDist = MAX((lightRay.origin - endRay.origin).length(), EPSILON);
					
					if(endRay.direction.dot(lightRay.contactNormal()) >= 0)		continue;
					
					if(!visibilityTest(endRay, lightRay))						continue;
					
					for(int i = 0; i < endIndex; i++){
						color *= eyePath[i].radiance / eyePath[i].originProb / eyePath[i].directionProb;
						color *= eyePath[i].cosineTerm();
						float dist = (eyePath[i+1].origin - eyePath[i].origin).length();
						color *= eyePath[i].radianceDecay(dist);
					}
					color *= eyePath[endIndex-1].evalBSDF(endRay) * lightRay.radiance * endRay.radianceDecay(connectDist)
						* lightRay.cosineTerm() * endRay.cosineTerm() / (connectDist * connectDist);
					color /= eyePath[endIndex].originProb * lightRay.originProb;
				}
				if(!isLegalColor(color))
					color = vec3f(0,0,0);
				pixelColors[p] += camera.fixVignetting(color, p) / (s+1);
			}
			camera.mFilm.setBuffer(pixelColors);
			std::string filename = engine->renderer->name + engine->scene.name + ".pfm";
			camera.mFilm.savePFM(filename);
		}
		return pixelColors;
	}
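When useNextEventEstimation is enabled, the loop above weights the explicit light connection by the BSDF value, the light's emitted radiance, the transmittance along the shadow ray, the two cosine terms divided by the squared distance, and the area pdfs of the two endpoints. Here is a minimal scalar sketch of that weight; the helper names geometryTerm and connectionWeight are invented, and plain floats stand in for the renderer's vec3f/Ray types.

#include <algorithm>
#include <cmath>

struct Vec3 { float x, y, z; };

inline float dot(const Vec3& a, const Vec3& b) { return a.x*b.x + a.y*b.y + a.z*b.z; }

// Geometry term between the shading point and the sampled light point, with
// toLight the normalized direction from the surface towards the light:
// G = cos(theta_surface) * cos(theta_light) / distance^2.
inline float geometryTerm(const Vec3& toLight, float dist,
                          const Vec3& surfaceNormal, const Vec3& lightNormal)
{
	float cosSurface = std::max(dot(surfaceNormal, toLight), 0.f);
	float cosLight   = std::max(-dot(lightNormal, toLight), 0.f);
	return cosSurface * cosLight / std::max(dist * dist, 1e-12f);
}

// Contribution of one connection: f_s * L_e * transmittance * G / (pdfA_end * pdfA_light),
// matching the two lines that build `color` at the end of the NEE branch above.
inline float connectionWeight(float bsdf, float emitted, float transmittance,
                              float G, float pdfEnd, float pdfLight)
{
	return bsdf * emitted * transmittance * G / (pdfEnd * pdfLight);
}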
Example #3
vector<vec3f> IptTracer::renderPixels(const Camera& camera)
{
	if (!usePPM)
	{
		lightPathNum = totPathNum * pathRatio;
		interPathNum = totPathNum * (1.f - pathRatio);
		partialPathNum = interPathNum;

		mergeIterations = maxDepth;
		useWeight = true;
	}
	else
	{
		lightPathNum = totPathNum;
		interPathNum = totPathNum;
		partialPathNum = interPathNum;

		mergeIterations = 0;
		useWeight = false;
	}
	cameraPathNum = pixelNum;
	
	useUniformInterSampler = (useUniformSur && useUniformVol);

	vector<vec3f> pixelColors(camera.width * camera.height, vec3f(0, 0, 0));
	vector<omp_lock_t> pixelLocks(pixelColors.size());
	volMask.resize(camera.width * camera.height);

	preprocessEmissionSampler();
	preprocessOtherSampler(useUniformSur);
	preprocessVolumeSampler(useUniformVol , mergeRadius * 0.1f);
	
	for(int i=0; i<pixelLocks.size(); i++)
	{
		omp_init_lock(&pixelLocks[i]);
	}

	omp_lock_t cmdLock;
	omp_init_lock(&cmdLock);

	if (gatherRadius < 1e-6f)
		gatherRadius = mergeRadius;
	Real r0 = mergeRadius;
	Real gr0 = gatherRadius;

	totArea = renderer->scene.getTotalArea();
	totVol = renderer->scene.getTotalVolume();
	printf("scene: totArea = %.8f, totVol = %.8f\n" , totArea , totVol);

	// abandon surface
	//totArea = 0.f;

	if (totVol > 1e-6f && totArea > 1e-6f)
		partialPathNum = interPathNum / 2;

	unsigned startTime = clock();

	for(unsigned s=0; s<=spp; s++)
	{
		partPathMergeIndex.resize(interPathNum);

		partialSubPathList.clear();
		/*
		float shrinkRatio;
		if (totVol > 1e-6f)
			shrinkRatio = powf(((float)s + alpha) / ((float)s + 1.f) , 1.f / 3.f);
		else
			shrinkRatio = powf(((float)s + alpha) / ((float)s + 1.f) , 1.f / 2.f);
		if (s > 0)
			mergeRadius *= shrinkRatio;
		*/

		float base;
		if (useUniformInterSampler)
			base = (float)s + 1.f;
		else 
			base = (float)s;

		if (totVol > 1e-6f)
		{
			mergeRadius = r0 * powf(powf(max(base , 1.f) , alpha - 1.f) , 1.f / 3.f);
			gatherRadius = gr0 * powf(powf(max(base , 1.f) , alpha - 1.f) , 1.f / 3.f);
		}
		else
		{
			mergeRadius = r0 * powf(powf(max(base , 1.f) , alpha - 1.f) , 1.f / 2.f);
			gatherRadius = gr0 * powf(powf(max(base , 1.f) , alpha - 1.f) , 1.f / 2.f);
		}
		mergeRadius = r0;	// override: keep the merge radius fixed at r0; only gatherRadius follows the shrinking schedule above
		mergeRadius = std::max(mergeRadius , 1e-7f);
		gatherRadius = std::max(gatherRadius , 1e-7f);

		printf("mergeRadius = %.8f, gatherRadius = %.8f\n" , mergeRadius , gatherRadius);

		vector<vec3f> singleImageColors(pixelColors.size(), vec3f(0, 0, 0));

		string cmd;	// reserved for an interactive "exit" command; it is never assigned in this function

		unsigned t = clock();

		vector<Path*> lightPathList(lightPathNum , NULL);
		vector<Path*> interPathList(interPathNum, NULL);

		interMergeKernel = 1.f / (M_PI * mergeRadius * 
			mergeRadius * (Real)partialPathNum);
		lightMergeKernel = 1.f / (M_PI * mergeRadius *
			mergeRadius * (Real)lightPathNum);

		interGatherKernel = 1.f / (M_PI * gatherRadius * 
			gatherRadius * (Real)partialPathNum);
		lightGatherKernel = 1.f / (M_PI * gatherRadius *
			gatherRadius * (Real)lightPathNum);

		if (!renderer->scene.usingGPU())
		{
			genLightPaths(cmdLock , lightPathList , (s == 0));

			if (!useUniformInterSampler)
			{
				if (!useUniformSur)
					renderer->scene.beginUpdateOtherSampler(s);
				if (!useUniformVol)
					renderer->scene.beginUpdateVolumeSampler(s);
				for (int i = 0; i < partialSubPathList.size(); i++)
				{
					IptPathState& lightState = partialSubPathList[i];
					if (lightState.ray->contactObject && !useUniformSur)
					{
						renderer->scene.updateOtherSampler(lightState.ray->contactObject->objectIndex ,
							lightState.ray->contactObjectTriangleID , s , lightState.throughput / (Real)lightPathNum);
					}
					else if (lightState.ray->insideObject && lightState.ray->insideObject->isVolumetric() &&
						!lightState.ray->contactObject && !useUniformVol)
					{
						vec3f thr = lightState.throughput;
						renderer->scene.updateVolumeSampler(lightState.ray->insideObject->objectIndex ,
							lightState.ray->origin , s , thr / (Real)lightPathNum);
					}
				}
				if (!useUniformSur)
					renderer->scene.endUpdateOtherSampler();
				if (!useUniformVol)
					renderer->scene.endUpdateVolumeSampler();

				/*
				Scene::SurfaceSampler *interSampler = renderer->scene.otherSurfaceSampler;
				fprintf(fp , "totWeight = %.8f\n" , interSampler->totalWeight);
				for (int i = 0; i < interSampler->targetObjects.size(); i++)
				{
					SceneObject *obj = interSampler->targetObjects[i];
					fprintf(fp , "======= objId = %d , totEnergy = %.8f , weight = %.8f =======\n" , obj->objectIndex ,
						obj->totalEnergy , obj->weight);
					for (int j = 0; j < obj->getTriangleNum(); j++)
					{
						fprintf(fp , "triId = %d , e = %.8f\n" , j , obj->energyDensity[j]);
					}
				}
				
				Scene::VolumeSampler *interVolSampler = renderer->scene.volumeSampler;
				fprintf(fp , "totWeight = %.8f\n" , interVolSampler->totalWeight);
				for (int i = 0; i < interVolSampler->targetObjects.size(); i++)
				{
					SceneObject *obj = interVolSampler->targetObjects[i];
					fprintf(fp , "======= objId = %d , totEnergy = %.8f , weight = %.8f =======\n" , obj->objectIndex ,
						obj->countHashGrid->sumWeights , obj->volumeWeight);
					for (int j = 0; j < obj->countHashGrid->effectiveWeights.size(); j++)
					{
						fprintf(fp , "cellIndex = %d , e = %.8f\n" , obj->countHashGrid->effectiveIndex[j] , 
							obj->countHashGrid->effectiveWeights[j]);
					}
				}
				*/
				if (s == 0)
					continue;
			}

			if (!usePPM)
				genIntermediatePaths(cmdLock , interPathList);
			
			printf("lightPhotonNum = %d, partialPhotonNum = %d\n" , lightPhotonNum , partialPhotonNum);

			mergePartialPaths(cmdLock);

			PointKDTree<IptPathState> partialSubPaths(partialSubPathList);
			
			
			// debug output only: on the first effective iteration, dump each merged
			// sub-path's indirect contribution to the log file
			for (int i = 0; i < partialPhotonNum; i++)
			{
				IptPathState& subPath = partialSubPathList[i];
				if ((useUniformInterSampler && s == 0) || (!useUniformInterSampler && s == 1))
				{
					vec3f contrib;
					if (i < lightPhotonNum)
					{
						//contrib = subPath.throughput;
						//fprintf(fp1 , "==============\n");
						//fprintf(fp1 , "dirContrib=(%.8f,%.8f,%.8f), pathNum = %.1lf\n" , contrib.x , contrib.y , contrib.z , subPath.mergedPath);
					}
					else
					{
						contrib = subPath.indirContrib;
						if (intensity(contrib) < 1e-6f)
							continue;
						fprintf(fp , "==============\n");
						fprintf(fp , "indirContrib=(%.8f,%.8f,%.8f), pathNum = %.1lf\n" , contrib.x , contrib.y , contrib.z , subPath.mergedPath);
					}			
				}
			}
			

#pragma omp parallel for
			for(int p=0; p<cameraPathNum; p++)
			{
				//fprintf(fp2 , "========== pixel id = %d ==========\n" , p);
				Path eyePath;
				
				sampleMergePath(eyePath , camera.generateRay(p) , 0);
				singleImageColors[p] += colorByRayMarching(eyePath , partialSubPaths , p);
				
				// abandon all the rest!
				/*
				samplePath(eyePath, camera.generateRay(p));
				if (eyePath.size() <= 1)
					continue;

				IptPathState cameraState;
				bool lastSpecular = 1;
				Real lastPdfW = 1.f;

				cameraState.throughput = vec3f(1.f) / (eyePath[0].originProb * eyePath[0].directionProb * eyePath[1].originProb);
	
				cameraState.index = eyePath.front().pixelID;

				vector<float> weights;

				//fprintf(fp , "===================\n");
				//for (int i = 0; i < eyePath.size(); i++)
				//{
				//	fprintf(fp , "l=%d, bsdf=(%.8f,%.8f,%.8f), originPdf=%.8f, dirPdf=%.8f\n" , i , eyePath[i].color.x ,
				//		eyePath[i].color.y , eyePath[i].color.z , eyePath[i].originProb , eyePath[i].directionProb);
				//}

				int nonSpecLength = 0;
				vector<vec3f> mergeContribs;
				mergeContribs.clear();

				float weightFactor = 1.f;
				vec3f colorHitLight(0.f);

				int N = maxDepth;
				nonSpecLength = 0;

				for(unsigned i = 1; i < eyePath.size(); i++)
				//for (unsigned i = 1; i < 2; i++)
				{
					vec3f colorGlbIllu(0.f) , colorDirIllu(0.f);

					Real dist = std::max((eyePath[i].origin - eyePath[i - 1].origin).length() , 1e-5f);
					cameraState.throughput *= eyePath[i - 1].getRadianceDecay(dist);

					if (eyePath[i].contactObject && eyePath[i].contactObject->emissive())
					{
						vec3f contrib = ((SceneEmissiveObject*)(eyePath[i].contactObject))->getColor();
						float dirPdfA = eyePath[i].contactObject->getEmissionWeight() / eyePath[i].contactObject->totalArea;
						float mis = 1.f;
						if (i > 1 && !lastSpecular)
						{
							float cosine = eyePath[i].getContactNormal().dot(-eyePath[i - 1].direction);
							float dist = (eyePath[i].origin - eyePath[i - 1].origin).length();
							float dirPdfW = dirPdfA * dist * dist / abs(cosine);
							mis = lastPdfW / (lastPdfW + dirPdfW);
							
							//fprintf(fp , "==================\n");
							//fprintf(fp , "thr=(%.6f,%.6f,%.6f), contrib=(%.6f,%.6f,%.6f), pdfA=%.6f, pdfW=%.6f, lastPdfW=%.6f, cosine=%.6f, mis=%.6f\n" , 
							//	cameraState.throughput[0] , cameraState.throughput[1] , cameraState.throughput[2] , contrib[0] ,
							//	contrib[1] , contrib[2] , dirPdfA , dirPdfW , lastPdfW , cosine , mis);
							
						}
						
						colorHitLight = cameraState.throughput * contrib * mis;

						if (N > 0)
							weightFactor = 1.f - (Real)nonSpecLength / (Real)N;
						else
							weightFactor = 1.f;

						singleImageColors[cameraState.index] += colorHitLight * weightFactor;

						break;
					}

					//if (N == 0)
					//	printf("%d , error\n" , i);

					cameraState.pos = eyePath[i].origin;
					cameraState.lastRay = &eyePath[i - 1];
					cameraState.ray = &eyePath[i];

					if (eyePath[i].directionSampleType == Ray::RANDOM)
					{
						// mis with colorHitLight
						colorDirIllu = colorByConnectingLights(eyePath[i - 1] , eyePath[i]) * cameraState.throughput;
						weightFactor = 1.f - ((Real)nonSpecLength + 1.f) / (Real)N;
						colorDirIllu *= weightFactor;

						colorGlbIllu = colorByMergingPaths(cameraState , partialSubPaths);
						mergeContribs.push_back(colorDirIllu + colorGlbIllu / (Real)N);
					}

					lastSpecular = (eyePath[i].directionSampleType == Ray::DEFINITE);
					lastPdfW = eyePath[i].directionProb;

					if (eyePath[i].directionSampleType == Ray::RANDOM)
					{
						nonSpecLength++;
						
						if (nonSpecLength == N)
							break; // PPM, eye path length is 1
					}

					if (eyePath[i].direction.length() < 0.5f)
						break;

					if (i >= eyePath.size() - 1)
						break;
				
					cameraState.throughput *= (eyePath[i].color * eyePath[i].getCosineTerm()) / 
						(eyePath[i + 1].originProb * eyePath[i].directionProb);

					//fprintf(fp , "l=%d, thr=(%.8f,%.8f,%.8f), bsdf=(%.8f,%.8f,%.8f), cos=%.8f, prob=%.8f\n" , 
					//	i , cameraState.throughput[0] , cameraState.throughput[1] , cameraState.throughput[2] ,
					//	bsdfFactor[0] , bsdfFactor[1] , bsdfFactor[2] , eyePath[i].getCosineTerm() , eyePath[i].directionProb);
				}
				
				for (int i = 0; i < mergeContribs.size(); i++)
				{
					singleImageColors[cameraState.index] += mergeContribs[i];
				}
				*/
			}
		}
		else
		{
			vector<Path> lightPathListGPU , interPathListGPU , eyePathListGPU;
			vector<Ray> eyeRayList(cameraPathNum);
			vector<Ray> lightRayList(lightPathNum);
			vector<Ray> interRayList(interPathNum);

#pragma omp parallel for
			for (int p = 0; p < lightPathNum; p++)
				lightRayList[p] = genEmissiveSurfaceSample(true , false);
			lightPathListGPU = samplePathList(lightRayList);
			movePaths(cmdLock , lightPathListGPU , lightPathList);

			genLightPaths(cmdLock , lightPathList , (s == 0));

			if (!usePPM)
			{
#pragma omp parallel for
				for (int p = 0; p < interPathNum; p++)
					interRayList[p] = genIntermediateSamples(renderer->scene);
				interPathListGPU = samplePathList(interRayList);
				movePaths(cmdLock , interPathListGPU , interPathList);

				genIntermediatePaths(cmdLock , interPathList);
			}
			
			printf("lightPhotonNum = %d, partialPhotonNum = %d\n" , lightPhotonNum , partialPhotonNum);

			mergePartialPaths(cmdLock);

			PointKDTree<IptPathState> partialSubPaths(partialSubPathList);

#pragma omp parallel for
			for (int p = 0; p < cameraPathNum; p++)
				eyeRayList[p] = camera.generateRay(p);
			eyePathListGPU = sampleMergePathList(eyeRayList);

#pragma omp parallel for
			for(int p=0; p<cameraPathNum; p++)
			{
				Path eyePath;
				eyePath = eyePathListGPU[p];
				/*
				fprintf(fp , "==================\n");
				for (int i = 0; i < eyePath.size(); i++)
				{
					fprintf(fp , "c = (%.8f,%.8f,%.8f), dir = (%.8f,%.8f,%.8f), cos = %.8f, dirPdf = %.8f, oriPdf = %.8f\n" ,
						eyePath[i].color.x , eyePath[i].color.y , eyePath[i].color.z ,
						eyePath[i].direction.x , eyePath[i].direction.y , eyePath[i].direction.z ,
						eyePath[i].getCosineTerm() , eyePath[i].directionProb , eyePath[i].originProb);
				}
				*/
				//sampleMergePath(eyePath , camera.generateRay(p , true) , 0);
				singleImageColors[p] += colorByRayMarching(eyePath , partialSubPaths , p);
			}
		}

		printf("done calculation, release memory\n");

		if(cmd == "exit")
			return pixelColors;

		for(int i=0; i<pixelColors.size(); i++)
		{
			if (!isIllegal(singleImageColors[i]))
			{
				if (useUniformInterSampler)
				{
					pixelColors[i] *= (Real)s / ((Real)s + 1.f);
					pixelColors[i] += singleImageColors[i] / ((Real)s + 1.f); 
				}
				else
				{
					pixelColors[i] *= ((Real)s - 1.f) / ((Real)s);
					pixelColors[i] += singleImageColors[i] / ((Real)s); 
				}
			}
			else
			{

				fprintf(err , "(%.8f,%.8f,%.8f) occurs in iter %d\n" , singleImageColors[i].x ,
					singleImageColors[i].y , singleImageColors[i].z , s);
				continue;
			}
		}

		if (!renderer->scene.usingGPU())
		{
			for (int i = 0; i < lightPathNum; i++)
			{
				if (lightPathList[i])
					delete lightPathList[i];
			}

			for (int i = 0; i < interPathNum; i++)
			{
				if (interPathList[i])
					delete interPathList[i];
			}
		}
		else
		{
			for (int i = 0; i < lightPathNum; i++)
				lightPathList[i] = NULL;
			for (int i = 0; i < interPathNum; i++)
				interPathList[i] = NULL;
		}

		printf("Iter: %d  IterTime: %ds  TotalTime: %ds\n", s, (int)((clock()-t)/1000), (int)((clock()-startTime)/1000));

		if ((useUniformInterSampler && s == 0) || (!useUniformInterSampler && s == 1))
		{
			for (int y = camera.height - 1; y >= 0; y--)
			{
				for (int x = 0; x < camera.width; x++)
				{
					if (volMask[y * camera.width + x])
						fprintf(fm , "1 ");
					else
						fprintf(fm , "0 ");
				}
				fprintf(fm , "\n");
			}
		}

		//if (clock() / 1000 >= lastTime)
		if (s % outputIter == 0 && !isDebug)
		{
			unsigned nowTime = (clock() - startTime) / 1000;
			showCurrentResult(pixelColors , &nowTime , &s);
			//showCurrentResult(pixelColors , &lastTime , &s);
			//lastTime += timeInterval;
		}
		else
			showCurrentResult(pixelColors);
	}	

	for(int i=0; i<pixelLocks.size(); i++)
	{
		omp_destroy_lock(&pixelLocks[i]);
	}
	omp_destroy_lock(&cmdLock);

	return pixelColors;
}
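The interMergeKernel/lightMergeKernel and interGatherKernel/lightGatherKernel factors computed at the top of every iteration are the normalization of a uniform disc kernel used for photon merging: each merge within the radius contributes its flux divided by the disc area and by the number of emitted sub-paths. A minimal sketch, assuming the 2D disc kernel used in the code; the helper name mergeKernel is illustrative.

#include <cmath>
#include <cstddef>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

// Normalization of a uniform disc kernel of radius r over N emitted sub-paths:
// 1 / (pi * r^2 * N). Dividing gathered flux by the disc area turns it into an
// area density, and dividing by N averages over the emitted sub-paths.
inline double mergeKernel(double radius, std::size_t pathCount)
{
	return 1.0 / (M_PI * radius * radius * static_cast<double>(pathCount));
}

Because the merge and gather radii shrink with the iteration index, these kernels grow correspondingly, which is why they are recomputed inside the per-iteration loop rather than once up front.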
Example #4
vector<vec3f> PathTracer::renderPixels(const Camera& camera)
{
	int t_start = clock();
	vector<vec3f> pixelColors(camera.width*camera.height, vec3f(0, 0, 0));

	if(useConnection)
		renderer->scene.preprocessEmissionSampler();

	if(!renderer->scene.usingGPU())
	{
		for(unsigned s=0; s<spp; s++)
		{
			int t = clock();
#pragma omp parallel for
			for(int p=0; p<pixelColors.size(); p++)
			{
				Path eyePath;
				samplePath(eyePath, camera.generateRay(p));

				pixelColors[p] *= s/float(s+1);

				if (!(eyePath.back().contactObject && eyePath.back().contactObject->emissive()))
					continue;

				vec3f color = vec3f(1, 1, 1);
				for(unsigned i=0; i<eyePath.size(); i++)
				{
					color *= eyePath[i].color / eyePath[i].directionProb / eyePath[i].originProb;
				
					if(i!=eyePath.size()-1)
					{
						color *= eyePath[i].getCosineTerm();
						float dist = (eyePath[i+1].origin - eyePath[i].origin).length();
						// NOTE: the transmittance (radiance decay) over this segment must be applied
						color *= eyePath[i].getRadianceDecay(dist);
					}
				}

				pixelColors[p] += renderer->camera.eliminateVignetting(color, p)/(s+1);//*camera.width*camera.height;
				//pixelColors[p] += color * eyePath[0].directionProb / (s+1);
			}

			//if (clock() / 1000 >= lastTime)
			if (s % outputIter == 0)
			{
				unsigned nowTime = (clock() - t_start) / 1000;
				showCurrentResult(pixelColors , &nowTime , &s);
				//showCurrentResult(pixelColors , &lastTime , &s);
				//lastTime += timeInterval;
			}
			else
				showCurrentResult(pixelColors);
			printf("Iter: %d  IterTime: %ds  TotalTime: %ds\n", s+1, (clock()-t)/1000, (clock()-t_start)/1000);
		}
	}
	else
	{
		
		for(unsigned s=0; s<spp; s++)
		{
			int t = clock();
			vector<Ray> eyeRays(pixelColors.size());

#pragma omp parallel for
			for(int p=0; p<pixelColors.size(); p++)
			{
				eyeRays[p] = camera.generateRay(p);
			}

			int clk = clock();
			vector<Path> pathList = samplePathList(eyeRays);

			vector<Path> lightPathList;
			vector<vector<unsigned>> visList;

			if(useConnection)
			{
				lightPathList.resize(pathList.size());
				for(unsigned i=0; i<pathList.size(); i++)
					lightPathList[i].push_back(genEmissiveSurfaceSample(true , false));
				visList = testPathListVisibility(pathList, lightPathList);
			}

#pragma omp parallel for
			for(int p=0; p<pathList.size(); p++)
			{
				pixelColors[p] *= s/float(s+1);

				vec3f color = vec3f(0, 0, 0);

				//pathList[p][0].directionProb = 1.f;

				//if(!useConnection || mustUsePT(pathList[p]) || 
				//	pathList[p].size()==2 && pathList[p].back().contactObject && pathList[p].back().contactObject->emissive())
				if (pathList[p].back().contactObject && pathList[p].back().contactObject->emissive())
				{
					vec3f c(1, 1, 1);
					for(unsigned i=0; i<pathList[p].size(); i++)
					{
						c *= pathList[p][i].color / pathList[p][i].directionProb / pathList[p][i].originProb;

						if(i!=pathList[p].size()-1)
						{
							c *= pathList[p][i].getCosineTerm();
							float dist = (pathList[p][i+1].origin - pathList[p][i].origin).length();
							// NOTE: the transmittance (radiance decay) over this segment must be applied
							c *= pathList[p][i].getRadianceDecay(dist);
						}
					}
					color += c;
				}
				/*
				else
				{
					Ray &lightRay = lightPathList[p][0];
					for(unsigned i=1; i<pathList[p].size(); i++)
					{
						if(!((visList[p][i/32]>>(i%32)) & 1))
							continue;
						Path connectedPath;
						connectedPath.push_back(lightRay);
						Path &eyePath = pathList[p];
						if(eyePath[i].contactObject && eyePath[i].contactObject->emissive())
							break;
						if(eyePath[i].directionSampleType != Ray::RANDOM)
							continue;
						for(unsigned k=0; k<=i; k++)
							connectedPath.push_back(eyePath[i-k]);
						connectRays(connectedPath, 0);
						vec4f color_prob = connectColorProb(connectedPath, 0);
						if(vec3f(color_prob).length()>0 && color_prob.w > 0)
							color += vec3f(color_prob) / color_prob.w;// / camera.width / camera.height;
					}
				}
				*/
				pixelColors[p] += renderer->camera.eliminateVignetting(color, p)/(s+1);//*camera.width*camera.height;
				//pixelColors[p] += color / (s+1);
			}

			//if (clock() / 1000 >= lastTime)
			if (s % outputIter == 0)
			{
				unsigned nowTime = (clock() - t_start) / 1000;
				showCurrentResult(pixelColors , &nowTime , &s);
				//showCurrentResult(pixelColors , &lastTime , &s);
				//lastTime += timeInterval;
			}
			else
				showCurrentResult(pixelColors);
			printf("Iter: %d  IterTime: %ds  TotalTime: %ds\n", s+1, (clock()-t)/1000, (clock()-t_start)/1000);
		}
	}
	return pixelColors;
}
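Both the CPU and GPU branches above accumulate a path's contribution with the same per-vertex rule: multiply the running weight by the vertex's BSDF value over its sampling pdfs and, for every vertex except the last, by the cosine term and the transmittance to the next vertex. Below is a scalar sketch of that loop; the VertexSample struct and pathThroughput helper are invented stand-ins for the renderer's Ray/Path types.

// One channel of the per-vertex data the real loop reads from each Ray.
struct VertexSample {
	float bsdf;          // eyePath[i].color (per channel in the real code)
	float cosine;        // eyePath[i].getCosineTerm()
	float pdfDir;        // eyePath[i].directionProb
	float pdfOrigin;     // eyePath[i].originProb
	float transmittance; // eyePath[i].getRadianceDecay(dist), 1 in vacuum
};

// Running path throughput: the sample's contribution is this weight times the
// emitted radiance picked up at the last vertex (when it lies on a light).
inline float pathThroughput(const VertexSample* path, int length)
{
	float weight = 1.f;
	for (int i = 0; i < length; ++i) {
		weight *= path[i].bsdf / (path[i].pdfDir * path[i].pdfOrigin);
		if (i != length - 1)	// the last vertex contributes no outgoing bounce
			weight *= path[i].cosine * path[i].transmittance;
	}
	return weight;
}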