コード例 #1
0
void BlenderSession::do_write_update_render_result(BL::RenderResult b_rr, BL::RenderLayer b_rlay, RenderTile& rtile, bool do_update_only)
{
	/* Transfer a rendered tile from the device buffers into Blender's
	 * render result, either refreshing just the combined pass (preview
	 * update) or writing every pass (final write). */
	RenderBuffers *rbufs = rtile.buffers;

	/* nothing to write if pixels could not be fetched from the device */
	if(!rbufs->copy_from_device())
		return;

	BufferParams& bparams = rbufs->params;
	const float film_exposure = scene->film->exposure;

	/* scratch buffer sized for the widest pass: 4 floats per pixel */
	vector<float> pixel_data(bparams.width*bparams.height*4);

	if(do_update_only) {
		/* fast path: only the combined pass needs refreshing */
		BL::RenderPass b_combined_pass(b_rlay.passes.find_by_type(BL::RenderPass::type_COMBINED, b_rview_name.c_str()));
		if(rbufs->get_pass_rect(PASS_COMBINED, film_exposure, rtile.sample, 4, &pixel_data[0]))
			b_combined_pass.rect(&pixel_data[0]);
	}
	else {
		/* final write: copy every pass of the render layer */
		BL::RenderLayer::passes_iterator pass_it;

		for(b_rlay.passes.begin(pass_it); pass_it != b_rlay.passes.end(); ++pass_it) {
			BL::RenderPass b_pass(*pass_it);

			/* map the Blender pass to the corresponding internal pass type */
			PassType pass_type = get_pass_type(b_pass);
			int components = b_pass.channels();

			/* zero-fill when the pass is absent from the buffers, so
			 * stale scratch data is never written into Blender */
			if(!rbufs->get_pass_rect(pass_type, film_exposure, rtile.sample, components, &pixel_data[0]))
				memset(&pixel_data[0], 0, pixel_data.size()*sizeof(float));

			b_pass.rect(&pixel_data[0]);
		}
	}

	/* tag result as updated */
	b_engine.update_result(b_rr);
}
コード例 #2
0
void BlenderSession::write_render_result()
{
	/* get state */
	RenderBuffers *buffers = session->buffers;

	/* copy data from device */
	if(!buffers->copy_from_device())
		return;

	BufferParams& params = buffers->params;
	float exposure = scene->film->exposure;
	double total_time, sample_time;
	int sample;

	session->progress.get_sample(sample, total_time, sample_time);

	vector<float> pixels(params.width*params.height*4);

	/* copy each pass */
	BL::RenderLayer::passes_iterator b_iter;
	
	for(b_rlay.passes.begin(b_iter); b_iter != b_rlay.passes.end(); ++b_iter) {
		BL::RenderPass b_pass(*b_iter);

		/* find matching pass type */
		PassType pass_type = get_pass_type(b_pass);
		int components = b_pass.channels();

		/* copy pixels */
		if(buffers->get_pass(pass_type, exposure, sample, components, &pixels[0]))
			rna_RenderPass_rect_set(&b_pass.ptr, &pixels[0]);
	}

	/* copy combined pass */
	if(buffers->get_pass(PASS_COMBINED, exposure, sample, 4, &pixels[0]))
		rna_RenderLayer_rect_set(&b_rlay.ptr, &pixels[0]);

	/* tag result as updated */
	RE_engine_update_result((RenderEngine*)b_engine.ptr.data, (RenderResult*)b_rr.ptr.data);
}
コード例 #3
0
ファイル: tracing_thread.cpp プロジェクト: Kai-Fu/KRender
void ImageSampler::DoPixelSampling(UINT32 x, UINT32 y, UINT32 sample_count, PixelSamplingResult& result)
{
	// Shoot `sample_count` camera samples through pixel (x, y), accumulating
	// the averaged color, the hit ratio (alpha) and a per-sample variance
	// estimate into `result`.
	//
	// Fix: guard sample_count == 0, which previously produced 0/0 NaNs for
	// alpha/variance; removed unused locals (IntersectContext tempCtx,
	// KColor out_clr); hoisted the loop-invariant thread-data dereference
	// out of the sampling loop and reused the cached pRBufs pointer.
	if (sample_count == 0) {
		result.alpha = 0;
		result.variance = 0;
		result.average = KColor(0,0,0);
		return;
	}

	// Grow the per-sample scratch array on demand; it is reused across calls.
	if (mTempSamplingRes.size() < sample_count)
		mTempSamplingRes.resize(sample_count);

	const float sampleCnt = (float)sample_count;
	float hitCnt = 0;
	KColor sum(0,0,0);
	RenderBuffers* pRBufs = mpInputData->pRenderBuffers;

	// Thread-local tracing state is the same for every sample; fetch once.
	TracingInstance& tracingInst = *mTracingThreadData.get();

	for (UINT32 si = 0; si < sample_count; ++si) {
		// Set up the camera context from the per-pixel random sequences;
		// motion blur / depth of field inputs only when those features are on.
		tracingInst.mCameraContext.inScreenPos = pRBufs->RS_Image(x, y);
		tracingInst.mCameraContext.inMotionTime = ENABLE_MB ? pRBufs->RS_MotionBlur(x, y) : 0;
		tracingInst.mCameraContext.inAperturePos = ENABLE_DOF ? pRBufs->RS_DOF(x, y) : KVec2(0,0);

		// EvaluateShading writes the sample color into mTempSamplingRes[si]
		// and reports whether the ray hit any geometry.
		bool isHit = mpInputData->pCurrentCamera->EvaluateShading(tracingInst, mTempSamplingRes[si]);
		sum.Add(mTempSamplingRes[si]);

		pRBufs->IncreaseSampledCount(x, y, 1);

		if (isHit)
			hitCnt += 1.0f;
	}

	// alpha = fraction of samples that hit geometry.
	result.alpha = hitCnt / sampleCnt;
	result.variance = 0;
	result.average = sum;
	result.average.Scale(1.0f / sampleCnt);

	// Variance: mean per-sample deviation from the averaged color.
	for (UINT32 si = 0; si < sample_count; ++si) {
		result.variance += mTempSamplingRes[si].DiffRatio(result.average);
	}

	result.variance /= sampleCnt;
}