Example #1
void VPLShaderManager::drawBackground(const Sensor *sensor,
		const Transform &projectionTransform, Float scaleFactor) {
	if (m_backgroundProgram == NULL)
		return;

	const Transform &trafo = sensor->getWorldTransform()->eval(0);

	Transform clipToWorld = trafo
		* Transform::scale(Vector(-1, 1, -1)) * projectionTransform.inverse();

	GPUProgram *prog = m_backgroundProgram;
	int tuOffset = 0;
	prog->bind();
	m_backgroundDependencies.bind(prog, m_backgroundDependencies, tuOffset);

	if (sensor->getType() & Sensor::EOrthographicCamera) {
		Vector d = trafo(Vector(0.0f, 0.0f, 1.0f));
		prog->setParameter(m_backgroundParam_camDirection, d);
	} else {
		Point p = trafo(Point(0.0f));
		prog->setParameter(m_backgroundParam_camPosition, p);
	}

	prog->setParameter(m_backgroundParam_emitterScale, scaleFactor);
	prog->setParameter(m_backgroundParam_clipToWorld, clipToWorld);
	m_renderer->blitQuad(false);
	prog->unbind();
	m_backgroundDependencies.unbind();
}
Example #2
	void fillIntersectionRecord(const Ray &ray,
			const void *temp, Intersection &its) const {
		const Float *data = static_cast<const Float *>(temp);

		Float r = std::sqrt(data[0] * data[0] + data[1] * data[1]),
			  invR = (r == 0) ? 0.0f : (1.0f / r);

		Float phi = std::atan2(data[1], data[0]);
		if (phi < 0)
			phi += 2*M_PI;

		Float cosPhi = data[0] * invR, sinPhi = data[1] * invR;
		const Transform &trafo = m_objectToWorld->eval(ray.time);

		its.shape = this;
		if (r != 0) {
			its.dpdu = trafo(Vector(cosPhi, sinPhi, 0));
			its.dpdv = trafo(Vector(-sinPhi, cosPhi, 0));
		} else {
			its.dpdu = trafo(Vector(1, 0, 0));
			its.dpdv = trafo(Vector(0, 1, 0));
		}

		its.shFrame.n = normalize(trafo(Normal(0, 0, 1)));
		its.uv = Point2(r, phi * INV_TWOPI);
		its.p = ray(its.t);
		its.hasUVPartials = false;
		its.instance = NULL;
		its.time = ray.time;
	}
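Since the record stores uv = (r, phi * INV_TWOPI), the local hit point is recoverable from uv alone. A minimal sketch of that inverse mapping, under the same conventions as above (this helper is not part of the original shape plugin):

// Hypothetical helper: invert the uv parameterization used above,
// mapping uv = (r, phi / (2*pi)) back onto the local unit disk.
static Point diskUvToLocal(const Point2 &uv) {
	Float r = uv.x, phi = uv.y * (2 * M_PI);
	return Point(r * std::cos(phi), r * std::sin(phi), 0.0f);
}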
Example #3
	Spectrum samplePosition(PositionSamplingRecord &pRec, const Point2 &sample, const Point2 *extra) const {
		const Transform &trafo = m_worldTransform->eval(pRec.time);
		pRec.p = trafo(Point(0.0f));
		pRec.n = trafo(Vector(0.0f, 0.0f, 1.0f));
		pRec.pdf = 1.0f;
		pRec.measure = EDiscrete;
		return m_power;
	}
Example #4
	void samplePosition(PositionSamplingRecord &pRec, const Point2 &sample) const {
		const Transform &trafo = m_objectToWorld->eval(pRec.time);
		Point2 p = warp::squareToUniformDiskConcentric(sample);

		pRec.p = trafo(Point3(p.x, p.y, 0));
		pRec.n = normalize(trafo(Normal(0, 0, 1)));
		pRec.pdf = m_invSurfaceArea;
		pRec.measure = EArea;
	}
Example #5
	void configure() {
		ProjectiveCamera::configure();

		const Vector2i &filmSize   = m_film->getSize();
		const Vector2i &cropSize   = m_film->getCropSize();
		const Point2i  &cropOffset = m_film->getCropOffset();

		Vector2 relSize((Float) cropSize.x / (Float) filmSize.x,
			(Float) cropSize.y / (Float) filmSize.y);
		Point2 relOffset((Float) cropOffset.x / (Float) filmSize.x,
			(Float) cropOffset.y / (Float) filmSize.y);

		/**
		 * These do the following (in reverse order):
		 *
		 * 1. Create a transform from camera space to [-1,1]x[-1,1]x[0,1] clip
		 *    coordinates (not yet accounting for the aspect ratio)
		 *
		 * 2+3. Translate and scale to shift the clip coordinates into the
		 *    range from zero to one, and take the aspect ratio into account.
		 *
		 * 4+5. Translate and scale the coordinates once more to account
		 *     for a cropping window (if there is any)
		 */
		m_cameraToSample =
			  Transform::scale(Vector(1.0f / relSize.x, 1.0f / relSize.y, 1.0f))
			* Transform::translate(Vector(-relOffset.x, -relOffset.y, 0.0f))
			* Transform::scale(Vector(-0.5f, -0.5f*m_aspect, 1.0f))
			* Transform::translate(Vector(-1.0f, -1.0f/m_aspect, 0.0f))
			* Transform::orthographic(m_nearClip, m_farClip);

		m_sampleToCamera = m_cameraToSample.inverse();

		/* Position differentials on the near plane */
		m_dx = m_sampleToCamera(Point(m_invResolution.x, 0.0f, 0.0f))
			 - m_sampleToCamera(Point(0.0f));
		m_dy = m_sampleToCamera(Point(0.0f, m_invResolution.y, 0.0f))
			 - m_sampleToCamera(Point(0.0f));

		/* Clip-space transformation for OpenGL */
		m_clipTransform = Transform::translate(
			Vector((1-2*relOffset.x)/relSize.x - 1,
			      -(1-2*relOffset.y)/relSize.y + 1, 0.0f)) *
			Transform::scale(Vector(1.0f / relSize.x, 1.0f / relSize.y, 1.0f));

		const Transform &trafo = m_worldTransform->eval(0.0f);

		m_invSurfaceArea = 1.0f / (
			trafo(m_sampleToCamera(Vector(1, 0, 0))).length() *
			trafo(m_sampleToCamera(Vector(0, 1, 0))).length());

		m_scale = trafo(Vector(0, 0, 1)).length();
	}
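As a quick sanity check on the chain documented above (a hedged sketch, not part of configure()): with a full-film crop window, relSize = (1, 1) and relOffset = (0, 0), so the center of the near plane should land in the middle of sample space.

// Hedged check: Transform::orthographic() sends z = m_nearClip to 0, and
// the translate/scale pairs then center x and y, giving s ≈ (0.5, 0.5, 0).
Point s = m_cameraToSample(Point(0.0f, 0.0f, m_nearClip));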
Example #6
	Spectrum sampleRay(Ray &ray,
			const Point2 &spatialSample,
			const Point2 &directionalSample,
			Float time) const {
		const Transform &trafo = m_worldTransform->eval(time);
		Point2 p = warp::squareToUniformDiskConcentric(spatialSample);
		Vector perpOffset = trafo(Vector(p.x, p.y, 0) * m_bsphere.radius);
		Vector d = trafo(Vector(0, 0, 1));
		ray.setOrigin(m_bsphere.center - d*m_bsphere.radius + perpOffset);
		ray.setDirection(d);
		ray.setTime(time);
		return m_power;
	}
Example #7
	Spectrum samplePosition(PositionSamplingRecord &pRec, const Point2 &sample, const Point2 *extra) const {
		const Transform &trafo = m_worldTransform->eval(pRec.time);

		Point2 p = warp::squareToUniformDiskConcentric(sample);

		Vector perpOffset = trafo(Vector(p.x, p.y, 0) * m_bsphere.radius);
		Vector d = trafo(Vector(0, 0, 1));

		pRec.p = m_bsphere.center - d*m_bsphere.radius + perpOffset;
		pRec.n = d;
		pRec.pdf = m_invSurfaceArea;
		pRec.measure = EArea;
		return m_power;
	}
Example #8
	AABB getAABB() const {
		std::set<Float> times;
		m_objectToWorld->collectKeyframes(times);

		AABB aabb;
		for (std::set<Float>::iterator it = times.begin(); it != times.end(); ++it) {
			const Transform &trafo = m_objectToWorld->eval(*it);
			aabb.expandBy(trafo(Point( 1,  0, 0)));
			aabb.expandBy(trafo(Point(-1,  0, 0)));
			aabb.expandBy(trafo(Point( 0,  1, 0)));
			aabb.expandBy(trafo(Point( 0, -1, 0)));
		}
		return aabb;
	}
Example #9
	void configure() {
		Shape::configure();

		const Transform &trafo = m_objectToWorld->eval(0);
		Vector dpdu = trafo(Vector(1, 0, 0));
		Vector dpdv = trafo(Vector(0, 1, 0));

		if (std::abs(dot(normalize(dpdu), normalize(dpdv))) > 1e-3f)
			Log(EError, "Error: 'toWorld' transformation contains shear!");

		if (std::abs(dpdu.length() / dpdv.length() - 1) > 1e-3f)
			Log(EError, "Error: 'toWorld' transformation contains a non-uniform scale!");

		m_invSurfaceArea = 1.0f / (M_PI * dpdu.length() * dpdu.length());
	}
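To see what the shear check above catches, consider a hedged, purely illustrative objectToWorld matrix with an in-plane shear:

// Hypothetical transform that trips the shear check in configure():
Matrix4x4 shearM(
	1, 0.5f, 0, 0,
	0, 1,    0, 0,
	0, 0,    1, 0,
	0, 0,    0, 1);
Transform sheared(shearM);
// dpdu = (1, 0, 0) and dpdv = (0.5, 1, 0); the normalized tangents have
// |dot| = 0.5 / sqrt(1.25) ≈ 0.447 > 1e-3, so the EError is raised.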
Example #10
	ComputeIconMasks(IconTaskCtx& ctx)
		: ctx(ctx)
	{
		// clamp the icon size
		iconSizecv = cv::Size(
					clamp(ctx.iconSize.width(),
						  IconTask::IconSizeMin, IconTask::IconSizeMax),
					clamp(ctx.iconSize.height(),
						  IconTask::IconSizeMin, IconTask::IconSizeMax));

		// inner size = icon size without border (fixed to 1px)
		innerSize = QSize(iconSizecv.width - 2,
						  iconSizecv.height - 2);

		innerSizecv = cv::Size(innerSize.width(), innerSize.height());

		if (ctx.applyROI) {
			labels = ctx.roi_labels;
		} else {
			labels = ctx.full_labels;
		}

		scale = scaleToFit(labels.size(), innerSizecv);

		// offset into icon rect
		dx = 0.5 * (float(iconSizecv.width) - labels.cols*scale);
		dy = 0.5 * (float(iconSizecv.height) - labels.rows*scale);

		// affine trafo matrix
		trafo = cv::Mat1f::zeros(2,3);
		trafo(0,0) = scale;
		trafo(1,1) = scale;
		trafo(0,2) = dx;
		trafo(1,2) = dy;

		// rect of the transformed mask in the icon
		drect = QRectF(dx, dy,
					   labels.cols*scale, labels.rows*scale);
		// rect of the border around the transformed mask
		brect = QRectF(drect.left(), drect.top(),
					   drect.width()-1, drect.height()-1);

//		GGDBGM("desired icon size " << iconSizecv << endl);
//		GGDBGM("scale " << scale << endl);
//		GGDBGM("dx " << dx << endl);
//		GGDBGM("dy " << dy << endl);
//		GGDBGM("scaled mask size " << innerSizecv << endl);
	}
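The 2x3 matrix assembled above is exactly the form cv::warpAffine expects. A hedged usage sketch (the `mask` input and the nearest-neighbor interpolation are assumptions; the original rasterization code is not shown):

// Hedged sketch: scale and center a label mask into the icon rectangle
// using the affine matrix built in the constructor.
cv::Mat1b icon;
cv::warpAffine(mask, icon, trafo, iconSizecv,
               cv::INTER_NEAREST, cv::BORDER_CONSTANT, cv::Scalar(0));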
Example #11
	Spectrum sampleDirect(DirectSamplingRecord &dRec, const Point2 &) const {
		const Transform &trafo = m_worldTransform->eval(dRec.time);

		dRec.n = trafo(Vector(0, 0, 1));
		Float scale = dRec.n.length();

		Point localP = trafo.inverse().transformAffine(dRec.ref);
		localP.z *= scale;

		Point sample = m_cameraToSample.transformAffine(localP);

		if (sample.x < 0 || sample.x > 1 || sample.y < 0 ||
			sample.y > 1 || sample.z < 0 || sample.z > 1) {
			dRec.pdf = 0.0f;
			return Spectrum(0.0f);
		}

		dRec.p = trafo.transformAffine(Point(localP.x, localP.y, 0.0f));
		dRec.n /= scale;
		dRec.d = -dRec.n;
		dRec.dist = localP.z;
		dRec.uv = Point2(sample.x * m_resolution.x,
		                 sample.y * m_resolution.y);
		dRec.pdf = 1.0f;
		dRec.measure = EDiscrete;

		return Spectrum(m_invSurfaceArea);
	}
Example #12
#include <fstream>

#include <pcl/io/pcd_io.h>
#include <pcl/common/transforms.h>
#include <ros/console.h>

// Assumption: the original PointType definition is not shown; XYZ is a guess.
typedef pcl::PointXYZ PointType;

int main (int argc, char** argv)
{
  if (argc != 3)
  {
    ROS_INFO_STREAM("Please provide a point cloud file followed by a text file containing a 4x4 transformation matrix as arguments");
    exit(1);
  }
  pcl::PCDReader reader;
  pcl::PointCloud<PointType>::Ptr cloudIn (new pcl::PointCloud<PointType>);
  pcl::PointCloud<PointType>::Ptr cloudOut (new pcl::PointCloud<PointType>);
  pcl::PointCloud<PointType>::Ptr cloudOut_inv (new pcl::PointCloud<PointType>);
  Eigen::Matrix4f trafo, trafo_inv;
  reader.read (argv[1], *cloudIn);
  std::ifstream myfile;
  myfile.open (argv[2]);
  for (int row = 0; row < 4; row++)
    for (int col = 0; col < 4; col++)
    {
      myfile >> trafo (row, col);
    }
  trafo_inv = trafo.inverse();
  ROS_INFO_STREAM("transform to be used: \n" << trafo);


  pcl::transformPointCloud (*cloudIn, *cloudOut, trafo);
  pcl::transformPointCloud (*cloudIn, *cloudOut_inv, trafo_inv);

  pcl::PCDWriter writer;
  writer.write ("output.pcd", *cloudOut, false);
  writer.write ("output_inverse.pcd", *cloudOut_inv, false);
  return (0);
}
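A hedged addition (not in the original tool) that would guard against a singular or mistyped matrix file: the product of the matrix and its computed inverse should be close to the identity.

// Hedged sanity check right after computing trafo_inv:
if (!(trafo_inv * trafo).isApprox(Eigen::Matrix4f::Identity(), 1e-4f))
  ROS_WARN_STREAM("transformation matrix appears singular or ill-conditioned");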
Example #13
Transform Transform::perspective(Float fov, Float clipNear, Float clipFar) {
	/* Project vectors in camera space onto a plane at z=1:
	 *
	 *  xProj = x / z
	 *  yProj = y / z
	 *  zProj = (far * (z - near)) / (z * (far-near))
	 *
	 *  Camera-space depths are not mapped linearly!
	 */
	Float recip = 1.0f / (clipFar - clipNear);

	/* Perform a scale so that the field of view is mapped
	 * to the interval [-1, 1] */
	Float cot = 1.0f / std::tan(degToRad(fov / 2.0f));

	Matrix4x4 trafo(
		cot,  0,    0,   0,
		0,    cot,  0,   0,
		0,    0,    clipFar * recip, -clipNear * clipFar * recip,
		0,    0,    1,   0
	);


	return Transform(trafo);
}
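A hedged spot check of the depth mapping described in the comment above (assuming Transform's operator() performs the homogeneous divide):

Transform P = Transform::perspective(45.0f, 0.1f, 100.0f);
// zProj = far*(z - near) / (z*(far - near)), so:
//   P(Point(0, 0, 0.1f)).z   ≈ 0   (near plane)
//   P(Point(0, 0, 100.0f)).z ≈ 1   (far plane)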
Example #14
	Spectrum sampleDirection(DirectionSamplingRecord &dRec,
			PositionSamplingRecord &pRec,
			const Point2 &sample, const Point2 *extra) const {
		const Transform &trafo = m_worldTransform->eval(pRec.time);

		Point samplePos(sample.x, sample.y, 0.0f);

		if (extra) {
			/* The caller wants to condition on a specific pixel position */
			samplePos.x = (extra->x + sample.x) * m_invResolution.x;
			samplePos.y = (extra->y + sample.y) * m_invResolution.y;
		}

		pRec.uv = Point2(samplePos.x * m_resolution.x,
			samplePos.y * m_resolution.y);

		Float sinPhi, cosPhi, sinTheta, cosTheta;
		math::sincos(samplePos.x * 2 * M_PI, &sinPhi, &cosPhi);
		math::sincos(samplePos.y * M_PI, &sinTheta, &cosTheta);

		dRec.d = trafo(Vector(sinPhi*sinTheta, cosTheta, -cosPhi*sinTheta));
		dRec.measure = ESolidAngle;
		dRec.pdf = 1 / (2 * M_PI * M_PI * std::max(sinTheta, Epsilon));

		return Spectrum(1.0f);
	}
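The pdf above follows from the change of variables (u, v) -> (phi, theta) = (2*pi*u, pi*v) with dOmega = sin(theta) dtheta dphi, giving p(omega) = 1 / (2*pi^2 * sin(theta)). A hedged standalone check (double precision for clarity) that this pdf integrates to one over the sphere:

// Integrates p(omega) over the sphere; the phi integral contributes 2*pi.
static double integrateSphericalPdf(int n) {
	double sum = 0;
	for (int j = 0; j < n; ++j) {
		double theta = (j + 0.5) * M_PI / n;
		double pdf = 1.0 / (2 * M_PI * M_PI * std::sin(theta));
		sum += pdf * std::sin(theta) * (M_PI / n) * (2 * M_PI);
	}
	return sum; // exactly 1 up to rounding, for any n
}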
Example #15
	Spectrum sampleDirection(DirectionSamplingRecord &dRec,
			PositionSamplingRecord &pRec,
			const Point2 &sample, const Point2 *extra) const {
		const Transform &trafo = m_worldTransform->eval(pRec.time);

		Point samplePos(sample.x, sample.y, 0.0f);

		if (extra) {
			/* The caller wants to condition on a specific pixel position */
			samplePos.x = (extra->x + sample.x) * m_invResolution.x;
			samplePos.y = (extra->y + sample.y) * m_invResolution.y;
		}

		pRec.uv = Point2(samplePos.x * m_resolution.x,
			samplePos.y * m_resolution.y);

		/* Compute the corresponding position on the
		   near plane (in local camera space) */
		Point nearP = m_sampleToCamera(samplePos);
		nearP.x = nearP.x * (m_focusDistance / nearP.z);
		nearP.y = nearP.y * (m_focusDistance / nearP.z);
		nearP.z = m_focusDistance;

		Point apertureP = trafo.inverse().transformAffine(pRec.p);

		/* Turn that into a normalized ray direction */
		Vector d = normalize(nearP - apertureP);
		dRec.d = trafo(d);
		dRec.measure = ESolidAngle;
		dRec.pdf = m_normalization / (d.z * d.z * d.z);

		return Spectrum(1.0f);
	}
Example #16
	Spectrum sampleRay(Ray &ray, const Point2 &pixelSample,
			const Point2 &otherSample, Float timeSample) const {
		Point2 tmp = warp::squareToUniformDiskConcentric(otherSample)
			* m_apertureRadius;
		ray.time = sampleTime(timeSample);

		/* Compute the corresponding position on the
		   near plane (in local camera space) */
		Point nearP = m_sampleToCamera(Point(
			pixelSample.x * m_invResolution.x,
			pixelSample.y * m_invResolution.y, 0.0f));

		/* Aperture position */
		Point apertureP(tmp.x, tmp.y, 0.0f);

		/* Sampled position on the focal plane */
		Point focusP = nearP * (m_focusDistance / nearP.z);

		/* Turn these into a normalized ray direction, and
		   adjust the ray interval accordingly */
		Vector d = normalize(focusP - apertureP);
		Float invZ = 1.0f / d.z;
		ray.mint = m_nearClip * invZ;
		ray.maxt = m_farClip * invZ;

		const Transform &trafo = m_worldTransform->eval(ray.time);
		ray.setOrigin(trafo.transformAffine(apertureP));
		ray.setDirection(trafo(d));
		return Spectrum(1.0f);
	}
Example #17
// Writes the observed test prevalence and the Se/Sp-adjusted prevalence,
// each with its confidence interval, to the output text box. (Hungarian
// identifiers: "bal"/"jobb" = left/right CI bound, "prev" = prevalence.)
void MainWindow::kiir(double pbal, double pjobb, double prev, double Sp, double Se){
    double adjPrev = trafo(prev, Sp, Se);
    double balvp = trafo(pbal, Sp, Se);
    double jobbvp = trafo(pjobb, Sp, Se);
    QString s,lb,ub,lc,uc,pr;

    pr.sprintf("%.4f", prev);
    lc.sprintf("%.4f", pbal);
    uc.sprintf("%.4f", pjobb);
    ui->plainTextEdit->appendPlainText(QString("    Observed test prevalence:         %1    CI: (%2 , %3)").arg(pr).arg(lc).arg(uc));

    s.sprintf("%.4f", adjPrev);
    lb.sprintf("%.4f", balvp);
    ub.sprintf("%.4f", jobbvp);
    ui->plainTextEdit->appendPlainText(QString("    Prevalence adjusted for Se/Sp:  %1    CI: (%2 , %3)\n\n").arg(s).arg(lb).arg(ub));
}
Example #18
	Spectrum sampleRay(Ray &ray, const Point2 &pixelSample,
			const Point2 &otherSample, Float timeSample) const {
		ray.time = sampleTime(timeSample);
		ray.mint = Epsilon;
		ray.maxt = std::numeric_limits<Float>::infinity();

		const Transform &trafo = m_worldTransform->eval(ray.time);

		Float sinPhi, cosPhi, sinTheta, cosTheta;
		math::sincos(pixelSample.x * m_invResolution.x * 2 * M_PI, &sinPhi, &cosPhi);
		math::sincos(pixelSample.y * m_invResolution.y * M_PI, &sinTheta, &cosTheta);

		Vector d(sinPhi*sinTheta, cosTheta, -cosPhi*sinTheta);

		ray.setOrigin(trafo(Point(0.0f)));
		ray.setDirection(trafo(d));
		return Spectrum(1.0f);
	}
Example #19
	Spectrum sampleRay(Ray &ray,
			const Point2 &spatialSample,
			const Point2 &directionalSample,
			Float time) const {
		const Transform &trafo = m_worldTransform->eval(time);
		ray.setTime(time);
		ray.setOrigin(trafo.transformAffine(Point(0.0f)));
		ray.setDirection(trafo(Vector(0.0f, 0.0f, 1.0f)));
		return m_power;
	}
Example #20
	Spectrum sampleRayDifferential(RayDifferential &ray, const Point2 &pixelSample,
			const Point2 &otherSample, Float timeSample) const {
		/* Record pixel index, added by Lifan */
		ray.index.x = (int)std::floor(pixelSample.x);
		ray.index.y = (int)std::floor(pixelSample.y);

		Point2 tmp = warp::squareToUniformDiskConcentric(otherSample)
			* m_apertureRadius;
		ray.time = sampleTime(timeSample);

		/* Compute the corresponding position on the
		   near plane (in local camera space) */
		Point nearP = m_sampleToCamera(Point(
			pixelSample.x * m_invResolution.x,
			pixelSample.y * m_invResolution.y, 0.0f));

		/* Aperture position */
		Point apertureP(tmp.x, tmp.y, 0.0f);

		/* Sampled position on the focal plane */
		Float fDist = m_focusDistance / nearP.z;
		Point focusP  =  nearP       * fDist;
		Point focusPx = (nearP+m_dx) * fDist;
		Point focusPy = (nearP+m_dy) * fDist;

		/* Turn that into a normalized ray direction, and
		   adjust the ray interval accordingly */
		Vector d = normalize(focusP - apertureP);
		Float invZ = 1.0f / d.z;
		ray.mint = m_nearClip * invZ;
		ray.maxt = m_farClip * invZ;

		const Transform &trafo = m_worldTransform->eval(ray.time);
		ray.setOrigin(trafo.transformAffine(apertureP));
		ray.setDirection(trafo(d));
		ray.rxOrigin = ray.ryOrigin = ray.o;
		ray.rxDirection = trafo(normalize(Vector(focusPx - apertureP)));
		ray.ryDirection = trafo(normalize(Vector(focusPy - apertureP)));
		ray.hasDifferentials = true;

		return Spectrum(1.0f);
	}
Example #21
	ref<TriMesh> createTriMesh() {
		const uint32_t phiSteps = 40;

		ref<TriMesh> mesh = new TriMesh(getName(),
			phiSteps-1, 2*phiSteps, true, true, false);

		Point *vertices = mesh->getVertexPositions();
		Normal *normals = mesh->getVertexNormals();
		Point2 *texcoords = mesh->getVertexTexcoords();
		Triangle *triangles = mesh->getTriangles();

		Float dphi = (2 * M_PI) / (Float) (phiSteps-1);

		const Transform &trafo = m_objectToWorld->eval(0.0f);
		Point center = trafo(Point(0.0f));
		Normal normal = normalize(trafo(Normal(0, 0, 1)));

		for (uint32_t i=0; i<phiSteps; ++i) {
			Float phi = i*dphi;
			vertices[i] = center;
			vertices[phiSteps+i] = trafo(
				Point(std::cos(phi), std::sin(phi), 0)
			);

			normals[i] = normal;
			normals[phiSteps+i] = normal;
			texcoords[i] = Point2(0.0f, phi * INV_TWOPI);
			texcoords[phiSteps+i] = Point2(1.0f, phi * INV_TWOPI);
		}

		for (uint32_t i=0; i<phiSteps-1; ++i) {
			triangles[i].idx[0] = i;
			triangles[i].idx[1] = i+phiSteps;
			triangles[i].idx[2] = i+phiSteps+1;
		}

		mesh->copyAttachments(this);
		mesh->configure();

		return mesh.get();
	}
Example #22
Transform Transform::glOrthographic(Float clipNear, Float clipFar) {
	Float a = -2.0f / (clipFar - clipNear),
	      b = -(clipFar + clipNear) / (clipFar - clipNear);

	Matrix4x4 trafo(
		1, 0, 0, 0,
		0, 1, 0, 0,
		0, 0, a, b,
		0, 0, 0, 1
	);
	return Transform(trafo);
}
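A hedged check of the coefficients above, using OpenGL's convention of a camera looking down the negative z axis: a*(-near) + b = -1 and a*(-far) + b = +1, so the visible depth range maps to NDC [-1, 1].

Transform O = Transform::glOrthographic(0.1f, 100.0f);
// O(Point(0, 0, -0.1f)).z   -> -1   (near)
// O(Point(0, 0, -100.0f)).z -> +1   (far)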
Example #23
/*
 * maximize c^T x subject to Ax = b and x >= 0
 *
 * inplace, b is last column of A, c first row
 */
static void simplex2(unsigned int D, unsigned int N, float A[D + 1][N + 1])
{
	// 2. Loop over all columns

//	print_tableaux(D, N, A);

	while (true) {

		unsigned int i = 0;

		for (i = 0; i < N; i++)
			 if (A[0][i] < 0.)
				break;

		if (i == N)
			break;

		// 3. find pivot element

		// Bland's rule

		int pivot_index = -1;
		float pivot_value = 0.;

		for (unsigned int j = 1; j < D + 1; j++) {

			if (0. < A[j][i]) {

				float nval = A[j][N] / A[j][i];

				if ((-1 == pivot_index) || (nval < pivot_value)) {

					pivot_value = nval;
					pivot_index = j;
				}
			}
		}

		if (-1 == pivot_index)
			break;

//		printf("PI %dx%d\n", pivot_index, i);

		trafo(D + 1, N + 1, A, pivot_index, i);

//		print_tableaux(D, N, A);
		float x[N];
		solution(D, N, x, A);
		assert(feasible_p(D, N, x, A));
	}
//	print_tableaux(D, N, A);
}
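A hedged usage sketch for the tableau layout described in the header comment (row 0 carries the objective, the last column carries b). The sign convention of the cost row depends on trafo() and solution(), which are not shown here, so the negated costs below are an assumption:

// Illustrative only: maximize x0 + x1 subject to x0 + x1 = 1, x >= 0.
enum { D = 1, N = 2 };
float A[D + 1][N + 1] = {
	{ -1.f, -1.f, 0.f },	// objective row (assumed negated for maximization)
	{  1.f,  1.f, 1.f },	// constraint x0 + x1 = 1
};
simplex2(D, N, A);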
Example #24
Transform Transform::glPerspective(Float fov, Float clipNear, Float clipFar) {
	Float recip = 1.0f / (clipNear - clipFar);
	Float cot = 1.0f / std::tan(degToRad(fov / 2.0f));

	Matrix4x4 trafo(
		cot,   0,     0,   0,
		0,     cot,   0,   0,
		0,     0,     (clipFar + clipNear) * recip,  2 * clipFar * clipNear * recip,
		0,     0,     -1,   0
	);

	return Transform(trafo);
}
Example #25
Transform Transform::glFrustum(Float left, Float right, Float bottom, Float top, Float nearVal, Float farVal) {
	Float invFMN = 1 / (farVal-nearVal);
	Float invTMB = 1 / (top-bottom);
	Float invRML = 1 / (right-left);

	Matrix4x4 trafo(
		2*nearVal*invRML, 0, (right+left)*invRML, 0,
		0, 2*nearVal*invTMB, (top+bottom)*invTMB, 0,
		0, 0, -(farVal + nearVal) * invFMN, -2*farVal*nearVal*invFMN,
		0, 0, -1, 0
	);

	return Transform(trafo);
}
Example #26
	Spectrum sampleRayDifferential(RayDifferential &ray, const Point2 &pixelSample,
			const Point2 &otherSample, Float timeSample) const {
		ray.time = sampleTime(timeSample);
		const Transform &trafo = m_worldTransform->eval(ray.time);

		/* Compute the corresponding position on the
		   near plane (in local camera space) */
		Point nearP = m_sampleToCamera.transformAffine(Point(
			pixelSample.x * m_invResolution.x,
			pixelSample.y * m_invResolution.y, 0.0f));
		nearP.z = 0.0f;

		ray.setOrigin(trafo.transformAffine(nearP));
		ray.setDirection(normalize(trafo(Vector(0, 0, 1))));
		ray.mint = m_nearClip;
		ray.maxt = m_farClip;
		ray.rxOrigin = trafo(nearP + m_dx);
		ray.ryOrigin = trafo(nearP + m_dy);
		ray.rxDirection = ray.ryDirection = ray.d;
		ray.hasDifferentials = true;

		return Spectrum(1.0f);
	}
	Spectrum samplePosition(PositionSamplingRecord &pRec,
			const Point2 &sample, const Point2 *extra) const {
		const Transform &trafo = m_worldTransform->eval(pRec.time);

		Point2 aperturePos = warp::squareToUniformDiskConcentric(sample)
			* m_apertureRadius;

		pRec.p = trafo.transformAffine(
			Point(aperturePos.x, aperturePos.y, 0.0f));
		pRec.n = trafo(Vector(0.0f, 0.0f, 1.0f));
		pRec.pdf = m_aperturePdf;
		pRec.measure = EArea;
		return Spectrum(1.0f);
	}
Transform Transform::translate(const Vector &v) {
	Matrix4x4 trafo(
		1, 0, 0, v.x,
		0, 1, 0, v.y,
		0, 0, 1, v.z,
		0, 0, 0, 1
	);
	Matrix4x4 invTrafo(
		1, 0, 0, -v.x,
		0, 1, 0, -v.y,
		0, 0, 1, -v.z,
		0, 0, 0, 1
	);
	return Transform(trafo, invTrafo);
}
Transform Transform::scale(const Vector &v) {
	Matrix4x4 trafo(
		v.x, 0,   0,   0,
		0,   v.y, 0,   0,
		0,   0,   v.z, 0,
		0,   0,   0,   1
	);
	Matrix4x4 invTrafo(
		1.0f/v.x, 0,        0,        0,
		0,        1.0f/v.y, 0,        0,
		0,        0,        1.0f/v.z, 0,
		0,        0,        0,        1
	);
	return Transform(trafo, invTrafo);
}
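Because translate() and scale() store analytic inverses, Transform::inverse() can simply swap the two matrices instead of inverting numerically. A hedged composition sketch:

Transform T = Transform::translate(Vector(1, 2, 3))
            * Transform::scale(Vector(2, 2, 2));
Point p = T(Point(0.0f));	// (1, 2, 3)
Point q = T.inverse()(p);	// back to the origin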
Example #30
// Transforms a shape with the current transformation matrix and
// returns the transformed shape
TopoDS_Shape CTiglTransformation::Transform(const TopoDS_Shape& shape) const
{

    if (IsUniform()) {
        gp_Trsf t;
        t.SetValues(m_matrix[0][0], m_matrix[0][1], m_matrix[0][2], m_matrix[0][3],
                    m_matrix[1][0], m_matrix[1][1], m_matrix[1][2], m_matrix[1][3],
                    m_matrix[2][0], m_matrix[2][1], m_matrix[2][2], m_matrix[2][3]
#if OCC_VERSION_HEX >= VERSION_HEX_CODE(6,8,0)
                );
#else
                ,1e-10, 1e-10);
#endif
        BRepBuilderAPI_Transform trafo(shape, t);
        return trafo.Shape();
    }