QPoint &EdgeWidgetTransformHelper::transform( QPoint &point )
{
	switch (_mode)
	{
		case TransformMirror:
			return applyMatrix(point, mirrorMatrix);
		case TransformSimple:
		default:
			return applyMatrix(point, simpleMatrix);
	}
}
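The applyMatrix overload used above is not part of the snippet. A minimal sketch, assuming the mirror and simple matrices are QTransform instances (hypothetical helper, the real one may differ):

static QPoint &applyMatrix(QPoint &point, const QTransform &matrix)
{
	// QTransform::map() returns the transformed point; write it back so the
	// reference-returning transform() above keeps working.
	point = matrix.map(point);
	return point;
}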
Example #2
void Renderer::updateCamera(const glm::vec3 &delta) {
  glm::mat4 viewMat = glm::inverse(camera_.calculateViewMatrix());

  glm::vec3 forward = applyMatrix(viewMat, glm::vec3(0, 1, 0), false);
  forward = glm::normalize(glm::vec3(forward.x, forward.y, 0));

  glm::vec3 right = applyMatrix(viewMat, glm::vec3(1, 0, 0), false);
  right = glm::normalize(glm::vec3(right.x, right.y, 0));

  auto transformedDelta = delta.x * right + delta.y * forward;
  setCameraLookAt(camera_.getLookAt() + transformedDelta);
}
Example #3
/**
 * Function: rotateCamera
 * Description: Rotate this camera by the three given rotation angles relative
 * to its current orientation. The rotation is applied by updating the camera's
 * lookVector, upVector and rightVector.
 **/
void WZ_Camera::rotateCamera(float rollDegree, float pitchDegree, float yawDegree)
{
	// Calculate transformation matrix
	Matrix4By4<float> rotationMatrix_along_look = makeRotationMatrix(rollDegree, lookVector);
	Matrix4By4<float> rotationMatrix_along_right = makeRotationMatrix(pitchDegree, rightVector);
	Matrix4By4<float> rotationMatrix_along_up = makeRotationMatrix(yawDegree, upVector);
	Matrix4By4<float> rotationMatrix = rotationMatrix_along_look * rotationMatrix_along_right * rotationMatrix_along_up;

	// Transform the three basis vectors
	upVector = applyMatrix(rotationMatrix, upVector);
	lookVector = applyMatrix(rotationMatrix, lookVector);
	rightVector = applyMatrix(rotationMatrix, rightVector);
}
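Matrix4By4 and makeRotationMatrix are project-specific helpers. A minimal GLM-based sketch of the same basis-vector rotation, assuming angles in degrees and directions multiplied with w = 0:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Hypothetical stand-in for the rotation above, using GLM axis-angle rotations.
static void rotateBasis(glm::vec3 &look, glm::vec3 &up, glm::vec3 &right,
                        float rollDeg, float pitchDeg, float yawDeg)
{
	// Compose roll (about look), pitch (about right) and yaw (about up).
	glm::mat4 R = glm::rotate(glm::mat4(1.0f), glm::radians(rollDeg),  look) *
	              glm::rotate(glm::mat4(1.0f), glm::radians(pitchDeg), right) *
	              glm::rotate(glm::mat4(1.0f), glm::radians(yawDeg),   up);

	// Rotate the three basis vectors as directions (w = 0).
	look  = glm::vec3(R * glm::vec4(look,  0.0f));
	up    = glm::vec3(R * glm::vec4(up,    0.0f));
	right = glm::vec3(R * glm::vec4(right, 0.0f));
}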
Example #4
std::tuple<glm::vec3, glm::vec3> Renderer::screenToRay(
    const glm::vec2 &screenCoord) const {
  glm::vec3 ndc = screenToNDC(screenCoord);
  auto inverseProjMat = glm::inverse(getProjectionStack().current());
  auto inverseViewMat = glm::inverse(getViewStack().current());
  glm::vec3 cameraDir = glm::normalize(
      applyMatrix(inverseProjMat, glm::vec3(ndc)));
  glm::vec3 worldDir = glm::normalize(
      applyMatrix(inverseViewMat, cameraDir, false));
  glm::vec3 worldPos = applyMatrix(inverseViewMat, glm::vec3(0.f));

  return std::make_tuple(worldPos, worldDir);
}
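The applyMatrix helper with the trailing bool is not shown in these Renderer snippets. A plausible sketch, assuming the flag distinguishes points (w = 1, with perspective divide) from directions (w = 0):

#include <glm/glm.hpp>

// Hypothetical version of the helper assumed by screenToRay and updateCamera.
static glm::vec3 applyMatrixSketch(const glm::mat4 &m, const glm::vec3 &v,
                                   bool isPoint = true) {
  glm::vec4 h = m * glm::vec4(v, isPoint ? 1.f : 0.f);  // homogeneous multiply
  // Points need the perspective divide; directions drop translation and w.
  return isPoint ? glm::vec3(h) / h.w : glm::vec3(h);
}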
Example #5
bool WireCreator::checkPath(QVector<int> path)
{
    auto modifiedPath=pathToPoints(path);
    auto basicPath=modifiedPath;

    m_wire.read_Input(modifiedPath);


    for (float rot = 0.0f; rot < 360.0f; rot += 5.0f) {

        while (!m_wire.nextPoint3()) {
            //qDebug()<<"no collision for: "<<m_wire.outerPoints;
            if (m_wire.success) {
                cPath = modifiedPath;
                return true;
            }
        }

        auto rotMat=QMatrix4x4();
        rotMat.setToIdentity();
        rotMat.rotate(rot,1.0f, 0.0f, 0.0f);
        //qDebug()<<"old path"<<modifiedPath;
        modifiedPath=applyMatrix(basicPath, rotMat);
        //qDebug()<<"new path"<<modifiedPath;
        m_wire.read_Input(modifiedPath);
    }
    //qDebug()<<"no rotation found";
    return false;

}
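pathToPoints and the applyMatrix overload used on the path are not shown. Assuming the path is a QVector<QVector3D>, a simple sketch of that overload:

#include <QVector>
#include <QVector3D>
#include <QMatrix4x4>

// Hypothetical overload: transform every point of the path by the given matrix.
static QVector<QVector3D> applyMatrix(const QVector<QVector3D> &path,
                                      const QMatrix4x4 &matrix)
{
    QVector<QVector3D> result;
    result.reserve(path.size());
    for (const QVector3D &p : path)
        result.append(matrix.map(p));   // QMatrix4x4::map() applies rotation and translation
    return result;
}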
Example #6
void Renderer::startRender() {
  renderdt_ = Clock::secondsSince(lastRender_);
  lastRender_ = Clock::now();

  renderdt_ *= timeMultiplier_;

  gameTime_ += renderdt_;

  glClearColor(0.f, 0.f, 0.f, 0.f);
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  glEnable(GL_DEPTH_TEST);
  glEnable(GL_BLEND);
  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

  // Set up matrices
  float aspect = resolution_.x / resolution_.y;
  float fov = 60.f;  // field of view in degrees (note: newer GLM versions expect radians here)
  getProjectionStack().clear();
  getProjectionStack().current() = glm::perspective(fov, aspect, 0.1f, 100.f);
  getViewStack().clear();
  getViewStack().current() = camera_.calculateViewMatrix();

  // Set up lights
  // TODO(zack): read light pos from map config
  auto lightPos = applyMatrix(getViewStack().current(), glm::vec3(-5, -5, 10));
  setParam("renderer.lightPos", lightPos);
  setParam("renderer.light.ambient", glm::vec3(0.1f));
  setParam("renderer.light.diffuse", glm::vec3(1.f));
  setParam("renderer.light.specular", glm::vec3(1.f));
}
Example #7
	void Calibration::draw3d(int i) const {
		ofPushStyle();
		ofPushMatrix();
		ofNoFill();
		
		applyMatrix(makeMatrix(boardRotations[i], boardTranslations[i]));
		
		ofSetColor(ofColor::fromHsb(255 * i / size(), 255, 255));
		
		ofDrawBitmapString(ofToString(i), 0, 0);
		
		for(int j = 0; j < (int)objectPoints[i].size(); j++) {
			ofPushMatrix();
			ofTranslate(toOf(objectPoints[i][j]));
			ofCircle(0, 0, .5);
			ofPopMatrix();
		}

		ofMesh mesh;
		mesh.setMode(OF_PRIMITIVE_LINE_STRIP);
		for(int j = 0; j < (int)objectPoints[i].size(); j++) {
			ofVec3f cur = toOf(objectPoints[i][j]);
			mesh.addVertex(cur);
		}
		mesh.draw();
		
		ofPopMatrix();
		ofPopStyle();
	}
Example #8
QImage ImgConvolutions::laplacianov8(QImage *img){
    QVector<double> filterMatrix(9);
    filterMatrix.fill(1.0);
    filterMatrix[4] = -8;
    qDebug() << "Applying laplace v4";
    QImage ret = applyMatrix(img, filterMatrix);
    return ret;
}
Example #9
QImage ImgConvolutions::pasaBajos(QImage *img, int filterSize){
    int matrixSize = filterSize * filterSize;
    QVector<double> filterMatrix(matrixSize);
    filterMatrix.fill(1.0);
    qDebug() << "Applying low-frequency filter";
    QImage ret = applyMatrix(img, filterMatrix, 1.0/matrixSize);
    return ret;
}
Example #10
QImage ImgConvolutions::sharpen(QImage *img, int filterSize){
    int matrixSize = filterSize * filterSize;
    QVector<double> filterMatrix(matrixSize);
    filterMatrix.fill(-1.0);
    filterMatrix[matrixSize/2] = matrixSize;
    qDebug() << "Applying sharpening";
    QImage ret = applyMatrix(img, filterMatrix);
    return ret;
}
Example #11
QImage ImgConvolutions::pasaAltos(QImage *img, int filterSize){
    int matrixSize = filterSize * filterSize;
    QVector<double> filterMatrix(matrixSize);
    filterMatrix.fill(-1.0);
    filterMatrix[matrixSize/2] = matrixSize - 1;
    qDebug() << "Applying high-frequency filter";
    QImage ret = applyMatrix(img, filterMatrix);
    return ret;
}
Example #12
QImage ImgConvolutions::motionBlur(QImage *img, int filterSize){
    QVector<double> filterMatrix(filterSize * filterSize);
    filterMatrix.fill(0.0);
    for(int i = 0; i < filterSize; i++){
        filterMatrix[(i * filterSize) + i] = 1.0;
    }
    qDebug() << "Applying motion blur";
    QImage ret = applyMatrix(img, filterMatrix, 1.0/filterSize);
    return ret;
}
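All of the ImgConvolutions filters above delegate to an applyMatrix convolution helper that is not part of this listing. A minimal sketch, assuming a square kernel stored row-major, clamped borders and an optional scale factor (the real implementation may differ in border and rounding handling):

#include <QImage>
#include <QVector>
#include <QtGlobal>
#include <cmath>

static QImage applyMatrixSketch(const QImage *img, const QVector<double> &kernel,
                                double factor = 1.0)
{
    const int size = int(std::sqrt(double(kernel.size())));   // kernel is size x size
    const int half = size / 2;
    QImage out(img->size(), img->format());

    for (int y = 0; y < img->height(); ++y) {
        for (int x = 0; x < img->width(); ++x) {
            double r = 0.0, g = 0.0, b = 0.0;
            for (int ky = 0; ky < size; ++ky) {
                for (int kx = 0; kx < size; ++kx) {
                    // Clamp sample coordinates at the image border.
                    const int sx = qBound(0, x + kx - half, img->width() - 1);
                    const int sy = qBound(0, y + ky - half, img->height() - 1);
                    const QRgb p = img->pixel(sx, sy);
                    const double w = kernel[ky * size + kx];
                    r += w * qRed(p);
                    g += w * qGreen(p);
                    b += w * qBlue(p);
                }
            }
            out.setPixel(x, y, qRgb(qBound(0, int(r * factor), 255),
                                    qBound(0, int(g * factor), 255),
                                    qBound(0, int(b * factor), 255)));
        }
    }
    return out;
}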
Example #13
void applyTestino(Bitmap* bitmap) {
	//Cache to local variables
	unsigned char* red = (*bitmap).red;
	unsigned char* green = (*bitmap).green;
	unsigned char* blue = (*bitmap).blue;

	unsigned int length = (*bitmap).width * (*bitmap).height;
	register unsigned int i;
	unsigned char r, g, b;
	//HSBColour hsb;
	register unsigned char grey;
	short int greyscaleInvertMaskScreenComponentLut[256][256];
	short int overlayLut[256][256];
	unsigned int j;
	for (i = 256; i--;) {
		for (j = 256; j--;) {
			greyscaleInvertMaskScreenComponentLut[i][j] = -1;
			overlayLut[i][j] = -1;
		}
	}

	float matrix[4][4];
	identMatrix(matrix);
	float saturation = 1.5f;
	saturateMatrix(matrix, &saturation);
	applyMatrix(bitmap, matrix);

	for (i = length; i--;) {
		//rgbToHsb(red[i], green[i], blue[i], &hsb);
		//hsb.s = min(hsb.s * 1.5f, 1.0f);
		//hsbToRgb(&hsb, &r, &g, &b);

		r = red[i];
		g = green[i];
		b = blue[i];

		grey = ((unsigned int)red[i] + (unsigned int)green[i] + (unsigned int)blue[i])/3;
		r = (greyscaleInvertMaskScreenComponentLut[grey][r] == -1) ? greyscaleInvertMaskScreenComponentLut[grey][r] = greyscaleInvertMaskScreenComponent(grey, 0.5f, r) : greyscaleInvertMaskScreenComponentLut[grey][r];
		g = (greyscaleInvertMaskScreenComponentLut[grey][g] == -1) ? greyscaleInvertMaskScreenComponentLut[grey][g] = greyscaleInvertMaskScreenComponent(grey, 0.5f, g) : greyscaleInvertMaskScreenComponentLut[grey][g];
		b = (greyscaleInvertMaskScreenComponentLut[grey][b] == -1) ? greyscaleInvertMaskScreenComponentLut[grey][b] = greyscaleInvertMaskScreenComponent(grey, 0.5f, b) : greyscaleInvertMaskScreenComponentLut[grey][b];

		// Create black and white pixel
		grey = blackAndWhite(red[i], green[i], blue[i]);

		r = (overlayLut[grey][r] == -1) ? overlayLut[grey][r] = overlayPixelComponents(grey, r, 1.0f) : overlayLut[grey][r];
		g = (overlayLut[grey][g] == -1) ? overlayLut[grey][g] = overlayPixelComponents(grey, g, 1.0f) : overlayLut[grey][g];
		b = (overlayLut[grey][b] == -1) ? overlayLut[grey][b] = overlayPixelComponents(grey, b, 1.0f) : overlayLut[grey][b];
		red[i] = (overlayLut[grey][r] == -1) ? overlayLut[grey][r] = overlayPixelComponents(grey, r, 1.0f) : overlayLut[grey][r];
		green[i] = (overlayLut[grey][g] == -1) ? overlayLut[grey][g] = overlayPixelComponents(grey, g, 1.0f) : overlayLut[grey][g];
		blue[i] = (overlayLut[grey][b] == -1) ? overlayLut[grey][b] = overlayPixelComponents(grey, b, 1.0f) : overlayLut[grey][b];
	}
}
Example #14
bool Sphere::occlude(const Ray &ray) const {
    vec3 o = applyMatrix(inv_t, ray.o);
    vec3 d = normalize(applyMatrix(inv_t, ray.o + ray.d) - o);

    float r2 = r * r;
    vec3 toSphere = c - o;
    float l2 = dot(toSphere, toSphere);

    if (l2 > r2) {
        float d2 = dot(toSphere, d);
        if (d2 <= 0.0f) {
            return false;
        }

        float thc = r2 - l2 + d2 * d2;
        if (thc <= 0.0f) {
            return false;
        }

        float thc_sqrt = sqrtf(thc);
        float t_temp = d2 - thc_sqrt;
        if (t_temp > ray.tmin) {
            return t_temp < ray.tmax;
        }
        else {
            t_temp = d2 + thc_sqrt;
            return t_temp > ray.tmin && t_temp < ray.tmax;
        }
    }
    else {
        float d2 = dot(toSphere, d);
        float thc = r2 - l2 + d2 * d2;
        float t_temp = sqrtf(thc) + d2;
        return t_temp > ray.tmin && t_temp < ray.tmax;
    }
}
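For reference, the quantities in this test follow the standard geometric ray/sphere derivation: with toSphere = c - o, the value d2 = dot(toSphere, d) is the distance along the ray to the point of closest approach, so the squared perpendicular distance from the sphere center to the ray is l2 - d2 * d2. The squared half-chord is therefore thc = r2 - (l2 - d2 * d2) = r2 - l2 + d2 * d2, exactly the expression used above, and the two candidate hit distances are d2 - sqrt(thc) and d2 + sqrt(thc).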
Example #15
/*
Apply the current transformation matrix to a single vertex
*/
void transform(GzRender *render, GzCoord vl)
{
	// Promote the vertex to homogeneous coordinates (w = 1)
	float homoCoord[4];
	for (int j = 0; j < 3; j++)
		homoCoord[j] = vl[j];
	homoCoord[3] = 1;
	applyMatrix(homoCoord, render->Ximage[render->matlevel - 1]);

	// Return transformation values back to vl
	for (int j = 0; j < 3; j++)
	{
		vl[j] = homoCoord[j] / homoCoord[3];
	}
}
Example #16
QImage ImgConvolutions::laplacianov4(QImage *img){
    QVector<double> filterMatrix(9);
    filterMatrix[0] = 0.0;
    filterMatrix[1] = 1.0;
    filterMatrix[2] = 0.0;
    filterMatrix[3] = 1.0;
    filterMatrix[4] = -4.0;
    filterMatrix[5] = 1.0;
    filterMatrix[6] = 0.0;
    filterMatrix[7] = 1.0;
    filterMatrix[8] = 0.0;
    qDebug() << "Applying laplace v4";
    QImage ret = applyMatrix(img, filterMatrix);
    return ret;
}
Example #17
void testApp::draw() {
	ofBackground(128);
	
	cam.begin();
	glEnable(GL_DEPTH_TEST);
	
	glPushMatrix();
	glScaled(1, -1, -1);
	
	ofDrawAxis(100);
	
	ofSetColor(255);
	curColor.draw(0, 0, curColor.getWidth(), curColor.getHeight());
	
	glEnableClientState(GL_VERTEX_ARRAY);
	glEnableClientState(GL_COLOR_ARRAY);
	glColorPointer(3, GL_FLOAT, sizeof(Point3f), &(pointCloudColors[0].x));
	glVertexPointer(3, GL_FLOAT, sizeof(Point3f), &(pointCloud[0].x));
	glDrawArrays(GL_POINTS, 0, pointCloud.size());
	glDisableClientState(GL_COLOR_ARRAY);
	glDisableClientState(GL_VERTEX_ARRAY);
	
	glDisable(GL_DEPTH_TEST);
	
	ofSetColor(255);
	glEnableClientState(GL_VERTEX_ARRAY);
	glVertexPointer(2, GL_FLOAT, sizeof(Point2f), &(imagePoints[0].x));
	glDrawArrays(GL_POINTS, 0, pointCloud.size());
	glDisableClientState(GL_VERTEX_ARRAY);
	
	Calibration* curCalibration;
	if(mouseX < ofGetWidth() / 2) {
		curCalibration = &kinectCalibration;
	} else {		
		curCalibration = &colorCalibration;
		applyMatrix(makeMatrix(rotationColorToKinect, translationColorToKinect));
	}
	
	curCalibration->draw3d(curImage);
	
	glPopMatrix();
	
	cam.end();
	
}
Example #18
glm::mat4 makeCylindricalBillboardTransform(
    const glm::vec3 &pos,
    const glm::vec3 &rot_axis) {
  glm::mat4 view_transform = getViewStack().current();
  glm::vec3 up = glm::normalize(rot_axis);

  glm::vec3 cpos = applyMatrix(glm::inverse(view_transform), glm::vec3(0.f));
  glm::vec3 look = pos - cpos;
  glm::vec3 right = glm::normalize(glm::cross(look, up));
  look = glm::normalize(glm::cross(up, right));

  glm::mat4 transform = glm::mat4(1.f);
  transform[0] = glm::vec4(up, 0.f);
  transform[1] = glm::vec4(right, 0.f);
  transform[2] = glm::vec4(look, 0.f);
  transform[3] = glm::vec4(pos, 1.f);

  return transform;
}
Example #19
 void EntityModelRenderer::render(ShaderProgram& shaderProgram, Transformation& transformation, const Vec3f& position, const Quatf& rotation) {
     const Mat4f matrix = translationMatrix(position) * rotationMatrix(rotation);
     ApplyModelMatrix applyMatrix(transformation, matrix);
     render(shaderProgram);
 }
Example #20
QPoint &EdgeWidgetTransformHelper::transform( QPoint &point )
{
    return applyMatrix(point);
}
Example #21
void RemoteControl::caminatas (byte comando) {
	
	// all of this is written pretty badly and needs to be reorganized
	// for example this "anguloso"... maybe the other variables should also live here
	static float anguloso = 0;
	
	if (anguloso != 0) {
		COORD2D matrix [2];
		getRotationMatrix (matrix, anguloso);
		centro_caminata = applyMatrix (centro_caminata, matrix);
		anguloso = 0;
	}
	
	// setup switch
	switch (comando) {
		/// very provisional
		///////////////////////////
		case 5:
			mov.salto (velocidad, HALF_PI);
			break;
		///////////////////////////	
		/// end of the very provisional block
		case RC_UP:
			texto1 = "UP";
			angulo = angulo_offset + HALF_PI;     
			break;
		
		case RC_DOWN:
			texto1 = "DN";
			angulo = angulo_offset - HALF_PI;
			break;
		
		case RC_LEFT:
			texto1 = "LEFT";
			if (modo == CAMINATAS1) {
				angulo = angulo_offset + PI;
			} else {
				anguloso = 0;
				mov.mon_angulo = &anguloso;       // here the offset angle would be used to rotate the center
				mov.curva (velocidad, desplazamiento, (COORD2D) {0, 0} , CCW, marcha, largo_pasos);
			}
			break;
	
		case RC_RIGHT:
			texto1 = "RIGHT";
			if (modo == CAMINATAS1) {
				angulo = angulo_offset;
			} else {
				anguloso = 0;
				mov.mon_angulo = &anguloso;       // here the offset angle would be used to rotate the center
				mov.curva (velocidad, desplazamiento, (COORD2D) {0, 0} , CW, marcha, largo_pasos);
			}
			break;

		case RC_MENU:
			texto1 = "MENU";
			mov.mon_angulo = NULL;
			mov.curva (velocidad, desplazamiento, centro_caminata, CCW, marcha, largo_pasos);
			break;
			
		case RC_EXIT:
			texto1 = "EXIT";
			mov.mon_angulo = NULL;
			mov.curva (velocidad, desplazamiento, centro_caminata, CW, marcha, largo_pasos);
			break;
			
		case RC_MTS:
			texto1 = "MTS";
			mov.mon_angulo = &angulo_offset;
			mov.curva (velocidad, desplazamiento, centro_caminata, CCW, marcha, largo_pasos);
			break;
			
		case RC_CCTTX:
			mov.mon_angulo = &angulo_offset;
			mov.curva (velocidad, desplazamiento, centro_caminata, CW, marcha, largo_pasos);
			texto1 = "CC_TTX";
			break;
			
		case RC_ENTER1:
			texto1 = "STOP";
			mov.stop();
			isMoving = false;
			break;
			
		case RC_VOL_UP:
			if (pantalla.isBusy()) {break;}
			velocidad = constrain (velocidad+inc, 1, 50);
			texto1 = "Vel " + float2string (velocidad);
			if (isMoving) {mov.set_vel (velocidad);}
			retardo = true;
			break;
			
		case RC_VOL_DN:
			if (pantalla.isBusy()) {break;}
			velocidad = constrain (velocidad-inc, 1, 50);
			texto1 = "Vel " + float2string (velocidad);
			if (isMoving) {mov.set_vel (velocidad);}
			retardo = true;
			break;
			
		case RC_CH_UP:
			if (pantalla.isBusy()) {break;}
			if (!isMoving) {
				largo_pasos = constrain (largo_pasos+inc, 0, 20);
				texto1 = "Paso "; 
				if (largo_pasos == 0) {texto1 += "AUTO";} else {texto1 += float2string (largo_pasos);}
			} else {
				texto1 = "Escala " + String (mov.dec_escala(), DEC);
			}
			retardo = true;
			break;
			
		case RC_CH_DN:
			if (pantalla.isBusy()) {break;}
			if (!isMoving) {
				largo_pasos = constrain (largo_pasos-inc, 0, 20);
				texto1 = "Paso "; 
				if (largo_pasos == 0) {texto1 += "AUTO";} else {texto1 += float2string (largo_pasos);}
			} else {
				texto1 = "Escala " + String (mov.inc_escala(), DEC);
			}
			retardo = true;
			break;
	}
	
	// execution switch (and there may be more; maybe the variable switched on in this second pass shouldn't be "comando")
	switch (comando) {
		case RC_UP: case RC_DOWN: case RC_LEFT: case RC_RIGHT:
			if (modo == CAMINATAS1) {mov.mon_desplazamiento = NULL;}
			else if (modo == CAMINATAS2) {
				if (comando == RC_LEFT || comando == RC_RIGHT) {break;}    // this whole logic needs to be reworked
				mov.mon_desplazamiento = &centro_caminata;
			}
		  mov.recta (velocidad, desplazamiento, angulo, marcha, largo_pasos);
			isMoving = true;
			break;
	}

}
Example #22
void TextNode::applyDeltaMatrix(QPointF delta) {
	// QTransform::fromTranslate() is a static factory that returns the translation,
	// so its result must be assigned to t.
	QTransform t = QTransform::fromTranslate(delta.x(), delta.y());
	applyMatrix(t);
}
Example #23
int applySahara(Bitmap* bitmap) {
	int length = (*bitmap).width * (*bitmap).height;
	int i;
	unsigned char r, g, b;

	//HSBColour hsb;
	unsigned char* red = (*bitmap).red;
	unsigned char* green = (*bitmap).green;
	unsigned char* blue = (*bitmap).blue;
	unsigned char brightnessLut[256];
	unsigned char contrastLut[256];
	for (i = 0; i < 256; i++) {
		float pixelf = i/255.0f;
		//brightnessLut[i] = 255*applyBrightnessToPixelComponent(pixelf, 0.35433f);
		//contrastLut[i] = 255*applyContrastToPixelComponent(pixelf, 0.1496f);
		brightnessLut[i] = 255*applyBrightnessToPixelComponent(pixelf, 0.45f);
		contrastLut[i] = 255*applyContrastToPixelComponent(pixelf, 0.1f);
	}
	for (i = length; i--; ) {
		r = brightnessLut[red[i]];
		g = brightnessLut[green[i]];
		b = brightnessLut[blue[i]];

		r = contrastLut[r];
		green[i] = contrastLut[g];
		b = contrastLut[b];

		red[i] = (r*0.8431f/*215*/)+40; //compress the red channel into roughly 40 - 255
		blue[i] = (b*0.8823f/*225*/)+30; //compress the blue channel into roughly 30 - 255

		//rgbToHsb(red[i], green[i], blue[i], &hsb);
		//hsb.s = hsb.s * 0.55f;

		//hsbToRgb(&hsb, &red[i], &green[i], &blue[i]);
	}

	float matrix[4][4];
	identMatrix(matrix);
	float saturation = 0.65f;
	saturateMatrix(matrix, &saturation);
	applyMatrix(bitmap, matrix);

	unsigned char* blurRed;
	unsigned char* blurGreen;
	unsigned char* blurBlue;
	int resultCode = newUnsignedCharArray(length, &blurRed);
	if (resultCode != MEMORY_OK) {
		return resultCode;
	}
	resultCode = newUnsignedCharArray(length, &blurGreen);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		return resultCode;
	}
	resultCode = newUnsignedCharArray(length, &blurBlue);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		freeUnsignedCharArray(&blurGreen);
		return resultCode;
	}

	float blurRadius = 1.0f;
	resultCode = stackBlur(&blurRadius, (*bitmap).red, (*bitmap).green, (*bitmap).blue, &((*bitmap).width), &((*bitmap).height), blurRed, blurGreen, blurBlue);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		freeUnsignedCharArray(&blurGreen);
		freeUnsignedCharArray(&blurBlue);
		return resultCode;
	}

	short int overlayLut[256][256];
	unsigned char multiplyLut255[256];
	unsigned char multiplyLut227[256];
	unsigned char multiplyLut187[256];
	unsigned int j;
	for (i = 0; i < 256; i++) {
		for (j = 0; j < 256; j++) {
			overlayLut[i][j] = -1;//overlayPixelComponents(i, j, 1.0f);
		}
		multiplyLut255[i] = multiplyPixelComponents(255, i);
		multiplyLut227[i] = multiplyPixelComponents(227, i);
		multiplyLut187[i] = multiplyPixelComponents(187, i);
	}
	for (i = length; i--; ) {
		if (overlayLut[blurRed[i]][red[i]] == -1) {
			overlayLut[blurRed[i]][red[i]] = overlayPixelComponents(blurRed[i], red[i], 1.0f);
		}
		red[i] = overlayLut[blurRed[i]][red[i]];//overlayPixelComponents(blurRed[i], red[i], 1.0f);

		if (overlayLut[blurGreen[i]][green[i]] == -1) {
			overlayLut[blurGreen[i]][green[i]] = overlayPixelComponents(blurGreen[i], green[i], 1.0f);
		}
		green[i] = overlayLut[blurGreen[i]][green[i]];//overlayPixelComponents(blurGreen[i], green[i], 1.0f);

		if (overlayLut[blurBlue[i]][blue[i]] == -1) {
			overlayLut[blurBlue[i]][blue[i]] = overlayPixelComponents(blurBlue[i], blue[i], 1.0f);
		}
		blue[i] = overlayLut[blurBlue[i]][blue[i]];//overlayPixelComponents(blurBlue[i], blue[i], 1.0f);

		// Multiply by a wheat colour rgb(255, 227, 187)
		red[i] = multiplyLut255[red[i]];//multiplyPixelComponents(255, red[i]);
		green[i] = multiplyLut227[green[i]];//multiplyPixelComponents(227, green[i]);
		blue[i] = multiplyLut187[blue[i]];//multiplyPixelComponents(187, blue[i]);
	}

	freeUnsignedCharArray(&blurRed);
	freeUnsignedCharArray(&blurGreen);
	freeUnsignedCharArray(&blurBlue);

	return MEMORY_OK;
}
Example #24
// TODO: memory usage could be reduced by using a component-based blur
int applyHDR(Bitmap* bitmap) {
	//Cache to local variables
	unsigned char* red = (*bitmap).red;
	unsigned char* green = (*bitmap).green;
	unsigned char* blue = (*bitmap).blue;

	unsigned char* blurRed;
	unsigned char* blurGreen;
	unsigned char* blurBlue;
	int length = (*bitmap).width * (*bitmap).height;
	int resultCode = newUnsignedCharArray(length, &blurRed);
	if (resultCode != MEMORY_OK) {
		return resultCode;
	}
	resultCode = newUnsignedCharArray(length, &blurGreen);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		return resultCode;
	}
	resultCode = newUnsignedCharArray(length, &blurBlue);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		freeUnsignedCharArray(&blurGreen);
		return resultCode;
	}
	float blurRadius = 9.0f;
	resultCode = stackBlur(&blurRadius, red, green, blue, &((*bitmap).width), &((*bitmap).height), blurRed, blurGreen, blurBlue);
	if (resultCode != MEMORY_OK) {
		freeUnsignedCharArray(&blurRed);
		freeUnsignedCharArray(&blurGreen);
		freeUnsignedCharArray(&blurBlue);
		return resultCode;
	}

	unsigned int i, j;
	unsigned char r1, g1, b1, r2, g2, b2;
	float matrix[4][4];
	identMatrix(matrix);
	float saturation = 1.3f;
	saturateMatrix(matrix, &saturation);
	for (i = length; i--;) {
		// invert the blurred pixel
		r1 = 255 - blurRed[i];
		g1 = 255 - blurGreen[i];
		b1 = 255 - blurBlue[i];

		// Grain merge the inverted blurred pixel with the original
		r1 = grainMergePixelsComponent(r1, red[i]);
		g1 = grainMergePixelsComponent(g1, green[i]);
		b1 = grainMergePixelsComponent(b1, blue[i]);

		// boost the saturation of the original pixel
		//HSBColour hsb;
		//rgbToHsb(red[i], green[i], blue[i], &hsb);
		//hsb.s = min(1.0f, hsb.s * 1.3f);
		r2 = red[i];
		g2 = green[i];
		b2 = blue[i];
		applyMatrixToPixel(&r2, &g2, &b2, matrix);
		//hsbToRgb(&hsb, &r2, &g2, &b2);

		// grain merge the saturated pixel with the inverted grain merged pixel
		red[i] = grainMergePixelsComponent(r2, r1);
		green[i] = grainMergePixelsComponent(g2, g1);
		blue[i] = grainMergePixelsComponent(b2, b1);
	}

	applyMatrix(bitmap, matrix);

	freeUnsignedCharArray(&blurRed);
	freeUnsignedCharArray(&blurGreen);
	freeUnsignedCharArray(&blurBlue);

	return MEMORY_OK;
}
Example #25
Intersection Sphere::intersect(const Ray &ray) const {

    // Inverse transform the ray.
    vec3 o = applyMatrix(inv_t, ray.o);
    vec3 d = normalize(applyMatrix(inv_t, ray.o + ray.d) - o);

    Intersection ret(&m, this, CONST_FAR);

    float r2 = r * r;
    vec3 toSphere = c - o;
    float l2 = dot(toSphere, toSphere);

    if (l2 > r2) {
        float d2 = dot(toSphere, d);
        if (d2 <= 0.0f) {
            return ret;
        }

        float thc = r2 - l2 + d2 * d2;
        if (thc <= 0.0f) {
            return ret;
        }

        float thc_sqrt = sqrtf(thc);
        float t_temp = d2 - thc_sqrt;
        if (t_temp > CONST_NEAR) {
            vec3 hitpoint = o + t_temp * d;
            vec3 normal = normalize(hitpoint - c);
            ret.point = applyMatrix(t, hitpoint);
            ret.t = length(ret.point - ray.o);
            ret.normal = normalize(normal_mat * normal);
            ret.type = INTERSECTION_OBJ;
            // Is the intersection inside or outside.
            ret.pos = dot(ret.normal, ray.d) < 0.0f ? INTERSECTION_OUT : INTERSECTION_IN;
        }
        else {
            t_temp = d2 + thc_sqrt;
            if (t_temp > CONST_NEAR) {
                vec3 hitpoint = o + t_temp * d;
                vec3 normal = normalize(hitpoint - c);
                ret.point = applyMatrix(t, hitpoint);
                ret.t = length(ret.point - ray.o);
                ret.normal = normalize(normal_mat * normal);
                ret.type = INTERSECTION_OBJ;
                // Is the intersection inside or outside.
                ret.pos = dot(ret.normal, ray.d) < 0.0f ? INTERSECTION_OUT : INTERSECTION_IN;
            }
        }
        return ret;
    }
    else {
        float d2 = dot(toSphere, d);
        float thc = r2 - l2 + d2 * d2;
        float t_temp = sqrtf(thc) + d2;
        if (t_temp > CONST_NEAR) {
            vec3 hitpoint = o + t_temp * d;
            vec3 normal = normalize(hitpoint - c);
            ret.point = applyMatrix(t, hitpoint);
            ret.t = length(ret.point - ray.o);
            ret.normal = normalize(normal_mat * normal);
            ret.type = INTERSECTION_OBJ;
            // Is the intersection inside or outside.
            ret.pos = dot(ret.normal, ray.d) < 0.0f ? INTERSECTION_OUT : INTERSECTION_IN;
        }
        return ret;
    }
}
Example #26
I3DObject &Translation::applyLocalMatrix(const I3DMatrix &matrix) 
{
	return applyMatrix(matrix);
}