Example #1
FloatRect Transform::transformRect(const FloatRect& rectangle) const
{
    // Transform the 4 corners of the rectangle
    const Vector2f points[] =
    {
        transformPoint(rectangle.left, rectangle.top),
        transformPoint(rectangle.left, rectangle.top + rectangle.height),
        transformPoint(rectangle.left + rectangle.width, rectangle.top),
        transformPoint(rectangle.left + rectangle.width, rectangle.top + rectangle.height)
    };

    // Compute the bounding rectangle of the transformed points
    float left = points[0].x;
    float top = points[0].y;
    float right = points[0].x;
    float bottom = points[0].y;
    for (int i = 1; i < 4; ++i)
    {
        if      (points[i].x < left)   left = points[i].x;
        else if (points[i].x > right)  right = points[i].x;
        if      (points[i].y < top)    top = points[i].y;
        else if (points[i].y > bottom) bottom = points[i].y;
    }

    return FloatRect(left, top, right - left, bottom - top);
}
Example #2
Rect32 TransformTools::newRect(const Rect32 &oldRect, const TransformStruct &transform, Point32 *newHotspot) {
	Point32 nw(oldRect.left, oldRect.top);
	Point32 ne(oldRect.right, oldRect.top);
	Point32 sw(oldRect.left, oldRect.bottom);
	Point32 se(oldRect.right, oldRect.bottom);

	FloatPoint nw1, ne1, sw1, se1;

	nw1 = transformPoint(nw - transform._hotspot, transform._angle, transform._zoom);
	ne1 = transformPoint(ne - transform._hotspot, transform._angle, transform._zoom);
	sw1 = transformPoint(sw - transform._hotspot, transform._angle, transform._zoom);
	se1 = transformPoint(se - transform._hotspot, transform._angle, transform._zoom);

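	// take the axis-aligned bounding box of the four transformed corners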
	float top = MIN(nw1.y, MIN(ne1.y, MIN(sw1.y, se1.y)));
	float bottom = MAX(nw1.y, MAX(ne1.y, MAX(sw1.y, se1.y)));
	float left = MIN(nw1.x, MIN(ne1.x, MIN(sw1.x, se1.x)));
	float right = MAX(nw1.x, MAX(ne1.x, MAX(sw1.x, se1.x)));

	if (newHotspot) {
		newHotspot->y = (uint32)(-floor(top));
		newHotspot->x = (uint32)(-floor(left));
	}

	Rect32 res;
	res.top = (int32)(floor(top)) + transform._hotspot.y;
	res.bottom = (int32)(ceil(bottom)) + transform._hotspot.y;
	res.left = (int32)(floor(left)) + transform._hotspot.x;
	res.right = (int32)(ceil(right)) + transform._hotspot.x;

	return res;
}
Example #3
void MotionEvent::transform(const float matrix[9]) {
    // The tricky part of this implementation is to preserve the value of
    // rawX and rawY.  So we apply the transformation to the first point
    // then derive an appropriate new X/Y offset that will preserve rawX
    // and rawY for that point.
    float oldXOffset = mXOffset;
    float oldYOffset = mYOffset;
    float newX, newY;
    float rawX = getRawX(0);
    float rawY = getRawY(0);
    transformPoint(matrix, rawX + oldXOffset, rawY + oldYOffset, &newX, &newY);
    mXOffset = newX - rawX;
    mYOffset = newY - rawY;

    // Determine how the origin is transformed by the matrix so that we
    // can transform orientation vectors.
    float originX, originY;
    transformPoint(matrix, 0, 0, &originX, &originY);

    // Apply the transformation to all samples.
    size_t numSamples = mSamplePointerCoords.size();
    for (size_t i = 0; i < numSamples; i++) {
        PointerCoords& c = mSamplePointerCoords.editItemAt(i);
        float x = c.getAxisValue(AMOTION_EVENT_AXIS_X) + oldXOffset;
        float y = c.getAxisValue(AMOTION_EVENT_AXIS_Y) + oldYOffset;
        transformPoint(matrix, x, y, &x, &y);
        c.setAxisValue(AMOTION_EVENT_AXIS_X, x - mXOffset);
        c.setAxisValue(AMOTION_EVENT_AXIS_Y, y - mYOffset);

        float orientation = c.getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION);
        c.setAxisValue(AMOTION_EVENT_AXIS_ORIENTATION,
                transformAngle(matrix, orientation, originX, originY));
    }
}
Example #4
			Rectf32 transformRect(const Rectf32 &rectangle) const
			{
				// Transform the 4 corners of the rectangle
				const vector2df points[4] =
				{
					transformPoint(rectangle.leftTop()),
					transformPoint({ rectangle.left, rectangle.bottom() }),
					transformPoint({ rectangle.right(), rectangle.top }),
					transformPoint(rectangle.rightBottom())
				};

				// Compute the bounding rectangle of the transformed points
				f32 left = points[0].x;
				f32 top = points[0].y;
				f32 right = points[0].x;
				f32 bottom = points[0].y;
				for (auto i = 1; i < 4; ++i)
				{
					if (points[i].x < left)   left = points[i].x;
					else if (points[i].x > right)  right = points[i].x;
					if (points[i].y < top)    top = points[i].y;
					else if (points[i].y > bottom) bottom = points[i].y;
				}

				return Rectf32{ left, top, right - left, bottom - top };
			}
Example #5
void renderer_addCommandStroke( const float2* pPoints, const uint8* pCommands, uint commandCount )
{
    StrokeCommand strokeCommand;
    createDrawCommand(&strokeCommand);
    const float variance=s_renderer.currentVariance;
    for(uint i=0u;i<commandCount;++i)
    {
        uint8 command=*pCommands++;
        float2 pos;
        
        switch(command)
        {
        case DrawCommand_Move:
            if(strokeCommand.data.draw.pointCount>0u)
            {
                computeStrokeNormals(&strokeCommand, 0);
                pushStrokeCommand(&strokeCommand);
            }
            transformPoint(&pos,pPoints++,variance);
            createDrawCommand(&strokeCommand);
            if(pushStrokePoint(&pos))
            {
                strokeCommand.data.draw.pointCount++;
            }
            //SYS_TRACE_DEBUG("m(%f,%f)\n",pos.x,pos.y);
            break;

        case DrawCommand_Line:
            {
                transformPoint(&pos,pPoints++,variance);
                if(pushStrokePoint(&pos))
                {
                    strokeCommand.data.draw.pointCount++;
                }
                //SYS_TRACE_DEBUG("l(%f,%f)\n",pos.x,pos.y);
            }
            break;

        case DrawCommand_Curve:
            {
                float2 p1,p2;
                transformPoint(&p1,pPoints++,variance);
                transformPoint(&p2,pPoints++,variance);
                strokeCommand.data.draw.pointCount += addQuadraticCurvePointsRec(&pos,&p1,&p2);
                pos=p2;
                //SYS_TRACE_DEBUG("c(%f,%f)\n",pos.x,pos.y);
            }
            break;
        }
    }
    if(strokeCommand.data.draw.pointCount>0u)
    {
        computeStrokeNormals(&strokeCommand, 0);
        pushStrokeCommand(&strokeCommand);        
    }
}
Example #6
void Triangle::getBounds(Vect4* lower, Vect4* upper) {
  Vect4 a = transformPoint(M, this->a);
  Vect4 b = transformPoint(M, this->b);
  Vect4 c = transformPoint(M, this->c);
  *lower = *upper = a;
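  // widen the bounds component-wise to cover vertices b and c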
  for (int i = 0; i < 3; i++) {
    if (b[i] < (*lower)[i]) (*lower)[i] = b[i];
    else if (b[i] > (*upper)[i]) (*upper)[i] = b[i];
    if (c[i] < (*lower)[i]) (*lower)[i] = c[i];
    else if (c[i] > (*upper)[i]) (*upper)[i] = c[i];
  }
}
Example #7
void drawProjectedQuad(float x1, float x2, float x3, float x4, float y1, float y2, float y3, float y4, float z1, float z2, float z3, float z4, int color, int transprency) {
	float transformedX1;
	float transformedX2;
	float transformedX3;
	float transformedX4;

	float transformedY1;
	float transformedY2;
	float transformedY3;
	float transformedY4;

	x1 -= translateX;
	x2 -= translateX;
	x3 -= translateX;
	x4 -= translateX;

	y1 -= translateY;
	y2 -= translateY;
	y3 -= translateY;
	y4 -= translateY;

	z1 -= translateZ;
	z2 -= translateZ;
	z3 -= translateZ;
	z4 -= translateZ;

	transformPoint(&x1, &y1, &z1);
	transformPoint(&x2, &y2, &z2);
	transformPoint(&x3, &y3, &z3);
	transformPoint(&x4, &y4, &z4);

	z1 += cameraX;
	z2 += cameraX;
	z3 += cameraX;
	z4 += cameraX;

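	// perspective-project each corner: scale by the camera factor, divide by depth, then offset to the screen centre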
	transformedX1 = ((x1 * cameraY) / (float)z1) + cameraCenterX;
	transformedX2 = ((x2 * cameraY) / (float)z2) + cameraCenterX;
	transformedX3 = ((x3 * cameraY) / (float)z3) + cameraCenterX;
	transformedX4 = ((x4 * cameraY) / (float)z4) + cameraCenterX;

	transformedY1 = ((y1 * cameraZ) / (float)z1) + cameraCenterY;
	transformedY2 = ((y2 * cameraZ) / (float)z2) + cameraCenterY;
	transformedY3 = ((y3 * cameraZ) / (float)z3) + cameraCenterY;
	transformedY4 = ((y4 * cameraZ) / (float)z4) + cameraCenterY;

	if(z1 > DEPTH_THRESHOLD && z2 > DEPTH_THRESHOLD && z3 > DEPTH_THRESHOLD && z4 > DEPTH_THRESHOLD) {
		g_driver->draw3dQuad(transformedX1, transformedY1, z1, transformedX2, transformedY2, z2, transformedX3, transformedY3, z3, transformedX4, transformedY4, z4, color, transprency);
	}

	//g_driver->draw3dQuad(x1,y1,z1, x2,y2,z2, x3,y3,z3, x4,y4,z4, color);
}
Example #8
void Physics3DConstraintDemo::onTouchesBegan(const std::vector<cocos2d::Touch*>& touches, cocos2d::Event  *event)
{
    //ray trace
    if(_camera)
    {
        auto touch = touches[0];
        auto location = touch->getLocationInView();
        Vec3 nearP(location.x, location.y, 0.0f), farP(location.x, location.y, 1.0f);
        
        auto size = Director::getInstance()->getWinSize();
        _camera->unproject(size, &nearP, &nearP);
        _camera->unproject(size, &farP, &farP);
        
        Physics3DWorld::HitResult result;
        bool ret = physicsScene->getPhysics3DWorld()->rayCast(nearP, farP, &result);
        if (ret && result.hitObj->getObjType() == Physics3DObject::PhysicsObjType::RIGID_BODY)
        {
            auto mat = result.hitObj->getWorldTransform().getInversed();
            Vec3 position;
            mat.transformPoint(result.hitPosition, &position);
            
            _constraint = Physics3DPointToPointConstraint::create(static_cast<Physics3DRigidBody*>(result.hitObj), position);
            physicsScene->getPhysics3DWorld()->addPhysics3DConstraint(_constraint, true);
            _pickingDistance = (result.hitPosition - nearP).length();
            return;
        }
    }
    Physics3DTestDemo::onTouchesBegan(touches, event);
    _needShootBox = false;
}
Example #9
static void renderer_addSingleStroke( const float2* pStrokePoints, uint strokePointCount )
{
    SYS_ASSERT( s_renderer.pageState == PageState_BeforeDraw );
    if( !pStrokePoints || strokePointCount < 2u )
    {
        return;
    }
    
    const int isCycle=float2_isEqual(&pStrokePoints[0u],&pStrokePoints[strokePointCount-1u]);
    
    StrokeCommand command;
    createDrawCommand( &command );

    const float variance = s_renderer.currentVariance;

    // transform, randomize and copy positions
    for( uint i = 0u; i < strokePointCount; ++i )
    {
        const float2 strokePoint = pStrokePoints[ i ];

        // reduced variance in the beginning
        const int isFirstVertexInStroke = command.data.draw.pointCount == 0u;

        float2 point;
        transformPoint( &point, &strokePoint, isFirstVertexInStroke ? variance / 4.0f : variance );
        if( pushStrokePoint( &point ) )
        {
            command.data.draw.pointCount++;
        }
    }
   
    computeStrokeNormals( &command, isCycle );
    pushStrokeCommand( &command );
}
Example #10
void CadItem::transformPoints()
{
    points.clear();

    foreach(QPointF point, pointPolygon)
    {
        points << transformPoint(point);
    }
}
Example #11
void FaceDetectorFilter::facesCallback(const pcl::PointCloud<pcl::PointXYZL>::ConstPtr& msg)
{
	tf::Transform cameraTransform;
	try {
		tf::StampedTransform tr;
		transformListener.lookupTransform ( "odom","camera_link2",ros::Time(0),tr);
		cameraTransform=tr;
	} catch(...) {
		return;
	}
	std::set<unsigned int> usedUsers = deleteOld();
	std::list<Point> incomingUsers;
	fillList(incomingUsers, msg,cameraTransform);

	while(1) {
		std::pair<unsigned int,std::list<Point>::iterator> match = findClosest(incomingUsers);
		if(match.first == 0)
			break;
		float distance = getDistance(users[match.first],*match.second);
		if(distance> MAX_SPEED)
			break;

		if(usedUsers.find(match.first) == usedUsers.end()) {
			users[match.first] = *match.second;
			usedUsers.insert(match.first);
			std::cerr<<"user updated: "<<match.first<<", distance:" <<distance<<std::endl;
		} else {
			std::cerr<<"user ignored: "<<match.first<<", distance:" <<distance<<std::endl;

		}
		incomingUsers.erase(match.second);
	}

	for(std::list<Point>::iterator it = incomingUsers.begin(); it!=incomingUsers.end(); ++it) {
		unsigned int newId = getAvailableId(usedUsers);
		users[newId] = *it;
		std::cerr<<"added user: "<<newId<<std::endl;
	}

	// reconstructed: the source listing redacted the point-cloud declaration here
	pcl::PointCloud<pcl::PointXYZL>::Ptr pmsg(new pcl::PointCloud<pcl::PointXYZL>);
	pmsg->header.frame_id = "camera_link2";
	pmsg->height = 1;
	for(std::map<unsigned int,Point>::iterator it = users.begin(); it != users.end(); ++it) {
		pcl::PointXYZL point;
		Point p(it->second);
		transformPoint(p,cameraTransform,true);
		
		point.label = it->first;
		point.x=p.x;
		point.y=p.y;
		point.z = p.z;
		pmsg->points.push_back(point);
	}
	pmsg->width = pmsg->points.size();
	facePublisher.publish(pmsg);

}
Example #12
void Hero::_armEventHandler(cocos2d::EventCustom* event)
{
    const auto eventObject = (dragonBones::EventObject*)event->getUserData();

    if (eventObject->type == dragonBones::EventObject::COMPLETE)
    {
        _isAttacking = false;
        _hitCount = 0;
        const auto animationName = "ready_" + _weaponName;
        _armArmature->getAnimation().fadeIn(animationName);
    }
    else if (eventObject->type == dragonBones::EventObject::FRAME_EVENT)
    {
        if (eventObject->name == "ready")
        {
            _isAttacking = false;
            _hitCount++;
        }
        else if (eventObject->name == "fire")
        {
            const auto display = (dragonBones::CCArmatureDisplay*)(eventObject->armature->getDisplay());
            const auto firePointBone = eventObject->armature->getBone("bow");
            const auto transform = display->getNodeToWorldTransform();
            cocos2d::Vec3 localPoint(firePointBone->global.x, -firePointBone->global.y, 0.f);
            cocos2d::Vec2 globalPoint;
            transform.transformPoint(&localPoint);
            globalPoint.set(localPoint.x, localPoint.y);

            auto radian = 0.f;
            if (_faceDir > 0)
            {
                radian = firePointBone->global.getRotation() + display->getRotation() * dragonBones::ANGLE_TO_RADIAN;
            }
            else
            {
                radian = dragonBones::PI - (firePointBone->global.getRotation() + display->getRotation() * dragonBones::ANGLE_TO_RADIAN);
            }

            switch (_weaponsLevel[_weaponIndex])
            {
            case 0:
                _fire(globalPoint, radian);
                break;

            case 1:
                _fire(globalPoint, radian + 3.f * dragonBones::ANGLE_TO_RADIAN);
                _fire(globalPoint, radian - 3.f * dragonBones::ANGLE_TO_RADIAN);
                break;

            case 2:
                _fire(globalPoint, radian + 6.f * dragonBones::ANGLE_TO_RADIAN);
                _fire(globalPoint, radian);
                _fire(globalPoint, radian - 6.f * dragonBones::ANGLE_TO_RADIAN);
                break;
            }
        }
    }
}
Example #13
void LeapMotionPlugin::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
        const std::vector<LeapMotionPlugin::LeapMotionJoint>& joints,
        const std::vector<LeapMotionPlugin::LeapMotionJoint>& prevJoints) {

    glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
    glm::quat controllerToAvatarRotation = glmExtractRotation(controllerToAvatar);

    glm::vec3 hmdSensorPosition;    // HMD
    glm::quat hmdSensorOrientation; // HMD
    glm::vec3 leapMotionOffset;     // Desktop
    if (_isLeapOnHMD) {
        hmdSensorPosition = extractTranslation(inputCalibrationData.hmdSensorMat);
        hmdSensorOrientation = extractRotation(inputCalibrationData.hmdSensorMat);
    } else {
        // Desktop "zero" position is some distance above the Leap Motion sensor and half the avatar's shoulder-to-hand length 
        // in front of avatar.
        float halfShouldToHandLength = fabsf(extractTranslation(inputCalibrationData.defaultLeftHand).x
            - extractTranslation(inputCalibrationData.defaultLeftArm).x) / 2.0f;
        leapMotionOffset = glm::vec3(0.0f, _desktopHeightOffset, halfShouldToHandLength);
    }

    for (size_t i = 0; i < joints.size(); i++) {
        int poseIndex = LeapMotionJointIndexToPoseIndex((LeapMotionJointIndex)i);

        if (joints[i].position == Vectors::ZERO) {
            _poseStateMap[poseIndex] = controller::Pose();
            continue;
        }

        glm::vec3 pos;
        glm::quat rot;
        if (_isLeapOnHMD) {
            auto jointPosition = joints[i].position;
            const glm::vec3 HMD_EYE_TO_LEAP_OFFSET = glm::vec3(0.0f, 0.0f, -0.09f);  // Eyes to surface of Leap Motion.
            jointPosition = glm::vec3(-jointPosition.x, -jointPosition.z, -jointPosition.y) + HMD_EYE_TO_LEAP_OFFSET;
            jointPosition = hmdSensorPosition + hmdSensorOrientation * jointPosition;
            pos = transformPoint(controllerToAvatar, jointPosition);

            glm::quat jointOrientation = joints[i].orientation;
            jointOrientation = glm::quat(jointOrientation.w, -jointOrientation.x, -jointOrientation.z, -jointOrientation.y);
            rot = controllerToAvatarRotation * hmdSensorOrientation * jointOrientation;
        } else {
            pos = controllerToAvatarRotation * (joints[i].position - leapMotionOffset);
            const glm::quat ZERO_HAND_ORIENTATION = glm::quat(glm::vec3(PI_OVER_TWO, PI, 0.0f));
            rot = controllerToAvatarRotation * joints[i].orientation * ZERO_HAND_ORIENTATION;
        }

        glm::vec3 linearVelocity, angularVelocity;
        if (i < prevJoints.size()) {
            linearVelocity = (pos - (prevJoints[i].position * METERS_PER_CENTIMETER)) / deltaTime;  // m/s
            // quat log imaginary part points along the axis of rotation, with length of one half the angle of rotation.
            glm::quat d = glm::log(rot * glm::inverse(prevJoints[i].orientation));
            angularVelocity = glm::vec3(d.x, d.y, d.z) / (0.5f * deltaTime); // radians/s
        }

        _poseStateMap[poseIndex] = controller::Pose(pos, rot, linearVelocity, angularVelocity);
    }
}
Example #14
Pose Pose::transform(const glm::mat4& mat) const {
    auto rot = glmExtractRotation(mat);
    Pose pose(transformPoint(mat, translation),
              rot * rotation,
              transformVectorFast(mat, velocity),
              rot * angularVelocity);
    pose.valid = valid;
    return pose;
}
Example #15
void renderer_addBurnHole( const float2* pStart, const float2* pEnd, float size )
{
    for( uint i = 0u; i < SYS_COUNTOF(s_renderer.burnHoles); ++i )
    {
        if( s_renderer.burnHoles[i].size <= 0.0f )
        {
            s_renderer.burnHoles[i].size=size;
            s_renderer.burnHoles[i].initialSize=size;
            transformPoint( &s_renderer.burnHoles[i].start, pStart, 0.0f );
            transformPoint( &s_renderer.burnHoles[i].end, pEnd, 0.0f );
            s_renderer.burnHoles[i].rot=float_rand_range(0.0f, 2.0f*PI);
            drawBurnHole( &s_renderer.burnHoles[i] );
            return;
        }
    }

    SYS_TRACE_WARNING( "no burn hole slot found!\n" );
}
Example #16
Ray Matrix4x4::transformRay(const Ray &r) const
{
    Ray transformedRay = r;
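    // the origin transforms as a point (translation applied); the direction as a vector (translation ignored)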
    Vector3D transformedOrigin = transformPoint(r.o);
    Vector3D transformedDir    = transformVector(r.d);

    transformedRay.o = transformedOrigin;
    transformedRay.d = transformedDir;

    return transformedRay;
}
Example #17
void Homography::setSrcRect( const Rectf & r )
{
	srcPts[0](r.l,r.t);
	srcPts[1](r.r,r.t);
	srcPts[2](r.r,r.b);
	srcPts[3](r.l,r.b);
	for( int i = 0; i < 4; i++ )
	{
		dstPts[i] = transformPoint(srcPts[i]);
	}
	compute();
}
Example #18
	sf::ConvexShape convertRectToConvex(const Shape& base, const VanishPlane& plane)
	{
		auto transform = base.getTransform();
		const unsigned n_points = base.getPointCount();
		sf::ConvexShape result(n_points);
		for(unsigned i = 0; i < n_points; ++i)
			result.setPoint(i, plane(transform.transformPoint(base.getPoint(i))));
		result.setFillColor(base.getFillColor());
		result.setOutlineColor(base.getOutlineColor());
		result.setOutlineThickness(1);
		return result;
	}
Example #19
OBB::OBB(const AABB& aabb)
{
	corner[0] = vec2(aabb.x1,aabb.y1);
	corner[1] = vec2(aabb.x2,aabb.y1);
	corner[2] = vec2(aabb.x2,aabb.y2);
	corner[3] = vec2(aabb.x1,aabb.y2);

	// rotate by current matrix
	mat4 mt = glGetCurrentMatrix(GL_MODELVIEW_MATRIX);
	for (int c = 0; c < 4; ++c)
		corner[c] = vec2(transformPoint(mt,vec3(corner[c])));
	
	computeAxes();
}
Example #20
void Rat::update(float dt)
{
	switch (_state)
	{
	case RAT_IDLE:
		break;
	case RAT_FORWARD:
	{
		Vec3 curPos = this->getPosition3D();
		Vec3 newFaceDir = _targetPos - curPos; // new facing direction = target position - current position, set by the scene's onTouchEnded
		newFaceDir.y = 0.0f; // only consider facing changes in the xz plane
		newFaceDir.normalize();
		Vec3 offset = newFaceDir*RAT_FORWARD_SPEED*dt; // move n units per second along the facing direction
		curPos = curPos + offset;
		this->setPosition3D(curPos);
	}
		break;
	case RAT_KNOCKED:
		break;
	case RAT_ATTACK:
		break;
	case RAT_DEAD:
		break;
	default:
		break;
	}
	// transform player position to world coord
	auto playerPos = this->getPosition3D();//get the character's 3D position within its parent node (layer)
	auto playerModelMat = this->getParent()->getNodeToWorldTransform();//matrix converting the parent node's (layer's) coordinates to world coordinates
	playerModelMat.transformPoint(&playerPos);//transform the character's position into world space
	Vec3 Normal;
	float player_h = 0;//height derived from the character's x and z coordinates
	if (Normal.isZero())//check whether the character is outside the terrain
	{
		player_h = playerPos.y;
	}
	else
	{
		player_h += PLAYER_HEIGHT;
	}
	this->setPositionY(player_h);

	Quaternion q2;
	q2.createFromAxisAngle(Vec3(0, 1, 0), (float)-M_PI, &q2);//this quaternion rotates 180 degrees about the y axis, i.e. it turns the character around

	Quaternion headingQ;
	headingQ.createFromAxisAngle(_headingAxis, _headingAngle, &headingQ);
	this->setRotationQuat(headingQ*q2);
	this->updateState();
}
Example #21
File: osm.c  Project: filbertkm/osmutils
xmlNodePtr nodeElement(NODE node)
{
	if (strcmp(srid, "4326") != 0) 
	{
		node = transformPoint(node, srid);
	}

	xmlNodePtr osmNode;
	osmNode = xmlNewNode(NULL, BAD_CAST "node");
	xmlNewProp(osmNode, BAD_CAST "id", BAD_CAST xmlEscape(int2char(node.id)));
	xmlNewProp(osmNode, BAD_CAST "lon", BAD_CAST xmlEscape(dbl2char(node.x)));
	xmlNewProp(osmNode, BAD_CAST "lat", BAD_CAST xmlEscape(dbl2char(node.y))); 
	return osmNode;
	
}
Example #22
TGLint tgluProject(TGLfloat objx, TGLfloat objy, TGLfloat objz, const TGLfloat model[16], const TGLfloat proj[16],
		const TGLint viewport[4], TGLfloat *winx, TGLfloat *winy, TGLfloat *winz) {
	TGLfloat in[4], out[4];

	in[0] = objx;
	in[1] = objy;
	in[2] = objz;
	in[3] = 1.0f;
	transformPoint(out, model, in);
	transformPoint(in, proj, out);

	if (in[3] == 0.0)
		return TGL_FALSE;

	in[0] /= in[3];
	in[1] /= in[3];
	in[2] /= in[3];

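	// map normalized device coordinates to window coordinates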
	*winx = viewport[0] + (1 + in[0]) * viewport[2] / 2;
	*winy = viewport[1] + (1 + in[1]) * viewport[3] / 2;
	*winz = (1 + in[2]) / 2;

	return TGL_TRUE;
}
Example #23
void Mecha::_frameEventHandler(cocos2d::EventCustom* event)
{
    const auto eventObject = (dragonBones::EventObject*)event->getUserData();
    if (eventObject->name == "onFire")
    {
        const auto display = (dragonBones::CCArmatureDisplay*)eventObject->armature->getDisplay();
        const auto firePointBone = eventObject->armature->getBone("firePoint");
        const auto transform = display->getNodeToWorldTransform();
        cocos2d::Vec3 localPoint(firePointBone->global.x, -firePointBone->global.y, 0.f);
        cocos2d::Vec2 globalPoint;
        transform.transformPoint(&localPoint);
        globalPoint.set(localPoint.x, localPoint.y);

        _fire(globalPoint);
    }
}
Example #24
File: AABox.cpp  Project: ZappoMan/hifi
// Logic based on http://clb.demon.fi/MathGeoLib/nightly/docs/AABB.cpp_code.html#471
void AABox::transform(const glm::mat4& matrix) {
    // FIXME use simd operations
    auto halfSize = _scale * 0.5f;
    auto center = _corner + halfSize;
    halfSize = abs(halfSize);
    auto mm = glm::transpose(glm::mat3(matrix));
    vec3 newDir = vec3(
                      glm::dot(glm::abs(mm[0]), halfSize),
                      glm::dot(glm::abs(mm[1]), halfSize),
                      glm::dot(glm::abs(mm[2]), halfSize)
                  );

    auto newCenter = transformPoint(matrix, center);
    _corner = newCenter - newDir;
    _scale = newDir * 2.0f;
}
Example #25
int FaceDetectorFilter::fillList(std::list<Point> & list,pcl::PointCloud<pcl::PointXYZL>::ConstPtr msg,const tf::Transform cameraTransform)
{
	ros::Time now = ros::Time::now();


	for(pcl::PointCloud<pcl::PointXYZL>::const_iterator it= msg->points.begin(); it!= msg->points.end(); ++it) {
		Point p;
		p.x = it->x;
		p.y = it->y;
		p.z = it->z;
		p.lastSeen = now;
		transformPoint(p,cameraTransform,false);
		list.push_back(p);
	}

	return msg->points.size();
}
Example #26
static float transformAngle(const float matrix[9], float angleRadians,
        float originX, float originY) {
    // Construct and transform a vector oriented at the specified clockwise angle from vertical.
    // Coordinate system: down is increasing Y, right is increasing X.
    float x = sinf(angleRadians);
    float y = -cosf(angleRadians);
    transformPoint(matrix, x, y, &x, &y);
    x -= originX;
    y -= originY;

    // Derive the transformed vector's clockwise angle from vertical.
    float result = atan2f(x, -y);
    if (result < - M_PI_2) {
        result += M_PI;
    } else if (result > M_PI_2) {
        result -= M_PI;
    }
    return result;
}
Example #27
geometry_msgs::Pose transformPose(geometry_msgs::Pose pose_in, Eigen::Matrix4f transf){
    geometry_msgs::Pose pose_out;
    // Get the rotation from the transformation matrix
    Eigen::Matrix4f rot;
    rot = transf;
    rot.col(3) = Eigen::Vector4f(0, 0, 0, 1);
    // Build a normal vector from the quaternion
    tf::Quaternion tf_q;
    tf::quaternionMsgToTF(pose_in.orientation, tf_q);
    tf::Vector3 normal(1, 0, 0);
    normal = tf::quatRotate(tf_q, normal);
    normal.normalize();
    // Rotate the normal
    Eigen::Vector3f normal_vector (normal.x(), normal.y(), normal.z());
    Eigen::Vector3f normal_rotated = transformVector(normal_vector, transf);
    normal_rotated.normalize();
    // Get a quaternion from the rotated normal
    pose_out.orientation = coefsToQuaternionMsg(normal_rotated[0], normal_rotated[1], normal_rotated[2]);
    // Transform the position
    pose_out.position = transformPoint(pose_in.position, transf);
    return pose_out;
}
Example #28
void TransformTests::getInverseMatrix() {

    const vec3 t(0.0f, 0.0f, 10.0f);

    // create a matrix that is composed of a PI/2 rotation followed by a small z translation
    const mat4 m(vec4(rot90 * xAxis, 0.0f),
                 vec4(rot90 * yAxis, 0.0f),
                 vec4(rot90 * zAxis, 0.0f),
                 vec4(vec4(t, 1.0f)));

    // mirror about the x axis.
    const mat4 mirrorX(vec4(-1.0f, 0.0f, 0.0f, 0.0f),
                       vec4( 0.0f, 1.0f, 0.0f, 0.0f),
                       vec4( 0.0f, 0.0f, 1.0f, 0.0f),
                       vec4( 0.0f, 0.0f, 0.0f, 1.0f));
    const mat4 result_a = glm::inverse(m * mirrorX);

    Transform xform;
    xform.setTranslation(t);
    xform.setRotation(rot90);
    xform.postScale(vec3(-1.0f, 1.0f, 1.0f));

    mat4 result_b;
    xform.getInverseMatrix(result_b);

    // don't check elements directly, instead compare each axis transformed by the matrix.
    auto xa = transformPoint(result_a, xAxis);
    auto ya = transformPoint(result_a, yAxis);
    auto za = transformPoint(result_a, zAxis);

    auto xb = transformPoint(result_b, xAxis);
    auto yb = transformPoint(result_b, yAxis);
    auto zb = transformPoint(result_b, zAxis);

    QCOMPARE_WITH_ABS_ERROR(xa, xb, TEST_EPSILON);
    QCOMPARE_WITH_ABS_ERROR(ya, yb, TEST_EPSILON);
    QCOMPARE_WITH_ABS_ERROR(za, zb, TEST_EPSILON);
}
Example #29
File: Camera.cpp  Project: EEmmanuel7/pvr
Vector PerspectiveCamera::rasterToWorld(const Vector &rsP, const PTime time) const
{
  return transformPoint(rsP, m_rasterToWorld, time);
}
Example #30
File: Camera.cpp  Project: EEmmanuel7/pvr
Vector PerspectiveCamera::worldToRaster(const Vector &wsP, const PTime time) const
{
  return transformPoint(wsP, m_worldToRaster, time);
}