/* Compute the label orientation at vertex p2 by averaging the directions
 * of segment (p1,p2) and segment (p2,p3).
 *
 * The first segment is rescaled to the length of the second one, so the
 * orientation of the vector from the rescaled start point to p3 equals the
 * orientation of the sum of the two length-normalized segment vectors. */
double calcMidAngle(pointObj *p1, pointObj *p2, pointObj *p3)
{
  pointObj start;
  double vx, vy, wx, wy, lenV, lenW;

  /* vector 1->2 and its length */
  vx = p2->x - p1->x;
  vy = p2->y - p1->y;
  lenV = sqrt(vx*vx + vy*vy);

  /* vector 2->3 and its length */
  wx = p3->x - p2->x;
  wy = p3->y - p2->y;
  lenW = sqrt(wx*wx + wy*wy);

  if (lenV > 0.0) {
    /* re-anchor segment 1->2 so it has the same length as segment 2->3,
     * measured backwards from p2 (scale factor lenW/lenV) */
    start.x = p2->x - vx * (lenW / lenV);
    start.y = p2->y - vy * (lenW / lenV);
  } else {
    /* degenerate first segment: fall back to the orientation of 2->3 */
    start = *p2;
  }

  return calcOrientation(&start, p3);
}
/*! Rotates the camera position around an arbitrary axis and re-derives
 *  the viewing direction, up vector and orientation.
 *  \param rotAxis   direction of the rotation axis
 *  \param rotAngle  rotation angle (radians, as used by SbRotation)
 *  \param axisPoint point the rotation axis passes through
 */
void kCamera::rotatePosition(SbVec3f rotAxis, double rotAngle, SbVec3f axisPoint)
{
	// Bugfix 2005-10-17: the rotation used to be applied around an axis through
	// the origin. The position must first be translated into the axis point's
	// frame, rotated there, and translated back.
	SbVec3f relPos = currentPosition - axisPoint;

	SbRotation rot;
	rot.setValue(rotAxis, rotAngle);
	rot.multVec(relPos, relPos);

	currentPosition = relPos + axisPoint;

	// Viewing direction from the new position towards the (unchanged) look-at point.
	currentLookDir = currentLookAt - currentPosition;
	currentLookDir.normalize();

	// Recompute the up vector; any roll is already applied inside calcUpVector.
	currentUpVec = calcUpVector(currentLookDir, NormPlump);
	currentUpVec.normalize();

	// Derive the new camera orientation from up vector and viewing direction.
	currentOrientation = calcOrientation(currentUpVec, currentLookDir);

	// writeOrientation(currentOrientation);  // push orientation to the ObjMgr
	// writePosition(currentPosition);        // push position to the ObjMgr
}
// Copies the property's stored transform components into current_ and
// rebuilds its matrix so the transform is ready to be applied.
// NOTE(review): calcOrientation presumably writes orientation_ into
// current_ (it is an out-of-view helper) — confirm against its definition.
void TransformProperty::prepare()
{
    current_.setTranslation(translation_);
    current_.setRotation(rotation_);
    calcOrientation(current_, orientation_);
    current_.setScale(scale_);
    current_.setAnchorPoint(anchor_point_);
    // Must run last: bakes all components set above into the matrix.
    current_.refreshMatrix();
}
/*! \param angle */ void kCamera::rotateCam(double angle) { // UpVec rotieren - bisher nur um die Sichtachse, also kein Kippen gegenüber der Sichtrichtung // Bei anderen Rotationen müssten dann sowohl UpVec wie auch lookDir gedreht werden currentUpVecAngle = currentUpVecAngle + angle; SbVec3f perfectUpVec = calcPerfectUpVector(currentLookDir,NormPlump); perfectUpVec.normalize(); rotateVector(perfectUpVec,currentLookDir, currentUpVecAngle); currentUpVec = perfectUpVec; currentOrientation = calcOrientation(currentUpVec,currentLookDir); //! Berechnet neue orientation //writeOrientation(currentOrientation); //! Schreibt orientation in ObjMgr }
/*! \param position * \param lookAt */ void kCamera::setCamPosition(const SbVec3f& position, const SbVec3f& lookAt) { currentLookAt = lookAt; currentPosition = position; currentLookDir = currentLookAt-currentPosition; currentLookDir.normalize(); currentUpVec = calcUpVector(currentLookDir,NormPlump); //! Neuen UpVec ausrechnen - Rotation wird schon in calcUpVector vollzogen currentUpVec.normalize(); currentOrientation = calcOrientation(currentUpVec,currentLookDir); //! Berechnet neue orientation // writeOrientation(currentOrientation); //! Schreibt orientation in ObjMgr // writePosition(currentPosition); //! Schreibt position in ObjMgr }
/*
 * RFC48 implementation:
 * - transform the original shapeobj
 * - use the styleObj to render the transformed shapeobj
 *
 * Returns MS_SUCCESS, or MS_FAILURE on tokenize/parse errors, unknown
 * transforms, or a failed draw call.
 *
 * Review fixes:
 * - repaired the encoding corruption in the MS_GEOMTRANSFORM_CENTROID case
 *   ("&centroid" had been mangled into an HTML cent-sign entity), which did
 *   not compile;
 * - removed the local "int status" in the MS_GEOMTRANSFORM_EXPRESSION case
 *   that shadowed the function-level status, silently discarding failures
 *   returned by the draw calls in that branch.
 */
int msDrawTransformedShape(mapObj *map, imageObj *image, shapeObj *shape, styleObj *style, double scalefactor)
{
  int type = style->_geomtransform.type;
  int i, j, status = MS_SUCCESS;

  switch(type) {
    case MS_GEOMTRANSFORM_END: /*render point on last vertex only*/
      for(j=0; j<shape->numlines; j++) {
        lineObj *line = &(shape->line[j]);
        pointObj *p = &(line->point[line->numpoints-1]);
        /*skip if outside image*/
        if(p->x<0 || p->x>image->width || p->y<0 || p->y>image->height)
          continue;
        if(style->autoangle==MS_TRUE && line->numpoints>1) {
          /* orient the marker along the last segment */
          style->angle = calcOrientation(&(line->point[line->numpoints-2]), p);
        }
        status = msDrawMarkerSymbol(map, image, p, style, scalefactor);
      }
      break;
    case MS_GEOMTRANSFORM_START: /*render point on first vertex only*/
      for(j=0; j<shape->numlines; j++) {
        lineObj *line = &(shape->line[j]);
        pointObj *p = &(line->point[0]);
        /*skip if outside image*/
        if(p->x<0 || p->x>image->width || p->y<0 || p->y>image->height)
          continue;
        if(style->autoangle==MS_TRUE && line->numpoints>1) {
          /* orient the marker along the first segment */
          style->angle = calcOrientation(p, &(line->point[1]));
        }
        status = msDrawMarkerSymbol(map, image, p, style, scalefactor);
      }
      break;
    case MS_GEOMTRANSFORM_VERTICES: /*render a point on each interior vertex*/
      for(j=0; j<shape->numlines; j++) {
        lineObj *line = &(shape->line[j]);
        for(i=1; i<line->numpoints-1; i++) {
          pointObj *p = &(line->point[i]);
          /*skip points outside image*/
          if(p->x<0 || p->x>image->width || p->y<0 || p->y>image->height)
            continue;
          if(style->autoangle==MS_TRUE) {
            /* average the orientation of the two adjacent segments */
            style->angle = calcMidAngle(&(line->point[i-1]), &(line->point[i]), &(line->point[i+1]));
          }
          status = msDrawMarkerSymbol(map, image, p, style, scalefactor);
        }
      }
      break;
    case MS_GEOMTRANSFORM_BBOX: {
      shapeObj bbox;
      lineObj bbox_line;
      pointObj bbox_points[5];
      int padding = MS_MAX(style->width, style->size) + 3; /* so clipped shape does not extend into image */

      /*create a shapeObj representing the bounding box (clipped by the image size)*/
      bbox.numlines = 1;
      bbox.line = &bbox_line;
      bbox.line->numpoints = 5;
      bbox.line->point = bbox_points;

      msComputeBounds(shape);
      bbox_points[0].x = bbox_points[4].x = bbox_points[1].x =
          (shape->bounds.minx < -padding) ? -padding : shape->bounds.minx;
      bbox_points[2].x = bbox_points[3].x =
          (shape->bounds.maxx > image->width+padding) ? image->width+padding : shape->bounds.maxx;
      bbox_points[0].y = bbox_points[4].y = bbox_points[3].y =
          (shape->bounds.miny < -padding) ? -padding : shape->bounds.miny;
      bbox_points[1].y = bbox_points[2].y =
          (shape->bounds.maxy > image->height+padding) ? image->height+padding : shape->bounds.maxy;
      status = msDrawShadeSymbol(map, image, &bbox, style, scalefactor);
    }
    break;
    case MS_GEOMTRANSFORM_CENTROID: {
      double unused; /*used by centroid function*/
      pointObj centroid;
      if(MS_SUCCESS == msGetPolygonCentroid(shape, &centroid, &unused, &unused)) {
        status = msDrawMarkerSymbol(map, image, &centroid, style, scalefactor);
      }
    }
    break;
    case MS_GEOMTRANSFORM_EXPRESSION: {
      shapeObj *tmpshp;
      parseObj p;

      p.shape = shape; /* set a few parser globals (hence the lock) */
      p.expr = &(style->_geomtransform);
      if(p.expr->tokens == NULL) {
        /* this could happen if drawing originates from legend code (#5193) */
        status = msTokenizeExpression(p.expr, NULL, NULL);
        if(status != MS_SUCCESS) {
          msSetError(MS_MISCERR, "Unable to tokenize expression.", "msDrawTransformedShape()");
          return MS_FAILURE;
        }
      }
      p.expr->curtoken = p.expr->tokens; /* reset */
      p.type = MS_PARSE_TYPE_SHAPE;
      if(yyparse(&p) != 0) {
        msSetError(MS_PARSEERR, "Failed to process shape expression: %s", "msDrawTransformedShape", style->_geomtransform.string);
        return MS_FAILURE;
      }
      status = MS_SUCCESS; /* parse succeeded; the draw calls below may override */

      tmpshp = p.result.shpval;
      switch(tmpshp->type) {
        case MS_SHAPE_POINT:
        case MS_SHAPE_POLYGON:
          status = msDrawShadeSymbol(map, image, tmpshp, style, scalefactor);
          break;
        case MS_SHAPE_LINE:
          status = msDrawLineSymbol(map, image, tmpshp, style, scalefactor);
          break;
      }
      msFreeShape(tmpshp);
      msFree(tmpshp);
    }
    break;
    case MS_GEOMTRANSFORM_LABELPOINT:
    case MS_GEOMTRANSFORM_LABELPOLY:
      /* handled in the label rendering path; nothing to draw here */
      break;
    default:
      msSetError(MS_MISCERR, "unknown geomtransform", "msDrawTransformedShape()");
      return MS_FAILURE;
  }
  return status;
}