void Tile::Render(sf::RenderWindow& window, bool grey)
{
    if (grey) {
        int color;
        if (m_pheromone < 1) {
            color = 1;
        } else if (m_pheromone > 255) {
            color = 255;
        } else {
            color = m_pheromone;
        }
        m_shape.setFillColor(sf::Color(color, color, color));
    } else {
        m_shape.setFillColor(ComputeColor(m_terrain));
    }

    window.draw(m_shape);

    if (m_type == FEED) {
        RenderFeed(window);
    } else if (m_type == NEST) {
        RenderNest(window);
    }
}
// plots raster point (ix,iy)
int PlotPoint(unsigned char A[], unsigned int ix, unsigned int iy, int IterationMax)
{
    unsigned i;           /* index of 1D array */
    unsigned char iColor;

    i = Give_i(ix, iy);   /* compute index of 1D array from indices of 2D array */
    iColor = ComputeColor(ix, iy, IterationMax);
    A[i] = iColor;

    return 0;
}
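// The snippet above relies on Give_i(ix, iy) to flatten the 2D pixel indices into the 1D buffer A.
// A minimal sketch of such a mapping, assuming row-major storage and a hypothetical global image
// width iXmax (both the name and the value are illustrative, not taken from the original source):
static const unsigned int iXmax = 800; /* hypothetical image width in pixels */

unsigned int Give_i(unsigned int ix, unsigned int iy)
{
    /* row-major: iy full rows of iXmax pixels precede this pixel, plus ix within the row */
    return iy * iXmax + ix;
}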
Tile::Tile(unsigned int row, unsigned int col, unsigned int pCol, TerrainType terrain)
    : m_terrain(terrain)
    , m_type(NOTHING)
    , m_position(col, row)
    , m_pheromone(0.1f)
    , m_weight(ComputeWeight(terrain))
{
    m_shape.setRadius(m_ShapeRadius);
    m_shape.setPointCount(6);
    m_shape.setPosition(ComputePosition(row, pCol));
    m_shape.setOrigin(m_ShapeRadius, m_ShapeRadius);
    m_shape.setFillColor(ComputeColor(terrain));
    m_shape.setOutlineColor(sf::Color::Blue);
}
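// The constructor above positions a six-pointed sf::CircleShape (a hexagon) via ComputePosition(row, pCol).
// The project's actual layout is not shown here; the following is only an illustrative sketch of one
// common pointy-top hexagonal grid layout with odd rows shifted half a column. The function name,
// spacing, and parameters are assumptions, not the original ComputePosition:
#include <SFML/System/Vector2.hpp>
#include <cmath>

sf::Vector2f HexGridPosition(unsigned int row, unsigned int col, float radius)
{
    const float width   = std::sqrt(3.0f) * radius;               // horizontal distance between columns
    const float height  = 1.5f * radius;                          // vertical distance between rows
    const float xOffset = (row % 2 == 1) ? width / 2.0f : 0.0f;   // shift odd rows by half a column
    return sf::Vector2f(col * width + xOffset, row * height);
}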
void RendererRender(RendererPtr renderer, const char* filename, float fovy)
{
    Ray r;
    FxsVector4 vtmp;
    int i, j;
    FxsImage* img = renderer->renderContext->image;
    ObjectPtr obj = renderer->renderContext->object;
    float asp = ((float)img->width) / img->height;
    float tanfov = tanf(M_PI / 180.0f * fovy / 2.0);
    float t;
    ShadingRecord sr;
    unsigned char color[3];

    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            /* generate a primary ray through the center of pixel (i, j) in camera space */
            FxsVector3MakeZero(&r.origin);
            r.direction.x = 2.0f * ((i + 0.5f) / img->width) - 1.0f;
            r.direction.y = 2.0f * ((j + 0.5f) / img->height) - 1.0f;
            r.direction.x *= (tanfov * asp);
            r.direction.y *= tanfov;
            r.direction.z = -1.0f;

            /* transform origin and direction to world space, then re-derive the direction */
            FxsMatrix4MultiplyVector3(&vtmp, &renderer->renderContext->cameraToWorld, &r.origin);
            VEC3FROMVEC4(r.origin, vtmp);
            FxsMatrix4MultiplyVector3(&vtmp, &renderer->renderContext->cameraToWorld, &r.direction);
            VEC3FROMVEC4(r.direction, vtmp);
            FxsVector3Substract(&r.direction, &r.direction, &r.origin);
            FxsVector3Normalize(&r.direction);

            sr.color[0] = renderer->renderContext->defaultColor[0];
            sr.color[1] = renderer->renderContext->defaultColor[1];
            sr.color[2] = renderer->renderContext->defaultColor[2];

            if (ObjectIsIntersectedByRay(obj, &t, &sr, &r)) {
                ComputeColor(&sr, color);
                FxsImageSet(img, i, img->height - 1 - j, color);
            } else {
                FxsImageSet(img, i, img->height - 1 - j, renderer->renderContext->bgColor);
            }
        }
    }

    FxsImageSave(img, filename);
}
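// The per-pixel ray setup above maps pixel (i, j) to normalized device coordinates in [-1, 1],
// scales by tan(fovy/2) and the aspect ratio, and points the ray down -z in camera space.
// A minimal standalone sketch of that mapping, using a hypothetical Vec3 type rather than the
// Fxs* API of the original:
#include <cmath>

struct Vec3 { float x, y, z; };

Vec3 CameraRayDirection(int i, int j, int width, int height, float fovyDegrees)
{
    float aspect = static_cast<float>(width) / height;
    float tanfov = std::tan(fovyDegrees * 0.5f * 3.14159265f / 180.0f);
    Vec3 d;
    d.x = (2.0f * (i + 0.5f) / width  - 1.0f) * tanfov * aspect; // NDC x scaled to the view plane
    d.y = (2.0f * (j + 0.5f) / height - 1.0f) * tanfov;          // NDC y scaled to the view plane
    d.z = -1.0f;                                                 // camera looks down -z
    return d; // normalize after transforming to world space, as RendererRender does
}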
void Velodyne::ComputePoints(const hal::LidarMsg &LidarData, std::shared_ptr<LidarMsg> Points)
{
    hal::MatrixMsg* pbMatPoint = nullptr;
    if (Points != nullptr) {
        pbMatPoint = Points->mutable_distance();
        pbMatPoint->set_rows(4);
        Points->mutable_rotational_position()->CopyFrom(LidarData.rotational_position());
    }

    // A block contains an upper and a lower block, i.e. 64 lasers.
    for (int block = 0; block < 6; block++) {
        // Precompute sine and cosine of the rotational position to save computation later.
        double cos_rotation_pos = cos(LidarData.rotational_position().data(block) * M_PI / 180);
        double sin_rotation_pos = sin(LidarData.rotational_position().data(block) * M_PI / 180);

        for (int laser = 0; laser < mn_NumLasers; laser++) {
            // Naming convention changes here: "_" is used to delineate words.
            int pt_idx = block * mn_NumLasers + laser;

            // A distance of 0 means the return was closer than 0.9 m and is invalid, so we skip it.
            // A distance of 120 m or more is max range, where we cannot tell whether a point exists.
            double distance = LidarData.distance().data(pt_idx);
            double distance_raw = distance;
            if (distance == 0) // || distance >= 120)
                continue;

            // Calibration data for this laser (vcl stands for Velodyne calibration of a laser).
            VelodyneLaserCorrection vcl = vlc[laser];

            /* 1. Correct the distance by adding distCorrection (the far-point calibration at 25.04 m).
             *    This is the distance error along the ray for this laser. The distance from LidarMsg is
             *    already in meters, as is the correction factor in the calibration data. */
            double dist_corr = vcl.distCorrection;
            distance += dist_corr;

            /* 2. Correct the angles. These angles are measured from the front of the sensor, which is +y,
             *    and are used mainly when computing the x and y coordinates. If a is the laser angle and b
             *    the correction, we want a - b, but for the calculations we need cos(a - b) and sin(a - b)
             *    while only having the cosines and sines of a and b. So we use the identities
             *    cos(a - b) = cos(a)cos(b) + sin(a)sin(b) and sin(a - b) = sin(a)cos(b) - cos(a)sin(b). */
            double cos_rotation_angle = cos_rotation_pos * vcl.cos_rotCorrection + sin_rotation_pos * vcl.sin_rotCorrection;
            double sin_rotation_angle = sin_rotation_pos * vcl.cos_rotCorrection - vcl.sin_rotCorrection * cos_rotation_pos;

            /* 3. Compute the distance in the xy plane, i.e. the distance in the horizontal plane rather than
             *    along the ray. This is needed to correct the vertical and horizontal offsets of the laser:
             *    each laser is supposed to originate from a single point, which in practice it does not.
             *    The vertical offset is the offset along the z-axis from the xy plane; a positive offset is
             *    towards +z. The horizontal offset is the offset in the xy plane from the origin; a positive
             *    offset is towards -x. Further adjustments follow in the next step. */
            double cos_vert_corr = vcl.cos_vertCorrection;
            double sin_vert_corr = vcl.sin_vertCorrection;
            double horiz_offset = vcl.horizOffsetCorrection;
            double vert_offset = vcl.vertOffsetCorrection;

            // From here on, variable names denote distances in the corresponding plane or axis.
            double xy = distance * cos_vert_corr;
            double xx = xy * sin_rotation_angle - horiz_offset * cos_rotation_angle;
            double yy = xy * cos_rotation_angle + horiz_offset * sin_rotation_angle;
            xx = xx < 0 ? -xx : xx;
            yy = yy < 0 ? -yy : yy;

            /* 4. Correct for distCorrectionX and distCorrectionY. We have near-point correction values at
             *    x = 2.4 m and y = 1.93 m, and distCorrection from the far-point calibration at 25.04 m.
             *    To get the correction for a particular distance in x, we interpolate between 2.4 m
             *    (distCorrectionX) and 25.04 m (distCorrection); similarly for y between 1.93 m
             *    (distCorrectionY) and 25.04 m (distCorrection). See Appendix F of the manual for the
             *    interpolation formulae. */
            double corr_xx = vcl.distCorrectionX + (dist_corr - vcl.distCorrectionX) * (xx - 2.4) / 22.64;   // 25.04 - 2.4 = 22.64
            double corr_yy = vcl.distCorrectionY + (dist_corr - vcl.distCorrectionY) * (yy - 1.93) / 23.11;  // 25.04 - 1.93 = 23.11

            /* 5. Extract the x, y and z coordinates. For x and y we correct the distance with the
             *    interpolated corrections, project onto the xy plane, then onto the respective axis,
             *    accounting for the horizontal offset. z is simply the projection of the distance onto
             *    the z-axis, corrected by the vertical offset. */
            // The X-coordinate
            xy = (distance_raw + corr_xx) * cos_vert_corr;
            xx = xy * sin_rotation_angle - horiz_offset * cos_rotation_angle;
            // The Y-coordinate
            xy = (distance_raw + corr_yy) * cos_vert_corr;
            yy = xy * cos_rotation_angle + horiz_offset * sin_rotation_angle;
            // The Z-coordinate. The Velodyne is probably about 1.5 m above the ground; adding 1.5 puts the
            // ground plane at z = 0 (exact value to be estimated later).
            double zz = distance_raw * sin_vert_corr + vert_offset + 1.5;

            // The angular resolution is 0.01 degrees, so there are 36000 rotational positions, and each
            // position owns a block of 256 floats (64 lasers * 4 values (x, y, z, 1) = 256). The angle from
            // LidarData is in degrees, so multiplying by 100 gives the rotational position and by another
            // 256 gives the block offset, hence the factor 25600. Each laser contributes 4 floats, so
            // laser*4 gives the exact position within the block.
            int idx = ((int)LidarData.rotational_position().data(block)) * 25600 + laser * 4;
            mp_Points[idx] = (float)xx;
            mp_Points[idx + 1] = (float)yy;
            mp_Points[idx + 2] = (float)zz;
            mp_Points[idx + 3] = 1.0;
            ComputeColor(idx);

            if (Points != nullptr && pbMatPoint) {
                pbMatPoint->add_data(xx);
                pbMatPoint->add_data(yy);
                pbMatPoint->add_data(zz);
                pbMatPoint->add_data(1);
            }
        }
    }
}
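// Step 4 above linearly interpolates between the near-point calibration (distCorrectionX at x = 2.40 m,
// distCorrectionY at y = 1.93 m) and the far-point calibration (distCorrection at 25.04 m). A minimal
// sketch of that interpolation on its own, as a hypothetical free function not present in the original:
double InterpolateDistanceCorrection(double near_corr, double far_corr,
                                     double near_dist, double coord)
{
    // At coord == near_dist the result is near_corr; at coord == 25.04 it is far_corr.
    return near_corr + (far_corr - near_corr) * (coord - near_dist) / (25.04 - near_dist);
}

// Usage matching the code above:
//   double corr_xx = InterpolateDistanceCorrection(vcl.distCorrectionX, dist_corr, 2.40, xx);
//   double corr_yy = InterpolateDistanceCorrection(vcl.distCorrectionY, dist_corr, 1.93, yy);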