Example no. 1
Transform simpleMotionsToTransform(TransformData* td,
                                   const LocalMotions* motions){
  int center_x = 0;
  int center_y = 0;
  Transform t = null_transform();
  if (motions == 0) return t;
  int num_motions = vs_vector_size(motions);
  if (num_motions < 1)
    return t;
  // allocate after the early returns so the buffer cannot leak
  double *angles = (double*) vs_malloc(sizeof(double) * num_motions);
  LocalMotion meanmotion;
  int i;

  // calc center point of all remaining fields
  for (i = 0; i < num_motions; i++) {
    center_x += LMGet(motions,i)->f.x;
    center_y += LMGet(motions,i)->f.y;
  }
  center_x /= num_motions;
  center_y /= num_motions;

  // cleaned mean
  meanmotion = cleanmean_localmotions(motions);

  // figure out angle
  if (num_motions < 6) {
    // the angle calculation is inaccurate for 5 or fewer fields
    t.alpha = 0;
  } else {
    for (i = 0; i < num_motions; i++) {
      // subtract avg and calc angle
      LocalMotion m = sub_localmotion(LMGet(motions,i),&meanmotion);
      angles[i] = calcAngle(&m, center_x, center_y);
    }
    double min, max;
    t.alpha = -cleanmean(angles, num_motions, &min, &max);
    if (max - min > td->maxAngleVariation) {
      t.alpha = 0;
      vs_log_info(td->modName, "too large variation in angle(%f)\n",
		  max-min);
    }
  }
  vs_free(angles);
  // compensate for off-center rotation
  double p_x = (center_x - td->fiSrc.width / 2);
  double p_y = (center_y - td->fiSrc.height / 2);
  t.x = meanmotion.v.x + (cos(t.alpha) - 1) * p_x - sin(t.alpha) * p_y;
  t.y = meanmotion.v.y + sin(t.alpha) * p_x + (cos(t.alpha) - 1) * p_y;

  return t;
}
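The off-center compensation at the end deserves a quick illustration: rotating about the fields' center rather than the frame center adds an extra translation, which is exactly what the (cos(alpha) - 1) and sin(alpha) terms above compute. A minimal standalone sketch (hypothetical values, not library code) that prints this extra shift:

#include <math.h>
#include <stdio.h>

int main(void) {
  double alpha = 0.05;        /* small rotation angle in radians */
  double p_x = 30, p_y = -20; /* field-center offset from the frame center */
  /* same correction terms as in simpleMotionsToTransform */
  double dx = (cos(alpha) - 1) * p_x - sin(alpha) * p_y;
  double dy = sin(alpha) * p_x + (cos(alpha) - 1) * p_y;
  printf("extra shift from off-center rotation: %f %f\n", dx, dy);
  return 0;
}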
Example no. 2
int motionDetection(MotionDetect* md, Transform* trans, unsigned char *frame) {
  assert(md->initialized==2);

  md->currorig = frame;
  // smooth the image to do better motion detection
  //  (allows a larger stepsize, or eventually gradient descent, which would need higher resolution)
  if (isRGB(md->fi.pFormat)) {
    // we could calculate a grayscale version and use the YUV stuff afterwards
    // so far only YUV implemented
    memcpy(md->curr, frame, md->fi.framesize);
  } else {
    // box-kernel smoothing (plain average of pixels), which is fine for us
    boxblurYUV(md->curr, frame, md->currtmp, &md->fi, md->stepSize*1/*1.4*/,
               BoxBlurNoColor);
    // two times yields tent-kernel smoothing, which may be better, but I don't
    //  think we need it
    //boxblurYUV(md->curr, md->curr, md->currtmp, &md->fi, md->stepSize*1,
    // BoxBlurNoColor);
  }

  if (md->hasSeenOneFrame) {
    //    md->curr = frame;
    if (isRGB(md->fi.pFormat)) {
      if (md->algo == 0)
        *trans = calcShiftRGBSimple(md);
      else if (md->algo == 1)
        *trans = calcTransFields(md, calcFieldTransRGB, contrastSubImgRGB);
    } else if (md->fi.pFormat == PF_YUV) {
      if (md->algo == 0)
        *trans = calcShiftYUVSimple(md);
      else if (md->algo == 1)
        *trans = calcTransFields(md, calcFieldTransYUV, contrastSubImgYUV);
    } else {
      ds_log_warn(md->modName, "unsupported Pixel Format (Codec: %i)\n",
                  md->fi.pFormat);
      return DS_ERROR;
    }
  } else {
    md->hasSeenOneFrame = 1;
    *trans = null_transform();
  }

  // copy current frame (smoothed) to prev for next frame comparison
  memcpy(md->prev, md->curr, md->fi.framesize);
  md->frameNum++;
  return DS_OK;
}
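The smoothing comments above note that applying the box blur twice yields tent-kernel smoothing. That follows from the fact that convolving a box kernel with itself produces a triangle (tent) kernel; a small 1-D sketch (illustrative only, not library code) makes it visible:

#include <stdio.h>

int main(void) {
  /* 1-D box kernel of width 3: {1,1,1}/3 */
  double box[3] = { 1.0 / 3, 1.0 / 3, 1.0 / 3 };
  double tent[5] = { 0 };
  /* convolve box with box: the result ramps up and down like a tent */
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      tent[i + j] += box[i] * box[j];
  for (int k = 0; k < 5; k++)
    printf("%f ", tent[k]); /* 1/9 2/9 3/9 2/9 1/9 */
  printf("\n");
  return 0;
}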
Example no. 3
/**
 * cleanmean_xy_transform: calculates the cleaned mean of an array
 * of transforms, considering only x and y
 *
 * Parameters:
 *    transforms: array of transforms.
 *           len: length of array
 * Return value:
 *     A new transform with x and y being the cleaned mean
 *     (meaning the upper and lower quintile are removed) of
 *     all transforms. alpha and other fields are 0.
 * Preconditions:
 *     len>0
 * Side effects:
 *     None
 */
Transform cleanmean_xy_transform(const Transform* transforms, int len)
{
  Transform* ts = vs_malloc(sizeof(Transform) * len);
  Transform t = null_transform();
  int i, cut = len / 5;
  memcpy(ts, transforms, sizeof(Transform) * len);
  qsort(ts,len, sizeof(Transform), cmp_trans_x);
  for (i = cut; i < len - cut; i++){ // all but the cut values
    t.x += ts[i].x;
  }
  qsort(ts, len, sizeof(Transform), cmp_trans_y);
  for (i = cut; i < len - cut; i++){ // all but the cut values
    t.y += ts[i].y;
  }
  vs_free(ts);
  return mult_transform(&t, 1.0 / (len - (2.0 * cut)));
}
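For intuition, here is the same cleaned-mean idea applied to plain doubles (a minimal self-contained sketch, not library code): sort, drop the lowest and highest fifth, and average the rest, so a single outlier cannot skew the estimate:

#include <stdio.h>
#include <stdlib.h>

static int cmp_double(const void* a, const void* b) {
  double d = *(const double*)a - *(const double*)b;
  return (d > 0) - (d < 0);
}

/* cleaned mean of plain doubles: drop the lowest and highest fifth */
static double cleanmean_double(double* vals, int len) {
  int i, cut = len / 5;
  double sum = 0;
  qsort(vals, len, sizeof(double), cmp_double);
  for (i = cut; i < len - cut; i++)
    sum += vals[i];
  return sum / (len - 2.0 * cut);
}

int main(void) {
  double xs[] = { 1.0, 1.1, 0.9, 1.0, 25.0 }; /* one outlier */
  printf("cleaned mean: %f\n", cleanmean_double(xs, 5)); /* ~1.03, outlier gone */
  return 0;
}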
Example no. 4
/**
 * vsReadOldTransforms: read transforms file (Deprecated format)
 *  The format is as follows:
 *   Lines with # at the beginning are comments and will be ignored
 *   Data lines have 5 columns separated by space or tab containing
 *   time, x-translation, y-translation, alpha-rotation, extra
 *   where time and extra are integers
 *   and the latter is unused at the moment
 *
 * Parameters:
 *         f:  file handle
 *         trans: place to store the transforms
 * Return value:
 *         number of transforms read
 * Preconditions: f is opened
 */
int vsReadOldTransforms(const VSTransformData* td, FILE* f , VSTransformations* trans)
{
  char l[1024];
  int s = 0;
  int i = 0;
  int ti; // time (ignored)
  VSTransform t;

  while (fgets(l, sizeof(l), f)) {
    t = null_transform();
    if (l[0] == '#')
      continue;    //  ignore comments
    if (strlen(l) == 0)
      continue; //  ignore empty lines
    // try new format
    if (sscanf(l, "%i %lf %lf %lf %lf %i", &ti, &t.x, &t.y, &t.alpha,
               &t.zoom, &t.extra) != 6) {
      if (sscanf(l, "%i %lf %lf %lf %i", &ti, &t.x, &t.y, &t.alpha,
                 &t.extra) != 5) {
        vs_log_error(td->conf.modName, "Cannot parse line: %s", l);
        return 0;
      }
      t.zoom=0;
    }

    if (i>=s) { // resize transform array
      if (s == 0)
        s = 256;
      else
        s*=2;
      /* vs_log_info(td->modName, "resize: %i\n", s); */
      trans->ts = vs_realloc(trans->ts, sizeof(VSTransform)* s);
      if (!trans->ts) {
        vs_log_error(td->conf.modName, "Cannot allocate memory"
                     " for transformations: %i\n", s);
        return 0;
      }
    }
    trans->ts[i] = t;
    i++;
  }
  trans->len = i;

  return i;
}
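A sample input file accepted by vsReadOldTransforms might look like this (values made up for illustration; the parser first tries the 6-column format with zoom and falls back to the 5-column one):

# time x y alpha zoom extra
0 1.25 -0.50 0.0021 0.0 0
1 0.80 -0.30 0.0010 0.0 0
# old 5-column lines are also accepted; zoom then defaults to 0
2 0.40 -0.10 0.0005 0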
Example no. 5
/* tries to register the current frame onto the previous frame.
 *   Algorithm:
 *   discard fields with low contrast
 *   select maxfields fields according to their contrast
 *   check these fields for vertical and horizontal translation
 *   use the minimal difference of all possible positions
 *   calculate the shift as the cleaned mean of all remaining fields
 *   calculate the rotation angle of each field with respect to the
 *   center of the fields after shift removal
 *   calculate the rotation angle as the cleaned mean of all angles
 *   compensate for possibly off-center rotation
 */
Transform calcTransFields(MotionDetect* md, calcFieldTransFunc fieldfunc,
                         contrastSubImgFunc contrastfunc) {
  Transform* ts = (Transform*) ds_malloc(sizeof(Transform) * md->fieldNum);
  Field** fs = (Field**) ds_malloc(sizeof(Field*) * md->fieldNum);
  double *angles = (double*) ds_malloc(sizeof(double) * md->fieldNum);
  int i, index = 0, num_trans;
  Transform t;
#ifdef STABVERBOSE
  FILE *file = NULL;
  char buffer[32];
  ds_snprintf(buffer, sizeof(buffer), "k%04i.dat", md->frameNum);
  file = fopen(buffer, "w");
  fprintf(file, "# plot \"%s\" w l, \"\" every 2:1:0\n", buffer);
#endif

  DSVector goodflds = selectfields(md, contrastfunc);
  // use all "good" fields and calculate optimal match to previous frame
#ifdef USE_OMP
#pragma omp parallel for shared(goodflds, md, ts, fs) // does not bring speedup
#endif
  for(index=0; index < ds_vector_size(&goodflds); index++){
    int i = ((contrast_idx*)ds_vector_get(&goodflds,index))->index;

    t = fieldfunc(md, &md->fields[i], i); // e.g. calcFieldTransYUV
#ifdef STABVERBOSE
    fprintf(file, "%i %i\n%f %f %i\n \n\n", md->fields[i].x, md->fields[i].y,
        md->fields[i].x + t.x, md->fields[i].y + t.y, t.extra);
#endif
    if (t.extra != -1) { // ignore if extra == -1 (unused at the moment)
      ts[index] = t;
      fs[index] = md->fields + i;
    }
  }

  t = null_transform();
  num_trans = ds_vector_size(&goodflds); // number of transforms we actually have
  ds_vector_del(&goodflds);
  if (num_trans < 1) {
    ds_log_warn(md->modName, "too low contrast! No field remains.\n"
                             "(no translations are detected in frame %i)", md->frameNum);
    ds_free(ts); ds_free(fs); ds_free(angles); // free the work buffers before bailing out
    return t;
  }

  int center_x = 0;
  int center_y = 0;
  // calc center point of all remaining fields
  for (i = 0; i < num_trans; i++) {
    center_x += fs[i]->x;
    center_y += fs[i]->y;
  }
  center_x /= num_trans;
  center_y /= num_trans;

  if (md->show) { // draw fields and transforms into frame.
    // this has to be done one after another to handle possible overlap
    if (md->show > 1) {
      for (i = 0; i < num_trans; i++)
        drawFieldScanArea(md, fs[i], &ts[i]);
    }
    for (i = 0; i < num_trans; i++)
      drawField(md, fs[i], &ts[i]);
    for (i = 0; i < num_trans; i++)
      drawFieldTrans(md, fs[i], &ts[i]);
  }
  /* median over all transforms
     t= median_xy_transform(ts, md->field_num);*/
  // cleaned mean
  t = cleanmean_xy_transform(ts, num_trans);

  // subtract avg
  for (i = 0; i < num_trans; i++) {
    ts[i] = sub_transforms(&ts[i], &t);
  }
  // figure out angle
  if (md->fieldNum < 6) {
    // the angle calculation is inaccurate for 5 or fewer fields
    t.alpha = 0;
  } else {
    for (i = 0; i < num_trans; i++) {
      angles[i] = calcAngle(md, fs[i], &ts[i], center_x, center_y);
    }
    double min, max;
    t.alpha = -cleanmean(angles, num_trans, &min, &max);
    if (max - min > md->maxAngleVariation) {
      t.alpha = 0;
      ds_log_info(md->modName, "too large variation in angle(%f)\n",
                  max-min);
    }
  }
  // compensate for off-center rotation
  double p_x = (center_x - md->fi.width / 2);
  double p_y = (center_y - md->fi.height / 2);
  t.x += (cos(t.alpha) - 1) * p_x - sin(t.alpha) * p_y;
  t.y += sin(t.alpha) * p_x + (cos(t.alpha) - 1) * p_y;

#ifdef STABVERBOSE
  fclose(file);
#endif
  ds_free(ts); ds_free(fs); ds_free(angles); // free the work buffers
  return t;
}
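The per-field angle step relies on calcAngle, which is not shown here. One plausible way to estimate the rotation a single field implies (an assumption for illustration only; the library's actual calcAngle may differ) is the angle between the center-to-field vector before and after the field's measured shift:

#include <math.h>

/* hypothetical sketch: rotation implied by one field, as the angle of the
   center->field vector before vs. after the measured shift (tx, ty) */
static double field_angle(double fx, double fy, double tx, double ty,
                          double cx, double cy) {
  double before = atan2(fy - cy, fx - cx);
  double after  = atan2(fy + ty - cy, fx + tx - cx);
  return after - before;
}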
Example no. 6
/* calculates the optimal transformation for one field in RGB
 *   slower than the YUV version because it uses all three color channels
 */
Transform calcFieldTransRGB(MotionDetect* md, const Field* field, int fieldnum) {
  int tx = 0;
  int ty = 0;
  uint8_t *I_c = md->curr, *I_p = md->prev;
  int i, j;
  int bpp = (md->fi.pFormat == PF_RGB) ?3 :4;

  /* We improve speed by checking the most probable position first,
     so the search paths are cut most effectively. (0,0) is a simple start.
  */
  unsigned int minerror = compareSubImg(I_c, I_p, field, md->fi.width, md->fi.height,
                                        bpp, 0, 0, UINT_MAX);
  // check all positions...
  for (i = -md->maxShift; i <= md->maxShift; i += md->stepSize) {
    for (j = -md->maxShift; j <= md->maxShift; j += md->stepSize) {
      if (i == 0 && j == 0)
        continue; //no need to check this since already done
      unsigned int error = compareSubImg(I_c, I_p, field, md->fi.width,
                                         md->fi.height, bpp, i, j, minerror);
      if (error < minerror) {
        minerror = error;
        tx = i;
        ty = j;
      }
    }
  }
  if (md->stepSize > 1) { // do a fine-grained check around the best match
    int txc = tx; // save the shifts
    int tyc = ty;
    int r = md->stepSize - 1;
    for (i = txc - r; i <= txc + r; i += 1) {
      for (j = tyc - r; j <= tyc + r; j += 1) {
        if (i == txc && j == tyc)
          continue; //no need to check this since already done
        unsigned int error = compareSubImg(I_c, I_p, field, md->fi.width,
                                           md->fi.height, bpp, i, j, minerror);
        if (error < minerror) {
          minerror = error;
          tx = i;
          ty = j;
        }
      }
    }
  }

  if (!md->allowMax && fabs(tx) >= md->maxShift + md->stepSize) {
#ifdef STABVERBOSE
    ds_log_msg(md->modName, "maximal x shift ");
#endif
    tx = 0;
  }
  if (!md->allowMax && fabs(ty) >= md->maxShift + md->stepSize) {
#ifdef STABVERBOSE
    ds_log_msg(md->modName, "maximal y shift ");
#endif
    ty = 0;
  }
  Transform t = null_transform();
  t.x = tx;
  t.y = ty;
  return t;
}
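The two-stage search above (a coarse grid with stepSize, then a fine pass around the best coarse match) is what keeps this affordable. A quick count with hypothetical parameter values shows the saving over an exhaustive scan:

#include <stdio.h>

int main(void) {
  int maxShift = 48, stepSize = 6;          /* hypothetical values */
  int coarse = 2 * maxShift / stepSize + 1; /* coarse grid points per axis */
  int fine = 2 * (stepSize - 1) + 1;        /* fine window points per axis */
  int full = 2 * maxShift + 1;              /* exhaustive points per axis */
  printf("coarse+fine: %d positions, exhaustive: %d\n",
         coarse * coarse + fine * fine - 1, full * full); /* 409 vs 9409 */
  return 0;
}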
Example no. 7
/* calculates the optimal transformation for one field in YUV frames
 * (only luminance)
 */
Transform calcFieldTransYUV(MotionDetect* md, const Field* field, int fieldnum) {
  int tx = 0;
  int ty = 0;
  uint8_t *Y_c = md->curr, *Y_p = md->prev;
  // we only use the luminance part of the image
  int i, j;
  int stepSize = md->stepSize;

#ifdef STABVERBOSE
  // printf("%i %i %f\n", md->frameNum, fieldnum, contr);
  FILE *f = NULL;
  char buffer[32];
  ds_snprintf(buffer, sizeof(buffer), "f%04i_%02i.dat", md->frameNum, fieldnum);
  f = fopen(buffer, "w");
  fprintf(f, "# splot \"%s\"\n", buffer);
#endif

#ifdef USE_SPIRAL_FIELD_CALC
  unsigned int minerror = UINT_MAX;

  // check all positions by outgoing spiral
  i = 0; j = 0;
  int limit = 1;
  int step = 0;
  int dir = 0;
  while (j >= -md->maxShift && j <= md->maxShift && i >= -md->maxShift && i <= md->maxShift) {
      unsigned int error = compareSubImg(Y_c, Y_p, field, md->fi.width, md->fi.height,
                 1, i, j, minerror);

      if (error < minerror) {
          minerror = error;
          tx = i;
          ty = j;
      }

      //spiral indexing...
      step++;
      switch (dir) {
      case 0:
        i += stepSize;
        if (step == limit) {
          dir = 1;
          step = 0;
        }
        break;
      case 1:
        j += stepSize;
        if (step == limit) {
          dir = 2;
          step = 0;
          limit++;
        }
        break;
      case 2:
        i -= stepSize;
        if (step == limit) {
          dir = 3;
          step = 0;
        }
        break;
      case 3:
        j -= stepSize;
        if (step == limit) {
          dir = 0;
          step = 0;
          limit++;
        }
        break;
      }
  }
#else
  /* We improve speed by checking the most probable position first,
     so the search paths are cut most effectively. (0,0) is a simple start.
  */
  unsigned int minerror = compareSubImg(Y_c, Y_p, field, md->fi.width, md->fi.height,
                    1, 0, 0, UINT_MAX);
  // check all positions...
  for (i = -md->maxShift; i <= md->maxShift; i += stepSize) {
    for (j = -md->maxShift; j <= md->maxShift; j += stepSize) {
      if (i == 0 && j == 0)
        continue; //no need to check this since already done
      unsigned int error = compareSubImg(Y_c, Y_p, field, md->fi.width, md->fi.height,
                                         1, i, j, minerror);
      if (error < minerror) {
        minerror = error;
        tx = i;
        ty = j;
      }
#ifdef STABVERBOSE
      fprintf(f, "%i %i %u\n", i, j, error);
#endif
    }
  }

#endif

  while (stepSize > 1) { // do a fine-grained check around the best match
    int txc = tx; // save the shifts
    int tyc = ty;
    int newStepSize = stepSize/2;
    int r = stepSize - newStepSize;
    for (i = txc - r; i <= txc + r; i += newStepSize) {
      for (j = tyc - r; j <= tyc + r; j += newStepSize) {
        if (i == txc && j == tyc)
          continue; //no need to check this since already done
        unsigned int error = compareSubImg(Y_c, Y_p, field, md->fi.width,
                                           md->fi.height, 1, i, j, minerror);
#ifdef STABVERBOSE
    fprintf(f, "%i %i %f\n", i, j, error);
#endif
        if (error < minerror) {
          minerror = error;
          tx = i;
          ty = j;
        }
      }
    }
    stepSize /= 2;
  }
#ifdef STABVERBOSE
  fclose(f);
  ds_log_msg(md->modName, "Minerror: %f\n", minerror);
#endif

  if (!md->allowMax && fabs(tx) >= md->maxShift + md->stepSize) {
#ifdef STABVERBOSE
    ds_log_msg(md->modName, "maximal x shift ");
#endif
    tx = 0;
  }
  if (!md->allowMax && fabs(ty) >= md->maxShift + md->stepSize) {
#ifdef STABVERBOSE
    ds_log_msg(md->modName, "maximal y shift ");
#endif
    ty = 0;
  }
  Transform t = null_transform();
  t.x = tx;
  t.y = ty;
  return t;
}
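The spiral indexing in the USE_SPIRAL_FIELD_CALC branch can be hard to follow. This standalone sketch replicates the same dir/step/limit bookkeeping (with stepSize fixed to 1) and prints the visit order, which walks outward from (0,0):

#include <stdio.h>

int main(void) {
  int i = 0, j = 0, limit = 1, step = 0, dir = 0, maxShift = 2;
  while (i >= -maxShift && i <= maxShift && j >= -maxShift && j <= maxShift) {
    printf("(%d,%d) ", i, j); /* (0,0) (1,0) (1,1) (0,1) (-1,1) ... */
    step++;
    switch (dir) {
    case 0: i++; if (step == limit) { dir = 1; step = 0; } break;
    case 1: j++; if (step == limit) { dir = 2; step = 0; limit++; } break;
    case 2: i--; if (step == limit) { dir = 3; step = 0; } break;
    case 3: j--; if (step == limit) { dir = 0; step = 0; limit++; } break;
    }
  }
  printf("\n");
  return 0;
}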
Example no. 8
void pathfinding_system::advance_pathfinding_sessions(logic_step& step) {
	auto& cosmos = step.cosm;
	const auto& settings = cosmos.significant.meta.settings.pathfinding;

	/* prepare epsilons to be used later, just to make the notation clearer */
	const float epsilon_distance_visible_point_sq = settings.epsilon_distance_visible_point * settings.epsilon_distance_visible_point;
	
	/* we'll need a reference to physics system for raycasting */
	physics_system& physics = cosmos.systems_temporary.get<physics_system>();

	auto& renderer = augs::renderer::get_current();
	auto& lines = renderer.logic_lines;

	for (const auto& it : cosmos.get(processing_subjects::WITH_PATHFINDING)) {
		/* get necessary components */
		auto& pathfinding = it.get<components::pathfinding>();
		const auto& transform = it.logic_transform() + pathfinding.eye_offset;
		auto& body = it.get<components::physics>();

		if (!body.is_constructed()) {
			continue;
		}

		/* check if we request pathfinding at the moment */
		if (!pathfinding.session_stack.empty()) {
			/* get visibility information */
			auto& vision = step.transient.calculated_visibility[it];
			
			std::vector<components::pathfinding::pathfinding_session::navigation_vertex> undiscovered_visible;

			/* proceed only if the session is not degenerate */
			if (!vision.edges.empty() && !vision.discontinuities.empty()) {
				if (pathfinding.force_touch_sensors) {
					for (auto& vertex_hit : vision.vertex_hits) {
						messages::visibility_information_response::discontinuity new_discontinuity;
						new_discontinuity.edge_index = vertex_hit.first;
						new_discontinuity.points.first = vertex_hit.second;
						new_discontinuity.is_boundary = false;
						/* rest is not worth filling so proceed */

						if (vertex_hit.second.y >= transform.pos.y) {
							vision.discontinuities.push_back(new_discontinuity);
						}
					}

					vision.vertex_hits.clear();
				}

				//vision.ignore_discontinuities_shorter_than = pathfinding.session().temporary_ignore_discontinuities_shorter_than;

				/* save all fully visible vertices as discovered */
				for (const auto& visible_vertex : vision.vertex_hits) {
					bool this_visible_vertex_is_already_memorised = false;

					//auto components::pathfinding::pathfinding_session::* location = 
					//		pathfinding.force_touch_sensors ? 
					//	&components::pathfinding::pathfinding_session::undiscovered_vertices :
					//&components::pathfinding::pathfinding_session::discovered_vertices;

					for (auto& memorised : pathfinding.session().discovered_vertices) {
						/* if a similar discovered vertex exists */
						if (memorised.location.compare(visible_vertex.second, settings.epsilon_distance_the_same_vertex)) {
							this_visible_vertex_is_already_memorised = true;
							/* overwrite the location just in case */
							memorised.location = visible_vertex.second;
							break;
						}
					}

					/* save if unique */
					if (!this_visible_vertex_is_already_memorised) {
						components::pathfinding::pathfinding_session::navigation_vertex vert;
						vert.location = visible_vertex.second;
						pathfinding.session().discovered_vertices.push_back(vert);
					}
				}



				/* save all new discontinuities from visibility */
				for (const auto& disc : vision.discontinuities) {
					if (disc.is_boundary) continue;

					bool this_discontinuity_is_already_memorised = false;
					bool this_discontinuity_is_already_discovered = false;

					components::pathfinding::pathfinding_session::navigation_vertex vert;

					for (auto& memorised_undiscovered : pathfinding.session().undiscovered_vertices) {
						/* if a discontinuity with the same closer vertex already exists */
						if (memorised_undiscovered.location.compare(disc.points.first, settings.epsilon_distance_the_same_vertex)) {
							this_discontinuity_is_already_memorised = true;
							vert = memorised_undiscovered;
							//memorised_undiscovered.location = disc.points.first;
							break;
						}
					}

					for (auto& memorised_discovered : pathfinding.session().discovered_vertices) {
						/* if a discontinuity with the same closer vertex already exists */
						if (memorised_discovered.location.compare(disc.points.first, settings.epsilon_distance_the_same_vertex)) {
							this_discontinuity_is_already_discovered = true;
							memorised_discovered.location = disc.points.first;
							break;
						}
					}

					vert.location = disc.points.first;

					/* if it is unique, push it */
					if (!this_discontinuity_is_already_memorised && !this_discontinuity_is_already_discovered) {

						/* get the associated edge to prepare a relevant sensor */
						auto associated_edge = vision.edges[disc.edge_index];

						/* get the direction the sensor will be going to */
						vec2 sensor_direction;

						bool degenerate = false;

						/* if the first vertex of the edge matches the location */
						if (associated_edge.first.compare(vert.location))
							sensor_direction = associated_edge.first - associated_edge.second;
						/* if it is the second one */
						else if (associated_edge.second.compare(vert.location))
							sensor_direction = associated_edge.second - associated_edge.first;
						/* should never happen, degenerate edge */
						else {
							degenerate = true;
						}
						if (!degenerate) {
							/* rotate a bit to prevent non-reachable sensors */
							float rotation = pathfinding.rotate_navpoints;
							if (disc.winding == disc.LEFT) rotation = -rotation;
							sensor_direction.rotate(rotation, vec2(0, 0));
							//sensor_direction = transform.pos - vert.location;
							sensor_direction.normalize();

							vert.sensor = vert.location + sensor_direction * pathfinding.target_offset;

							/* if this sensor overlaps anything, discard it */
							std::vector<vec2> sensor_polygon = {
								sensor_direction * 10 + vert.location - sensor_direction.perpendicular_cw() * 4,
								sensor_direction * 10 + vert.location - sensor_direction.perpendicular_cw() * 4 + sensor_direction * pathfinding.target_offset,
								sensor_direction * 10 + vert.location + sensor_direction.perpendicular_cw() * 4 + sensor_direction * pathfinding.target_offset,
								sensor_direction * 10 + vert.location + sensor_direction.perpendicular_cw() * 4
							};

							auto out = physics.query_polygon(sensor_polygon, pathfinding.filter, it);

							if (out.bodies.empty()) {
								vert.sensor = physics.push_away_from_walls(vert.sensor, pathfinding.target_offset, 50, pathfinding.filter, it);
								pathfinding.session().undiscovered_vertices.push_back(vert);
							}
						}
					}

					if (!this_discontinuity_is_already_discovered)
						undiscovered_visible.push_back(vert);
				}

			}

			/* mark as visible vertices such that:
				a) there is a memorised discovered vertex that is epsilon-close to it
				b) the sensor's distance from the body is less than distance_navpoint_hit
			*/

			/* prepare body polygon to test for overlaps */
			b2PolygonShape body_poly;
			auto verts = get_world_vertices(it);
			body_poly.Set(verts.data(), verts.size());

			/* for every undiscovered navigation point */
			auto& undiscs = pathfinding.session().undiscovered_vertices;
			undiscs.erase(std::remove_if(undiscs.begin(), undiscs.end(), [&body, &pathfinding, &body_poly, &settings](const components::pathfinding::pathfinding_session::navigation_vertex& nav){

				/* if we want to force the entity to touch the sensors, we can't discard undiscovered vertices only by 
					saying that there exists a discovered vertex (which is discovered only because it is fully visible)
				*/
				//if (!pathfinding.force_touch_sensors) {
					/* find epsilon-close discovered vertices */
					for (auto& memorised_discovered : pathfinding.session().discovered_vertices) {
						/* if a similar discovered vertex exists */
						if (memorised_discovered.location.compare(nav.location, settings.epsilon_distance_the_same_vertex))
							return true;
					}
				//}
				
				if (pathfinding.mark_touched_as_discovered) {
					/* prepare edge shape for sensor to test for overlaps */
					b2EdgeShape sensor_edge;
					sensor_edge.Set(nav.location * PIXELS_TO_METERSf, nav.sensor * PIXELS_TO_METERSf);

					/* prepare null transform, both bodies are already in the same frame of reference */
					b2Transform null_transform(b2Vec2(0.f, 0.f), b2Rot(0.f));

					/* if shortest distance between body and sensor fits in distance_navpoint_hit */
					if (b2TestOverlap(&sensor_edge, 0, &body_poly, 0, null_transform, null_transform, pathfinding.distance_navpoint_hit * PIXELS_TO_METERSf)) {
						/* save this sensor in discovered ones and return true to remove it from the undiscovered */
						pathfinding.session().discovered_vertices.push_back(nav);
						return true;
					}
				}

				return false;
			}), undiscs.end());

			/* now for the actual pathfinding routine */

			/* helpful lambda */
			auto is_point_visible = [&physics, epsilon_distance_visible_point_sq, &pathfinding, it](vec2 from, vec2 point, b2Filter& filter){
				bool visibility_condition_fulfilled = true;
				
				//if (pathfinding.target_visibility_condition)
				//	visibility_condition_fulfilled = pathfinding.target_visibility_condition(it, from, point);

				if (visibility_condition_fulfilled) {
					auto line_of_sight = physics.ray_cast_px(from, point, filter);
					return (!line_of_sight.hit || (line_of_sight.intersection - point).length_sq() < epsilon_distance_visible_point_sq);
				}
				else return false;
			};

			/* we are sure here that session stack has at least 1 session
				we drop secondary sessions whose targets are visible
			*/
			if (pathfinding.enable_session_rollbacks && pathfinding.session_stack.size() >= 2) {
				for (auto old_session = pathfinding.session_stack.begin(); old_session != pathfinding.session_stack.end(); ++old_session) {
					/* check if there's a line of sight to any of the old targets;
					if there's a line of sight to "navigate_to", it will be visible as a target to the newer session,
					and either way we also handle the current session's target, so nothing is missing here
					*/

					/* if we're exploring, we have no target in the first session */
					if (pathfinding.is_exploring && old_session == pathfinding.session_stack.begin())
						continue;

					if (body.test_point((*old_session).target) ||
						is_point_visible(transform.pos, (*old_session).target, pathfinding.filter)) {
							/* if there is, roll back to this session */
							pathfinding.session() = (*old_session);
							
							/* if it is the first session, we don't want to erase it since we still need to reach the target */
							if (old_session == pathfinding.session_stack.begin())
								++old_session;

							/* drop unnecessary sessions */
							pathfinding.session_stack.erase(old_session, pathfinding.session_stack.end());
							break;
					}
				}
			}

			/* if we're exploring, we have no target in the first session */
			if (!pathfinding.is_exploring && pathfinding.session_stack.size() == 1) {
				/* if the target is inside body, it's already found */
				if (body.test_point(pathfinding.session().target)) {
					/* done, target found */
					pathfinding.stop_and_clear_pathfinding();
					continue;
				}

				/* check if there's a line of sight */
				if (is_point_visible(transform.pos, pathfinding.session().target, pathfinding.filter)) {
					/* if there is, navigate directly to target */

					pathfinding.session().discovered_vertices.clear();
					pathfinding.session().undiscovered_vertices.clear();

					pathfinding.session().navigate_to = pathfinding.session().target;
					continue;
				}
			}

			/* if it is the last session but there's no line of sight,
				or it is not the last session but it was not dropped from the loop which means there's no line of sight to target,
				pick the best navigation candidate

				if we're exploring, pick only visible undiscovered vertices not to get stuck between two nodes
			*/

			auto& vertices = //(pathfinding.is_exploring && pathfinding.session_stack.size() == 1 && !undiscovered_visible.empty()) ?
			//undiscovered_visible : 
			pathfinding.session().undiscovered_vertices;

			/* save only for queries within the function "exists_among_undiscovered_visible" */
			pathfinding.session().undiscovered_visible = undiscovered_visible;

			if (settings.draw_undiscovered) {
				for (auto& disc : vertices)
					lines.draw(disc.location, disc.sensor, rgba(0, 127, 255, 255));

				for (auto& disc : pathfinding.session().discovered_vertices)
					//if(disc.sensor.non_zero())
					lines.draw(disc.location, disc.location + vec2(0, pathfinding.target_offset), rgba(0, 255, 0, 255));
			}

			if (!vertices.empty()) {
				bool persistent_navpoint_found = false;

				components::pathfinding::pathfinding_session::navigation_vertex current_target;

				if (pathfinding.force_persistent_navpoints) {
					if (pathfinding.session().persistent_navpoint_set) {
						for (auto& v : vertices) {
							if (v.sensor.compare(pathfinding.session().persistent_navpoint.sensor, settings.epsilon_distance_the_same_vertex)) {
								persistent_navpoint_found = true;
								break;
							}
						}
					}
				}

				if (persistent_navpoint_found) {
					current_target = pathfinding.session().persistent_navpoint;
				}
				else {
					vec2 unit_vel = body.velocity();
					unit_vel.normalize();
					
					auto local_minimum_predicate = [&pathfinding, &transform, unit_vel](const components::pathfinding::pathfinding_session::navigation_vertex& a,
						const components::pathfinding::pathfinding_session::navigation_vertex& b) {

						/* if we're exploring, we have no target in the first session */
						if (pathfinding.is_exploring && pathfinding.session_stack.size() == 1) {
							if (pathfinding.favor_velocity_parallellness) {
								float parallellness_a = 0.f;
								float parallellness_b = 0.f;

								if (pathfinding.custom_exploration_hint.enabled) {
									vec2 compared_dir = (pathfinding.custom_exploration_hint.target - pathfinding.custom_exploration_hint.origin).normalize();
									parallellness_a = (a.location - pathfinding.custom_exploration_hint.origin).normalize().dot(compared_dir);
									parallellness_b = (b.location - pathfinding.custom_exploration_hint.origin).normalize().dot(compared_dir);
								}
								else {
									parallellness_a = (a.location - transform.pos).normalize().dot(unit_vel);
									parallellness_b = (b.location - transform.pos).normalize().dot(unit_vel);
								}

								return parallellness_a > parallellness_b;
							}
							else if (pathfinding.custom_exploration_hint.enabled)
								return (a.location - pathfinding.custom_exploration_hint.origin).length_sq() < (b.location - pathfinding.custom_exploration_hint.origin).length_sq();
							else return (a.location - transform.pos).length_sq() < (b.location - transform.pos).length_sq();
						}

						auto dist_a = (a.location - pathfinding.session().target).length_sq() + (a.location - transform.pos).length_sq();
						auto dist_b = (b.location - pathfinding.session().target).length_sq() + (b.location - transform.pos).length_sq();
						return dist_a < dist_b;
					};

					bool first_priority_navpoint_found = false;

					//if (pathfinding.first_priority_navpoint_check) {
					//	std::vector<components::pathfinding::pathfinding_session::navigation_vertex> first_priority_candidates;
					//
					//	for (auto& v : vertices) {
					//		try {
					//			/* arguments: subject, transform, navpoint 
					//				returns true or false
					//			*/
					//			if (pathfinding.first_priority_navpoint_check(it, transform.pos, v.sensor)) {
					//				first_priority_candidates.push_back(v);
					//			}
					//		}
					//		catch (std::exception compilation_error) {
					//			LOG(compilation_error.what());
					//		}
					//	}
					//
					//	if (!first_priority_candidates.empty()) {
					//		/* find discontinuity that is closest to the target */
					//		current_target = (*std::min_element(first_priority_candidates.begin(), first_priority_candidates.end(), local_minimum_predicate));
					//
					//		first_priority_navpoint_found = true;
					//	}
					//}

					if (!first_priority_navpoint_found) 
						/* find discontinuity that is closest to the target */
						current_target = (*std::min_element(vertices.begin(), vertices.end(), local_minimum_predicate));

					if (pathfinding.force_persistent_navpoints) {
						pathfinding.session().persistent_navpoint_set = true;
						pathfinding.session().persistent_navpoint = current_target;
					}
				}

				/* extract the closer vertex; the condition exists to facilitate debugging */
				if (current_target.sensor != pathfinding.session().navigate_to)
					pathfinding.session().navigate_to = current_target.sensor;


				if (settings.draw_undiscovered) {
					lines.draw(transform.pos, current_target.sensor, rgba(255, 255, 0, 255));
					lines.draw(transform.pos, pathfinding.session().target, rgba(255, 0, 0, 255));
				}

				bool rays_hit = false;
				/* extract all transformed vertices of the subject's original model, false means we want pixels */
				auto subject_verts = get_world_vertices(it, false);
				subject_verts.clear();
				subject_verts.push_back(transform.pos);

				for (auto& subject_vert : subject_verts) {
					if (
						//is_point_visible(subject_vert, current_target.location, pathfinding.filter) ||
						is_point_visible(subject_vert, current_target.sensor, pathfinding.filter)
						) {

						/* assume for now that the rays DID hit the navpoint */
						rays_hit = true;

						/* now see if the navpoint can be seen through marked non-walkable areas
							prepare raycast data
						*/
						b2RayCastOutput output;
						b2RayCastInput input;
						input.maxFraction = 1.0;

						for (auto& marked : vision.marked_holes) {
							/* prepare raycast subject */
							b2EdgeShape marked_hole;
							marked_hole.Set(marked.first, marked.second);

							input.p1 = subject_vert;
							input.p2 = current_target.location;

							/* we don't need to transform edge or ray since they are in the same space
							but we have to prepare dummy b2Transform as argument for b2EdgeShape::RayCast
							*/
							b2Transform null_transform(b2Vec2(0.f, 0.f), b2Rot(0.f));

							if (marked_hole.RayCast(&output, input, null_transform, 0)) {
								rays_hit = false;
								break;
							}

							input.p2 = current_target.sensor;

							if (marked_hole.RayCast(&output, input, null_transform, 0)) {
								rays_hit = false;
								break;
							}
						}
					}
				}

				/* if we can see it, navigate there */
				if (body.test_point(current_target.location) ||
					body.test_point(current_target.sensor) ||
					rays_hit
					) {
				}
				/* else start new navigation session */
				else {
					if (pathfinding.enable_backtracking) {
						vec2 new_target = pathfinding.session().navigate_to;
						pathfinding.session_stack.push_back(components::pathfinding::pathfinding_session());
						pathfinding.session().target = new_target;
						pathfinding.session().temporary_ignore_discontinuities_shorter_than = pathfinding.starting_ignore_discontinuities_shorter_than;
					}
				}

			}
			else {
				/* something went wrong, let's begin again */
				//if (pathfinding.session_stack.size() == 1) {
					pathfinding.session().discovered_vertices.clear();
					pathfinding.session().undiscovered_vertices.clear();
					pathfinding.session().persistent_navpoint_set = false;
					pathfinding.session().undiscovered_visible.clear();
					//pathfinding.session().temporary_ignore_discontinuities_shorter_than /= 1.5f;
				//}
				//pathfinding.session_stack.resize(1);
			}
		}
	}
}
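A recurring pattern above is epsilon-deduplication of vertices: a point counts as already memorised if some stored vertex lies within epsilon_distance_the_same_vertex of it, in which case its location is overwritten; otherwise it is pushed as new. A minimal sketch of that pattern with hypothetical types (plain C, not engine code):

#include <stdio.h>

typedef struct { float x, y; } vec2;

/* points closer than eps are treated as the same vertex */
static int same_vertex(vec2 a, vec2 b, float eps) {
  float dx = a.x - b.x, dy = a.y - b.y;
  return dx * dx + dy * dy <= eps * eps;
}

int main(void) {
  vec2 memorised[64];
  int n = 0;
  vec2 seen[] = { {10.0f, 10.0f}, {10.2f, 9.9f}, {50.0f, 50.0f} };
  float eps = 1.0f;
  for (int k = 0; k < 3; k++) {
    int found = 0;
    for (int m = 0; m < n; m++) {
      if (same_vertex(memorised[m], seen[k], eps)) {
        memorised[m] = seen[k]; /* overwrite the location just in case */
        found = 1;
        break;
      }
    }
    if (!found)
      memorised[n++] = seen[k]; /* save if unique */
  }
  printf("memorised %d unique vertices\n", n); /* prints 2 */
  return 0;
}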