static OniSensorInfo getSensorInfo()
{
  FreenectVideoModeMap supported_modes = getSupportedVideoModes();
  OniVideoMode* modes = new OniVideoMode[supported_modes.size()];
  std::transform(supported_modes.begin(), supported_modes.end(), modes, RetrieveKey());
  OniSensorInfo sensors = { sensor_type, static_cast<int>(supported_modes.size()), modes };
  return sensors;
}
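This and the following snippets all pass a RetrieveKey functor to std::transform to copy the keys of an associative container into another container. Its definition is not part of these excerpts; a minimal sketch of what such a functor typically looks like (an assumption, not the projects' actual code):

#include <utility>

// Sketch of a RetrieveKey-style functor (assumed definition): given a
// std::pair, return its first member, i.e. the map key.
struct RetrieveKey
{
  template <typename Pair>
  typename Pair::first_type operator()(const Pair& p) const
  {
    return p.first;
  }
};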
void Counter::getStartTimes(const std::list<std::vector<cv::Point2f> >& trajectories,
                            std::vector<int>& startTimes, int fno)
{
  std::map<int, int> startMap;
  std::list<std::vector<cv::Point2f> >::const_iterator iTrack = trajectories.begin();
  for (; iTrack != trajectories.end(); ++iTrack)
  {
    if ((int)(fno - iTrack->size()) > track_len)
    {
      // Key is the x coordinate of the track's first point, truncated to int.
      startMap[iTrack->at(0).x]++;
    }
  }
  std::transform(startMap.begin(), startMap.end(), std::back_inserter(startTimes), RetrieveKey());
}
static OniSensorInfo getSensorInfo()
{
  FreenectVideoModeMap supported_modes = getSupportedVideoModes();
  OniVideoMode* modes = new OniVideoMode[supported_modes.size()];
  std::transform(supported_modes.begin(), supported_modes.end(), modes, RetrieveKey());
  // sensorType, numSupportedVideoModes, pSupportedVideoModes
  // (the first getSensorInfo() variant above derives the count from
  // supported_modes.size() rather than SIZE(modes))
  return { sensor_type, SIZE(modes), modes };
}
void Sequence::getPulseEdgeTimes(std::set<double>& times) const
{
  times.clear();
  std::transform(edges.begin(), edges.end(), std::inserter(times, times.end()), RetrieveKey());
}
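On C++11 and later, the same key extraction can be done with a lambda instead of a dedicated functor. A standalone sketch modeled on the getPulseEdgeTimes snippet above (the element type of edges is not shown in the excerpt, so std::map<double, int> is assumed, as is the free-function form):

#include <algorithm>
#include <iterator>
#include <map>
#include <set>

// Lambda-based equivalent of the transform above: copy every key of `edges`
// into the output set `times`.
void copyEdgeKeys(const std::map<double, int>& edges, std::set<double>& times)
{
  times.clear();
  std::transform(edges.begin(), edges.end(),
                 std::inserter(times, times.end()),
                 [](const std::pair<const double, int>& e) { return e.first; });
}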