/// Interval enclosure of cos() for Boost.Interval.
/// Returns an interval guaranteed to contain cos(y) for every y in x,
/// using the rounding policy's directed cos_down/cos_up operations.
/// Returns the empty interval for invalid input (per test_input).
template<class T, class Policies> inline interval<T, Policies> cos(const interval<T, Policies>& x)
{
  if (interval_lib::detail::test_input(x))
    return interval<T, Policies>::empty();
  typename Policies::rounding rnd;
  typedef interval<T, Policies> I;
  typedef typename interval_lib::unprotect<I>::type R;

  // Argument reduction: get lower bound within [0, pi] by first reducing
  // modulo 2*pi, then (below) shifting by pi via cos(x) = -cos(x - pi).
  const R pi2 = interval_lib::pi_twice<R>();
  R tmp = fmod((const R&)x, pi2);
  if (width(tmp) >= pi2.lower())
    return I(static_cast<T>(-1), static_cast<T>(1), true); // we are covering a full period
  if (tmp.lower() >= interval_lib::constants::pi_upper<T>())
    // Lower bound is past pi: reflect into [0, pi].
    return -cos(tmp - interval_lib::pi<R>());
  T l = tmp.lower();
  T u = tmp.upper();
  BOOST_USING_STD_MIN();
  // separate into monotone subintervals
  if (u <= interval_lib::constants::pi_lower<T>())
    // Entirely within [0, pi], where cos is decreasing.
    return I(rnd.cos_down(u), rnd.cos_up(l), true);
  else if (u <= pi2.lower())
    // Straddles pi: the minimum -1 is attained; the maximum is cos at
    // whichever endpoint is closer to a multiple of 2*pi.
    return I(static_cast<T>(-1), rnd.cos_up(min BOOST_PREVENT_MACRO_SUBSTITUTION(rnd.sub_down(pi2.lower(), u), l)), true);
  else
    return I(static_cast<T>(-1), static_cast<T>(1), true);
}
int main() { R a(5,4); a.print(); //µ÷ÓÃvoid print() const R b(20,52); b.print(); //µ÷ÓÃvoid print() const }
/// @brief Makes the inverse relation, mapping codim_to entities /// to their codim_from neighbours. /// /// Implementation note: The algorithm has been changed /// to a three-pass O(n) algorithm. /// @param inv The OrientedEntityTable void makeInverseRelation(OrientedEntityTable<codim_to, codim_from>& inv) const { // Find the maximum index used. This will give (one less than) the size // of the table to be created. int maxind = -1; for (int i = 0; i < size(); ++i) { EntityRep<codim_from> from_ent(i, true); row_type r = operator[](from_ent); for (int j = 0; j < r.size(); ++j) { EntityRep<codim_to> to_ent = r[j]; int ind = to_ent.index(); maxind = std::max(ind, maxind); } } // Build the new_sizes vector and compute datacount. std::vector<int> new_sizes(maxind + 1); int datacount = 0; for (int i = 0; i < size(); ++i) { EntityRep<codim_from> from_ent(i, true); row_type r = operator[](from_ent); datacount += r.size(); for (int j = 0; j < r.size(); ++j) { EntityRep<codim_to> to_ent = r[j]; int ind = to_ent.index(); ++new_sizes[ind]; } } // Compute the cumulative sizes. std::vector<int> cumul_sizes(new_sizes.size() + 1); cumul_sizes[0] = 0; std::partial_sum(new_sizes.begin(), new_sizes.end(), cumul_sizes.begin() + 1); // Using the cumulative sizes array as indices, we populate new_data. // Note that cumul_sizes[ind] is not kept constant, but incremented so that // it always gives the correct index for new data corresponding to index ind. std::vector<int> new_data(datacount); for (int i = 0; i < size(); ++i) { EntityRep<codim_from> from_ent(i, true); row_type r = operator[](from_ent); for (int j = 0; j < r.size(); ++j) { EntityRep<codim_to> to_ent = r[j]; int ind = to_ent.index(); int data_ind = cumul_sizes[ind]; new_data[data_ind] = to_ent.orientation() ? i : ~i; ++cumul_sizes[ind]; } } inv = OrientedEntityTable<codim_to, codim_from>(new_data.begin(), new_data.end(), new_sizes.begin(), new_sizes.end()); }
/** @brief Prints the relation matrix corresponding to the table. Let the entities of codimensions f and t be given by the sets \f$E^f = { e^f_i } \f$ and \f$E^t = { e^t_j }\f$. A relation matrix R is defined by \f{eqnarray*}{ R_{ij} &=& 0 \mbox{ if } e^f_i \mbox{ and } e^t_j \mbox{ are not neighbours }\\ R_{ij} &=& 1 \mbox{ if they are neighbours with same orientation }\\ R_{ij} &=& -1 \mbox{ if they are neighbours with opposite orientation.} \f} @param os The output stream. */ void printRelationMatrix(std::ostream& os) const { int columns = numberOfColumns(); for (int i = 0; i < size(); ++i) { FromType from_ent(i); row_type r = operator[](from_ent); int cur_col = 0; int next_ent = 0; ToType to_ent = r[next_ent]; int next_print = to_ent.index(); while (cur_col < columns) { if (cur_col == next_print) { if (to_ent.orientation()) { os << " 1"; } else { os << " -1"; } ++next_ent; if (next_ent >= r.size()) { next_print = columns; } else { to_ent = r[next_ent]; next_print = to_ent.index(); } } else { os << " 0"; } ++cur_col; } os << '\n'; } }
int numberOfColumns() const { int maxind = 0; for (int i = 0; i < size(); ++i) { FromType from_ent(i); row_type r = operator[](from_ent); for (int j = 0; j < r.size(); ++j) { maxind = std::max(maxind, r[j].index()); } } return maxind + 1; }
// Delegates content storage directly to the wrapped rule r_.
inline void storeDataImpl(Contents<D> & cont) const
{
    r_.storeData(cont);
}
// The filtered range is exhausted exactly when the underlying range is.
virtual bool empty() const
{
    return r.empty();
}
// Appends the left subtree's elements first, then the right subtree's,
// preserving left-to-right order.
void append(S*& dest) const
{
    left.append(dest);
    right.append(dest);
}
// Total element count: the sum of both children's sizes.
int size() const
{
    const int left_count = left.size();
    const int right_count = right.size();
    return left_count + right_count;
}
void run(const P& p0, const P& p1, const bool* blocked, int* flood_buffer, const P& map_dims, std::vector<P>& out, const bool allow_diagonal, const bool randomize_steps) { out.clear(); if (p0 == p1) { // Origin and target is same cell return; } floodfill::run(p0, blocked, flood_buffer, map_dims, -1, p1, allow_diagonal); if (flood_buffer[idx2(p1, map_dims.y)] == 0) { // No path exists return; } const std::vector<P>& dirs = allow_diagonal ? dir_utils::dir_list : dir_utils::cardinal_list; const size_t nr_dirs = dirs.size(); // Corresponds to the elements in "dirs" std::vector<bool> valid_offsets(nr_dirs, false); // The path length will be equal to the flood value at the target cell, so // we can reserve that many elements beforehand. out.reserve(flood_buffer[idx2(p1, map_dims.y)]); P p(p1); out.push_back(p); const R map_r(P(0, 0), map_dims - 1); while (true) { const int val = flood_buffer[idx2(p, map_dims.y)]; P adj_p; // Find valid offsets, and check if origin is reached for (size_t i = 0; i < nr_dirs; ++i) { const P& d(dirs[i]); adj_p = p + d; if (adj_p == p0) { // Origin reached return; } // TODO: What is the purpose of this check? If the current value is // zero, doesn't that mean this cell is the target? Only the target // cell and unreachable cells should have values of zero(?) // Try removing the check and verify if the algorithm still works if (val != 0) { const bool is_inside_map = map_r.is_p_inside(adj_p); const int adj_val = is_inside_map ? flood_buffer[idx2(adj_p, map_dims.y)] : 0; // Mark this as a valid travel direction if it's fewer steps // from the target than the current cell valid_offsets[i] = adj_val < val; } } // Set the next position to one of the valid offsets - either pick one // randomly, or iterate over the list and pick the first valid choice. 
if (randomize_steps) { std::vector<P> adj_p_bucket; for (size_t i = 0; i < nr_dirs; ++i) { if (valid_offsets[i]) { adj_p_bucket.push_back(p + dirs[i]); } } ASSERT(!adj_p_bucket.empty()); adj_p = rnd::element(adj_p_bucket); } else // Do not randomize step choices - iterate over offset list { for (size_t i = 0; i < nr_dirs; ++i) { if (valid_offsets[i]) { adj_p = P(p + dirs[i]); break; } } } out.push_back(adj_p); p = adj_p; } //while }
// Flood-fills travel distances from p0 into "out" (0 means origin,
// unvisited, or unreachable). Optionally limits the fill to travel_lmt
// steps (-1 = unlimited), and optionally stops early when target p1 is
// reached (p1.x == -1 disables the target).
// NOTE(review): only cells strictly inside a one-cell map border are
// flooded (see `bounds`) - presumably the border is always blocked;
// also `blocked` is read before the bounds check, so out-of-map
// neighbours of border cells would be read - confirm callers never
// start on the border.
void run(const P& p0,
         const bool* blocked,
         int* out,
         const P& map_dims,
         int travel_lmt,
         const P& p1,
         const bool allow_diagonal)
{
    std::fill_n(out, map_dims.x * map_dims.y, 0);

    // List of positions to travel to
    std::vector<P> positions;

    // In the worst case we need to visit every position, reserve the elements
    positions.reserve(map_dims.x * map_dims.y);

    // Instead of removing evaluated positions from the vector, we track which
    // index to try next (cheaper than erasing front elements).
    size_t next_p_idx = 0;

    int val = 0;

    bool path_exists = true;
    bool is_at_tgt = false;
    bool is_stopping_at_tgt = p1.x != -1;

    const R bounds(P(1, 1), map_dims - 2);

    P p(p0);

    const auto& dirs = allow_diagonal ?
        dir_utils::dir_list : dir_utils::cardinal_list;

    bool done = false;

    while (!done)
    {
        // "Flood" around the current position, and add those to the list of
        // positions to travel to.
        for (const P& d : dirs)
        {
            const P new_p(p + d);

            if (
                !blocked[idx2(new_p, map_dims.y)] &&
                bounds.is_p_inside(new_p) &&
                out[idx2(new_p, map_dims.y)] == 0 &&
                new_p != p0)
            {
                val = out[idx2(p, map_dims.y)];

                // Only write a distance while within the travel limit.
                if (travel_lmt == -1 || val < travel_lmt)
                {
                    out[idx2(new_p, map_dims.y)] = val + 1;
                }

                if (is_stopping_at_tgt && new_p == p1)
                {
                    is_at_tgt = true;
                    break;
                }

                if (!is_stopping_at_tgt || !is_at_tgt)
                {
                    positions.push_back(new_p);
                }
            }
        } // Offset loop

        // Termination: target reached, queue exhausted, or limit hit.
        if (is_stopping_at_tgt)
        {
            if (positions.size() == next_p_idx)
            {
                path_exists = false;
            }

            if (is_at_tgt || !path_exists)
            {
                done = true;
            }
        }
        else if (positions.size() == next_p_idx)
        {
            done = true;
        }

        if (val == travel_lmt)
        {
            done = true;
        }

        // Advance to the next queued position (unless we just hit the target).
        if (!is_stopping_at_tgt || !is_at_tgt)
        {
            if (positions.size() == next_p_idx)
            {
                // No more positions to evaluate
                path_exists = false;
            }
            else // There are more positions to evaluate
            {
                p = positions[next_p_idx];

                ++next_p_idx;
            }
        }
    } // while
}
// Pairs up the current front elements of both underlying ranges.
virtual value_type front() const
{
    return std::make_pair(l.front(), r.front());
}
// Advances past the current element, then keeps advancing until the
// predicate accepts the new front (or the range is exhausted).
virtual void popFront()
{
    r.popFront();
    while (!r.empty() && !p(r.front())) {
        r.popFront();
    }
}
// Wraps range _r with predicate _p. Immediately skips any leading
// elements that fail the predicate, so front() is valid right away.
RangeFilter(const R &_r, const P &_p)
    : r(_r),
      p(_p)
{
    const bool leading_reject = !r.empty() && !p(r.front());
    if (leading_reject) {
        this->popFront();
    }
}
// Consumes from the left range until it is exhausted, then from the right.
virtual void popFront()
{
    if (l.empty()) {
        r.popFront();
    } else {
        l.popFront();
    }
}
// Front of the concatenation: the left range's front while it has
// elements, otherwise the right range's front.
virtual value_type front() const
{
    return l.empty() ? r.front() : l.front();
}
// The concatenation is empty only when both underlying ranges are.
virtual bool empty() const
{
    const bool left_done = l.empty();
    const bool right_done = r.empty();
    return left_done && right_done;
}
// Advances both underlying ranges in lockstep (the two members are
// independent, so the order of the calls does not matter).
virtual void popFront()
{
    r.popFront();
    l.popFront();
}
// Emits the link records for this rule: a conditional jump guarded by
// pred_, the wrapped rule's own links, and a closing unconditional jump.
// NOTE(review): the offsets appear to be relative - the forward jump
// skips the wrapped rule's contents plus its links plus the trailing
// JumpLink (+1), and the back-jump returns before the construct
// (-size, -links - 2) - presumably implementing a predicate-guarded
// loop; confirm against the link interpreter.
inline void storeLinkImpl(Links<A> & cont) const
{
    // Forward conditional jump over the body when pred_ is not satisfied.
    cont.push_back(ConditionJumpLink<A, true>(pred_, r_.getContentsSize(), r_.getLinksSize() + 1));
    r_.storeLink(cont);
    // Unconditional back-jump to re-evaluate the predicate.
    cont.push_back(JumpLink(-r_.getContentsSize(), -r_.getLinksSize() - 2));
}
void run(const P& p0, const P& p1, const bool blocked[map_w][map_h], std::vector<P>& out, const bool allow_diagonal, const bool randomize_steps) { out.clear(); if (p0 == p1) { // Origin and target is same cell return; } int flood_buffer[map_w][map_h]; floodfill::run(p0, blocked, flood_buffer, -1, p1, allow_diagonal); if (flood_buffer[p1.x][p1.y] == 0) { // No path exists return; } const std::vector<P>& dirs = allow_diagonal ? dir_utils::dir_list : dir_utils::cardinal_list; const size_t nr_dirs = dirs.size(); // Corresponds to the elements in "dirs" std::vector<bool> valid_offsets(nr_dirs, false); // The path length will be equal to the flood value at the target cell, so // we can reserve that many elements beforehand. out.reserve(flood_buffer[p1.x][p1.y]); // We start at the target cell P p(p1); out.push_back(p); const R map_r(P(0, 0), P(map_w, map_h) - 1); while (true) { const int current_val = flood_buffer[p.x][p.y]; P adj_p; // Find valid offsets, and check if origin is reached for (size_t i = 0; i < nr_dirs; ++i) { const P& d(dirs[i]); adj_p = p + d; if (adj_p == p0) { // Origin reached return; } if (map_r.is_p_inside(adj_p)) { const int adj_val = flood_buffer[adj_p.x][adj_p.y]; // Mark this as a valid travel direction if it is not blocked, // and is fewer steps from the target than the current cell. valid_offsets[i] = (adj_val != 0) && (adj_val < current_val); } } // Set the next position to one of the valid offsets - either pick one // randomly, or iterate over the list and pick the first valid choice. if (randomize_steps) { std::vector<P> adj_p_bucket; for (size_t i = 0; i < nr_dirs; ++i) { if (valid_offsets[i]) { adj_p_bucket.push_back(p + dirs[i]); } } ASSERT(!adj_p_bucket.empty()); adj_p = rnd::element(adj_p_bucket); } else // Do not randomize step choices - iterate over offset list { for (size_t i = 0; i < nr_dirs; ++i) { if (valid_offsets[i]) { adj_p = P(p + dirs[i]); break; } } } out.push_back(adj_p); p = adj_p; } // while }
// Exposes the underlying range's front element unchanged.
virtual value_type front() const
{
    return r.front();
}