// Karma grammar that serialises a mapnik geometry (point / linestring /
// polygon) as WKT text.  `single` selects whether each rule writes its own
// tag ("Point(", "LineString(", "Polygon(") or just an opening paren — the
// bare-paren form is used when the geometry is nested inside a container
// whose tag the caller already emitted.
wkt_generator<OutputIterator, Geometry>::wkt_generator(bool single)
    : wkt_generator::base_type(wkt)
{
    // Bring the karma terminals used below into scope by value so the rule
    // definitions stay readable.
    boost::spirit::karma::uint_type uint_;
    boost::spirit::karma::_val_type _val;
    boost::spirit::karma::_1_type _1;
    boost::spirit::karma::lit_type lit;
    boost::spirit::karma::_a_type _a;
    boost::spirit::karma::_b_type _b;
    boost::spirit::karma::_c_type _c;
    boost::spirit::karma::_r1_type _r1;
    boost::spirit::karma::eps_type eps;
    boost::spirit::karma::string_type kstring;

    // Top-level alternative: dispatch on the geometry's runtime type tag.
    wkt = point | linestring | polygon
        ;

    // &uint_(...) is a zero-width guard: it succeeds only when the type tag
    // matches, producing no output itself.
    point = &uint_(mapnik::geometry_type::types::Point)[_1 = _type(_val)]
        << kstring[ phoenix::if_ (single) [_1 = "Point("] .else_[_1 = "("]]
        << point_coord [_1 = _first(_val)] << lit(')')
        ;

    linestring = &uint_(mapnik::geometry_type::types::LineString)[_1 = _type(_val)]
        << kstring[ phoenix::if_ (single) [_1 = "LineString("] .else_[_1 = "("]]
        << coords << lit(')')
        ;

    // Polygon closes with "))": the matching inner "(" is emitted by the
    // first SEG_MOVETO inside polygon_coord below.
    polygon = &uint_(mapnik::geometry_type::types::Polygon)[_1 = _type(_val)]
        << kstring[ phoenix::if_ (single) [_1 = "Polygon("] .else_[_1 = "("]]
        << coords2 << lit("))")
        ;

    // A single vertex rendered as "x y"; the leading &uint_ skips over the
    // vertex's command word without printing it.
    point_coord = &uint_ << coordinate << lit(' ') << coordinate
        ;

    // One vertex of a polygon (possibly multi-ring).  _r1 counts rings: a
    // SEG_MOVETO after the first ring emits "),(" to close the previous ring
    // and open the next, while a SEG_LINETO just separates vertices with
    // ','.  The vertex is buffered into the rule locals _a/_b via eps before
    // being written.
    polygon_coord %= ( &uint_(mapnik::SEG_MOVETO)
        << eps[_r1 += 1][_a = _x(_val)][ _b = _y(_val)]
        << kstring[ if_ (_r1 > 1) [_1 = "),("] .else_[_1 = "("]]
        | &uint_(mapnik::SEG_LINETO)
        << lit(',') << eps[_a = _x(_val)][_b = _y(_val)] )
        << coordinate[_1 = _a] << lit(' ') << coordinate[_1 = _b]
        ;

    coords2 %= *polygon_coord(_a,_b,_c)
        ;

    // Point list: vertices joined by ','.
    coords = point_coord % lit(',')
        ;
}
namespace boost { namespace hana {
    //! Tag for a mathematical function carrying an explicit domain and
    //! codomain.  Functions are Comparable (see `equal_impl` below).
    struct Function {
        struct hana {
            struct operators : boost::hana::operators::of<Comparable> { };
        };
    };

    //! Holder for a function's domain, codomain and definition.  Invoking
    //! it first checks the argument belongs to the domain and throws
    //! `std::domain_error` otherwise, then delegates to the definition.
    template <typename Domain, typename Codomain, typename F, typename = operators::adl>
    struct function_type {
        struct hana { using datatype = Function; };
        Domain dom;
        Codomain cod;
        F def;

        friend constexpr auto domain(function_type f) { return f.dom; }
        friend constexpr auto codomain(function_type f) { return f.cod; }

        template <typename X>
        constexpr auto operator()(X x) const {
            if (!elem(domain(*this), x))
                throw std::domain_error{"use of a hana::function with an argument out of the domain"};
            return def(x);
        }
    };

    //! Curried constructor: `function(dom, cod)(definition)` yields a
    //! `function_type` bundling the three pieces.
    BOOST_HANA_CONSTEXPR_LAMBDA auto function = [](auto domain, auto codomain) {
        return [=](auto definition) {
            return function_type<decltype(domain), decltype(codomain), decltype(definition)>{
                domain, codomain, definition
            };
        };
    };

    //! Image of `f`: applies `f` to every element of its domain and folds
    //! the results into a Tuple, keeping only the first occurrence of each
    //! distinct value.
    BOOST_HANA_CONSTEXPR_LAMBDA auto frange = [](auto f) {
        // Note: that would be better handled by a set data structure, but
        // whatever for now.
        return foldl(transform(domain(f), f), make<Tuple>(), [](auto xs, auto x) {
            return if_(elem(xs, x), xs, prepend(x, xs));
        });
    };

    //! Equality of functions: same domain, and pointwise agreement on that
    //! domain (codomains are not compared here).
    template <>
    struct equal_impl<Function, Function> {
        template <typename F, typename G>
        static constexpr auto apply(F f, G g) {
            return domain(f) == domain(g) && all_of(domain(f), demux(equal)(f, g));
        }
    };
}} // end namespace boost::hana
// Builds a compile-time matrix with int_c<1> wherever the row index equals
// the column index (n == m) and int_c<0> elsewhere — i.e. an identity
// matrix of R rows by C columns (R and C are template parameters of the
// enclosing scope; TODO confirm orientation against cppcon::matrix).
static constexpr decltype(auto) apply() {
    // Outer unpack expands the row indices 0..R-1 into the pack `n...`,
    // the inner one expands the column indices 0..C-1 into `m...`.
    return unpack(range_c<unsigned, 0, R>, [](auto ...n) {
        return unpack(range_c<unsigned, 0, C>, [=](auto ...m) {
            // One row for a fixed `n`: the `...` expands over the captured
            // column pack `m`, not over `n`.
            auto row = [=](auto n) {
                return cppcon::row(if_(n == m, int_c<1>, int_c<0>)...);
            };
            // Expand over the row pack to assemble the full matrix.
            return cppcon::matrix(row(n)...);
        });
    });
}
// Karma grammar serialising a mapnik geometry as a JSON-style coordinate
// array: points as "[x,y]", linestrings as "[[x,y],...]" and polygons as
// "[[[x,y],...],[...]]" (one inner array per ring).
geometry_generator_grammar()
    : geometry_generator_grammar::base_type(coordinates)
{
    // Karma terminals used by the rules below.
    boost::spirit::karma::uint_type uint_;
    boost::spirit::bool_type bool_;  // NOTE(review): declared but unused in the rules visible here — confirm.
    boost::spirit::karma::_val_type _val;
    boost::spirit::karma::_1_type _1;
    boost::spirit::karma::lit_type lit;
    boost::spirit::karma::_a_type _a;
    boost::spirit::karma::_r1_type _r1;
    boost::spirit::karma::eps_type eps;
    boost::spirit::karma::string_type kstring;

    // Dispatch on the geometry's runtime type tag.
    coordinates = point | linestring | polygon
        ;

    // &uint_(...) is a zero-width guard: succeeds only for the matching
    // geometry type, emits nothing.
    point = &uint_(mapnik::geometry_type::types::Point)[_1 = _type(_val)]
        << point_coord [_1 = _first(_val)]
        ;

    linestring = &uint_(mapnik::geometry_type::types::LineString)[_1 = _type(_val)]
        << lit('[') << coords << lit(']')
        ;

    // Polygon closes with "]]": the matching inner '[' is written by the
    // first SEG_MOVETO in polygon_coord.
    polygon = &uint_(mapnik::geometry_type::types::Polygon)[_1 = _type(_val)]
        << lit('[') << coords2 << lit("]]")
        ;

    // A single vertex as "[x,y]".
    point_coord = &uint_ << lit('[') << coord_type << lit(',') << coord_type << lit(']')
        ;

    // One polygon vertex.  _r1 counts rings: a SEG_MOVETO after the first
    // ring emits "],[" to close the previous ring and open the next,
    // while a SEG_LINETO only separates vertices with ','.
    polygon_coord %= ( &uint_(mapnik::SEG_MOVETO)
        << eps[_r1 += 1]
        << kstring[ if_ (_r1 > 1) [_1 = "],["] .else_[_1 = '[' ]]
        | &uint_(mapnik::SEG_LINETO)
        << lit(','))
        << lit('[') << coord_type << lit(',') << coord_type << lit(']')
        ;

    coords2 %= *polygon_coord(_a)
        ;

    // Point list: vertices joined by ','.
    coords = point_coord % lit(',')
        ;
}
/* Returns index if a sentence matches, 0 otherwise */
int ifladder(int index)
{
    /* Local var blockLine and passing the error message implicitly declares a backtrace. (Maybe include an indentation level value somewhere?)*/
    int blockLine = curLine;

    /* The assignments inside the conditions are intentional: each
     * sub-parser presumably returns the advanced token index on success
     * and 0 on failure, so a 0 short-circuits the ladder — TODO confirm
     * against if_/elseifs/else_. */
    if(index = if_(index))
    {
        if(index = elseifs(index))
        {
            if(index = else_(index))
                return index;
        }
    }

    /* Reached only when the full if / elseif / else ladder failed to
     * parse; report against the line where the block started, then return
     * whatever index is left (0 after a failed stage). */
    error(IFLADDER, blockLine);
    return index;
}
void lex_prop_list() { lexertl::rules rules_; lexertl::state_machine state_machine_; std::ifstream if_("PropList.txt"); lexertl::stream_shared_iterator iter_(if_); lexertl::stream_shared_iterator end_; lexertl::match_results<lexertl::stream_shared_iterator> results_(iter_, end_); enum {eRange = 1, eName, eShortName}; rules_.push_state("RANGE"); rules_.push_state("WS"); rules_.push_state("NAME"); rules_.push_state("SHORT_NAME"); rules_.push_state("FINISH"); rules_.push("^#.*", rules_.skip()); rules_.push("\n", rules_.skip()); rules_.push("INITIAL", "^[0-9A-F]+(\\.\\.[0-9A-F]+)?", eRange, "RANGE"); rules_.push("RANGE", " *; ", rules_.skip(), "NAME"); rules_.push("NAME", "[A-Z][a-zA-Z_]+", eName, "WS"); rules_.push("WS", " # ", rules_.skip(), "SHORT_NAME"); rules_.push("SHORT_NAME", "[A-Z][a-z&]", eShortName, "FINISH"); rules_.push("FINISH", ".*\n", rules_.skip(), "INITIAL"); lexertl::generator::build(rules_, state_machine_); do { lexertl::lookup(state_machine_, results_); std::cout << "Id: " << results_.id << ", Token: '" << std::string(results_.start, results_.end) << "'\n"; if (results_.id > eShortName) { // int i = 0; } } while (results_.id != 0); }
/* Parses one statement.  Dispatches on the lookahead token (NO_CONSUME
 * peeks without advancing the token stream) to the matching sub-parser and
 * stores the result in the corresponding stmt_node field.  Returns the
 * populated node, or frees it and returns NULL when nothing matched. */
static stmt_node *statement(void){
    stmt_node *stmtn = newStmtNode();

    /* Assignment statements begin with an identifier (or are an empty
     * ';' statement). */
    if(match(TOKEN_IDENTIFIER, NO_CONSUME) || match(TOKEN_SCOL, NO_CONSUME)){
        if((stmtn->assn = assignment()) != NULL)
            return stmtn;
    }
    else if(match(TOKEN_TYPEKEY, NO_CONSUME)){
        if((stmtn->decn = declaration()) != NULL)
            return stmtn;
    }
    else if(match(TOKEN_IFKEY, NO_CONSUME)){
        if((stmtn->ifn = if_()) != NULL)
            return stmtn;
    }
    else if(match(TOKEN_WHILEKEY,NO_CONSUME)){
        if((stmtn->whilen = while_()) != NULL)
            return stmtn;
    }
    else if(match(TOKEN_FORKEY, NO_CONSUME)){
        if((stmtn->forn = for_()) != NULL)
            return stmtn;
    }
    /* Compound statement: '{' statement_list '}'. */
    else if((stmtn->lCur = match(TOKEN_LCUR, CONSUME)) != NULL){
        /* NOTE(review): compared against `error`, not NULL — presumably a
         * dedicated failure sentinel distinct from an empty list; confirm
         * against statement_list(). */
        if((stmtn->sln = statement_list()) != error)
            if((stmtn->rCur = match(TOKEN_RCUR, CONSUME)) != NULL)
                return stmtn;
    }
    else if((stmtn->ins = match(TOKEN_INSTRUCTION, CONSUME)) != NULL)
        return stmtn;

    /* No production matched, or one failed part-way through. */
    freeStmt(stmtn);
    return NULL;
}
//! Returns the greater of `x` and `y`; when neither is less than the
//! other, `x` is returned (same tie behaviour as the original).
static constexpr auto max_impl(X x, Y y) {
    auto y_wins = less(x, y);
    return if_(y_wins, y, x);
}
//! Returns the lesser of `x` and `y`; when neither is less than the
//! other, `y` is returned (same tie behaviour as the original).
static constexpr auto min_impl(X x, Y y) {
    auto x_wins = less(x, y);
    return if_(x_wins, x, y);
}
//! Searches `set` for an element satisfying `p`.  The candidate returned
//! by `find` is re-tested against the predicate to decide between
//! `just(candidate)` and `nothing`.
static constexpr auto apply(Set set, Pred p) {
    auto candidate = set.find(p);
    auto satisfied = p(candidate);
    return if_(satisfied, hana::just(candidate), hana::nothing);
}
void lex_unicode_data() { clock_t started_ = ::clock(); lexertl::rules rules_; lexertl::state_machine state_machine_; lexertl::memory_file if_("UnicodeData.txt"); const char *start_ = if_.data(); const char *end_ = start_ + if_.size(); lexertl::cmatch results_(start_, end_); enum {eNumber = 1, eName}; std::size_t num_ = 0; std::map<std::string, lexertl::basic_string_token<std::size_t> > map_; rules_.push_state("LONG_NAME"); rules_.push_state("SHORT_NAME"); rules_.push_state("FINISH"); rules_.push("INITIAL", "^[A-F0-9]+", eNumber, "LONG_NAME"); rules_.push("LONG_NAME", ";[^;]+;", rules_.skip(), "SHORT_NAME"); rules_.push("SHORT_NAME", "[A-Z][a-z]?", eName, "FINISH"); rules_.push("FINISH", ".*\n", rules_.skip(), "INITIAL"); lexertl::generator::build(rules_, state_machine_); do { lexertl::lookup(state_machine_, results_); if (results_.id == eNumber) { num_ = 0; for (;;) { if (*results_.start >= '0' && *results_.start <= '9') { num_ <<= 4; num_ |= *results_.start++ - '0'; } else if (*results_.start >= 'A' && *results_.start <= 'F') { num_ <<= 4; num_ |= *results_.start++ - 'A' + 10; } else { break; } } // ::sscanf(&*results_.start, "%x", &num_); // Too slow! } else if (results_.id == eName) { const std::string name_(results_.start, results_.end); map_[name_].insert(lexertl::basic_string_token<std::size_t>::range (num_, num_)); } } while (results_.id != 0); clock_t finished_ = ::clock(); double seconds_ = static_cast<double> (finished_ - started_) / CLOCKS_PER_SEC; std::cout << seconds_ << "\n"; }
// Reads UnicodeData.txt and prints "(0xCODE, 0xMAPPING), " pairs — three
// per output line — for every code point in category Ll or Lu (lower/upper
// case letter) that has a case mapping in the relevant column; the output
// is ready to paste into a tuple-style table.
void case_mapping()
{
    lexertl::rules rules_;
    lexertl::state_machine sm_;
    std::ifstream if_("UnicodeData.txt");
    lexertl::stream_shared_iterator iter_(if_);
    lexertl::stream_shared_iterator end_;
    lexertl::match_results<lexertl::stream_shared_iterator> results_(iter_, end_);
    enum e_Token {eEOF, eCodeValue, eName, eLl, eLu, eNeither, eMapping, eEmpty};
    e_Token eToken = eEOF;
    std::string code_;
    std::string mapping_;
    int count_ = 0;  // pairs printed on the current output line

    // Lexer states follow the ';'-separated fields of a UnicodeData line.
    rules_.push_state("NAME");
    rules_.push_state("TYPE");
    rules_.push_state("Ll");
    rules_.push_state("Lu");
    rules_.push_state("MAPPING");
    rules_.push_state("END");
    rules_.push("INITIAL", "^[0-9A-F]{4,6};", eCodeValue, "NAME");
    rules_.push("NAME", "[^;]*;", sm_.skip(), "TYPE");
    // Ll rows skip 9 further fields before the mapping column, Lu rows
    // skip 10 — the column of interest differs by one between the two
    // categories (presumably uppercase vs lowercase mapping; verify
    // against the UnicodeData field layout).
    rules_.push("TYPE", "Ll;", eLl, "Ll");
    rules_.push("Ll", "([^;]*;){9}", sm_.skip(), "MAPPING");
    rules_.push("TYPE", "Lu;", eLu, "Lu");
    rules_.push("Lu", "([^;]*;){10}", sm_.skip(), "MAPPING");
    rules_.push("TYPE", "[^;]*;", eNeither, "END");
    rules_.push("MAPPING", ";", eEmpty, "END");
    rules_.push("MAPPING", "[0-9A-F]{4,6};", eMapping, "END");
    rules_.push("END", "[^\n]*\n", sm_.skip(), "INITIAL");
    lexertl::generator::build(rules_, sm_);

    do
    {
        // Each line must begin with a code-value token (or EOF).
        lexertl::lookup(sm_, results_);
        eToken = static_cast<e_Token>(results_.id);

        if (eToken == eEOF)
        {
            break;
        }
        else if (eToken != eCodeValue)
        {
            throw std::runtime_error("Syntax error");
        }

        code_.assign(results_.start, results_.end);

        // Second token classifies the general category: Ll, Lu or neither.
        lexertl::lookup(sm_, results_);
        eToken = static_cast<e_Token>(results_.id);

        if (eToken != eLl && eToken != eLu && eToken != eNeither)
        {
            throw std::runtime_error("Syntax error");
        }

        if (eToken != eNeither)
        {
            // Third token is the mapping column: either a hex value or
            // empty (eEmpty), in which case the row is silently skipped.
            lexertl::lookup(sm_, results_);
            eToken = static_cast<e_Token>(results_.id);

            if (eToken == eMapping)
            {
                mapping_.assign(results_.start, results_.end);
                // substr(0, size() - 1) drops the trailing ';' that the
                // token patterns capture.
                std::cout << "(0x" << code_.substr(0, code_.size() - 1) <<
                    ", " << "0x" <<
                    mapping_.substr(0, mapping_.size() - 1) << "), ";
                code_.clear();
                mapping_.clear();
                ++count_;

                // Start a new output line after every third pair.
                if (count_ > 2)
                {
                    count_ = 0;
                    std::cout << '\n';
                }
            }
        }
    } while (results_.id != 0);
}
namespace boost { namespace hana {
    //! @ingroup group-datatypes
    //! A `Monad` for searching infinite sets in finite time.
    //!
    //! Taken from http://math.andrej.com/2008/11/21/a-haskell-monad-for-infinite-search-in-finite-time/.
    struct SearchableSet {
        struct hana {
            struct operators : boost::hana::operators::of<Comparable, Monad> { };
        };
    };

    //! A set is represented purely by its `find` function, which yields an
    //! element for a given predicate; `find_impl` below re-checks the
    //! predicate on the result, so `find` may return a non-satisfying
    //! element when no match exists.
    template <typename Find, typename = operators::adl>
    struct _sset {
        Find find;
        struct hana { using datatype = SearchableSet; };
    };

    //! Wraps a find function into a SearchableSet.
    BOOST_HANA_CONSTEXPR_LAMBDA auto searchable_set = [](auto pred) {
        return _sset<decltype(pred)>{pred};
    };

    //! The one-element set {x}: every search returns x.
    BOOST_HANA_CONSTEXPR_LAMBDA auto singleton = [](auto x) {
        return searchable_set([=](auto p) { return x; });
    };

    //! The set {x, y}: prefers x when it satisfies p, else yields y.
    BOOST_HANA_CONSTEXPR_LAMBDA auto doubleton = [](auto x, auto y) {
        return searchable_set([=](auto p) { return if_(p(x), x, y); });
    };

    //! Union of two sets, expressed as flatten of the two-set set.
    BOOST_HANA_CONSTEXPR_LAMBDA auto union_ = [](auto xs, auto ys) {
        return flatten(doubleton(xs, ys));
    };

    //////////////////////////////////////////////////////////////////////////
    // Comparable
    //////////////////////////////////////////////////////////////////////////
    //! Extensional equality: mutual inclusion of the two sets.
    template <>
    struct equal_impl<SearchableSet, SearchableSet> {
        template <typename Xs, typename Ys>
        static constexpr auto apply(Xs xs, Ys ys) {
            return and_(subset(xs, ys), subset(ys, xs));
        }
    };

    //////////////////////////////////////////////////////////////////////////
    // Functor
    //////////////////////////////////////////////////////////////////////////
    //! To search f(set) with predicate q, search the original set with
    //! x -> q(f(x)) and apply f to the witness found.
    template <>
    struct transform_impl<SearchableSet> {
        template <typename Set, typename F>
        static constexpr auto apply(Set set, F f) {
            return searchable_set([=](auto q) {
                return f(set.find([=](auto x) { return q(f(x)); }));
            });
        }
    };

    //////////////////////////////////////////////////////////////////////////
    // Applicative
    //////////////////////////////////////////////////////////////////////////
    //! lift(x) is just the singleton set {x}.
    template <>
    struct lift_impl<SearchableSet> {
        template <typename X>
        static constexpr auto apply(X x) { return singleton(x); }
    };

    //! ap(fset, set): apply every function in fset across set, then
    //! flatten the resulting set of sets.
    template <>
    struct ap_impl<SearchableSet> {
        template <typename F, typename Set>
        static constexpr auto apply(F fset, Set set) {
            return flatten(transform(fset, [=](auto f) {
                return transform(set, f);
            }));
        }
    };

    //////////////////////////////////////////////////////////////////////////
    // Monad
    //////////////////////////////////////////////////////////////////////////
    //! flatten(set of sets): first locate an inner set containing a match
    //! for p, then search within that inner set.
    template <>
    struct flatten_impl<SearchableSet> {
        template <typename Set>
        static constexpr auto apply(Set set) {
            return searchable_set([=](auto p) {
                return set.find([=](auto set) { return any_of(set, p); }).find(p);
            });
        }
    };

    //////////////////////////////////////////////////////////////////////////
    // Searchable
    //////////////////////////////////////////////////////////////////////////
    //! find: re-test the candidate returned by the raw `find`, since it
    //! yields some element even when nothing satisfies the predicate.
    template <>
    struct find_impl<SearchableSet> {
        template <typename Set, typename Pred>
        static constexpr auto apply(Set set, Pred p) {
            auto x = set.find(p);
            return if_(p(x), just(x), nothing);
        }
    };

    //! any_of: the set contains a match iff the found candidate satisfies p.
    template <>
    struct any_of_impl<SearchableSet> {
        template <typename Set, typename Pred>
        static constexpr auto apply(Set set, Pred p) {
            return p(set.find(p));
        }
    };
}} // end namespace boost::hana
//! Drops the first `n` elements of range `r`: advances the lower bound by
//! `n`, clamping at `r.to` when `n` exceeds the range's length.
static constexpr auto drop_impl(N n, R r) {
    auto length = minus(r.to, r.from);
    auto new_from = if_(greater(n, length), r.to, plus(r.from, n));
    return range(new_from, r.to);
}