/* Return the contents of the \toks register |j| as a string; the empty string when the register is unset. */
str_number get_tex_toks_register(int j)
{
    str_number s = get_nullstr();
    if (toks(j) != null) {
        s = tokens_to_string(toks(j));
    }
    return s;
}
CPerftSuite::CPerftSuite(void)
{
    std::ifstream params;
    params.open("perftsuite.epd", std::ifstream::in);
    if (params.is_open()) {
        std::string line;
        int square = chess::A8;
        while (getline(params, line)) {
            if (line.empty() || line[0] == '#')
                continue;
            boost::char_separator<char> sep(";");
            boost::char_separator<char> sep2(" ");
            boost::tokenizer<boost::char_separator<char>> toks(line, sep);
            // 1st tok is fen
            auto tok = toks.begin();
            const std::string fen = *tok++;
            CBoard b;
            b.set_fen_position(fen);
            std::cout << b.fen() << " ";
            int depth = 1;
            while (tok != toks.end()) {
                boost::tokenizer<boost::char_separator<char>> toks2(*tok, sep2);
                auto tok2 = toks2.begin();
                tok2++; // skip the depth label, keep the expected node count
                uint64_t i = std::stoull(*tok2);
                if (i > 100000000)
                    break; // takes too long
                CPerft p(b);
                auto res = p.DoPerft(depth);
                if (res.nodes == i) {
                    std::cout << ".";
                } else {
                    std::cout << "WRONG (depth " << depth << ": " << res.nodes
                              << " != expected " << i << ")" << std::endl;
                    ASSERT(res.nodes == i);
                    break;
                }
                depth++;
                tok++;
            }
        }
        std::cout << "completed";
    }
}
void DirectoryList::SplitPath(const fs::Path& path, fs::Path& parent,
                              std::queue<std::string>& masks)
{
    typedef boost::tokenizer<boost::char_separator<char>> tokenizer;
    static const char* wildcardChars = "*?[]";

    if (path.Absolute())
        parent = "/";

    bool foundWildcards = false;
    boost::char_separator<char> sep("/");
    // Keep the string alive for the loop: boost::tokenizer stores iterators
    // into the container it is given, so a temporary would dangle.
    const std::string pathStr(path);
    tokenizer toks(pathStr, sep);
    for (const auto& token : toks) {
        if (foundWildcards || token.find_first_of(wildcardChars) != std::string::npos) {
            masks.push(token);
            foundWildcards = true;
        } else {
            parent /= token;
        }
    }
}
void WebController::ListDir(struct json_token *paramtoken)
{
    char *dir = NULL;
    toks(paramtoken, &dir, 0);
    if (dir) {
        DIR *dp;
        struct dirent *ep;
        int w = 0, first = 1;
        dp = opendir(dir);
        if (dp != NULL) {
            w += snprintf(gp_response + w, MAX_LINE - w, "[");
            while ((ep = readdir(dp))) {
                //printf("%d %s\n", ep->d_type, ep->d_name);
                if (first) {
                    first = 0;
                    w += json_emit(gp_response + w, MAX_LINE - w, "{s: s, s: i}",
                                   "name", ep->d_name, "type", ep->d_type);
                } else {
                    w += json_emit(gp_response + w, MAX_LINE - w, ",{s: s, s: i}",
                                   "name", ep->d_name, "type", ep->d_type);
                }
            }
            w += snprintf(gp_response + w, MAX_LINE - w, "]");
            (void) closedir(dp);
        } else {
            this->DoErrorMessage(strerror(errno));
        }
    }
    free(dir);
}
void CMD_SetVariable(QTextStream& cout, const char* args)
{
    Config& cfg = Config::Instance();

    COM::ItTokenizer toks(args, " \t");
    COM::ItTokenizer::Token tok;

    tok = toks.NextToken();
    const std::string name = std::string(tok.start, toks.Length(tok));

    Config::variable_t var = cfg.FirstVariable();
    while (var && name != cfg.Name(var)) {
        var = cfg.NextVariable(var);
    }

    if (var) {
        tok = toks.NextToken();
        const std::string valueStr = std::string(tok.start);
        if (!cfg.Parse(var, valueStr)) {
            cout << "error parsing \"" << valueStr.c_str() << "\" "
                 << "as type \"" << cfg.TypeString(var) << "\"" << "\n";
        }
    } else {
        cout << "unknown variable \"" << name.c_str() << "\"\n";
    }
}
int WizAppData::ExplodeList(const wxString str, char sep, wxArrayString& items)
{
    items.Clear();
    wxStringTokenizer toks(str, wxString(sep));
    int rv = toks.CountTokens();
    while (toks.HasMoreTokens()) {
        wxString tok = toks.GetNextToken();
        items.Add(tok);
    }
    return rv;
}
void multiCompare3() const {
    // Original pattern that failed: "return|(|&&|%oror% %var% &&|%oror%|==|!=|<=|>=|<|>|-|%or% %var% )|&&|%oror%|;"
    // Code snippet that failed: "return lv@86 |= rv@87 ;"

    // Note: Also test "reverse" alternative pattern, two different code paths to handle it
    givenACodeSampleToTokenize toks("return a |= b ;", true);
    ASSERT_EQUALS(false, Token::Match(toks.tokens(), "return %var% xyz|%or% %var% ;"));
    ASSERT_EQUALS(false, Token::Match(toks.tokens(), "return %var% %or%|xyz %var% ;"));

    givenACodeSampleToTokenize toks2("return a | b ;", true);
    ASSERT_EQUALS(true, Token::Match(toks2.tokens(), "return %var% xyz|%or% %var% ;"));
    ASSERT_EQUALS(true, Token::Match(toks2.tokens(), "return %var% %or%|xyz %var% ;"));

    givenACodeSampleToTokenize toks3("return a || b ;", true);
    ASSERT_EQUALS(false, Token::Match(toks3.tokens(), "return %var% xyz|%or% %var% ;"));
    ASSERT_EQUALS(false, Token::Match(toks3.tokens(), "return %var% %or%|xyz %var% ;"));
    ASSERT_EQUALS(true, Token::Match(toks3.tokens(), "return %var% xyz|%oror% %var% ;"));
    ASSERT_EQUALS(true, Token::Match(toks3.tokens(), "return %var% %oror%|xyz %var% ;"));

    givenACodeSampleToTokenize toks4("a % b ;", true);
    ASSERT_EQUALS(true, Token::Match(toks4.tokens(), "%var% >>|<<|&|%or%|^|% %var% ;"));
    ASSERT_EQUALS(true, Token::Match(toks4.tokens(), "%var% %|>>|<<|&|%or%|^ %var% ;"));
    ASSERT_EQUALS(true, Token::Match(toks4.tokens(), "%var% >>|<<|&|%or%|%|^ %var% ;"));

    // %var%|%num% support
    givenACodeSampleToTokenize num("100", true);
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%num%|%var%"));
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%var%|%num%"));
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%var%|%num%|%bool%"));
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%var%|%bool%|%num%"));
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%var%|%bool%|%str%|%num%"));
    ASSERT_EQUALS(false, Token::Match(num.tokens(), "%bool%|%var%"));
    ASSERT_EQUALS(false, Token::Match(num.tokens(), "%type%|%bool%|%char%"));
    ASSERT_EQUALS(true, Token::Match(num.tokens(), "%type%|%bool%|100"));

    givenACodeSampleToTokenize numparen("( 100 )", true);
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| %num%|%var% )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| %var%|%num% )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| %var%|%num%|%bool% )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| %var%|%bool%|%num% )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| %var%|%bool%|%str%|%num% )|"));
    ASSERT_EQUALS(false, Token::Match(numparen.tokens(), "(| %bool%|%var% )|"));

    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %num%|%var%| )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %var%|%num%| )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %var%|%num%|%bool%| )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %var%|%bool%|%num%| )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %var%|%bool%|%str%|%num%| )|"));
    ASSERT_EQUALS(true, Token::Match(numparen.tokens(), "(| 100 %bool%|%var%| )|"));
}
int WebController::HandleJsonRequest(struct mg_connection *conn, const char *object, const char *func)
{
    json_token *data = NULL;
    json_token *paramtoken;
    char *content;
    int params;
    int ret;

    content = strndup(conn->content, conn->content_len);
    debug("%s", content);
    free(content);

    data = parse_json2(conn->content, conn->content_len);
    if (data == NULL) {
        debug("parsing api request failed");
        return MG_FALSE;
    }

    params = 0;
    paramtoken = find_json_token(data, "params");
    if (paramtoken != NULL) {
        if (paramtoken->type == JSON_TYPE_OBJECT) {
            params = 1;
        } else if (paramtoken->type == JSON_TYPE_ARRAY) {
            params = paramtoken->num_desc;
        } else {
            params = 1;
        }
    }

    //Reset global response string
    gp_response[0] = '\0';
    //gp_response = (char*) malloc(sizeof(char) * ( MAX_LINE));

    ret = 0;
    if (!strcmp("kmx", object)) {
        if (FUNC_SIGP("loadGlobalFile", 2)) {
            int fileType = -1;
            char *file = NULL;
            toki(paramtoken + 1, &fileType);
            toks(paramtoken + 2, &file, 0);
            if (file != NULL) {
                if (strlen(file) > 0) {
                    if (fileType == 1) {
                        this->LoadGcode(file);
                    } else if (fileType == 2) {
                        this->LoadMachineConfiguration(file);
                    }
                }
                free(file);
            }
        } else if (FUNC_SIGP("listDir", 1)) {
            ListDir(paramtoken);
        } else if (FUNC_SIGP("jog", 2)) {
            int axis;
            int speed;
            toki(paramtoken + 1, &axis);
            toki(paramtoken + 2, &speed);
            this->Jog(axis, speed);
        } else if (FUNC_SIGP("onFeedhold", 0)) {
            this->Feedhold();
        } else if (FUNC_SIGP("onSimulate", 0)) {
            this->Simulate();
        } else if (FUNC_SIGP("onEmergencyStop", 0)) {
            this->EmergencyStop();
        } else if (FUNC_SIGP("onHalt", 0)) {
            this->Halt();
        } else if (FUNC_SIGP("onStep", 0)) {
            this->Step();
        } else if (FUNC_SIGP("onReset", 0)) {
            this->Reset();
        } else if (FUNC_SIGP("onCycleStart", 0)) {
            this->CycleStart();
        } else if (FUNC_SIGP("onUpdateMotionParams", 0)) {
            this->UpdateMotionParams();
        } else if (FUNC_SIGP("onInvokeAction", 1)) {
            BOOL FlushBeforeUnbufferedOperation = FALSE;
            int action;
            toki(paramtoken + 1, &action);
            ret = this->InvokeAction(action, FlushBeforeUnbufferedOperation);
        } else if (FUNC_SIGP("onDoErrorMessage", 1)) {
            char *p1 = NULL;
            toks(paramtoken, &p1, 0);
            this->DoErrorMessage(p1);
            EMIT_RESPONSE("[S]", p1);
            free(p1);
        } else {
            log_info("Function request is not part of API %s", func);
        }
    } else {
        log_info("API not implemented %s", object);
    }

    mg_send_header(conn, "Content-Type", "application/json");
    //Need to send some data or connection will not be closed
    if (gp_response[0] == '\0') {
        EMIT_RESPONSE("N");
    }
    mg_printf_data(conn, "%s", gp_response);
    //free(gp_response);
    free(data);
    return MG_TRUE;
}
void multiCompare2() const { // #3294
    // Original pattern that failed: [[,(=<>+-*|&^] %num% [+-*/] %num% ]|,|)|;|=|%op%
    givenACodeSampleToTokenize toks("a == 1", true);
    ASSERT_EQUALS(true, Token::Match(toks.tokens(), "a =|%op%"));
}