// Crawler response callback: logs the worker's address, the numeric HTTP
// status, the fully-qualified URL, and the payload size, then returns true
// so the crawl continues.
bool onResponse(Crawler::Worker& worker, Crawler::Website& website, Crawler::Link& link, sf::Http& client, sf::Http::Response& response)
{
    const auto statusCode = static_cast<std::size_t>(response.getStatus());
    const std::string origin = website.getScheme() + "://" + website.getAuthority();

    std::cout << &worker << std::dec << " "
              << statusCode << " "
              << origin << link.toString() << " "
              << response.getBody().size() << " Bytes\n";

    return true;
}
// Send a request for `url` through the cached HTTP client and store the reply
// in `resp`. Bails out immediately when the server connection was never
// established (serverGood is false). Returns true only on a 200 OK reply.
bool MapSite::sendRequest(const std::string& url, sf::Http::Response& resp)
{
    if (!serverGood)
        return false;

    resp = server.sendRequest(sf::Http::Request{url});
    return resp.getStatus() == sf::Http::Response::Status::Ok;
}
bool MapSite::getPage(const std::string& url, sf::Http::Response& resp) { sf::Http::Request req(url, sf::Http::Request::Method::Get); resp = server.sendRequest(req); sf::Http::Response::Status status = resp.getStatus(); if(status != sf::Http::Response::Ok) { std::cout << "[ERROR]: Bad response from server: " << status << " on request for: " << url << '\n'; return false; } return true; }
// Parse each line of the response body through getMap() and collect the
// resulting (name, value) pairs into a StrMap.
//
// `dir`  — forwarded to getMap() for each line.
// `cond` — optional substring filter on the pair's name: when non-empty, only
//          names containing `cond` are kept; when empty, every valid pair is
//          kept. (The previous `!cond.empty() && ...` test silently dropped
//          ALL entries whenever no filter was supplied — an empty filter now
//          matches everything.)
MapSite::StrMap MapSite::getMapsFromPage(const sf::Http::Response& resp, const std::string& dir, const std::string& cond)
{
    StrMap maps;
    std::stringstream ss(resp.getBody());

    std::string mapLine;
    while (std::getline(ss, mapLine))
    {
        MapPair pair = getMap(mapLine, dir);

        // Skip lines getMap() could not parse into a complete pair.
        if (pair.first.empty() || pair.second.empty())
            continue;

        if (cond.empty() || pair.first.find(cond) != std::string::npos)
            maps[pair.first] = pair.second;
    }

    // Plain return: NRVO/implicit move applies; `return std::move(maps)`
    // would actively inhibit copy elision.
    return maps;
}