QueryData generate(QueryContext& context) override {
  // Populate the cache on first call; subsequent calls observe the hit and
  // return the alternate payload so tests can detect caching behavior.
  if (!context.isCached("awesome_data")) {
    Row row = {{"data", "awesome_data"}};
    context.setCache("awesome_data", row);
    return {row};
  }
  // Cache hit: there is already an entry for awesome data.
  return {{{"data", "more_awesome_data"}}};
}
void drive_Schema(QueryContext& _query) {
  // Exercise schema lookups and class-relation queries on the test context.
  CIMName baseClass("CQL_TestElement");
  CIMClass resolvedClass = _query.getClass(baseClass);
  PEGASUS_TEST_ASSERT(resolvedClass.getClassName() == baseClass);

  // The base class is expected to have exactly two subclass names.
  Array<CIMName> subNames = _query.enumerateClassNames(baseClass);
  PEGASUS_TEST_ASSERT(subNames.size() == 2);

  // Subclass relation is directional.
  CIMName derivedClass("CQL_TestPropertyTypes");
  PEGASUS_TEST_ASSERT(_query.isSubClass(baseClass, derivedClass));
  PEGASUS_TEST_ASSERT(!_query.isSubClass(derivedClass, baseClass));

  // getClassRelation distinguishes same/sub/super relations.
  PEGASUS_TEST_ASSERT(
      _query.getClassRelation(baseClass, baseClass) == QueryContext::SAMECLASS);
  PEGASUS_TEST_ASSERT(
      _query.getClassRelation(baseClass, derivedClass) == QueryContext::SUBCLASS);
  PEGASUS_TEST_ASSERT(
      _query.getClassRelation(derivedClass, baseClass) == QueryContext::SUPERCLASS);

  // A class outside the hierarchy is NOTRELATED in both directions.
  CIMName unrelatedClass("CIM_Process");
  PEGASUS_TEST_ASSERT(
      _query.getClassRelation(baseClass, unrelatedClass) == QueryContext::NOTRELATED);
  PEGASUS_TEST_ASSERT(
      _query.getClassRelation(unrelatedClass, baseClass) == QueryContext::NOTRELATED);
}
QueryData pidsFromContext(const QueryContext& context, bool all) {
  // Resolve the set of process rows targeted by the query:
  //  - explicit pid EQUALS constraints -> one select per constraint value;
  //  - no constraint and !all          -> only the current process;
  //  - no constraint and all           -> every process.
  QueryData procs;
  if (context.hasConstraint("pid", EQUALS)) {
    context.forEachConstraint(
        "pid", EQUALS, ([&procs](const std::string& expr) {
          auto proc = SQL::selectAllFrom("processes", "pid", EQUALS, expr);
          // BUG FIX: previously this inserted `procs` into itself
          // (procs.begin()/procs.end()), discarding the freshly selected
          // rows; append the per-constraint result `proc` instead, matching
          // the sibling usersFromContext().
          procs.insert(procs.end(), proc.begin(), proc.end());
        }));
  } else if (!all) {
    procs = SQL::selectAllFrom(
        "processes", "pid", EQUALS, std::to_string(platformGetPid()));
  } else {
    procs = SQL::selectAllFrom("processes");
  }
  return procs;
}
QueryData usersFromContext(const QueryContext& context, bool all) {
  // Resolve the set of user rows targeted by the query.
  if (!context.hasConstraint("uid", EQUALS)) {
    // No explicit uid predicate: either every user, or just the caller's uid.
    if (all) {
      return SQL::selectAllFrom("users");
    }
    return SQL::selectAllFrom(
        "users", "uid", EQUALS, std::to_string(platformGetUid()));
  }

  // One select per uid EQUALS constraint; accumulate all result rows.
  QueryData users;
  context.forEachConstraint(
      "uid", EQUALS, ([&users](const std::string& expr) {
        auto selected = SQL::selectAllFrom("users", "uid", EQUALS, expr);
        users.insert(users.end(), selected.begin(), selected.end());
      }));
  return users;
}
void SelectStatementRep::setQueryContext(QueryContext& inCtx) {
  // A statement may be bound to a query context exactly once; rebinding is
  // rejected loudly rather than silently replacing the stored clone.
  if (_ctx != NULL) {
    throw QueryException(MessageLoaderParms(
        "QueryCommon.SelectStatementRep.QUERY_CTX_ALREADY_SET",
        "The QueryContext can only be set once on a SelectStatement."));
  }
  // Store a private clone so the statement owns its context.
  _ctx = inCtx.clone();
}
void drive_WhereIds(QueryContext& _query) {
  // Add four distinct chained identifiers (plain, alternate scope, symbolic
  // constant, array index) and re-add the first; the duplicate is ignored.
  CQLChainedIdentifier plainId("fromclass.eo.scope1::prop");
  CQLChainedIdentifier otherScopeId("fromclass.eo.scope2::prop");
  CQLChainedIdentifier symbolicId("fromclass.eo.scope1::prop#'ok'");
  CQLChainedIdentifier indexedId("fromclass.eo.scope1::prop[1]");
  _query.addWhereIdentifier(plainId);
  _query.addWhereIdentifier(otherScopeId);
  _query.addWhereIdentifier(symbolicId);
  _query.addWhereIdentifier(indexedId);
  _query.addWhereIdentifier(plainId); // duplicate, must be ignored

  // Only the four unique identifiers survive, each with three sub-identifiers.
  Array<QueryChainedIdentifier> whereList = _query.getWhereList();
  PEGASUS_TEST_ASSERT(whereList.size() == 4);
  for (Uint32 i = 0; i < 4; i++) {
    PEGASUS_TEST_ASSERT(whereList[i].getSubIdentifiers().size() == 3);
  }
}
void genHashForFile(const std::string& path, const std::string& dir, QueryContext& context, QueryData& results) { // Must provide the path, filename, directory separate from boost path->string // helpers to match any explicit (query-parsed) predicate constraints. Row r; if (context.isCached(path)) { r = context.getCache(path); } else { auto hashes = hashMultiFromFile( HASH_TYPE_MD5 | HASH_TYPE_SHA1 | HASH_TYPE_SHA256, path); r["path"] = path; r["directory"] = dir; r["md5"] = std::move(hashes.md5); r["sha1"] = std::move(hashes.sha1); r["sha256"] = std::move(hashes.sha256); context.setCache(path, r); } results.push_back(r); }
QueryData genOSXPlist(QueryContext& context) {
  QueryData results;

  // Seed the target set with literal path EQUALS constraints, then expand
  // LIKE patterns through the filesystem glob resolver.
  auto plist_paths = context.constraints["path"].getAll(EQUALS);
  context.expandConstraints(
      "path",
      LIKE,
      plist_paths,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> expanded;
        auto status =
            resolveFilePattern(pattern, expanded, GLOB_ALL | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(expanded.begin(), expanded.end());
        }
        return status;
      }));

  for (const auto& path : plist_paths) {
    // Skip paths that are missing or unreadable.
    if (!pathExists(path).ok() || !isReadable(path).ok()) {
      VLOG(1) << "Cannot find/read defaults plist from path: " + path;
      continue;
    }
    pt::ptree tree;
    if (!osquery::parsePlist(path, tree).ok()) {
      VLOG(1) << "Could not parse plist: " + path;
      continue;
    }
    // One row per top-level key; nested values are flattened by the helper.
    for (const auto& item : tree) {
      Row r;
      r["path"] = path;
      r["key"] = item.first;
      r["subkey"] = "";
      genOSXPlistPrefValue(item.second, r, 0, results);
    }
  }
  return results;
}
void expandFSPathConstraints(QueryContext& context,
                             const std::string& path_column_name,
                             std::set<std::string>& paths) {
  // Grow `paths` with every filesystem match of the column's LIKE patterns;
  // patterns that fail to resolve contribute nothing.
  context.expandConstraints(
      path_column_name,
      LIKE,
      paths,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> matches;
        auto status =
            resolveFilePattern(pattern, matches, GLOB_ALL | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(matches.begin(), matches.end());
        }
        return status;
      }));
}
QueryData genFile(QueryContext& context) {
  QueryData results;

  // Resolve "path" constraints: literal EQUALS values, plus LIKE patterns
  // expanded through the glob resolver (files and folders).
  auto paths = context.constraints["path"].getAll(EQUALS);
  context.expandConstraints(
      "path",
      LIKE,
      paths,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> expanded;
        auto status =
            resolveFilePattern(pattern, expanded, GLOB_ALL | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(expanded.begin(), expanded.end());
        }
        return status;
      }));

  // Emit one row per resolved/supplied path.
  for (const auto& path_string : paths) {
    fs::path full_path = path_string;
    genFileInfo(full_path, full_path.parent_path(), "", results);
  }

  // Resolve "directory" constraints the same way, restricted to folders.
  auto directories = context.constraints["directory"].getAll(EQUALS);
  context.expandConstraints(
      "directory",
      LIKE,
      directories,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> expanded;
        auto status = resolveFilePattern(
            pattern, expanded, GLOB_FOLDERS | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(expanded.begin(), expanded.end());
        }
        return status;
      }));

  // For each readable directory, generate info for its direct children.
  for (const auto& directory_string : directories) {
    if (!isReadable(directory_string) || !isDirectory(directory_string)) {
      continue;
    }
    try {
      fs::directory_iterator entry(directory_string), end;
      for (; entry != end; ++entry) {
        genFileInfo(entry->path(), directory_string, "", results);
      }
    } catch (const fs::filesystem_error& e) {
      // Directory vanished or became unreadable mid-iteration; skip it.
      continue;
    }
  }
  return results;
}
SelectStatementRep::SelectStatementRep(String& inQlang,
                                       String& inQuery,
                                       QueryContext& inCtx)
    : _qlang(inQlang),
      _query(inQuery)
{
    // The statement keeps its own clone of the caller's context.
    _ctx = inCtx.clone();
}
// Exercises QueryContext from-list handling end to end:
//  1. inserts four class paths, two with aliases; an alias equal to its own
//     identifier ("D") is dropped, and a full duplicate insert is ignored;
//  2. verifies insertClassPath error cases (empty identifier, identifier that
//     collides with an existing alias, alias that collides with an existing
//     class, alias reuse) each throw QueryParseException — these error
//     inserts must run BEFORE the from-list content checks below, since a
//     successful insert would change the list;
//  3. checks getFromList() contents/order, the rendered getFromString(), and
//     findClass() lookup by alias, by class name, and for a missing name
//     (which returns an identifier whose name equals the empty CIMName()).
void drive_FromList(QueryContext& _query) { Array<String> alias; Array<CQLIdentifier> classes; alias.append("A"); alias.append("B"); alias.append("C"); alias.append("D"); // alias == identifier, ignore alias alias.append("A"); // dup, should not be inserted classes.append(CQLIdentifier("APPLE")); classes.append(CQLIdentifier("BONGO")); classes.append(CQLIdentifier("CLAVE")); classes.append(CQLIdentifier("D")); // alias == identifier, ignore alias classes.append(CQLIdentifier("APPLE")); // dup, should not be inserted for (Uint32 i = 0; i < alias.size(); i++) { _query.insertClassPath(classes[i],alias[i]); } // // Error inserts. Keep before the from list test below // // empty identifier try { _query.insertClassPath(QueryIdentifier()); PEGASUS_TEST_ASSERT(false); } catch (QueryParseException&) { } // identifier is already an alias try { _query.insertClassPath(CQLIdentifier("A")); PEGASUS_TEST_ASSERT(false); } catch (QueryParseException&) { } // alias is already in the from list try { _query.insertClassPath(CQLIdentifier("NEW"),String("BONGO")); PEGASUS_TEST_ASSERT(false); } catch (QueryParseException&) { } // alias is already used for another from list entry try { _query.insertClassPath(CQLIdentifier("NEW"),String("B")); PEGASUS_TEST_ASSERT(false); } catch (QueryParseException&) { } // check the from list Array<QueryIdentifier> fromList = _query.getFromList(); PEGASUS_TEST_ASSERT(fromList.size() == 4); PEGASUS_TEST_ASSERT(fromList[0].getName() == "APPLE"); PEGASUS_TEST_ASSERT(fromList[1].getName() == "BONGO"); PEGASUS_TEST_ASSERT(fromList[2].getName() == "CLAVE"); PEGASUS_TEST_ASSERT(fromList[3].getName() == "D"); // check the from string String fromString = _query.getFromString(); PEGASUS_TEST_ASSERT( fromString == "FROM APPLE AS A , BONGO AS B , CLAVE AS C , D "); // identifier and alias lookup QueryIdentifier lookup = _query.findClass(String("C")); PEGASUS_TEST_ASSERT(lookup.getName() == "CLAVE"); lookup = _query.findClass(String("BONGO")); 
// findClass also resolves plain class names and returns an empty-named
// identifier when the name is neither an alias nor a from-list class.
PEGASUS_TEST_ASSERT(lookup.getName() == "BONGO"); lookup = _query.findClass(String("D")); PEGASUS_TEST_ASSERT(lookup.getName() == "D"); lookup = _query.findClass(String("notthere")); PEGASUS_TEST_ASSERT(lookup.getName() == CIMName()); }
// Validates a serialized WorkOrder proto before it is deserialized into a
// runnable work order. For each known work_order_type it checks that:
//  - every required extension field is present on the proto;
//  - any referenced relation exists in 'catalog_database', and any referenced
//    attribute ids exist on that relation's schema;
//  - every QueryContext index carried by the proto (aggregation state,
//    join hash table, predicate, scalar group, insert destination, sort
//    config, tuple, update group, generator function) is valid in
//    'query_context'.
// Returns false for any missing/inconsistent field and for unknown types.
bool WorkOrderFactory::ProtoIsValid(const serialization::WorkOrder &proto, const CatalogDatabaseLite &catalog_database, const QueryContext &query_context) { switch (proto.work_order_type()) { case serialization::AGGREGATION: { return proto.HasExtension(serialization::AggregationWorkOrder::block_id) && proto.HasExtension(serialization::AggregationWorkOrder::aggr_state_index) && query_context.isValidAggregationStateId( proto.GetExtension(serialization::AggregationWorkOrder::aggr_state_index)); } case serialization::BUILD_HASH: { if (!proto.HasExtension(serialization::BuildHashWorkOrder::relation_id)) { return false; } const relation_id rel_id = proto.GetExtension(serialization::BuildHashWorkOrder::relation_id); if (!catalog_database.hasRelationWithId(rel_id)) { return false; } const CatalogRelationSchema &relation = catalog_database.getRelationSchemaById(rel_id); for (int i = 0; i < proto.ExtensionSize(serialization::BuildHashWorkOrder::join_key_attributes); ++i) { if (!relation.hasAttributeWithId( proto.GetExtension(serialization::BuildHashWorkOrder::join_key_attributes, i))) { return false; } } return proto.HasExtension(serialization::BuildHashWorkOrder::any_join_key_attributes_nullable) && proto.HasExtension(serialization::BuildHashWorkOrder::block_id) && proto.HasExtension(serialization::BuildHashWorkOrder::join_hash_table_index) && query_context.isValidJoinHashTableId( proto.GetExtension(serialization::BuildHashWorkOrder::join_hash_table_index)); } case serialization::DELETE: { return proto.HasExtension(serialization::DeleteWorkOrder::relation_id) && catalog_database.hasRelationWithId( proto.GetExtension(serialization::DeleteWorkOrder::relation_id)) && proto.HasExtension(serialization::DeleteWorkOrder::predicate_index) && query_context.isValidPredicate( proto.GetExtension(serialization::DeleteWorkOrder::predicate_index)) && proto.HasExtension(serialization::DeleteWorkOrder::block_id) && proto.HasExtension(serialization::DeleteWorkOrder::operator_index); } case 
serialization::DESTROY_HASH: { return proto.HasExtension(serialization::DestroyHashWorkOrder::join_hash_table_index) && query_context.isValidJoinHashTableId( proto.GetExtension(serialization::DestroyHashWorkOrder::join_hash_table_index)); } case serialization::DROP_TABLE: { return true; } case serialization::FINALIZE_AGGREGATION: { return proto.HasExtension(serialization::FinalizeAggregationWorkOrder::aggr_state_index) && query_context.isValidAggregationStateId( proto.GetExtension(serialization::FinalizeAggregationWorkOrder::aggr_state_index)) && proto.HasExtension(serialization::FinalizeAggregationWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::FinalizeAggregationWorkOrder::insert_destination_index)); } case serialization::HASH_JOIN: { if (!proto.HasExtension(serialization::HashJoinWorkOrder::hash_join_work_order_type)) { return false; } const auto hash_join_work_order_type = proto.GetExtension(serialization::HashJoinWorkOrder::hash_join_work_order_type); if (!serialization::HashJoinWorkOrder_HashJoinWorkOrderType_IsValid(hash_join_work_order_type)) { return false; } if (!proto.HasExtension(serialization::HashJoinWorkOrder::build_relation_id) || !proto.HasExtension(serialization::HashJoinWorkOrder::probe_relation_id)) { return false; } const relation_id build_relation_id = proto.GetExtension(serialization::HashJoinWorkOrder::build_relation_id); if (!catalog_database.hasRelationWithId(build_relation_id)) { return false; } const relation_id probe_relation_id = proto.GetExtension(serialization::HashJoinWorkOrder::probe_relation_id); if (!catalog_database.hasRelationWithId(probe_relation_id)) { return false; } const CatalogRelationSchema &build_relation = catalog_database.getRelationSchemaById(build_relation_id); const CatalogRelationSchema &probe_relation = catalog_database.getRelationSchemaById(probe_relation_id); for (int i = 0; i < 
// HASH_JOIN (continued): every join key attribute must exist on BOTH the
// build and probe relations; outer joins additionally require
// is_selection_on_build, while all other join kinds require a valid
// residual predicate index.
proto.ExtensionSize(serialization::HashJoinWorkOrder::join_key_attributes); ++i) { const attribute_id attr_id = proto.GetExtension(serialization::HashJoinWorkOrder::join_key_attributes, i); if (!build_relation.hasAttributeWithId(attr_id) || !probe_relation.hasAttributeWithId(attr_id)) { return false; } } if (hash_join_work_order_type == serialization::HashJoinWorkOrder::HASH_OUTER_JOIN) { if (!proto.HasExtension(serialization::HashJoinWorkOrder::is_selection_on_build)) { return false; } } else { if (!proto.HasExtension(serialization::HashJoinWorkOrder::residual_predicate_index) || !query_context.isValidPredicate( proto.GetExtension(serialization::HashJoinWorkOrder::residual_predicate_index))) { return false; } } return proto.HasExtension(serialization::HashJoinWorkOrder::any_join_key_attributes_nullable) && proto.HasExtension(serialization::HashJoinWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::HashJoinWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::HashJoinWorkOrder::join_hash_table_index) && query_context.isValidJoinHashTableId( proto.GetExtension(serialization::HashJoinWorkOrder::join_hash_table_index)) && proto.HasExtension(serialization::HashJoinWorkOrder::selection_index) && query_context.isValidScalarGroupId( proto.GetExtension(serialization::HashJoinWorkOrder::selection_index)) && proto.HasExtension(serialization::HashJoinWorkOrder::block_id); } case serialization::INSERT: { return proto.HasExtension(serialization::InsertWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::InsertWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::InsertWorkOrder::tuple_index) && query_context.isValidTupleId( proto.GetExtension(serialization::InsertWorkOrder::tuple_index)); } case serialization::NESTED_LOOP_JOIN: { if (!proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::left_relation_id) 
|| !proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::right_relation_id)) { return false; } const relation_id left_relation_id = proto.GetExtension(serialization::NestedLoopsJoinWorkOrder::left_relation_id); if (!catalog_database.hasRelationWithId(left_relation_id)) { return false; } const relation_id right_relation_id = proto.GetExtension(serialization::NestedLoopsJoinWorkOrder::right_relation_id); if (!catalog_database.hasRelationWithId(right_relation_id)) { return false; } return proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::left_block_id) && proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::right_block_id) && proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::NestedLoopsJoinWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::join_predicate_index) && query_context.isValidPredicate( proto.GetExtension(serialization::NestedLoopsJoinWorkOrder::join_predicate_index)) && proto.HasExtension(serialization::NestedLoopsJoinWorkOrder::selection_index) && query_context.isValidScalarGroupId( proto.GetExtension(serialization::NestedLoopsJoinWorkOrder::selection_index)); } case serialization::SAMPLE: { return catalog_database.hasRelationWithId(proto.GetExtension(serialization::SampleWorkOrder::relation_id)) && proto.HasExtension(serialization::SampleWorkOrder::block_id) && proto.HasExtension(serialization::SampleWorkOrder::is_block_sample) && proto.HasExtension(serialization::SampleWorkOrder::percentage) && proto.HasExtension(serialization::SampleWorkOrder::insert_destination_index); } case serialization::SAVE_BLOCKS: { return proto.HasExtension(serialization::SaveBlocksWorkOrder::block_id) && proto.HasExtension(serialization::SaveBlocksWorkOrder::force); } case serialization::SELECT: { if (!proto.HasExtension(serialization::SelectWorkOrder::relation_id) || 
!proto.HasExtension(serialization::SelectWorkOrder::simple_projection) || !proto.HasExtension(serialization::SelectWorkOrder::selection_index)) { return false; } const relation_id rel_id = proto.GetExtension(serialization::SelectWorkOrder::relation_id); if (!catalog_database.hasRelationWithId(rel_id)) { return false; } const CatalogRelationSchema &relation = catalog_database.getRelationSchemaById(rel_id); for (int i = 0; i < proto.ExtensionSize(serialization::SelectWorkOrder::simple_selection); ++i) { if (!relation.hasAttributeWithId( proto.GetExtension(serialization::SelectWorkOrder::simple_selection, i))) { return false; } } if (proto.GetExtension(serialization::SelectWorkOrder::simple_projection) == query_context.isValidScalarGroupId( proto.GetExtension(serialization::SelectWorkOrder::selection_index))) { return false; } return proto.HasExtension(serialization::SelectWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::SelectWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::SelectWorkOrder::predicate_index) && query_context.isValidPredicate( proto.GetExtension(serialization::SelectWorkOrder::predicate_index)) && proto.HasExtension(serialization::SelectWorkOrder::block_id); } case serialization::SORT_MERGE_RUN: { // In Protobuf 2.6, proto.HasExtension does not work for the repeated // message field, but Protobuf 3.0 beta works. // TODO(zuyu): Validate serialization::SortMergeRunWorkOrder::runs. 
// NOTE(review): the SELECT case above requires EXACTLY ONE of
// simple_projection / a valid selection_index (the == comparison rejects
// both-true and both-false) — an XOR-style exclusivity check, not a bug.
return proto.HasExtension(serialization::SortMergeRunWorkOrder::sort_config_index) && query_context.isValidSortConfigId( proto.GetExtension(serialization::SortMergeRunWorkOrder::sort_config_index)) && proto.HasExtension(serialization::SortMergeRunWorkOrder::top_k) && proto.HasExtension(serialization::SortMergeRunWorkOrder::merge_level) && proto.HasExtension(serialization::SortMergeRunWorkOrder::relation_id) && catalog_database.hasRelationWithId( proto.GetExtension(serialization::SortMergeRunWorkOrder::relation_id)) && proto.HasExtension(serialization::SortMergeRunWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::SortMergeRunWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::SortMergeRunWorkOrder::operator_index); } case serialization::SORT_RUN_GENERATION: { return proto.HasExtension(serialization::SortRunGenerationWorkOrder::relation_id) && catalog_database.hasRelationWithId( proto.GetExtension(serialization::SortRunGenerationWorkOrder::relation_id)) && proto.HasExtension(serialization::SortRunGenerationWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::SortRunGenerationWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::SortRunGenerationWorkOrder::sort_config_index) && query_context.isValidSortConfigId( proto.GetExtension(serialization::SortRunGenerationWorkOrder::sort_config_index)) && proto.HasExtension(serialization::SortRunGenerationWorkOrder::block_id); } case serialization::TABLE_GENERATOR: { return proto.HasExtension(serialization::TableGeneratorWorkOrder::generator_function_index) && query_context.isValidGeneratorFunctionId( proto.GetExtension(serialization::TableGeneratorWorkOrder::generator_function_index)) && proto.HasExtension(serialization::TableGeneratorWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( 
proto.GetExtension(serialization::TableGeneratorWorkOrder::insert_destination_index)); } case serialization::TEXT_SCAN: { if (!proto.HasExtension(serialization::TextScanWorkOrder::field_terminator) || !proto.HasExtension(serialization::TextScanWorkOrder::process_escape_sequences) || !proto.HasExtension(serialization::TextScanWorkOrder::insert_destination_index) || !query_context.isValidInsertDestinationId( proto.GetExtension(serialization::TextScanWorkOrder::insert_destination_index))) { return false; } // Two fields are exclusive. if (proto.HasExtension(serialization::TextScanWorkOrder::filename) == proto.HasExtension(serialization::TextScanWorkOrder::text_blob)) { return false; } return proto.HasExtension(serialization::TextScanWorkOrder::filename) || proto.GetExtension(serialization::TextScanWorkOrder::text_blob).IsInitialized(); } case serialization::TEXT_SPLIT: { return proto.HasExtension(serialization::TextSplitWorkOrder::filename) && proto.HasExtension(serialization::TextSplitWorkOrder::process_escape_sequences) && proto.HasExtension(serialization::TextSplitWorkOrder::operator_index); } case serialization::UPDATE: { return proto.HasExtension(serialization::UpdateWorkOrder::relation_id) && catalog_database.hasRelationWithId( proto.GetExtension(serialization::UpdateWorkOrder::relation_id)) && proto.HasExtension(serialization::UpdateWorkOrder::insert_destination_index) && query_context.isValidInsertDestinationId( proto.GetExtension(serialization::UpdateWorkOrder::insert_destination_index)) && proto.HasExtension(serialization::UpdateWorkOrder::predicate_index) && query_context.isValidPredicate( proto.GetExtension(serialization::UpdateWorkOrder::predicate_index)) && proto.HasExtension(serialization::UpdateWorkOrder::update_group_index) && query_context.isValidUpdateGroupId( proto.GetExtension(serialization::UpdateWorkOrder::update_group_index)) && proto.HasExtension(serialization::UpdateWorkOrder::operator_index) && 
// UPDATE (continued) also requires a block id; anything unrecognized fails.
proto.HasExtension(serialization::UpdateWorkOrder::block_id); } default: return false; } }
QueryData genHash(QueryContext& context) {
  QueryData results;
  boost::system::error_code ec;

  // The query must provide a predicate constraining path or directory. Seed
  // the targets with path EQUALS values, then expand LIKE patterns through
  // the filesystem glob resolver.
  auto paths = context.constraints["path"].getAll(EQUALS);
  context.expandConstraints(
      "path",
      LIKE,
      paths,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> expanded;
        auto status =
            resolveFilePattern(pattern, expanded, GLOB_ALL | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(expanded.begin(), expanded.end());
        }
        return status;
      }));

  // Hash each target that is a regular file.
  for (const auto& path_string : paths) {
    boost::filesystem::path target = path_string;
    if (boost::filesystem::is_regular_file(target, ec)) {
      genHashForFile(path_string, target.parent_path().string(), context, results);
    }
  }

  // Same treatment for the directory column, restricted to folder globs.
  auto directories = context.constraints["directory"].getAll(EQUALS);
  context.expandConstraints(
      "directory",
      LIKE,
      directories,
      ([&](const std::string& pattern, std::set<std::string>& out) {
        std::vector<std::string> expanded;
        auto status = resolveFilePattern(
            pattern, expanded, GLOB_FOLDERS | GLOB_NO_CANON);
        if (status.ok()) {
          out.insert(expanded.begin(), expanded.end());
        }
        return status;
      }));

  // Hash every regular file directly inside each constrained directory.
  for (const auto& directory_string : directories) {
    boost::filesystem::path dir = directory_string;
    if (!boost::filesystem::is_directory(dir, ec)) {
      continue;
    }
    boost::filesystem::directory_iterator entry(dir), end;
    for (; entry != end; ++entry) {
      if (boost::filesystem::is_regular_file(entry->path(), ec)) {
        genHashForFile(
            entry->path().string(), directory_string, context, results);
      }
    }
  }
  return results;
}
// JNI callback invoked from the Java billing layer, once per entry of a
// purchase query. 'context' is a heap-allocated QueryContext* smuggled
// through a jlong. Protocol (mirrors the inline conditions below):
//   sku == null                    -> query error; 'details' may carry a
//                                     message. A single all-empty purchase
//                                     entry is pushed to signal the error,
//                                     the callback fires, and the context is
//                                     deleted.
//   sku != null, details == null   -> end-of-list sentinel: fire the callback
//                                     with the accumulated purchases and
//                                     delete the context.
//   both non-null                  -> append one purchase built from the
//                                     jstring arguments; no callback yet.
// Ownership: this function deletes 'queryCtx' on the error and end-of-list
// paths only; intermediate entries leave it alive for further calls.
extern "C" void Java_com_turbulenz_turbulenz_payment_nativePurchaseQueryResponse (JNIEnv *env, jobject thiz, jlong context, jstring sku, jstring details, jstring token, jstring devPayload, jstring sig) { if (0 == context) { LOGE("purchase query callback called with null context"); return; } QueryContext *queryCtx = (QueryContext *)(size_t )context; // Conditions are: // sku == null, details != null means error (msg in 'details') // sku == "", details == null, signature == null means end of purchases if (0 == sku) { if (0 != details) { std::string errStr; InitStringFromJString(errStr, env, details); LOGE("query failed: %s", errStr.c_str()); } else { LOGE("query failed: (internal error - no details available)"); } // Create a single empty entry to indicate an error. GooglePlayBilling::PurchaseList &list = queryCtx->purchases; list.clear(); list.push_back({ "", "", "", "", "" }); queryCtx->callback(queryCtx->callerContext, queryCtx->purchases); delete queryCtx; } else if (0 == details) { // The list has been terminated. We can make the callback now. queryCtx->callback(queryCtx->callerContext, queryCtx->purchases); delete queryCtx; } else { // We have a purchase to add to the list GooglePlayBilling::PurchaseList &list = queryCtx->purchases; list.push_back(GooglePlayBilling::Purchase()); GooglePlayBilling::Purchase &purchase = list[list.size() - 1]; InitStringFromJString(purchase.sku, env, sku); InitStringFromJString(purchase.details, env, details); InitStringFromJString(purchase.googleToken, env, token); InitStringFromJString(purchase.clientToken, env, devPayload); InitStringFromJString(purchase.signature, env, sig); } }