/*
 * Load the entry-point policy file (and everything it pulls in), refuse to
 * continue on syntax errors, run partial/full integrity checks, and finally
 * verify promises. Records the read attempt time on the config.
 * Exits the process on syntax or integrity errors; otherwise returns the
 * (possibly NULL) merged policy tree, owned by the caller.
 */
Policy *GenericAgentLoadPolicy(EvalContext *ctx, GenericAgentConfig *config)
{
    config->policy_last_read_attempt = time(NULL);

    StringSet *seen_files = StringSetNew();
    StringSet *broken_files = StringSetNew();

    Policy *policy = LoadPolicyFile(ctx, config, config->input_file,
                                    seen_files, broken_files);

    /* Any file recorded as failed means a parse/partial-check error. */
    if (StringSetSize(broken_files) > 0)
    {
        Log(LOG_LEVEL_ERR, "There are syntax errors in policy files");
        exit(EXIT_FAILURE);
    }

    StringSetDestroy(seen_files);
    StringSetDestroy(broken_files);

    {
        Seq *errors = SeqNew(100, PolicyErrorDestroy);

        /* Partial checks first; only if they pass (and the policy looks
         * runnable, or we were asked to check runnability) do the full
         * integrity checks. */
        if (PolicyCheckPartial(policy, errors))
        {
            if (!config->bundlesequence
                && (PolicyIsRunnable(policy) || config->check_runnable))
            {
                Log(LOG_LEVEL_VERBOSE, "Running full policy integrity checks");
                PolicyCheckRunnable(ctx, policy, errors,
                                    config->ignore_missing_bundles);
            }
        }

        if (SeqLength(errors) > 0)
        {
            Writer *err_writer = FileWriter(stderr);
            for (size_t i = 0; i < errors->length; i++)
            {
                PolicyErrorWrite(err_writer, errors->data[i]);
            }
            WriterClose(err_writer);
            exit(EXIT_FAILURE); // TODO: do not exit
        }

        SeqDestroy(errors);
    }

    if (LogGetGlobalLevel() >= LOG_LEVEL_VERBOSE)
    {
        ShowContext(ctx);
    }

    if (policy)
    {
        VerifyPromises(ctx, policy, config);
    }

    return policy;
}
/*
 * Write every accumulated policy error to stdout, one per call to
 * PolicyErrorWrite. Detaches (rather than closes) the writer so that
 * stdout remains usable afterwards. No-op when the sequence is empty.
 */
static void DumpErrors(Seq *errs)
{
    if (SeqLength(errs) == 0)
    {
        return;
    }

    Writer *out = FileWriter(stdout);
    for (size_t i = 0; i < errs->length; i++)
    {
        PolicyErrorWrite(out, errs->data[i]);
    }
    FileWriterDetach(out); /* detach, not close: keep stdout open */
}
/*
 * Load, check, expand and resolve the full policy for this run.
 *
 * Steps: parse the entry file and its inputs (exiting on syntax errors),
 * run partial/full integrity checks (exiting on errors), expand and
 * common-evaluate every promise of every bundle, resolve the policy,
 * optionally verify the policy-defined bundlesequence, and attach the
 * release id from the validated-inputs file when present.
 *
 * Returns the merged policy tree (owned by the caller), or NULL when the
 * entry file produced no policy (e.g. it was skipped as a duplicate).
 */
Policy *LoadPolicy(EvalContext *ctx, GenericAgentConfig *config)
{
    StringSet *parsed_files_and_checksums = StringSetNew();
    StringSet *failed_files = StringSetNew();

    Policy *policy = LoadPolicyFile(ctx, config, config->input_file,
                                    parsed_files_and_checksums, failed_files);

    if (StringSetSize(failed_files) > 0)
    {
        Log(LOG_LEVEL_ERR, "There are syntax errors in policy files");
        exit(EXIT_FAILURE);
    }

    StringSetDestroy(parsed_files_and_checksums);
    StringSetDestroy(failed_files);

    {
        Seq *errors = SeqNew(100, PolicyErrorDestroy);

        if (PolicyCheckPartial(policy, errors))
        {
            if (!config->bundlesequence
                && (PolicyIsRunnable(policy) || config->check_runnable))
            {
                Log(LOG_LEVEL_VERBOSE, "Running full policy integrity checks");
                PolicyCheckRunnable(ctx, policy, errors,
                                    config->ignore_missing_bundles);
            }
        }

        if (SeqLength(errors) > 0)
        {
            Writer *writer = FileWriter(stderr);
            for (size_t i = 0; i < errors->length; i++)
            {
                PolicyErrorWrite(writer, errors->data[i]);
            }
            WriterClose(writer);
            exit(EXIT_FAILURE); // TODO: do not exit
        }

        SeqDestroy(errors);
    }

    if (LogGetGlobalLevel() >= LOG_LEVEL_VERBOSE)
    {
        ShowContext(ctx);
    }

    if (policy)
    {
        /* Expand and common-evaluate every promise in every bundle,
         * pushing/popping the matching evaluation stack frames. */
        for (size_t i = 0; i < SeqLength(policy->bundles); i++)
        {
            Bundle *bp = SeqAt(policy->bundles, i);
            EvalContextStackPushBundleFrame(ctx, bp, NULL, false);

            for (size_t j = 0; j < SeqLength(bp->promise_types); j++)
            {
                PromiseType *sp = SeqAt(bp->promise_types, j);
                EvalContextStackPushPromiseTypeFrame(ctx, sp);

                for (size_t ppi = 0; ppi < SeqLength(sp->promises); ppi++)
                {
                    Promise *pp = SeqAt(sp->promises, ppi);
                    ExpandPromise(ctx, pp, CommonEvalPromise, NULL);
                }

                EvalContextStackPopFrame(ctx);
            }

            EvalContextStackPopFrame(ctx);
        }

        PolicyResolve(ctx, policy, config);

        // TODO: need to move this inside PolicyCheckRunnable eventually.
        if (!config->bundlesequence && config->check_runnable)
        {
            // only verify policy-defined bundlesequence for cf-agent, cf-promises
            if ((config->agent_type == AGENT_TYPE_AGENT)
                || (config->agent_type == AGENT_TYPE_COMMON))
            {
                if (!VerifyBundleSequence(ctx, policy, config))
                {
                    FatalError(ctx, "Errors in promise bundles: could not verify bundlesequence");
                }
            }
        }
    }

    JsonElement *validated_doc = ReadReleaseIdFileFromInputs();
    if (validated_doc)
    {
        const char *release_id = JsonObjectGetAsString(validated_doc, "releaseId");
        /* BUG FIX: guard against a NULL policy — LoadPolicyFile can return
         * NULL (e.g. duplicate-skip path) and the code above only touches
         * policy inside an `if (policy)` block, yet this assignment used to
         * dereference it unconditionally. */
        if (policy && release_id)
        {
            policy->release_id = xstrdup(release_id);
        }
        JsonDestroy(validated_doc);
    }

    return policy;
}
/*
 * Parse one policy file and, recursively, the inputs it names in its
 * body common/file control sections.
 *
 * Duplicate files are detected both by exact path and by content hash
 * (both are recorded in parsed_files_and_checksums; the hash entries are
 * prefixed "{checksum}" so they cannot collide with paths). Files that
 * fail to parse or fail the partial policy check are recorded in
 * failed_files.
 *
 * Returns the (possibly merged) policy, or NULL for duplicates/failures.
 * The caller owns the returned policy.
 */
static Policy *LoadPolicyFile(EvalContext *ctx, GenericAgentConfig *config,
                              const char *policy_file,
                              StringSet *parsed_files_and_checksums,
                              StringSet *failed_files)
{
    Policy *policy = NULL;
    unsigned char digest[EVP_MAX_MD_SIZE + 1] = { 0 };
    char hashbuffer[EVP_MAX_MD_SIZE * 4] = { 0 };
    char hashprintbuffer[CF_BUFSIZE] = { 0 };

    HashFile(policy_file, digest, CF_DEFAULT_DIGEST);
    snprintf(hashprintbuffer, CF_BUFSIZE - 1, "{checksum}%s",
             HashPrintSafe(CF_DEFAULT_DIGEST, true, digest, hashbuffer));

    Log(LOG_LEVEL_DEBUG, "Hashed policy file %s to %s", policy_file, hashprintbuffer);

    if (StringSetContains(parsed_files_and_checksums, policy_file))
    {
        Log(LOG_LEVEL_VERBOSE, "Skipping loading of duplicate policy file %s", policy_file);
        return NULL;
    }
    else if (StringSetContains(parsed_files_and_checksums, hashprintbuffer))
    {
        Log(LOG_LEVEL_VERBOSE, "Skipping loading of duplicate (detected by hash) policy file %s", policy_file);
        return NULL;
    }
    else
    {
        Log(LOG_LEVEL_DEBUG, "Loading policy file %s", policy_file);
    }

    policy = Cf3ParseFile(config, policy_file);
    // we keep the checksum and the policy file name to help debugging
    StringSetAdd(parsed_files_and_checksums, xstrdup(policy_file));
    StringSetAdd(parsed_files_and_checksums, xstrdup(hashprintbuffer));

    if (policy)
    {
        /* FIX: use PolicyErrorDestroy as the element destructor — plain
         * free() leaked the error internals; this matches the destructor
         * used for the same element type elsewhere in this file. */
        Seq *errors = SeqNew(10, PolicyErrorDestroy);
        if (!PolicyCheckPartial(policy, errors))
        {
            Writer *writer = FileWriter(stderr);
            for (size_t i = 0; i < errors->length; i++)
            {
                PolicyErrorWrite(writer, errors->data[i]);
            }
            WriterClose(writer);
            SeqDestroy(errors);
            StringSetAdd(failed_files, xstrdup(policy_file));
            /* BUG FIX: the parsed policy was leaked on this path; the
             * sibling version of this function destroys it here. */
            PolicyDestroy(policy);
            return NULL;
        }
        SeqDestroy(errors);
    }
    else
    {
        StringSetAdd(failed_files, xstrdup(policy_file));
        return NULL;
    }

    PolicyResolve(ctx, policy, config);

    Body *body_common_control = PolicyGetBody(policy, NULL, "common", "control");
    Body *body_file_control = PolicyGetBody(policy, NULL, "file", "control");

    if (body_common_control)
    {
        Seq *potential_inputs = BodyGetConstraint(body_common_control, "inputs");
        Constraint *cp = EffectiveConstraint(ctx, potential_inputs);
        SeqDestroy(potential_inputs);

        if (cp)
        {
            Policy *aux_policy = LoadPolicyInputFiles(ctx, config,
                                                      RvalRlistValue(cp->rval),
                                                      parsed_files_and_checksums,
                                                      failed_files);
            if (aux_policy)
            {
                policy = PolicyMerge(policy, aux_policy);
            }
        }
    }

    if (body_file_control)
    {
        Seq *potential_inputs = BodyGetConstraint(body_file_control, "inputs");
        Constraint *cp = EffectiveConstraint(ctx, potential_inputs);
        SeqDestroy(potential_inputs);

        if (cp)
        {
            Policy *aux_policy = LoadPolicyInputFiles(ctx, config,
                                                      RvalRlistValue(cp->rval),
                                                      parsed_files_and_checksums,
                                                      failed_files);
            if (aux_policy)
            {
                policy = PolicyMerge(policy, aux_policy);
            }
        }
    }

    return policy;
}
/*
 * Parse one policy file and, recursively, the inputs it names — both the
 * "def.augment_inputs" variable (if defined as a container) and the inputs
 * constraints of body common/file control.
 *
 * Duplicate files are detected by exact path and by content hash; both are
 * recorded in parsed_files_and_checksums (hash entries carry a "{checksum}"
 * prefix so they cannot collide with paths). Files that fail to parse or
 * fail the partial policy check are recorded in failed_files.
 *
 * Returns the (possibly merged) policy, or NULL for duplicates/failures.
 * The caller owns the returned policy.
 *
 * NOTE(review): this symbol is defined twice in this file (an older
 * variant appears above) — presumably two versions were concatenated;
 * confirm which one belongs here.
 */
static Policy *LoadPolicyFile(EvalContext *ctx, GenericAgentConfig *config, const char *policy_file, StringSet *parsed_files_and_checksums, StringSet *failed_files)
{
    unsigned char digest[EVP_MAX_MD_SIZE + 1] = { 0 };
    char hashbuffer[CF_HOSTKEY_STRING_SIZE] = { 0 };
    char hashprintbuffer[CF_BUFSIZE] = { 0 };

    /* Hash the file content so the same file under two paths is caught. */
    HashFile(policy_file, digest, CF_DEFAULT_DIGEST);
    snprintf(hashprintbuffer, CF_BUFSIZE - 1, "{checksum}%s",
             HashPrintSafe(hashbuffer, sizeof(hashbuffer), digest, CF_DEFAULT_DIGEST, true));

    Log(LOG_LEVEL_DEBUG, "Hashed policy file %s to %s", policy_file, hashprintbuffer);

    if (StringSetContains(parsed_files_and_checksums, policy_file))
    {
        Log(LOG_LEVEL_VERBOSE, "Skipping loading of duplicate policy file %s", policy_file);
        return NULL;
    }
    else if (StringSetContains(parsed_files_and_checksums, hashprintbuffer))
    {
        Log(LOG_LEVEL_VERBOSE, "Skipping loading of duplicate (detected by hash) policy file %s", policy_file);
        return NULL;
    }
    else
    {
        Log(LOG_LEVEL_DEBUG, "Loading policy file %s", policy_file);
    }

    Policy *policy = Cf3ParseFile(config, policy_file);
    // we keep the checksum and the policy file name to help debugging
    StringSetAdd(parsed_files_and_checksums, xstrdup(policy_file));
    StringSetAdd(parsed_files_and_checksums, xstrdup(hashprintbuffer));

    if (policy)
    {
        /* NOTE(review): element destructor is plain free() here, while other
         * Seqs of PolicyError in this file use PolicyErrorDestroy — verify
         * this does not leak the error internals. */
        Seq *errors = SeqNew(10, free);
        if (!PolicyCheckPartial(policy, errors))
        {
            /* Report errors, mark the file as failed, and discard the
             * partially-checked policy. */
            Writer *writer = FileWriter(stderr);
            for (size_t i = 0; i < errors->length; i++)
            {
                PolicyErrorWrite(writer, errors->data[i]);
            }
            WriterClose(writer);
            SeqDestroy(errors);
            StringSetAdd(failed_files, xstrdup(policy_file));
            PolicyDestroy(policy);
            return NULL;
        }
        SeqDestroy(errors);
    }
    else
    {
        /* Parse failure: record it and bail out. */
        StringSetAdd(failed_files, xstrdup(policy_file));
        return NULL;
    }

    PolicyResolve(ctx, policy, config);

    /* If def.augment_inputs is defined as a container, load each of its
     * primitive entries as an additional input file and merge the result. */
    DataType def_inputs_type = CF_DATA_TYPE_NONE;
    VarRef *inputs_ref = VarRefParse("def.augment_inputs");
    const void *def_inputs = EvalContextVariableGet(ctx, inputs_ref, &def_inputs_type);
    VarRefDestroy(inputs_ref);

    if (RVAL_TYPE_CONTAINER == DataTypeToRvalType(def_inputs_type) && NULL != def_inputs)
    {
        const JsonElement *el;
        JsonIterator iter = JsonIteratorInit((JsonElement*) def_inputs);
        while ((el = JsonIteratorNextValueByType(&iter, JSON_ELEMENT_TYPE_PRIMITIVE, true)))
        {
            char *input = JsonPrimitiveToString(el);
            Log(LOG_LEVEL_VERBOSE, "Loading augments from def.augment_inputs: %s", input);
            /* Wrap the single path in a one-element rlist so the common
             * LoadPolicyInputFiles entry point can be reused. */
            Rlist* inputs_rlist = NULL;
            RlistAppendScalar(&inputs_rlist, input);
            Policy *aux_policy = LoadPolicyInputFiles(ctx, config, inputs_rlist, parsed_files_and_checksums, failed_files);
            if (aux_policy)
            {
                policy = PolicyMerge(policy, aux_policy);
            }
            RlistDestroy(inputs_rlist);
            free(input);
        }
    }

    /* Recurse into the inputs declared by body common control and
     * body file control, merging whatever they yield. */
    Body *body_common_control = PolicyGetBody(policy, NULL, "common", "control");
    Body *body_file_control = PolicyGetBody(policy, NULL, "file", "control");

    if (body_common_control)
    {
        Seq *potential_inputs = BodyGetConstraint(body_common_control, "inputs");
        Constraint *cp = EffectiveConstraint(ctx, potential_inputs);
        SeqDestroy(potential_inputs);

        if (cp)
        {
            Policy *aux_policy = LoadPolicyInputFiles(ctx, config, RvalRlistValue(cp->rval), parsed_files_and_checksums, failed_files);
            if (aux_policy)
            {
                policy = PolicyMerge(policy, aux_policy);
            }
        }
    }

    if (body_file_control)
    {
        Seq *potential_inputs = BodyGetConstraint(body_file_control, "inputs");
        Constraint *cp = EffectiveConstraint(ctx, potential_inputs);
        SeqDestroy(potential_inputs);

        if (cp)
        {
            Policy *aux_policy = LoadPolicyInputFiles(ctx, config, RvalRlistValue(cp->rval), parsed_files_and_checksums, failed_files);
            if (aux_policy)
            {
                policy = PolicyMerge(policy, aux_policy);
            }
        }
    }

    return policy;
}