/*
 * Populate the error-related fields of a parser test from a YAML
 * "error" mapping.  Only the "verbose" value is stored; the "simple",
 * "line" and "column" keys are recognized but intentionally ignored.
 * Non-mapping nodes are silently skipped.
 */
static void parse_parser_error(yaml_document_t * document, yaml_node_t * node, parser_test * test) {
    yaml_node_pair_t * pair;

    if( node->type != YAML_MAPPING_NODE ) {
        return;
    }
    for( pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) {
        yaml_node_t * keyNode = yaml_document_get_node(document, pair->key);
        yaml_node_t * valueNode = yaml_document_get_node(document, pair->value);
        char * keyValue = keyNode->data.scalar.value;

        if( valueNode->type != YAML_SCALAR_NODE ) {
            continue;
        }
        char * valueValue = valueNode->data.scalar.value;
        /* "simple", "line" and "column" are deliberately not stored. */
        if( strcmp(keyValue, "verbose") == 0 ) {
            test->expected = strdup(valueValue);
        }
    }
}
/*
 * Parse the attribute mapping of a single file entry ("uname", "gname",
 * "sum", "perm") and register the file on the package.
 *
 * pkg      - package being populated
 * item     - YAML mapping node holding the file attributes
 * doc      - document the node indices resolve against
 * filename - path of the file this mapping describes
 *
 * Malformed entries are reported through pkg_emit_error() and skipped.
 * Always returns EPKG_OK.
 */
static int
pkg_set_files_from_node(struct pkg *pkg, yaml_node_t *item,
    yaml_document_t *doc, const char *filename)
{
	yaml_node_pair_t *pair = NULL;
	yaml_node_t *key = NULL;
	yaml_node_t *val = NULL;
	const char *sum = NULL;
	const char *uname = NULL;
	const char *gname = NULL;
	void *set = NULL;
	mode_t perm = 0;

	pair = item->data.mapping.pairs.start;
	while (pair < item->data.mapping.pairs.top) {
		key = yaml_document_get_node(doc, pair->key);
		val = yaml_document_get_node(doc, pair->value);

		/* NOTE(review): assumes the key node is a scalar; confirm the
		 * manifest generator can never emit a non-scalar key here. */
		if (key->data.scalar.length <= 0) {
			pkg_emit_error("Skipping malformed file entry for %s",
			    filename);
			++pair;
			continue;
		}
		if (val->type != YAML_SCALAR_NODE ||
		    val->data.scalar.length <= 0) {
			pkg_emit_error("Skipping malformed file entry for %s",
			    filename);
			++pair;
			continue;
		}

		if (!strcasecmp(key->data.scalar.value, "uname"))
			uname = val->data.scalar.value;
		else if (!strcasecmp(key->data.scalar.value, "gname"))
			gname = val->data.scalar.value;
		else if (!strcasecmp(key->data.scalar.value, "sum") &&
		    val->data.scalar.length == 64)
			/* only a full 64-char SHA-256 hex digest is accepted */
			sum = val->data.scalar.value;
		else if (!strcasecmp(key->data.scalar.value, "perm")) {
			if ((set = setmode(val->data.scalar.value)) == NULL)
				pkg_emit_error("Not a valid mode: %s",
				    val->data.scalar.value);
			else
				perm = getmode(set, 0);
		} else {
			pkg_emit_error("Skipping unknown key for file(%s): %s",
			    filename, key->data.scalar.value);
		}
		++pair;
	}

	/*
	 * BUG FIX: the file must be registered under `filename`, not under
	 * the last attribute key seen ("uname"/"perm"/...), which is what
	 * key->data.scalar.value held here.  The key != NULL guard still
	 * skips registration when the mapping was completely empty.
	 */
	if (key != NULL)
		pkg_addfile_attr(pkg, filename, sum, uname, gname, perm,
		    false);

	return (EPKG_OK);
}
/*
 * Recursively pretty-print a YAML document tree rooted at `node` to
 * stdout, one line per node.
 *
 * Returns 0 on success, or ERR_UNEXPECTED_NODE_TYPE as soon as an
 * unknown node type is encountered anywhere in the tree.
 */
int print_document(yaml_document_t *document, yaml_node_t *node) {
  int rv = 0;
  switch (node->type) {
  case YAML_NO_NODE: {
    printf("node type: no node\n");
    break;
  }
  case YAML_SCALAR_NODE: {
    size_t size = node->data.scalar.length;
    unsigned char *value = node->data.scalar.value;
    printf("node type: scalar [length: %zu, value: %s] \n", size, value);
    break;
  }
  case YAML_SEQUENCE_NODE: {
    printf("node type: sequence\n");
    int n = node->data.sequence.items.top - node->data.sequence.items.start;
    for (int i = 0; i < n; i++) {
      int child_node_idx = node->data.sequence.items.start[i];
      yaml_node_t *child_node = yaml_document_get_node(document, child_node_idx);
      rv = print_document(document, child_node);
      /* BUG FIX: stop on the first child error, matching the mapping
       * case below; previously an error from any item except the last
       * was overwritten by later iterations. */
      if (rv) {
        break;
      }
    }
    break;
  }
  case YAML_MAPPING_NODE: {
    printf("node type: mapping\n");
    int n = node->data.mapping.pairs.top - node->data.mapping.pairs.start;
    for (int i = 0; i < n; i++) {
      int child_node_k_idx = node->data.mapping.pairs.start[i].key;
      int child_node_v_idx = node->data.mapping.pairs.start[i].value;
      yaml_node_t *child_node_k = yaml_document_get_node(document, child_node_k_idx);
      yaml_node_t *child_node_v = yaml_document_get_node(document, child_node_v_idx);
      rv = print_document(document, child_node_k);
      if (rv) {
        break;
      }
      rv = print_document(document, child_node_v);
      if (rv) {
        break;
      }
    }
    break;
  }
  default: {
    printf("ERROR: unexpected node type: %d\n", node->type);
    rv = ERR_UNEXPECTED_NODE_TYPE;
  }
  }
  return rv;
}
static int pkg_set_dirs_from_node(struct pkg *pkg, yaml_node_t *item, yaml_document_t *doc, const char *dirname) { yaml_node_pair_t *pair; yaml_node_t *key; yaml_node_t *val; const char *uname = NULL; const char *gname = NULL; void *set; mode_t perm = 0; bool try = false; pair = item->data.mapping.pairs.start; while (pair < item->data.mapping.pairs.top) { key = yaml_document_get_node(doc, pair->key); val = yaml_document_get_node(doc, pair->value); if (key->data.scalar.length <= 0) { pkg_emit_error("Skipping malformed file entry for %s", dirname); ++pair; continue; } if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0) { pkg_emit_error("Skipping malformed file entry for %s", dirname); ++pair; continue; } if (!strcasecmp(key->data.scalar.value, "uname")) uname = val->data.scalar.value; else if (!strcasecmp(key->data.scalar.value, "gname")) gname = val->data.scalar.value; else if (!strcasecmp(key->data.scalar.value, "perm")) { if ((set = setmode(val->data.scalar.value)) == NULL) pkg_emit_error("Not a valid mode: %s", val->data.scalar.value); else perm = getmode(set, 0); } else if (!strcasecmp(key->data.scalar.value, "try")) { if (val->data.scalar.value[0] == 'n') try = false; else if (val->data.scalar.value[0] == 'y') try = true; else pkg_emit_error("Wrong value for try: %s, " "expected 'y' or 'n'", val->data.scalar.value); } else {
/*
 * Look up `key` in a YAML mapping node and return the corresponding
 * value node, or NULL if the key is absent.
 *
 * BUG FIX: the previous strncmp was bounded only by the scalar's
 * length, so a mapping key that is a strict prefix of `key` (e.g.
 * "dep" vs. "dependencies") matched falsely.  We now require an exact
 * length match and also skip non-scalar keys instead of reading the
 * scalar union member of an arbitrary node.
 */
yaml_node_t* fatso_yaml_mapping_lookup(yaml_document_t* doc, yaml_node_t* mapping, const char* key) {
  assert(mapping->type == YAML_MAPPING_NODE);
  size_t key_len = strlen(key);
  for (yaml_node_pair_t* x = mapping->data.mapping.pairs.start; x != mapping->data.mapping.pairs.top; ++x) {
    yaml_node_t* k = yaml_document_get_node(doc, x->key);
    if (k
        && k->type == YAML_SCALAR_NODE
        && k->data.scalar.length == key_len
        && memcmp(key, (char*)k->data.scalar.value, key_len) == 0) {
      return yaml_document_get_node(doc, x->value);
    }
  }
  return NULL;
}
/*
 * Recursively compare the nodes at index1 of document1 and index2 of
 * document2 for structural equality.
 *
 * Returns 1 when equal, 0 otherwise.  Equality requires the same node
 * type, the same tag string, and recursively identical scalar bytes /
 * sequence items / mapping pairs, compared strictly in document order
 * (two mappings with the same pairs in a different order compare
 * unequal).  Recursion depth is capped at 1000; anything deeper
 * compares unequal.
 */
int compare_nodes(yaml_document_t *document1, int index1,
        yaml_document_t *document2, int index2, int level)
{
    /* Depth guard against pathologically deep documents. */
    if (level++ > 1000) return 0;
    yaml_node_t *node1 = yaml_document_get_node(document1, index1);
    yaml_node_t *node2 = yaml_document_get_node(document2, index2);
    int k;
    assert(node1);
    assert(node2);
    if (node1->type != node2->type) return 0;
    if (strcmp((char *)node1->tag, (char *)node2->tag) != 0) return 0;
    switch (node1->type) {
        case YAML_SCALAR_NODE:
            /* Scalars: length first, then the raw bytes. */
            if (node1->data.scalar.length != node2->data.scalar.length) return 0;
            if (strncmp((char *)node1->data.scalar.value,
                        (char *)node2->data.scalar.value,
                        node1->data.scalar.length) != 0) return 0;
            break;
        case YAML_SEQUENCE_NODE:
            /* Sequences: same item count, then element-wise recursion. */
            if ((node1->data.sequence.items.top - node1->data.sequence.items.start) !=
                (node2->data.sequence.items.top - node2->data.sequence.items.start)) return 0;
            for (k = 0; k < (node1->data.sequence.items.top - node1->data.sequence.items.start); k ++) {
                if (!compare_nodes(document1, node1->data.sequence.items.start[k],
                            document2, node2->data.sequence.items.start[k], level)) return 0;
            }
            break;
        case YAML_MAPPING_NODE:
            /* Mappings: same pair count, then key/value recursion in order. */
            if ((node1->data.mapping.pairs.top - node1->data.mapping.pairs.start) !=
                (node2->data.mapping.pairs.top - node2->data.mapping.pairs.start)) return 0;
            for (k = 0; k < (node1->data.mapping.pairs.top - node1->data.mapping.pairs.start); k ++) {
                if (!compare_nodes(document1, node1->data.mapping.pairs.start[k].key,
                            document2, node2->data.mapping.pairs.start[k].key, level)) return 0;
                if (!compare_nodes(document1, node1->data.mapping.pairs.start[k].value,
                            document2, node2->data.mapping.pairs.start[k].value, level)) return 0;
            }
            break;
        default:
            /* libyaml only produces the three node types above. */
            assert(0);
            break;
    }
    return 1;
}
/*
 * Return the idx-th element of a YAML sequence node, or NULL when idx
 * is out of range.
 */
yaml_node_t* fatso_yaml_sequence_lookup(yaml_document_t* doc, yaml_node_t* sequence, size_t idx) {
  assert(sequence->type == YAML_SEQUENCE_NODE);
  if (idx < fatso_yaml_sequence_length(sequence)) {
    yaml_node_item_t element = sequence->data.sequence.items.start[idx];
    return yaml_document_get_node(doc, element);
  }
  return NULL;
}
/*
 * Emit a YAML sequence node as a JSON array on f.  A NULL or
 * non-sequence node is emitted as the JSON literal null.  Nested
 * mappings and sequences recurse through yaml_json_mapping() /
 * yaml_json_sequence().  Always returns 0.
 */
int yaml_json_sequence(FILE *f, yaml_document_t *ydoc, yaml_node_t *node)
{
	yaml_node_item_t *item;
	int first = 1;

	if (!node || node->type != YAML_SEQUENCE_NODE) {
		fprintf(f, "null");
		return 0;
	}
	fprintf(f, "[");
	for (item = node->data.sequence.items.start; item < node->data.sequence.items.top; item++) {
		/* resolve the sequence item index into its node */
		yaml_node_t *element = yaml_document_get_node(ydoc, *item);
		if (!first)
			fprintf(f, ",");
		first = 0;
		if (element == NULL) {
			fprintf(f, "null");
			continue;
		}
		switch (element->type) {
		case YAML_SCALAR_NODE:
			fprintf(f, "\"%s\"", element->data.scalar.value);
			break;
		case YAML_MAPPING_NODE:
			yaml_json_mapping(f, ydoc, element);
			break;
		case YAML_SEQUENCE_NODE:
			yaml_json_sequence(f, ydoc, element);
			break;
		default:
			fprintf(f, "null");
			break;
		}
	}
	fprintf(f, "]");
	return 0;
}
/*
 * Emit a YAML mapping node as a JSON object on f.  A NULL or
 * non-mapping node is emitted as the JSON literal null, and pairs
 * whose key is not a scalar are skipped entirely.  Scalar values are
 * double-quoted unless the property name starts with one of a fixed
 * list of known-numeric names.  Always returns 0.
 */
int yaml_json_mapping(FILE *f, yaml_document_t *ydoc, yaml_node_t *node)
{
	/* property-name prefixes whose values are emitted without quotes */
	static const char *numeric_keys[] = {
		"time", "level", "iconID", "radius", "consume",
		"soundID", "quantity", "graphicID", "probability", NULL
	};
	yaml_node_pair_t *pair;
	int first = 1;

	if (!node || node->type != YAML_MAPPING_NODE) {
		fprintf(f, "null");
		return 0;
	}
	fprintf(f, "{");
	for (pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++) {
		yaml_node_t *keynode = yaml_document_get_node(ydoc, pair->key);
		if (!keynode || keynode->type != YAML_SCALAR_NODE)
			continue;
		if (!first)
			fprintf(f, ",");
		first = 0;
		char *key = keynode->data.scalar.value;
		fprintf(f, "\"%s\":", key);
		yaml_node_t *valnode = yaml_document_get_node(ydoc, pair->value);
		if (!valnode) {
			fprintf(f, "null");
		} else if (valnode->type == YAML_SCALAR_NODE) {
			int quoted = 1;
			for (int i = 0; numeric_keys[i] != NULL; i++) {
				/* prefix match, mirroring the original strncmp checks */
				if (strncmp(key, numeric_keys[i], strlen(numeric_keys[i])) == 0) {
					quoted = 0;
					break;
				}
			}
			if (quoted)
				fprintf(f, "\"%s\"", valnode->data.scalar.value);
			else
				fprintf(f, "%s", valnode->data.scalar.value);
		} else if (valnode->type == YAML_MAPPING_NODE) {
			yaml_json_mapping(f, ydoc, valnode);
		} else if (valnode->type == YAML_SEQUENCE_NODE) {
			yaml_json_sequence(f, ydoc, valnode);
		}
	}
	fprintf(f, "}");
	return 0;
}
/// Recursively convert a libyaml node into a Mordor JSON::Value:
/// scalars become std::strings, sequences become JSON::Array, mappings
/// become JSON::Object.  Throws std::runtime_error (via
/// MORDOR_THROW_EXCEPTION) when a mapping key is not a scalar.
static void convertNode(JSON::Value &value, yaml_node_t *node, yaml_document_t &document)
{
    switch (node->type) {
        case YAML_SCALAR_NODE:
            // Length-based copy, so the scalar's raw bytes are preserved.
            value = std::string((char *)node->data.scalar.value, node->data.scalar.length);
            break;
        case YAML_SEQUENCE_NODE:
        {
            value = JSON::Array();
            JSON::Array &array = value.get<JSON::Array>();
            yaml_node_item_t *item = node->data.sequence.items.start;
            // Size the array up front, then convert each item in place.
            array.resize(node->data.sequence.items.top - item);
            JSON::Array::iterator it = array.begin();
            while (item < node->data.sequence.items.top) {
                convertNode(*it, yaml_document_get_node(&document, *item), document);
                ++it;
                ++item;
            }
            break;
        }
        case YAML_MAPPING_NODE:
        {
            value = JSON::Object();
            JSON::Object &object = value.get<JSON::Object>();
            yaml_node_pair_t *pair = node->data.mapping.pairs.start;
            while (pair < node->data.mapping.pairs.top) {
                yaml_node_t *keyNode = yaml_document_get_node(&document, pair->key);
                yaml_node_t *valueNode = yaml_document_get_node(&document, pair->value);
                if (keyNode->type != YAML_SCALAR_NODE)
                    MORDOR_THROW_EXCEPTION(std::runtime_error("Can't use a non-string as a key"));
                std::string key((char *)keyNode->data.scalar.value, keyNode->data.scalar.length);
                // Insert an empty value under the key, then convert the YAML
                // value directly into the stored slot (avoids a copy).
                convertNode(object.insert(std::make_pair(key, JSON::Value()))->second, valueNode, document);
                ++pair;
            }
            break;
        }
        default:
            // libyaml documents only contain the three node types above.
            MORDOR_NOTREACHED();
    }
}
/*
 * Entry point for one spec YAML document: find the top-level "tests"
 * sequence in the root mapping and parse it.  Documents whose root is
 * not a mapping are ignored.
 */
void mustache_spec_parse_document(yaml_document_t * document) {
  yaml_node_t * root = yaml_document_get_root_node(document);
  if( root->type != YAML_MAPPING_NODE ) {
    return;
  }
  for( yaml_node_pair_t * p = root->data.mapping.pairs.start; p < root->data.mapping.pairs.top; ++p ) {
    yaml_node_t * k = yaml_document_get_node(document, p->key);
    yaml_node_t * v = yaml_document_get_node(document, p->value);
    const char * name = reinterpret_cast<char *>(k->data.scalar.value);
    if( v->type == YAML_SEQUENCE_NODE && strcmp(name, "tests") == 0 ) {
      mustache_spec_parse_tests(document, v);
    }
  }
}
/*
 * Parse a YAML sequence node into the given pkg, interpreting each
 * item according to `attr` (PKG_CONFLICTS, PKG_CATEGORIES,
 * PKG_LICENSES, PKG_USERS, PKG_GROUPS or PKG_DIRS).  Malformed items
 * are reported through pkg_emit_error() and skipped.
 * Always returns EPKG_OK.
 */
static int
parse_sequence(struct pkg * pkg, yaml_node_t *node, yaml_document_t *doc, int attr)
{
	yaml_node_item_t *item;
	yaml_node_t *val;

	item = node->data.sequence.items.start;
	while (item < node->data.sequence.items.top) {
		val = yaml_document_get_node(doc, *item);
		switch (attr) {
		case PKG_CONFLICTS:
			if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0)
				pkg_emit_error("Skipping malformed conflict");
			else
				pkg_addconflict(pkg, val->data.scalar.value);
			break;
		case PKG_CATEGORIES:
			if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0)
				pkg_emit_error("Skipping malformed category");
			else
				pkg_addcategory(pkg, val->data.scalar.value);
			break;
		case PKG_LICENSES:
			if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0)
				pkg_emit_error("Skipping malformed license");
			else
				pkg_addlicense(pkg, val->data.scalar.value);
			break;
		case PKG_USERS:
			if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0) {
				/* BUG FIX: message previously said "license"
				 * (copy-paste from the case above). */
				pkg_emit_error("Skipping malformed user");
			} else
				pkg_adduser(pkg, val->data.scalar.value);
			break;
		case PKG_GROUPS:
			if (val->type != YAML_SCALAR_NODE || val->data.scalar.length <= 0) {
				/* BUG FIX: message previously said "license". */
				pkg_emit_error("Skipping malformed group");
			} else
				pkg_addgroup(pkg, val->data.scalar.value);
			break;
		case PKG_DIRS:
			/* dirs may be plain scalar paths or attribute mappings */
			if (val->type == YAML_SCALAR_NODE && val->data.scalar.length > 0)
				pkg_adddir(pkg, val->data.scalar.value, 1);
			else if (val->type == YAML_MAPPING_NODE)
				parse_mapping(pkg, val, doc, attr);
			else
				pkg_emit_error("Skipping malformed dirs");
			break;
		default:
			/* unreachable for the attrs this helper is called with */
			break;
		}
		++item;
	}
	return (EPKG_OK);
}
void mustache_spec_parse_data(yaml_document_t * document, yaml_node_t * node, mustache::Data * data) { if( node->type == YAML_MAPPING_NODE ) { yaml_node_pair_t * pair; data->init(mustache::Data::TypeMap, 0); for( pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) { yaml_node_t * keyNode = yaml_document_get_node(document, pair->key); yaml_node_t * valueNode = yaml_document_get_node(document, pair->value); char * keyValue = reinterpret_cast<char *>(keyNode->data.scalar.value); mustache::Data * child = new mustache::Data(); mustache_spec_parse_data(document, valueNode, child); data->data.insert(std::pair<std::string,mustache::Data*>(keyValue,child)); } } else if( node->type == YAML_SEQUENCE_NODE ) { yaml_node_item_t * item; int nItems = node->data.sequence.items.top - node->data.sequence.items.start; data->init(mustache::Data::TypeArray, nItems); int i = 0; for( item = node->data.sequence.items.start; item < node->data.sequence.items.top; item ++) { mustache::Data * child = new mustache::Data(); data->array.push_back(child); yaml_node_t * valueNode = yaml_document_get_node(document, *item); mustache_spec_parse_data(document, valueNode, child); } data->length = data->array.size(); } else if( node->type == YAML_SCALAR_NODE ) { char * keyValue = reinterpret_cast<char *>(node->data.scalar.value); if( strcmp(keyValue, "0") == 0 || strcmp(keyValue, "false") == 0 ) { data->init(mustache::Data::TypeString, 0); } else { data->init(mustache::Data::TypeString, node->data.scalar.length); data->val->assign(keyValue); mustache::trimDecimal(*(data->val)); } } }
/*
 * Build a parser_test from one YAML test mapping and emit it as C
 * source.  Scalar pairs fill the test fields ("fails: true" sets
 * expect_error); a nested "error" mapping is delegated to
 * parse_parser_error().  Non-mapping nodes are ignored.
 */
static void parse_parser_test(yaml_document_t * document, yaml_node_t * node) {
    yaml_node_pair_t * pair;
    parser_test * test;

    if( node->type != YAML_MAPPING_NODE ) {
        return;
    }
    test = get_parser_test();
    for( pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) {
        yaml_node_t * keyNode = yaml_document_get_node(document, pair->key);
        yaml_node_t * valueNode = yaml_document_get_node(document, pair->value);
        char * keyValue = keyNode->data.scalar.value;

        if( valueNode->type == YAML_MAPPING_NODE ) {
            if( strcmp(keyValue, "error") == 0 ) {
                parse_parser_error(document, valueNode, test);
            }
            continue;
        }
        if( valueNode->type != YAML_SCALAR_NODE ) {
            continue;
        }
        char * valueValue = valueNode->data.scalar.value;
        if( strcmp(keyValue, "name") == 0 ) {
            test->name = strdup(valueValue);
        } else if( strcmp(keyValue, "desc") == 0 ) {
            test->desc = strdup(valueValue);
        } else if( strcmp(keyValue, "template") == 0 ) {
            test->tmpl = strdup(valueValue);
        } else if( strcmp(keyValue, "expected") == 0 ) {
            test->expected = strdup(valueValue);
        } else if( strcmp(keyValue, "fails") == 0 && strcmp(valueValue, "true") == 0 ) {
            test->expect_error = 1;
        }
    }
    /* Emit the populated test as generated C code. */
    parser_test_print_c(outfile, test);
}
/*
 * Walk a YAML sequence of test definitions and parse each mapping
 * element as one spec test.  Non-sequence input and non-mapping
 * elements are ignored.
 */
void mustache_spec_parse_tests(yaml_document_t * document, yaml_node_t * node) {
  if( node->type != YAML_SEQUENCE_NODE ) {
    return;
  }
  for( yaml_node_item_t * item = node->data.sequence.items.start;
       item < node->data.sequence.items.top;
       ++item ) {
    yaml_node_t * element = yaml_document_get_node(document, *item);
    if( element->type == YAML_MAPPING_NODE ) {
      mustache_spec_parse_test(document, element);
    }
  }
}
/*
 * Parse the "dependencies" and "defines" sections of a fatso
 * configuration mapping node into `e`.
 *
 * Returns 0 on success, or the first non-zero result from
 * fatso_dependency_parse (which sets *out_error_message).
 *
 * NOTE(review): a dependency parse error jumps to `out` and skips the
 * "defines" section entirely — confirm that is intended.
 */
int fatso_configuration_parse(struct fatso_configuration* e, struct yaml_document_s* doc, struct yaml_node_s* node, char** out_error_message) {
  int r = 0;

  /* Optional "dependencies" sequence. */
  yaml_node_t* dependencies = fatso_yaml_mapping_lookup(doc, node, "dependencies");
  if (dependencies && dependencies->type == YAML_SEQUENCE_NODE) {
    size_t len = fatso_yaml_sequence_length(dependencies);
    if (len != 0) {
      e->dependencies.size = len;
      e->dependencies.data = fatso_calloc(len, sizeof(struct fatso_dependency));
      for (size_t i = 0; i < len; ++i) {
        r = fatso_dependency_parse(&e->dependencies.data[i], doc,
            fatso_yaml_sequence_lookup(doc, dependencies, i), out_error_message);
        if (r != 0)
          goto out;
      }
    }
  }

  /* Optional "defines" mapping; each pair becomes a key/value strdup. */
  yaml_node_t* defines = fatso_yaml_mapping_lookup(doc, node, "defines");
  if (defines && defines->type == YAML_MAPPING_NODE) {
    size_t len = fatso_yaml_mapping_length(defines);
    if (len != 0) {
      e->defines.size = len;
      e->defines.data = fatso_calloc(len, sizeof(struct fatso_define));
      for (size_t i = 0; i < len; ++i) {
        yaml_node_pair_t* pair = &defines->data.mapping.pairs.start[i];
        yaml_node_t* key = yaml_document_get_node(doc, pair->key);
        yaml_node_t* value = yaml_document_get_node(doc, pair->value);
        e->defines.data[i].key = fatso_yaml_scalar_strdup(key);
        e->defines.data[i].value = fatso_yaml_scalar_strdup(value);
      }
    }
  }
out:
  return r;
}
/*
 * Parse a YAML mapping of partial-name -> template-source pairs and
 * tokenize each template into the given partials map.
 *
 * Cleanup: removed the dead outer `std::string ckey;` that the
 * loop-local declaration shadowed, and renamed the local
 * mustache::Node so it no longer shadows the `node` parameter.
 */
void mustache_spec_parse_partials(yaml_document_t * document, yaml_node_t * node, mustache::Node::Partials * partials) {
  if( node->type != YAML_MAPPING_NODE ) {
    return;
  }
  mustache::Mustache mustache;
  for( yaml_node_pair_t * pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) {
    yaml_node_t * keyNode = yaml_document_get_node(document, pair->key);
    yaml_node_t * valueNode = yaml_document_get_node(document, pair->value);
    // NOTE(review): assumes both key and value are scalar nodes — the
    // spec files only use string partials; confirm for other inputs.
    char * keyValue = reinterpret_cast<char *>(keyNode->data.scalar.value);
    char * valueValue = reinterpret_cast<char *>(valueNode->data.scalar.value);
    std::string ckey(keyValue);
    std::string tmpl(valueValue);
    // Insert an empty node under the key, then tokenize straight into
    // the stored slot.
    mustache::Node partialNode;
    partials->insert(std::make_pair(ckey, partialNode));
    mustache.tokenize(&tmpl, &(*partials)[ckey]);
  }
}
/* this is where the magic happens */
/*
 * Load a cluster definition YAML file and return a set mapping each
 * cluster section name (the SECTION in %cluster:SECTION) to its range
 * expression.  Scalar values are used verbatim; sequence values have
 * each item dollar-substituted, wrapped in parentheses, and the items
 * comma-joined.  A synthetic "KEYS" entry listing all section names is
 * appended.  On any I/O or parse error, a warning is emitted via
 * range_request_warn() and an empty set is returned.
 */
static set* _cluster_keys(range_request* rr, apr_pool_t* pool,
                          const char* cluster, const char* cluster_file)
{
    apr_array_header_t* working_range;
    set* sections;
    char* section;   /* NOTE(review): only ever assigned NULL; looks unused */
    char* cur_section;
    apr_pool_t* req_pool = range_request_pool(rr);
    yaml_node_t *node;
    yaml_node_t *rootnode;
    yaml_node_t *keynode;
    yaml_node_t *valuenode;
    yaml_parser_t parser;
    yaml_node_item_t *item;
    yaml_node_pair_t *pair;
    yaml_document_t document;
    FILE* fp = fopen(cluster_file, "r");

    /* make sure we can open the file and parse it */
    if (!fp) {
        range_request_warn(rr, "%s: %s not readable", cluster, cluster_file);
        return set_new(pool, 0);
    }
    if (!yaml_parser_initialize(&parser)) {
        range_request_warn(rr, "%s: cannot initialize yaml parser", cluster);
        fclose(fp);
        return set_new(pool, 0);
    }
    yaml_parser_set_input_file(&parser, fp);
    if(!yaml_parser_load(&parser, &document)) {
        range_request_warn(rr, "%s: malformatted cluster definition %s",
                           cluster, cluster_file);
        fclose(fp);
        yaml_parser_delete(&parser);
        return set_new(pool, 0);
    }
    fclose(fp);
    rootnode = yaml_document_get_root_node(&document);
    /* make sure it's just a simple dictionary */
    if(rootnode->type != YAML_MAPPING_NODE) {
        range_request_warn(rr, "%s: malformatted cluster definition %s",
                           cluster, cluster_file);
        yaml_document_delete(&document);
        yaml_parser_delete(&parser);
        return set_new(pool, 0);
    }
    /* "sections" refers to cluster sections - %cluster:SECTION
       it's what we're going to return */
    sections = set_new(pool, 0);
    section = cur_section = NULL;
    for(pair = rootnode->data.mapping.pairs.start;
        pair < rootnode->data.mapping.pairs.top;
        pair++) {
        /* these are the keys */
        keynode = yaml_document_get_node(&document, pair->key);
        /* cur_section is the keyname - the WHATEVER in %cluster:WHATEVER */
        cur_section = apr_pstrdup(pool, (char *)(keynode->data.scalar.value));
        valuenode = yaml_document_get_node(&document, pair->value);
        /* if the value is a scalar, that's our answer */
        if(valuenode->type == YAML_SCALAR_NODE) {
            set_add(sections, cur_section,
                    apr_psprintf(pool, "%s", valuenode->data.scalar.value));
        } else if (valuenode->type == YAML_SEQUENCE_NODE) {
            /* otherwise, glue together all the values in the list */
            working_range = apr_array_make(req_pool, 1, sizeof(char*));
            for(item = valuenode->data.sequence.items.start;
                item < valuenode->data.sequence.items.top;
                item++) {
                node = yaml_document_get_node(&document, (int)*item);
                if(node->type != YAML_SCALAR_NODE) {
                    /* only scalars allowed */
                    range_request_warn(rr, "%s: malformed cluster definition %s",
                                       cluster, cluster_file);
                    yaml_document_delete(&document);
                    yaml_parser_delete(&parser);
                    return set_new(pool, 0);
                } else {
                    /* add to the working set */
                    /* include it in () because we're going to comma it together later */
                    *(char**)apr_array_push(working_range) =
                        apr_psprintf(pool, "(%s)",
                                     _substitute_dollars(pool, cluster,
                                                         node->data.scalar.value));
                }
            }
            /* glue the list items together with commas */
            set_add(sections, cur_section,
                    apr_array_pstrcat(pool, working_range, ','));
        }
    }
    /* Add a "KEYS" toplevel key that lists all the other keys */
    /* TODO: make an error if somebody tries to specify KEYS manually? */
    set_add(sections, "KEYS", _join_elements(pool, ',', sections));
    yaml_document_delete(&document);
    yaml_parser_delete(&parser);
    return sections;
}
/*
 * Parse a YAML mapping node into the given pkg, interpreting the pairs
 * according to `attr` (PKG_DEPS, PKG_DIRS, PKG_DIRECTORIES, PKG_FILES,
 * PKG_OPTIONS or PKG_SCRIPTS).  Malformed entries are reported via
 * pkg_emit_error() and skipped.  Always returns EPKG_OK.
 */
static int
parse_mapping(struct pkg *pkg, yaml_node_t *item, yaml_document_t *doc, int attr)
{
	struct sbuf *tmp = NULL;
	yaml_node_pair_t *pair;
	yaml_node_t *key;
	yaml_node_t *val;
	pkg_script_t script_type;

	pair = item->data.mapping.pairs.start;
	while (pair < item->data.mapping.pairs.top) {
		key = yaml_document_get_node(doc, pair->key);
		val = yaml_document_get_node(doc, pair->value);

		if (key->data.scalar.length <= 0) {
			pkg_emit_error("Skipping empty dependency name");
			++pair;
			continue;
		}

		switch (attr) {
		case PKG_DEPS:
			if (val->type != YAML_MAPPING_NODE)
				pkg_emit_error("Skipping malformed depencency %s",
				    key->data.scalar.value);
			else
				pkg_set_deps_from_node(pkg, val, doc,
				    key->data.scalar.value);
			break;
		case PKG_DIRS:
			if (val->type != YAML_MAPPING_NODE)
				pkg_emit_error("Skipping malformed dirs %s",
				    key->data.scalar.value);
			else
				pkg_set_dirs_from_node(pkg, val, doc,
				    key->data.scalar.value);
			break;
		case PKG_DIRECTORIES:
			if (val->type == YAML_SCALAR_NODE &&
			    val->data.scalar.length > 0) {
				urldecode(key->data.scalar.value, &tmp);
				/* leading 'y' means "keep directory on deinstall" */
				if (val->data.scalar.value[0] == 'y')
					pkg_adddir(pkg, sbuf_data(tmp), 1);
				else
					pkg_adddir(pkg, sbuf_data(tmp), 0);
			} else if (val->type == YAML_MAPPING_NODE) {
				pkg_set_dirs_from_node(pkg, val, doc,
				    key->data.scalar.value);
			} else {
				pkg_emit_error("Skipping malformed directories %s",
				    key->data.scalar.value);
			}
			break;
		case PKG_FILES:
			if (val->type == YAML_SCALAR_NODE &&
			    val->data.scalar.length > 0) {
				urldecode(key->data.scalar.value, &tmp);
				/* a 64-char value is the SHA-256 checksum */
				pkg_addfile(pkg, sbuf_data(tmp),
				    val->data.scalar.length == 64 ?
				    val->data.scalar.value : NULL);
			} else if (val->type == YAML_MAPPING_NODE)
				pkg_set_files_from_node(pkg, val, doc,
				    key->data.scalar.value);
			else
				pkg_emit_error("Skipping malformed files %s",
				    key->data.scalar.value);
			break;
		case PKG_OPTIONS:
			if (val->type != YAML_SCALAR_NODE)
				pkg_emit_error("Skipping malformed option %s",
				    key->data.scalar.value);
			else
				pkg_addoption(pkg, key->data.scalar.value,
				    val->data.scalar.value);
			break;
		case PKG_SCRIPTS:
			if (val->type != YAML_SCALAR_NODE) {
				pkg_emit_error("Skipping malformed scripts %s",
				    key->data.scalar.value);
				/* BUG FIX: without this break we fell through
				 * and read val->data.scalar.value from a
				 * non-scalar node below. */
				break;
			}
			if (strcmp(key->data.scalar.value, "pre-install") == 0) {
				script_type = PKG_SCRIPT_PRE_INSTALL;
			} else if (strcmp(key->data.scalar.value, "install") == 0) {
				script_type = PKG_SCRIPT_INSTALL;
			} else if (strcmp(key->data.scalar.value, "post-install") == 0) {
				script_type = PKG_SCRIPT_POST_INSTALL;
			} else if (strcmp(key->data.scalar.value, "pre-upgrade") == 0) {
				script_type = PKG_SCRIPT_PRE_UPGRADE;
			} else if (strcmp(key->data.scalar.value, "upgrade") == 0) {
				script_type = PKG_SCRIPT_UPGRADE;
			} else if (strcmp(key->data.scalar.value, "post-upgrade") == 0) {
				script_type = PKG_SCRIPT_POST_UPGRADE;
			} else if (strcmp(key->data.scalar.value, "pre-deinstall") == 0) {
				script_type = PKG_SCRIPT_PRE_DEINSTALL;
			} else if (strcmp(key->data.scalar.value, "deinstall") == 0) {
				script_type = PKG_SCRIPT_DEINSTALL;
			} else if (strcmp(key->data.scalar.value, "post-deinstall") == 0) {
				script_type = PKG_SCRIPT_POST_DEINSTALL;
			} else {
				pkg_emit_error("Skipping unknown script type: %s",
				    key->data.scalar.value);
				break;
			}
			pkg_addscript(pkg, val->data.scalar.value, script_type);
			break;
		default:
			/* unreachable for the attrs this helper is called with */
			break;
		}
		++pair;
	}

	sbuf_free(tmp);
	return (EPKG_OK);
}
/*
 * Convert a libyaml node into an mruby value.
 *
 * Scalars with plain (unquoted) style may be interpreted as nil,
 * booleans, Fixnum or Float; quoted scalars always become Strings.
 * When use_scalar_aliases is false, only "~" (always nil) and numeric
 * forms are special-cased.  Sequences become Arrays and mappings
 * become Hashes, recursing through node_to_value()/node_to_value_key().
 * A NULL node (empty input) and unknown node types yield nil.
 */
mrb_value node_to_value_with_aliases(mrb_state *mrb,
    yaml_document_t *document, yaml_node_t *node, int use_scalar_aliases)
{
  /* YAML will return a NULL node if the input was empty */
  if (!node)
    return mrb_nil_value();

  switch (node->type) {
    case YAML_SCALAR_NODE:
    {
      const char *str = (char *) node->data.scalar.value;
      char *endptr;
      long long ll;
      double dd;

      /* if node is a YAML_PLAIN_SCALAR_STYLE */
      if (node->data.scalar.style == YAML_PLAIN_SCALAR_STYLE) {
        /* "~" is nil even when scalar aliases are disabled. */
        if (streql("~", str))
          return mrb_nil_value();

        if (use_scalar_aliases) {
          /* Check if it is a null http://yaml.org/type/null.html */
          if (streql("nil", str) || streql("", str)
#if MRUBY_YAML_NULL
              || streql("null", str) || streql("Null", str) || streql("NULL", str)
#endif
              ) {
            return mrb_nil_value();
          /* Check if it is a Boolean http://yaml.org/type/bool.html */
          } else if (
              streql("true", str) || streql("True", str) || streql("TRUE", str)
#if MRUBY_YAML_BOOLEAN_ON
              || streql("on", str) || streql("On", str) || streql("ON", str)
#endif
#if MRUBY_YAML_BOOLEAN_YES
              || streql("yes", str) || streql("Yes", str) || streql("YES", str)
#endif
#if MRUBY_YAML_BOOLEAN_SHORTHAND_YES
              || streql("y", str) || streql("Y", str)
#endif
              ) {
            return mrb_true_value();
          } else if (
              streql("false", str) || streql("False", str) || streql("FALSE", str)
#if MRUBY_YAML_BOOLEAN_OFF
              || streql("off", str) || streql("Off", str) || streql("OFF", str)
#endif
#if MRUBY_YAML_BOOLEAN_NO
              || streql("no", str) || streql("No", str) || streql("NO", str)
#endif
#if MRUBY_YAML_BOOLEAN_SHORTHAND_NO
              || streql("n", str) || streql("N", str)
#endif
              ) {
            return mrb_false_value();
          }
        }

        /* Check if it is a Fixnum */
        /* (base 0: accepts decimal, 0x hex and 0-prefixed octal) */
        ll = strtoll(str, &endptr, 0);
        if (str != endptr && *endptr == '\0')
          return mrb_fixnum_value(ll);

        /* Check if it is a Float */
        dd = strtod(str, &endptr);
        if (str != endptr && *endptr == '\0')
          return mrb_float_value(mrb, dd);
      }

      /* Otherwise it is a String */
      return mrb_str_new(mrb, str, node->data.scalar.length);
    }

    case YAML_SEQUENCE_NODE:
    {
      /* Sequences are arrays in Ruby */
      mrb_value result = mrb_ary_new(mrb);
      yaml_node_item_t *item;
      int ai = mrb_gc_arena_save(mrb);

      for (item = node->data.sequence.items.start;
           item < node->data.sequence.items.top; item++) {
        yaml_node_t *child_node = yaml_document_get_node(document, *item);
        mrb_value child = node_to_value(mrb, document, child_node);
        mrb_ary_push(mrb, result, child);
        /* keep the GC arena from growing once per element */
        mrb_gc_arena_restore(mrb, ai);
      }
      return result;
    }

    case YAML_MAPPING_NODE:
    {
      /* Mappings are hashes in Ruby */
      mrb_value result = mrb_hash_new(mrb);
      yaml_node_t *key_node;
      yaml_node_t *value_node;
      yaml_node_pair_t *pair;
      mrb_value key, value;
      int ai = mrb_gc_arena_save(mrb);

      for (pair = node->data.mapping.pairs.start;
           pair < node->data.mapping.pairs.top; pair++) {
        key_node = yaml_document_get_node(document, pair->key);
        value_node = yaml_document_get_node(document, pair->value);
        /* keys go through a dedicated conversion helper */
        key = node_to_value_key(mrb, document, key_node);
        value = node_to_value(mrb, document, value_node);
        mrb_hash_set(mrb, result, key, value);
        mrb_gc_arena_restore(mrb, ai);
      }
      return result;
    }

    default:
      return mrb_nil_value();
  }
}
/*
 * Parse one spec test mapping, run the test through both the
 * interpreter and the compiled-VM execution paths, print the result,
 * and store the test in the global `tests` list.
 */
void mustache_spec_parse_test(yaml_document_t * document, yaml_node_t * node) {
  if( node->type != YAML_MAPPING_NODE ) {
    return;
  }
  MustacheSpecTest * test = new MustacheSpecTest;
  yaml_node_pair_t * pair;
  for( pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) {
    yaml_node_t * keyNode = yaml_document_get_node(document, pair->key);
    yaml_node_t * valueNode = yaml_document_get_node(document, pair->value);
    // NOTE(review): assumes the key node is a scalar.
    char * keyValue = reinterpret_cast<char *>(keyNode->data.scalar.value);
    if( valueNode->type == YAML_SCALAR_NODE ) {
      char * valueValue = reinterpret_cast<char *>(valueNode->data.scalar.value);
      if( strcmp(keyValue, "name") == 0 ) {
        test->name.assign(valueValue);
      } else if( strcmp(keyValue, "desc") == 0 ) {
        test->desc.assign(valueValue);
      } else if( strcmp(keyValue, "template") == 0 ) {
        test->tmpl.assign(valueValue);
      } else if( strcmp(keyValue, "expected") == 0 ) {
        test->expected.assign(valueValue);
      }
    } else if( valueNode->type == YAML_MAPPING_NODE ) {
      if( strcmp(keyValue, "data") == 0 ) {
        mustache_spec_parse_data(document, valueNode, &test->data);
      } else if( strcmp(keyValue, "partials") == 0 ) {
        mustache_spec_parse_partials(document, valueNode, &test->partials);
      }
    }
  }
  mustache::Mustache mustache;
  mustache::Compiler compiler;
  mustache::VM vm;
  // Tokenize
  mustache::Node root;
  mustache.tokenize(&test->tmpl, &root);
  // Compile
  mustache::Compiler::vectorToBuffer(compiler.compile(&root, &test->partials), &test->compiled, &test->compiled_length);
  // Execute the test
  // (output is cleared each iteration, so only the last render is
  // kept; the execNum loop presumably exists for timing — confirm)
  for( int i = 0; i < execNum; i++ ) {
    test->output.clear();
    mustache.render(&root, &test->data, &test->partials, &test->output);
  }
  // Execute the test in VM mode
  for( int i = 0; i < execNum; i++ ) {
    test->compiled_output.clear();
    vm.execute(test->compiled, test->compiled_length, &test->data, &test->compiled_output);
  }
  // Output result
  test->print();
  tests.push_back(test);
}
/*
 * Parse one spec test mapping, optionally inject lambda fixtures for
 * the ~lambdas.yml suite, render the template execNum times, print
 * the result, and store the test in the global `tests` list.
 */
void mustache_spec_parse_test(yaml_document_t * document, yaml_node_t * node) {
  if( node->type != YAML_MAPPING_NODE ) {
    return;
  }
  MustacheSpecTest * test = new MustacheSpecTest;
  yaml_node_pair_t * pair;
  for( pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair++ ) {
    yaml_node_t * keyNode = yaml_document_get_node(document, pair->key);
    yaml_node_t * valueNode = yaml_document_get_node(document, pair->value);
    char * keyValue = reinterpret_cast<char *>(keyNode->data.scalar.value);
    if( valueNode->type == YAML_SCALAR_NODE ) {
      char * valueValue = reinterpret_cast<char *>(valueNode->data.scalar.value);
      if( strcmp(keyValue, "name") == 0 ) {
        test->name.assign(valueValue);
      } else if( strcmp(keyValue, "desc") == 0 ) {
        test->desc.assign(valueValue);
      } else if( strcmp(keyValue, "template") == 0 ) {
        test->tmpl.assign(valueValue);
      } else if( strcmp(keyValue, "expected") == 0 ) {
        test->expected.assign(valueValue);
      }
    } else if( valueNode->type == YAML_MAPPING_NODE ) {
      if( strcmp(keyValue, "data") == 0 ) {
        mustache_spec_parse_data(document, valueNode, &test->data);
      } else if( strcmp(keyValue, "partials") == 0 ) {
        mustache_spec_parse_partials(document, valueNode, &test->partials);
      }
    }
  }
  mustache::Mustache mustache;
  bool isLambdaSuite = 0 == strcmp(currentSuite, "~lambdas.yml");
  // Load lambdas?
  if( isLambdaSuite ) {
    load_lambdas_into_test_data(&test->data, test->name);
  }
  // This test isn't supported yet
  if (test->name == "Implicit Iterator - Array") {
    // BUG FIX: `test` was leaked on this early return — it is never
    // pushed onto `tests`, so we still own it and must free it.
    delete test;
    return;
  }
  // Tokenize
  mustache::Node root;
  mustache.tokenize(&test->tmpl, &root);
  // Execute the test
  for( int i = 0; i < execNum; i++ ) {
    test->output.clear();
    mustache.render(&root, &test->data, &test->partials, &test->output);
  }
  // Output result
  test->print();
  tests.push_back(test);
}
/* Thin convenience wrapper: look up the node with id `index` in `doc`. */
static yaml_node_t *
get(int const index, yaml_document_t *doc)
{
	return (yaml_document_get_node(doc, index));
}
void load_config() { FILE * file; yaml_parser_t parser; yaml_document_t document; yaml_node_t * root; yaml_node_t * map; yaml_node_pair_t * pair; yaml_node_t * key, * value; char name[256]; /* Look for velox.yaml in the config directories */ file = open_config_file("velox.yaml"); /* Nothing to do if there is no configuration file */ if (file == NULL) return; yaml_parser_initialize(&parser); yaml_parser_set_input_file(&parser, file); if (!yaml_parser_load(&parser, &document)) { fprintf(stderr, "Error parsing config file\n"); goto cleanup; } /* The root node should be a mapping */ map = yaml_document_get_root_node(&document); assert(map->type == YAML_MAPPING_NODE); /* For each key/value pair in the root mapping */ for (pair = map->data.mapping.pairs.start; pair < map->data.mapping.pairs.top; ++pair) { key = yaml_document_get_node(&document, pair->key); value = yaml_document_get_node(&document, pair->value); assert(key->type == YAML_SCALAR_NODE); /* The module section */ if (strcmp((const char *) key->data.scalar.value, "modules") == 0) { yaml_node_item_t * module_item; yaml_node_t * node; assert(value->type == YAML_SEQUENCE_NODE); printf("\n** Loading Modules **\n"); /* For each module */ for (module_item = value->data.sequence.items.start; module_item < value->data.sequence.items.top; ++module_item) { node = yaml_document_get_node(&document, *module_item); load_module((const char *) node->data.scalar.value); } } /* The border_width property */ else if (strcmp((const char *) key->data.scalar.value, "border_width") == 0) { assert(value->type == YAML_SCALAR_NODE); border_width = strtoul((const char *) value->data.scalar.value, NULL, 10); } } yaml_document_delete(&document); printf("\n** Configuring Modules **\n"); /* While we still have documents to parse */ while (yaml_parser_load(&parser, &document)) { /* If the document contains no root node, we are at the end */ if (yaml_document_get_root_node(&document) == NULL) { yaml_document_delete(&document); break; } 
sscanf((const char *) yaml_document_get_root_node(&document)->tag, "!velox:%s", name); /* Configure the specified module with this YAML document */ configure_module(name, &document); yaml_document_delete(&document); } cleanup: yaml_parser_delete(&parser); fclose(file); }
/*
 * Walk a YAML sequence node and add each item to `pkg` according to `attr`
 * (categories, licenses, users, groups, dirs, or shared libraries).
 * Malformed items are reported via pkg_emit_error() and skipped; mapping
 * items are delegated to parse_mapping() where the attribute supports them.
 *
 * Always returns EPKG_OK.
 */
static int
parse_sequence(struct pkg * pkg, yaml_node_t *node, yaml_document_t *doc,
    int attr)
{
	yaml_node_item_t *item;
	yaml_node_t *val;

	item = node->data.sequence.items.start;
	while (item < node->data.sequence.items.top) {
		val = yaml_document_get_node(doc, *item);
		switch (attr) {
		case PKG_CATEGORIES:
			if (!is_valid_yaml_scalar(val))
				pkg_emit_error("Skipping malformed category");
			else
				pkg_addcategory(pkg, val->data.scalar.value);
			break;
		case PKG_LICENSES:
			if (!is_valid_yaml_scalar(val))
				pkg_emit_error("Skipping malformed license");
			else
				pkg_addlicense(pkg, val->data.scalar.value);
			break;
		case PKG_USERS:
			if (is_valid_yaml_scalar(val))
				pkg_adduser(pkg, val->data.scalar.value);
			else if (val->type == YAML_MAPPING_NODE)
				parse_mapping(pkg, val, doc, attr);
			else
				/* FIX: message said "license" (copy/paste). */
				pkg_emit_error("Skipping malformed users");
			break;
		case PKG_GROUPS:
			if (is_valid_yaml_scalar(val))
				pkg_addgroup(pkg, val->data.scalar.value);
			else if (val->type == YAML_MAPPING_NODE)
				parse_mapping(pkg, val, doc, attr);
			else
				/* FIX: message said "license" (copy/paste). */
				pkg_emit_error("Skipping malformed groups");
			break;
		case PKG_DIRS:
			if (is_valid_yaml_scalar(val))
				pkg_adddir(pkg, val->data.scalar.value, 1, false);
			else if (val->type == YAML_MAPPING_NODE)
				parse_mapping(pkg, val, doc, attr);
			else
				pkg_emit_error("Skipping malformed dirs");
			break;
		case PKG_SHLIBS_REQUIRED:
			if (!is_valid_yaml_scalar(val))
				pkg_emit_error("Skipping malformed required shared library");
			else
				pkg_addshlib_required(pkg, val->data.scalar.value);
			break;
		case PKG_SHLIBS_PROVIDED:
			if (!is_valid_yaml_scalar(val))
				pkg_emit_error("Skipping malformed provided shared library");
			else
				pkg_addshlib_provided(pkg, val->data.scalar.value);
			break;
		}
		++item;
	}
	return (EPKG_OK);
}
/*
 * Walk a YAML mapping node and add each key/value pair to `pkg` according to
 * `attr` (deps, dirs, users, groups, directories, files, options, scripts).
 * Malformed entries are reported via pkg_emit_error() and skipped.
 *
 * Always returns EPKG_OK.
 */
static int
parse_mapping(struct pkg *pkg, yaml_node_t *item, yaml_document_t *doc,
    int attr)
{
	struct sbuf *tmp = NULL;
	yaml_node_pair_t *pair;
	yaml_node_t *key;
	yaml_node_t *val;
	pkg_script script_type;

	pair = item->data.mapping.pairs.start;
	while (pair < item->data.mapping.pairs.top) {
		key = yaml_document_get_node(doc, pair->key);
		val = yaml_document_get_node(doc, pair->value);

		if (key->data.scalar.length <= 0) {
			pkg_emit_error("Skipping empty dependency name");
			++pair;
			continue;
		}

		switch (attr) {
		case PKG_DEPS:
			if (val->type != YAML_MAPPING_NODE)
				pkg_emit_error("Skipping malformed dependency %s",
				    key->data.scalar.value);
			else
				pkg_set_deps_from_node(pkg, val, doc,
				    key->data.scalar.value);
			break;
		case PKG_DIRS:
			if (val->type != YAML_MAPPING_NODE)
				pkg_emit_error("Skipping malformed dirs %s",
				    key->data.scalar.value);
			else
				pkg_set_dirs_from_node(pkg, val, doc,
				    key->data.scalar.value);
			break;
		case PKG_USERS:
			if (is_valid_yaml_scalar(val))
				pkg_adduid(pkg, key->data.scalar.value,
				    val->data.scalar.value);
			else
				pkg_emit_error("Skipping malformed users %s",
				    key->data.scalar.value);
			break;
		case PKG_GROUPS:
			if (is_valid_yaml_scalar(val))
				pkg_addgid(pkg, key->data.scalar.value,
				    val->data.scalar.value);
			else
				pkg_emit_error("Skipping malformed groups %s",
				    key->data.scalar.value);
			break;
		case PKG_DIRECTORIES:
			if (is_valid_yaml_scalar(val)) {
				urldecode(key->data.scalar.value, &tmp);
				/* Leading 'y' marks a try-directory. */
				if (val->data.scalar.value[0] == 'y')
					pkg_adddir(pkg, sbuf_get(tmp), 1, false);
				else
					pkg_adddir(pkg, sbuf_get(tmp), 0, false);
			} else if (val->type == YAML_MAPPING_NODE) {
				pkg_set_dirs_from_node(pkg, val, doc,
				    key->data.scalar.value);
			} else {
				pkg_emit_error("Skipping malformed directories %s",
				    key->data.scalar.value);
			}
			break;
		case PKG_FILES:
			if (is_valid_yaml_scalar(val)) {
				const char *pkg_sum = NULL;

				/* A 64-char scalar is a SHA-256 checksum. */
				if (val->data.scalar.length == 64)
					pkg_sum = val->data.scalar.value;
				urldecode(key->data.scalar.value, &tmp);
				pkg_addfile(pkg, sbuf_get(tmp), pkg_sum, false);
			} else if (val->type == YAML_MAPPING_NODE)
				pkg_set_files_from_node(pkg, val, doc,
				    key->data.scalar.value);
			else
				pkg_emit_error("Skipping malformed files %s",
				    key->data.scalar.value);
			break;
		case PKG_OPTIONS:
			if (val->type != YAML_SCALAR_NODE)
				pkg_emit_error("Skipping malformed option %s",
				    key->data.scalar.value);
			else
				pkg_addoption(pkg, key->data.scalar.value,
				    val->data.scalar.value);
			break;
		case PKG_SCRIPTS:
			if (val->type != YAML_SCALAR_NODE) {
				pkg_emit_error("Skipping malformed scripts %s",
				    key->data.scalar.value);
				/* FIX: without this break the code below
				 * dereferenced val->data.scalar.value on a
				 * non-scalar node. */
				break;
			}
			script_type = script_type_str(key->data.scalar.value);
			if (script_type == PKG_SCRIPT_UNKNOWN) {
				pkg_emit_error("Skipping unknown script "
				    "type: %s", key->data.scalar.value);
				break;
			}
			urldecode(val->data.scalar.value, &tmp);
			pkg_addscript(pkg, sbuf_get(tmp), script_type);
			break;
		}
		++pair;
	}
	sbuf_free(tmp);
	return (EPKG_OK);
}