/*
 * Assert element-wise equality of two row-major width*height double arrays.
 * Prints each comparison before asserting so a failure shows the offending pair.
 *
 * Bug fix: the original read each element into an `int`, truncating the
 * fractional part before the epsilon comparison (`dequals`) ran — e.g.
 * 1.2 and 1.7 would both truncate to 1 and wrongly compare equal.
 * Elements are now kept as doubles and printed with %f.
 */
void array_assert_equal(const double* arr1, const double* arr2, const int width, const int height){
    for (int i = 0; i < height; i++){
        for (int j = 0; j < width; j++){
            double a = get_element_at(arr1, j, i, width);
            double b = get_element_at(arr2, j, i, width);
            printf("%f == %f?\n", a, b);
            assert(dequals(a, b));
            printf(" OK\n");
        }
    }
}
/*
 * Free a selector and everything it owns: the optional id (and its value
 * string), every attribute selector, every filter selector, the two lists
 * that held them, and finally the selector itself.
 */
void destroy_selector(selector* s){
    /* The id and its value string are both optional. */
    if (s->id != NULL) {
        if (s->id->value != NULL) {
            free(s->id->value);
        }
        free(s->id);
    }

    /* Destroy each attribute selector, then the list that held them. */
    int k;
    for (k = 0; k < s->attrs->count; k++) {
        destroy_attr_selector((attr_selector*) get_element_at(s->attrs, k));
    }
    destroy_generic_list(s->attrs);

    /* Same pattern for the filter selectors. */
    for (k = 0; k < s->filters->count; k++) {
        destroy_filter_selector((filter_selector*) get_element_at(s->filters, k));
    }
    destroy_generic_list(s->filters);

    free(s);
}
/*
 * Print a row-major width*height double array, one matrix row per output
 * line, each element formatted as "% 4f," (space-flagged fixed-point).
 */
void print_array(const double* arr, const int width, const int height){
    for (int row = 0; row < height; row++){
        for (int col = 0; col < width; col++){
            printf("% 4f,", get_element_at(arr, col, row, width));
        }
        printf("\n");
    }
}
/**
 * Splice the outputs of one cascade stage (transducer) into the token list.
 *
 * Reads the concordance file produced for `text`, and for every match
 * replaces the matched token span in `list` with the tokenized replacement
 * label, tagging the new tokens with `transducer_id`.
 *
 * @param text                              snt text whose concordance is read
 * @param list                              head of the cascade token list (returned unchanged)
 * @param transducer_id                     id of the stage whose output is being added
 * @param alphabet_name                     alphabet file used to tokenize replacement labels
 * @param mask_encoding_compatibility_input encoding mask for reading the concordance
 * @return the (same) head of the token list
 *
 * NOTE(review): assumes concordance matches are sorted by token position —
 * the incremental-search optimization below depends on it; confirm upstream.
 */
cassys_tokens_list *add_replaced_text( const char *text, cassys_tokens_list *list,
    int transducer_id, const char *alphabet_name,int mask_encoding_compatibility_input) {

    Alphabet *alphabet = load_alphabet(alphabet_name);

    struct snt_files *snt_text_files = new_snt_files(text);

    // Queue of matches (locate_pos) read from the stage's concord.ind file.
    struct fifo *stage_concord = read_concord_file(snt_text_files->concord_ind,
        mask_encoding_compatibility_input);

    // performance enhancement
    // Remember where the previous match landed so each search resumes there
    // instead of rescanning the list from its head.
    cassys_tokens_list *current_list_position = list;
    long current_token_position = 0;

    int nb_sentence = 0;
    while (!is_empty(stage_concord)) {
        nb_sentence++;

        locate_pos *l = (locate_pos*) take_ptr(stage_concord);

        // Tokenize the replacement text of this match.
        struct list_ustring *new_sentence_lu = cassys_tokenize_word_by_word(l->label,
            alphabet);

        // Wrap the tokens in a list node chain owned by this transducer stage.
        cassys_tokens_list *new_sentence_ctl =
            new_list(new_sentence_lu, transducer_id);

        // performance enhancement :
        // Since matches are sorted, we begin the search from the last known position in the list.
        // We have to substract from the text position the current token position.
        cassys_tokens_list *list_position = get_element_at(current_list_position,
            transducer_id - 1, l->token_start_offset - current_token_position);

        // Inclusive span: end - start + 1 tokens are being replaced.
        int replaced_sentence_length = l->token_end_offset - l->token_start_offset+1;
        int new_sentence_length = length(new_sentence_lu);

        // Attach the replacement chain at the match position.
        add_output(list_position, new_sentence_ctl, transducer_id,
            replaced_sentence_length, new_sentence_length-1);

        // performance enhancement
        // Next search starts from this match's position.
        current_list_position = list_position;
        current_token_position = l->token_start_offset;

        free(l->label);
        free(l);
        // The token ustrings were copied into the list; release the temporary.
        free_list_ustring(new_sentence_lu);
    }

    free_fifo(stage_concord);
    free_snt_files(snt_text_files);
    free_alphabet(alphabet);

    return list;
}
int main(int argc, char** argv){ int i; char *filename, *query_string, *node; if(argc == 2){ filename = "-"; } else if(argc != 3){ printf("usage: %s \"query\" \"xml_file\"\n", argv[0]); return 0; } else{ filename = argv[2]; } query_string = argv[1]; doc* document = parse_xml(filename); if(document == NULL) exit(1); register_extended_operators(); list* result = query(query_string, document->root); if(result == NULL) exit(1); for(i=0; i < result->count; i++){ dom_node* t = (dom_node*)get_element_at(result, i); node = node_to_string(t, XML); printf("%s", node); free(node); } if(result) destroy_generic_list(result); if(document != NULL) destroy_dom_tree(document); destroy_dictionary(); destroy_custom_filters(); destroy_custom_operators(); return 0; }
// Read-only element access: A(i, j) simply forwards to get_element_at.
double SparseMatrix::operator()(int i, int j) const {
    const double value = get_element_at(i, j);
    return value;
}