// Exercise excerpt generation end-to-end: build a two-field index where
// "content" matches score higher than "alt" matches (via a low field boost),
// then run the Raw_Excerpt / Highlight_Excerpt / Create_Excerpt sub-tests
// against a phrase-plus-term query.
static void
test_highlighting(TestBatchRunner *runner) {
    Schema *schema = Schema_new();
    StandardTokenizer *tokenizer = StandardTokenizer_new();

    // "content": highlightable full-text field (term vectors stored).
    FullTextType *plain_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(plain_type, true);

    // "alt": also highlightable, but boosted down to 0.1 so a match in
    // "alt" scores lower than the same match in "content".
    FullTextType *dunked_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(dunked_type, true);
    FullTextType_Set_Boost(dunked_type, 0.1f);

    String *content = (String*)SSTR_WRAP_UTF8("content", 7);
    Schema_Spec_Field(schema, content, (FieldType*)plain_type);
    String *alt = (String*)SSTR_WRAP_UTF8("alt", 3);
    Schema_Spec_Field(schema, alt, (FieldType*)dunked_type);
    // Spec_Field copies/retains what it needs; release our refs.
    DECREF(plain_type);
    DECREF(dunked_type);
    DECREF(tokenizer);

    RAMFolder *folder = RAMFolder_new(NULL);
    Indexer *indexer = Indexer_new(schema, (Obj*)folder, NULL, 0);

    // Doc 1: the canonical test string in "content".
    Doc *doc = Doc_new(NULL, 0);
    String *string = (String *)SSTR_WRAP_UTF8(TEST_STRING, TEST_STRING_LEN);
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    // Doc 2: decoy text that should not match the query.
    doc = Doc_new(NULL, 0);
    string = (String *)SSTR_WRAP_UTF8("\"I see,\" said the blind man.", 28);
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    // Doc 3: weak "content" match, with the full test string in the
    // down-boosted "alt" field padded so it scores lower than doc 1.
    doc = Doc_new(NULL, 0);
    string = (String *)SSTR_WRAP_UTF8("x but not why or 2ee", 20);
    Doc_Store(doc, content, (Obj*)string);
    string = (String *)SSTR_WRAP_UTF8(
        TEST_STRING " and extra stuff so it scores lower",
        TEST_STRING_LEN + 35);
    Doc_Store(doc, alt, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    Indexer_Commit(indexer);
    DECREF(indexer);

    Searcher *searcher = (Searcher*)IxSearcher_new((Obj*)folder);
    // Query string is 12 bytes of "\"x y z\" AND " plus PHI; the byte
    // count of 14 assumes PHI expands to a 2-byte UTF-8 literal —
    // NOTE(review): confirm against the PHI macro definition.
    Obj *query = (Obj*)SSTR_WRAP_UTF8("\"x y z\" AND " PHI, 14);
    Hits *hits = Searcher_Hits(searcher, query, 0, 10, NULL);

    // Delegate the actual assertions to the focused sub-tests.
    test_Raw_Excerpt(runner, searcher, query);
    test_Highlight_Excerpt(runner, searcher, query);
    test_Create_Excerpt(runner, searcher, query, hits);

    DECREF(hits);
    DECREF(searcher);
    DECREF(folder);
    DECREF(schema);
}
static Schema* S_create_schema() { // Create a new schema. Schema *schema = Schema_new(); // Create an analyzer. String *language = Str_newf("en"); EasyAnalyzer *analyzer = EasyAnalyzer_new(language); // Specify fields. { String *field_str = Str_newf("title"); FullTextType *type = FullTextType_new((Analyzer*)analyzer); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(type); DECREF(field_str); } { String *field_str = Str_newf("content"); FullTextType *type = FullTextType_new((Analyzer*)analyzer); FullTextType_Set_Highlightable(type, true); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(type); DECREF(field_str); } { String *field_str = Str_newf("url"); StringType *type = StringType_new(); StringType_Set_Indexed(type, false); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(type); DECREF(field_str); } { String *field_str = Str_newf("category"); StringType *type = StringType_new(); StringType_Set_Stored(type, false); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(type); DECREF(field_str); } DECREF(analyzer); DECREF(language); return schema; }
static Schema* S_create_schema() { // Create a new schema. Schema *schema = Schema_new(); // Create an analyzer. String *language = Str_newf("en"); EasyAnalyzer *analyzer = EasyAnalyzer_new(language); // Specify fields. FullTextType *type = FullTextType_new((Analyzer*)analyzer); { String *field_str = Str_newf("title"); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(field_str); } { String *field_str = Str_newf("content"); Schema_Spec_Field(schema, field_str, (FieldType*)type); DECREF(field_str); } DECREF(language); DECREF(analyzer); DECREF(type); return schema; }
// Regression test for excerpt selection: index one long document of filler
// sentences containing the rare terms NNN and MMM, then verify the
// sentence-boundary heuristic still places at least one whole query term
// inside the 200-char excerpt rather than chopping it off at the edge.
static void
test_hl_selection(TestBatchRunner *runner) {
    Schema *schema = Schema_new();
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    FullTextType *plain_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(plain_type, true);
    String *content = (String*)SSTR_WRAP_UTF8("content", 7);
    Schema_Spec_Field(schema, content, (FieldType*)plain_type);
    DECREF(plain_type);
    DECREF(tokenizer);

    RAMFolder *folder = RAMFolder_new(NULL);
    Indexer *indexer = Indexer_new(schema, (Obj*)folder, NULL, 0);

    // Filler prose with NNN buried at the end of an over-long sentence and
    // MMM inside a normal-length one, so the excerpt selector must work to
    // include either term intact.
    static char test_string[] =
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla NNN bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla MMM bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. ";

    Doc *doc = Doc_new(NULL, 0);
    String *string
        = (String *)SSTR_WRAP_UTF8(test_string, strlen(test_string));
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);
    Indexer_Commit(indexer);
    DECREF(indexer);

    Searcher *searcher = (Searcher*)IxSearcher_new((Obj*)folder);
    Obj *query = (Obj*)SSTR_WRAP_UTF8("NNN MMM", 7);
    // 200 = requested excerpt length in characters.
    Highlighter *highlighter = Highlighter_new(searcher, query, content, 200);
    Hits *hits = Searcher_Hits(searcher, query, 0, 10, NULL);
    HitDoc *hit = Hits_Next(hits);
    String *excerpt = Highlighter_Create_Excerpt(highlighter, hit);
    String *mmm = (String*)SSTR_WRAP_UTF8("MMM", 3);
    String *nnn = (String*)SSTR_WRAP_UTF8("NNN", 3);
    // Either term appearing whole in the excerpt proves the boundary
    // algorithm did not truncate a matched term.
    TEST_TRUE(runner,
              Str_Find(excerpt, mmm) >= 0 || Str_Find(excerpt, nnn) >= 0,
              "Sentence boundary algo doesn't chop terms");

    DECREF(excerpt);
    DECREF(hit);
    DECREF(hits);
    DECREF(highlighter);
    DECREF(searcher);
    DECREF(folder);
    DECREF(schema);
}
// Initialize a TestSchema: a Schema with a single highlightable full-text
// field. Returns `self` to allow the usual new()/init() chaining.
TestSchema*
TestSchema_init(TestSchema *self) {
    Tokenizer *tokenizer = Tokenizer_new(NULL);
    FullTextType *type = FullTextType_new((Analyzer*)tokenizer);
    // Run the parent class initializer before adding fields.
    Schema_init((Schema*)self);
    FullTextType_Set_Highlightable(type, true);
    // NOTE(review): `&content` — presumably a file-scope static String
    // holding the field name, and Spec_Field accepts TestSchema* via the
    // generated method macro without a cast; confirm both against the
    // declarations above this chunk.
    Schema_Spec_Field(self, &content, (FieldType*)type);
    DECREF(type);
    DECREF(tokenizer);
    return self;
}
// Verify FullTextType serialization round-trips (Dump/Load and
// Dump_For_Schema/Load) and that Equals() distinguishes every relevant
// property: analyzer, boost, indexed, stored, and highlightable.
static void
test_Dump_Load_and_Equals(TestBatchRunner *runner) {
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    Normalizer *normalizer = Normalizer_new(NULL, true, false);
    // Baseline type, plus one variant per property Equals() must detect.
    FullTextType *type          = FullTextType_new((Analyzer*)tokenizer);
    FullTextType *other         = FullTextType_new((Analyzer*)normalizer);
    FullTextType *boost_differs = FullTextType_new((Analyzer*)tokenizer);
    FullTextType *not_indexed   = FullTextType_new((Analyzer*)tokenizer);
    FullTextType *not_stored    = FullTextType_new((Analyzer*)tokenizer);
    FullTextType *highlightable = FullTextType_new((Analyzer*)tokenizer);

    // Dump the baseline both ways before mutating the variants.
    Obj *dump = (Obj*)FullTextType_Dump(type);
    Obj *clone = Freezer_load(dump);
    Obj *another_dump = (Obj*)FullTextType_Dump_For_Schema(type);

    FullTextType_Set_Boost(boost_differs, 1.5);
    FullTextType_Set_Indexed(not_indexed, false);
    FullTextType_Set_Stored(not_stored, false);
    FullTextType_Set_Highlightable(highlightable, true);

    // Dump_For_Schema omits the analyzer; re-attach it so Load() works.
    // (This step is normally performed by Schema_Load() internally.)
    // INCREF transfers a fresh reference into the Hash, which owns it.
    Hash_Store_Utf8((Hash*)another_dump, "analyzer", 8, INCREF(tokenizer));
    FullTextType *another_clone = FullTextType_Load(type, another_dump);

    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)boost_differs),
               "Equals() false with different boost");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)other),
               "Equals() false with different Analyzer");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_indexed),
               "Equals() false with indexed => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_stored),
               "Equals() false with stored => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)highlightable),
               "Equals() false with highlightable => true");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)clone),
              "Dump => Load round trip");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)another_clone),
              "Dump_For_Schema => Load round trip");

    DECREF(another_clone);
    DECREF(dump);
    DECREF(clone);
    DECREF(another_dump);
    DECREF(highlightable);
    DECREF(not_stored);
    DECREF(not_indexed);
    DECREF(boost_differs);
    DECREF(other);
    DECREF(type);
    DECREF(normalizer);
    DECREF(tokenizer);
}
// Check that FullTextType_Compare_Values() orders string values
// lexically: less-than, greater-than, and equality all behave.
static void
test_Compare_Values(TestBatchRunner *runner) {
    StandardTokenizer *tok = StandardTokenizer_new();
    FullTextType *ft_type = FullTextType_new((Analyzer*)tok);

    StackString *lesser  = SSTR_WRAP_UTF8("a", 1);
    StackString *greater = SSTR_WRAP_UTF8("b", 1);

    TEST_TRUE(runner,
              FullTextType_Compare_Values(ft_type, (Obj*)lesser,
                                          (Obj*)greater) < 0,
              "a less than b");
    TEST_TRUE(runner,
              FullTextType_Compare_Values(ft_type, (Obj*)greater,
                                          (Obj*)lesser) > 0,
              "b greater than a");
    TEST_TRUE(runner,
              FullTextType_Compare_Values(ft_type, (Obj*)greater,
                                          (Obj*)greater) == 0,
              "b equals b");

    DECREF(ft_type);
    DECREF(tok);
}
// Build and return an in-memory index with two fields holding identical
// content: "plain" (whitespace tokenization only) and "fancy" (word
// tokenization plus a stoplist that drops "x"), so tests can compare
// behavior across analyzers. Caller owns the returned Folder reference.
static Folder*
build_index() {
    // Plain type.
    String *pattern = Str_newf("\\S+");
    RegexTokenizer *tokenizer = RegexTokenizer_new(pattern);
    FullTextType *plain = FullTextType_new((Analyzer*)tokenizer);

    // Fancy type.
    String *word_pattern = Str_newf("\\w+");
    RegexTokenizer *word_tokenizer = RegexTokenizer_new(word_pattern);
    Hash *stop_list = Hash_new(0);
    // Stoplist contains the single term "x".
    Hash_Store_Utf8(stop_list, "x", 1, (Obj*)CFISH_TRUE);
    SnowballStopFilter *stop_filter = SnowStop_new(NULL, stop_list);
    Vector *analyzers = Vec_new(0);
    // NOTE(review): Vec_Push appears to take over these references —
    // word_tokenizer and stop_filter are never DECREF'd here and are
    // presumably released when `analyzers` is; confirm against the
    // Clownfish Vector contract.
    Vec_Push(analyzers, (Obj*)word_tokenizer);
    Vec_Push(analyzers, (Obj*)stop_filter);
    PolyAnalyzer *fancy_analyzer = PolyAnalyzer_new(NULL, analyzers);
    FullTextType *fancy = FullTextType_new((Analyzer*)fancy_analyzer);

    // Schema.
    Schema *schema = Schema_new();
    String *plain_str = Str_newf("plain");
    String *fancy_str = Str_newf("fancy");
    Schema_Spec_Field(schema, plain_str, (FieldType*)plain);
    Schema_Spec_Field(schema, fancy_str, (FieldType*)fancy);

    // Indexer.
    RAMFolder *folder = RAMFolder_new(NULL);
    Indexer *indexer = Indexer_new(schema, (Obj*)folder, NULL, 0);

    // Index documents: each doc stores the same text in both fields.
    Vector *doc_set = TestUtils_doc_set();
    for (uint32_t i = 0; i < Vec_Get_Size(doc_set); ++i) {
        String *content_string = (String*)Vec_Fetch(doc_set, i);
        Doc *doc = Doc_new(NULL, 0);
        Doc_Store(doc, plain_str, (Obj*)content_string);
        Doc_Store(doc, fancy_str, (Obj*)content_string);
        Indexer_Add_Doc(indexer, doc, 1.0);
        DECREF(doc);
    }
    Indexer_Commit(indexer);

    // Clean up everything except `folder`, which we hand to the caller.
    DECREF(doc_set);
    DECREF(indexer);
    DECREF(fancy_str);
    DECREF(plain_str);
    DECREF(schema);
    DECREF(fancy);
    DECREF(fancy_analyzer);
    DECREF(analyzers);
    DECREF(stop_list);
    DECREF(word_pattern);
    DECREF(plain);
    DECREF(tokenizer);
    DECREF(pattern);
    return (Folder*)folder;
}