Example #1
static void
test_highlighting(TestBatchRunner *runner) {
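    // Build a schema with two highlightable full-text fields; the "alt" field
    // gets a much lower boost, so matches there score beneath "content" matches.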
    Schema *schema = Schema_new();
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    FullTextType *plain_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(plain_type, true);
    FullTextType *dunked_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(dunked_type, true);
    FullTextType_Set_Boost(dunked_type, 0.1f);
    String *content = (String*)SSTR_WRAP_UTF8("content", 7);
    Schema_Spec_Field(schema, content, (FieldType*)plain_type);
    String *alt = (String*)SSTR_WRAP_UTF8("alt", 3);
    Schema_Spec_Field(schema, alt, (FieldType*)dunked_type);
    DECREF(plain_type);
    DECREF(dunked_type);
    DECREF(tokenizer);

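    // Index three documents into a transient RAM folder.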
    RAMFolder *folder = RAMFolder_new(NULL);
    Indexer *indexer = Indexer_new(schema, (Obj*)folder, NULL, 0);

    Doc *doc = Doc_new(NULL, 0);
    String *string = (String *)SSTR_WRAP_UTF8(TEST_STRING, TEST_STRING_LEN);
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    doc = Doc_new(NULL, 0);
    string = (String *)SSTR_WRAP_UTF8("\"I see,\" said the blind man.", 28);
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    doc = Doc_new(NULL, 0);
    string = (String *)SSTR_WRAP_UTF8("x but not why or 2ee", 20);
    Doc_Store(doc, content, (Obj*)string);
    string = (String *)SSTR_WRAP_UTF8(TEST_STRING
                                     " and extra stuff so it scores lower",
                                     TEST_STRING_LEN + 35);
    Doc_Store(doc, alt, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    Indexer_Commit(indexer);
    DECREF(indexer);

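    // Query for the phrase "x y z" AND the PHI term, then run the excerpt
    // helpers (Raw_Excerpt, Highlight_Excerpt, Create_Excerpt) against the hits.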
    Searcher *searcher = (Searcher*)IxSearcher_new((Obj*)folder);
    Obj *query = (Obj*)SSTR_WRAP_UTF8("\"x y z\" AND " PHI, 14);
    Hits *hits = Searcher_Hits(searcher, query, 0, 10, NULL);

    test_Raw_Excerpt(runner, searcher, query);
    test_Highlight_Excerpt(runner, searcher, query);
    test_Create_Excerpt(runner, searcher, query, hits);

    DECREF(hits);
    DECREF(searcher);
    DECREF(folder);
    DECREF(schema);
}
Example #2
static void
test_hl_selection(TestBatchRunner *runner) {
    Schema *schema = Schema_new();
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    FullTextType *plain_type = FullTextType_new((Analyzer*)tokenizer);
    FullTextType_Set_Highlightable(plain_type, true);
    String *content = (String*)SSTR_WRAP_UTF8("content", 7);
    Schema_Spec_Field(schema, content, (FieldType*)plain_type);
    DECREF(plain_type);
    DECREF(tokenizer);

    RAMFolder *folder = RAMFolder_new(NULL);
    Indexer *indexer = Indexer_new(schema, (Obj*)folder, NULL, 0);

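    // Filler text in which the sentinel terms NNN and MMM are buried
    // mid-sentence, far apart, to exercise excerpt selection.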
    static char test_string[] =
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla NNN bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla MMM bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. "
        "bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla bla. ";
    Doc *doc = Doc_new(NULL, 0);
    String *string = (String *)SSTR_WRAP_UTF8(test_string, strlen(test_string));
    Doc_Store(doc, content, (Obj*)string);
    Indexer_Add_Doc(indexer, doc, 1.0f);
    DECREF(doc);

    Indexer_Commit(indexer);
    DECREF(indexer);

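    // The excerpt for a "NNN MMM" query must contain at least one sentinel
    // term intact; the sentence-boundary logic must not chop matched terms.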
    Searcher *searcher = (Searcher*)IxSearcher_new((Obj*)folder);
    Obj *query = (Obj*)SSTR_WRAP_UTF8("NNN MMM", 7);
    Highlighter *highlighter = Highlighter_new(searcher, query, content, 200);
    Hits *hits = Searcher_Hits(searcher, query, 0, 10, NULL);
    HitDoc *hit = Hits_Next(hits);
    String *excerpt = Highlighter_Create_Excerpt(highlighter, hit);
    String *mmm = (String*)SSTR_WRAP_UTF8("MMM", 3);
    String *nnn = (String*)SSTR_WRAP_UTF8("NNN", 3);
    TEST_TRUE(runner, Str_Find(excerpt, mmm) >= 0 || Str_Find(excerpt, nnn) >= 0,
              "Sentence boundary algo doesn't chop terms");

    DECREF(excerpt);
    DECREF(hit);
    DECREF(hits);
    DECREF(highlighter);
    DECREF(searcher);
    DECREF(folder);
    DECREF(schema);
}
Example #3
EasyAnalyzer*
EasyAnalyzer_init(EasyAnalyzer *self, const CharBuf *language) {
    Analyzer_init((Analyzer*)self);
    EasyAnalyzerIVARS *const ivars = EasyAnalyzer_IVARS(self);
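    // EasyAnalyzer chains three steps: standard tokenization, Unicode
    // normalization with case folding, and Snowball stemming for the language.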
    ivars->language   = CB_Clone(language);
    ivars->tokenizer  = StandardTokenizer_new();
    ivars->normalizer = Normalizer_new(NULL, true, false);
    ivars->stemmer    = SnowStemmer_new(language);
    return self;
}
Example #4
static void
test_Dump_Load_and_Equals(TestBatchRunner *runner) {
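    // Set up FullTextType variants that each differ from 'type' in a single
    // property, plus serialized dumps of 'type' for round-trip checks.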
    StandardTokenizer *tokenizer     = StandardTokenizer_new();
    Normalizer        *normalizer    = Normalizer_new(NULL, true, false);
    FullTextType      *type          = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *other         = FullTextType_new((Analyzer*)normalizer);
    FullTextType      *boost_differs = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *not_indexed   = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *not_stored    = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *highlightable = FullTextType_new((Analyzer*)tokenizer);
    Obj               *dump          = (Obj*)FullTextType_Dump(type);
    Obj               *clone         = Freezer_load(dump);
    Obj               *another_dump  = (Obj*)FullTextType_Dump_For_Schema(type);

    FullTextType_Set_Boost(boost_differs, 1.5);
    FullTextType_Set_Indexed(not_indexed, false);
    FullTextType_Set_Stored(not_stored, false);
    FullTextType_Set_Highlightable(highlightable, true);

    // (This step is normally performed by Schema_Load() internally.)
    Hash_Store_Utf8((Hash*)another_dump, "analyzer", 8, INCREF(tokenizer));
    FullTextType *another_clone = FullTextType_Load(type, another_dump);

    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)boost_differs),
               "Equals() false with different boost");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)other),
               "Equals() false with different Analyzer");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_indexed),
               "Equals() false with indexed => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_stored),
               "Equals() false with stored => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)highlightable),
               "Equals() false with highlightable => true");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)clone),
              "Dump => Load round trip");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)another_clone),
              "Dump_For_Schema => Load round trip");

    DECREF(another_clone);
    DECREF(dump);
    DECREF(clone);
    DECREF(another_dump);
    DECREF(highlightable);
    DECREF(not_stored);
    DECREF(not_indexed);
    DECREF(boost_differs);
    DECREF(other);
    DECREF(type);
    DECREF(normalizer);
    DECREF(tokenizer);
}
Example #5
static void
test_Dump_Load_and_Equals(TestBatchRunner *runner) {
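    // Dump the tokenizer and verify that loading the dump yields an equal object.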
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    Obj *dump  = StandardTokenizer_Dump(tokenizer);
    StandardTokenizer *clone
        = (StandardTokenizer*)StandardTokenizer_Load(tokenizer, dump);

    TEST_TRUE(runner,
              StandardTokenizer_Equals(tokenizer, (Obj*)clone),
              "Dump => Load round trip");

    DECREF(tokenizer);
    DECREF(dump);
    DECREF(clone);
}
Example #6
static void
test_Compare_Values(TestBatchRunner *runner) {
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    FullTextType      *type      = FullTextType_new((Analyzer*)tokenizer);
    StackString       *a         = SSTR_WRAP_UTF8("a", 1);
    StackString       *b         = SSTR_WRAP_UTF8("b", 1);

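    // Compare_Values should order plain string values consistently.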
    TEST_TRUE(runner,
              FullTextType_Compare_Values(type, (Obj*)a, (Obj*)b) < 0,
              "a less than b");
    TEST_TRUE(runner,
              FullTextType_Compare_Values(type, (Obj*)b, (Obj*)a) > 0,
              "b greater than a");
    TEST_TRUE(runner,
              FullTextType_Compare_Values(type, (Obj*)b, (Obj*)b) == 0,
              "b equals b");

    DECREF(type);
    DECREF(tokenizer);
}
Example #7
static void
test_tokenizer(TestBatchRunner *runner) {
    StandardTokenizer *tokenizer = StandardTokenizer_new();

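    // A contrived input mixing punctuation, a combining mark (U+0302), soft
    // hyphens (U+00AD), a Thai character, and a supplementary-plane code point;
    // each expected token boundary is checked below.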
    String *word = SSTR_WRAP_C(
                              " ."
                              "tha\xCC\x82t's"
                              ":"
                              "1,02\xC2\xADZ4.38"
                              "\xE0\xB8\x81\xC2\xAD\xC2\xAD"
                              "\xF0\xA0\x80\x80"
                              "a"
                              "/");
    Vector *got = StandardTokenizer_Split(tokenizer, word);
    String *token = (String*)Vec_Fetch(got, 0);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "tha\xcc\x82t's", 8),
              "Token: %s", Str_Get_Ptr8(token));
    token = (String*)Vec_Fetch(got, 1);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "1,02\xC2\xADZ4.38", 11),
              "Token: %s", Str_Get_Ptr8(token));
    token = (String*)Vec_Fetch(got, 2);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "\xE0\xB8\x81\xC2\xAD\xC2\xAD", 7),
              "Token: %s", Str_Get_Ptr8(token));
    token = (String*)Vec_Fetch(got, 3);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "\xF0\xA0\x80\x80", 4),
              "Token: %s", Str_Get_Ptr8(token));
    token = (String*)Vec_Fetch(got, 4);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "a", 1),
              "Token: %s", Str_Get_Ptr8(token));
    DECREF(got);

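    // If the test data can be located, run the full UCD WordBreakTest suite.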
    FSFolder *modules_folder = TestUtils_modules_folder();
    if (modules_folder == NULL) {
        SKIP(runner, 1372, "Can't locate test data");
    }
    else {
        String *path = Str_newf("unicode/ucd/WordBreakTest.json");
        Vector *tests = (Vector*)Json_slurp_json((Folder*)modules_folder, path);
        if (!tests) { RETHROW(Err_get_error()); }

        for (uint32_t i = 0, max = Vec_Get_Size(tests); i < max; i++) {
            Hash *test = (Hash*)Vec_Fetch(tests, i);
            String *text = (String*)Hash_Fetch_Utf8(test, "text", 4);
            Vector *wanted = (Vector*)Hash_Fetch_Utf8(test, "words", 5);
            Vector *got = StandardTokenizer_Split(tokenizer, text);
            TEST_TRUE(runner, Vec_Equals(wanted, (Obj*)got), "UCD test #%d", i + 1);
            DECREF(got);
        }

        DECREF(tests);
        DECREF(modules_folder);
        DECREF(path);
    }

    DECREF(tokenizer);
}