// Verify that a Snapshot survives a Write_File()/Read_File() round trip
// through an in-memory Folder, and that Read_File() returns its receiver.
static void
test_Read_File_and_Write_File(TestBatchRunner *runner) {
    Folder   *folder   = (Folder*)RAMFolder_new(NULL);
    Snapshot *snapshot = Snapshot_new();
    String   *snap     = (String*)SSTR_WRAP_UTF8("snap", 4);
    String   *foo      = (String*)SSTR_WRAP_UTF8("foo", 3);

    // Persist a snapshot holding a single entry.
    Snapshot_Add_Entry(snapshot, foo);
    Snapshot_Write_File(snapshot, folder, snap);

    // Read_File() must hand back the very object it was invoked on.
    Snapshot *dupe        = Snapshot_new();
    Snapshot *read_retval = Snapshot_Read_File(dupe, folder, snap);
    TEST_TRUE(runner, dupe == read_retval, "Read_File() returns the object");

    // The entry lists of the original and the reconstituted snapshot
    // must compare equal.
    Vector *orig_list = Snapshot_List(snapshot);
    Vector *dupe_list = Snapshot_List(dupe);
    TEST_TRUE(runner, Vec_Equals(orig_list, (Obj*)dupe_list),
              "Round trip through Write_File() and Read_File()");

    DECREF(dupe_list);
    DECREF(orig_list);
    DECREF(dupe);
    DECREF(snapshot);
    DECREF(folder);
}
// Equality for PolyAnalyzer: identity, exact class membership, and
// element-wise equality of the wrapped analyzer chain.
bool
PolyAnalyzer_Equals_IMP(PolyAnalyzer *self, Obj *other) {
    // Identity implies equality.
    if ((PolyAnalyzer*)other == self) { return true; }
    // Lowercase `Obj_is_a` matches the convention used by the other
    // Equals implementations in this file.
    if (!Obj_is_a(other, POLYANALYZER)) { return false; }
    PolyAnalyzerIVARS *const ivars = PolyAnalyzer_IVARS(self);
    PolyAnalyzerIVARS *const ovars = PolyAnalyzer_IVARS((PolyAnalyzer*)other);
    // Two PolyAnalyzers are equal iff their analyzer vectors are equal.
    if (!Vec_Equals(ovars->analyzers, (Obj*)ivars->analyzers)) { return false; }
    return true;
}
// Equality for PolyQuery: identity, class membership, matching boost,
// and element-wise equality of the child queries.
bool
PolyQuery_Equals_IMP(PolyQuery *self, Obj *other) {
    // Same object?  Trivially equal.
    if ((PolyQuery*)other == self) { return true; }
    if (!Obj_is_a(other, POLYQUERY)) { return false; }

    PolyQueryIVARS *const ivars = PolyQuery_IVARS(self);
    PolyQueryIVARS *const ovars = PolyQuery_IVARS((PolyQuery*)other);

    if (ivars->boost != ovars->boost) { return false; }

    // Final criterion: the child vectors must compare equal.
    return Vec_Equals(ovars->children, (Obj*)ivars->children);
}
// Serialize a single integer (boxed inside a one-element Vector) to JSON,
// parse it back, and assert the decoded structure equals the original.
static void
S_round_trip_integer(TestBatchRunner *runner, int64_t value) {
    Vector *array = Vec_new(1);
    // Vec_Store takes ownership of the freshly boxed Integer.
    Vec_Store(array, 0, (Obj*)Int_new(value));

    String *json = Json_to_json((Obj*)array);
    Obj    *dump = Json_from_json(json);
    TEST_TRUE(runner, Vec_Equals(array, dump), "Round trip integer %ld",
              (long)value);

    DECREF(array);
    DECREF(json);
    DECREF(dump);
}
// Equality for PhraseQuery: identity, class membership, matching boost,
// matching (possibly NULL) field, and equal term vectors.
bool
PhraseQuery_Equals_IMP(PhraseQuery *self, Obj *other) {
    if ((PhraseQuery*)other == self) { return true; }
    if (!Obj_is_a(other, PHRASEQUERY)) { return false; }

    PhraseQueryIVARS *const ivars = PhraseQuery_IVARS(self);
    PhraseQueryIVARS *const ovars = PhraseQuery_IVARS((PhraseQuery*)other);

    if (ivars->boost != ovars->boost) { return false; }

    // Fields must either both be NULL or both be set...
    if ((ivars->field == NULL) != (ovars->field == NULL)) { return false; }
    // ...and when both are set, they must compare equal.
    if (ivars->field != NULL
        && !Str_Equals(ivars->field, (Obj*)ovars->field)) {
        return false;
    }

    return Vec_Equals(ovars->terms, (Obj*)ivars->terms);
}
// Equality for ProximityQuery: identity, class membership, matching boost,
// matching (possibly NULL) field, equal term vectors, and equal `within`.
bool
ProximityQuery_Equals_IMP(ProximityQuery *self, Obj *other) {
    if ((ProximityQuery*)other == self) { return true; }
    if (!Obj_is_a(other, PROXIMITYQUERY)) { return false; }

    ProximityQueryIVARS *const ivars = ProximityQuery_IVARS(self);
    ProximityQueryIVARS *const ovars = ProximityQuery_IVARS((ProximityQuery*)other);

    if (ivars->boost != ovars->boost) { return false; }

    // Fields must either both be NULL or both be set...
    if ((ivars->field == NULL) != (ovars->field == NULL)) { return false; }
    // ...and when both are set, they must compare equal.
    if (ivars->field != NULL
        && !Str_Equals(ivars->field, (Obj*)ovars->field)) {
        return false;
    }

    if (!Vec_Equals(ovars->terms, (Obj*)ivars->terms)) { return false; }

    // The proximity window must match as well.
    return ivars->within == ovars->within;
}
// Exercise StandardTokenizer_Split() against a handcrafted mixed-script
// string, then against the full UCD WordBreakTest suite loaded from JSON.
static void test_tokenizer(TestBatchRunner *runner) {
    StandardTokenizer *tokenizer = StandardTokenizer_new();
    // Input mixes: leading space/period, "that's" with a combining
    // circumflex (U+0302, bytes CC 82), a colon, a number containing a
    // soft hyphen (U+00AD, bytes C2 AD), a Thai character (U+0E01, bytes
    // E0 B8 81) followed by two soft hyphens, a supplementary-plane code
    // point (U+20000, bytes F0 A0 80 80), a lone "a", and a slash.
    String *word = SSTR_WRAP_C(
        " ."
        "tha\xCC\x82t's"
        ":"
        "1,02\xC2\xADZ4.38"
        "\xE0\xB8\x81\xC2\xAD\xC2\xAD"
        "\xF0\xA0\x80\x80"
        "a"
        "/");
    Vector *got = StandardTokenizer_Split(tokenizer, word);

    // NOTE(review): Str_Get_Ptr8(token) in the message arguments is
    // evaluated even when `token` is NULL (only the test condition
    // short-circuits) — a failed Vec_Fetch would crash before reporting.
    // Token 0: the combining mark stays inside the word (8 bytes).
    String *token = (String*)Vec_Fetch(got, 0);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "tha\xcc\x82t's", 8),
              "Token: %s", Str_Get_Ptr8(token));
    // Token 1: soft hyphen retained inside the numeric token (11 bytes).
    token = (String*)Vec_Fetch(got, 1);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "1,02\xC2\xADZ4.38", 11),
              "Token: %s", Str_Get_Ptr8(token));
    // Token 2: Thai char plus two trailing soft hyphens (7 bytes).
    token = (String*)Vec_Fetch(got, 2);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "\xE0\xB8\x81\xC2\xAD\xC2\xAD", 7),
              "Token: %s", Str_Get_Ptr8(token));
    // Token 3: the supplementary-plane code point survives intact (4 bytes).
    token = (String*)Vec_Fetch(got, 3);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "\xF0\xA0\x80\x80", 4),
              "Token: %s", Str_Get_Ptr8(token));
    // Token 4: the lone ASCII letter.
    token = (String*)Vec_Fetch(got, 4);
    TEST_TRUE(runner,
              token
              && Str_is_a(token, STRING)
              && Str_Equals_Utf8(token, "a", 1),
              "Token: %s", Str_Get_Ptr8(token));
    DECREF(got);

    // Second phase: run the Unicode word-break conformance data if the
    // modules folder with the test fixtures can be located.
    FSFolder *modules_folder = TestUtils_modules_folder();
    if (modules_folder == NULL) {
        // 1372 presumably matches the number of planned subtests being
        // skipped — verify against the test plan if it changes.
        SKIP(runner, 1372, "Can't locate test data");
    }
    else {
        String *path = Str_newf("unicode/ucd/WordBreakTest.json");
        Vector *tests = (Vector*)Json_slurp_json((Folder*)modules_folder, path);
        if (!tests) { RETHROW(Err_get_error()); }
        // Each test record carries the input "text" and the expected
        // "words" the tokenizer should produce.
        for (uint32_t i = 0, max = Vec_Get_Size(tests); i < max; i++) {
            Hash *test = (Hash*)Vec_Fetch(tests, i);
            String *text = (String*)Hash_Fetch_Utf8(test, "text", 4);
            Vector *wanted = (Vector*)Hash_Fetch_Utf8(test, "words", 5);
            // Shadows the outer `got`, which was already released above.
            Vector *got = StandardTokenizer_Split(tokenizer, text);
            TEST_TRUE(runner, Vec_Equals(wanted, (Obj*)got),
                      "UCD test #%d", i + 1);
            DECREF(got);
        }
        DECREF(tests);
        DECREF(modules_folder);
        DECREF(path);
    }
    DECREF(tokenizer);
}