// Return deletions metadata: the superclass metadata extended with a
// "files" hash mapping each updated segment's name to a mini-hash holding
// the deletion count and the deletions filename.
Hash*
DefDelWriter_Metadata_IMP(DefaultDeletionsWriter *self) {
    DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self);
    DefDelWriter_Metadata_t super_meta
        = (DefDelWriter_Metadata_t)SUPER_METHOD_PTR(DEFAULTDELETIONSWRITER,
                                                    LUCY_DefDelWriter_Metadata);
    Hash *const metadata = super_meta(self);
    Hash *const files    = Hash_new(0);

    for (uint32_t i = 0, max = VA_Get_Size(ivars->seg_readers); i < max; i++) {
        SegReader *seg_reader = (SegReader*)VA_Fetch(ivars->seg_readers, i);
        // Only record segments whose deletions were modified in this session.
        if (ivars->updated[i]) {
            BitVector *deldocs   = (BitVector*)VA_Fetch(ivars->bit_vecs, i);
            Segment   *segment   = SegReader_Get_Segment(seg_reader);
            Hash      *mini_meta = Hash_new(2);
            Hash_Store_Utf8(mini_meta, "count", 5,
                            (Obj*)Str_newf("%u32", (uint32_t)BitVec_Count(deldocs)));
            Hash_Store_Utf8(mini_meta, "filename", 8,
                            (Obj*)S_del_filename(self, seg_reader));
            // Hash keys are Strings under the current API; the (Obj*) cast on
            // the key was a holdover from the old Obj-keyed Hash interface
            // (compare Hash_Store(type_dumps, field, ...) elsewhere in this file).
            Hash_Store(files, Seg_Get_Name(segment), (Obj*)mini_meta);
        }
    }
    Hash_Store_Utf8(metadata, "files", 5, (Obj*)files);

    return metadata;
}
// Create a test data structure including at least one each of Hash, Vector, // and String. static Obj* S_make_dump() { Hash *dump = Hash_new(0); Hash_Store_Utf8(dump, "foo", 3, (Obj*)Str_newf("foo")); Hash_Store_Utf8(dump, "stuff", 5, (Obj*)Vec_new(0)); return (Obj*)dump; }
// Serialize this TermQuery: the superclass dump plus "field" and "term".
Obj*
TermQuery_Dump_IMP(TermQuery *self) {
    TermQueryIVARS *ivars = TermQuery_IVARS(self);
    TermQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(TERMQUERY, LUCY_TermQuery_Dump);
    // The parent implementation must hand back a Hash.
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    Hash_Store_Utf8(result, "field", 5, Freezer_dump((Obj*)ivars->field));
    Hash_Store_Utf8(result, "term", 4, Freezer_dump(ivars->term));
    return (Obj*)result;
}
// Serialize this PhraseQuery: the superclass dump plus "field" and "terms".
Obj*
PhraseQuery_Dump_IMP(PhraseQuery *self) {
    PhraseQueryIVARS *ivars = PhraseQuery_IVARS(self);
    PhraseQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(PHRASEQUERY, LUCY_PhraseQuery_Dump);
    // The parent implementation must hand back a Hash.
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    Hash_Store_Utf8(result, "field", 5, Freezer_dump((Obj*)ivars->field));
    Hash_Store_Utf8(result, "terms", 5, Freezer_dump((Obj*)ivars->terms));
    return (Obj*)result;
}
// Base Query serialization: record the concrete class name and the boost.
Obj*
Query_Dump_IMP(Query *self) {
    QueryIVARS *ivars = Query_IVARS(self);
    Hash *dump = Hash_new(0);
    // Lowercase accessor spelling for consistency with the rest of this
    // file (Schema_get_class_name, NumType_get_class_name, ...).
    Hash_Store_Utf8(dump, "_class", 6,
                    (Obj*)Str_Clone(Obj_get_class_name((Obj*)self)));
    Hash_Store_Utf8(dump, "boost", 5,
                    (Obj*)Str_newf("%f64", (double)ivars->boost));
    return (Obj*)dump;
}
// Extend the superclass metadata with the sort caches' bookkeeping:
// per-field counts, null ordinals, and ordinal widths.
Hash*
SortWriter_Metadata_IMP(SortWriter *self) {
    SortWriterIVARS *const ivars = SortWriter_IVARS(self);
    SortWriter_Metadata_t super_meta
        = (SortWriter_Metadata_t)SUPER_METHOD_PTR(SORTWRITER,
                                                  LUCY_SortWriter_Metadata);
    Hash *const meta = super_meta(self);
    // The hash takes ownership, so add a refcount for each stored ivar.
    Hash_Store_Utf8(meta, "counts", 6, INCREF(ivars->counts));
    Hash_Store_Utf8(meta, "null_ords", 9, INCREF(ivars->null_ords));
    Hash_Store_Utf8(meta, "ord_widths", 10, INCREF(ivars->ord_widths));
    return meta;
}
// Serialize this ProximityQuery: superclass dump plus "field", "terms",
// and the "within" proximity bound.
Obj*
ProximityQuery_Dump_IMP(ProximityQuery *self) {
    ProximityQueryIVARS *ivars = ProximityQuery_IVARS(self);
    ProximityQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(PROXIMITYQUERY, LUCY_ProximityQuery_Dump);
    // The parent implementation must hand back a Hash.
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    Hash_Store_Utf8(result, "field", 5, Freezer_dump((Obj*)ivars->field));
    Hash_Store_Utf8(result, "terms", 5, Freezer_dump((Obj*)ivars->terms));
    Hash_Store_Utf8(result, "within", 6,
                    (Obj*)Str_newf("%i64", (int64_t)ivars->within));
    return (Obj*)result;
}
// Serialize this LeafQuery: superclass dump plus "text", and "field" when
// a field was supplied (it may be NULL for unqualified leaves).
Obj*
LeafQuery_Dump_IMP(LeafQuery *self) {
    LeafQueryIVARS *ivars = LeafQuery_IVARS(self);
    LeafQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(LEAFQUERY, LUCY_LeafQuery_Dump);
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    if (ivars->field) {
        Hash_Store_Utf8(result, "field", 5, Freezer_dump((Obj*)ivars->field));
    }
    Hash_Store_Utf8(result, "text", 4, Freezer_dump((Obj*)ivars->text));
    return (Obj*)result;
}
// Full (non-schema) dump: start from the schema-oriented dump, add the
// class name and a dumped analyzer, and drop the "type" shorthand, which
// only belongs in the schema form.
Hash*
FullTextType_Dump_IMP(FullTextType *self) {
    FullTextTypeIVARS *const ivars = FullTextType_IVARS(self);
    Hash *full_dump = FullTextType_Dump_For_Schema(self);
    Hash_Store_Utf8(full_dump, "_class", 6,
                    (Obj*)Str_Clone(FullTextType_get_class_name(self)));
    Hash_Store_Utf8(full_dump, "analyzer", 8,
                    (Obj*)Analyzer_Dump(ivars->analyzer));
    // Delete_Utf8 returns the evicted value; release it.
    DECREF(Hash_Delete_Utf8(full_dump, "type", 4));
    return full_dump;
}
// Base metadata implementation: a fresh hash containing only the writer's
// on-disk format number.
Hash*
DataWriter_Metadata_IMP(DataWriter *self) {
    Hash *meta = Hash_new(0);
    Obj  *format = (Obj*)Str_newf("%i32", DataWriter_Format(self));
    Hash_Store_Utf8(meta, "format", 6, format);
    return meta;
}
// Serialize the Schema: class name, the pool of unique Analyzers, and a
// per-field hash of FieldType dumps.  Known core types are dumped in the
// simplified schema format (with Analyzers referenced by index into the
// unique-analyzer pool); unknown types fall back to a full Dump().
Hash*
Schema_Dump_IMP(Schema *self) {
    SchemaIVARS *const ivars = Schema_IVARS(self);
    Hash *dump = Hash_new(0);
    Hash *type_dumps = Hash_new(Hash_Get_Size(ivars->types));

    // Record class name, store dumps of unique Analyzers.
    Hash_Store_Utf8(dump, "_class", 6,
                    (Obj*)Str_Clone(Schema_get_class_name(self)));
    Hash_Store_Utf8(dump, "analyzers", 9,
                    Freezer_dump((Obj*)ivars->uniq_analyzers));

    // Dump FieldTypes.  `dump` takes ownership of `type_dumps` here.
    Hash_Store_Utf8(dump, "fields", 6, (Obj*)type_dumps);
    HashIterator *iter = HashIter_new(ivars->types);
    while (HashIter_Next(iter)) {
        String    *field      = HashIter_Get_Key(iter);
        FieldType *type       = (FieldType*)HashIter_Get_Value(iter);
        Class     *type_class = FType_get_class(type);

        // Dump known types to simplified format.
        if (type_class == FULLTEXTTYPE) {
            FullTextType *fttype = (FullTextType*)type;
            Hash *type_dump = FullTextType_Dump_For_Schema(fttype);
            Analyzer *analyzer = FullTextType_Get_Analyzer(fttype);
            uint32_t tick
                = S_find_in_array(ivars->uniq_analyzers, (Obj*)analyzer);

            // Store the tick which references a unique analyzer.
            Hash_Store_Utf8(type_dump, "analyzer", 8,
                            (Obj*)Str_newf("%u32", tick));

            Hash_Store(type_dumps, field, (Obj*)type_dump);
        }
        else if (type_class == STRINGTYPE || type_class == BLOBTYPE) {
            Hash *type_dump = FType_Dump_For_Schema(type);
            Hash_Store(type_dumps, field, (Obj*)type_dump);
        }
        // Unknown FieldType type, so punt.
        else {
            // Cast added: FType_Dump returns Hash*, and every other store in
            // this loop casts its value to Obj*.
            Hash_Store(type_dumps, field, (Obj*)FType_Dump(type));
        }
    }
    DECREF(iter);

    return dump;
}
// Full (non-schema) dump: schema dump plus the class name, minus the
// schema-only "type" shorthand.
Hash*
NumType_Dump_IMP(NumericType *self) {
    Hash *full_dump = NumType_Dump_For_Schema(self);
    Hash_Store_Utf8(full_dump, "_class", 6,
                    (Obj*)Str_Clone(NumType_get_class_name(self)));
    // Delete_Utf8 returns the evicted value; release it.
    DECREF(Hash_Delete_Utf8(full_dump, "type", 4));
    return full_dump;
}
// Full (non-schema) dump: schema dump plus the class name, minus the
// schema-only "type" shorthand.
Hash*
StringType_Dump_IMP(StringType *self) {
    Hash *dump = StringType_Dump_For_Schema(self);
    // Lowercase accessor spelling for consistency with the sibling
    // implementations (FullTextType_get_class_name, NumType_get_class_name).
    Hash_Store_Utf8(dump, "_class", 6,
                    (Obj*)Str_Clone(StringType_get_class_name(self)));
    DECREF(Hash_Delete_Utf8(dump, "type", 4));
    return dump;
}
// Serialize this EasyAnalyzer: the superclass dump plus the language code.
Hash*
EasyAnalyzer_Dump_IMP(EasyAnalyzer *self) {
    EasyAnalyzerIVARS *const ivars = EasyAnalyzer_IVARS(self);
    EasyAnalyzer_Dump_t super_dump
        = SUPER_METHOD_PTR(EASYANALYZER, LUCY_EasyAnalyzer_Dump);
    Hash *result = super_dump(self);
    Hash_Store_Utf8(result, "language", 8, (Obj*)Str_Clone(ivars->language));
    return result;
}
// Persist this Segment's metadata to "<seg_name>/segmeta.json" inside
// `folder`.  Throws (rethrows the global error) if the JSON write fails.
void
Seg_Write_File_IMP(Segment *self, Folder *folder) {
    SegmentIVARS *const ivars = Seg_IVARS(self);
    Hash *my_metadata = Hash_new(16);

    // Store metadata specific to this Segment object.
    Hash_Store_Utf8(my_metadata, "count", 5,
                    (Obj*)Str_newf("%i64", ivars->count));
    Hash_Store_Utf8(my_metadata, "name", 4, (Obj*)Str_Clone(ivars->name));
    // INCREF because the hash takes ownership of the stored value.
    Hash_Store_Utf8(my_metadata, "field_names", 11, INCREF(ivars->by_num));
    // File format version, hard-coded to 1.
    Hash_Store_Utf8(my_metadata, "format", 6, (Obj*)Str_newf("%i32", 1));
    // Nest under "segmeta"; ivars->metadata takes ownership of my_metadata.
    Hash_Store_Utf8(ivars->metadata, "segmeta", 7, (Obj*)my_metadata);

    String *filename = Str_newf("%o/segmeta.json", ivars->name);
    bool result = Json_spew_json((Obj*)ivars->metadata, folder, filename);
    DECREF(filename);
    // On failure, propagate the error Json_spew_json set.
    if (!result) { RETHROW(INCREF(Err_get_error())); }
}
// Serialize this PolyQuery: the superclass dump plus the child queries.
Obj*
PolyQuery_Dump_IMP(PolyQuery *self) {
    PolyQueryIVARS *ivars = PolyQuery_IVARS(self);
    PolyQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(POLYQUERY, LUCY_PolyQuery_Dump);
    // The parent implementation must hand back a Hash.
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    Hash_Store_Utf8(result, "children", 8, Freezer_dump((Obj*)ivars->children));
    return (Obj*)result;
}
// Serialize this RegexTokenizer: the superclass dump plus the pattern.
Obj*
RegexTokenizer_Dump_IMP(RegexTokenizer *self) {
    RegexTokenizerIVARS *const ivars = RegexTokenizer_IVARS(self);
    RegexTokenizer_Dump_t super_dump
        = SUPER_METHOD_PTR(REGEXTOKENIZER, LUCY_RegexTokenizer_Dump);
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    Hash_Store_Utf8(result, "pattern", 7, (Obj*)Str_Clone(ivars->pattern));
    return (Obj*)result;
}
// Serialize this HitDoc: the superclass dump plus the hit score.
Hash*
HitDoc_Dump_IMP(HitDoc *self) {
    HitDocIVARS *const ivars = HitDoc_IVARS(self);
    HitDoc_Dump_t super_dump = SUPER_METHOD_PTR(HITDOC, LUCY_HitDoc_Dump);
    Hash *result = super_dump(self);
    Hash_Store_Utf8(result, "score", 5,
                    (Obj*)Str_newf("%f64", ivars->score));
    return result;
}
// Serialize this SnowballStemmer: the superclass dump plus the language code.
Hash*
SnowStemmer_Dump_IMP(SnowballStemmer *self) {
    SnowballStemmerIVARS *const ivars = SnowStemmer_IVARS(self);
    SnowStemmer_Dump_t super_dump
        = SUPER_METHOD_PTR(SNOWBALLSTEMMER, LUCY_SnowStemmer_Dump);
    Hash *result = super_dump(self);
    Hash_Store_Utf8(result, "language", 8, (Obj*)Str_Clone(ivars->language));
    return result;
}
// Schema-oriented dump for BlobType: the "blob" type tag plus any
// attributes which differ from the defaults.
Hash*
BlobType_Dump_For_Schema_IMP(BlobType *self) {
    BlobTypeIVARS *const ivars = BlobType_IVARS(self);
    Hash *schema_dump = Hash_new(0);
    Hash_Store_Utf8(schema_dump, "type", 4, (Obj*)Str_newf("blob"));

    // Store attributes that override the defaults -- even if they're
    // meaningless.
    if (ivars->boost != 1.0) {
        Hash_Store_Utf8(schema_dump, "boost", 5,
                        (Obj*)Str_newf("%f64", ivars->boost));
    }
    if (ivars->indexed) {
        Hash_Store_Utf8(schema_dump, "indexed", 7, (Obj*)CFISH_TRUE);
    }
    if (ivars->stored) {
        Hash_Store_Utf8(schema_dump, "stored", 6, (Obj*)CFISH_TRUE);
    }

    return schema_dump;
}
// Serialize this PolyAnalyzer: the superclass dump plus its child
// analyzers, when present (the ivar may be NULL).
Obj*
PolyAnalyzer_Dump_IMP(PolyAnalyzer *self) {
    PolyAnalyzerIVARS *const ivars = PolyAnalyzer_IVARS(self);
    PolyAnalyzer_Dump_t super_dump
        = SUPER_METHOD_PTR(POLYANALYZER, LUCY_PolyAnalyzer_Dump);
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    if (ivars->analyzers) {
        Hash_Store_Utf8(result, "analyzers", 9,
                        Freezer_dump((Obj*)ivars->analyzers));
    }
    return (Obj*)result;
}
// Serialize this RangeQuery: superclass dump, the field, optional lower
// and upper bound terms, and the two inclusivity flags.
Obj*
RangeQuery_Dump_IMP(RangeQuery *self) {
    RangeQueryIVARS *ivars = RangeQuery_IVARS(self);
    RangeQuery_Dump_t super_dump
        = SUPER_METHOD_PTR(RANGEQUERY, LUCY_RangeQuery_Dump);
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);

    Hash_Store_Utf8(result, "field", 5, Freezer_dump((Obj*)ivars->field));
    // Either bound may be absent (open-ended range).
    if (ivars->lower_term) {
        Hash_Store_Utf8(result, "lower_term", 10,
                        Freezer_dump((Obj*)ivars->lower_term));
    }
    if (ivars->upper_term) {
        Hash_Store_Utf8(result, "upper_term", 10,
                        Freezer_dump((Obj*)ivars->upper_term));
    }
    Hash_Store_Utf8(result, "include_lower", 13,
                    (Obj*)Bool_singleton(ivars->include_lower));
    Hash_Store_Utf8(result, "include_upper", 13,
                    (Obj*)Bool_singleton(ivars->include_upper));

    return (Obj*)result;
}
// Serialize this SnowballStopFilter: the superclass dump plus the
// stoplist, when present (the ivar may be NULL).
Obj*
SnowStop_Dump_IMP(SnowballStopFilter *self) {
    SnowballStopFilterIVARS *ivars = SnowStop_IVARS(self);
    SnowStop_Dump_t super_dump
        = SUPER_METHOD_PTR(SNOWBALLSTOPFILTER, LUCY_SnowStop_Dump);
    Hash *result = (Hash*)CERTIFY(super_dump(self), HASH);
    if (ivars->stoplist) {
        Hash_Store_Utf8(result, "stoplist", 8,
                        Freezer_dump((Obj*)ivars->stoplist));
    }
    return (Obj*)result;
}
// Schema-oriented dump for FullTextType: the "fulltext" type tag plus any
// attributes which differ from the defaults (indexed/stored default true;
// sortable/highlightable default false).
Hash*
FullTextType_Dump_For_Schema_IMP(FullTextType *self) {
    FullTextTypeIVARS *const ivars = FullTextType_IVARS(self);
    Hash *schema_dump = Hash_new(0);
    Hash_Store_Utf8(schema_dump, "type", 4, (Obj*)Str_newf("fulltext"));

    // Store attributes that override the defaults.
    if (ivars->boost != 1.0) {
        Hash_Store_Utf8(schema_dump, "boost", 5,
                        (Obj*)Str_newf("%f64", ivars->boost));
    }
    if (!ivars->indexed) {
        Hash_Store_Utf8(schema_dump, "indexed", 7, (Obj*)CFISH_FALSE);
    }
    if (!ivars->stored) {
        Hash_Store_Utf8(schema_dump, "stored", 6, (Obj*)CFISH_FALSE);
    }
    if (ivars->sortable) {
        Hash_Store_Utf8(schema_dump, "sortable", 8, (Obj*)CFISH_TRUE);
    }
    if (ivars->highlightable) {
        Hash_Store_Utf8(schema_dump, "highlightable", 13, (Obj*)CFISH_TRUE);
    }

    return schema_dump;
}
// Write this snapshot to a JSON file inside `folder`.  If `path` is empty
// or NULL, a fresh "snapshot_<gen>.json" name is derived from the latest
// snapshot's generation number.  Throws if the target file already exists.
void
Snapshot_Write_File_IMP(Snapshot *self, Folder *folder, String *path) {
    SnapshotIVARS *const ivars = Snapshot_IVARS(self);
    Hash *all_data = Hash_new(0);
    Vector *list = Snapshot_List(self);

    // Update path.
    DECREF(ivars->path);
    if (path != NULL && Str_Get_Size(path) != 0) {
        ivars->path = Str_Clone(path);
    }
    else {
        // Auto-generate: one past the latest snapshot's gen, base-36 encoded.
        String *latest = IxFileNames_latest_snapshot(folder);
        uint64_t gen = latest ? IxFileNames_extract_gen(latest) + 1 : 1;
        char base36[StrHelp_MAX_BASE36_BYTES];
        StrHelp_to_base36(gen, &base36);
        ivars->path = Str_newf("snapshot_%s.json", &base36);
        DECREF(latest);
    }

    // Don't overwrite.
    if (Folder_Exists(folder, ivars->path)) {
        THROW(ERR, "Snapshot file '%o' already exists", ivars->path);
    }

    // Sort, then store file names.  `all_data` takes ownership of `list`.
    Vec_Sort(list);
    Hash_Store_Utf8(all_data, "entries", 7, (Obj*)list);

    // Create a JSON-izable data structure.
    Hash_Store_Utf8(all_data, "format", 6,
                    (Obj*)Str_newf("%i32", (int32_t)Snapshot_current_file_format));
    Hash_Store_Utf8(all_data, "subformat", 9,
                    (Obj*)Str_newf("%i32", (int32_t)Snapshot_current_file_subformat));

    // Write out JSON-ized data to the new file.
    // NOTE(review): the return value of Json_spew_json is ignored here,
    // unlike in Seg_Write_File/IxManager_Write_Merge_Data -- confirm whether
    // a write failure should throw.
    Json_spew_json((Obj*)all_data, folder, ivars->path);

    DECREF(all_data);
}
// Schema-oriented dump for NumericType: the subclass-specific type
// specifier plus any attributes which differ from the defaults
// (indexed/stored default true; sortable defaults false).
Hash*
NumType_Dump_For_Schema_IMP(NumericType *self) {
    NumericTypeIVARS *const ivars = NumType_IVARS(self);
    Hash *schema_dump = Hash_new(0);
    Hash_Store_Utf8(schema_dump, "type", 4, (Obj*)NumType_Specifier(self));

    // Store attributes that override the defaults.
    if (ivars->boost != 1.0) {
        Hash_Store_Utf8(schema_dump, "boost", 5,
                        (Obj*)Str_newf("%f64", ivars->boost));
    }
    if (!ivars->indexed) {
        Hash_Store_Utf8(schema_dump, "indexed", 7, (Obj*)CFISH_FALSE);
    }
    if (!ivars->stored) {
        Hash_Store_Utf8(schema_dump, "stored", 6, (Obj*)CFISH_FALSE);
    }
    if (ivars->sortable) {
        Hash_Store_Utf8(schema_dump, "sortable", 8, (Obj*)CFISH_TRUE);
    }

    return schema_dump;
}
// Record the merge cutoff in "merge.json" inside the manager's folder.
// Throws if the JSON write fails.
void
IxManager_Write_Merge_Data_IMP(IndexManager *self, int64_t cutoff) {
    IndexManagerIVARS *const ivars = IxManager_IVARS(self);
    String *merge_json = SSTR_WRAP_C("merge.json");
    Hash *data = Hash_new(1);
    Hash_Store_Utf8(data, "cutoff", 6, (Obj*)Str_newf("%i64", cutoff));
    bool success = Json_spew_json((Obj*)data, ivars->folder, merge_json);
    DECREF(data);
    if (!success) {
        THROW(ERR, "Failed to write to %o", merge_json);
    }
}
// Verify that Json_to_json refuses self-referential data: it must return
// NULL and set the global error rather than recurse forever.
static void
test_max_depth(TestBatchRunner *runner) {
    Hash *circular = Hash_new(0);
    // Create a reference cycle: the hash contains itself.
    Hash_Store_Utf8(circular, "circular", 8, INCREF(circular));
    Err_set_error(NULL);
    String *not_json = Json_to_json((Obj*)circular);
    TEST_TRUE(runner, not_json == NULL,
              "to_json returns NULL when fed recursing data");
    TEST_TRUE(runner, Err_get_error() != NULL,
              "to_json sets global error when fed recursing data");
    // Break the cycle before the final DECREF, or the hash would never be
    // freed (Delete_Utf8 returns the evicted value, which we release).
    DECREF(Hash_Delete_Utf8(circular, "circular", 8));
    DECREF(circular);
}
// Exercise FullTextType's Dump/Load round trips and Equals():
// Equals() must be false when boost, analyzer, indexed, stored, or
// highlightable differ, and both Dump and Dump_For_Schema must survive a
// Load round trip.
static void
test_Dump_Load_and_Equals(TestBatchRunner *runner) {
    StandardTokenizer *tokenizer     = StandardTokenizer_new();
    Normalizer        *normalizer    = Normalizer_new(NULL, true, false);
    FullTextType      *type          = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *other         = FullTextType_new((Analyzer*)normalizer);
    FullTextType      *boost_differs = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *not_indexed   = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *not_stored    = FullTextType_new((Analyzer*)tokenizer);
    FullTextType      *highlightable = FullTextType_new((Analyzer*)tokenizer);
    Obj               *dump          = (Obj*)FullTextType_Dump(type);
    Obj               *clone         = Freezer_load(dump);
    Obj               *another_dump  = (Obj*)FullTextType_Dump_For_Schema(type);

    // Make each variant differ from `type` in exactly one attribute.
    FullTextType_Set_Boost(boost_differs, 1.5);
    FullTextType_Set_Indexed(not_indexed, false);
    FullTextType_Set_Stored(not_stored, false);
    FullTextType_Set_Highlightable(highlightable, true);

    // (This step is normally performed by Schema_Load() internally.)
    Hash_Store_Utf8((Hash*)another_dump, "analyzer", 8, INCREF(tokenizer));
    FullTextType *another_clone = FullTextType_Load(type, another_dump);

    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)boost_differs),
               "Equals() false with different boost");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)other),
               "Equals() false with different Analyzer");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_indexed),
               "Equals() false with indexed => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)not_stored),
               "Equals() false with stored => false");
    TEST_FALSE(runner, FullTextType_Equals(type, (Obj*)highlightable),
               "Equals() false with highlightable => true");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)clone),
              "Dump => Load round trip");
    TEST_TRUE(runner, FullTextType_Equals(type, (Obj*)another_clone),
              "Dump_For_Schema => Load round trip");

    // Release everything created above.
    DECREF(another_clone);
    DECREF(dump);
    DECREF(clone);
    DECREF(another_dump);
    DECREF(highlightable);
    DECREF(not_stored);
    DECREF(not_indexed);
    DECREF(boost_differs);
    DECREF(other);
    DECREF(type);
    DECREF(normalizer);
    DECREF(tokenizer);
}
// Initialize a FilePurger.  Takes refcounts on `folder` and `snapshot`;
// when `manager` is NULL, a default IndexManager is created.
FilePurger*
FilePurger_init(FilePurger *self, Folder *folder, Snapshot *snapshot,
                IndexManager *manager) {
    FilePurgerIVARS *const ivars = FilePurger_IVARS(self);
    ivars->folder   = (Folder*)INCREF(folder);
    ivars->snapshot = (Snapshot*)INCREF(snapshot);
    if (manager) {
        ivars->manager = (IndexManager*)INCREF(manager);
    }
    else {
        ivars->manager = IxManager_new(NULL, NULL);
    }
    IxManager_Set_Folder(ivars->manager, folder);

    // Don't allow the locks directory to be zapped.
    ivars->disallowed = Hash_new(0);
    Hash_Store_Utf8(ivars->disallowed, "locks", 5, (Obj*)CFISH_TRUE);

    return self;
}