void
BBSortEx_feed(BBSortEx *self, void *data) {
    // Hand the element to the parent class first.
    SortEx_feed((SortExternal*)self, data);

    // Account for the element's footprint and Flush() once the memory
    // threshold has been crossed.
    ByteBuf *const elem = (ByteBuf*)CERTIFY(*(ByteBuf**)data, BYTEBUF);
    self->mem_consumed += BB_Get_Size(elem);
    if (self->mem_consumed >= self->mem_thresh) {
        BBSortEx_Flush(self);
    }
}
void
BBSortEx_Feed_IMP(BBSortEx *self, Obj *item) {
    BBSortExIVARS *const ivars = BBSortEx_IVARS(self);

    // Delegate to the parent class's Feed() implementation.
    BBSortEx_Feed_t super_feed
        = SUPER_METHOD_PTR(BBSORTEX, LUCY_BBSortEx_Feed);
    super_feed(self, item);

    // Account for the element's footprint and Flush() once the memory
    // threshold has been crossed.
    ByteBuf *const bb = (ByteBuf*)CERTIFY(item, BYTEBUF);
    ivars->mem_consumed += BB_Get_Size(bb);
    if (ivars->mem_consumed >= ivars->mem_thresh) {
        BBSortEx_Flush(self);
    }
}
void
BBSortEx_flip(BBSortEx *self) {
    u32_t i;
    u32_t per_run_thresh = 65536;

    BBSortEx_Flush(self);

    /* Split half of the total memory budget evenly among the runs, but
     * never let a run's allowance drop below 64k. */
    if (self->num_runs) {
        per_run_thresh = (self->mem_thresh / 2) / self->num_runs;
        if (per_run_thresh < 65536) {
            per_run_thresh = 65536;
        }
    }
    for (i = 0; i < self->num_runs; i++) {
        BBSortExRun_Set_Mem_Thresh(self->runs[i], per_run_thresh);
    }

    /* OK to fetch now. */
    self->flipped = true;
}
void
BBSortEx_flip(BBSortEx *self) {
    // Flush before counting runs — flushing may create one more run.
    BBSortEx_Flush(self);

    // Split half of the total memory budget evenly among the runs, with
    // a 64k floor per run.
    uint32_t num_runs       = VA_Get_Size(self->runs);
    uint32_t per_run_thresh = 65536;
    if (num_runs) {
        per_run_thresh = (self->mem_thresh / 2) / num_runs;
        if (per_run_thresh < 65536) {
            per_run_thresh = 65536;
        }
    }
    for (uint32_t i = 0; i < num_runs; i++) {
        BBSortEx *run = (BBSortEx*)VA_Fetch(self->runs, i);
        BBSortEx_Set_Mem_Thresh(run, per_run_thresh);
    }

    // OK to fetch now.
    self->flipped = true;
}