/*
 * Reads and decodes the index header of a view group.
 *
 * Looks at info->header_pos in the group's tree_file, validates the
 * one-byte block marker found there, then reads and decodes the full
 * header chunk into *header (caller owns the decoded header).
 *
 * Returns COUCHSTORE_SUCCESS on success, or:
 *   COUCHSTORE_ERROR_FILE_CLOSED - file handle is not open
 *   COUCHSTORE_ERROR_READ        - the marker byte could not be read
 *   COUCHSTORE_ERROR_NO_HEADER   - marker says this is not a header block
 *   COUCHSTORE_ERROR_CORRUPT     - marker has an unknown value
 *   any negative pread_header()/decode_index_header() error code
 */
couchstore_error_t read_view_group_header(view_group_info_t *info, index_header_t **header)
{
    tree_file *file = &info->file;
    cs_off_t header_pos = info->header_pos;
    char marker;
    char *raw_header = NULL;
    int raw_len;
    couchstore_error_t errcode;

    if (file->handle == NULL) {
        return COUCHSTORE_ERROR_FILE_CLOSED;
    }

    /* The first byte of the block tells us what it is:
     * 0 -> a data block (no header here), 1 -> a header block,
     * anything else -> corruption. */
    if (info->file.ops->pread(NULL, file->handle, &marker, 1, header_pos) != 1) {
        return COUCHSTORE_ERROR_READ;
    }
    switch (marker) {
    case 0:
        return COUCHSTORE_ERROR_NO_HEADER;
    case 1:
        break;
    default:
        return COUCHSTORE_ERROR_CORRUPT;
    }

    raw_len = pread_header(file, header_pos, &raw_header, MAX_HEADER_SIZE);
    if (raw_len < 0) {
        /* pread_header() signals failure with a negative couchstore error. */
        return (couchstore_error_t) raw_len;
    }

    errcode = decode_index_header(raw_header, (size_t) raw_len, header);
    free(raw_header);
    return errcode;
}
/*
 * Decodes the binary index header fixture `header_bin` and asserts every
 * decoded field against the values known to be encoded in that fixture
 * (64 partitions, 2 views, fixed bitmasks, all sequence numbers == 1221,
 * specific b-tree root pointers/sizes).  Returns the decoded header so the
 * caller can reuse/free it.
 *
 * NOTE(review): the expected partition lists and magic numbers below are
 * determined by the test fixture, which is not visible here — they must
 * stay in sync with the encoded header blob.
 */
static index_header_t *test_index_header_decoding(const char *header_bin, size_t header_bin_size)
{
    /* Expected partition states encoded in the fixture. */
    uint16_t active[] = { 3,7,19,23,27,31,35,39,43,47,51,55,59,63 };
    uint16_t passive[] = { 0,1,2,4,5,6,8,9,10,12,13,14,16,17,18,20,21,22,24,25,
                           26,28,29,30,32,33,34,36,37,38,40,41,42,44,45,46,48,49,
                           50,52,53,54,56,57,60,62 };
    uint16_t cleanup[] = { 11,15,58,61 };
    uint16_t unindexable[] = { 0,63 };
    uint16_t replicas_on_transfer[] = { 5, 10, 60, 62 };
    uint16_t pending_active[] = { 11,15 };
    uint16_t pending_passive[] = { 58,61 };
    uint16_t pending_unindexable[] = { 15,58 };
    index_header_t *header = NULL;
    bitmap_t expected_active, expected_passive, expected_cleanup;
    unsigned i;

    /* Decoding itself must succeed and yield a non-NULL header. */
    assert(decode_index_header(header_bin, header_bin_size, &header) == COUCHSTORE_SUCCESS);
    assert(header != NULL);
    assert(header->version == 1);
    /* The first 16 bytes of the binary header are the raw MD5 signature. */
    assert(memcmp(header->signature, header_bin, 16) == 0);
    assert(header->num_partitions == 64);
    assert(header->num_views == 2);

    /* Build the expected active bitmask from the list and compare whole-struct. */
    memset(&expected_active, 0, sizeof(expected_active));
    for (i = 0; i < (sizeof(active) / sizeof(active[0])); ++i) {
        set_bit(&expected_active, active[i]);
    }
    assert(memcmp(&header->active_bitmask, &expected_active, sizeof(expected_active)) == 0);

    /* Same check for the passive bitmask. */
    memset(&expected_passive, 0, sizeof(expected_passive));
    for (i = 0; i < (sizeof(passive) / sizeof(passive[0])); ++i) {
        set_bit(&expected_passive, passive[i]);
    }
    assert(memcmp(&header->passive_bitmask, &expected_passive, sizeof(expected_passive)) == 0);

    /* Same check for the cleanup bitmask. */
    memset(&expected_cleanup, 0, sizeof(expected_cleanup));
    for (i = 0; i < (sizeof(cleanup) / sizeof(cleanup[0])); ++i) {
        set_bit(&expected_cleanup, cleanup[i]);
    }
    assert(memcmp(&header->cleanup_bitmask, &expected_cleanup, sizeof(expected_cleanup)) == 0);

    /* header->seqs holds one entry per partition that is neither
     * unindexable (0, 63) nor in cleanup (11, 15, 58, 61): 64 - 6 = 58. */
    assert(sorted_list_size(header->seqs) == 58);
    for (uint16_t i = 0; i < 64; ++i) {
        switch (i) {
        /* unindexable */
        case 0:
        case 63:
        /* cleanup */
        case 11:
        case 15:
        case 58:
        case 61:
            continue;
        default:
            break;
        }
        part_seq_t rs, *pseq;
        rs.part_id = i;
        pseq = (part_seq_t *) sorted_list_get(header->seqs, &rs);
        assert(pseq != NULL);
        assert(pseq->part_id == i);
        /* Fixture encodes sequence number 1221 for every partition. */
        assert(pseq->seq == 1221);
    }

    /* Unindexable partitions get their own sequence list. */
    int num_unindexable = sizeof(unindexable) / sizeof(unindexable[0]);
    assert(sorted_list_size(header->unindexable_seqs) == num_unindexable);
    for (int i = 0; i < num_unindexable; ++i) {
        part_seq_t rs, *pseq;
        rs.part_id = unindexable[i];
        pseq = (part_seq_t *) sorted_list_get(header->unindexable_seqs, &rs);
        assert(pseq != NULL);
        assert(pseq->part_id == unindexable[i]);
        assert(pseq->seq == 1221);
    }

    /* B-tree root states: disk offsets, subtree sizes and reduce buffer
     * sizes as encoded in the fixture. */
    assert(header->id_btree_state->pointer == 1617507);
    assert(header->id_btree_state->subtreesize == 1286028);
    assert(header->id_btree_state->reduce_value.size == 133);
    /* TODO: once view reduction decoding is done, test the exact reduction value. */
    assert(header->view_btree_states[0]->pointer == 2901853);
    assert(header->view_btree_states[0]->subtreesize == 1284202);
    assert(header->view_btree_states[0]->reduce_value.size == 140);
    /* TODO: once view reduction decoding is done, test the exact reduction value. */
    assert(header->view_btree_states[1]->pointer == 4180175);
    assert(header->view_btree_states[1]->subtreesize == 1278451);
    assert(header->view_btree_states[1]->reduce_value.size == 140);
    /* TODO: once view reduction decoding is done, test the exact reduction value.
     */
    /* Replica-related state: list of partitions being transferred. */
    assert(header->has_replica == 1);
    assert(header->replicas_on_transfer != NULL);
    int num_reps = (sizeof(replicas_on_transfer) / sizeof(replicas_on_transfer[0]));
    assert(sorted_list_size(header->replicas_on_transfer) == num_reps);
    for (int i = 0; i < num_reps; ++i) {
        uint16_t *part_id = sorted_list_get(header->replicas_on_transfer, &replicas_on_transfer[i]);
        assert(part_id != NULL);
        assert(*part_id == replicas_on_transfer[i]);
    }

    /* Pending transition: partitions queued to become active. */
    int num_pending_active = sizeof(pending_active) / sizeof(pending_active[0]);
    assert(sorted_list_size(header->pending_transition.active) == num_pending_active);
    for (int i = 0; i < num_pending_active; ++i) {
        uint16_t *part_id = sorted_list_get(header->pending_transition.active, &pending_active[i]);
        assert(part_id != NULL);
        assert(*part_id == pending_active[i]);
    }

    /* Pending transition: partitions queued to become passive. */
    int num_pending_passive = sizeof(pending_passive) / sizeof(pending_passive[0]);
    assert(sorted_list_size(header->pending_transition.passive) == num_pending_passive);
    for (int i = 0; i < num_pending_passive; ++i) {
        uint16_t *part_id = sorted_list_get(header->pending_transition.passive, &pending_passive[i]);
        assert(part_id != NULL);
        assert(*part_id == pending_passive[i]);
    }

    /* Pending transition: partitions queued to become unindexable. */
    int num_pending_unindexable = sizeof(pending_unindexable) / sizeof(pending_unindexable[0]);
    assert(sorted_list_size(header->pending_transition.unindexable) == num_pending_unindexable);
    for (int i = 0; i < num_pending_unindexable; ++i) {
        uint16_t *part_id = sorted_list_get(header->pending_transition.unindexable, &pending_unindexable[i]);
        assert(part_id != NULL);
        assert(*part_id == pending_unindexable[i]);
    }

    return header;
}