/*
 * Test the following scenarios:
 * Case 1:
 * Sequentially create a file and dataset with H5D_ALLOC_TIME_EARLY and a large
 * size, write nothing, close, reopen in parallel, and read to verify that all
 * elements return the fill value.
 * Case 2:
 * Sequentially create a file and dataset with H5D_ALLOC_TIME_EARLY but a small
 * size, write nothing, close, reopen in parallel, extend to a large size, close,
 * then reopen in parallel and read to verify that all elements return the fill
 * value.
 * Case 3:
 * Sequentially create a file and dataset with H5D_ALLOC_TIME_EARLY and a large
 * size, write just a small part of the dataset (the second-to-last chunk),
 * close, then reopen in parallel and read to verify that everything returns the
 * fill value except the small portion that was written. Without closing it,
 * write all parts of the dataset in an interleaved pattern, close it, reopen
 * it, and read to verify that all data are as written.
 */
void
test_chunk_alloc(void)
{
    const char *filename;
    hid_t       file_id, dataset;

    file_id = dataset = -1;

    /* Initialize MPI */
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

    filename = GetTestParameters();
    if (VERBOSE_MED)
        printf("Extend Chunked allocation test on file %s\n", filename);

    /* Case 1 */
    /* Create the chunked dataset without writing anything. */
    create_chunked_dataset(filename, CHUNK_FACTOR, none);
    /* Reopen the dataset in parallel and check the file size. */
    parallel_access_dataset(filename, CHUNK_FACTOR, open_only, &file_id, &dataset);
    /* Reopen the dataset in parallel, then read and verify the data. */
    verify_data(filename, CHUNK_FACTOR, none, CLOSE, &file_id, &dataset);

    /* Case 2 */
    /* Create a small chunked dataset without writing anything. */
    create_chunked_dataset(filename, 20, none);
    /* Reopen the dataset in parallel and only extend it. */
    parallel_access_dataset(filename, CHUNK_FACTOR, extend_only, &file_id, &dataset);
    /* Reopen the dataset in parallel, then read and verify the data. */
    verify_data(filename, CHUNK_FACTOR, none, CLOSE, &file_id, &dataset);

    /* Case 3 */
    /* Create the chunked dataset and write to the second-to-last chunk. */
    create_chunked_dataset(filename, CHUNK_FACTOR, sec_last);
    /* Reopen the dataset in parallel, then read and verify the data.
     * The file and dataset are not closed. */
    verify_data(filename, CHUNK_FACTOR, sec_last, NO_CLOSE, &file_id, &dataset);
    /* All processes write to all the chunks in an interleaved way. */
    parallel_access_dataset(filename, CHUNK_FACTOR, write_all, &file_id, &dataset);
    /* Reopen the dataset in parallel, then read and verify the data. */
    verify_data(filename, CHUNK_FACTOR, all, CLOSE, &file_id, &dataset);
}
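
/*
 * A minimal, hypothetical sketch of the kind of setup the helper
 * create_chunked_dataset() above is assumed to perform: sequentially create
 * a file and a 1-D chunked dataset whose space is allocated at creation time
 * (H5D_ALLOC_TIME_EARLY), then close it without writing, so that later reads
 * should see only the fill value. The names example_create_early_alloc and
 * EXAMPLE_CHUNK_SIZE are illustrative and not part of the test code; error
 * checking is omitted for brevity.
 */
#include "hdf5.h"

#define EXAMPLE_CHUNK_SIZE 10

static int
example_create_early_alloc(const char *filename, hsize_t nchunks)
{
    hsize_t dims[1]    = {nchunks * EXAMPLE_CHUNK_SIZE};
    hsize_t maxdims[1] = {H5S_UNLIMITED}; /* allow a later extend, as in Case 2 */
    hsize_t chunk[1]   = {EXAMPLE_CHUNK_SIZE};
    hid_t   file, space, dcpl, dset;

    file  = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    space = H5Screate_simple(1, dims, maxdims);

    /* Chunked layout with space allocated eagerly at creation time */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, chunk);
    H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);

    dset = H5Dcreate2(file, "data", H5T_NATIVE_UCHAR, space,
                      H5P_DEFAULT, dcpl, H5P_DEFAULT);

    /* No write: every element should read back as the fill value
     * (0 for H5T_NATIVE_UCHAR unless a fill value is set on the dcpl). */
    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}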
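
/*
 * Likewise, a hypothetical sketch (not the test's parallel_access_dataset())
 * of the interleaved write that Case 3 describes: every MPI rank opens the
 * file with the MPI-IO driver and collectively writes every nprocs-th
 * chunk-sized block, starting at its own rank offset, so that together the
 * ranks cover the whole dataset. It reuses EXAMPLE_CHUNK_SIZE and the "data"
 * dataset name from the sketch above, assumes a parallel HDF5 build, and
 * assumes nchunks is a multiple of the number of ranks; error checking is
 * again omitted.
 */
#include <stdlib.h>
#include <mpi.h>

static int
example_interleaved_write(const char *filename, hsize_t nchunks)
{
    int            rank, nprocs;
    hid_t          fapl, file, dset, filespace, memspace, dxpl;
    hsize_t        start[1], stride[1], count[1], block[1], memdims[1];
    unsigned char *buf;
    hsize_t        i;

    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &nprocs);

    /* Open the file for parallel access through the MPI-IO driver */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
    file = H5Fopen(filename, H5F_ACC_RDWR, fapl);
    dset = H5Dopen2(file, "data", H5P_DEFAULT);

    /* This rank selects every nprocs-th chunk-sized block of the dataset */
    start[0]  = (hsize_t)rank * EXAMPLE_CHUNK_SIZE;
    stride[0] = (hsize_t)nprocs * EXAMPLE_CHUNK_SIZE;
    count[0]  = nchunks / (hsize_t)nprocs;
    block[0]  = EXAMPLE_CHUNK_SIZE;

    filespace = H5Dget_space(dset);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, stride, count, block);

    memdims[0] = count[0] * block[0];
    memspace   = H5Screate_simple(1, memdims, NULL);

    buf = (unsigned char *)malloc((size_t)memdims[0]);
    for (i = 0; i < memdims[0]; i++)
        buf[i] = (unsigned char)(i % 256);

    /* Collective write of this rank's interleaved selection */
    dxpl = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
    H5Dwrite(dset, H5T_NATIVE_UCHAR, memspace, filespace, dxpl, buf);

    free(buf);
    H5Pclose(dxpl);
    H5Sclose(memspace);
    H5Sclose(filespace);
    H5Dclose(dset);
    H5Pclose(fapl);
    H5Fclose(file);
    return 0;
}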