HDF5 C++ API Reference Manual

 

 

 

Main Page | Namespace List | Class Hierarchy | Class List | File List | Namespace Members | Class Members | File Members | Examples

dsets.cpp

Dataset example:
This example shows how to create, write, and read HDF5 datasets using the HDF5 C++ API.
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the files COPYING and Copyright.html. COPYING can be found at the root * * of the source code distribution tree; Copyright.html can be found at the * * root level of an installed copy of the electronic HDF5 document set and * * is linked from the top-level documents page. It can also be found at * * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html. If you do not have * * access to either file, you may request a copy from hdfhelp@ncsa.uiuc.edu. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ /***************************************************************************** FILE dsets.cpp - HDF5 C++ testing the functionalities associated with the C dataset interface (H5D) EXTERNAL ROUTINES/VARIABLES: These routines are in the test directory of the C library: h5_reset() -- in h5test.c, resets the library by closing it h5_fileaccess() -- in h5test.c, returns a file access template h5_fixname() -- in h5test.c, create a file name from a file base name h5_cleanup() -- in h5test.c, cleanup temporary test files ***************************************************************************/ #ifdef OLD_HEADER_FILENAME #include <iostream.h> #else #include <iostream> #endif #include "H5Cpp.h" #include "h5test.h" #include "testhdf5.h" #ifndef H5_NO_NAMESPACE using namespace H5; #endif #include "h5cpputil.h" const char *FILENAME[] = { "dataset", NULL }; #define DSET_DEFAULT_NAME "default" #define DSET_CHUNKED_NAME "chunked" #define DSET_SIMPLE_IO_NAME "simple_io" #define DSET_TCONV_NAME "tconv" #define DSET_COMPRESS_NAME "compressed" #define DSET_BOGUS_NAME "bogus" #define H5Z_FILTER_BOGUS 305 /* Local prototypes for filter 
functions */ static size_t bogus(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf); /*------------------------------------------------------------------------- * Function: test_create * * Purpose: Attempts to create a dataset. * * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * Friday, January 5, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_create( H5File& file) { TESTING("create, open, close"); try { /* Create the data space */ hsize_t dims[2]; dims[0] = 256; dims[1] = 512; DataSpace space (2, dims, NULL); /* * Create a dataset using the default dataset creation properties. * We're not sure what they are, so we won't check. */ DataSet *dataset = new DataSet (file.createDataSet (DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)); /* Close the dataset */ delete dataset; /* Add a comment to the dataset */ file.setComment (DSET_DEFAULT_NAME, "This is a dataset"); /* * Try creating a dataset that already exists. This should fail since a * dataset can only be created once. If an exception is not thrown * for this action by createDataSet, then display failure information * and jump to label error: to return. */ try { dataset = new DataSet (file.createDataSet (DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)); // continuation here, that means no exception has been thrown H5_FAILED(); cout << " Library allowed overwrite of existing dataset." << endl; goto error; } catch (FileIException E ) // catching invalid creating dataset { // Exception is expected. Do nothing here. } /* * Open the dataset we created above and then close it. This is how * existing datasets are accessed. */ dataset = new DataSet (file.openDataSet (DSET_DEFAULT_NAME)); delete dataset; /* * Try opening a non-existent dataset. 
This should fail so if an * exception is not thrown for this action by openDataSet, then * display failure information and jump to label error: to return. */ try { dataset = new DataSet (file.openDataSet( "does_not_exist" )); // continuation here, that means no exception has been thrown H5_FAILED(); cout << " Opened a non-existent dataset." << endl; goto error; } catch (FileIException E ) // catching creating non-existent dataset { // Exception is expected. Do nothing here. } /* * Create a new dataset that uses chunked storage instead of the default * layout. */ DSetCreatPropList create_parms; hsize_t csize[2]; csize[0] = 5; csize[1] = 100; create_parms.setChunk( 2, csize ); dataset = new DataSet (file.createDataSet (DSET_CHUNKED_NAME, PredType::NATIVE_DOUBLE, space, create_parms)); // Note: this one has no error message in C when failure occurs? /* * Close the chunked dataset. */ delete dataset; PASSED(); return 0; } // outer most try block // catch all dataset, file, space, plist exceptions catch (Exception E) { goto error; } error: return -1; } /*------------------------------------------------------------------------- * Function: check_values * * Purpose: Checks a read value against the written value. If they are * different, the function will * print out a message and the different values. This function * is made to reuse the code segment that is used in various * places throughout test_compression and in test_simple_io. * Where the C version * of this code segment "goto error," this function will * return -1, so that the caller can "goto error." 
* * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C code segment for checking values) * Friday, February 6, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static int check_values (hsize_t i, hsize_t j, int apoint, int acheck) { if (apoint != acheck) { H5_FAILED(); cout << " Read different values than written.\n" << endl; cout << " At index " << (unsigned long)i << "," << (unsigned long)j << endl; return -1; } return 0; } // check_values /*------------------------------------------------------------------------- * Function: test_simple_io * * Purpose: Tests simple I/O. That is, reading and writing a complete * multi-dimensional array without data type or data space * conversions, without compression, and stored contiguously. * * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * Friday, January 5, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_simple_io( H5File& file) { TESTING("simple I/O"); int points[100][200]; int check[100][200]; int i, j, n; /* Initialize the dataset */ for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { points[i][j] = n++; } } char* tconv_buf = new char [1000]; try { /* Create the data space */ hsize_t dims[2]; dims[0] = 100; dims[1] = 200; DataSpace space (2, dims, NULL); /* Create a small conversion buffer to test strip mining */ DSetMemXferPropList xfer; xfer.setBuffer (1000, tconv_buf, NULL); /* Create the dataset */ DataSet dataset (file.createDataSet (DSET_SIMPLE_IO_NAME, PredType::NATIVE_INT, space)); /* Write the data to the dataset */ dataset.write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Read the dataset back */ dataset.read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Check that the values read are the same as the values written */ for (i = 0; i < 100; 
i++) for (j = 0; j < 200; j++) { int status = check_values (i, j, points[i][j], check[i][j]); if (status == -1) goto error; } delete [] tconv_buf; PASSED(); return 0; } // end try // catch all dataset, space, plist exceptions catch (Exception E) { goto error; } error: // cleaning up if (tconv_buf) delete [] tconv_buf; return -1; } /*------------------------------------------------------------------------- * Function: test_tconv * * Purpose: Test some simple data type conversion stuff. * * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * Friday, January 5, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_tconv( H5File& file) { // Prepare buffers for input/output char *out=NULL, *in=NULL; out = new char [4*1000000]; // assert (out); - should use exception handler for new - BMR in = new char [4*1000000]; //assert (in); TESTING("data type conversion"); /* Initialize the dataset */ for (int i = 0; i < 1000000; i++) { out[i*4+0] = 0x11; out[i*4+1] = 0x22; out[i*4+2] = 0x33; out[i*4+3] = 0x44; } try { /* Create the data space */ hsize_t dims[1]; dims[0] = 1000000; DataSpace space (1, dims, NULL); /* Create the data set */ DataSet dataset (file.createDataSet (DSET_TCONV_NAME, PredType::STD_I32LE, space)); /* Write the data to the dataset */ dataset.write ((void*) out, PredType::STD_I32LE); /* Read data with byte order conversion */ dataset.read ((void*) in, PredType::STD_I32BE); /* Check */ for (int i = 0; i < 1000000; i++) { if (in[4*i+0]!=out[4*i+3] || in[4*i+1]!=out[4*i+2] || in[4*i+2]!=out[4*i+1] || in[4*i+3]!=out[4*i+0]) { H5_FAILED(); cout << " Read with byte order conversion failed." 
<< endl; goto error; } } delete [] out; delete [] in; cout << " PASSED" << endl; return 0; } // end try // catch all dataset and space exceptions catch (Exception E) { goto error; } error: delete [] out; delete [] in; return -1; } /* This message derives from H5Z */ const H5Z_class_t H5Z_BOGUS[1] = {{ H5Z_FILTER_BOGUS, /* Filter id number */ "bogus", /* Filter name for debugging */ NULL, /* The "can apply" callback */ NULL, /* The "set local" callback */ bogus, /* The actual filter function */ }}; /*------------------------------------------------------------------------- * Function: bogus * * Purpose: A bogus compression method that doesn't do anything. * * Return: Success: Data chunk size * * Failure: 0 * * Programmer: Robb Matzke * Tuesday, April 21, 1998 * * Modifications: * *------------------------------------------------------------------------- */ static size_t /*bogus(unsigned int UNUSED flags, size_t UNUSED cd_nelmts, const unsigned int UNUSED cd_values[], size_t nbytes, size_t UNUSED *buf_size, void UNUSED **buf) BMR: removed UNUSED for now until asking Q. or R. to pass compilation*/ bogus(unsigned int flags, size_t cd_nelmts, const unsigned int cd_values[], size_t nbytes, size_t *buf_size, void **buf) { return nbytes; } /*------------------------------------------------------------------------- * Function: test_compression * * Purpose: Tests dataset compression. If compression is requested when * it hasn't been compiled into the library (such as when * updating an existing compressed dataset) then data is sent to * the file uncompressed but no errors are returned. 
* * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * Friday, January 5, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_compression(H5File& file) { const char *not_supported; not_supported = " Deflate compression is not enabled."; int points[100][200]; int check[100][200]; hsize_t i, j, n; /* Initialize the dataset */ for (i = n = 0; i < 100; i++) { for (j = 0; j < 200; j++) { points[i][j] = n++; } } char* tconv_buf = new char [1000]; try { const hsize_t size[2] = {100, 200}; /* Create the data space */ DataSpace space1(2, size, NULL); /* * Create a small conversion buffer to test strip mining. We * might as well test all we can! */ DSetMemXferPropList xfer; xfer.setBuffer (1000, tconv_buf, NULL); /* Use chunked storage with compression */ DSetCreatPropList dscreatplist; const hsize_t chunk_size[2] = {2, 25}; dscreatplist.setChunk (2, chunk_size); dscreatplist.setDeflate (6); DataSet* dataset; #ifdef H5_HAVE_FILTER_DEFLATE TESTING("compression (setup)"); /* Create the dataset */ dataset = new DataSet (file.createDataSet (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist)); PASSED(); /*---------------------------------------------------------------------- * STEP 1: Read uninitialized data. It should be zero. *---------------------------------------------------------------------- */ TESTING("compression (uninitialized read)"); dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); for (i=0; i<size[0]; i++) { for (j=0; j<size[1]; j++) { if (0!=check[i][j]) { H5_FAILED(); cout << " Read a non-zero value." << endl; cout << " At index " << (unsigned long)i << "," << (unsigned long)j << endl; goto error; } } } PASSED(); /*---------------------------------------------------------------------- * STEP 2: Test compression by setting up a chunked dataset and writing * to it. 
*---------------------------------------------------------------------- */ TESTING("compression (write)"); for (i=n=0; i<size[0]; i++) { for (j=0; j<size[1]; j++) { points[i][j] = n++; } } dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); PASSED(); /*---------------------------------------------------------------------- * STEP 3: Try to read the data we just wrote. *---------------------------------------------------------------------- */ TESTING("compression (read)"); /* Read the dataset back */ dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Check that the values read are the same as the values written */ for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { int status = check_values (i, j, points[i][j], check[i][j]); if (status == -1) goto error; } PASSED(); /*---------------------------------------------------------------------- * STEP 4: Write new data over the top of the old data. The new data is * random thus not very compressible, and will cause the chunks to move * around as they grow. We only change values for the left half of the * dataset although we rewrite the whole thing. *---------------------------------------------------------------------- */ TESTING("compression (modify)"); for (i=0; i<size[0]; i++) { for (j=0; j<size[1]/2; j++) { points[i][j] = rand (); } } dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Read the dataset back and check it */ dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Check that the values read are the same as the values written */ for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { int status = check_values (i, j, points[i][j], check[i][j]); if (status == -1) goto error; } PASSED(); /*---------------------------------------------------------------------- * STEP 5: Close the dataset and then open it and read it again. 
This * insures that the compression message is picked up properly from the * object header. *---------------------------------------------------------------------- */ TESTING("compression (re-open)"); delete dataset; dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME)); dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Check that the values read are the same as the values written */ for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { int status = check_values (i, j, points[i][j], check[i][j]); if (status == -1) goto error; } PASSED(); /*---------------------------------------------------------------------- * STEP 6: Test partial I/O by writing to and then reading from a * hyperslab of the dataset. The hyperslab does not line up on chunk * boundaries (we know that case already works from above tests). *---------------------------------------------------------------------- */ TESTING("compression (partial I/O)"); const hsize_t hs_size[2] = {4, 50}; const hssize_t hs_offset[2] = {7, 30}; for (i = 0; i < hs_size[0]; i++) { for (j = 0; j < hs_size[1]; j++) { points[hs_offset[0]+i][hs_offset[1]+j] = rand (); } } space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset ); dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer); dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer); /* Check that the values read are the same as the values written */ for (i=0; i<hs_size[0]; i++) { for (j=0; j<hs_size[1]; j++) { if (points[hs_offset[0]+i][hs_offset[1]+j] != check[hs_offset[0]+i][hs_offset[1]+j]) { H5_FAILED(); cout << " Read different values than written.\n" << endl; cout << " At index " << (unsigned long)(hs_offset[0]+i) << "," << (unsigned long)(hs_offset[1]+j) << endl; cout << " At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl; cout << " At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl; goto error; } } } delete dataset; PASSED(); 
#else TESTING("deflate filter"); SKIPPED(); cout << not_supported << endl; #endif /*---------------------------------------------------------------------- * STEP 7: Register an application-defined compression method and use it * to write and then read the dataset. *---------------------------------------------------------------------- */ TESTING("compression (app-defined method)"); #ifdef H5_WANT_H5_V1_4_COMPAT if (H5Zregister (H5Z_FILTER_BOGUS, "bogus", bogus)<0) goto error; #else /* H5_WANT_H5_V1_4_COMPAT */ if (H5Zregister (H5Z_BOGUS)<0) goto error; #endif /* H5_WANT_H5_V1_4_COMPAT */ if (H5Pset_filter (dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL)<0) goto error; dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL); DataSpace space2 (2, size, NULL); dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist)); dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); /* Check that the values read are the same as the values written */ for (i = 0; i < size[0]; i++) for (j = 0; j < size[1]; j++) { int status = check_values (i, j, points[i][j], check[i][j]); if (status == -1) goto error; } PASSED(); /*---------------------------------------------------------------------- * Cleanup *---------------------------------------------------------------------- */ delete dataset; delete [] tconv_buf; return 0; } // end try // catch all dataset, file, space, and plist exceptions catch (Exception E) { goto error; } error: // cleaning up if (tconv_buf) delete [] tconv_buf; return -1; } /*------------------------------------------------------------------------- * Function: test_multiopen * * Purpose: Tests that a bug no longer exists. If a dataset is opened * twice and one of the handles is used to extend the dataset, * then the other handle should return the new size when * queried. 
* * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * Saturday, February 17, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_multiopen (H5File& file) { TESTING("multi-open with extending"); try { // Create a dataset creation property list DSetCreatPropList dcpl; // Set chunk size to given size hsize_t cur_size[1] = {10}; dcpl.setChunk (1, cur_size); // Create a simple data space with unlimited size static hsize_t max_size[1] = {H5S_UNLIMITED}; DataSpace* space = new DataSpace (1, cur_size, max_size); // Create first dataset DataSet dset1 = file.createDataSet ("multiopen", PredType::NATIVE_INT, *space, dcpl); // Open again the first dataset from the file to another DataSet object. DataSet dset2 = file.openDataSet ("multiopen"); // Relieve the dataspace delete space; // Extend the dimensionality of the first dataset cur_size[0] = 20; dset1.extend (cur_size); /* Get the size from the second handle */ space = new DataSpace (dset2.getSpace()); hsize_t tmp_size[1]; space->getSimpleExtentDims (tmp_size); if (cur_size[0]!=tmp_size[0]) { H5_FAILED(); cout << " Got " << (int)tmp_size[0] << " instead of " << (int)cur_size[0] << "!" 
<< endl; delete space; goto error; } delete space; PASSED(); return 0; } // end try block // catch all dataset, file, space, and plist exceptions catch (Exception E) { goto error; } error: return -1; } /*------------------------------------------------------------------------- * Function: test_types * * Purpose: Test various types - should be moved to dtypes.cpp * * Return: Success: 0 * * Failure: -1 * * Programmer: Binh-Minh Ribler (using C version) * February 17, 2001 * * Modifications: * *------------------------------------------------------------------------- */ static herr_t test_types(H5File& file) { size_t i; TESTING("various datatypes"); try { // Create a group in the file that was passed in from the caller Group grp = file.createGroup ("typetests"); /* bitfield_1 */ unsigned char buf[32]; hsize_t nelmts = sizeof(buf); DataType type; try { // block of bitfield_1 // test copying a predefined type type.copy (PredType::STD_B8LE); // Test copying a user-defined type using DataType::copy DataType copied_type; copied_type.copy(type); // Test copying a user-defined type using DataType::operator= DataType another_copied_type; another_copied_type = type; // Test copying a user-defined int type using DataType::operator= IntType orig_int(PredType::STD_B8LE); DataType generic_type; generic_type = orig_int; // Test copying an integer predefined type IntType new_int_type(PredType::STD_B8LE); // Test copying an int predefined type using DataType::operator= IntType another_int_type; another_int_type = new_int_type; DataSpace space (1, &nelmts); DataSet* dset = new DataSet(grp.createDataSet("bitfield_1", type, space)); // Fill buffer for (i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. 
try { dset->write (buf, type); } catch(DataSetIException E) { delete dset; goto error; } delete dset; } // end try block of bitfield_1 // catch exceptions thrown in try block of bitfield_1 catch (Exception E) { cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } /* bitfield_2 */ nelmts = sizeof(buf)/2; try { // bitfield_2 block type.copy (PredType::STD_B16LE); DataSpace space (1, &nelmts); DataSet* dset = new DataSet(grp.createDataSet("bitfield_2", type, space)); // Fill buffer for (i=0; i<sizeof(buf); i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. try { dset->write (buf, type); } catch(DataSetIException E) { cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; delete dset; goto error; } delete dset; } // end try block of bitfield_2 // catch exceptions thrown in try block of bitfield_2 catch (Exception E) { cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } /* opaque_1 */ DataType* optype = new DataType(H5T_OPAQUE, 1); try { // opaque_1 block nelmts = sizeof(buf); DataSpace space (1, &nelmts); optype->setTag ("testing 1-byte opaque type"); DataSet* dset = new DataSet(grp.createDataSet("opaque_1", *optype, space)); // Fill buffer for (i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. 
try { dset->write (buf, *optype); } catch(DataSetIException E) { delete dset; goto error; } delete dset; delete optype; } // end try block of opaque_1 // catch exceptions thrown in try block of opaque_1 catch (DataSetIException E) { delete optype; cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } catch (Exception E) { cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } /* opaque_2 */ try { // block opaque_2 nelmts = sizeof(buf)/4; DataSpace space (1, &nelmts); optype = new DataType(H5T_OPAQUE, 4); optype->setTag ("testing 4-byte opaque type"); DataSet* dset = new DataSet(grp.createDataSet("opaque_2", *optype, space)); // Fill buffer for (i=0; i<sizeof(buf); i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i; // Write data from buf using all default dataspaces and property // list; if writing fails, deallocate dset and return. try { dset->write (buf, *optype); } catch(DataSetIException E) { delete dset; goto error; } delete dset; delete optype; } //end try block of opaque_2 catch (DataSetIException E) { delete optype; cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } catch (Exception E) { cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } PASSED(); return 0; } // end top try block catch (Exception E) { // Group and DataType exceptions cout << "Failure in " << E.getFuncName() << " - " << E.getDetailMsg() << endl; goto error; } error: return -1; } /*------------------------------------------------------------------------- * Function: main * * Purpose: Tests the dataset interface (H5D) * * Return: Success: exit(0) * * Failure: exit(1) * * Programmer: Binh-Minh Ribler (using C version) * Friday, January 5, 2001 * * Modifications: * Nov 12, 01: * - moved h5_cleanup to outside of try block because * dataset.h5 cannot be removed until "file" is out of * scope and dataset.h5 is closed. 
* *------------------------------------------------------------------------- */ int main(void) { h5_reset(); // in h5test.c, resets the library by closing it hid_t fapl_id; fapl_id = h5_fileaccess(); // in h5test.c, returns a file access template char filename[1024]; h5_fixname(FILENAME[0], fapl_id, filename, sizeof filename); int nerrors=0; // keep track of number of failures occurr try { // Turn of the auto-printing when failure occurs so that we can // handle the errors appropriately since sometime failures are // caused deliberately and expected. Exception::dontPrint(); // Use the file access template id to create a file access prop. // list object to pass in H5File::H5File FileAccPropList fapl(fapl_id); H5File file( filename, H5F_ACC_TRUNC, FileCreatPropList::DEFAULT, fapl); /* Cause the library to emit initial messages */ Group grp = file.createGroup( "emit diagnostics", 0); grp.setComment( ".", "Causes diagnostic messages to be emitted"); nerrors += test_create(file)<0 ?1:0; nerrors += test_simple_io(file)<0 ?1:0; nerrors += test_tconv(file)<0 ?1:0; nerrors += test_compression(file)<0 ?1:0; nerrors += test_multiopen (file)<0 ?1:0; nerrors += test_types(file)<0 ?1:0; // increment the ref count of this property list so that the // property list id won't be closed when fapl goes out of scope. // This is a bad hack, but I want to use existing routine h5_cleanup! fapl.incRefCount(); } catch (Exception E) { return(test_report(nerrors, string(" Dataset"))); } /* use C test utility routine to clean up data files */ h5_cleanup(FILENAME, fapl_id); /* print out dsets test results */ cerr << endl << endl; return(test_report(nerrors, string(" Dataset"))); }
00001 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 00002 * Copyright by the Board of Trustees of the University of Illinois. * 00003 * All rights reserved. * 00004 * * 00005 * This file is part of HDF5. The full HDF5 copyright notice, including * 00006 * terms governing use, modification, and redistribution, is contained in * 00007 * the files COPYING and Copyright.html. COPYING can be found at the root * 00008 * of the source code distribution tree; Copyright.html can be found at the * 00009 * root level of an installed copy of the electronic HDF5 document set and * 00010 * is linked from the top-level documents page. It can also be found at * 00011 * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html. If you do not have * 00012 * access to either file, you may request a copy from hdfhelp@ncsa.uiuc.edu. * 00013 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 00014 00015 /***************************************************************************** 00016 FILE 00017 dsets.cpp - HDF5 C++ testing the functionalities associated with the 00018 C dataset interface (H5D) 00019 00020 EXTERNAL ROUTINES/VARIABLES: 00021 These routines are in the test directory of the C library: 00022 h5_reset() -- in h5test.c, resets the library by closing it 00023 h5_fileaccess() -- in h5test.c, returns a file access template 00024 h5_fixname() -- in h5test.c, create a file name from a file base name 00025 h5_cleanup() -- in h5test.c, cleanup temporary test files 00026 00027 ***************************************************************************/ 00028 00029 #ifdef OLD_HEADER_FILENAME 00030 #include <iostream.h> 00031 #else 00032 #include <iostream> 00033 #endif 00034 00035 #include "H5Cpp.h" 00036 #include "h5test.h" 00037 #include "testhdf5.h" 00038 00039 #ifndef H5_NO_NAMESPACE 00040 using namespace H5; 00041 #endif 00042 00043 #include "h5cpputil.h" 00044 00045 const char *FILENAME[] = { 00046 "dataset", 00047 NULL 00048 
}; 00049 00050 #define DSET_DEFAULT_NAME "default" 00051 #define DSET_CHUNKED_NAME "chunked" 00052 #define DSET_SIMPLE_IO_NAME "simple_io" 00053 #define DSET_TCONV_NAME "tconv" 00054 #define DSET_COMPRESS_NAME "compressed" 00055 #define DSET_BOGUS_NAME "bogus" 00056 00057 #define H5Z_FILTER_BOGUS 305 00058 00059 /* Local prototypes for filter functions */ 00060 static size_t bogus(unsigned int flags, size_t cd_nelmts, 00061 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf); 00062 00063 00064 /*------------------------------------------------------------------------- 00065 * Function: test_create 00066 * 00067 * Purpose: Attempts to create a dataset. 00068 * 00069 * Return: Success: 0 00070 * 00071 * Failure: -1 00072 * 00073 * Programmer: Binh-Minh Ribler (using C version) 00074 * Friday, January 5, 2001 00075 * 00076 * Modifications: 00077 * 00078 *------------------------------------------------------------------------- 00079 */ 00080 static herr_t 00081 test_create( H5File& file) 00082 { 00083 TESTING("create, open, close"); 00084 00085 try { 00086 /* Create the data space */ 00087 hsize_t dims[2]; 00088 dims[0] = 256; 00089 dims[1] = 512; 00090 DataSpace space (2, dims, NULL); 00091 00092 /* 00093 * Create a dataset using the default dataset creation properties. 00094 * We're not sure what they are, so we won't check. 00095 */ 00096 DataSet *dataset = new DataSet (file.createDataSet 00097 (DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)); 00098 00099 /* Close the dataset */ 00100 delete dataset; 00101 00102 /* Add a comment to the dataset */ 00103 file.setComment (DSET_DEFAULT_NAME, "This is a dataset"); 00104 00105 /* 00106 * Try creating a dataset that already exists. This should fail since a 00107 * dataset can only be created once. If an exception is not thrown 00108 * for this action by createDataSet, then display failure information 00109 * and jump to label error: to return. 
00110 */ 00111 try { 00112 dataset = new DataSet (file.createDataSet 00113 (DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)); 00114 // continuation here, that means no exception has been thrown 00115 H5_FAILED(); 00116 cout << " Library allowed overwrite of existing dataset." << endl; 00117 goto error; 00118 } 00119 catch (FileIException E ) // catching invalid creating dataset 00120 { 00121 // Exception is expected. Do nothing here. 00122 } 00123 /* 00124 * Open the dataset we created above and then close it. This is how 00125 * existing datasets are accessed. 00126 */ 00127 dataset = new DataSet (file.openDataSet (DSET_DEFAULT_NAME)); 00128 delete dataset; 00129 00130 /* 00131 * Try opening a non-existent dataset. This should fail so if an 00132 * exception is not thrown for this action by openDataSet, then 00133 * display failure information and jump to label error: to return. 00134 */ 00135 try { 00136 dataset = new DataSet (file.openDataSet( "does_not_exist" )); 00137 // continuation here, that means no exception has been thrown 00138 H5_FAILED(); 00139 cout << " Opened a non-existent dataset." << endl; 00140 goto error; 00141 } 00142 catch (FileIException E ) // catching creating non-existent dataset 00143 { 00144 // Exception is expected. Do nothing here. 00145 } 00146 00147 /* 00148 * Create a new dataset that uses chunked storage instead of the default 00149 * layout. 00150 */ 00151 DSetCreatPropList create_parms; 00152 hsize_t csize[2]; 00153 csize[0] = 5; 00154 csize[1] = 100; 00155 create_parms.setChunk( 2, csize ); 00156 00157 dataset = new DataSet (file.createDataSet 00158 (DSET_CHUNKED_NAME, PredType::NATIVE_DOUBLE, space, create_parms)); 00159 // Note: this one has no error message in C when failure occurs? 00160 00161 /* 00162 * Close the chunked dataset. 
00163 */ 00164 delete dataset; 00165 00166 PASSED(); 00167 return 0; 00168 } // outer most try block 00169 00170 // catch all dataset, file, space, plist exceptions 00171 catch (Exception E) { goto error; } 00172 00173 error: 00174 return -1; 00175 } 00176 00177 /*------------------------------------------------------------------------- 00178 * Function: check_values 00179 * 00180 * Purpose: Checks a read value against the written value. If they are 00181 * different, the function will 00182 * print out a message and the different values. This function 00183 * is made to reuse the code segment that is used in various 00184 * places throughout test_compression and in test_simple_io. 00185 * Where the C version 00186 * of this code segment "goto error," this function will 00187 * return -1, so that the caller can "goto error." 00188 * 00189 * Return: Success: 0 00190 * 00191 * Failure: -1 00192 * 00193 * Programmer: Binh-Minh Ribler (using C code segment for checking values) 00194 * Friday, February 6, 2001 00195 * 00196 * Modifications: 00197 * 00198 *------------------------------------------------------------------------- 00199 */ 00200 static int 00201 check_values (hsize_t i, hsize_t j, int apoint, int acheck) 00202 { 00203 if (apoint != acheck) 00204 { 00205 H5_FAILED(); 00206 cout << " Read different values than written.\n" << endl; 00207 cout << " At index " << (unsigned long)i << "," << 00208 (unsigned long)j << endl; 00209 return -1; 00210 } 00211 return 0; 00212 } // check_values 00213 00214 /*------------------------------------------------------------------------- 00215 * Function: test_simple_io 00216 * 00217 * Purpose: Tests simple I/O. That is, reading and writing a complete 00218 * multi-dimensional array without data type or data space 00219 * conversions, without compression, and stored contiguously. 
00220 * 00221 * Return: Success: 0 00222 * 00223 * Failure: -1 00224 * 00225 * Programmer: Binh-Minh Ribler (using C version) 00226 * Friday, January 5, 2001 00227 * 00228 * Modifications: 00229 * 00230 *------------------------------------------------------------------------- 00231 */ 00232 static herr_t 00233 test_simple_io( H5File& file) 00234 { 00235 00236 TESTING("simple I/O"); 00237 00238 int points[100][200]; 00239 int check[100][200]; 00240 int i, j, n; 00241 00242 /* Initialize the dataset */ 00243 for (i = n = 0; i < 100; i++) 00244 { 00245 for (j = 0; j < 200; j++) { 00246 points[i][j] = n++; 00247 } 00248 } 00249 00250 char* tconv_buf = new char [1000]; 00251 try 00252 { 00253 /* Create the data space */ 00254 hsize_t dims[2]; 00255 dims[0] = 100; 00256 dims[1] = 200; 00257 DataSpace space (2, dims, NULL); 00258 00259 /* Create a small conversion buffer to test strip mining */ 00260 DSetMemXferPropList xfer; 00261 00262 xfer.setBuffer (1000, tconv_buf, NULL); 00263 00264 /* Create the dataset */ 00265 DataSet dataset (file.createDataSet (DSET_SIMPLE_IO_NAME, PredType::NATIVE_INT, space)); 00266 00267 /* Write the data to the dataset */ 00268 dataset.write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00269 00270 /* Read the dataset back */ 00271 dataset.read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00272 00273 /* Check that the values read are the same as the values written */ 00274 for (i = 0; i < 100; i++) 00275 for (j = 0; j < 200; j++) 00276 { 00277 int status = check_values (i, j, points[i][j], check[i][j]); 00278 if (status == -1) goto error; 00279 } 00280 00281 delete [] tconv_buf; 00282 PASSED(); 00283 return 0; 00284 } // end try 00285 00286 // catch all dataset, space, plist exceptions 00287 catch (Exception E) { goto error; } 00288 00289 error: 00290 // cleaning up 00291 if (tconv_buf) 00292 delete [] tconv_buf; 00293 return -1; 00294 } 00295 00296 
/*------------------------------------------------------------------------- 00297 * Function: test_tconv 00298 * 00299 * Purpose: Test some simple data type conversion stuff. 00300 * 00301 * Return: Success: 0 00302 * 00303 * Failure: -1 00304 * 00305 * Programmer: Binh-Minh Ribler (using C version) 00306 * Friday, January 5, 2001 00307 * 00308 * Modifications: 00309 * 00310 *------------------------------------------------------------------------- 00311 */ 00312 static herr_t 00313 test_tconv( H5File& file) 00314 { 00315 // Prepare buffers for input/output 00316 char *out=NULL, *in=NULL; 00317 out = new char [4*1000000]; 00318 // assert (out); - should use exception handler for new - BMR 00319 in = new char [4*1000000]; 00320 //assert (in); 00321 00322 TESTING("data type conversion"); 00323 00324 /* Initialize the dataset */ 00325 for (int i = 0; i < 1000000; i++) { 00326 out[i*4+0] = 0x11; 00327 out[i*4+1] = 0x22; 00328 out[i*4+2] = 0x33; 00329 out[i*4+3] = 0x44; 00330 } 00331 00332 try 00333 { 00334 /* Create the data space */ 00335 hsize_t dims[1]; 00336 dims[0] = 1000000; 00337 DataSpace space (1, dims, NULL); 00338 00339 /* Create the data set */ 00340 DataSet dataset (file.createDataSet (DSET_TCONV_NAME, PredType::STD_I32LE, space)); 00341 00342 /* Write the data to the dataset */ 00343 dataset.write ((void*) out, PredType::STD_I32LE); 00344 00345 /* Read data with byte order conversion */ 00346 dataset.read ((void*) in, PredType::STD_I32BE); 00347 00348 /* Check */ 00349 for (int i = 0; i < 1000000; i++) { 00350 if (in[4*i+0]!=out[4*i+3] || 00351 in[4*i+1]!=out[4*i+2] || 00352 in[4*i+2]!=out[4*i+1] || 00353 in[4*i+3]!=out[4*i+0]) 00354 { 00355 H5_FAILED(); 00356 cout << " Read with byte order conversion failed." 
<< endl; 00357 goto error; 00358 } 00359 } 00360 00361 delete [] out; 00362 delete [] in; 00363 cout << " PASSED" << endl; 00364 return 0; 00365 } // end try 00366 00367 // catch all dataset and space exceptions 00368 catch (Exception E) { goto error; } 00369 00370 error: 00371 delete [] out; 00372 delete [] in; 00373 return -1; 00374 } 00375 00376 /* This message derives from H5Z */ 00377 const H5Z_class_t H5Z_BOGUS[1] = {{ 00378 H5Z_FILTER_BOGUS, /* Filter id number */ 00379 "bogus", /* Filter name for debugging */ 00380 NULL, /* The "can apply" callback */ 00381 NULL, /* The "set local" callback */ 00382 bogus, /* The actual filter function */ 00383 }}; 00384 00385 /*------------------------------------------------------------------------- 00386 * Function: bogus 00387 * 00388 * Purpose: A bogus compression method that doesn't do anything. 00389 * 00390 * Return: Success: Data chunk size 00391 * 00392 * Failure: 0 00393 * 00394 * Programmer: Robb Matzke 00395 * Tuesday, April 21, 1998 00396 * 00397 * Modifications: 00398 * 00399 *------------------------------------------------------------------------- 00400 */ 00401 static size_t 00402 /*bogus(unsigned int UNUSED flags, size_t UNUSED cd_nelmts, 00403 const unsigned int UNUSED cd_values[], size_t nbytes, 00404 size_t UNUSED *buf_size, void UNUSED **buf) 00405 BMR: removed UNUSED for now until asking Q. or R. to pass compilation*/ 00406 bogus(unsigned int flags, size_t cd_nelmts, 00407 const unsigned int cd_values[], size_t nbytes, 00408 size_t *buf_size, void **buf) 00409 { 00410 return nbytes; 00411 } 00412 00413 00414 /*------------------------------------------------------------------------- 00415 * Function: test_compression 00416 * 00417 * Purpose: Tests dataset compression. If compression is requested when 00418 * it hasn't been compiled into the library (such as when 00419 * updating an existing compressed dataset) then data is sent to 00420 * the file uncompressed but no errors are returned. 
00421 * 00422 * Return: Success: 0 00423 * 00424 * Failure: -1 00425 * 00426 * Programmer: Binh-Minh Ribler (using C version) 00427 * Friday, January 5, 2001 00428 * 00429 * Modifications: 00430 * 00431 *------------------------------------------------------------------------- 00432 */ 00433 00434 static herr_t 00435 test_compression(H5File& file) 00436 { 00437 const char *not_supported; 00438 not_supported = " Deflate compression is not enabled."; 00439 int points[100][200]; 00440 int check[100][200]; 00441 hsize_t i, j, n; 00442 00443 /* Initialize the dataset */ 00444 for (i = n = 0; i < 100; i++) 00445 { 00446 for (j = 0; j < 200; j++) { 00447 points[i][j] = n++; 00448 } 00449 } 00450 char* tconv_buf = new char [1000]; 00451 00452 try 00453 { 00454 const hsize_t size[2] = {100, 200}; 00455 /* Create the data space */ 00456 DataSpace space1(2, size, NULL); 00457 00458 /* 00459 * Create a small conversion buffer to test strip mining. We 00460 * might as well test all we can! 00461 */ 00462 DSetMemXferPropList xfer; 00463 00464 xfer.setBuffer (1000, tconv_buf, NULL); 00465 00466 /* Use chunked storage with compression */ 00467 DSetCreatPropList dscreatplist; 00468 00469 const hsize_t chunk_size[2] = {2, 25}; 00470 dscreatplist.setChunk (2, chunk_size); 00471 dscreatplist.setDeflate (6); 00472 00473 DataSet* dataset; 00474 00475 #ifdef H5_HAVE_FILTER_DEFLATE 00476 TESTING("compression (setup)"); 00477 00478 /* Create the dataset */ 00479 dataset = new DataSet (file.createDataSet 00480 (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist)); 00481 00482 PASSED(); 00483 00484 /*---------------------------------------------------------------------- 00485 * STEP 1: Read uninitialized data. It should be zero. 
00486 *---------------------------------------------------------------------- 00487 */ 00488 TESTING("compression (uninitialized read)"); 00489 00490 dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00491 00492 for (i=0; i<size[0]; i++) { 00493 for (j=0; j<size[1]; j++) { 00494 if (0!=check[i][j]) { 00495 H5_FAILED(); 00496 cout << " Read a non-zero value." << endl; 00497 cout << " At index " << (unsigned long)i << "," << 00498 (unsigned long)j << endl; 00499 goto error; 00500 } 00501 } 00502 } 00503 PASSED(); 00504 00505 /*---------------------------------------------------------------------- 00506 * STEP 2: Test compression by setting up a chunked dataset and writing 00507 * to it. 00508 *---------------------------------------------------------------------- 00509 */ 00510 TESTING("compression (write)"); 00511 00512 for (i=n=0; i<size[0]; i++) 00513 { 00514 for (j=0; j<size[1]; j++) 00515 { 00516 points[i][j] = n++; 00517 } 00518 } 00519 00520 dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00521 00522 PASSED(); 00523 00524 /*---------------------------------------------------------------------- 00525 * STEP 3: Try to read the data we just wrote. 00526 *---------------------------------------------------------------------- 00527 */ 00528 TESTING("compression (read)"); 00529 00530 /* Read the dataset back */ 00531 dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00532 00533 /* Check that the values read are the same as the values written */ 00534 for (i = 0; i < size[0]; i++) 00535 for (j = 0; j < size[1]; j++) 00536 { 00537 int status = check_values (i, j, points[i][j], check[i][j]); 00538 if (status == -1) goto error; 00539 } 00540 00541 PASSED(); 00542 00543 /*---------------------------------------------------------------------- 00544 * STEP 4: Write new data over the top of the old data. 
The new data is 00545 * random thus not very compressible, and will cause the chunks to move 00546 * around as they grow. We only change values for the left half of the 00547 * dataset although we rewrite the whole thing. 00548 *---------------------------------------------------------------------- 00549 */ 00550 TESTING("compression (modify)"); 00551 00552 for (i=0; i<size[0]; i++) 00553 { 00554 for (j=0; j<size[1]/2; j++) 00555 { 00556 points[i][j] = rand (); 00557 } 00558 } 00559 dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00560 00561 /* Read the dataset back and check it */ 00562 dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00563 00564 /* Check that the values read are the same as the values written */ 00565 for (i = 0; i < size[0]; i++) 00566 for (j = 0; j < size[1]; j++) 00567 { 00568 int status = check_values (i, j, points[i][j], check[i][j]); 00569 if (status == -1) goto error; 00570 } 00571 00572 PASSED(); 00573 00574 /*---------------------------------------------------------------------- 00575 * STEP 5: Close the dataset and then open it and read it again. This 00576 * insures that the compression message is picked up properly from the 00577 * object header. 
00578 *---------------------------------------------------------------------- 00579 */ 00580 TESTING("compression (re-open)"); 00581 00582 delete dataset; 00583 00584 dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME)); 00585 dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00586 00587 /* Check that the values read are the same as the values written */ 00588 for (i = 0; i < size[0]; i++) 00589 for (j = 0; j < size[1]; j++) 00590 { 00591 int status = check_values (i, j, points[i][j], check[i][j]); 00592 if (status == -1) goto error; 00593 } 00594 00595 PASSED(); 00596 00597 00598 /*---------------------------------------------------------------------- 00599 * STEP 6: Test partial I/O by writing to and then reading from a 00600 * hyperslab of the dataset. The hyperslab does not line up on chunk 00601 * boundaries (we know that case already works from above tests). 00602 *---------------------------------------------------------------------- 00603 */ 00604 TESTING("compression (partial I/O)"); 00605 00606 const hsize_t hs_size[2] = {4, 50}; 00607 const hssize_t hs_offset[2] = {7, 30}; 00608 for (i = 0; i < hs_size[0]; i++) { 00609 for (j = 0; j < hs_size[1]; j++) { 00610 points[hs_offset[0]+i][hs_offset[1]+j] = rand (); 00611 } 00612 } 00613 space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset ); 00614 dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer); 00615 dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer); 00616 00617 /* Check that the values read are the same as the values written */ 00618 for (i=0; i<hs_size[0]; i++) { 00619 for (j=0; j<hs_size[1]; j++) { 00620 if (points[hs_offset[0]+i][hs_offset[1]+j] != 00621 check[hs_offset[0]+i][hs_offset[1]+j]) { 00622 H5_FAILED(); 00623 cout << " Read different values than written.\n" << endl; 00624 cout << " At index " << (unsigned long)(hs_offset[0]+i) << 00625 "," << (unsigned long)(hs_offset[1]+j) << endl; 00626 
00627 cout << " At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl; 00628 cout << " At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl; 00629 goto error; 00630 } 00631 } 00632 } 00633 00634 delete dataset; 00635 00636 PASSED(); 00637 00638 #else 00639 TESTING("deflate filter"); 00640 SKIPPED(); 00641 cout << not_supported << endl; 00642 #endif 00643 00644 /*---------------------------------------------------------------------- 00645 * STEP 7: Register an application-defined compression method and use it 00646 * to write and then read the dataset. 00647 *---------------------------------------------------------------------- 00648 */ 00649 TESTING("compression (app-defined method)"); 00650 00651 #ifdef H5_WANT_H5_V1_4_COMPAT 00652 if (H5Zregister (H5Z_FILTER_BOGUS, "bogus", bogus)<0) goto error; 00653 #else /* H5_WANT_H5_V1_4_COMPAT */ 00654 if (H5Zregister (H5Z_BOGUS)<0) goto error; 00655 #endif /* H5_WANT_H5_V1_4_COMPAT */ 00656 if (H5Pset_filter (dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL)<0) goto error; 00657 dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL); 00658 00659 DataSpace space2 (2, size, NULL); 00660 dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist)); 00661 00662 dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00663 dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer); 00664 00665 /* Check that the values read are the same as the values written */ 00666 for (i = 0; i < size[0]; i++) 00667 for (j = 0; j < size[1]; j++) 00668 { 00669 int status = check_values (i, j, points[i][j], check[i][j]); 00670 if (status == -1) 00671 goto error; 00672 } 00673 00674 PASSED(); 00675 00676 /*---------------------------------------------------------------------- 00677 * Cleanup 00678 *---------------------------------------------------------------------- 00679 */ 00680 delete dataset; 00681 
delete [] tconv_buf; 00682 return 0; 00683 } // end try 00684 00685 // catch all dataset, file, space, and plist exceptions 00686 catch (Exception E) { goto error; } 00687 00688 error: 00689 // cleaning up 00690 if (tconv_buf) 00691 delete [] tconv_buf; 00692 return -1; 00693 } 00694 00695 /*------------------------------------------------------------------------- 00696 * Function: test_multiopen 00697 * 00698 * Purpose: Tests that a bug no longer exists. If a dataset is opened 00699 * twice and one of the handles is used to extend the dataset, 00700 * then the other handle should return the new size when 00701 * queried. 00702 * 00703 * Return: Success: 0 00704 * 00705 * Failure: -1 00706 * 00707 * Programmer: Binh-Minh Ribler (using C version) 00708 * Saturday, February 17, 2001 00709 * 00710 * Modifications: 00711 * 00712 *------------------------------------------------------------------------- 00713 */ 00714 static herr_t 00715 test_multiopen (H5File& file) 00716 { 00717 00718 TESTING("multi-open with extending"); 00719 try { 00720 00721 // Create a dataset creation property list 00722 DSetCreatPropList dcpl; 00723 00724 // Set chunk size to given size 00725 hsize_t cur_size[1] = {10}; 00726 dcpl.setChunk (1, cur_size); 00727 00728 // Create a simple data space with unlimited size 00729 static hsize_t max_size[1] = {H5S_UNLIMITED}; 00730 DataSpace* space = new DataSpace (1, cur_size, max_size); 00731 00732 // Create first dataset 00733 DataSet dset1 = file.createDataSet ("multiopen", PredType::NATIVE_INT, *space, dcpl); 00734 00735 // Open again the first dataset from the file to another DataSet object. 
00736 DataSet dset2 = file.openDataSet ("multiopen"); 00737 00738 // Relieve the dataspace 00739 delete space; 00740 00741 // Extend the dimensionality of the first dataset 00742 cur_size[0] = 20; 00743 dset1.extend (cur_size); 00744 00745 /* Get the size from the second handle */ 00746 space = new DataSpace (dset2.getSpace()); 00747 00748 hsize_t tmp_size[1]; 00749 space->getSimpleExtentDims (tmp_size); 00750 if (cur_size[0]!=tmp_size[0]) 00751 { 00752 H5_FAILED(); 00753 cout << " Got " << (int)tmp_size[0] << " instead of " 00754 << (int)cur_size[0] << "!" << endl; 00755 delete space; 00756 goto error; 00757 } 00758 00759 delete space; 00760 PASSED(); 00761 return 0; 00762 } // end try block 00763 00764 // catch all dataset, file, space, and plist exceptions 00765 catch (Exception E) 00766 { goto error; } 00767 00768 error: 00769 return -1; 00770 } 00771 00772 00773 /*------------------------------------------------------------------------- 00774 * Function: test_types 00775 * 00776 * Purpose: Test various types - should be moved to dtypes.cpp 00777 * 00778 * Return: Success: 0 00779 * 00780 * Failure: -1 00781 * 00782 * Programmer: Binh-Minh Ribler (using C version) 00783 * February 17, 2001 00784 * 00785 * Modifications: 00786 * 00787 *------------------------------------------------------------------------- 00788 */ 00789 static herr_t 00790 test_types(H5File& file) 00791 { 00792 size_t i; 00793 00794 TESTING("various datatypes"); 00795 try { 00796 00797 // Create a group in the file that was passed in from the caller 00798 Group grp = file.createGroup ("typetests"); 00799 00800 /* bitfield_1 */ 00801 unsigned char buf[32]; 00802 hsize_t nelmts = sizeof(buf); 00803 DataType type; 00804 try { // block of bitfield_1 00805 // test copying a predefined type 00806 type.copy (PredType::STD_B8LE); 00807 00808 // Test copying a user-defined type using DataType::copy 00809 DataType copied_type; 00810 copied_type.copy(type); 00811 // Test copying a user-defined type 
using DataType::operator= 00812 DataType another_copied_type; 00813 another_copied_type = type; 00814 00815 // Test copying a user-defined int type using DataType::operator= 00816 IntType orig_int(PredType::STD_B8LE); 00817 DataType generic_type; 00818 generic_type = orig_int; 00819 00820 // Test copying an integer predefined type 00821 IntType new_int_type(PredType::STD_B8LE); 00822 00823 // Test copying an int predefined type using DataType::operator= 00824 IntType another_int_type; 00825 another_int_type = new_int_type; 00826 00827 DataSpace space (1, &nelmts); 00828 DataSet* dset = new DataSet(grp.createDataSet("bitfield_1", type, space)); 00829 00830 // Fill buffer 00831 for (i=0; i<sizeof buf; i++) 00832 buf[i] = (unsigned char)0xff ^ (unsigned char)i; 00833 00834 // Write data from buf using all default dataspaces and property 00835 // list; if writing fails, deallocate dset and return. 00836 try { dset->write (buf, type); } 00837 catch(DataSetIException E) 00838 { 00839 delete dset; 00840 goto error; 00841 } 00842 delete dset; 00843 00844 } // end try block of bitfield_1 00845 00846 // catch exceptions thrown in try block of bitfield_1 00847 catch (Exception E) { 00848 cout << "Failure in " << E.getFuncName() << " - " 00849 << E.getDetailMsg() << endl; 00850 goto error; 00851 } 00852 00853 /* bitfield_2 */ 00854 nelmts = sizeof(buf)/2; 00855 try { // bitfield_2 block 00856 type.copy (PredType::STD_B16LE); 00857 DataSpace space (1, &nelmts); 00858 DataSet* dset = new DataSet(grp.createDataSet("bitfield_2", type, space)); 00859 00860 // Fill buffer 00861 for (i=0; i<sizeof(buf); i++) 00862 buf[i] = (unsigned char)0xff ^ (unsigned char)i; 00863 00864 // Write data from buf using all default dataspaces and property 00865 // list; if writing fails, deallocate dset and return. 
00866 try { dset->write (buf, type); } 00867 catch(DataSetIException E) 00868 { 00869 cout << "Failure in " << E.getFuncName() << " - " 00870 << E.getDetailMsg() << endl; 00871 delete dset; 00872 goto error; 00873 } 00874 delete dset; 00875 } // end try block of bitfield_2 00876 00877 // catch exceptions thrown in try block of bitfield_2 00878 catch (Exception E) { 00879 cout << "Failure in " << E.getFuncName() << " - " 00880 << E.getDetailMsg() << endl; 00881 goto error; 00882 } 00883 00884 /* opaque_1 */ 00885 DataType* optype = new DataType(H5T_OPAQUE, 1); 00886 try { // opaque_1 block 00887 nelmts = sizeof(buf); 00888 DataSpace space (1, &nelmts); 00889 optype->setTag ("testing 1-byte opaque type"); 00890 DataSet* dset = new DataSet(grp.createDataSet("opaque_1", *optype, space)); 00891 00892 // Fill buffer 00893 for (i=0; i<sizeof buf; i++) 00894 buf[i] = (unsigned char)0xff ^ (unsigned char)i; 00895 00896 // Write data from buf using all default dataspaces and property 00897 // list; if writing fails, deallocate dset and return. 
00898 try { dset->write (buf, *optype); } 00899 catch(DataSetIException E) 00900 { 00901 delete dset; 00902 goto error; 00903 } 00904 delete dset; 00905 delete optype; 00906 } // end try block of opaque_1 00907 00908 // catch exceptions thrown in try block of opaque_1 00909 catch (DataSetIException E) { 00910 delete optype; 00911 cout << "Failure in " << E.getFuncName() << " - " 00912 << E.getDetailMsg() << endl; 00913 goto error; 00914 } 00915 catch (Exception E) { 00916 cout << "Failure in " << E.getFuncName() << " - " 00917 << E.getDetailMsg() << endl; 00918 goto error; 00919 } 00920 00921 /* opaque_2 */ 00922 try { // block opaque_2 00923 nelmts = sizeof(buf)/4; 00924 DataSpace space (1, &nelmts); 00925 optype = new DataType(H5T_OPAQUE, 4); 00926 optype->setTag ("testing 4-byte opaque type"); 00927 DataSet* dset = new DataSet(grp.createDataSet("opaque_2", *optype, space)); 00928 00929 // Fill buffer 00930 for (i=0; i<sizeof(buf); i++) 00931 buf[i] = (unsigned char)0xff ^ (unsigned char)i; 00932 00933 // Write data from buf using all default dataspaces and property 00934 // list; if writing fails, deallocate dset and return. 
00935 try { dset->write (buf, *optype); } 00936 catch(DataSetIException E) 00937 { 00938 delete dset; 00939 goto error; 00940 } 00941 delete dset; 00942 delete optype; 00943 } //end try block of opaque_2 00944 catch (DataSetIException E) { 00945 delete optype; 00946 cout << "Failure in " << E.getFuncName() << " - " 00947 << E.getDetailMsg() << endl; 00948 goto error; 00949 } 00950 catch (Exception E) { 00951 cout << "Failure in " << E.getFuncName() << " - " 00952 << E.getDetailMsg() << endl; 00953 goto error; 00954 } 00955 00956 PASSED(); 00957 return 0; 00958 } // end top try block 00959 00960 catch (Exception E) { // Group and DataType exceptions 00961 cout << "Failure in " << E.getFuncName() << " - " 00962 << E.getDetailMsg() << endl; 00963 goto error; 00964 } 00965 00966 error: 00967 return -1; 00968 } 00969 00970 /*------------------------------------------------------------------------- 00971 * Function: main 00972 * 00973 * Purpose: Tests the dataset interface (H5D) 00974 * 00975 * Return: Success: exit(0) 00976 * 00977 * Failure: exit(1) 00978 * 00979 * Programmer: Binh-Minh Ribler (using C version) 00980 * Friday, January 5, 2001 00981 * 00982 * Modifications: 00983 * Nov 12, 01: 00984 * - moved h5_cleanup to outside of try block because 00985 * dataset.h5 cannot be removed until "file" is out of 00986 * scope and dataset.h5 is closed. 
00987 * 00988 *------------------------------------------------------------------------- 00989 */ 00990 int 00991 main(void) 00992 { 00993 h5_reset(); // in h5test.c, resets the library by closing it 00994 00995 hid_t fapl_id; 00996 fapl_id = h5_fileaccess(); // in h5test.c, returns a file access template 00997 00998 char filename[1024]; 00999 h5_fixname(FILENAME[0], fapl_id, filename, sizeof filename); 01000 01001 int nerrors=0; // keep track of number of failures occurr 01002 try 01003 { 01004 // Turn of the auto-printing when failure occurs so that we can 01005 // handle the errors appropriately since sometime failures are 01006 // caused deliberately and expected. 01007 Exception::dontPrint(); 01008 01009 // Use the file access template id to create a file access prop. 01010 // list object to pass in H5File::H5File 01011 FileAccPropList fapl(fapl_id); 01012 01013 H5File file( filename, H5F_ACC_TRUNC, FileCreatPropList::DEFAULT, fapl); 01014 01015 /* Cause the library to emit initial messages */ 01016 Group grp = file.createGroup( "emit diagnostics", 0); 01017 grp.setComment( ".", "Causes diagnostic messages to be emitted"); 01018 01019 nerrors += test_create(file)<0 ?1:0; 01020 nerrors += test_simple_io(file)<0 ?1:0; 01021 nerrors += test_tconv(file)<0 ?1:0; 01022 nerrors += test_compression(file)<0 ?1:0; 01023 nerrors += test_multiopen (file)<0 ?1:0; 01024 nerrors += test_types(file)<0 ?1:0; 01025 01026 // increment the ref count of this property list so that the 01027 // property list id won't be closed when fapl goes out of scope. 01028 // This is a bad hack, but I want to use existing routine h5_cleanup! 
01029 fapl.incRefCount(); 01030 } 01031 catch (Exception E) 01032 { 01033 return(test_report(nerrors, string(" Dataset"))); 01034 } 01035 /* use C test utility routine to clean up data files */ 01036 h5_cleanup(FILENAME, fapl_id); 01037 01038 /* print out dsets test results */ 01039 cerr << endl << endl; 01040 return(test_report(nerrors, string(" Dataset"))); 01041 }

Generated on Sun Jul 25 01:27:17 2004 by doxygen 1.3.7