Update the VOL for HDF5 2.0.0 changes. (#73)
* Removed the use of H5VLpeek_connector_id_by_name, which was removed in HDF5 versions newer than 1.14.
* Updated for the HDF5 2.0 VOL API changes:
   * Renaming of H5VLstart_lib_state and H5VLfinish_lib_state, along with the new context state (a call-site sketch appears after the changed-files summary below).
* Updated the tests to pull from the newest HDF5 source.
brtnfld authored Nov 14, 2024
1 parent 14f8136 commit 89db3d4
Showing 25 changed files with 224 additions and 114 deletions.
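Most of the source-file diffs below repeat one pattern, so here is a minimal call-site sketch of it. The helper names are the connector's internal wrappers exactly as they appear in the diffs; the declarations below are illustrative stand-ins (not part of this commit), shown only to make the new two-argument form explicit.

```cpp
// Illustrative stand-in declarations for the connector's internal helpers;
// the real ones live in the Log VOL's internal headers.
void H5VL_logi_reset_lib_stat (void *&lib_state, void *&lib_context);
void H5VL_logi_restore_lib_stat (void *&lib_state, void *&lib_context);

void example_vol_operation (void) {
    void *lib_state   = NULL;
    void *lib_context = NULL;  // new with the HDF5 2.0 update: a separate context handle

    // Save the caller's HDF5 library state/context and start a fresh one, so the
    // connector can issue its own HDF5 file/attribute calls mid-operation.
    H5VL_logi_reset_lib_stat (lib_state, lib_context);

    // ... wrapped HDF5 operations issued by the connector go here ...

    // Put the caller's state/context back; the real code does this inside an
    // error-safe "finally" lambda, as the diffs show.
    H5VL_logi_restore_lib_stat (lib_state, lib_context);
}
```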
3 changes: 2 additions & 1 deletion .github/workflows/ubuntu_mpich.yml
@@ -33,7 +33,7 @@ jobs:
 - name: Set up dependencies
 run: |
 sudo apt-get update
-sudo apt-get install automake autoconf libtool libtool-bin m4
+sudo apt-get install automake autoconf libtool libtool-bin m4 uuid-dev
 # The MPICH installed on github actions is too slow
 # sudo apt-get install mpich
 # mpicc -v
@@ -74,6 +74,7 @@ jobs:
 --disable-tests \
 --disable-fortran \
 --disable-cxx \
+--enable-shared --disable-static \
 CC=${GITHUB_WORKSPACE}/MPICH/bin/mpicc
 make -s LIBTOOLFLAGS=--silent V=1 -j 4 install > qout 2>&1
 make -s -j 4 distclean >> qout 2>&1
3 changes: 2 additions & 1 deletion .github/workflows/ubuntu_openmpi.yml
@@ -32,7 +32,7 @@ jobs:
 - name: Set up dependencies
 run: |
 sudo apt-get update
-sudo apt-get install automake autoconf libtool libtool-bin m4
+sudo apt-get install automake autoconf libtool libtool-bin m4 uuid-dev
 # zlib
 sudo apt-get install zlib1g-dev
 # mpi
@@ -59,6 +59,7 @@ jobs:
 --disable-tests \
 --disable-fortran \
 --disable-cxx \
+--enable-shared --disable-static \
 CC=mpicc
 make -s LIBTOOLFLAGS=--silent V=1 -j 4 install > qout 2>&1
 make -s -j 4 distclean >> qout 2>&1
2 changes: 1 addition & 1 deletion README.md
@@ -23,7 +23,7 @@ but require the Log VOL to read them back.
 
 ### HDF5 VOL Connector ID
 * This Log VOL connector has been registered with the HDF group with
-[Connector Identifier 514](https://portal.hdfgroup.org/display/support/Registered+VOL+Connectors).
+[Connector Identifier 514](https://support.hdfgroup.org/documentation/hdf5-docs/registered_vol_connectors.html).
 
 ### Documents
 * [doc/userguide.md](doc/userguide.md) contains the compile and run instructions.
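Not part of this commit, but for orientation on the README excerpt above: an application selects a registered VOL connector through a file access property list. A minimal sketch using the standard HDF5 API, assuming the connector library is on HDF5_PLUGIN_PATH, is registered under the name "LOG" (an assumption here), and that HDF5 was built with parallel support; error checking is omitted for brevity.

```cpp
#include <mpi.h>
#include "hdf5.h"

int main (int argc, char **argv) {
    MPI_Init (&argc, &argv);

    // Look up / register the connector by name; the name "LOG" is assumed here.
    hid_t vol_id = H5VLregister_connector_by_name ("LOG", H5P_DEFAULT);

    // Attach MPI-IO and the VOL connector to a file access property list.
    hid_t fapl_id = H5Pcreate (H5P_FILE_ACCESS);
    H5Pset_fapl_mpio (fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL);
    H5Pset_vol (fapl_id, vol_id, NULL);

    // Files created through this fapl now go through the selected connector.
    hid_t file_id = H5Fcreate ("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);

    H5Fclose (file_id);
    H5Pclose (fapl_id);
    H5VLclose (vol_id);
    MPI_Finalize ();
    return 0;
}
```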
24 changes: 12 additions & 12 deletions examples/hdf5_examples/Makefile.am
@@ -66,31 +66,31 @@ nodist_h5_subset_SOURCES = h5_subset.c
 nodist_ph5example_SOURCES = ph5example.c
 endif
 
-HDF5_VERSION = hdf5-1_13_0
+HDF5_VERSION = develop
 HDF5_URL = https://raw.githubusercontent.com/HDFGroup/hdf5/${HDF5_VERSION}
 
 .SECONDEXPANSION:
 
-$(SEQ_SRCS): %.c : $$(wildcard %.patch.$(HDF5_VERSION))
-@DOWNLOAD_CMD@ $(HDF5_URL)/examples/$@
-if [ -f ${srcdir}/$(basename $@).patch.$(HDF5_VERSION) ]; then patch -st $@ < ${srcdir}/$(basename $@).patch.$(HDF5_VERSION); fi
+$(SEQ_SRCS): %.c : $$(wildcard %.patch)
+@DOWNLOAD_CMD@ $(HDF5_URL)/HDF5Examples/C/TUTR/$@
+if [ -f ${srcdir}/$(basename $@).patch ]; then patch -st $@ < ${srcdir}/$(basename $@).patch; fi
 $(SED_I) -e "s|main(.*|main(int argc, char **argv)|g" $@
 $(SED_I) -e "s|H5Fcreate(FILE|H5Fcreate(argv[1]|g" $@
 $(SED_I) -e "s|H5Fcreate(FILENAME|H5Fcreate(argv[1]|g" $@
 $(SED_I) -e "s|H5Fcreate(H5FILE_NAME|H5Fcreate(argv[1]|g" $@
 $(SED_I) -e "s|H5Fopen(FILE|H5Fopen(argv[1]|g" $@
 $(SED_I) -e "s|H5Fopen(FILENAME|H5Fopen(argv[1]|g" $@
 $(SED_I) -e "s|H5Fopen(H5FILE_NAME|H5Fopen(argv[1]|g" $@
 
-$(PAR_SRCS): %.c : $$(wildcard %.patch.$(HDF5_VERSION))
-@DOWNLOAD_CMD@ $(HDF5_URL)/examples/$@
-if [ -f ${srcdir}/$(basename $@).patch.$(HDF5_VERSION) ]; then patch -st $@ < ${srcdir}/$(basename $@).patch.$(HDF5_VERSION); fi
+$(PAR_SRCS): %.c : $$(wildcard %.patch)
+@DOWNLOAD_CMD@ $(HDF5_URL)/HDF5Examples/C/H5PAR/$@
+if [ -f ${srcdir}/$(basename $@).patch ]; then patch -st $@ < ${srcdir}/$(basename $@).patch; fi
 
 check_PROGRAMS = $(TESTPROGRAMS)
 
 EXTRA_DIST = seq_runs.sh \
 parallel_run.sh \
-ph5example.patch.$(HDF5_VERSION) \
-h5_attribute.patch.$(HDF5_VERSION) \
-h5_interm_group.patch.$(HDF5_VERSION)
+ph5example.patch \
+h5_attribute.patch \
+h5_interm_group.patch
 
 # autimake 1.11.3 has not yet implemented AM_TESTS_ENVIRONMENT
 # For newer versions, we can use AM_TESTS_ENVIRONMENT instead
45 changes: 45 additions & 0 deletions examples/hdf5_examples/h5_attribute.patch
@@ -0,0 +1,45 @@
--- h5_attribute.c 2024-11-12 16:49:52.466955555 -0600
+++ h5_attribute_logvol.c 2024-11-12 16:47:13.644521620 -0600
@@ -41,7 +41,7 @@
/* Operator function */

int
-main(void)
+main(int argc, char **argv)
{

hid_t file, dataset; /* File and dataset identifiers */
@@ -79,7 +79,7 @@
/*
* Create a file.
*/
- file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ file = H5Fcreate(argv[1], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

/*
* Create the dataspace for the dataset in the file.
@@ -167,7 +167,7 @@
/*
* Reopen the file.
*/
- file = H5Fopen(H5FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT);
+ file = H5Fopen(argv[1], H5F_ACC_RDONLY, H5P_DEFAULT);

/*
* Open the dataset.
@@ -185,6 +185,7 @@

//! [H5Oget_info3_snip]

+#if 0
/*
* Find string attribute by iterating through all attributes
*/
@@ -203,6 +204,7 @@
ret = H5Aclose(attr);
ret = H5Tclose(atype);
}
+#endif

//! [H5Oget_info3_snip]
/*
4 changes: 0 additions & 4 deletions examples/hdf5_examples/h5_attribute.patch.hdf5-1_13_0

This file was deleted.

examples/hdf5_examples/h5_interm_group.patch
@@ -1,16 +1,16 @@
--- h5_interm_group.c 2022-03-16 18:21:18.837044757 -0500
+++ h5_interm_group_logvol.c 2022-03-16 18:21:46.457044898 -0500
@@ -88,10 +88,13 @@
--- h5_interm_group.c 2024-11-13 11:56:00.913059201 -0600
+++ h5_interm_group_logvol.c 2024-11-13 11:56:54.281932771 -0600
@@ -84,11 +84,12 @@
g2_id = H5Gopen2(file, "/G1/G2", H5P_DEFAULT);
status = H5Gget_info(g2_id, &g2_info);
printf("Group /G1/G2 has %d member(s)\n", (int)g2_info.nlinks);
-
+#if 0
for (i = 0; i < (int)g2_info.nlinks; i++) {
H5Lget_name_by_idx(g2_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE, (hsize_t)i, name, 3, H5P_DEFAULT);
printf("Object's name is %s\n", name);
}
+#endif
+
H5Gclose(g2_id);
}
H5Fclose(file);
examples/hdf5_examples/ph5example.patch
@@ -1,14 +1,22 @@
--- ph5example.c 2022-12-24 17:10:24.423988722 -0600
+++ ph5example_logvol.c 2022-12-24 17:14:47.240314745 -0600
@@ -51,6 +51,7 @@
printf("%s\n", x);
--- ph5example.c 2024-11-12 14:13:11.462119142 -0600
+++ ph5example_logvol.c 2024-11-12 14:15:08.212012234 -0600
@@ -52,12 +52,14 @@
} while (0)

#define MPI_BANNER(mesg) \
+ if (0) \
{ \
+ if (0) { \
do { \
printf("--------------------------------\n"); \
printf("Proc %d: ", mpi_rank); \
@@ -96,7 +97,7 @@
printf("*** %s\n", mesg); \
printf("--------------------------------\n"); \
- } while (0)
+ } while (0); \
+ }

#define SYNC(comm) \
do { \
@@ -97,7 +99,7 @@
int verbose = 0; /* verbose, default as no. */
int doread = 1; /* read test */
int dowrite = 1; /* write test */
@@ -17,17 +25,18 @@

/* Prototypes */
void slab_set(hsize_t start[], hsize_t count[], hsize_t stride[], int mode);
@@ -888,6 +889,9 @@
@@ -907,6 +909,10 @@
mrc = MPI_File_delete(filenames[color], info);
assert(mrc == MPI_SUCCESS);
}
+
+
+ // Prevent odd-rank to start the next test
+ MPI_Barrier(MPI_COMM_WORLD);
+
MPI_Comm_free(&comm);
}

/*
@@ -972,7 +976,7 @@
@@ -992,7 +998,7 @@
}
break;
case 'c':
@@ -36,7 +45,7 @@
break;
case 'r':
doread = 0;
@@ -1026,7 +1030,8 @@
@@ -1046,7 +1052,8 @@
char mpi_name[MPI_MAX_PROCESSOR_NAME];
int i, n;

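The added barrier in the hunk above carries the comment "Prevent odd-rank to start the next test": presumably it stops ranks outside the deleting group from racing into the next test while the shared file is still being removed. A self-contained illustration of that ordering (the file names and the even/odd split are assumptions for this sketch, not taken from ph5example.c):

```cpp
#include <mpi.h>
#include <assert.h>

int main (int argc, char **argv) {
    MPI_Init (&argc, &argv);

    int rank;
    MPI_Comm_rank (MPI_COMM_WORLD, &rank);

    // Split the ranks into two groups, as ph5example does with "color".
    int color = rank % 2;
    MPI_Comm comm;
    MPI_Comm_split (MPI_COMM_WORLD, color, rank, &comm);

    const char *filenames[2] = {"test_even.dat", "test_odd.dat"};

    // Stand-in for the per-group I/O test: collectively create the group's file.
    MPI_File fh;
    MPI_File_open (comm, filenames[color], MPI_MODE_CREATE | MPI_MODE_WRONLY,
                   MPI_INFO_NULL, &fh);
    MPI_File_close (&fh);

    // One rank per group deletes that group's file afterwards.
    int subrank;
    MPI_Comm_rank (comm, &subrank);
    if (subrank == 0) {
        int mrc = MPI_File_delete (filenames[color], MPI_INFO_NULL);
        assert (mrc == MPI_SUCCESS);
    }

    // Without this barrier, ranks in the other group could move on to the next
    // test (and touch the files again) while the delete above is still in flight.
    MPI_Barrier (MPI_COMM_WORLD);

    MPI_Comm_free (&comm);
    MPI_Finalize ();
    return 0;
}
```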
2 changes: 1 addition & 1 deletion src/H5VL_log_att.cpp
@@ -388,7 +388,7 @@ herr_t H5VL_log_attr_specific (void *obj,
 void **req) {
 H5VL_log_obj_t *op = (H5VL_log_obj_t *)obj;
 herr_t err = 0;
-H5VL_log_req_t *rp;
+H5VL_log_req_t *rp = NULL;
 void **ureqp, *ureq;
 char *iname = NULL; // Internal name of object
 const char *original_name = NULL; // Original value in loc_params before being remapped
9 changes: 5 additions & 4 deletions src/H5VL_log_dataset.cpp
@@ -76,9 +76,10 @@ void *H5VL_log_dataset_create (void *obj,
 void **ureqp, *ureq;
 H5D_fill_value_t stat;
 void *lib_state = NULL;
+void *lib_context = NULL;
 // char lname[1024];
 H5VL_logi_err_finally finally (
-[&dcpl_id, &lib_state] () -> void { H5VL_logi_restore_lib_stat (lib_state); });
+[&dcpl_id, &lib_state, &lib_context] () -> void { H5VL_logi_restore_lib_stat (lib_state, lib_context); });
 
 try {
 H5VL_LOGI_PROFILING_TIMER_START;
@@ -160,7 +161,7 @@ void *H5VL_log_dataset_create (void *obj,
 H5VL_logi_get_filters (dcpl_id, dip->filters);
 
 // Reset hdf5 context to allow attr operations within a dataset operation
-H5VL_logi_reset_lib_stat (lib_state);
+H5VL_logi_reset_lib_stat (lib_state, lib_context);
 
 // Record dataset metadata as attributes
 H5VL_logi_add_att (dp, H5VL_LOG_DATASETI_ATTR_DIMS, H5T_STD_I64LE, H5T_NATIVE_INT64,
@@ -282,7 +283,7 @@ static herr_t H5VL_log_dataset_read_elements (void *dset,
 herr_t err = 0;
 H5VL_log_dset_t *dp = (H5VL_log_dset_t *)dset;
 H5VL_log_dset_info_t *dip = NULL; // Dataset info
-hid_t dsid; // Dataset space id
+hid_t dsid = H5I_INVALID_HID; // Dataset space id
 H5VL_log_selections *dsel = NULL; // Selection blocks
 
 try {
@@ -325,7 +326,7 @@ static herr_t H5VL_log_dataset_write_elements (void *dset,
 H5VL_log_dset_t *dp = (H5VL_log_dset_t *)dset;
 
 H5VL_log_dset_info_t *dip = NULL; // Dataset info
-hid_t dsid; // Dataset space id
+hid_t dsid = H5I_INVALID_HID; // Dataset space id
 H5VL_log_selections *dsel = NULL; // Selection blocks
 
 try {
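The dataset diffs above hinge on H5VL_logi_err_finally running the captured restore call on every exit path. A minimal sketch of such a scope-guard helper, assuming it is essentially a destructor-run callable (the real class lives in the connector's internal headers and may differ):

```cpp
#include <functional>
#include <utility>

// Scope guard: runs the stored callable when the enclosing scope exits,
// whether the scope is left normally or via an exception.
class H5VL_logi_err_finally {
  public:
    explicit H5VL_logi_err_finally (std::function<void ()> f) : fn (std::move (f)) {}
    ~H5VL_logi_err_finally () { fn (); }
    H5VL_logi_err_finally (const H5VL_logi_err_finally &) = delete;
    H5VL_logi_err_finally &operator= (const H5VL_logi_err_finally &) = delete;

  private:
    std::function<void ()> fn;
};
```

Because the cleanup runs in the destructor, adding lib_context to the lambda's capture list is enough to guarantee that the two-argument H5VL_logi_restore_lib_stat executes even if the wrapped operation throws.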
19 changes: 11 additions & 8 deletions src/H5VL_log_dataseti.cpp
@@ -695,14 +695,15 @@ void *H5VL_log_dataseti_open (void *obj, void *uo, hid_t dxpl_id) {
 std::unique_ptr<H5VL_log_dset_info_t> dip; // Dataset info
 H5D_fill_value_t stat;
 void *lib_state = NULL;
-H5VL_logi_err_finally finally ([&dcpl_id, &lib_state] () -> void {
+void *lib_context = NULL;
+H5VL_logi_err_finally finally ([&dcpl_id, &lib_state, &lib_context] () -> void {
 if (dcpl_id >= 0) { H5Pclose (dcpl_id); }
-H5VL_logi_restore_lib_stat (lib_state);
+H5VL_logi_restore_lib_stat (lib_state, lib_context);
 });
 H5VL_LOGI_PROFILING_TIMER_START;
 
 // Reset hdf5 context to allow file operations within a dataset operation
-H5VL_logi_reset_lib_stat (lib_state);
+H5VL_logi_reset_lib_stat (lib_state, lib_context);
 
 dp = std::make_unique<H5VL_log_dset_t> (op, H5I_DATASET, uo);
 
@@ -806,9 +807,10 @@ void H5VL_log_dataseti_write (H5VL_log_dset_t *dp,
 int clen, inlen; // Compressed size; Size of data to be compressed
 #endif
 void *lib_state = NULL;
-H5VL_logi_err_finally finally ([&ptype, &lib_state] () -> void {
+void *lib_context = NULL;
+H5VL_logi_err_finally finally ([&ptype, &lib_state, &lib_context] () -> void {
 H5VL_log_type_free (ptype);
-H5VL_logi_restore_lib_stat (lib_state);
+H5VL_logi_restore_lib_stat (lib_state, lib_context);
 });
 H5VL_LOGI_PROFILING_TIMER_START;
 
@@ -830,7 +832,7 @@ void H5VL_log_dataseti_write (H5VL_log_dset_t *dp,
 H5VL_LOGI_PROFILING_TIMER_STOP (dp->fp, TIMER_H5VL_LOG_DATASET_WRITE_INIT);
 
 // Reset hdf5 context to allow file operations within a dataset operation
-H5VL_logi_reset_lib_stat (lib_state);
+H5VL_logi_reset_lib_stat (lib_state, lib_context);
 
 if (dp->fp->config ^ H5VL_FILEI_CONFIG_METADATA_MERGE) {
 H5VL_LOGI_PROFILING_TIMER_START;
@@ -1047,9 +1049,10 @@ void H5VL_log_dataseti_read (H5VL_log_dset_t *dp,
 hbool_t rtype; // Non-blocking?
 size_t num_pending_writes = 0;
 void *lib_state = NULL;
+void *lib_context = NULL;
 H5FD_mpio_xfer_t xfer_mode;
 H5VL_logi_err_finally finally (
-[&lib_state] () -> void { H5VL_logi_restore_lib_stat (lib_state); });
+[&lib_state, &lib_context] () -> void { H5VL_logi_restore_lib_stat (lib_state, lib_context); });
 H5VL_LOGI_PROFILING_TIMER_START;
 
 H5VL_LOGI_PROFILING_TIMER_START;
@@ -1059,7 +1062,7 @@ void H5VL_log_dataseti_read (H5VL_log_dset_t *dp,
 H5VL_LOGI_PROFILING_TIMER_STOP (dp->fp, TIMER_H5VL_LOG_DATASET_READ_INIT);
 
 // Reset hdf5 context to allow file operations within a dataset operation
-H5VL_logi_reset_lib_stat (lib_state);
+H5VL_logi_reset_lib_stat (lib_state, lib_context);
 
 // Check mem space selection
 if (mem_space_id == H5S_ALL)
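For reference, the reset/restore wrappers changed above sit on top of HDF5's library-state API for passthrough connectors. Below is a sketch of a plausible one-argument (pre-2.0) shape, written against the H5VLretrieve_lib_state / H5VLstart_lib_state / H5VLfinish_lib_state / H5VLrestore_lib_state / H5VLfree_lib_state functions from HDF5 1.13/1.14; the two-argument versions in this commit additionally carry the new HDF5 2.0 API context, whose renamed entry points are not shown in this diff and are therefore not guessed here.

```cpp
#include "hdf5.h"
#include "H5VLconnector_passthru.h"

// Plausible pre-2.0 shape: stash the caller's library state and start a clean one,
// so the connector's own public-API calls re-enter the VOL stack properly.
static void logi_reset_lib_stat_sketch (void *&lib_state) {
    H5VLretrieve_lib_state (&lib_state);  // save the application's API state
    H5VLstart_lib_state ();               // begin a fresh state for internal calls
}

// Plausible pre-2.0 shape: tear down the internal state and put the caller's back.
static void logi_restore_lib_stat_sketch (void *&lib_state) {
    if (!lib_state) return;
    H5VLfinish_lib_state ();              // end the connector's internal state
    H5VLrestore_lib_state (lib_state);    // reinstate the application's state
    H5VLfree_lib_state (lib_state);       // release the saved copy
    lib_state = NULL;
}
```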

