diff --git a/.github/workflows/alone.yml b/.github/workflows/alone.yml
index 52e0e17d..4a8828a9 100644
--- a/.github/workflows/alone.yml
+++ b/.github/workflows/alone.yml
@@ -22,7 +22,6 @@ on:
 env:
   MPICH_VERSION: 4.1.2
-  HDF5_VERSION: 1.14.2
   ARGOBOTS_VERSION: 1.1
   ASYNC_VOL_VERSION: 1.8.1
@@ -31,7 +30,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
       - name: Set up dependencies
        run: |
          sudo apt-get update
@@ -67,10 +66,9 @@ jobs:
          rm -rf ${GITHUB_WORKSPACE}/HDF5
          mkdir ${GITHUB_WORKSPACE}/HDF5
          cd ${GITHUB_WORKSPACE}/HDF5
-         VER_MAJOR=${HDF5_VERSION%.*}
-         wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
-         tar -zxf hdf5-${HDF5_VERSION}.tar.gz
-         cd hdf5-${HDF5_VERSION}
+         wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+         tar -zxf hdf5.tar.gz
+         cd hdf5-*
          ./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
                      --silent \
                      --enable-parallel \
@@ -115,7 +113,7 @@ jobs:
          rm -rf ${ASYNC_DIR}
          mkdir ${ASYNC_DIR}
          cd ${ASYNC_DIR}
-         wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
+         wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
          tar -xf v${ASYNC_VOL_VERSION}.tar.gz
          cd vol-async-${ASYNC_VOL_VERSION}
          mkdir build
@@ -136,7 +134,7 @@ jobs:
          rm -rf ${CAHCE_DIR}
          mkdir ${CAHCE_DIR}
          cd ${CAHCE_DIR}
-         git clone https://github.com/hpc-io/vol-cache.git
+         git clone https://github.com/HDFGroup/vol-cache.git
          cd vol-cache
          mkdir build
          cd build
diff --git a/.github/workflows/mac_mpich.yml b/.github/workflows/mac_mpich.yml
index e62d3745..1c19cddb 100644
--- a/.github/workflows/mac_mpich.yml
+++ b/.github/workflows/mac_mpich.yml
@@ -22,7 +22,6 @@ on:
 env:
   MPICH_VERSION: 4.1.2
-  HDF5_VERSION: 1.14.2
   NETCDF_VERSION: 4.9.2
@@ -30,7 +29,7 @@ jobs:
     runs-on: macos-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
       - name: Set up dependencies
        run: |
          brew install automake autoconf libtool m4 open-mpi zlib
@@ -38,10 +37,9 @@ jobs:
        run: |
          cd ${GITHUB_WORKSPACE}
          rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
-         VER_MAJOR=${HDF5_VERSION%.*}
-         wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
-         tar -zxf hdf5-${HDF5_VERSION}.tar.gz
-         cd hdf5-${HDF5_VERSION}
+         wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+         tar -zxf hdf5.tar.gz
+         cd hdf5-*
          ./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
                      --silent \
                      --enable-parallel \
diff --git a/.github/workflows/ubuntu_mpich.yml b/.github/workflows/ubuntu_mpich.yml
index 426c7c16..b9018b1a 100644
--- a/.github/workflows/ubuntu_mpich.yml
+++ b/.github/workflows/ubuntu_mpich.yml
@@ -22,7 +22,6 @@ on:
 env:
   MPICH_VERSION: 4.1.2
-  HDF5_VERSION: 1.14.2
   NETCDF_VERSION: 4.9.2
@@ -30,7 +29,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
       - name: Set up dependencies
        run: |
          sudo apt-get update
@@ -60,10 +59,9 @@ jobs:
        run: |
          cd ${GITHUB_WORKSPACE}
          rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
-         VER_MAJOR=${HDF5_VERSION%.*}
-         wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
-         tar -zxf hdf5-${HDF5_VERSION}.tar.gz
-         cd hdf5-${HDF5_VERSION}
+         wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+         tar -zxf hdf5.tar.gz
+         cd hdf5-*
          ./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
                     --silent \
                     --enable-parallel \
diff --git a/.github/workflows/ubuntu_openmpi.yml b/.github/workflows/ubuntu_openmpi.yml
index b14899f5..7f7fea9a 100644
--- a/.github/workflows/ubuntu_openmpi.yml
+++ b/.github/workflows/ubuntu_openmpi.yml
@@ -21,7 +21,6 @@ on:
     - 'case_studies/*'
 env:
-  HDF5_VERSION: 1.14.2
   NETCDF_VERSION: 4.9.2
@@ -29,7 +28,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
       - name: Set up dependencies
        run: |
          sudo apt-get update
@@ -45,10 +44,9 @@ jobs:
          cd ${GITHUB_WORKSPACE}
          echo "Install HDF5 on ${GITHUB_WORKSPACE}/HDF5"
          rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
-         VER_MAJOR=${HDF5_VERSION%.*}
-         wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
-         tar -zxf hdf5-${HDF5_VERSION}.tar.gz
-         cd hdf5-${HDF5_VERSION}
+         wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+         tar -zxf hdf5.tar.gz
+         cd hdf5-*
          ./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
                      --silent \
                      --enable-parallel \
diff --git a/.github/workflows/ubuntu_stack_vols.yml b/.github/workflows/ubuntu_stack_vols.yml
index ba0f0af7..76a89879 100644
--- a/.github/workflows/ubuntu_stack_vols.yml
+++ b/.github/workflows/ubuntu_stack_vols.yml
@@ -22,7 +22,6 @@ on:
 env:
   MPICH_VERSION: 4.1.2
-  HDF5_VERSION: 1.14.2
   ARGOBOTS_VERSION: 1.1
   ASYNC_VOL_VERSION: 1.8.1
@@ -31,7 +30,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
       - name: Set up dependencies
        run: |
          sudo apt-get update
@@ -67,10 +66,9 @@ jobs:
          rm -rf ${GITHUB_WORKSPACE}/HDF5
          mkdir ${GITHUB_WORKSPACE}/HDF5
          cd ${GITHUB_WORKSPACE}/HDF5
-         VER_MAJOR=${HDF5_VERSION%.*}
-         wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
-         tar -zxf hdf5-${HDF5_VERSION}.tar.gz
-         cd hdf5-${HDF5_VERSION}
+         wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+         tar -zxf hdf5.tar.gz
+         cd hdf5-*
          ./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
                      --silent \
                      --enable-parallel \
@@ -115,7 +113,7 @@ jobs:
          rm -rf ${ASYNC_DIR}
          mkdir ${ASYNC_DIR}
          cd ${ASYNC_DIR}
-         wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
+         wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
          tar -xf v${ASYNC_VOL_VERSION}.tar.gz
          cd vol-async-${ASYNC_VOL_VERSION}
          mkdir build
@@ -136,7 +134,7 @@ jobs:
          rm -rf ${CAHCE_DIR}
          mkdir ${CAHCE_DIR}
          cd ${CAHCE_DIR}
-         git clone https://github.com/hpc-io/vol-cache.git
+         git clone https://github.com/HDFGroup/vol-cache.git
          cd vol-cache
          mkdir build
          cd build
diff --git a/README.md b/README.md
index 78f6a367..c28f8090 100644
--- a/README.md
+++ b/README.md
@@ -17,9 +17,9 @@
 Files created by the Log VOL conform with the HDF5 file format
 specification, but require the Log VOL to read them back.
 * Current build status:
-  * [![Ubuntu_mpich](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/ubuntu_mpich.yml/badge.svg)](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/ubuntu_mpich.yml)
-  * [![Ubuntu with OpenMPI](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/ubuntu_openmpi.yml/badge.svg)](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/ubuntu_openmpi.yml)
-  * [![Mac with MPICH](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/mac_mpich.yml/badge.svg)](https://github.com/DataLib-ECP/vol-log-based/actions/workflows/mac_mpich.yml)
+  * [![Ubuntu_mpich](https://github.com/HDFGroup/vol-log-based/actions/workflows/ubuntu_mpich.yml/badge.svg)](https://github.com/HDFGroup/vol-log-based/actions/workflows/ubuntu_mpich.yml)
+  * [![Ubuntu with OpenMPI](https://github.com/HDFGroup/vol-log-based/actions/workflows/ubuntu_openmpi.yml/badge.svg)](https://github.com/HDFGroup/vol-log-based/actions/workflows/ubuntu_openmpi.yml)
+  * [![Mac with MPICH](https://github.com/HDFGroup/vol-log-based/actions/workflows/mac_mpich.yml/badge.svg)](https://github.com/HDFGroup/vol-log-based/actions/workflows/mac_mpich.yml)
 
 ### HDF5 VOL Connector ID
 * This Log VOL connector has been registered with the HDF group with
diff --git a/configure.ac b/configure.ac
index b73901f6..8a9e8180 100644
--- a/configure.ac
+++ b/configure.ac
@@ -336,31 +336,20 @@ if test "x$have_hdf5" = xno ; then
          to specify the location of HDF5 installation. Abort.
    -----------------------------------------------------------------------])
 fi
-AC_MSG_CHECKING([whether HDF5 version is 1.13.0 (develop branch) or later])
+AC_MSG_CHECKING([whether the HDF5 version is equal to or greater than 1.14.0])
 AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
 #include <hdf5.h>
-#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1013000)
-#error HDF5 version is older than 1.13.0
+#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1014000)
+#error HDF5 version is older than 1.14.0
 #endif
-  ]])], [hdf5_ge_1_13_0=yes], [hdf5_ge_1_13_0=no])
-AC_MSG_RESULT([$hdf5_ge_1_13_0])
-if test x$hdf5_ge_1_13_0 = xno; then
+  ]])], [hdf5_ge_1_14_0=yes], [hdf5_ge_1_14_0=no])
+AC_MSG_RESULT([$hdf5_ge_1_14_0])
+if test x$hdf5_ge_1_14_0 = xno; then
    AC_MSG_ERROR([
    -----------------------------------------------------------------------
-     H5VL_log requires HDF5 1.13.0 and later. Abort.
+     H5VL_log requires HDF5 1.14.0 or later. Abort.
    -----------------------------------------------------------------------])
 fi
-AC_MSG_CHECKING([whether HDF5 version is 1.13.3 or later])
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <hdf5.h>
-#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1013003)
-#error HDF5 version is older than 1.13.3
-#endif
-  ]])], [hdf5_ge_1_13_3=yes], [hdf5_ge_1_13_3=no])
-AC_MSG_RESULT([$hdf5_ge_1_13_3])
-if test x$hdf5_ge_1_13_3 = xyes; then
-   AC_DEFINE(HDF5_GE_1133, 1, ["HDF5 version greater than 1.13.3"])
-fi
 AC_MSG_CHECKING([whether HDF5 parallel I/O is enabled])
 AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
 #include <hdf5.h>
diff --git a/doc/INSTALL.md b/doc/INSTALL.md
index 6c4b15e1..283c9db3 100644
--- a/doc/INSTALL.md
+++ b/doc/INSTALL.md
@@ -1,7 +1,7 @@
 ## The Log VOL connector - Build Instructions
 
 ### Software Requirements
-* [HDF5 1.13.0](https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.2/src/hdf5-1.13.2.tar.gz)
+* [HDF5 1.14 or later](https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz)
   + Configured with parallel I/O support (--enable-parallel)
 * MPI C and C++ compilers
   + The plugin uses the constant initializer; a C++ compiler supporting C++ 17 is required
@@ -12,39 +12,39 @@
   + [m4](https://www.gnu.org/software/m4/) 1.4.18
 
 ### Building HDF5 libraries
-* HDF5 1.13.0 and later (**required**)
-  + Download HDF5 official release version 1.13.0.
+* HDF5 1.14.0 or later (**required**)
+  + Download the latest official HDF5 release.
   ```
-  % wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.2/src/hdf5-1.13.2.tar.gz
+  % wget https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
   ```
   + Configure HDF5 with parallel I/O enabled.
   ```
-  % tar -zxf hdf5-1_13_0.tar.gz
-  % cd hdf5-1_13_0.tar.gz
-  % ./configure --prefix=${HOME}/HDF5/1.13.0 --enable-parallel CC=mpicc
+  % tar -zxf hdf5.tar.gz
+  % cd hdf5-*
+  % ./configure --prefix=${HOME}/HDF5/latest --enable-parallel CC=mpicc
   % make -j4 install
   ```
   + The above example commands will install the HDF5 library under the folder
-    `${HOME}/HDF5/1.13.0`.
+    `${HOME}/HDF5/latest`.
 
 ### Building the Log VOL connector
 * Obtain the source code package by either downloading the official release
   or cloning the github repository.
   + Download the latest official release version 1.1.0.
   ```
-  % wget https://github.com/DataLib-ECP/vol-log-based/archive/refs/tags/logvol.1.1.0.tar.gz
+  % wget https://github.com/HDFGroup/vol-log-based/archive/refs/tags/logvol.1.1.0.tar.gz
   % tar -zxf logvol.1.1.0.tar.gz
   % cd vol-log-based-logvol.1.1.0
   ```
   + Clone from the github repository.
   ```
-  % git clone https://github.com/DataLib-ECP/vol-log-based.git
+  % git clone https://github.com/HDFGroup/vol-log-based.git
   % cd vol-log-based
   % autoreconf -i
   ```
 * Example configure and make commands are given below.
   ```
-  % ./configure --prefix=${HOME}/Log_IO_VOL --with-hdf5=${HOME}/HDF5/1.13.0
+  % ./configure --prefix=${HOME}/Log_IO_VOL --with-hdf5=${HOME}/HDF5/latest
   % make -j 4 install
   ```
   + The above commands will install the log-vol library under the folder `${HOME}/Log_IO_VOL`.
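For a quick sanity check of the updated instructions above, the sketch below loads the freshly installed connector at run time. It assumes the `${HOME}/Log_IO_VOL` install prefix from the example commands; the connector name `LOG` comes from `H5VL_log_NAME` in `src/H5VL_log_main.hpp` later in this patch, and `my_hdf5_app` is a placeholder for any parallel HDF5 program.

```shell
# Let HDF5 discover the Log VOL plugin installed above
export HDF5_PLUGIN_PATH=${HOME}/Log_IO_VOL/lib
# Select the Log VOL as the top-level connector, stacked on the
# native VOL (under_vol=0)
export HDF5_VOL_CONNECTOR="LOG under_vol=0;under_info={}"
# Run an unmodified HDF5 application; its datasets are now stored
# in the log-based layout ("my_hdf5_app" is a placeholder)
mpiexec -n 4 ./my_hdf5_app
```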
diff --git a/doc/log_cache_async_vol.md b/doc/log_cache_async_vol.md
index 5f59fdc3..edad85a4 100644
--- a/doc/log_cache_async_vol.md
+++ b/doc/log_cache_async_vol.md
@@ -2,7 +2,7 @@
 * [Build Instructions](#build-instructions)
 * [Run Instructions](#run-e3sm-io)
 
-This demo uses [E3SM-IO](https://github.com/Parallel-NetCDF/E3SM-IO) to show how to run the Log VOL connector on top of the [Cache VOL connector](https://github.com/hpc-io/vol-cache) and the [Async VOL connector](https://github.com/hpc-io/vol-async).
+This demo uses [E3SM-IO](https://github.com/Parallel-NetCDF/E3SM-IO) to show how to run the Log VOL connector on top of the [Cache VOL connector](https://github.com/HDFGroup/vol-cache) and the [Async VOL connector](https://github.com/HDFGroup/vol-async).
 
 E3SM-IO is an I/O benchmark suite that measures the performance of the I/O kernel of
 [E3SM](https://github.com/E3SM-Project/E3SM), a state-of-the-art Earth system model,
@@ -30,7 +30,7 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
 % export HDF5_ROOT=${HDF5_DIR}
 ```
 
-+ HDF5 1.13.3: `--enable-parallel`, `--enable-threadsafe`, and `--enable-unsupported` are [required by Async VOL](https://hdf5-vol-async.readthedocs.io/en/latest/gettingstarted.html#build-async-i-o-vol) at configure time.
++ HDF5 1.14 or later: `--enable-parallel`, `--enable-threadsafe`, and `--enable-unsupported` are [required by Async VOL](https://hdf5-vol-async.readthedocs.io/en/latest/gettingstarted.html#build-async-i-o-vol) at configure time.
 
 ```shell
 # create a new folder "HDF5" under $WORKSPACE
@@ -38,9 +38,9 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
 % cd ${HDF5_DIR}
 
 # download HDF5 source codes
-% wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.3/src/hdf5-1.13.3.tar.gz
-% tar -zxf hdf5-1.13.3.tar.gz
-% cd hdf5-1.13.3
+% wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
+% tar -zxf hdf5.tar.gz
+% cd hdf5-*
 
 # configure, output saved to log.config
 % ./configure --prefix=${HDF5_DIR} \
@@ -98,7 +98,7 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
 % cd ${ASYNC_DIR}
 
 # download Async VOL source codes and create a build folder
-% wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v1.4.tar.gz
+% wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v1.4.tar.gz
 % tar -xf v1.4.tar.gz
 % cd vol-async-1.4
 % mkdir build
@@ -127,7 +127,7 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
 % export LD_LIBRARY_PATH="$ABT_DIR/lib:$LD_LIBRARY_PATH"
 
 # download Cache VOL source codes and create a build folder
-% git clone https://github.com/hpc-io/vol-cache.git
+% git clone https://github.com/HDFGroup/vol-cache.git
 % cd vol-cache
 % mkdir build
 % cd build
@@ -151,7 +151,7 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
 % cd ${LOGVOL_DIR}
 
 # download Log VOL source codes
-% git clone git@github.com:DataLib-ECP/vol-log-based.git
+% git clone git@github.com:HDFGroup/vol-log-based.git
 % cd vol-log-based
 
 # create configure file, output saved to log.autoreconf
diff --git a/doc/userguide.md b/doc/userguide.md
index 941ed09e..9532e58e 100644
--- a/doc/userguide.md
+++ b/doc/userguide.md
@@ -24,7 +24,7 @@ applications.
 ## Design of the Log VOL connector
 
-The [the Log VOL connector](https://github.com/DataLib-ECP/vol-log-based) is an HDF5
+The [Log VOL connector](https://github.com/HDFGroup/vol-log-based) is an HDF5
 Virtual Object Layer (VOL) plug-in that stores HDF5 datasets in a log-based
 storage layout, in contrast to the canonical order layout. The
 [Log-based storage layout](https://link.springer.com/chapter/10.1007/978-3-540-75416-9_34)
diff --git a/src/H5VL_log_dataset.cpp b/src/H5VL_log_dataset.cpp
index 3f1e0124..7a33dd03 100644
--- a/src/H5VL_log_dataset.cpp
+++ b/src/H5VL_log_dataset.cpp
@@ -272,17 +272,7 @@ err_out:;
     return NULL;
 } /* end H5VL_log_dataset_open() */
 
-/*-------------------------------------------------------------------------
- * Function:    H5VL_log_dataset_read
- *
- * Purpose:     Reads data elements from a dataset into a buffer.
- *
- * Return:      Success:    0
- *              Failure:    -1
- *
- *-------------------------------------------------------------------------
- */
-herr_t H5VL_log_dataset_read_1 (void *dset,
+static herr_t H5VL_log_dataset_read_elements (void *dset,
                                 hid_t mem_type_id,
                                 hid_t mem_space_id,
                                 hid_t file_space_id,
@@ -322,19 +312,9 @@ err_out:;
     // Note: dsel should be freed when the read request is deleted
     if (dsid != file_space_id) { H5Sclose (dsid); }
     return err;
-} /* end H5VL_log_dataset_read() */
+} /* end H5VL_log_dataset_read_elements() */
 
-/*-------------------------------------------------------------------------
- * Function:    H5VL_log_dataset_write
- *
- * Purpose:     Writes data elements from a buffer into a dataset.
- *
- * Return:      Success:    0
- *              Failure:    -1
- *
- *-------------------------------------------------------------------------
- */
-herr_t H5VL_log_dataset_write_1 (void *dset,
+static herr_t H5VL_log_dataset_write_elements (void *dset,
                                  hid_t mem_type_id,
                                  hid_t mem_space_id,
                                  hid_t file_space_id,
@@ -376,9 +356,19 @@ err_out:;
     if (dsel) { delete dsel; }
     if (dsid != file_space_id) { H5Sclose (dsid); }
     return err;
-} /* end H5VL_log_dataset_write() */
+} /* end H5VL_log_dataset_write_elements() */
 
-herr_t H5VL_log_dataset_read_2 (size_t count,
+/*-------------------------------------------------------------------------
+ * Function:    H5VL_log_dataset_read
+ *
+ * Purpose:     Reads data elements from a dataset into a buffer.
+ *
+ * Return:      Success:    0
+ *              Failure:    -1
+ *
+ *-------------------------------------------------------------------------
+ */
+herr_t H5VL_log_dataset_read (size_t count,
                               void *dset[],
                               hid_t mem_type_id[],
                               hid_t mem_space_id[],
@@ -390,7 +380,7 @@ herr_t H5VL_log_dataset_read_2 (size_t count,
     size_t i;
 
     for (i = 0; i < count; i++) {
-        err = H5VL_log_dataset_read_1 (dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i],
+        err = H5VL_log_dataset_read_elements (dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i],
                                        plist_id, buf[i], NULL);
         CHECK_ERR
     }
@@ -398,7 +388,17 @@
     return err;
 }
 
-herr_t H5VL_log_dataset_write_2 (size_t count,
+/*-------------------------------------------------------------------------
+ * Function:    H5VL_log_dataset_write
+ *
+ * Purpose:     Writes data elements from a buffer into a dataset.
+ *
+ * Return:      Success:    0
+ *              Failure:    -1
+ *
+ *-------------------------------------------------------------------------
+ */
+herr_t H5VL_log_dataset_write (size_t count,
                                void *dset[],
                                hid_t mem_type_id[],
                                hid_t mem_space_id[],
@@ -410,7 +410,7 @@ herr_t H5VL_log_dataset_write_2 (size_t count,
     size_t i;
 
     for (i = 0; i < count; i++) {
-        err = H5VL_log_dataset_write_1 (dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i],
+        err = H5VL_log_dataset_write_elements (dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i],
                                         plist_id, buf[i], NULL);
         CHECK_ERR
     }
diff --git a/src/H5VL_log_dataset.hpp b/src/H5VL_log_dataset.hpp
index 3ee7c0e1..60c3c2f5 100644
--- a/src/H5VL_log_dataset.hpp
+++ b/src/H5VL_log_dataset.hpp
@@ -17,14 +17,6 @@
 #define LOGVOL_SELCTION_TYPE_POINTS 0x02
 #define LOGVOL_SELCTION_TYPE_OFFSETS 0x04
 
-#ifdef HDF5_GE_1133
-#define H5VL_log_dataset_read H5VL_log_dataset_read_2
-#define H5VL_log_dataset_write H5VL_log_dataset_write_2
-#else
-#define H5VL_log_dataset_read H5VL_log_dataset_read_1
-#define H5VL_log_dataset_write H5VL_log_dataset_write_1
-#endif
-
 /* The log VOL dataset object */
 typedef struct H5VL_log_dset_info_t {
     hsize_t ndim;  // Number of dimensions
@@ -62,21 +54,7 @@
 void *H5VL_log_dataset_open (void *obj,
                              hid_t dapl_id,
                              hid_t dxpl_id,
                              void **req);
-herr_t H5VL_log_dataset_read_1 (void *dset,
-                                hid_t mem_type_id,
-                                hid_t mem_space_id,
-                                hid_t file_space_id,
-                                hid_t plist_id,
-                                void *buf,
-                                void **req);
-herr_t H5VL_log_dataset_write_1 (void *dset,
-                                 hid_t mem_type_id,
-                                 hid_t mem_space_id,
-                                 hid_t file_space_id,
-                                 hid_t plist_id,
-                                 const void *buf,
-                                 void **req);
-herr_t H5VL_log_dataset_read_2 (size_t count,
+herr_t H5VL_log_dataset_read (size_t count,
                               void *dset[],
                               hid_t mem_type_id[],
                               hid_t mem_space_id[],
@@ -84,7 +62,7 @@
                               hid_t plist_id,
                               void *buf[],
                               void **req);
-herr_t H5VL_log_dataset_write_2 (size_t count,
+herr_t H5VL_log_dataset_write (size_t count,
                                void *dset[],
                                hid_t mem_type_id[],
                                hid_t mem_space_id[],
diff --git a/src/H5VL_log_introspect.cpp b/src/H5VL_log_introspect.cpp
index e116ad12..9833102f 100644
--- a/src/H5VL_log_introspect.cpp
+++ b/src/H5VL_log_introspect.cpp
@@ -54,11 +54,7 @@ herr_t H5VL_log_introspect_get_conn_cls (void *obj,
  *
  *-------------------------------------------------------------------------
  */
-#ifdef HDF5_GE_1133
 herr_t H5VL_log_introspect_get_cap_flags (const void *info, uint64_t *cap_flags) {
-#else
-herr_t H5VL_log_introspect_get_cap_flags (const void *info, unsigned *cap_flags) {
-#endif
     herr_t err = 0;
     H5VL_log_info_t *ip = (H5VL_log_info_t *)info;
diff --git a/src/H5VL_log_introspect.hpp b/src/H5VL_log_introspect.hpp
index 70e9a4b4..05553485 100644
--- a/src/H5VL_log_introspect.hpp
+++ b/src/H5VL_log_introspect.hpp
@@ -11,11 +11,7 @@
 herr_t H5VL_log_introspect_get_conn_cls (void *obj,
                                          H5VL_get_conn_lvl_t lvl,
                                          const H5VL_class_t **conn_cls);
-#ifdef HDF5_GE_1133
 herr_t H5VL_log_introspect_get_cap_flags (const void *info, uint64_t *cap_flags);
-#else
-herr_t H5VL_log_introspect_get_cap_flags (const void *info, unsigned *cap_flags);
-#endif
 herr_t H5VL_log_introspect_opt_query (void *obj,
                                       H5VL_subclass_t cls,
                                       int opt_type,
diff --git a/src/H5VL_log_main.hpp b/src/H5VL_log_main.hpp
index c12a5d4a..66fc1da1 100644
--- a/src/H5VL_log_main.hpp
+++ b/src/H5VL_log_main.hpp
@@ -11,11 +11,7 @@
 /* Characteristics of the pass-through VOL connector */
 #define H5VL_log_NAME   "LOG"
 #define H5VL_log_REGID  514 /* VOL connector ID */
-#ifdef HDF5_GE_1133
 #define H5VL_log_APIVER 3 // Version of VOL interface used by logvol
-#else
-#define H5VL_log_APIVER 2 // Verion of VOL interface used by logvol
-#endif
 
 /********************* */
 /* Function prototypes */
 /********************* */
diff --git a/src/H5VL_logi_util.hpp b/src/H5VL_logi_util.hpp
index 4ddd3cd7..6d86a309 100644
--- a/src/H5VL_logi_util.hpp
+++ b/src/H5VL_logi_util.hpp
@@ -11,17 +11,10 @@
 #include <hdf5.h>
 
 #include "H5VL_log_obj.hpp"
 
-#ifdef HDF5_GE_1133
 #define H5VL_log_under_dataset_read(uo, uvlid, mtid, msid, dsid, dxplid, buf, req) \
     H5VLdataset_read (1, &(uo), uvlid, &(mtid), &(msid), &(dsid), dxplid, (void**)&(buf), NULL)
 #define H5VL_log_under_dataset_write(uo, uvlid, mtid, msid, dsid, dxplid, buf, req) \
     H5VLdataset_write (1, &(uo), uvlid, &(mtid), &(msid), &(dsid), dxplid, (const void**)&(buf), NULL)
-#else
-#define H5VL_log_under_dataset_read(uo, uvlid, mtid, msid, dsid, dxplid, buf, req) \
-    H5VLdataset_read (uo, uvlid, mtid, msid, dsid, dxplid, (void*)buf, NULL)
-#define H5VL_log_under_dataset_write(uo, uvlid, mtid, msid, dsid, dxplid, buf, req) \
-    H5VLdataset_write (uo, uvlid, mtid, msid, dsid, dxplid, (void*)buf, NULL)
-#endif
 
 // Utils
 extern MPI_Datatype h5t_to_mpi_type (hid_t type_id);
@@ -149,4 +142,4 @@ inline void H5VL_logi_restore_lib_stat (void *&stat) {
         CHECK_ERR
     }
     stat = NULL;
-}
\ No newline at end of file
+}
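With the `HDF5_GE_1133` fallbacks removed above, the connector now builds only against HDF5 1.14.0 or later, which the configure.ac check earlier in this patch enforces. Below is a minimal sketch for verifying an existing HDF5 installation before building; it assumes the installation's `h5pcc` wrapper is on `PATH` and reuses the `${HOME}/HDF5/latest` prefix from the INSTALL.md example.

```shell
# Print the release the HDF5 wrapper compiler was built against;
# configure now aborts for anything below 1.14.0
h5pcc -showconfig | grep -i "HDF5 Version"
# Or read the settings file shipped with an HDF5 installation
grep -i "HDF5 Version" ${HOME}/HDF5/latest/lib/libhdf5.settings
```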