Add new parallel examples #25

Merged 12 commits on Aug 24, 2023
38 changes: 32 additions & 6 deletions .github/workflows/daily-build.yml
@@ -3,6 +3,17 @@ name: hdf5 examples daily build
# Controls when the action will run. Triggers the workflow on a schedule
on:
  workflow_dispatch:
    inputs:
      use_ignore:
        description: 'Ignore has changes check'
        type: string
        required: false
        default: check
      use_environ:
        description: 'Environment to locate files'
        type: string
        required: true
        default: snapshots
#  push:
#    branches: [ "master" ]
  pull_request:
@@ -18,6 +29,8 @@ jobs:
    runs-on: ubuntu-latest
    outputs:
      hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}
      run-ignore: ${{ steps.getinputs.outputs.INPUTS_IGNORE }}
      run-environ: ${{ steps.getinputs.outputs.INPUTS_ENVIRON }}

    steps:
      - uses: actions/checkout@v3
@@ -33,11 +46,24 @@ jobs:
        id: gethdf5base
        run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT

      - run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}."

      - name: Read inputs
        id: getinputs
        run: |
          echo "INPUTS_IGNORE=${{ ((github.event.inputs.use_ignore == '' && github.event.inputs.use_ignore) || 'ignore') }}" >> $GITHUB_OUTPUT
          echo "INPUTS_ENVIRON=${{ ((github.event.inputs.use_environ == '' && github.event.inputs.use_environ) || 'snapshots') }}" >> $GITHUB_OUTPUT

      - run: echo "use_ignore is ${{ steps.getinputs.outputs.INPUTS_IGNORE }}."

      - run: echo "use_environ is ${{ steps.getinputs.outputs.INPUTS_ENVIRON }}."

  call-workflow-tarball:
    needs: get-base-names
    uses: ./.github/workflows/tarball.yml
    with:
      #use_tag: snapshot
      use_environ: snapshots
      use_ignore: ${{ needs.get-base-names.outputs.run-ignore }}
      use_environ: ${{ needs.get-base-names.outputs.run-environ }}

  call-workflow-ctest:
    needs: [get-base-names, call-workflow-tarball]
@@ -47,7 +73,7 @@ jobs:
      use_hdf: ${{ needs.get-base-names.outputs.hdf5-name }}
      #use_tag: snapshot
      #use_environ: snapshots
    if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
    if: ${{ ((needs.get-base-names.outputs.run-environ == 'snapshots') && ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore'))) || (needs.get-base-names.outputs.run-environ == 'release') }}

  call-workflow-release:
    needs: [call-workflow-tarball, call-workflow-ctest]
@@ -58,7 +84,7 @@ jobs:
      file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
      file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }}
      file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }}
      use_tag: snapshot
      use_environ: snapshots
    if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
      use_tag: ${{ needs.get-base-names.outputs.run-environ }}
      use_environ: ${{ needs.get-base-names.outputs.run-environ }}
    if: ${{ ((needs.get-base-names.outputs.run-environ == 'snapshots') && ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore'))) || (needs.get-base-names.outputs.run-environ == 'release') }}

1 change: 1 addition & 0 deletions .github/workflows/release.yml
@@ -83,6 +83,7 @@ jobs:
    needs: create-files-ctest
    uses: ./.github/workflows/cmake-ctest.yml
    with:
      use_hdf: ${{ needs.log-the-inputs.outputs.rel_tag }}
      file_base: ${{ needs.create-files-ctest.outputs.file_base }}

  call-workflow-release:
2 changes: 1 addition & 1 deletion .github/workflows/tarball.yml
@@ -66,7 +66,7 @@ jobs:
        with:
          seconds: 86400 # One day in seconds
          branch: '${{ steps.get-branch-name.outputs.branch_ref }}'
        if: ${{ inputs.use_environ == 'snapshots' }}
        if: ${{ (inputs.use_environ == 'snapshots' && inputs.use_ignore == 'check') }}

      - run: echo "You have ${{ steps.check-new-commits.outputs.new-commits-number }} new commit(s) in ${{ steps.get-branch-name.outputs.BRANCH_REF }} ✅!"
        if: ${{ steps.check-new-commits.outputs.has-new-commits == 'true' }}
4 changes: 2 additions & 2 deletions C/CMakeLists.txt
@@ -10,11 +10,11 @@ add_subdirectory (${PROJECT_SOURCE_DIR}/H5T)

if (${H5_LIBVER_DIR} GREATER 16)
# add_subdirectory (${PROJECT_SOURCE_DIR}/Performance)
  if (USE_SHARED_LIBS AND HDF_BUILD_FILTERS)
  if (USE_SHARED_LIBS AND HDF_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT)
    add_subdirectory (${PROJECT_SOURCE_DIR}/H5Filters)
  endif ()
endif ()

if (H5_HAVE_PARALLEL)
if (H5_HAVE_PARALLEL AND HDF5_ENABLE_PARALLEL)
  add_subdirectory (${PROJECT_SOURCE_DIR}/H5Parallel)
endif ()
12 changes: 9 additions & 3 deletions C/H5Parallel/CMakeLists.txt
@@ -22,7 +22,7 @@ foreach (example ${examples})
endforeach ()

if (BUILD_TESTING)
  macro (ADD_GREP_TEST testname)
  macro (ADD_GREP_TEST testname mumprocs)
    add_test (
        NAME ${EXAMPLE_VARNAME}_${testname}-clearall
        COMMAND ${CMAKE_COMMAND}
@@ -33,7 +33,7 @@ if (BUILD_TESTING)
      set_tests_properties (${EXAMPLE_VARNAME}_${testname}-clearall PROPERTIES DEPENDS ${last_test})
    endif ()
    add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND "${CMAKE_COMMAND}"
        -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$<TARGET_FILE:${EXAMPLE_VARNAME}_${testname}>;${MPIEXEC_POSTFLAGS}"
        -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${mumprocs};${MPIEXEC_PREFLAGS};$<TARGET_FILE:${EXAMPLE_VARNAME}_${testname}>;${MPIEXEC_POSTFLAGS}"
        -D "TEST_ARGS:STRING="
        -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
        -D "TEST_EXPECT=0"
@@ -53,7 +53,13 @@ if (BUILD_TESTING)

  foreach (example ${examples})
    get_filename_component (example_name ${example} NAME_WE)
    ADD_GREP_TEST (${example_name})
    if (${example_name} STREQUAL "ph5_hyperslab_by_col")
      ADD_GREP_TEST (${example_name} 2)
    elseif (${example_name} STREQUAL "ph5_hyperslab_by_chunk" OR ${example_name} STREQUAL "ph5_hyperslab_by_pattern")
      ADD_GREP_TEST (${example_name} 4)
    else ()
      ADD_GREP_TEST (${example_name} ${NUMPROCS})
    endif ()
  endforeach ()

endif ()
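The hard-coded process counts above match the data decompositions the new hyperslab examples assume (2 ranks for the column split, 4 for the chunk and pattern cases), so the grep tests launch them with exactly those counts. As an illustration only, and not the contents of any file added in this PR, the following minimal sketch shows the per-rank hyperslab selection pattern this family of examples exercises; the file name, dataset name, and the even row split across ranks are assumptions.

/* Illustrative sketch only (not one of the files added by this PR):
 * each rank writes its own block of rows of an NX x NY dataset, which
 * is the selection pattern the ph5_hyperslab_* examples exercise.
 * Assumes NX is evenly divisible by the number of ranks. */
#include "hdf5.h"
#include <stdlib.h>

#define NX 8
#define NY 5

int
main(int argc, char **argv)
{
    int mpi_size, mpi_rank;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

    /* Parallel file access, as in ph5_dataset.c below */
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
    hid_t file = H5Fcreate("hyperslab_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    H5Pclose(fapl);

    hsize_t dims[2]   = {NX, NY};
    hid_t   filespace = H5Screate_simple(2, dims, NULL);
    hid_t   dset      = H5Dcreate(file, "IntArray", H5T_NATIVE_INT, filespace,
                                  H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* This rank's block of rows */
    hsize_t count[2]  = {NX / (hsize_t)mpi_size, NY};
    hsize_t offset[2] = {(hsize_t)mpi_rank * count[0], 0};
    hid_t   memspace  = H5Screate_simple(2, count, NULL);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);

    int *buf = (int *)malloc(sizeof(int) * count[0] * count[1]);
    for (hsize_t i = 0; i < count[0] * count[1]; i++)
        buf[i] = mpi_rank + 10;

    /* Collective write of the selected hyperslab */
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
    H5Dwrite(dset, H5T_NATIVE_INT, memspace, filespace, dxpl, buf);

    free(buf);
    H5Pclose(dxpl);
    H5Sclose(memspace);
    H5Sclose(filespace);
    H5Dclose(dset);
    H5Fclose(file);
    MPI_Finalize();
    return 0;
}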
6 changes: 6 additions & 0 deletions C/H5Parallel/C_sourcefiles.cmake
@@ -5,6 +5,12 @@ set (examples
    ph5example.c
    ph5_filtered_writes.c
    ph5_filtered_writes_no_sel.c
    ph5_dataset.c
    ph5_file_create.c
    ph5_hyperslab_by_row.c
    ph5_hyperslab_by_col.c
    ph5_hyperslab_by_pattern.c
    ph5_hyperslab_by_chunk.c
)
if (${HDF5_ENABLE_SUBFILING_VFD})
  list (APPEND examples ph5_subfiling.c)
103 changes: 103 additions & 0 deletions C/H5Parallel/ph5_dataset.c
@@ -0,0 +1,103 @@
/*
 * This example writes data to the HDF5 file.
 * Number of processes is assumed to be 1 or multiples of 2 (up to 8)
 */

#include "hdf5.h"
#include "stdlib.h"

#define H5FILE_NAME "SDS.h5"
#define DATASETNAME "IntArray"
#define NX          8 /* dataset dimensions */
#define NY          5
#define RANK        2

int
main (int argc, char **argv)
{
    /*
     * HDF5 APIs definitions
     */
    hid_t   file_id, dset_id;   /* file and dataset identifiers */
    hid_t   filespace;          /* file and memory dataspace identifiers */
    hsize_t dimsf[] = {NX, NY}; /* dataset dimensions */
    int    *data;               /* pointer to data buffer to write */
    hid_t   plist_id;           /* property list identifier */
    int     i;
    herr_t  status;

    /*
     * MPI variables
     */
    int      mpi_size, mpi_rank;
    MPI_Comm comm = MPI_COMM_WORLD;
    MPI_Info info = MPI_INFO_NULL;

    /*
     * Initialize MPI
     */
    MPI_Init(&argc, &argv);
    MPI_Comm_size(comm, &mpi_size);
    MPI_Comm_rank(comm, &mpi_rank);

    /*
     * Initialize data buffer
     */
    data = (int *) malloc(sizeof(int)*dimsf[0]*dimsf[1]);
    for (i=0; i < dimsf[0]*dimsf[1]; i++) {
        data[i] = i;
    }
    /*
     * Set up file access property list with parallel I/O access
     */
    plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, comm, info);

    /*
     * Create a new file collectively and release property list identifier.
     */
    file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
    H5Pclose(plist_id);

    /*
     * Create the dataspace for the dataset.
     */
    filespace = H5Screate_simple(RANK, dimsf, NULL);

    /*
     * Create the dataset with default properties and close filespace.
     */
    dset_id = H5Dcreate(file_id, DATASETNAME, H5T_NATIVE_INT, filespace,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    /*
     * Create property list for collective dataset write.
     */
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

    /*
     * To write dataset independently use
     *
     * H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT);
     */

    status = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
                      plist_id, data);
    free(data);

    /*
     * Close/release resources.
     */
    H5Dclose(dset_id);
    H5Sclose(filespace);
    H5Pclose(plist_id);
    H5Fclose(file_id);

    if (mpi_rank == 0)
        printf("PHDF5 example finished with no errors\n");

    MPI_Finalize();

    return 0;
}
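For context, not part of this PR's diff, the collective counterpart on the reading side looks like the minimal sketch below: it reopens SDS.h5 with the same MPI-IO file access property list and reads IntArray back with a collective transfer property list. The rank-0 verification print is an assumption added for illustration.

/* Illustrative read-back companion to ph5_dataset.c (not part of this PR). */
#include "hdf5.h"
#include <stdio.h>
#include <stdlib.h>

int
main(int argc, char **argv)
{
    int mpi_size, mpi_rank;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

    /* Same parallel file access property list as the writer */
    hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, MPI_COMM_WORLD, MPI_INFO_NULL);

    hid_t file_id = H5Fopen("SDS.h5", H5F_ACC_RDONLY, plist_id);
    H5Pclose(plist_id);

    hid_t dset_id = H5Dopen2(file_id, "IntArray", H5P_DEFAULT);

    /* 8 x 5 dataset, matching NX x NY in ph5_dataset.c */
    int *data = (int *)malloc(sizeof(int) * 8 * 5);

    /* Collective read of the whole dataset on every rank */
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);
    H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, plist_id, data);

    if (mpi_rank == 0)
        printf("data[0]=%d data[39]=%d\n", data[0], data[39]);

    free(data);
    H5Pclose(plist_id);
    H5Dclose(dset_id);
    H5Fclose(file_id);

    MPI_Finalize();

    return 0;
}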
60 changes: 60 additions & 0 deletions C/H5Parallel/ph5_file_create.c
@@ -0,0 +1,60 @@
/*
 * This example creates an HDF5 file.
 */

#include "hdf5.h"

#define H5FILE_NAME "SDS_row.h5"

int
main (int argc, char **argv)
{
    /*
     * HDF5 APIs definitions
     */
    hid_t  file_id;  /* file and dataset identifiers */
    hid_t  plist_id; /* property list identifier( access template) */
    herr_t status;

    /*
     * MPI variables
     */
    int      mpi_size, mpi_rank;
    MPI_Comm comm = MPI_COMM_WORLD;
    MPI_Info info = MPI_INFO_NULL;

    /*
     * Initialize MPI
     */
    MPI_Init(&argc, &argv);
    MPI_Comm_size(comm, &mpi_size);
    MPI_Comm_rank(comm, &mpi_rank);

    /*
     * Set up file access property list with parallel I/O access
     */
    plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, comm, info);

    /*
     * Create a new file collectively.
     */
    file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);

    /*
     * Close property list.
     */
    H5Pclose(plist_id);

    /*
     * Close the file.
     */
    H5Fclose(file_id);

    if (mpi_rank == 0)
        printf("PHDF5 example finished with no errors\n");

    MPI_Finalize();

    return 0;
}
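Both new examples pass info = MPI_INFO_NULL to H5Pset_fapl_mpio. Where MPI-IO tuning hints are wanted, an MPI_Info object can be created and passed instead, as in the sketch below; the hint keys and values shown are assumptions and depend on the MPI implementation and file system.

/* Sketch (not part of this PR): passing MPI-IO hints through the
 * file access property list. Hint keys/values are illustrative. */
#include "hdf5.h"

int
main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    MPI_Info info;
    MPI_Info_create(&info);
    MPI_Info_set(info, "cb_buffer_size", "16777216"); /* 16 MiB collective buffer (assumed hint) */
    MPI_Info_set(info, "romio_cb_write", "enable");   /* collective buffering for writes (assumed hint) */

    hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, MPI_COMM_WORLD, info);

    hid_t file_id = H5Fcreate("SDS_row.h5", H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);

    /* The property list keeps its own copy of comm and info,
     * so the info object can be freed here. */
    MPI_Info_free(&info);
    H5Pclose(plist_id);

    H5Fclose(file_id);
    MPI_Finalize();
    return 0;
}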
2 changes: 0 additions & 2 deletions C/H5Parallel/ph5_filtered_writes.c
@@ -125,7 +125,6 @@ write_dataset_no_overlap(hid_t file_id, hid_t dxpl_id)
    hsize_t start[EXAMPLE_DSET_DIMS];
    hsize_t stride[EXAMPLE_DSET_DIMS];
    hsize_t count[EXAMPLE_DSET_DIMS];
    size_t  i, j;
    hid_t   dset_id        = H5I_INVALID_HID;
    hid_t   dcpl_id        = H5I_INVALID_HID;
    hid_t   file_dataspace = H5I_INVALID_HID;
@@ -244,7 +243,6 @@ write_dataset_overlap(hid_t file_id, hid_t dxpl_id)
    hsize_t start[EXAMPLE_DSET_DIMS];
    hsize_t stride[EXAMPLE_DSET_DIMS];
    hsize_t count[EXAMPLE_DSET_DIMS];
    size_t  i, j;
    hid_t   dset_id        = H5I_INVALID_HID;
    hid_t   dcpl_id        = H5I_INVALID_HID;
    hid_t   file_dataspace = H5I_INVALID_HID;
1 change: 0 additions & 1 deletion C/H5Parallel/ph5_filtered_writes_no_sel.c
@@ -126,7 +126,6 @@ write_dataset_some_no_sel(hid_t file_id, hid_t dxpl_id)
    hsize_t stride[EXAMPLE_DSET_DIMS];
    hsize_t count[EXAMPLE_DSET_DIMS];
    hbool_t no_selection;
    size_t  i, j;
    hid_t   dset_id        = H5I_INVALID_HID;
    hid_t   dcpl_id        = H5I_INVALID_HID;
    hid_t   file_dataspace = H5I_INVALID_HID;