Commit

dump_hdf5 added.

LutzGross committed Jan 3, 2025
1 parent 05d457c commit a16d3f8
Showing 16 changed files with 349 additions and 4 deletions.
7 changes: 6 additions & 1 deletion SConstruct
@@ -110,9 +110,14 @@ vars.AddVariables(
('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
BoolVariable('gmsh', 'Enable gmsh, if available', True),
BoolVariable('use_sympy', 'Enable sympy, if available. Currently the sympy-escript connection is not working due to a problem with code printing. By default symbols are not installed.', False),
BoolVariable('hdf5', 'Enable hdf5, if available', True),
('hdf5_prefix', 'Prefix/Paths of hdf5 installation', default_prefix),
('hdf5_libs', 'HDF5 libraries to link with', 'DEFAULT'),
#TODO: remove
EnumVariable('netcdf', 'Enable netCDF file support', False, allowed_values=netcdf_flavours),
('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
('netcdf_libs', 'netCDF libraries to link with', 'DEFAULT'),

BoolVariable('zlib', 'Enable zLib', False),
('zlib_prefix', 'Prefix/Paths to zlib installation', default_prefix),
('zlib_libs', 'zlib libraries to link with', ['zlib']),
@@ -1058,7 +1063,7 @@ def print_summary():
else:
print(" netcdf: NO")
e_list=[]
for i in ('weipa','debug','openmp','cppunit','mkl','mpi4py', 'zlib',
for i in ('hdf5', 'weipa','debug','openmp','cppunit','mkl','mpi4py', 'zlib',
'mumps', 'scipy','silo','sympy','umfpack','visit'):
if env[i]: e_list.append(i)
else: d_list.append(i)
2 changes: 1 addition & 1 deletion doc/install/flatpak.tex
@@ -1,4 +1,4 @@
%!TEX root = install.tex
e%!TEX root = install.tex
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Copyright (c) 2012-2018 by The University of Queensland
% http://www.uq.edu.au
31 changes: 31 additions & 0 deletions escriptcore/src/Data.cpp
@@ -5435,6 +5435,37 @@ Data::dump(const std::string fileName) const
}
}

void
Data::dump_hdf5(const std::string fileName) const
{
if (isComplex())
{
Data temp_re = (*this).real();
Data temp_im = (*this).imag();
temp_re.dump_hdf5( fileName + "_re" );
temp_im.dump_hdf5( fileName + "_im" );
//throw DataException("Error - Data::dump_hdf5 : complex data are not supported. Split into real and imaginary part.");
} else {
try
{
if (isLazy())
{
Data temp(*this); // this is to get a non-const object which we can resolve
temp.resolve();
temp.dump_hdf5(fileName);
}
else
{
return m_data->dump_hdf5(fileName);
}
}
catch (std::exception& e)
{
std::cout << e.what() << std::endl;
}
}
}

int
Data::get_MPISize() const
{
7 changes: 7 additions & 0 deletions escriptcore/src/Data.h
@@ -590,6 +590,13 @@ contains datapoints.
void
dump(const std::string fileName) const;

/**
\brief
dumps the object into an HDF5 file
*/
void
dump_hdf5(const std::string fileName) const;

/**
\brief returns the values of the object as a list of tuples (one for each datapoint).
6 changes: 6 additions & 0 deletions escriptcore/src/DataAbstract.cpp
@@ -133,6 +133,12 @@ DataAbstract::operandCheck(const DataAbstract& right) const
}
}

void
DataAbstract::dump_hdf5(const std::string fileName) const
{
throw DataException("Error - DataAbstract::dump_hdf5: not implemented.");
}

void
DataAbstract::dump(const std::string fileName) const
{
8 changes: 8 additions & 0 deletions escriptcore/src/DataAbstract.h
@@ -134,6 +134,14 @@ class ESCRIPT_DLL_API DataAbstract : public REFCOUNT_BASE_CLASS(DataAbstract)
DataReady_ptr
resolve()=0;

/**
\brief
dumps the object into a HDF5 file
*/
virtual
void
dump_hdf5(const std::string fileName) const;

/**
\brief
dumps the object into a netCDF file
71 changes: 71 additions & 0 deletions escriptcore/src/DataConstant.cpp
@@ -32,6 +32,9 @@
#endif
#endif

#ifdef ESYS_HAVE_HDF5
#include <H5Cpp.h>
#endif

#ifdef SLOWSHARECHECK
#define CHECK_FOR_EX_WRITE if (isShared()) {\
@@ -55,6 +58,7 @@

using namespace std;
using namespace boost::python;
using namespace escript::DataTypes;

#ifdef NETCDF4
using namespace netCDF;
@@ -611,6 +615,73 @@ DataConstant::setToZero()
for (int i=0; i<n ;++i) m_data_r[i]=0.;
}
}
void DataConstant::dump_hdf5(const std::string fileName) const
{
#ifdef ESYS_HAVE_HDF5
int rank = getRank();
int fs_type= getFunctionSpace().getTypeCode();
const DataTypes::ShapeType& shape = getShape();
JMPI mpiInfo(getFunctionSpace().getDomain()->getMPI());
if (isComplex())
{
throw DataException("Error - DataConstant::dump_hdf5: complex data are not supported. Split into real and imaginary part.");
}

const std::string newFileName(mpiInfo->appendRankToFileName(fileName));

#ifdef ESYS_MPI
/* Serialize I/O */
const int mpi_iam = mpiInfo->rank;
const int mpi_num = mpiInfo->size;
MPI_Status status;
if (mpi_iam > 0)
MPI_Recv(&ndims, 0, MPI_INT, mpi_iam-1, 81802, mpiInfo->comm, &status);
#endif
try
{
H5::H5File h5_file(newFileName, H5F_ACC_TRUNC);
// .... add meta data ............
uint h5_shape[DataTypes::maxRank]; // dataset dimensions
for (uint i = 0; i < rank; i++) {
h5_shape[i]= shape[i];
}
hsize_t h5_shape_dims[1] = {rank};
H5::DataSet h5_dmeta = h5_file.createDataSet("meta", H5::PredType::NATIVE_UINT, H5::DataSpace(1, h5_shape_dims ) );
h5_dmeta.write( h5_shape, H5::PredType::NATIVE_UINT);
// data type
hsize_t h5_typeid_dims[1] = { 0 };
uint h5_type_id[1] = { 2 };
H5::Attribute h5_typeid_attr = h5_dmeta.createAttribute("type_id", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims ) );
h5_typeid_attr.write( H5::PredType::NATIVE_UINT , h5_type_id );

uint h5_rank[1] = { rank };
H5::Attribute h5_rank_attr = h5_dmeta.createAttribute("rank", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims ) );
h5_rank_attr.write( H5::PredType::NATIVE_UINT , h5_rank );

uint dfs_type[1] = { fs_type };
H5::Attribute h5_fs_type_attr = h5_dmeta.createAttribute("function_space_type", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims) );
h5_fs_type_attr.write( H5::PredType::NATIVE_UINT , dfs_type );
// .... end meta data ............
// ... add data ....
hsize_t h5_data_length[1] = { m_data_r.size() };
const double* d_ptr=&(m_data_r[0]);
H5::DataSet h5_dataset_data = h5_file.createDataSet("data", H5::PredType::NATIVE_DOUBLE, H5::DataSpace(1 , h5_data_length ) );
h5_dataset_data.write(d_ptr, H5::PredType::NATIVE_DOUBLE);
}
// catch failure caused by the H5File operations
catch (H5::Exception& error)
{
error.printErrorStack();
throw DataException("Error - DataConstant:: creating HDF5 file failed.");
}
#ifdef ESYS_MPI
if ( mpi_iam < mpi_num-1 ) MPI_Send(&ndims, 0, MPI_INT, mpi_iam+1, 81802, MPI_COMM_WORLD);
#endif
#else
throw DataException("DataConstant::dump_hdf5: not configured with HDF5. Please contact your installation manager.");
#endif

}

#ifdef NETCDF4
void
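For reference, DataConstant::dump_hdf5 above writes one file per MPI rank containing a "meta" dataset that holds the shape, with "type_id", "rank" and "function_space_type" attributes, plus a flat "data" dataset of doubles. The stand-alone program below is a minimal reader sketch for that layout using the HDF5 C++ API; it is not part of this commit, and the file name example.h5 is a placeholder. A reader for DataExpanded output would additionally open the "sample_id" dataset written further down.

#include <H5Cpp.h>
#include <iostream>
#include <vector>

int main()
{
    // open a file produced by DataConstant::dump_hdf5 (name is a placeholder)
    H5::H5File h5_file("example.h5", H5F_ACC_RDONLY);

    // "meta" holds the shape; its attributes carry the rank and function space type
    H5::DataSet h5_meta = h5_file.openDataSet("meta");
    unsigned int rank = 0, fs_type = 0;
    h5_meta.openAttribute("rank").read(H5::PredType::NATIVE_UINT, &rank);
    h5_meta.openAttribute("function_space_type").read(H5::PredType::NATIVE_UINT, &fs_type);
    std::vector<unsigned int> shape(rank > 0 ? rank : 1);
    h5_meta.read(&shape[0], H5::PredType::NATIVE_UINT);

    // "data" is a flat array of doubles
    H5::DataSet h5_data = h5_file.openDataSet("data");
    hsize_t n = 0;
    h5_data.getSpace().getSimpleExtentDims(&n);
    std::vector<double> values(n);
    h5_data.read(&values[0], H5::PredType::NATIVE_DOUBLE);

    std::cout << "rank " << rank << ", function space type " << fs_type
              << ", " << n << " values" << std::endl;
    return 0;
}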
9 changes: 9 additions & 0 deletions escriptcore/src/DataConstant.h
@@ -186,6 +186,15 @@ typedef DataReady parent;
void
dump(const std::string fileName) const;

/**
\brief
dumps the object into an HDF5 file
*/
ESCRIPT_DLL_API
virtual
void
dump_hdf5(const std::string fileName) const;

/**
\brief
sets all values to zero
87 changes: 85 additions & 2 deletions escriptcore/src/DataExpanded.cpp
@@ -31,10 +31,14 @@
#include <ncVar.h>
#include <ncFile.h>
#else
#include <netcdfcpp.h>
#include <netcdfcpp.h>
#endif
#endif

#ifdef ESYS_HAVE_HDF5
#include <H5Cpp.h>
#endif

using namespace std;
using namespace escript::DataTypes;

@@ -44,7 +48,6 @@ using namespace escript::DataTypes;
#endif
#endif


#ifdef SLOWSHARECHECK
#define CHECK_FOR_EX_WRITE do {\
if (isShared()) {\
@@ -1191,6 +1194,86 @@ void DataExpanded::setToZero()
}
}


void DataExpanded::dump_hdf5(const std::string fileName) const
{
#ifdef ESYS_HAVE_HDF5
int rank = getRank();
int fs_type= getFunctionSpace().getTypeCode();
const DataTypes::ShapeType& shape = getShape();

const dim_t* ids_p=getFunctionSpace().borrowSampleReferenceIDs();
JMPI mpiInfo(getFunctionSpace().getDomain()->getMPI());
if (isComplex())
{
throw DataException("Error - DataExpanded::dump_hdf5: complex data are not supported. Split into real and imaginary part.");
}
const std::string newFileName(mpiInfo->appendRankToFileName(fileName));
#ifdef ESYS_MPI
/* Serialize I/O */
const int mpi_iam = mpiInfo->rank;
const int mpi_num = mpiInfo->size;
MPI_Status status;
if (mpi_iam > 0)
MPI_Recv(&ndims, 0, MPI_INT, mpi_iam-1, 81802, mpiInfo->comm, &status);
#endif

try
{
H5::H5File h5_file(newFileName, H5F_ACC_TRUNC);
// .... add meta data ............
uint h5_shape[DataTypes::maxRank]; // dataset dimensions
for (uint i = 0; i < rank; i++) {
h5_shape[i]= shape[i];
}
hsize_t h5_shape_dims[1] = {rank};
H5::DataSet h5_dmeta = h5_file.createDataSet("meta", H5::PredType::NATIVE_UINT, H5::DataSpace(1, h5_shape_dims ) );
h5_dmeta.write( h5_shape, H5::PredType::NATIVE_UINT);
// data type
hsize_t h5_typeid_dims[1] = { 1 };
uint h5_type_id[1] = { 2 };
H5::Attribute h5_typeid_attr = h5_dmeta.createAttribute("type_id", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims ) );
h5_typeid_attr.write( H5::PredType::NATIVE_UINT , h5_type_id );

uint h5_rank[1] = { rank };
H5::Attribute h5_rank_attr = h5_dmeta.createAttribute("rank", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims ) );
h5_rank_attr.write( H5::PredType::NATIVE_UINT , h5_rank );

uint dfs_type[1] = { fs_type };
H5::Attribute h5_fs_type_attr = h5_dmeta.createAttribute("function_space_type", H5::PredType::NATIVE_UINT, H5::DataSpace(1,h5_typeid_dims) );
h5_fs_type_attr.write( H5::PredType::NATIVE_UINT , dfs_type );
// .... end meta data ............
// ... add index of samples ....
hsize_t h5_id_dims[1] = { getFunctionSpace().getNumSamples() };
H5::DataSpace h5_dataspace_ids(1 , h5_id_dims );
#ifdef ESYS_INDEXTYPE_LONG
H5::DataSet h5_dataset_ids = h5_file.createDataSet("sample_id", H5::PredType::NATIVE_LONG, h5_dataspace_ids );
h5_dataset_ids.write(ids_p, H5::PredType::NATIVE_LONG);
#else
H5::DataSet h5_dataset_ids = h5_file.createDataSet("sample_id", H5::PredType::NATIVE_INT, h5_dataspace_ids );
h5_dataset_ids.write(ids_p, H5::PredType::NATIVE_INT);
#endif
// ... add data ....
const double* d_ptr=&(m_data_r[0]);
hsize_t h5_data_length[1] = { m_data_r.size() };
H5::DataSet h5_dataset_data = h5_file.createDataSet("data", H5::PredType::NATIVE_DOUBLE, H5::DataSpace(1 , h5_data_length ) );
h5_dataset_data.write(d_ptr, H5::PredType::NATIVE_DOUBLE);
}
// catch failure caused by the H5File operations
catch (H5::Exception& error)
{
error.printErrorStack();
throw DataException("Error - DataExpanded:: creating HDF5 file failed.");
}
#ifdef ESYS_MPI
if ( mpi_iam < mpi_num-1 ) MPI_Send(&ndims, 0, MPI_INT, mpi_iam+1, 81802, MPI_COMM_WORLD);
#endif
#else
throw DataException("DataExpanded::dump_hdf5: not configured with HDF5. Please contact your installation manager.");
#endif

}

#ifdef NETCDF4
void DataExpanded::dump(const std::string fileName) const
{
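Both dump_hdf5 implementations above serialize file output across MPI ranks by passing a zero-length message with tag 81802 from rank to rank: each rank waits for its predecessor before writing and then signals its successor, so the per-rank files are written one after the other. The self-contained sketch below illustrates that pattern with plain MPI (no escript types; the printf stands in for the HDF5 write) and is not part of the commit.

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    int rank = 0, size = 1, token = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    if (rank > 0)          // wait until the previous rank has finished its file
        MPI_Recv(&token, 1, MPI_INT, rank - 1, 81802, MPI_COMM_WORLD, MPI_STATUS_IGNORE);

    std::printf("rank %d writes its file\n", rank);   // stand-in for the HDF5 write

    if (rank < size - 1)   // let the next rank proceed
        MPI_Send(&token, 1, MPI_INT, rank + 1, 81802, MPI_COMM_WORLD);

    MPI_Finalize();
    return 0;
}

Because each rank writes its own file (the rank is appended to the file name), no collective HDF5 I/O is required; the token passing only orders the writes.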
9 changes: 9 additions & 0 deletions escriptcore/src/DataExpanded.h
@@ -225,6 +225,15 @@ TODO Note that this constructor will also copy data to all points if it only con
DataAbstract*
zeroedCopy() const;

/**
\brief
dumps the object into a HDF5 file
*/
ESCRIPT_DLL_API
virtual
void
dump_hdf5(const std::string fileName) const;


/**
\brief
