Geant4 11.4.0
Toolkit for the simulation of the passage of particles through matter
Loading...
Searching...
No Matches
HAPI_HDFDataManager.cc
Go to the documentation of this file.
1/*
2# <<BEGIN-copyright>>
3# Copyright 2019, Lawrence Livermore National Security, LLC.
4# This file is part of the gidiplus package (https://github.com/LLNL/gidiplus).
5# gidiplus is licensed under the MIT license (see https://opensource.org/licenses/MIT).
6# SPDX-License-Identifier: MIT
7# <<END-copyright>>
8*/
9
10#include "HAPI.hpp"
11#include <vector>
12
13#ifdef HAPI_USE_HDF5
14namespace HAPI {
15
    /// Constructor: opens the HDF5 file *a_filename* read-only and caches
    /// dataset/dataspace handles for the optional "iData" (integer) and
    /// "dData" (double) datasets. Absence of either dataset is recorded in
    /// m_iDataPresent / m_dDataPresent rather than treated as an error.
    /// NOTE(review): m_file_id is not checked for failure here — if the file
    /// cannot be opened, both datasets will simply appear absent. Confirm this
    /// is the intended behavior for missing/unreadable files.
    HDFDataManager::HDFDataManager(std::string const &a_filename) :
            m_filename( a_filename ) {

#if defined (GIDIP_HAVE_COMPILER_FLOATING_POINT_EXCEPTIONS)
        // Disable SIGFPE trapping around the HDF5 calls (disable sigfpe cores).
        LUPI_FPE_disable_and_clear( __FILE__, __LINE__ );
#endif

        m_file_id = H5Fopen( a_filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT );
        // Silence HDF5's automatic error printing; the H5Dopen2 probes below
        // are expected to fail when a dataset is not present.
        H5Eset_auto1( nullptr, nullptr );

        // Probe for the optional integer dataset.
        m_dataset_ints = H5Dopen2( m_file_id, "iData", H5P_DEFAULT );
        m_iDataPresent = m_dataset_ints != H5I_INVALID_HID;
        if( m_iDataPresent ) m_dataspace_ints = H5Dget_space( m_dataset_ints );

        // Probe for the optional double dataset.
        m_dataset_doubles = H5Dopen2( m_file_id, "dData", H5P_DEFAULT );
        m_dDataPresent = m_dataset_doubles != H5I_INVALID_HID;
        if( m_dDataPresent ) m_dataspace_doubles = H5Dget_space( m_dataset_doubles );

#if defined (GIDIP_HAVE_COMPILER_FLOATING_POINT_EXCEPTIONS)
        // Re-enable floating point exception detection
        LUPI_FPE_test( __FILE__, __LINE__ );            // test sigfpe exception
        LUPI_FPE_enable( __FILE__, __LINE__ );          // reenable sigfpe cores
#endif

        // Hyperslab selections in getInts/getDoubles always use unit stride
        // and unit block size.
        m_stride[0] = 1;
        m_block[0] = 1;
    }
44
45 HDFDataManager::~HDFDataManager()
46 {
47 if( m_iDataPresent ) {
48 H5Dclose(m_dataset_ints);
49 H5Sclose(m_dataspace_ints);
50 }
51 if( m_iDataPresent ) {
52 H5Dclose(m_dataset_doubles);
53 H5Sclose(m_dataspace_doubles);
54 }
55 H5Fclose(m_file_id);
56 }
57
58 void HDFDataManager::getDoubles(nf_Buffer<double> &result, size_t startIndex, size_t endIndex)
59 {
60 if( !m_dDataPresent ) throw LUPI::Exception( "HDFDataManager::getDoubles: HDF5 file " + m_filename + " has no 'dData' dataset." );
61
62 hid_t memspace;
63 herr_t status;
64
65 hsize_t size = endIndex - startIndex;
66
67 hsize_t dims[] {size};
68 hsize_t offset[] {startIndex};
69 hsize_t count[] {size};
70
71 result.resize(size);
72 m_num_double_reads ++;
73 m_num_double_elem += size;
74
75 // now can we access the allocated array and read into that?
76
77 memspace = H5Screate_simple(1, dims, nullptr);
78 status = H5Sselect_hyperslab(m_dataspace_doubles, H5S_SELECT_SET, offset, m_stride, count, m_block);
79 if( status != 0 ) throw "H5Sselect_hyperslab error in HDFDataManager::getDoubles.";
80
81 status = H5Dread(m_dataset_doubles, H5T_NATIVE_DOUBLE, memspace, m_dataspace_doubles, H5P_DEFAULT, result.data());
82 if( status != 0 ) throw "H5Dread error in HDFDataManager::getDoubles.";
83
84 H5Sclose(memspace);
85
86 }
87
88 void HDFDataManager::getInts(nf_Buffer<int> &result, size_t startIndex, size_t endIndex)
89 {
90 if( !m_iDataPresent ) throw LUPI::Exception( "HDFDataManager::getInts: HDF5 file " + m_filename + " has no 'iData' dataset." );
91
92 hid_t memspace;
93 herr_t status;
94 hsize_t size = endIndex - startIndex;
95
96 hsize_t dims[] {size};
97 hsize_t offset[] {startIndex};
98 hsize_t count[] {size};
99
100 result.resize(size);
101
102 m_num_int_reads ++;
103 m_num_int_elem += size;
104
105 memspace = H5Screate_simple(1, dims, nullptr);
106 status = H5Sselect_hyperslab(m_dataspace_ints, H5S_SELECT_SET, offset, m_stride, count, m_block);
107 if( status != 0 ) throw "H5Sselect_hyperslab error in HDFDataManager::getDoubles.";
108
109 status = H5Dread(m_dataset_ints, H5T_NATIVE_INT, memspace, m_dataspace_ints, H5P_DEFAULT, result.data());
110 if( status != 0 ) throw "H5Dread error in HDFDataManager::getDoubles.";
111
112 H5Sclose(memspace);
113
114 }
115
116}
117#endif
G4ThreadLocal T * G4GeomSplitter< T >::offset
Definition HAPI.hpp:34