KronLinInv 0.3
Kronecker-product-based linear inversion

◆ writereal1darrh5()

subroutine readwriteh5::writereal1darrh5 ( character(len=1024), intent(in)  outfile,
character(len=*), intent(in)  dsetname,
real(dp), dimension(:), intent(in)  arr 
)

Definition at line 55 of file rdwrhdf5.f08.
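
A minimal usage sketch (the program name, output file name, and dataset name are illustrative; the dp kind is assumed to be made available by the readwriteh5 module, as in the interface above):

program write_example
  use readwriteh5                        ! module documented on this page
  implicit none
  character(len=1024) :: outfile         ! must have the same length as the dummy argument
  real(dp) :: model(1000)                ! dp assumed to come from readwriteh5

  call random_number(model)
  outfile = "inversion_results.h5"

  ! the first call creates the file (h5firsttimeread is .true.); later
  ! calls reopen the same file and add further datasets to it
  call writereal1darrh5(outfile, "model1d", model)
end program write_example

Because the first dummy argument is declared with a fixed length of 1024, the caller's file-name variable has to use that same length, as noted in the listing below.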

55  subroutine writereal1darrh5(outfile,dsetname,arr)
56  character(len=1024),intent(in) :: outfile
57  real(dp),intent(in) :: arr(:)
58  character(len=*),intent(in) :: dsetname
59 
60  ! filename length must be the same as in the dummy variable
61  integer(hid_t) :: file_id ! file identifier
62  integer(hid_t) :: dset_id ! dataset identifier
63  integer :: error ! error flag
64  integer(hid_t) :: dspace_id
65  integer(hsize_t), dimension(1) :: data_dims1
66  integer :: rank
67 
68  !! compression stuff
69  logical :: avail
70  integer :: filter_info
71  integer :: filter_info_both
72  integer(hid_t) :: dcpl
73  integer :: chunkingfactor
74  integer(hsize_t), dimension(1) :: chunk1
75 
76 
77  chunkingfactor = 10
78 
79  !##############################
80  !# initialize hdf5 #
81  !##############################
82  ! initialize fortran interface.
83  call h5open_f(error)
84  !-------------------------------------------------------------
85  ! check whether gzip compression is available and can be used for both
86  ! compression and decompression. the check is done explicitly rather
87  ! than assumed, because the deflate filter is an optional part of the
88  ! hdf5 library and may be missing from a given installation, in which
89  ! case the write is aborted.
90  call h5zfilter_avail_f(h5z_filter_deflate_f, avail, error)
91  if (.not.avail) then
92  write(*,'("gzip filter not available.",/)')
93  stop
94  endif
95  call h5zget_filter_info_f(h5z_filter_deflate_f, filter_info, error)
96  filter_info_both=ior(h5z_filter_encode_enabled_f,h5z_filter_decode_enabled_f)
97  if (filter_info .ne. filter_info_both) then
98  write(*,'("gzip filter not available for encoding and decoding.",/)')
99  stop
100  endif
101 
102  !-------------------------------------------------------------
103  if (h5firsttimeread.eqv..true.) then
104  call h5fcreate_f(trim(adjustl(outfile)), h5f_acc_trunc_f,file_id,error)
105  h5firsttimeread=.false.
106  else
107  ! open an existing file using the default properties.
108  call h5fopen_f(trim(adjustl(outfile)), h5f_acc_rdwr_f, file_id, error)
109  end if
110 
111 
112  !##############################
113  !# write arr #
114  !##############################
115  rank=1
116  data_dims1=shape(arr)
117  chunk1 = data_dims1/chunkingfactor
118  call checkchunking(chunk1)
119  ! create dataspace. setting maximum size to null sets the maximum
120  ! size to be the current size.
121  call h5screate_simple_f (rank, data_dims1, dspace_id, error)
122  !-------------------------------------------------------------
123  ! create the dataset creation property list, add the gzip
124  ! compression filter and set the chunk size.
125  call h5pcreate_f(h5p_dataset_create_f, dcpl, error)
126  call h5pset_deflate_f(dcpl, 9, error)
127  call h5pset_chunk_f(dcpl, rank, chunk1, error)
128  !-------------------------------------------------------------
129  ! create the dataset, passing the compression property list dcpl.
130  call h5dcreate_f(file_id, dsetname, h5t_ieee_f64le, dspace_id, &
131  dset_id, error, dcpl)
132  !-------------------------------------------------------------
133  ! write the data to the dataset.
134  call h5dwrite_f(dset_id,h5t_ieee_f64le, arr, data_dims1, error)
135  !-------------------------------------------------------------
136  ! close and release resources.
137  call h5pclose_f(dcpl,error)
138  !-------------------------------------------------------------
139  call h5sclose_f(dspace_id, error)
140  ! close the dataset.
141  call h5dclose_f(dset_id, error)
142 
143 
144  ! ##############################
145  ! # terminate hdf5 stuff #
146  ! ##############################
147  ! close the file.
148  call h5fclose_f(file_id, error)
149  ! close fortran interface.
150  call h5close_f(error)
151 
152  return
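
The chunk size is obtained by integer division, chunk1 = data_dims1/chunkingfactor with chunkingfactor = 10, so an array with fewer than 10 elements would produce a chunk dimension of 0, which h5pset_chunk_f does not accept. checkchunking is presumably the guard against this; its implementation is not shown on this page, but a minimal sketch of such a guard (hypothetical, for illustration only) could be:

subroutine checkchunking_sketch(chunk)
  ! hypothetical stand-in for checkchunking in rdwrhdf5.f08;
  ! the real routine may behave differently
  use hdf5, only: hsize_t
  implicit none
  integer(hsize_t), intent(inout) :: chunk(:)
  ! chunk dimensions passed to h5pset_chunk_f must be at least 1
  where (chunk < 1) chunk = 1
end subroutine checkchunking_sketch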
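For completeness, a dataset written by this routine can be read back with the plain HDF5 Fortran API; the sketch below is not part of KronLinInv (routine and argument names are illustrative), and gzip decompression is applied transparently as long as the deflate filter is available:

subroutine read_real1d_sketch(infile, dsetname, arr)
  use hdf5
  implicit none
  integer, parameter :: dp = kind(1.d0)  ! assumed to match the module's dp
  character(len=*), intent(in) :: infile, dsetname
  real(dp), intent(out) :: arr(:)
  integer(hid_t) :: file_id, dset_id
  integer(hsize_t), dimension(1) :: dims
  integer :: error

  dims = shape(arr)
  call h5open_f(error)
  call h5fopen_f(trim(adjustl(infile)), h5f_acc_rdonly_f, file_id, error)
  call h5dopen_f(file_id, dsetname, dset_id, error)
  ! the deflate filter decompresses the data automatically on read
  call h5dread_f(dset_id, h5t_native_double, arr, dims, error)
  call h5dclose_f(dset_id, error)
  call h5fclose_f(file_id, error)
  call h5close_f(error)
end subroutine read_real1d_sketch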