/**
 * @file HDF5WriteReadTimeSlice_test.cxx Application that tests and demonstrates
 * the write/read functions of the HDF5RawDataFile class.
 *
 * This is part of the DUNE DAQ Application Framework, copyright 2020.
 * Licensing/copyright details are in the COPYING file that you should have
 * received with this code.
 */

#include "hdf5libs/HDF5RawDataFile.hpp"
#include "hdf5libs/test/HDF5TestUtils.hpp"

#include "detdataformats/DetID.hpp"

#define BOOST_TEST_MODULE HDF5WriteReadTimeSlice_test // NOLINT

#include "boost/test/unit_test.hpp"

#include <chrono>
#include <cstdint>
#include <cstdlib>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <memory>
#include <regex>
#include <string>
#include <utility>
#include <vector>

#include <unistd.h> // getpid()

using namespace dunedaq::hdf5libs;

constexpr int run_number = 53;
constexpr int file_index = 0;
const std::string application_name = "HDF5WriteReadTimeSlice_test";
constexpr size_t fragment_size = 100;
constexpr size_t element_count_tpc = 4;
constexpr size_t element_count_pds = 4;
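// These two globals are filled in by the attribute-checking test cases below: the
// uncompressed case records the file's raw-data size first, and the compressed case
// then checks that compression actually reduced it.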
size_t compressed_raw_data_size = 0;
size_t uncompressed_raw_data_size = 0;

const size_t components_per_record = element_count_tpc + element_count_pds;

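// Helper that builds the HDF5FileLayoutParameters used when creating the file. These
// parameters control how records and datasets are named inside the HDF5 file: each
// record group uses the "TimeSlice" prefix with a six-digit record number and holds a
// "TimeSliceHeader" dataset plus one dataset per fragment under the configured
// detector group ("TPC", with a five-digit "Link" element suffix). The exact path
// format is defined by HDF5RawDataFile/HDF5FileLayout; this is only a sketch of what
// the parameters configure.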
HDF5FileLayoutParameters
create_file_layout_params()
{
  dunedaq::hdf5libs::HDF5PathParameters params_tpc;
  params_tpc.detector_group_type = "Detector_Readout";
  params_tpc.detector_group_name = "TPC";
  params_tpc.element_name_prefix = "Link";
  params_tpc.digits_for_element_number = 5;

  // dunedaq::hdf5libs::hdf5filelayout::PathParams params_pds;
  // params_pds.detector_group_type = "PDS";
  // params_pds.detector_group_name = "PDS";
  // params_pds.element_name_prefix = "Element";
  // params_pds.digits_for_element_number = 5;

  // note: for the unit-test JSON equality checks, 'PDS' would need to come before
  // 'TPC', since the file layout appears to come back in alphabetical order when read.
  std::vector<dunedaq::hdf5libs::HDF5PathParameters> param_list;
  // param_list.push_back(params_pds);
  param_list.push_back(params_tpc);

  dunedaq::hdf5libs::HDF5FileLayoutParameters layout_params;
  layout_params.path_params_list = param_list;
  layout_params.record_name_prefix = "TimeSlice";
  layout_params.digits_for_record_number = 6;
  layout_params.digits_for_sequence_number = 0;
  layout_params.record_header_dataset_name = "TimeSliceHeader";

  return layout_params;
}

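// Builds a dummy TimeSlice for the given timeslice number: one TimeSliceHeader plus
// element_count_tpc kWIB fragments and element_count_pds kDAPHNE fragments, each
// carrying fragment_size bytes of dummy payload. All fragments use the
// kDetectorReadout subsystem, with element ids 0..7 across the two loops.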
dunedaq::daqdataformats::TimeSlice
create_timeslice(int ts_num)
{
  // set up our dummy data
  std::vector<char> dummy_vector(fragment_size);
  char* dummy_data = dummy_vector.data();

  // get a timestamp for this timeslice
  int64_t timestamp =
    std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch())
      .count();

  // create TimeSliceHeader
  dunedaq::daqdataformats::TimeSliceHeader tsh;
  tsh.timeslice_number = ts_num;
  tsh.run_number = run_number;
  tsh.element_id = dunedaq::daqdataformats::SourceID(dunedaq::daqdataformats::SourceID::Subsystem::kTRBuilder, 0);

  // create our TimeSlice
  dunedaq::daqdataformats::TimeSlice ts(tsh);

  // loop over TPC elements
  for (size_t ele_num = 0; ele_num < element_count_tpc; ++ele_num) {

    // create our fragment
    dunedaq::daqdataformats::FragmentHeader fh;
    fh.trigger_number = ts_num;
    fh.trigger_timestamp = timestamp;
    fh.window_begin = timestamp;
    fh.window_end = timestamp;
    fh.run_number = run_number;
    fh.fragment_type =
      static_cast<dunedaq::daqdataformats::fragment_type_t>(dunedaq::daqdataformats::FragmentType::kWIB);
    fh.sequence_number = 0;
    fh.detector_id = static_cast<uint16_t>(dunedaq::detdataformats::DetID::Subdetector::kHD_TPC);
    fh.element_id =
      dunedaq::daqdataformats::SourceID(dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout, ele_num);

    std::unique_ptr<dunedaq::daqdataformats::Fragment> frag_ptr(
      new dunedaq::daqdataformats::Fragment(dummy_data, fragment_size));
    frag_ptr->set_header_fields(fh);

    // add fragment to the TimeSlice
    ts.add_fragment(std::move(frag_ptr));

  } // end loop over TPC elements

  // loop over PDS elements
  for (size_t ele_num = 0; ele_num < element_count_pds; ++ele_num) {

    // create our fragment
    dunedaq::daqdataformats::FragmentHeader fh;
    fh.trigger_number = ts_num;
    fh.trigger_timestamp = timestamp;
    fh.window_begin = timestamp;
    fh.window_end = timestamp;
    fh.run_number = run_number;
    fh.fragment_type =
      static_cast<dunedaq::daqdataformats::fragment_type_t>(dunedaq::daqdataformats::FragmentType::kDAPHNE);
    fh.sequence_number = 0;
    fh.detector_id = static_cast<uint16_t>(dunedaq::detdataformats::DetID::Subdetector::kHD_PDS);
    fh.element_id = dunedaq::daqdataformats::SourceID(dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout,
                                                      ele_num + element_count_tpc);

    std::unique_ptr<dunedaq::daqdataformats::Fragment> frag_ptr(
      new dunedaq::daqdataformats::Fragment(dummy_data, fragment_size));
    frag_ptr->set_header_fields(fh);

    // add fragment to the TimeSlice
    ts.add_fragment(std::move(frag_ptr));

  } // end loop over PDS elements

  return ts;
}

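// Test fixture: the constructor writes num_slices TimeSlices (each with
// components_per_record fragments) into a fresh HDF5 file in the system temp
// directory, using the requested compression level, and remembers the recorded size.
// The individual test cases then re-open that file read-only and check its attributes
// and datasets; the destructor removes the file again.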
struct FileWriteFixture
{
  FileWriteFixture(int num_slices = 5, unsigned comp_lvl = 0)
    : timeslice_count(num_slices),
      compression_level(comp_lvl),
      file_path(std::filesystem::temp_directory_path()),
      hdf5_filename(
        "demo" + std::to_string(getpid()) + "_"
        + std::string(getenv("USER")) + "_comp"
        + std::to_string(compression_level) + ".hdf5"),
      fl_pars(create_file_layout_params()),
      recorded_size_at_write(0)
  {
    delete_files_matching_pattern(file_path, hdf5_filename);

    // create the SourceID-to-GeoID map
    auto srcid_geoid_map = create_srcid_geoid_map();

    // create the file
    std::unique_ptr<HDF5RawDataFile> h5file_ptr(new HDF5RawDataFile(file_path + "/" + hdf5_filename,
                                                                    run_number,
                                                                    file_index,
                                                                    application_name,
                                                                    fl_pars,
                                                                    srcid_geoid_map,
                                                                    compression_level));

    // write several timeslices, each with several fragments
    for (int timeslice_number = 1; timeslice_number <= timeslice_count; ++timeslice_number)
      h5file_ptr->write(create_timeslice(timeslice_number));

    // get recorded size for checking
    recorded_size_at_write = h5file_ptr->get_recorded_size();

    h5file_ptr.reset(); // explicit destruction, which closes the file
  }

  ~FileWriteFixture()
  {
    delete_files_matching_pattern(file_path, hdf5_filename);
  }

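  // Re-opens the file and checks the top-level attributes written by HDF5RawDataFile
  // (recorded_size, run_number, file_index, application_name, record_type,
  // compression_level) against the values used at write time, and round-trips the
  // file layout parameters through their JSON representation.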
  void read_file_attributes()
  {
    // open the file for reading now
    h5file_ptr.reset(new HDF5RawDataFile(file_path + "/" + hdf5_filename));

    // check attributes
    auto recorded_size_attr = h5file_ptr->get_attribute<size_t>("recorded_size");
    auto run_number_attr = h5file_ptr->get_attribute<size_t>("run_number");
    auto file_index_attr = h5file_ptr->get_attribute<size_t>("file_index");
    auto app_name_attr = h5file_ptr->get_attribute<std::string>("application_name");
    auto record_type_attr = h5file_ptr->get_attribute<std::string>("record_type");
    auto compression_level_attr = h5file_ptr->get_attribute<unsigned>("compression_level");

    BOOST_REQUIRE_EQUAL(recorded_size_at_write, recorded_size_attr);
    BOOST_REQUIRE_EQUAL(run_number, run_number_attr);
    BOOST_REQUIRE_EQUAL(file_index, file_index_attr);
    BOOST_REQUIRE_EQUAL(application_name, app_name_attr);
    BOOST_REQUIRE_EQUAL("TimeSlice", record_type_attr);
    BOOST_REQUIRE_EQUAL(this->compression_level, compression_level_attr);

    // extract and check file layout parameters
    auto file_layout_parameters_read = h5file_ptr->get_file_layout().get_file_layout_params();
    BOOST_REQUIRE_EQUAL(fl_pars.to_json(), file_layout_parameters_read.to_json());

    // record the raw-data size so the compressed and uncompressed cases can be compared
    if (this->compression_level == 0) {
      uncompressed_raw_data_size = recorded_size_at_write;
    } else {
      compressed_raw_data_size = recorded_size_at_write;
      BOOST_ASSERT(compressed_raw_data_size < uncompressed_raw_data_size);
    }
  }

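  // Re-opens the file and walks its contents through the dataset-oriented accessors.
  // With the default fixture settings the expected counts are:
  //   timeslice_count * (1 + components_per_record) = 5 * (1 + 8) = 45 datasets total,
  //   of which 5 are TimeSliceHeaders and 5 * 8 = 40 are fragment datasets.
  // It then spot-checks header and fragment access by dataset path, by timeslice
  // number, by (record, sequence, detector group, element), and by SourceID.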
  void read_file_datasets()
  {
    // open the file for reading now
    h5file_ptr.reset(new HDF5RawDataFile(file_path + "/" + hdf5_filename));

    auto timeslices = h5file_ptr->get_all_timeslice_numbers();
    BOOST_REQUIRE_EQUAL(timeslice_count, timeslices.size());

    auto first_timeslice = *(timeslices.begin());
    auto last_timeslice = *(std::next(timeslices.begin(), timeslices.size() - 1));
    BOOST_REQUIRE_EQUAL(1, first_timeslice);
    BOOST_REQUIRE_EQUAL(timeslice_count, last_timeslice);

    auto all_datasets = h5file_ptr->get_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count * (1 + components_per_record), all_datasets.size());

    auto all_tsh_paths = h5file_ptr->get_timeslice_header_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count, all_tsh_paths.size());

    auto all_frag_paths = h5file_ptr->get_all_fragment_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count * components_per_record, all_frag_paths.size());

    // test header access by dataset name
    std::unique_ptr<dunedaq::daqdataformats::TimeSliceHeader> tsh_ptr;
    tsh_ptr = h5file_ptr->get_tsh_ptr(all_tsh_paths.at(2));
    BOOST_REQUIRE_EQUAL(tsh_ptr->timeslice_number, 3);
    BOOST_REQUIRE_EQUAL(tsh_ptr->run_number, run_number);

    // test header access by timeslice number
    tsh_ptr = h5file_ptr->get_tsh_ptr(2);
    BOOST_REQUIRE_EQUAL(tsh_ptr->timeslice_number, 2);
    BOOST_REQUIRE_EQUAL(tsh_ptr->run_number, run_number);

    std::unique_ptr<dunedaq::daqdataformats::Fragment> frag_ptr;

    // test fragment access by dataset name
    frag_ptr = h5file_ptr->get_frag_ptr(all_frag_paths.back());
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), last_timeslice);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);

    // test fragment access by record number, sequence number, detector group, element
    frag_ptr = h5file_ptr->get_frag_ptr(2, 0, "Detector_Readout", 0);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 2);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 0);

    // test fragment access by record number, sequence number, detector group, element
    frag_ptr = h5file_ptr->get_frag_ptr(4, 0, "Detector_Readout", 4);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 4);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 4);

    // test fragment access by passing in a SourceID
    dunedaq::daqdataformats::SourceID gid = { dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout, 1 };
    frag_ptr = h5file_ptr->get_frag_ptr(5, 0, gid);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 5);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 1);
  }

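  // Same access pattern as read_file_datasets(). Since digits_for_sequence_number is 0
  // in the file layout used here, timeslice records presumably carry no sequence-number
  // suffix, so the sequence-number argument of 0 in the accessors below selects the
  // only sequence available for each record.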
  void read_file_max_sequence()
  {
    // open the file for reading now
    h5file_ptr.reset(new HDF5RawDataFile(file_path + "/" + hdf5_filename));

    auto timeslices = h5file_ptr->get_all_timeslice_numbers();
    BOOST_REQUIRE_EQUAL(timeslice_count, timeslices.size());

    auto first_timeslice = *(timeslices.begin());
    auto last_timeslice = *(std::next(timeslices.begin(), timeslices.size() - 1));
    BOOST_REQUIRE_EQUAL(1, first_timeslice);
    BOOST_REQUIRE_EQUAL(timeslice_count, last_timeslice);

    auto all_datasets = h5file_ptr->get_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count * (1 + components_per_record), all_datasets.size());

    auto all_tsh_paths = h5file_ptr->get_timeslice_header_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count, all_tsh_paths.size());

    auto all_frag_paths = h5file_ptr->get_all_fragment_dataset_paths();
    BOOST_REQUIRE_EQUAL(timeslice_count * components_per_record, all_frag_paths.size());

    // test header access by dataset name
    std::unique_ptr<dunedaq::daqdataformats::TimeSliceHeader> tsh_ptr;
    tsh_ptr = h5file_ptr->get_tsh_ptr(all_tsh_paths.at(2));
    BOOST_REQUIRE_EQUAL(tsh_ptr->timeslice_number, 3);
    BOOST_REQUIRE_EQUAL(tsh_ptr->run_number, run_number);

    // test header access by timeslice number
    tsh_ptr = h5file_ptr->get_tsh_ptr(2);
    BOOST_REQUIRE_EQUAL(tsh_ptr->timeslice_number, 2);
    BOOST_REQUIRE_EQUAL(tsh_ptr->run_number, run_number);

    std::unique_ptr<dunedaq::daqdataformats::Fragment> frag_ptr;

    // test fragment access by dataset name
    frag_ptr = h5file_ptr->get_frag_ptr(all_frag_paths.back());
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), last_timeslice);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);

    // test fragment access by record number, sequence number, detector group, element
    frag_ptr = h5file_ptr->get_frag_ptr(2, 0, "Detector_Readout", 0);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 2);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 0);

    // test fragment access by record number, sequence number, detector group, element
    frag_ptr = h5file_ptr->get_frag_ptr(4, 0, "Detector_Readout", 4);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 4);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 4);

    // test fragment access by passing in a SourceID
    dunedaq::daqdataformats::SourceID gid = { dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout, 1 };
    frag_ptr = h5file_ptr->get_frag_ptr(5, 0, gid);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_trigger_number(), 5);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_run_number(), run_number);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().subsystem,
                        dunedaq::daqdataformats::SourceID::Subsystem::kDetectorReadout);
    BOOST_REQUIRE_EQUAL(frag_ptr->get_element_id().id, 1);
  }

  int timeslice_count;
  unsigned compression_level;
  std::string file_path;
  std::string hdf5_filename;
  HDF5FileLayoutParameters fl_pars;
  size_t recorded_size_at_write;
  std::unique_ptr<HDF5RawDataFile> h5file_ptr;
};

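// The test cases run in registration order (Boost.Test's default), so each
// uncompressed case runs before its compressed counterpart. The attribute cases rely
// on this: the uncompressed run fills uncompressed_raw_data_size before the compressed
// run compares against it.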
BOOST_AUTO_TEST_SUITE(HDF5WriteReadTimeSlice_test)

BOOST_AUTO_TEST_CASE(ReadFileAttributes)
{
  FileWriteFixture fixture(5, 0);
  fixture.read_file_attributes();
}

BOOST_AUTO_TEST_CASE(ReadCompressedFileAttributes)
{
  FileWriteFixture fixture(5, 1);
  fixture.read_file_attributes();
}

BOOST_AUTO_TEST_CASE(ReadFileDatasets)
{
  FileWriteFixture fixture(5, 0);
  fixture.read_file_datasets();
}

BOOST_AUTO_TEST_CASE(ReadCompressedFileDatasets)
{
  FileWriteFixture fixture(5, 1);
  fixture.read_file_datasets();
}

BOOST_AUTO_TEST_CASE(ReadFileMaxSequence)
{
  FileWriteFixture fixture(5, 0);
  fixture.read_file_max_sequence();
}

BOOST_AUTO_TEST_CASE(ReadCompressedFileMaxSequence)
{
  FileWriteFixture fixture(5, 1);
  fixture.read_file_max_sequence();
}

BOOST_AUTO_TEST_SUITE_END()
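// Usage note (assumptions: the binary name and CTest registration follow the usual
// CMake setup of DUNE DAQ packages): run the built HDF5WriteReadTimeSlice_test
// executable directly, optionally with standard Boost.Test options such as
// --log_level=all or --run_test=HDF5WriteReadTimeSlice_test/ReadFileDatasets,
// or run `ctest -R HDF5WriteReadTimeSlice_test` from the build area.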