DUNE-DAQ
DUNE Trigger and Data Acquisition software
HDF5RawDataFile.cpp

#include "hdf5libs/HDF5RawDataFile.hpp"

#include "logging/Logging.hpp"

#include <algorithm>
#include <filesystem>
#include <map>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

namespace dunedaq {
namespace hdf5libs {

constexpr uint32_t MAX_FILELAYOUT_VERSION = 4294967295; // NOLINT(build/unsigned)

// constructor for files that will be written
HDF5RawDataFile::HDF5RawDataFile(std::string file_name,
                                 daqdataformats::run_number_t run_number,
                                 size_t file_index,
                                 std::string application_name,
                                 const HDF5FileLayoutParameters& fl_params,
                                 const HDF5SourceIDHandler::source_id_geo_id_map_t& srcid_geoid_map,
                                 unsigned compression_level,
                                 std::string inprogress_filename_suffix,
                                 unsigned open_flags)
  : m_bare_file_name(file_name)
  , m_compression_level(compression_level)
  , m_open_flags(open_flags)
{

  // check and make sure that the file isn't ReadOnly
  if (m_open_flags == HighFive::File::ReadOnly) {
    throw IncompatibleOpenFlags(ERS_HERE, file_name, m_open_flags);
  }

  auto filename_to_open = m_bare_file_name + inprogress_filename_suffix;

  // do the file open
  try {
    m_file_ptr.reset(new HighFive::File(filename_to_open, m_open_flags));
  } catch (std::exception const& excpt) {
    throw FileOpenFailed(ERS_HERE, filename_to_open, excpt.what());
  }

  m_recorded_size = 0;
  m_uncompressed_raw_data_size = 0;
  m_total_file_size = 0;

  size_t file_creation_timestamp =
    std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch())
      .count();

  TLOG_DEBUG(TLVL_BASIC) << "Created HDF5 file (" << file_name << ") at time " << file_creation_timestamp << ".";

  // write some file attributes
  write_attribute("run_number", run_number);
  write_attribute("file_index", file_index);
  write_attribute("creation_timestamp", file_creation_timestamp);
  write_attribute("application_name", application_name);

  // set the file layout contents
  m_file_layout_ptr.reset(new HDF5FileLayout(fl_params));
  write_file_layout();

  // write the SourceID-related attributes
  // HDF5SourceIDHandler::populate_source_id_geo_id_map(srcid_geoid_map, m_file_level_source_id_geo_id_map);
  m_file_level_source_id_geo_id_map = srcid_geoid_map;
  HDF5SourceIDHandler::store_file_level_geo_id_info(*m_file_ptr, m_file_level_source_id_geo_id_map);

  // write the record type
  m_record_type = fl_params.record_name_prefix;
  write_attribute("record_type", m_record_type);

  // write the compression level
  write_attribute("compression_level", m_compression_level);
}
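
// A minimal creation sketch (illustrative only: the file name, layout
// parameters, and source_id-to-geo_id map below are placeholder assumptions,
// not values mandated by this codebase):
//
//   HDF5FileLayoutParameters layout_params;               // default layout
//   HDF5SourceIDHandler::source_id_geo_id_map_t geo_map;  // empty map is fine
//   HDF5RawDataFile output_file("run000123_0000.hdf5",
//                               123,        // run number
//                               0,          // file index
//                               "my_app",   // application name
//                               layout_params,
//                               geo_map,
//                               0,          // compression level (0 = off)
//                               ".writing", // in-progress filename suffix
//                               HighFive::File::Create);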

HDF5RawDataFile::~HDF5RawDataFile()
{
  if (m_file_ptr.get() != nullptr && m_open_flags != HighFive::File::ReadOnly) {
    if (!m_file_ptr->hasAttribute("recorded_size")) {
      write_attribute("recorded_size", m_recorded_size);
    }

    if (!m_file_ptr->hasAttribute("uncompressed_raw_data_size")) {
      write_attribute("uncompressed_raw_data_size", m_uncompressed_raw_data_size);
    }

    if (!m_file_ptr->hasAttribute("total_file_size")) {
      write_attribute("total_file_size", m_total_file_size);
    }

    if (!m_file_ptr->hasAttribute("closing_timestamp")) {
      size_t file_closing_timestamp =
        std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch())
          .count();
      write_attribute("closing_timestamp", file_closing_timestamp);
    }

    m_file_ptr->flush();

    // rename the file to its bare name, if needed
    if (m_file_ptr->getName() != m_bare_file_name) {
      std::filesystem::rename(m_file_ptr->getName(), m_bare_file_name);
    }
  }

  // explicit destruction; not strictly needed, but nice to be clear...
  m_file_ptr.reset();
  m_file_layout_ptr.reset();
}

// get the names of the attributes stored at the top level of the file
std::vector<std::string>
HDF5RawDataFile::get_attribute_names()
{
  return m_file_ptr->listAttributeNames();
}

// write a TriggerRecord to the file
void
HDF5RawDataFile::write(const daqdataformats::TriggerRecord& tr)
{
  // the source_id_path map that we will build up as we write the TR header
  // and fragments (and then write into the HDF5 record-level Group)
  HDF5SourceIDHandler::source_id_path_map_t source_id_path_map;

  // the map of fragment types to SourceIDs
  HDF5SourceIDHandler::fragment_type_source_id_map_t fragment_type_source_id_map;

  // the map of subdetectors to SourceIDs
  HDF5SourceIDHandler::subdetector_source_id_map_t subdetector_source_id_map;

  // write the record header into the HDF5 file/group
  HighFive::Group record_level_group = write(tr.get_header_ref(), source_id_path_map);

  // store the SourceID of the record header in the HDF5 file/group
  // (since there should only be one entry in the map at this point, we'll take advantage of that...)
  for (auto const& source_id_path : source_id_path_map) {
    HDF5SourceIDHandler::store_record_header_source_id(record_level_group, source_id_path.first);
  }

  // write all of the fragments into the HDF5 file/group
  for (auto const& frag_ptr : tr.get_fragments_ref()) {
    write(*frag_ptr, source_id_path_map);
    HDF5SourceIDHandler::add_fragment_type_source_id_to_map(
      fragment_type_source_id_map, frag_ptr->get_fragment_type(), frag_ptr->get_element_id());
    HDF5SourceIDHandler::add_subdetector_source_id_to_map(
      subdetector_source_id_map,
      static_cast<detdataformats::DetID::Subdetector>(frag_ptr->get_detector_id()),
      frag_ptr->get_element_id());
  }

  // store all of the record-level maps in the HDF5 file/group
  HDF5SourceIDHandler::store_record_level_path_info(record_level_group, source_id_path_map);
  HDF5SourceIDHandler::store_record_level_fragment_type_map(record_level_group, fragment_type_source_id_map);
  HDF5SourceIDHandler::store_record_level_subdetector_map(record_level_group, subdetector_source_id_map);
}
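
// A minimal write-side sketch (assumes 'output_file' was created as in the
// constructor example above and 'tr' is a fully assembled TriggerRecord):
//
//   output_file.write(tr);
//   // the header, all fragments, and the record-level SourceID maps are
//   // stored in one call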

// write a TimeSlice to the file
void
HDF5RawDataFile::write(const daqdataformats::TimeSlice& ts)
{
  std::string tsh_path = m_file_layout_ptr->get_path_string(ts.get_header());
  if (m_file_ptr->exist(tsh_path)) {
    throw TimeSliceAlreadyExists(ERS_HERE, tsh_path);
  }

  // the source_id_path map that we will build up as we write the TimeSlice
  // header and fragments (and then write into the HDF5 record-level Group)
  HDF5SourceIDHandler::source_id_path_map_t source_id_path_map;

  // the map of fragment types to SourceIDs
  HDF5SourceIDHandler::fragment_type_source_id_map_t fragment_type_source_id_map;

  // the map of subdetectors to SourceIDs
  HDF5SourceIDHandler::subdetector_source_id_map_t subdetector_source_id_map;

  // write the record header into the HDF5 file/group
  HighFive::Group record_level_group = write(ts.get_header(), source_id_path_map);

  // store the SourceID of the record header in the HDF5 file/group
  // (since there should only be one entry in the map at this point, we'll take advantage of that...)
  for (auto const& source_id_path : source_id_path_map) {
    HDF5SourceIDHandler::store_record_header_source_id(record_level_group, source_id_path.first);
  }

  // write all of the fragments into the HDF5 file/group
  for (auto const& frag_ptr : ts.get_fragments_ref()) {
    write(*frag_ptr, source_id_path_map);
    HDF5SourceIDHandler::add_fragment_type_source_id_to_map(
      fragment_type_source_id_map, frag_ptr->get_fragment_type(), frag_ptr->get_element_id());
    HDF5SourceIDHandler::add_subdetector_source_id_to_map(
      subdetector_source_id_map,
      static_cast<detdataformats::DetID::Subdetector>(frag_ptr->get_detector_id()),
      frag_ptr->get_element_id());
  }

  // store all of the record-level maps in the HDF5 file/group
  HDF5SourceIDHandler::store_record_level_path_info(record_level_group, source_id_path_map);
  HDF5SourceIDHandler::store_record_level_fragment_type_map(record_level_group, fragment_type_source_id_map);
  HDF5SourceIDHandler::store_record_level_subdetector_map(record_level_group, subdetector_source_id_map);
}

// write a TriggerRecordHeader to the file, updating the path map
HighFive::Group
HDF5RawDataFile::write(const daqdataformats::TriggerRecordHeader& trh,
                       HDF5SourceIDHandler::source_id_path_map_t& path_map)
{
  std::tuple<size_t, std::string, HighFive::Group> write_results =
    do_write(m_file_layout_ptr->get_path_elements(trh),
             static_cast<const char*>(trh.get_storage_location()),
             trh.get_total_size_bytes(),
             m_compression_level);
  m_recorded_size += std::get<0>(write_results);
  HDF5SourceIDHandler::add_source_id_path_to_map(path_map, trh.get_header().element_id, std::get<1>(write_results));
  return std::get<2>(write_results);
}

// write a TimeSliceHeader to the file, updating the path map
HighFive::Group
HDF5RawDataFile::write(const daqdataformats::TimeSliceHeader& tsh, HDF5SourceIDHandler::source_id_path_map_t& path_map)
{
  std::tuple<size_t, std::string, HighFive::Group> write_results =
    do_write(m_file_layout_ptr->get_path_elements(tsh),
             reinterpret_cast<const char*>(&tsh),
             sizeof(daqdataformats::TimeSliceHeader),
             m_compression_level);
  m_recorded_size += std::get<0>(write_results);
  HDF5SourceIDHandler::add_source_id_path_to_map(path_map, tsh.element_id, std::get<1>(write_results));
  return std::get<2>(write_results);
}

// write a Fragment to the file, updating the path map
void
HDF5RawDataFile::write(const daqdataformats::Fragment& frag, HDF5SourceIDHandler::source_id_path_map_t& path_map)
{
  std::tuple<size_t, std::string, HighFive::Group> write_results =
    do_write(m_file_layout_ptr->get_path_elements(frag.get_header()),
             static_cast<const char*>(frag.get_storage_location()),
             frag.get_size(),
             m_compression_level);
  m_recorded_size += std::get<0>(write_results);

  daqdataformats::SourceID source_id = frag.get_element_id();
  HDF5SourceIDHandler::add_source_id_path_to_map(path_map, source_id, std::get<1>(write_results));
}

// write the file layout parameters and version as file-level attributes
void
HDF5RawDataFile::write_file_layout()
{
  auto fl_json = m_file_layout_ptr->get_file_layout_params().to_json();
  write_attribute("filelayout_params", fl_json.dump());
  write_attribute("filelayout_version", m_file_layout_ptr->get_version());
}

// write the specified data payload into the HDF5 group/dataset hierarchy
std::tuple<size_t, std::string, HighFive::Group>
HDF5RawDataFile::do_write(std::vector<std::string> const& group_and_dataset_path_elements,
                          const char* raw_data_ptr,
                          size_t raw_data_size_bytes,
                          unsigned compression_level)
{
  const std::string dataset_name = group_and_dataset_path_elements.back();

  // create the top-level group, if needed
  std::string const& top_level_group_name = group_and_dataset_path_elements.at(0);
  if (!m_file_ptr->exist(top_level_group_name))
    m_file_ptr->createGroup(top_level_group_name);

  // set up the sub_group to work with
  HighFive::Group sub_group = m_file_ptr->getGroup(top_level_group_name);
  if (!sub_group.isValid()) {
    throw InvalidHDF5Group(ERS_HERE, top_level_group_name);
  }
  HighFive::Group top_level_group = sub_group;

  // create the remaining subgroups
  // (we stop at size()-1 because the last path element is the dataset)
  for (size_t idx = 1; idx < group_and_dataset_path_elements.size() - 1; ++idx) {
    std::string const& child_group_name = group_and_dataset_path_elements[idx];
    if (child_group_name.empty()) {
      throw InvalidHDF5Group(ERS_HERE, child_group_name);
    }
    if (!sub_group.exist(child_group_name)) {
      sub_group.createGroup(child_group_name);
    }
    HighFive::Group child_group = sub_group.getGroup(child_group_name);
    if (!child_group.isValid()) {
      throw InvalidHDF5Group(ERS_HERE, child_group_name);
    }
    sub_group = child_group;
  }

  // create the dataset
  HighFive::DataSpace data_space = HighFive::DataSpace({ raw_data_size_bytes, 1 });
  HighFive::DataSetCreateProps data_set_create_props;
  HighFive::DataSetAccessProps data_set_access_props;

  if (compression_level > 0) {
    // a single chunk spanning the whole dataset, compressed with Deflate (gzip)
    std::vector<hsize_t> chunk_size = { raw_data_size_bytes, 1 };
    data_set_create_props.add(HighFive::Chunking(chunk_size));
    data_set_create_props.add(HighFive::Deflate(compression_level));
  }

  m_uncompressed_raw_data_size += raw_data_size_bytes;

  auto data_set = sub_group.createDataSet<char>(dataset_name, data_space, data_set_create_props, data_set_access_props);

  if (data_set.isValid()) {
    data_set.write_raw(raw_data_ptr);
    m_total_file_size = m_file_ptr->getFileSize();
    m_file_ptr->flush();
    return std::make_tuple(data_set.getStorageSize(), data_set.getPath(), top_level_group);
  } else {
    throw InvalidHDF5Dataset(ERS_HERE, dataset_name, m_file_ptr->getName());
  }
}
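
// Design note with a small sketch: because each payload is written as a
// single chunk, Deflate compresses the whole dataset in one shot, and
// getStorageSize() reports the on-disk (compressed) size while
// m_uncompressed_raw_data_size accumulates the raw size. A reader can
// therefore estimate the achieved compression ratio from the file-level
// attributes (a sketch, assuming get_attribute<T>() is accessible to the
// caller):
//
//   HDF5RawDataFile input_file("run000123_0000.hdf5");
//   auto raw = input_file.get_attribute<size_t>("uncompressed_raw_data_size");
//   auto rec = input_file.get_attribute<size_t>("recorded_size");
//   double ratio = rec > 0 ? static_cast<double>(raw) / rec : 0.0;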

// constructor for opening an existing file (read-only by default)
HDF5RawDataFile::HDF5RawDataFile(const std::string& file_name, bool allow_writing)
  : m_open_flags(HighFive::File::ReadOnly)
{
  if (allow_writing) {
    m_open_flags = HighFive::File::ReadWrite;
  }
  m_bare_file_name = file_name;
  size_t pos = m_bare_file_name.rfind(s_inprogress_suffix);
  if (pos != std::string::npos) {
    m_bare_file_name.erase(pos);
  }

  // do the file open
  try {
    m_file_ptr = std::make_unique<HighFive::File>(file_name, m_open_flags);
  } catch (std::exception const& excpt) {
    throw FileOpenFailed(ERS_HERE, file_name, excpt.what());
  }

  if (m_file_ptr->hasAttribute("recorded_size"))
    m_recorded_size = get_attribute<size_t>("recorded_size");
  else
    m_recorded_size = 0;

  if (m_file_ptr->hasAttribute("uncompressed_raw_data_size"))
    m_uncompressed_raw_data_size = get_attribute<size_t>("uncompressed_raw_data_size");
  else
    m_uncompressed_raw_data_size = 0;

  if (m_file_ptr->hasAttribute("total_file_size"))
    m_total_file_size = get_attribute<size_t>("total_file_size");
  else
    m_total_file_size = 0;

  if (m_file_ptr->hasAttribute("compression_level"))
    m_compression_level = get_attribute<unsigned>("compression_level");
  else
    m_compression_level = 0;

  read_file_layout();

  if (m_file_ptr->hasAttribute("record_type"))
    m_record_type = get_attribute<std::string>("record_type");
  else
    m_record_type = m_file_layout_ptr->get_record_name_prefix();

  check_file_layout();

  // HDF5SourceIDHandler operations need to come *after* read_file_layout()
  // because they count on the filelayout_version, which is set in read_file_layout().
  HDF5SourceIDHandler sid_handler(get_version());
  sid_handler.fetch_file_level_geo_id_info(*m_file_ptr, m_file_level_source_id_geo_id_map);
}
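
// A minimal read-only usage sketch (the file name is a placeholder):
//
//   HDF5RawDataFile input_file("run000123_0000.hdf5");
//   for (auto const& rid : input_file.get_all_record_ids()) {
//     auto trh_ptr = input_file.get_trh_ptr(rid);
//     // ... inspect the TriggerRecordHeader ...
//   }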
401
402void
403HDF5RawDataFile::read_file_layout()
404{
405 HDF5FileLayoutParameters fl_params;
406 uint32_t version = 0; // NOLINT(build/unsigned)
407
408 std::string fl_str;
409 try {
410 fl_str = get_attribute<std::string>("filelayout_params");
411 nlohmann::json fl_json = nlohmann::json::parse(fl_str);
412 fl_params = HDF5FileLayoutParameters(fl_json);
413
414 version = get_attribute<uint32_t>("filelayout_version"); // NOLINT(build/unsigned)
415
416 } catch (InvalidHDF5Attribute const&) {
417 ers::info(MissingFileLayout(ERS_HERE, version));
418 }
419
420 // now reset the HDF5Filelayout object
421 m_file_layout_ptr.reset(new HDF5FileLayout(fl_params, version));
422}

void
HDF5RawDataFile::check_file_layout()
{
  if (get_version() < 2)
    return;

  std::string record_type = get_attribute<std::string>("record_type");
  if (record_type.compare(m_file_layout_ptr->get_record_name_prefix()) != 0)
    throw BadRecordType(ERS_HERE, record_type, m_file_layout_ptr->get_record_name_prefix());
}

void
HDF5RawDataFile::check_record_type(std::string rt_name)
{
  if (get_version() < 2)
    return;

  if (m_file_layout_ptr->get_record_name_prefix().compare(rt_name) != 0)
    throw WrongRecordTypeRequested(ERS_HERE, rt_name, m_file_layout_ptr->get_record_name_prefix());
}

// HDF5 utility function to recursively traverse a file
void
HDF5RawDataFile::explore_subgroup(const HighFive::Group& parent_group,
                                  std::string relative_path,
                                  std::vector<std::string>& path_list)
{
  // strip any trailing '/' from the relative path
  if (relative_path.size() > 0 && relative_path.compare(relative_path.size() - 1, 1, "/") == 0)
    relative_path.pop_back();

  std::vector<std::string> child_names = parent_group.listObjectNames();

  for (auto& child_name : child_names) {
    std::string full_path = relative_path + "/" + child_name;
    HighFive::ObjectType child_type = parent_group.getObjectType(child_name);

    if (child_type == HighFive::ObjectType::Dataset) {
      path_list.push_back(full_path);
    } else if (child_type == HighFive::ObjectType::Group) {
      HighFive::Group child_group = parent_group.getGroup(child_name);
      // start the recursion
      explore_subgroup(child_group, full_path, path_list);
    }
  }
}

void
HDF5RawDataFile::add_record_level_info_to_caches_if_needed(record_id_t rid)
{
  // we should probably check that all relevant caches have an entry for the
  // specified record ID, but we will just check one, in the interest of
  // performance, and trust the "else" part of this routine to fill in *all*
  // of the appropriate caches
  if (m_source_id_path_cache.count(rid) != 0) {
    return;
  }

  // create the handler to do the work
  HDF5SourceIDHandler sid_handler(get_version());

  // determine the HDF5 Group that corresponds to the specified record
  std::string record_level_group_name = m_file_layout_ptr->get_record_number_string(rid.first, rid.second);
  HighFive::Group record_group = m_file_ptr->getGroup(record_level_group_name);
  if (!record_group.isValid()) {
    throw InvalidHDF5Group(ERS_HERE, record_level_group_name);
  }

  // start with a copy of the file-level source-id-to-geo-id map and give the
  // handler an opportunity to add any record-level additions
  HDF5SourceIDHandler::source_id_geo_id_map_t local_source_id_geo_id_map = m_file_level_source_id_geo_id_map;
  sid_handler.fetch_record_level_geo_id_info(record_group, local_source_id_geo_id_map);

  // fetch the record-level source-id-to-path map
  HDF5SourceIDHandler::source_id_path_map_t source_id_path_map;
  sid_handler.fetch_source_id_path_info(record_group, source_id_path_map);

  // fetch the record-level fragment-type-to-source-id map
  HDF5SourceIDHandler::fragment_type_source_id_map_t fragment_type_source_id_map;
  sid_handler.fetch_fragment_type_source_id_info(record_group, fragment_type_source_id_map);

  // fetch the record-level subdetector-to-source-id map
  HDF5SourceIDHandler::subdetector_source_id_map_t subdetector_source_id_map;
  sid_handler.fetch_subdetector_source_id_info(record_group, subdetector_source_id_map);

  // loop through the source-id-to-path map to create various lists of SourceIDs in the record
  daqdataformats::SourceID rh_sid = sid_handler.fetch_record_header_source_id(record_group);
  std::set<daqdataformats::SourceID> full_source_id_set;
  std::set<daqdataformats::SourceID> fragment_source_id_set;
  HDF5SourceIDHandler::subsystem_source_id_map_t subsystem_source_id_map;
  for (auto const& source_id_path : source_id_path_map) {
    full_source_id_set.insert(source_id_path.first);
    if (source_id_path.first != rh_sid) {
      fragment_source_id_set.insert(source_id_path.first);
    }
    HDF5SourceIDHandler::add_subsystem_source_id_to_map(
      subsystem_source_id_map, source_id_path.first.subsystem, source_id_path.first);
  }

  // note that even if the "fetch" methods above fail to add anything to the specified
  // maps, the maps will still be valid (though, possibly empty), and once we add them
  // to the caches here, we will be assured that lookups from the caches will not fail.
  m_source_id_cache[rid] = full_source_id_set;
  m_record_header_source_id_cache[rid] = rh_sid;
  m_fragment_source_id_cache[rid] = fragment_source_id_set;
  m_source_id_geo_id_cache[rid] = local_source_id_geo_id_map;
  m_source_id_path_cache[rid] = source_id_path_map;
  m_subsystem_source_id_cache[rid] = subsystem_source_id_map;
  m_fragment_type_source_id_cache[rid] = fragment_type_source_id_map;
  m_subdetector_source_id_cache[rid] = subdetector_source_id_map;
}

// get the list of dataset paths, optionally restricted to a top-level group
std::vector<std::string>
HDF5RawDataFile::get_dataset_paths(std::string top_level_group_name)
{
  if (top_level_group_name.empty())
    top_level_group_name = m_file_ptr->getPath();

  // vector containing the path list to the HDF5 datasets
  std::vector<std::string> path_list;

  HighFive::Group parent_group = m_file_ptr->getGroup(top_level_group_name);
  if (!parent_group.isValid())
    throw InvalidHDF5Group(ERS_HERE, top_level_group_name);

  explore_subgroup(parent_group, top_level_group_name, path_list);

  return path_list;
}

// get the set of all record IDs (record number, sequence number) in the file
HDF5RawDataFile::record_id_set // NOLINT(build/unsigned)
HDF5RawDataFile::get_all_record_ids()
{
  if (!m_all_record_ids_in_file.empty())
    return m_all_record_ids_in_file;

  // records are at the top level

  HighFive::Group parent_group = m_file_ptr->getGroup(m_file_ptr->getPath());

  std::vector<std::string> child_names = parent_group.listObjectNames();
  const std::string record_prefix = m_file_layout_ptr->get_record_name_prefix();
  const size_t record_prefix_size = record_prefix.size();

  for (auto const& name : child_names) {
    auto loc = name.find(record_prefix);

    if (loc == std::string::npos)
      continue;

    auto rec_num_string = name.substr(loc + record_prefix_size);

    loc = rec_num_string.find('.');
    if (loc == std::string::npos) {
      m_all_record_ids_in_file.insert(std::make_pair(std::stoll(rec_num_string), 0));
    } else {
      auto seq_num_string = rec_num_string.substr(loc + 1);
      rec_num_string.resize(loc); // remove anything from '.' onwards
      m_all_record_ids_in_file.insert(std::make_pair(std::stoll(rec_num_string), std::stoi(seq_num_string)));
    }

  } // end loop over child_names

  return m_all_record_ids_in_file;
}
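
// A record_id_t is a (record number, sequence number) pair parsed from the
// group name; e.g., a top-level group named "TriggerRecord00042.0003" (name
// format illustrative) yields record ID (42, 3). A quick iteration sketch:
//
//   for (auto const& [rec_num, seq_num] : input_file.get_all_record_ids()) {
//     // ... look up headers/fragments for (rec_num, seq_num) ...
//   }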

std::set<uint64_t>
HDF5RawDataFile::get_all_record_numbers() // NOLINT(build/unsigned)
{
  ers::warning(DeprecatedUsage(ERS_HERE,
                               "get_all_record_numbers()",
                               "Use get_all_record_ids(), which returns a record_number,sequence_number pair."));

  std::set<uint64_t> record_numbers; // NOLINT(build/unsigned)
  for (auto const& rid : get_all_record_ids())
    record_numbers.insert(rid.first);

  return record_numbers;
}

HDF5RawDataFile::record_id_set
HDF5RawDataFile::get_all_trigger_record_ids()
{
  check_record_type("TriggerRecord");
  return get_all_record_ids();
}

std::set<daqdataformats::trigger_number_t>
HDF5RawDataFile::get_all_trigger_record_numbers()
{
  ers::warning(
    DeprecatedUsage(ERS_HERE,
                    "get_all_trigger_record_numbers()",
                    "Use get_all_trigger_record_ids(), which returns a record_number,sequence_number pair."));

  return get_all_record_numbers();
}

HDF5RawDataFile::record_id_set
HDF5RawDataFile::get_all_timeslice_ids()
{
  check_record_type("TimeSlice");
  return get_all_record_ids();
}

std::set<daqdataformats::timeslice_number_t>
HDF5RawDataFile::get_all_timeslice_numbers()
{
  check_record_type("TimeSlice");
  return get_all_record_numbers();
}

// get the list of record-header dataset paths
std::vector<std::string>
HDF5RawDataFile::get_record_header_dataset_paths()
{
  std::vector<std::string> rec_paths;

  if (get_version() >= 2) {
    for (auto const& rec_id : get_all_record_ids())
      rec_paths.push_back(get_record_header_dataset_path(rec_id));
  } else {
    for (auto const& path : get_dataset_paths()) {
      if (path.find(m_file_layout_ptr->get_record_header_dataset_name()) != std::string::npos) {
        rec_paths.push_back(path);
      }
    }
  }

  return rec_paths;
}

std::vector<std::string>
HDF5RawDataFile::get_trigger_record_header_dataset_paths()
{
  check_record_type("TriggerRecord");
  return get_record_header_dataset_paths();
}

std::vector<std::string>
HDF5RawDataFile::get_timeslice_header_dataset_paths()
{
  check_record_type("TimeSlice");
  return get_record_header_dataset_paths();
}

std::string
HDF5RawDataFile::get_record_header_dataset_path(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  if (get_version() <= 2) {
    return (m_file_ptr->getPath() + m_file_layout_ptr->get_record_header_path(rid.first, rid.second));
  } else {
    daqdataformats::SourceID source_id = get_record_header_source_id(rid);
    return m_source_id_path_cache[rid][source_id];
  }
}

std::string
HDF5RawDataFile::get_record_header_dataset_path(const uint64_t rec_num, // NOLINT(build/unsigned)
                                                const daqdataformats::sequence_number_t seq_num)
{
  return get_record_header_dataset_path(std::make_pair(rec_num, seq_num));
}

std::string
HDF5RawDataFile::get_trigger_record_header_dataset_path(const record_id_t& rid)
{
  check_record_type("TriggerRecord");
  return get_record_header_dataset_path(rid);
}

std::string
HDF5RawDataFile::get_trigger_record_header_dataset_path(const daqdataformats::trigger_number_t trig_num,
                                                        const daqdataformats::sequence_number_t seq_num)
{
  check_record_type("TriggerRecord");
  return get_record_header_dataset_path(trig_num, seq_num);
}

std::string
HDF5RawDataFile::get_timeslice_header_dataset_path(const record_id_t& rid)
{
  check_record_type("TimeSlice");
  return get_record_header_dataset_path(rid.first, 0);
}

std::string
HDF5RawDataFile::get_timeslice_header_dataset_path(const daqdataformats::timeslice_number_t ts_num)
{
  check_record_type("TimeSlice");
  return get_record_header_dataset_path(ts_num);
}

// get all fragment dataset paths in the file
std::vector<std::string>
HDF5RawDataFile::get_all_fragment_dataset_paths()
{
  std::vector<std::string> frag_paths;

  for (auto const& path : get_dataset_paths()) {
    if (path.find(m_file_layout_ptr->get_record_header_dataset_name()) == std::string::npos)
      frag_paths.push_back(path);
  }

  return frag_paths;
}

// get all fragment dataset paths for a given record ID
std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  std::vector<std::string> frag_paths;
  if (get_version() <= 2) {
    std::string record_group_path =
      m_file_ptr->getPath() + m_file_layout_ptr->get_record_number_string(rid.first, rid.second);

    for (auto const& path : get_dataset_paths(record_group_path)) {
      if (path.find(m_file_layout_ptr->get_record_header_dataset_name()) == std::string::npos)
        frag_paths.push_back(path);
    }
  } else {
    std::set<daqdataformats::SourceID> source_id_list = get_fragment_source_ids(rid);
    for (auto const& source_id : source_id_list) {
      frag_paths.push_back(m_source_id_path_cache[rid][source_id]);
    }
  }
  return frag_paths;
}

// get all fragment dataset paths for a given record ID
std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const uint64_t rec_num, // NOLINT(build/unsigned)
                                            const daqdataformats::sequence_number_t seq_num)
{
  return get_fragment_dataset_paths(std::make_pair(rec_num, seq_num));
}

// get all fragment dataset paths for a Subsystem
std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const daqdataformats::SourceID::Subsystem subsystem)
{
  std::vector<std::string> frag_paths;
  for (auto const& rid : get_all_record_ids()) {
    if (get_version() <= 2) {
      auto datasets = get_dataset_paths(m_file_ptr->getPath() +
                                        m_file_layout_ptr->get_fragment_type_path(rid.first, rid.second, subsystem));
      frag_paths.insert(frag_paths.end(), datasets.begin(), datasets.end());
    } else {
      std::set<daqdataformats::SourceID> source_id_list = get_source_ids_for_subsystem(rid, subsystem);
      for (auto const& source_id : source_id_list) {
        frag_paths.push_back(m_source_id_path_cache[rid][source_id]);
      }
    }
  }
  return frag_paths;
}

std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const std::string& subsystem_name)
{
  daqdataformats::SourceID::Subsystem subsystem = daqdataformats::SourceID::string_to_subsystem(subsystem_name);
  return get_fragment_dataset_paths(subsystem);
}

std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const record_id_t& rid, const daqdataformats::SourceID::Subsystem subsystem)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  if (get_version() <= 2) {
    return get_dataset_paths(m_file_ptr->getPath() +
                             m_file_layout_ptr->get_fragment_type_path(rid.first, rid.second, subsystem));
  } else {
    std::vector<std::string> frag_paths;
    std::set<daqdataformats::SourceID> source_id_list = get_source_ids_for_subsystem(rid, subsystem);
    for (auto const& source_id : source_id_list) {
      frag_paths.push_back(m_source_id_path_cache[rid][source_id]);
    }
    return frag_paths;
  }
}

std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const record_id_t& rid, const std::string& subsystem_name)
{
  daqdataformats::SourceID::Subsystem subsystem = daqdataformats::SourceID::string_to_subsystem(subsystem_name);
  return get_fragment_dataset_paths(rid, subsystem);
}

#if 0
// get all fragment dataset paths for a SourceID
std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const daqdataformats::SourceID& source_id)
{
  std::vector<std::string> frag_paths;

  for (auto const& rid : get_all_record_ids())
    frag_paths.push_back(m_file_ptr->getPath() +
                         m_file_layout_ptr->get_fragment_path(rid.first, rid.second, source_id));

  return frag_paths;
}

std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const daqdataformats::SourceID::Subsystem type,
                                            const uint32_t id) // NOLINT(build/unsigned)
{
  return get_fragment_dataset_paths(daqdataformats::SourceID(type, id));
}

std::vector<std::string>
HDF5RawDataFile::get_fragment_dataset_paths(const std::string& typestring,
                                            const uint32_t id) // NOLINT(build/unsigned)
{
  return get_fragment_dataset_paths(
    daqdataformats::SourceID(daqdataformats::SourceID::string_to_subsystem(typestring), id));
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids(std::vector<std::string> const& frag_dataset_paths)
{
  std::set<daqdataformats::SourceID> source_ids;
  std::vector<std::string> path_elements;
  std::string s;
  for (auto const& frag_dataset : frag_dataset_paths) {
    path_elements.clear();
    std::istringstream iss(frag_dataset);
    while (std::getline(iss, s, '/')) {
      if (s.size() > 0)
        path_elements.push_back(s);
    }
    source_ids.insert(m_file_layout_ptr->get_source_id_from_path_elements(path_elements));
  }

  return source_ids;
}
#endif

HDF5SourceIDHandler::source_id_geo_id_map_t
HDF5RawDataFile::get_srcid_geoid_map() const
{
  return m_file_level_source_id_geo_id_map;
}

std::set<uint64_t> // NOLINT(build/unsigned)
HDF5RawDataFile::get_all_geo_ids() const
{
  std::set<uint64_t> set_of_geo_ids;
  // 13-Sep-2022, KAB
  // It would be safer, but slower, to fetch all of the geo_ids from the
  // individual records, and we'll go with faster, for now. If/when we
  // change the way that we determine the file-level and record-level
  // source_id-to-geo_id maps, we may need to change this code.
  for (auto const& map_entry : m_file_level_source_id_geo_id_map) {
    for (auto const& geo_id : map_entry.second) {
      set_of_geo_ids.insert(geo_id);
    }
  }
  return set_of_geo_ids;
}

std::set<uint64_t> // NOLINT(build/unsigned)
HDF5RawDataFile::get_geo_ids(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  std::set<uint64_t> set_of_geo_ids;
  for (auto const& map_entry : m_source_id_geo_id_cache[rid]) {
    for (auto const& geo_id : map_entry.second) {
      set_of_geo_ids.insert(geo_id);
    }
  }
  return set_of_geo_ids;
}

std::set<uint64_t> // NOLINT(build/unsigned)
HDF5RawDataFile::get_geo_ids_for_subdetector(const record_id_t& rid,
                                             const detdataformats::DetID::Subdetector subdet)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  std::set<uint64_t> set_of_geo_ids;
  for (auto const& map_entry : m_source_id_geo_id_cache[rid]) {
    for (auto const& geo_id : map_entry.second) {
      // FIXME: replace with a proper coder/decoder
      uint16_t det_id = 0xffff & geo_id;
      if (det_id == static_cast<uint16_t>(subdet)) {
        set_of_geo_ids.insert(geo_id);
      }
    }
  }
  return set_of_geo_ids;
}
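
// Sketch of the geo_id convention assumed by the mask above (per the FIXME,
// this is not a formal coder/decoder): the detector ID is packed into the
// low 16 bits of the 64-bit geo_id, so extracting it is a simple mask:
//
//   uint64_t geo_id = /* from get_all_geo_ids() or a record-level lookup */;
//   auto det_id = static_cast<uint16_t>(geo_id & 0xffff);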

// get all SourceIDs for a given record ID
std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_source_id_cache[rid];
}

daqdataformats::SourceID
HDF5RawDataFile::get_record_header_source_id(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_record_header_source_id_cache[rid];
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_fragment_source_ids(const record_id_t& rid)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_fragment_source_id_cache[rid];
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids_for_subsystem(const record_id_t& rid,
                                              const daqdataformats::SourceID::Subsystem subsystem)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_subsystem_source_id_cache[rid][subsystem];
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids_for_fragment_type(const record_id_t& rid, const daqdataformats::FragmentType frag_type)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_fragment_type_source_id_cache[rid][frag_type];
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids_for_fragtype_and_subdetector(const record_id_t& rid,
                                                             const std::string& frag_type_name,
                                                             const std::string& subdet_name)
{
  daqdataformats::FragmentType frag_type = daqdataformats::string_to_fragment_type(frag_type_name);
  if (frag_type == daqdataformats::FragmentType::kUnknown)
    throw InvalidFragmentTypeString(ERS_HERE, frag_type_name);

  detdataformats::DetID::Subdetector subdet = detdataformats::DetID::string_to_subdetector(subdet_name);
  if (subdet == detdataformats::DetID::Subdetector::kUnknown)
    throw InvalidSubdetectorString(ERS_HERE, subdet_name);

  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  std::set<daqdataformats::SourceID> fragtype_match_sids = m_fragment_type_source_id_cache[rid][frag_type];
  std::set<daqdataformats::SourceID> detid_match_sids = m_subdetector_source_id_cache[rid][subdet];
  std::set<daqdataformats::SourceID> combined_set_sids;
  for (auto ftsid : fragtype_match_sids) {
    if (detid_match_sids.contains(ftsid)) {
      combined_set_sids.insert(ftsid);
    }
  }
  return combined_set_sids;
}

std::set<daqdataformats::SourceID>
HDF5RawDataFile::get_source_ids_for_subdetector(const record_id_t& rid, const detdataformats::DetID::Subdetector subdet)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_subdetector_source_id_cache[rid][subdet];
}

std::unique_ptr<char[]>
HDF5RawDataFile::get_dataset_raw_data(const std::string& dataset_path)
{
  HighFive::Group parent_group = m_file_ptr->getGroup("/");
  HighFive::DataSet data_set = parent_group.getDataSet(dataset_path);

  if (!data_set.isValid())
    throw InvalidHDF5Dataset(ERS_HERE, dataset_path, get_file_name());

  size_t data_size = data_set.getSpace().getElementCount() * sizeof(char);

  auto membuffer = std::make_unique<char[]>(data_size);

  data_set.read(membuffer.get());
  return membuffer;
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const std::string& dataset_name)
{
  auto membuffer = get_dataset_raw_data(dataset_name);
  auto frag_ptr = std::make_unique<daqdataformats::Fragment>(
    membuffer.release(), daqdataformats::Fragment::BufferAdoptionMode::kTakeOverBuffer);
  return frag_ptr;
}
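
// Note: the Fragment adopts the released buffer (kTakeOverBuffer), so no
// extra copy is made. A retrieval sketch using only methods defined in this
// file (record and sequence numbers are placeholders):
//
//   HDF5RawDataFile::record_id_t rid{ 42, 0 };
//   for (auto const& sid : input_file.get_fragment_source_ids(rid)) {
//     auto frag = input_file.get_frag_ptr(rid, sid);
//     // ... process the Fragment ...
//   }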

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const record_id_t& rid, const daqdataformats::SourceID& source_id)
{
  if (get_version() < 2)
    throw IncompatibleFileLayoutVersion(ERS_HERE, get_version(), 2, MAX_FILELAYOUT_VERSION);

  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return get_frag_ptr(m_source_id_path_cache[rid][source_id]);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const uint64_t rec_num, // NOLINT(build/unsigned)
                              const daqdataformats::sequence_number_t seq_num,
                              const daqdataformats::SourceID& source_id)
{
  record_id_t rid = std::make_pair(rec_num, seq_num);
  return get_frag_ptr(rid, source_id);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const record_id_t& rid,
                              const daqdataformats::SourceID::Subsystem type,
                              const uint32_t id) // NOLINT(build/unsigned)
{
  daqdataformats::SourceID source_id(type, id);
  return get_frag_ptr(rid, source_id);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const uint64_t rec_num, // NOLINT(build/unsigned)
                              const daqdataformats::sequence_number_t seq_num,
                              const daqdataformats::SourceID::Subsystem type,
                              const uint32_t id) // NOLINT(build/unsigned)
{
  record_id_t rid = std::make_pair(rec_num, seq_num);
  daqdataformats::SourceID source_id(type, id);
  return get_frag_ptr(rid, source_id);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const record_id_t& rid,
                              const std::string& typestring,
                              const uint32_t id) // NOLINT(build/unsigned)
{
  daqdataformats::SourceID source_id(daqdataformats::SourceID::string_to_subsystem(typestring), id);
  return get_frag_ptr(rid, source_id);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const uint64_t rec_num, // NOLINT(build/unsigned)
                              const daqdataformats::sequence_number_t seq_num,
                              const std::string& typestring,
                              const uint32_t id) // NOLINT(build/unsigned)
{
  record_id_t rid = std::make_pair(rec_num, seq_num);
  daqdataformats::SourceID source_id(daqdataformats::SourceID::string_to_subsystem(typestring), id);
  return get_frag_ptr(rid, source_id);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const record_id_t& rid,
                              const uint64_t geo_id) // NOLINT(build/unsigned)
{
  daqdataformats::SourceID sid = get_source_id_for_geo_id(rid, geo_id);
  return get_frag_ptr(rid, sid);
}

std::unique_ptr<daqdataformats::Fragment>
HDF5RawDataFile::get_frag_ptr(const uint64_t rec_num, // NOLINT(build/unsigned)
                              const daqdataformats::sequence_number_t seq_num,
                              const uint64_t geo_id) // NOLINT(build/unsigned)
{
  record_id_t rid = std::make_pair(rec_num, seq_num);
  return get_frag_ptr(rid, geo_id);
}

std::unique_ptr<daqdataformats::TriggerRecordHeader>
HDF5RawDataFile::get_trh_ptr(const std::string& dataset_name)
{
  auto membuffer = get_dataset_raw_data(dataset_name);
  auto trh_ptr = std::make_unique<daqdataformats::TriggerRecordHeader>(membuffer.release(), true);
  return trh_ptr;
}

std::unique_ptr<daqdataformats::TriggerRecordHeader>
HDF5RawDataFile::get_trh_ptr(const record_id_t& rid)
{
  if (get_version() < 2)
    throw IncompatibleFileLayoutVersion(ERS_HERE, get_version(), 2, MAX_FILELAYOUT_VERSION);

  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  daqdataformats::SourceID rh_source_id = m_record_header_source_id_cache[rid];
  return get_trh_ptr(m_source_id_path_cache[rid][rh_source_id]);
}

std::unique_ptr<daqdataformats::TimeSliceHeader>
HDF5RawDataFile::get_tsh_ptr(const std::string& dataset_name)
{
  auto membuffer = get_dataset_raw_data(dataset_name);
  // copy the header out of the buffer; membuffer keeps ownership of the raw
  // data and frees it when it goes out of scope
  auto tsh_ptr = std::make_unique<daqdataformats::TimeSliceHeader>(
    *(reinterpret_cast<daqdataformats::TimeSliceHeader*>(membuffer.get()))); // NOLINT
  return tsh_ptr;
}

std::unique_ptr<daqdataformats::TimeSliceHeader>
HDF5RawDataFile::get_tsh_ptr(const record_id_t& rid)
{
  if (get_version() < 2)
    throw IncompatibleFileLayoutVersion(ERS_HERE, get_version(), 2, MAX_FILELAYOUT_VERSION);

  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  daqdataformats::SourceID rh_source_id = m_record_header_source_id_cache[rid];
  return get_tsh_ptr(m_source_id_path_cache[rid][rh_source_id]);
}

daqdataformats::TriggerRecord
HDF5RawDataFile::get_trigger_record(const record_id_t& rid)
{
  daqdataformats::TriggerRecord trigger_record(*get_trh_ptr(rid));
  for (auto const& frag_path : get_fragment_dataset_paths(rid)) {
    trigger_record.add_fragment(get_frag_ptr(frag_path));
  }

  return trigger_record;
}

daqdataformats::TimeSlice
HDF5RawDataFile::get_timeslice(const daqdataformats::timeslice_number_t ts_num)
{
  daqdataformats::TimeSlice timeslice(*get_tsh_ptr(ts_num));
  for (auto const& frag_path : get_fragment_dataset_paths(ts_num)) {
    timeslice.add_fragment(get_frag_ptr(frag_path));
  }

  return timeslice;
}

std::vector<uint64_t> // NOLINT(build/unsigned)
HDF5RawDataFile::get_geo_ids_for_source_id(const record_id_t& rid, const daqdataformats::SourceID& source_id)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  return m_source_id_geo_id_cache[rid][source_id];
}

daqdataformats::SourceID
HDF5RawDataFile::get_source_id_for_geo_id(const record_id_t& rid,
                                          const uint64_t requested_geo_id) // NOLINT(build/unsigned)
{
  auto rec_id = get_all_record_ids().find(rid);
  if (rec_id == get_all_record_ids().end())
    throw RecordIDNotFound(ERS_HERE, rid.first, rid.second);

  add_record_level_info_to_caches_if_needed(rid);

  // if we want to make this faster, we could build a reverse-lookup cache in
  // add_record_level_info_to_caches_if_needed() and just look up the requested geo_id here
  for (auto const& map_entry : m_source_id_geo_id_cache[rid]) {
    auto geoid_list = map_entry.second;
    for (auto const& geoid_from_list : geoid_list) {
      if (geoid_from_list == requested_geo_id) {
        return map_entry.first;
      }
    }
  }

  // not found: return a default-constructed (invalid) SourceID
  daqdataformats::SourceID empty_sid;
  return empty_sid;
}

} // namespace hdf5libs
} // namespace dunedaq
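
// End-to-end usage sketch (the file name and processing are illustrative
// assumptions, not part of this API):
//
//   using namespace dunedaq;
//   hdf5libs::HDF5RawDataFile input_file("run000123_0000.hdf5");
//   for (auto const& rid : input_file.get_all_trigger_record_ids()) {
//     daqdataformats::TriggerRecord tr = input_file.get_trigger_record(rid);
//     for (auto const& frag_ptr : tr.get_fragments_ref()) {
//       // ... process each Fragment ...
//     }
//   }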