LCOV - code coverage report
Current view: top level - hdf5libs/test/apps - HDF5LIBS_TestReader.cpp (source / functions) Coverage Total Hit
Test: code.result Lines: 0.0 % 104 0
Test Date: 2025-12-21 13:07:08 Functions: 0.0 % 11 0

            Line data    Source code
       1              : /**
       2              :  * @file HDF5LIBS_TestReader.cpp
       3              :  *
       4              :  * Test/demo of the HDF5 raw data file reader: this example shows how to open a file,
       5              :  * inspect its attributes, and print record and fragment header contents.
       6              :  *
       7              :  *
       8              :  * This is part of the DUNE DAQ Software Suite, copyright 2020.
       9              :  * Licensing/copyright details are in the COPYING file that you should have
      10              :  * received with this code.
      11              :  */
      12              : 
#include "hdf5libs/HDF5RawDataFile.hpp"

#include "logging/Logging.hpp"

#include <fstream>
#include <iostream>
#include <iterator>
#include <sstream>
#include <string>
      21              : 
      22              : using namespace dunedaq::hdf5libs;
      23              : using namespace dunedaq::daqdataformats;
      24              : 
      25              : void
      26            0 : print_usage()
      27              : {
      28            0 :   TLOG() << "Usage: HDF5LIBS_TestReader <input_file_name>";
      29            0 : }
      30              : 
      31              : int
      32            0 : main(int argc, char** argv)
      33              : {
      34              : 
      35            0 :   if (argc != 2) {
      36            0 :     print_usage();
      37            0 :     return 1;
      38              :   }
      39              : 
      40            0 :   const std::string ifile_name = std::string(argv[1]);
      41              : 
      42              :   // open our file reading
      43            0 :   HDF5RawDataFile h5_raw_data_file(ifile_name);
      44              : 
      45            0 :   std::ostringstream ss;
      46              : 
      47            0 :   ss << "\nFile name: " << h5_raw_data_file.get_file_name();
      48            0 :   ss << "\n\tRecorded size from class: " << h5_raw_data_file.get_recorded_size();
      49              : 
      50            0 :   auto recorded_size = h5_raw_data_file.get_attribute<size_t>("recorded_size");
      51            0 :   ss << "\n\tRecorded size from attribute: " << recorded_size;
      52              : 
      53            0 :   auto record_type = h5_raw_data_file.get_record_type();
      54            0 :   ss << "\nRecord type = " << record_type;
      55              : 
      56            0 :   nlohmann::json flp_json;
      57            0 :   auto flp = h5_raw_data_file.get_file_layout().get_file_layout_params();
      58            0 :   flp_json = flp.to_json();
      59            0 :   ss << "\nFile Layout Parameters:\n" << flp_json;
      60              : 
      61            0 :   TLOG() << ss.str();
      62            0 :   ss.str("");
      63              : 
      64              :   // get some file attributes
      65            0 :   auto run_number = h5_raw_data_file.get_attribute<unsigned int>("run_number");
      66            0 :   auto file_index = h5_raw_data_file.get_attribute<unsigned int>("file_index");
      67            0 :   auto app_name = h5_raw_data_file.get_attribute<std::string>("application_name");
      68              : 
      69            0 :   ss << "\n\tRun number: " << run_number;
      70            0 :   ss << "\n\tFile index: " << file_index;
      71            0 :   if (h5_raw_data_file.get_file_layout().get_version() >= 6) {
      72            0 :     auto creation_timestamp = h5_raw_data_file.get_attribute<size_t>("creation_timestamp");
      73            0 :     ss << "\n\tCreation timestamp: " << creation_timestamp;
      74              :   } else {
      75            0 :     auto creation_timestamp = h5_raw_data_file.get_attribute<std::string>("creation_timestamp");
      76            0 :     ss << "\n\tCreation timestamp: " << creation_timestamp;
      77            0 :   }
      78            0 :   ss << "\n\tWriter app name: " << app_name;
      79              : 
      80            0 :   TLOG() << ss.str();
      81            0 :   ss.str("");
      82              : 
      83            0 :   auto records = h5_raw_data_file.get_all_record_ids();
      84            0 :   ss << "\nNumber of records: " << records.size();
      85            0 :   if (records.empty()) {
      86            0 :     ss << "\n\nNO TRIGGER RECORDS FOUND";
      87            0 :     TLOG() << ss.str();
      88            0 :     return 0;
      89              :   }
      90            0 :   auto first_rec = *(records.begin());
      91            0 :   auto last_rec = *(std::next(records.begin(), records.size() - 1));
      92              : 
      93            0 :   ss << "\n\tFirst record: " << first_rec.first << "," << first_rec.second;
      94            0 :   ss << "\n\tLast record: " << last_rec.first << "," << last_rec.second;
      95              : 
      96            0 :   TLOG() << ss.str();
      97            0 :   ss.str("");
      98              : 
      99              :   // for(auto const& rid : records)
     100              :   //   ss << "\n\t\tRecord: (" << rid.first << "," << rid.second << ")";
     101              :   // TLOG() << ss.str();
     102              :   // ss.str("");
     103              : 
     104              :   // do this to get all datasets....
     105              :   //  auto all_datasets = h5_raw_data_file.get_dataset_paths();
     106              :   //  ss << "\nAll datasets found:";
     107              :   //  for (auto const& path : all_datasets)
     108              :   //    ss << "\n\t" << path;
     109              :   //  TLOG() << ss.str();
     110              :   //  ss.str("");
     111              : 
     112            0 :   auto all_rh_paths = h5_raw_data_file.get_record_header_dataset_paths();
     113            0 :   ss << "\nAll record header datasets found:";
     114            0 :   for (auto const& path : all_rh_paths)
     115            0 :     ss << "\n\t" << path;
     116            0 :   TLOG() << ss.str();
     117            0 :   ss.str("");
     118              : 
     119              :   // do this to get all fragment dasets paths...
     120              :   //  auto all_frag_paths = h5_raw_data_file.get_all_fragment_dataset_paths();
     121              :   //  ss << "\nAll fragment datasets found:";
     122              :   //  for (auto const& path : all_frag_paths)
     123              :   //    ss << "\n\t" << path;
     124              :   //  TLOG() << ss.str();
     125              :   //  ss.str("");
     126              : 
     127            0 :   if (h5_raw_data_file.is_trigger_record_type()) {
     128            0 :     auto trh_ptr = h5_raw_data_file.get_trh_ptr(first_rec);
     129            0 :     ss << "\nTrigger Record Headers:";
     130            0 :     ss << "\nFirst: " << trh_ptr->get_header();
     131            0 :     ss << "\nLast: " << h5_raw_data_file.get_trh_ptr(all_rh_paths.back())->get_header();
     132            0 :   } else if (h5_raw_data_file.is_timeslice_type()) {
     133            0 :     auto tsh_ptr = h5_raw_data_file.get_tsh_ptr(first_rec);
     134            0 :     ss << "\nTimeSlice Headers:";
     135            0 :     ss << "\nFirst: " << *tsh_ptr;
     136            0 :     ss << "\nLast: " << *(h5_raw_data_file.get_tsh_ptr(all_rh_paths.back()));
     137            0 :   }
     138            0 :   TLOG() << ss.str();
     139            0 :   ss.str("");
     140              : 
     141              :   // grabbing single fragments at a time...
     142              :   //  ss << "\nFragment Headers:";
     143              :   //  ss << "\nFirst: " << h5_raw_data_file.get_frag_ptr(first_rec, "TPC", 0, 0)->get_header();
     144              :   //  ss << "\nLast: " << h5_raw_data_file.get_frag_ptr(all_frag_paths.back())->get_header();
     145              :   //  TLOG() << ss.str();
     146              :   //  ss.str("");
     147              : 
     148              :   /**
     149              :    * Example of walking through a file record by record, 'efficiently'.
     150              :    * (1) Get all the records in the file.
     151              :    * (2) For a known rid, you can construct the record header dataset path
     152              :    *     using the internally-held file layout parameters. Use that to grab the
     153              :    *     record header pointer.
     154              :    * (3) For a known rid, you could get all of the fragment dataset path names.
     155              :    *     Alternatively, as is done here, you can get the set of GeoIDs found in this record.
     156              :    *     Then, loop over those and grab the appropriate fragment.
     157              :    *     Obviously this allows you to filter out gids that you don't want.
     158              :    */
     159            0 :   for (auto const& rid : h5_raw_data_file.get_all_record_ids()) {
     160            0 :     ss << "Processing record (" << rid.first << "," << rid.second << "):";
     161              : 
     162            0 :     auto record_header_dataset = h5_raw_data_file.get_record_header_dataset_path(rid);
     163            0 :     if (h5_raw_data_file.is_trigger_record_type()) {
     164            0 :       auto trh_ptr = h5_raw_data_file.get_trh_ptr(rid);
     165            0 :       ss << "\n\t" << trh_ptr->get_header();
     166            0 :     } else if (h5_raw_data_file.is_timeslice_type()) {
     167            0 :       auto tsh_ptr = h5_raw_data_file.get_tsh_ptr(rid);
     168            0 :       ss << "\n\t" << *tsh_ptr;
     169            0 :     }
     170              : 
     171            0 :     for (auto const& gid : h5_raw_data_file.get_geo_ids(rid)) {
     172              :       // ss << "\n\t" << gid << ": ";
     173            0 :       auto frag_ptr = h5_raw_data_file.get_frag_ptr(rid, gid);
     174            0 :       ss << "\n\t" << frag_ptr->get_header();
     175            0 :     }
     176              : 
     177              :     // could also do loop like this...
     178              :     // for(auto const& frag_dataset : h5_raw_data_file.get_fragment_dataset_paths(rid))
     179              :     //   auto frag_ptr = h5_raw_data_file.get_frag_ptr(frag_dataset)
     180              : 
     181            0 :     TLOG() << ss.str();
     182            0 :     ss.str("");
     183            0 :   }
     184              : 
     185              :   /**
     186              :    * Example of walking through a file record by record, reconstructing the record.
     187              :    *
     188              :    * *** YOU SHOULD NOT DO THIS ***
     189              :    * Why not? It may seem convenient. But it requires that you hold all of the fragments
     190              :    * in memory. This won't scale well! Instead, use the fact that we can more intelligently
     191              :    * grab the data we want, using something like the example above.
     192              :    *
     193              :    * Still, it was requested, so here it is.
     194              :    */
     195            0 :   for (auto const& rid : h5_raw_data_file.get_all_record_ids()) {
     196            0 :     ss << "Processing record (" << rid.first << "," << rid.second << "):";
     197              : 
     198            0 :     if (h5_raw_data_file.is_trigger_record_type()) {
     199            0 :       auto record = h5_raw_data_file.get_trigger_record(rid);
     200            0 :       auto const& trh = record.get_header_ref();
     201            0 :       ss << "\n\t" << trh.get_header();
     202            0 :       for (auto const& frag_ptr : record.get_fragments_ref()) {
     203              :         // ss << "\n\t" << frag_ptr->get_element_id();
     204            0 :         ss << "\n\t" << frag_ptr->get_header();
     205              :       }
     206            0 :     } else if (h5_raw_data_file.is_timeslice_type()) {
     207            0 :       auto record = h5_raw_data_file.get_timeslice(rid);
     208            0 :       auto const& tsh = record.get_header();
     209            0 :       ss << "\n\t" << tsh;
     210            0 :       for (auto const& frag_ptr : record.get_fragments_ref()) {
     211              :         // ss << "\n\t" << frag_ptr->get_element_id();
     212            0 :         ss << "\n\t" << frag_ptr->get_header();
     213              :       }
     214            0 :     }
     215            0 :     TLOG() << ss.str();
     216            0 :     ss.str("");
     217            0 :   }
     218              : 
     219            0 :   return 0;
     220            0 : } // NOLINT
        

Generated by: LCOV version 2.0-1