Line data Source code
1 : /**
2 : * @file HDF5LIBS_TestWriter.cpp
3 : *
 * Demonstration of the HDF5 raw-data file writer: reads a JSON configuration
 * and writes dummy trigger records to an output HDF5 file.
5 : *
6 : *
7 : * This is part of the DUNE DAQ Software Suite, copyright 2020.
8 : * Licensing/copyright details are in the COPYING file that you should have
9 : * received with this code.
10 : */
11 :
#include "hdf5libs/HDF5RawDataFile.hpp"

#include "detdataformats/DetID.hpp"
#include "logging/Logging.hpp"

#include <nlohmann/json.hpp>

#include <chrono>
#include <cstdint>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>
25 :
26 : using namespace dunedaq::hdf5libs;
27 : using namespace dunedaq::daqdataformats;
28 : using namespace dunedaq::detdataformats;
29 :
// Print the expected command-line invocation to the application log.
void
print_usage()
{
  TLOG() << "Usage: HDF5LIBS_TestWriter <configuration_file> <detector_readout_map_file> <output_file_name>";
}
35 :
36 : int
37 0 : main(int argc, char** argv)
38 : {
39 :
40 0 : if (argc != 4) {
41 0 : print_usage();
42 0 : return 1;
43 : }
44 :
45 0 : const std::string app_name = std::string(argv[0]);
46 0 : const std::string ifile_name = std::string(argv[1]);
47 0 : const std::string hw_map_file_name = std::string(argv[2]);
48 0 : const std::string ofile_name = std::string(argv[3]);
49 0 : const unsigned compression_level = 0;
50 :
51 : // read in configuration
52 0 : nlohmann::json j_in, fl_conf;
53 0 : std::ifstream ifile(ifile_name);
54 0 : ifile >> j_in;
55 0 : ifile.close();
56 :
57 : // get file_layout config
58 0 : try {
59 0 : fl_conf = j_in["file_layout"];
60 0 : TLOG() << "Read 'file_layout' configuration:\n";
61 0 : TLOG() << fl_conf;
62 0 : } catch (...) {
63 0 : TLOG() << "ERROR: Improper 'file_layout' configuration in " << ifile_name;
64 0 : return 1;
65 0 : }
66 :
67 : // read test writer app configs
68 0 : const int run_number = j_in["run_number"].get<int>();
69 0 : const int file_index = j_in["file_index"].get<int>();
70 :
71 0 : const int trigger_count = j_in["trigger_count"].get<int>();
72 0 : const int fragment_size = j_in["data_size"].get<int>() + sizeof(FragmentHeader);
73 0 : const SourceID::Subsystem stype_to_use = SourceID::string_to_subsystem(j_in["subsystem_type"].get<std::string>());
74 0 : const DetID::Subdetector dtype_to_use = DetID::string_to_subdetector(j_in["subdetector_type"].get<std::string>());
75 0 : const FragmentType ftype_to_use = string_to_fragment_type(j_in["fragment_type"].get<std::string>());
76 0 : const int element_count = j_in["element_count"].get<int>();
77 :
78 0 : TLOG() << "\nOutput file: " << ofile_name << "\nRun number: " << run_number << "\nFile index: " << file_index
79 0 : << "\nNumber of trigger records: " << trigger_count << "\nNumber of fragments: " << element_count
80 0 : << "\nSubsystem: " << SourceID::subsystem_to_string(stype_to_use)
81 0 : << "\nFragment size (bytes, incl. header): " << fragment_size;
82 :
83 : // Read src-geo id map
84 : //std::ifstream f(hw_map_file_name);
85 : //nlohmann::json data = nlohmann::json::parse(f);
86 :
87 : //auto srcid_geoid_map = data.get<hdf5rawdatafile::SrcIDGeoIDMap>();
88 0 : HDF5SourceIDHandler::source_id_geo_id_map_t srcid_geoid_map;
89 :
90 : // open our file for writing
91 0 : HDF5RawDataFile h5_raw_data_file = HDF5RawDataFile(ofile_name,
92 : run_number, // run_number
93 : file_index, // file_index,
94 : app_name, // app_name
95 : fl_conf, // file_layout_confs
96 : srcid_geoid_map,
97 : compression_level, // gzip compression_level
98 : ".writing", // optional: suffix to use for files being written
99 0 : HighFive::File::Overwrite); // optional: overwrite existing file
100 :
101 0 : std::vector<char> dummy_data(fragment_size);
102 :
103 : // loop over desired number of triggers
104 0 : for (int trig_num = 1; trig_num <= trigger_count; ++trig_num) {
105 :
106 : // get a timestamp for this trigger
107 0 : uint64_t ts = std::chrono::duration_cast<std::chrono::milliseconds>( // NOLINT(build/unsigned)
108 0 : system_clock::now().time_since_epoch())
109 0 : .count();
110 :
111 0 : TLOG() << "\tWriting trigger " << trig_num << " with time_stamp " << ts;
112 :
113 : // create TriggerRecordHeader
114 0 : TriggerRecordHeaderData trh_data;
115 0 : trh_data.trigger_number = trig_num;
116 0 : trh_data.trigger_timestamp = ts;
117 0 : trh_data.num_requested_components = element_count;
118 0 : trh_data.run_number = run_number;
119 0 : trh_data.sequence_number = 0;
120 0 : trh_data.max_sequence_number = 1;
121 0 : trh_data.element_id = SourceID(SourceID::Subsystem::kTRBuilder, 0);
122 :
123 0 : TriggerRecordHeader trh(&trh_data);
124 :
125 : // create out TriggerRecord
126 0 : TriggerRecord tr(trh);
127 :
128 : // loop over elements
129 0 : for (int ele_num = 0; ele_num < element_count; ++ele_num) {
130 :
131 : // create our fragment
132 0 : FragmentHeader fh;
133 0 : fh.trigger_number = trig_num;
134 0 : fh.trigger_timestamp = ts;
135 0 : fh.window_begin = ts - 10;
136 0 : fh.window_end = ts;
137 0 : fh.run_number = run_number;
138 0 : fh.fragment_type = static_cast<fragment_type_t>(ftype_to_use);
139 0 : fh.sequence_number = 0;
140 0 : fh.detector_id = static_cast<uint16_t>(dtype_to_use);
141 0 : fh.element_id = SourceID(stype_to_use, ele_num);
142 :
143 0 : auto frag_ptr = std::make_unique<Fragment>(dummy_data.data(), dummy_data.size());
144 0 : frag_ptr->set_header_fields(fh);
145 :
146 : // add fragment to TriggerRecord
147 0 : tr.add_fragment(std::move(frag_ptr));
148 :
149 0 : } // end loop over elements
150 :
151 : // write trigger record to file
152 0 : h5_raw_data_file.write(tr);
153 :
154 0 : } // end loop over triggers
155 :
156 0 : TLOG() << "Finished writing to file " << h5_raw_data_file.get_file_name();
157 0 : TLOG() << "Recorded size: " << h5_raw_data_file.get_recorded_size();
158 :
159 0 : return 0;
160 0 : }
|