{
  std::string input_file;
  bool verbose = false;
  CLI::App app{"tapipe"};

  app.add_option("-i", input_file, "Input TPStream file path")->required();
  app.add_flag("-v", verbose);
  CLI11_PARSE(app, argc, argv);

  fmt::print("TPStream file: {}\n", input_file);

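  // Open the TPStream file; HDF5RawDataFile throws FileOpenFailed if the path cannot be opened.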
  std::unique_ptr<hdf5libs::HDF5RawDataFile> tpstream_file;

  try {
    tpstream_file = std::make_unique<hdf5libs::HDF5RawDataFile>(input_file);
  } catch (const hdf5libs::FileOpenFailed& e) {
    fmt::print(stderr, "ERROR: failed to open {}\n{}\n", input_file, e.what());
    return -1;
  }

  fmt::print("{} opened\n", input_file);

  fmt::print(" File type: {}\n", tpstream_file->get_record_type());

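  // Enumerate every record (time slice) in the file and collect the union of SourceIDs seen.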
  auto records = tpstream_file->get_all_record_ids();
  std::set<daqdataformats::SourceID> source_ids;

  for (const auto& rid : records) {
    const auto& [id, slice] = rid;
    auto sids = tpstream_file->get_source_ids(rid);
    source_ids.merge(sids);
    if (verbose)
      fmt::print("TR {}:{} [{}]\n", id, slice, fmt::join(sids, ", "));
  }
  fmt::print("Source IDs [{}]\n", fmt::join(source_ids, ", "));

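  // Build a map from each SourceID to the set of record ids in which that source appears.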
  std::map<daqdataformats::SourceID, hdf5libs::HDF5RawDataFile::record_id_set> m;
  for (const auto& sid : source_ids) {
    for (const auto& rid : records) {
      auto rec_sids = tpstream_file->get_source_ids(rid);
      if (rec_sids.find(sid) != rec_sids.end()) {
        m[sid].insert(rid);
      }
    }
    if (verbose)
      fmt::print("Record IDs for {} : [{}]\n", sid, fmt::join(m[sid], ", "));
  }

  fmt::print(" Number of time slices in file: {}\n", records.size());

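  // SourceID of the TP stream writer (Trigger subsystem, element 0); its record set selects the
  // time slices that contain TP fragments.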
  daqdataformats::SourceID tp_writer_sid{daqdataformats::SourceID::Subsystem::kTrigger, 0};
  auto tp_records = m[tp_writer_sid];

  std::vector<trgdataformats::TriggerPrimitive> tp_buffer;

  auto a_slice_id = *tp_records.begin();
  fmt::print("Processing tp time slice {}\n", a_slice_id);

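  // Read the TimeSliceHeader of the selected slice to report its run and slice numbers.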
  auto tsl_hdr = tpstream_file->get_tsh_ptr(a_slice_id);

  fmt::print(" Run number: {}\n", tsl_hdr->run_number);
  fmt::print(" TSL number: {}\n", tsl_hdr->timeslice_number);

  auto frag = tpstream_file->get_frag_ptr(a_slice_id, tp_writer_sid);

  fmt::print(" Fragment id: {} [{}]\n", frag->get_element_id().to_string(),
             daqdataformats::fragment_type_to_string(frag->get_fragment_type()));

  size_t n_tps = frag->get_data_size() / sizeof(trgdataformats::TriggerPrimitive);
  fmt::print("TP fragment size: {}\n", frag->get_data_size());
  fmt::print("Num TPs: {}\n", n_tps);

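  // The fragment payload is treated as a contiguous array of n_tps TriggerPrimitive structs
  // packed back-to-back starting at get_data().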
  trgdataformats::TriggerPrimitive* tp_array = static_cast<trgdataformats::TriggerPrimitive*>(frag->get_data());

  // Reserve (not resize) so the push_backs below do not append after n_tps default-constructed entries.
  tp_buffer.reserve(tp_buffer.size() + n_tps);

  uint64_t last_ts = 0;
  for (size_t i(0); i < n_tps; ++i) {
    auto& tp = tp_array[i];
    // Check that TPs are ordered by start time, tracking the previous value so the comparison is meaningful.
    if (tp.time_start < last_ts) {
      fmt::print("ERROR: TP {} out of order: {} < {}\n", i, tp.time_start, last_ts);
    }
    last_ts = tp.time_start;
    tp_buffer.push_back(tp);
  }

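  // time_start is in hardware clock ticks; the conversion below assumes a 16 ns tick (62.5 MHz),
  // so ticks * 16 / 1'000'000 gives milliseconds.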
  uint64_t d_ts = tp_array[n_tps - 1].time_start - tp_array[0].time_start;
  fmt::print("TS span: {} ticks, {} ms\n", d_ts, d_ts * 16.0 / 1'000'000);

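  // A TriggerActivity maker named `hmta` is used below. The concrete algorithm type here is an
  // assumption (chosen to match the `hmta` name); any triggeralgs TA maker exposing
  // operator()(const TriggerPrimitive&, std::vector<TriggerActivity>&) and configure(json) would do.
  triggeralgs::TriggerActivityMakerHorizontalMuon hmta;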

  std::vector<triggeralgs::TriggerActivity> ta_buffer;

  const nlohmann::json config = {};
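  // Configure the maker via the configure(const nlohmann::json&) interface listed at the end of this
  // snippet; passing an empty json is assumed to select the algorithm's default parameters.
  hmta.configure(config);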
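  // Run every buffered TP through the TA maker; completed TriggerActivities are appended to ta_buffer.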
  for (const auto& tp : tp_buffer) {
    hmta(tp, ta_buffer);
  }

  fmt::print("ta_buffer.size() = {}\n", ta_buffer.size());

  size_t payload_size(0);
  for (const auto& ta : ta_buffer) {
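    // Accumulate the serialized (overlay) size of each TA using the get_overlay_nbytes helper whose
    // signature is listed at the end of this snippet (the trgdataformats:: qualification is an assumption).
    payload_size += trgdataformats::get_overlay_nbytes(ta);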
  }

  fmt::print("ta_buffer in bytes = {}\n", payload_size);

  char* payload = static_cast<char*>(malloc(payload_size));

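  // Serialize each TA overlay back-to-back into the buffer. The cursor bookkeeping below is a sketch
  // based on the write_overlay / get_overlay_nbytes signatures listed at the end of this snippet.
  size_t offset = 0;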
  for (const auto& ta : ta_buffer) {
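    trgdataformats::write_overlay(ta, payload + offset);  // assumed namespace qualification, as above
    offset += trgdataformats::get_overlay_nbytes(ta);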
  }

  daqdataformats::Fragment ta_frag(static_cast<void*>(payload), payload_size);
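  // This relies on the two-argument Fragment constructor copying the payload into the fragment's own
  // buffer; otherwise the free() below would leave ta_frag pointing at released memory.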

  free(static_cast<void*>(payload));

  return 0;
}

// Helper signatures referenced in this example:
//   void   configure(const nlohmann::json& config)
//   void   write_overlay(const Object& object, void* buffer)
//   size_t get_overlay_nbytes(const Object& object)