DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
hdf5_dump Namespace Reference

Classes

class  DAQDataFile
 

Functions

 tick_to_timestamp (ticks, clock_speed_hz)
 
 unpack_header (data_array, entry_type, required_version=0)
 
 print_header_dict (hdict, clock_speed_hz)
 
 print_trigger_record_header (data_array, clock_speed_hz, k_list_components)
 
 print_fragment_header (data_array, clock_speed_hz)
 
 print_header (data_array, record_type, clock_speed_hz, k_list_components)
 
 parse_args ()
 
 main ()
 

Variables

int FILELAYOUT_MIN_VERSION = 4
 
int FILELAYOUT_MAX_VERSION = 7
 
int TRIGGER_RECORD_HEADER_VERSION = 5
 
int FRAGMENT_HEADER_VERSION = 6
 
int TIME_SLICE_HEADER_VERSION = 2
 
dict DATA_FORMAT
 

Function Documentation

◆ main()

hdf5_dump.main ( )

Definition at line 328 of file hdf5_dump.py.

def main():
    """Entry point: parse the command line and run the requested actions.

    Requires at least one action option (-p/--print-out, -c/--check-fragments,
    -b/--binary-output); otherwise prints a usage error and returns.
    """
    args = parse_args()
    # Nothing to do unless at least one action flag was given.
    if args.print_out is None and args.check_fragments is False and \
            args.binary_output is None:
        # BUG FIX: the message said "two" but three options are listed below.
        print("Error: use at least one of the three following options:")
        print(" -p, --print-out {header, fragment, both}")
        print(" -c, --check-fragments")
        print(" -b, --binary-output")
        return

    h5 = DAQDataFile(args.file_name)

    # Actions are independent; any combination of the three may run.
    if args.binary_output is not None:
        h5.convert_to_binary(args.binary_output, args.num_of_records)
    if args.print_out is not None:
        h5.set_clock_speed_hz(args.speed_of_clock)
        h5.printout(args.print_out, args.num_of_records, args.list_components)
    if args.check_fragments:
        h5.check_fragments(args.num_of_records)

    return
int main(int argc, char *argv[])

◆ parse_args()

hdf5_dump.parse_args ( )

Definition at line 285 of file hdf5_dump.py.

# Build and evaluate the hdf5_dump command-line interface.
# Returns an argparse.Namespace with: file_name (required), binary_output
# (path or None), print_out (list of choices or None, -p may repeat),
# check_fragments (bool), list_components (bool), num_of_records (int,
# default 0), speed_of_clock (float Hz, default 62500000.0).
285def parse_args():
286 parser = argparse.ArgumentParser(
287 description='Python script to parse DUNE-DAQ HDF5 output files.')
288
# -f is the only required argument: the input HDF5 file.
289 parser.add_argument('-f', '--file-name',
290 help='path to HDF5 file',
291 required=True)
292
293 parser.add_argument('-b', '--binary-output',
294 help='convert to the specified binary file')
295
# action='append' lets -p be given multiple times; 'both' and 'all' are
# shorthands expanded elsewhere.
296 parser.add_argument('-p', '--print-out', action='append',
297 choices=['header', 'fragment', 'both', 'attributes',
298 'all'],
299 help='''select which part of data to be displayed, this
300 option can be repeated multiple times, "-p both" is
301 equivalent to "-p header -p fragment", "-p all" is
302 equivalent to "-p attributes -p header -p fragment"''')
303
304 parser.add_argument('-c', '--check-fragments',
305 help='''check if fragments written in trigger record
306 matches expected number in trigger record header''',
307 action='store_true')
308
309 parser.add_argument('-l', '--list-components',
310 help='''list components in trigger record header, used
311 with "--print-out header" or "--print-out both", not
312 applicable to TimeSlice data''', action='store_true')
313
# num-of-records default 0 — presumably means "all records"; confirm in DAQDataFile.
314 parser.add_argument('-n', '--num-of-records', type=int,
315 help='specify number of records to be parsed',
316 default=0)
317
318 parser.add_argument('-s', '--speed-of-clock', type=float,
319 help='''specify clock speed in Hz, default is
320 62500000.0 (62.5MHz)''',
321 default=62500000.0)
322
323 parser.add_argument('-v', '--version', action='version',
324 version='%(prog)s 2.0')
325 return parser.parse_args()
326
327
327

◆ print_fragment_header()

hdf5_dump.print_fragment_header ( data_array,
clock_speed_hz )

Definition at line 269 of file hdf5_dump.py.

def print_fragment_header(data_array, clock_speed_hz):
    """Decode the Fragment header bytes and pretty-print them."""
    header = unpack_header(data_array, "Fragment Header",
                           FRAGMENT_HEADER_VERSION)
    print_header_dict(header, clock_speed_hz)
    return

◆ print_header()

hdf5_dump.print_header ( data_array,
record_type,
clock_speed_hz,
k_list_components )

Definition at line 274 of file hdf5_dump.py.

def print_header(data_array, record_type, clock_speed_hz, k_list_components):
    """Dispatch header printing by record type.

    Supports "TriggerRecord" and "TimeSlice"; any other *record_type*
    produces an error message.  *k_list_components* is only meaningful
    for trigger records.
    """
    if record_type == "TriggerRecord":
        print_trigger_record_header(data_array, clock_speed_hz,
                                    k_list_components)
        return
    if record_type == "TimeSlice":
        header = unpack_header(data_array, "TimeSlice Header",
                               TIME_SLICE_HEADER_VERSION)
        print_header_dict(header, clock_speed_hz)
        return
    print(f"Error: Record Type {record_type} is not supported.")
    return

◆ print_header_dict()

hdf5_dump.print_header_dict ( hdict,
clock_speed_hz )

Definition at line 231 of file hdf5_dump.py.

def print_header_dict(hdict, clock_speed_hz):
    """Pretty-print one unpacked header dict, one aligned line per field.

    Bookkeeping fields (padding and version words) are suppressed;
    time-like fields get a human-readable timestamp appended; marker and
    status words are printed in hex; detector and subsystem IDs are
    translated to their symbolic names.
    """
    filtered_list = ['Padding', 'Source ID version', 'Component request version']
    for ik, iv in hdict.items():
        if any(map(ik.__contains__, filtered_list)):
            continue  # internal bookkeeping, not useful to the reader
        elif "time" in ik or "begin" in ik or "end" in ik:
            print("{:<30}: {} ({})".format(
                ik, iv, tick_to_timestamp(iv, clock_speed_hz)))
        elif 'Marker word' in ik or 'Status bits' in ik:
            # bit patterns are easier to read in hex (branches merged,
            # both printed identically before)
            print("{:<30}: {}".format(ik, hex(iv)))
        elif ik == 'Detector':
            subdet = detdataformats.DetID.Subdetector(iv)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print("{:<30}: {}".format(ik, det_name))
        elif ik == 'Source ID subsystem':
            # BUG FIX: original read `ik == 'Source ID subsystem' in ik`,
            # a chained comparison; plain equality is the intent.
            subsys = daqdataformats.SourceID.Subsystem(iv)
            subsys_name = daqdataformats.SourceID.subsystem_to_string(subsys)
            print("{:<30}: {}".format(ik, subsys_name))
        else:
            print("{:<30}: {}".format(ik, iv))
    return

◆ print_trigger_record_header()

hdf5_dump.print_trigger_record_header ( data_array,
clock_speed_hz,
k_list_components )

Definition at line 256 of file hdf5_dump.py.

def print_trigger_record_header(data_array, clock_speed_hz, k_list_components):
    """Print a TriggerRecord header, optionally followed by its components.

    The header occupies the leading bytes of *data_array*; when
    *k_list_components* is true the remainder (from byte 64 on) is decoded
    as a sequence of Component Request records, each preceded by a
    separator line.
    """
    header = unpack_header(data_array, "TriggerRecord Header",
                           TRIGGER_RECORD_HEADER_VERSION)
    print_header_dict(header, clock_speed_hz)

    if not k_list_components:
        return

    comp_fmt = DATA_FORMAT["Component Request"]
    for raw_fields in struct.iter_unpack(comp_fmt["unpack string"],
                                         data_array[64:]):
        print('-' * 80)
        print_header_dict(dict(zip(comp_fmt["keys"], raw_fields)),
                          clock_speed_hz)
    return

◆ tick_to_timestamp()

hdf5_dump.tick_to_timestamp ( ticks,
clock_speed_hz )

Definition at line 214 of file hdf5_dump.py.

def tick_to_timestamp(ticks, clock_speed_hz):
    """Convert a clock-tick count into a local-time datetime.

    ticks / clock_speed_hz yields *seconds* since the Unix epoch — the
    original local name ``ns`` was misleading and has been renamed.
    Values at or beyond 3e9 seconds (~year 2065) are treated as
    implausible and yield the string "InvalidDateString" instead.
    """
    seconds = float(ticks) / clock_speed_hz
    if seconds < 3000000000:
        # NOTE(review): fromtimestamp uses the local timezone — confirm
        # that local (not UTC) display is intended.
        return datetime.datetime.fromtimestamp(seconds)
    return "InvalidDateString"

◆ unpack_header()

hdf5_dump.unpack_header ( data_array,
entry_type,
required_version = 0 )

Definition at line 222 of file hdf5_dump.py.

def unpack_header(data_array, entry_type, required_version=0):
    """Decode the leading bytes of *data_array* as an *entry_type* header.

    The field layout (struct format, byte size, key names) comes from the
    module-level DATA_FORMAT table.  When *required_version* > 0, the
    second unpacked field is compared against it and a ValueError is
    raised on mismatch.  Returns a dict of key name -> unpacked value.
    """
    layout = DATA_FORMAT[entry_type]
    fields = struct.unpack(layout["unpack string"],
                           data_array[:layout["size"]])
    version_ok = (required_version <= 0
                  or len(fields) < 2
                  or fields[1] == required_version)
    if not version_ok:
        raise ValueError(f"Invalid {entry_type} format version: expected {required_version} and found {fields[1]}")
    return dict(zip(layout["keys"], fields))

Variable Documentation

◆ DATA_FORMAT

dict hdf5_dump.DATA_FORMAT

Definition at line 22 of file hdf5_dump.py.

◆ FILELAYOUT_MAX_VERSION

int hdf5_dump.FILELAYOUT_MAX_VERSION = 7

Definition at line 15 of file hdf5_dump.py.

◆ FILELAYOUT_MIN_VERSION

int hdf5_dump.FILELAYOUT_MIN_VERSION = 4

Definition at line 14 of file hdf5_dump.py.

◆ FRAGMENT_HEADER_VERSION

int hdf5_dump.FRAGMENT_HEADER_VERSION = 6

Definition at line 19 of file hdf5_dump.py.

◆ TIME_SLICE_HEADER_VERSION

int hdf5_dump.TIME_SLICE_HEADER_VERSION = 2

Definition at line 20 of file hdf5_dump.py.

◆ TRIGGER_RECORD_HEADER_VERSION

int hdf5_dump.TRIGGER_RECORD_HEADER_VERSION = 5

Definition at line 18 of file hdf5_dump.py.