DUNE-DAQ
DUNE Trigger and Data Acquisition software
hdf5_dump Namespace Reference

Classes

class  DAQDataFile
 

Functions

 tick_to_timestamp (ticks, clock_speed_hz)
 
 unpack_header (data_array, entry_type, required_version=0)
 
 print_header_dict (hdict, clock_speed_hz)
 
 print_trigger_record_header (data_array, clock_speed_hz, k_list_components)
 
 print_fragment_header (data_array, clock_speed_hz)
 
 print_header (data_array, record_type, clock_speed_hz, k_list_components)
 
 parse_args ()
 
 main ()
 

Variables

int FILELAYOUT_MIN_VERSION = 4
 
int FILELAYOUT_MAX_VERSION = 7
 
int TRIGGER_RECORD_HEADER_VERSION = 4
 
int FRAGMENT_HEADER_VERSION = 5
 
int TIME_SLICE_HEADER_VERSION = 2
 
dict DETECTOR
 
dict SUBSYSTEM
 
dict DATA_FORMAT
 

Function Documentation

◆ main()

hdf5_dump.main ( )

Definition at line 329 of file hdf5_dump.py.

329def main():
330 args = parse_args()
331 if args.print_out is None and args.check_fragments is False and \
332 args.binary_output is None:
333 print("Error: use at least one of the two following options:")
334 print(" -p, --print-out {header, fragment, both}")
335 print(" -c, --check-fragments")
336 print(" -b, --binary-output")
337 return
338
339 h5 = DAQDataFile(args.file_name)
340
341 if args.binary_output is not None:
342 h5.convert_to_binary(args.binary_output, args.num_of_records)
343 if args.print_out is not None:
344 h5.set_clock_speed_hz(args.speed_of_clock)
345 h5.printout(args.print_out, args.num_of_records, args.list_components)
346 if args.check_fragments:
347 h5.check_fragments(args.num_of_records)
348
349 return
350
351
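
A minimal sketch of driving the same steps programmatically instead of through the command line, assuming hdf5_dump.py is importable from the working directory. The file name 'example.hdf5' is hypothetical; the calls simply mirror what main() forwards from the parsed arguments above.

import hdf5_dump

h5 = hdf5_dump.DAQDataFile('example.hdf5')   # hypothetical input file
h5.set_clock_speed_hz(62500000.0)            # the script's default clock speed
h5.printout(['header'], 0, False)            # print_out list, num_of_records, list_components
h5.check_fragments(0)                        # 0 is the default for --num-of-records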

◆ parse_args()

hdf5_dump.parse_args ( )

Definition at line 286 of file hdf5_dump.py.

286def parse_args():
287 parser = argparse.ArgumentParser(
288 description='Python script to parse DUNE-DAQ HDF5 output files.')
289
290 parser.add_argument('-f', '--file-name',
291 help='path to HDF5 file',
292 required=True)
293
294 parser.add_argument('-b', '--binary-output',
295 help='convert to the specified binary file')
296
297 parser.add_argument('-p', '--print-out', action='append',
298 choices=['header', 'fragment', 'both', 'attributes',
299 'all'],
300 help='''select which part of the data to display; this
301 option can be repeated multiple times, "-p both" is
302 equivalent to "-p header -p fragment", "-p all" is
303 equivalent to "-p attributes -p header -p fragment"''')
304
305 parser.add_argument('-c', '--check-fragments',
306 help='''check that the number of fragments written in a trigger
307 record matches the expected number in the trigger record header''',
308 action='store_true')
309
310 parser.add_argument('-l', '--list-components',
311 help='''list components in the trigger record header; used
312 with "--print-out header" or "--print-out both", not
313 applicable to TimeSlice data''', action='store_true')
314
315 parser.add_argument('-n', '--num-of-records', type=int,
316 help='specify number of records to be parsed',
317 default=0)
318
319 parser.add_argument('-s', '--speed-of-clock', type=float,
320 help='''specify the clock speed in Hz; default is
321 62500000.0 (62.5 MHz)''',
322 default=62500000.0)
323
324 parser.add_argument('-v', '--version', action='version',
325 version='%(prog)s 2.0')
326 return parser.parse_args()
327
328
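
Since parse_args() reads sys.argv through argparse, a quick way to inspect the resulting namespace is to substitute an argument vector before calling it. A sketch, with a hypothetical file name:

import sys
import hdf5_dump

sys.argv = ['hdf5_dump.py', '-f', 'example.hdf5', '-p', 'header', '-p', 'fragment', '-n', '5']
args = hdf5_dump.parse_args()
print(args.print_out)        # ['header', 'fragment']  (action='append' builds a list)
print(args.num_of_records)   # 5
print(args.speed_of_clock)   # 62500000.0 (the default)
print(args.check_fragments)  # False (the store_true flag was not given)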

◆ print_fragment_header()

hdf5_dump.print_fragment_header (data_array, clock_speed_hz)

Definition at line 270 of file hdf5_dump.py.

270def print_fragment_header(data_array, clock_speed_hz):
271 print_header_dict(unpack_header(data_array, "Fragment Header", FRAGMENT_HEADER_VERSION), clock_speed_hz)
272 return
273
274

◆ print_header()

hdf5_dump.print_header (data_array, record_type, clock_speed_hz, k_list_components)

Definition at line 275 of file hdf5_dump.py.

275def print_header(data_array, record_type, clock_speed_hz, k_list_components):
276 if record_type == "TriggerRecord":
277 print_trigger_record_header(data_array, clock_speed_hz,
278 k_list_components)
279 elif record_type == "TimeSlice":
280 print_header_dict(unpack_header(data_array, "TimeSlice Header", TIME_SLICE_HEADER_VERSION), clock_speed_hz)
281 else:
282 print(f"Error: Record Type {record_type} is not supported.")
283 return
284
285

◆ print_header_dict()

hdf5_dump.print_header_dict (hdict, clock_speed_hz)

Definition at line 238 of file hdf5_dump.py.

238def print_header_dict(hdict, clock_speed_hz):
239 filtered_list = ['Padding', 'Source ID version', 'Component request version']
240 for ik, iv in hdict.items():
241 if any(map(ik.__contains__, filtered_list)):
242 continue
243 elif "time" in ik or "begin" in ik or "end" in ik:
244 print("{:<30}: {} ({})".format(
245 ik, iv, tick_to_timestamp(iv, clock_speed_hz)))
246 elif 'Marker word' in ik:
247 print("{:<30}: {}".format(ik, hex(iv)))
248 elif ik == 'Detector':
249 print("{:<30}: {}".format(ik, DETECTOR[iv]))
250 elif ik == 'Source ID subsystem':
251 print("{:<30}: {}".format(ik, SUBSYSTEM[iv]))
252 else:
253 print("{:<30}: {}".format(ik, iv))
254 return
255
256
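
The sketch below feeds print_header_dict() a small hand-built dictionary with made-up values to show the formatting rules above: keys containing "time", "begin" or "end" get the decoded timestamp appended, 'Marker word' is printed in hex, and a 'Detector' code is translated through the DETECTOR table. It assumes hdf5_dump is importable.

import hdf5_dump

hdict = {'Marker word': 0x33334444,
         'Detector': 3,
         'Trigger timestamp': 1_700_000_000 * 62_500_000}   # made-up tick count
hdf5_dump.print_header_dict(hdict, 62500000.0)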

◆ print_trigger_record_header()

hdf5_dump.print_trigger_record_header (data_array, clock_speed_hz, k_list_components)

Definition at line 257 of file hdf5_dump.py.

257def print_trigger_record_header(data_array, clock_speed_hz, k_list_components):
258 print_header_dict(unpack_header(data_array, "TriggerRecord Header", TRIGGER_RECORD_HEADER_VERSION), clock_speed_hz)
259
260 if k_list_components:
261 comp_keys = DATA_FORMAT["Component Request"]["keys"]
262 comp_unpack_string = DATA_FORMAT["Component Request"]["unpack string"]
263 for i_values in struct.iter_unpack(comp_unpack_string, data_array[64:]):
264 i_comp = dict(zip(comp_keys, i_values))
265 print(80*'-')
266 print_header_dict(i_comp, clock_speed_hz)
267 return
268
269
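
The component-request loop above relies on struct.iter_unpack, which walks a buffer in fixed-size strides. The real unpack string and key names come from DATA_FORMAT["Component Request"] (defined at line 29 and not reproduced on this page), so the stand-alone sketch below uses a toy format and placeholder key names purely to illustrate the pattern.

import struct

comp_keys = ['toy begin time', 'toy end time']   # placeholder key names
buffer = struct.pack('<2Q', 1000, 2000) + struct.pack('<2Q', 3000, 4000)

for i_values in struct.iter_unpack('<2Q', buffer):
    print(dict(zip(comp_keys, i_values)))
# {'toy begin time': 1000, 'toy end time': 2000}
# {'toy begin time': 3000, 'toy end time': 4000}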

◆ tick_to_timestamp()

hdf5_dump.tick_to_timestamp (ticks, clock_speed_hz)

Definition at line 221 of file hdf5_dump.py.

221def tick_to_timestamp(ticks, clock_speed_hz):
222 ns = float(ticks)/clock_speed_hz
223 if ns < 3000000000:
224 return datetime.datetime.fromtimestamp(ns)
225 else:
226 return "InvalidDateString"
227
228
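
Dividing a tick count by the clock frequency in Hz yields seconds since the Unix epoch, which is what datetime.datetime.fromtimestamp expects (the intermediate variable is named ns but holds seconds). A quick check at the default 62.5 MHz clock, with made-up tick values and assuming hdf5_dump is importable:

import hdf5_dump

ticks = 106_250_000_000_000_000                          # 1.7e9 seconds at 62.5 MHz
print(hdf5_dump.tick_to_timestamp(ticks, 62500000.0))    # a datetime in November 2023 (local time)
print(hdf5_dump.tick_to_timestamp(10**19, 62500000.0))   # 'InvalidDateString' (out of range)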

◆ unpack_header()

hdf5_dump.unpack_header (data_array, entry_type, required_version = 0)

Definition at line 229 of file hdf5_dump.py.

229def unpack_header(data_array, entry_type, required_version=0):
230 values = struct.unpack(DATA_FORMAT[entry_type]["unpack string"],
231 data_array[:DATA_FORMAT[entry_type]["size"]])
232 if required_version > 0 and len(values) >= 2 and values[1] != required_version:
233 raise ValueError(f"Invalid {entry_type} format version: expected {required_version} and found {values[1]}")
234 header = dict(zip(DATA_FORMAT[entry_type]["keys"], values))
235 return header
236
237
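
unpack_header() drives struct.unpack from the DATA_FORMAT table (defined at line 29 and not reproduced on this page) and zips the resulting tuple with the matching key names; the second unpacked value is what the required_version check compares against. The self-contained sketch below mimics that mechanism with a toy two-field format rather than a real DATA_FORMAT entry.

import struct

toy_entry = {'unpack string': '<2I', 'size': 8,
             'keys': ['Marker word', 'Version']}       # toy stand-in for DATA_FORMAT[entry_type]
raw = struct.pack('<2I', 0x33334444, 5)                # hypothetical header bytes

values = struct.unpack(toy_entry['unpack string'], raw[:toy_entry['size']])
header = dict(zip(toy_entry['keys'], values))
print(hex(header['Marker word']), header['Version'])   # 0x33334444 5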

Variable Documentation

◆ DATA_FORMAT

dict hdf5_dump.DATA_FORMAT

Definition at line 29 of file hdf5_dump.py.

◆ DETECTOR

dict hdf5_dump.DETECTOR
Initial value:
1= {0: 'Unknown', 1: 'DAQ', 2: 'HD_PDS', 3: 'HD_TPC',
2 4: 'HD_CRT', 8: 'VD_CathodePDS', 9: 'VD_MembranePDS',
3 10: 'VD_BottomTPC', 11: 'VD_TopTPC',
4 32: 'NDLAr_TPC', 33: 'NDLAr_PDS', 34: 'ND_GAr'}

Definition at line 20 of file hdf5_dump.py.

◆ FILELAYOUT_MAX_VERSION

int hdf5_dump.FILELAYOUT_MAX_VERSION = 7

Definition at line 12 of file hdf5_dump.py.

◆ FILELAYOUT_MIN_VERSION

int hdf5_dump.FILELAYOUT_MIN_VERSION = 4

Definition at line 11 of file hdf5_dump.py.

◆ FRAGMENT_HEADER_VERSION

int hdf5_dump.FRAGMENT_HEADER_VERSION = 5

Definition at line 16 of file hdf5_dump.py.

◆ SUBSYSTEM

dict hdf5_dump.SUBSYSTEM
Initial value:
1= {0: 'Unknown', 1: 'DetectorReadout', 2: 'HwSignalsInterface',
2 3: 'Trigger', 4: 'TRBuilder'}

Definition at line 26 of file hdf5_dump.py.
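
DETECTOR and SUBSYSTEM translate the numeric codes stored in headers into readable names, which is how print_header_dict() renders the 'Detector' and 'Source ID subsystem' fields. Assuming hdf5_dump is importable:

import hdf5_dump

print(hdf5_dump.DETECTOR[3])     # 'HD_TPC'
print(hdf5_dump.SUBSYSTEM[1])    # 'DetectorReadout'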

◆ TIME_SLICE_HEADER_VERSION

int hdf5_dump.TIME_SLICE_HEADER_VERSION = 2

Definition at line 17 of file hdf5_dump.py.

◆ TRIGGER_RECORD_HEADER_VERSION

int hdf5_dump.TRIGGER_RECORD_HEADER_VERSION = 4

Definition at line 15 of file hdf5_dump.py.