import argparse
import datetime
import os
import struct
import sys

import h5py

FILELAYOUT_MIN_VERSION = 4
FILELAYOUT_MAX_VERSION = 7

TRIGGER_RECORD_HEADER_VERSION = 4
FRAGMENT_HEADER_VERSION = 5
TIME_SLICE_HEADER_VERSION = 2
DETECTOR = {0: 'Unknown', 1: 'DAQ', 2: 'HD_PDS', 3: 'HD_TPC',
            4: 'HD_CRT', 8: 'VD_CathodePDS', 9: 'VD_MembranePDS',
            10: 'VD_BottomTPC', 11: 'VD_TopTPC',
            32: 'NDLAr_TPC', 33: 'NDLAr_PDS', 34: 'ND_GAr'}
SUBSYSTEM = {0: 'Unknown', 1: 'DetectorReadout', 2: 'HwSignalsInterface',
             3: 'Trigger', 4: 'TRBuilder'}
32 "keys": [
'Marker word',
'Version',
'TimeSlice number',
33 'Run number',
"Padding",
34 'Source ID version',
'Source ID subsystem',
'Source ID'],
36 "unpack string":
'<2IQ2I2HI'
39 "TriggerRecord Header": {
40 "keys": [
'Marker word',
'Version',
'Trigger number',
41 'Trigger timestamp',
'No. of requested components',
'Run number',
42 'Error bits',
'Trigger type',
'Sequence number',
43 'Max sequence num',
'Padding',
44 'Source ID version',
'Source ID subsystem',
'Source ID'],
46 "unpack string":
'<2I3Q2IQ2HI2HI'
50 "keys": [
'Marker word',
'Version',
'Fragment size',
'Trigger number',
51 'Trigger timestamp',
'Window begin',
'Window end',
'Run number',
52 'Error bits',
'Fragment type',
'Sequence number',
54 'Source ID version',
'Source ID subsystem',
'Source ID'],
56 "unpack string":
'<2I5Q3I4HI'
60 "keys": [
'Component request version',
'Padding',
61 'Source ID version',
'Source ID subsystem',
'Source ID',
62 'Begin time',
'End time'],
64 "unpack string":
"<2I2HI2Q"
class DAQDataFile:
    def __init__(self, name):
        self.name = name
        self.records = []
        self.record_type = None
        if os.path.exists(self.name):
            try:
                self.h5file = h5py.File(self.name, 'r')
            except OSError:
                sys.exit(f"ERROR: file \"{self.name}\" couldn't be opened; is it an HDF5 file?")
        else:
            sys.exit(f"ERROR: HDF5 file \"{self.name}\" is not found!")
        # Use .get() so a file without a 'filelayout_version' attribute fails
        # with the error message below instead of a KeyError.
        observed_filelayout_version = self.h5file.attrs.get('filelayout_version')
        if observed_filelayout_version is not None and \
                FILELAYOUT_MIN_VERSION <= observed_filelayout_version <= FILELAYOUT_MAX_VERSION:
            print(f"INFO: input file \"{self.name}\" matches the supported file layout versions: {FILELAYOUT_MIN_VERSION} <= {observed_filelayout_version} <= {FILELAYOUT_MAX_VERSION}")
        else:
            sys.exit(f"ERROR: this script expects a file layout version between {FILELAYOUT_MIN_VERSION} and {FILELAYOUT_MAX_VERSION} but this wasn't confirmed in the HDF5 file \"{self.name}\", version={observed_filelayout_version}")
        if 'record_type' in self.h5file.attrs.keys():
            self.record_type = self.h5file.attrs['record_type']
        for i in self.h5file.keys():
            record = Record(i)
            self.h5file[i].visititems(record)
            self.records.append(record)
    def convert_to_binary(self, binary_file, k_nrecords):
        with open(binary_file, 'wb') as bf:
            for n, i in enumerate(self.records):
                if n >= k_nrecords and k_nrecords > 0:
                    break
                dset = self.h5file[i.header]
                idata_array = bytearray(dset[:])
                bf.write(idata_array)
                for j in i.fragments:
                    dset = self.h5file[j]
                    jdata_array = bytearray(dset[:])
                    bf.write(jdata_array)
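
    # A usage sketch (the file names here are hypothetical): the binary output
    # is just the record bytes concatenated, so for a TriggerRecord file its
    # first 64 bytes can be decoded again with unpack_header() below.
    #
    #     h5 = DAQDataFile('swtest_run000001.hdf5')
    #     h5.convert_to_binary('record.bin', 1)
    #     with open('record.bin', 'rb') as bf:
    #         raw = bf.read(DATA_FORMAT["TriggerRecord Header"]["size"])
    #     print(unpack_header(raw, "TriggerRecord Header"))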
    def printout(self, k_header_type, k_nrecords, k_list_components=False):
        k_header_type = set(k_header_type)
        if not {"attributes", "all"}.isdisjoint(k_header_type):
            banner_str = " File Attributes "
            print(banner_str.center(80, '='))
            for k in self.h5file.attrs.keys():
                print("{:<30}: {}".format(k, self.h5file.attrs[k]))
        for n, i in enumerate(self.records):
            if n >= k_nrecords and k_nrecords > 0:
                break
            if not {"attributes", "all"}.isdisjoint(k_header_type):
                banner_str = " Trigger Record Attributes "
                print(banner_str.center(80, '='))
                for k in self.h5file[i.path].attrs.keys():
                    print("{:<30}: {}".format(k, self.h5file[i.path].attrs[k]))
            if not {"header", "both", "all"}.isdisjoint(k_header_type):
                dset = self.h5file[i.header]
                data_array = bytearray(dset[:])
                banner_str = f" {self.record_type} Header "
                print(banner_str.center(80, '='))
                print('{:<30}:\t{}'.format("Path", i.path))
                print('{:<30}:\t{}'.format("Size", dset.shape))
                print('{:<30}:\t{}'.format("Data type", dset.dtype))
                print_header(data_array, self.record_type,
                             self.clock_speed_hz, k_list_components)
            if not {"fragment", "both", "all"}.isdisjoint(k_header_type):
                for j in i.fragments:
                    dset = self.h5file[j]
                    data_array = bytearray(dset[:])
                    banner_str = " Fragment Header "
                    print(banner_str.center(80, '-'))
                    print('{:<30}:\t{}'.format("Path", j))
                    print('{:<30}:\t{}'.format("Size", dset.shape))
                    print('{:<30}:\t{}'.format("Data type", dset.dtype))
                    print_fragment_header(data_array, self.clock_speed_hz)
    def set_clock_speed_hz(self, k_clock_speed_hz):
        self.clock_speed_hz = k_clock_speed_hz

    def check_fragments(self, k_nrecords):
        if self.record_type != "TriggerRecord":
            print("Check fragments only works on TriggerRecord data.")
            return
        report = []
        for n, i in enumerate(self.records):
            if n >= k_nrecords and k_nrecords > 0:
                break
            dset = self.h5file[i.header]
            data_array = bytearray(dset[:])
            (trh_version, ) = struct.unpack('<I', data_array[4:8])
            if trh_version != TRIGGER_RECORD_HEADER_VERSION:
                raise ValueError(f"Invalid TriggerRecord Header format version: expected {TRIGGER_RECORD_HEADER_VERSION} and found {trh_version}")
            # Trigger number, trigger timestamp, no. of requested components:
            (h, j, k) = struct.unpack('<3Q', data_array[8:32])
            # Sequence number:
            (s, ) = struct.unpack('<H', data_array[48:50])
            nf = len(i.fragments)
            empty_frag_count = 0
            for frag in i.fragments:
                frag_dset = self.h5file[frag]
                frag_data = bytearray(frag_dset[:])
                (frag_version, ) = struct.unpack('<I', frag_data[4:8])
                if frag_version != FRAGMENT_HEADER_VERSION:
                    raise ValueError(f"Invalid Fragment Header format version: expected {FRAGMENT_HEADER_VERSION} and found {frag_version}")
                (frag_size, ) = struct.unpack('<Q', frag_data[8:16])
                if frag_size <= 72:  # a 72-byte fragment is header-only
                    empty_frag_count += 1
            report.append((h, s, k, nf, nf - k, empty_frag_count))
        print("{:-^80}".format("Column Definitions"))
        print("i: Trigger record number;")
        print("s: Sequence number;")
        print("N_frag_exp: expected no. of fragments stored in header;")
        print("N_frag_act: no. of fragments written in trigger record;")
        print("N_diff: N_frag_act - N_frag_exp")
        print("N_frag_empty: no. of empty fragments (size <= 72)")
        print("{:-^80}".format("Column Definitions"))
        print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(
            "i", "s", "N_frag_exp", "N_frag_act", "N_diff", "N_frag_empty"))
        for i in range(len(report)):
            print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(*report[i]))
class Record:
    """Collects the header and fragment dataset paths of one record group."""

    def __init__(self, path):
        self.path = path
        self.header = None
        self.fragments = []

    def __call__(self, name, dset):
        # visititems() callback: the dataset written by the TR_Builder is the
        # record header; every other dataset is a fragment payload.
        if isinstance(dset, h5py.Dataset):
            if "TR_Builder" in name:
                self.header = dset.name
            else:
                self.fragments.append(dset.name)
def tick_to_timestamp(ticks, clock_speed_hz):
    # A tick count divided by the clock speed in Hz gives seconds since the
    # epoch; out-of-range values yield a placeholder string instead.
    seconds = float(ticks) / clock_speed_hz
    try:
        return datetime.datetime.fromtimestamp(seconds)
    except (ValueError, OSError, OverflowError):
        return "InvalidDateString"
def unpack_header(data_array, entry_type, required_version=0):
    values = struct.unpack(DATA_FORMAT[entry_type]["unpack string"],
                           data_array[:DATA_FORMAT[entry_type]["size"]])
    if required_version > 0 and len(values) >= 2 and \
            values[1] != required_version:
        raise ValueError(f"Invalid {entry_type} format version: expected {required_version} and found {values[1]}")
    header = dict(zip(DATA_FORMAT[entry_type]["keys"], values))
    return header
def print_header_dict(hdict, clock_speed_hz):
    filtered_list = ['Padding', 'Source ID version',
                     'Component request version']
    for ik, iv in hdict.items():
        if any(map(ik.__contains__, filtered_list)):
            continue
        elif "time" in ik or "begin" in ik or "end" in ik:
            print("{:<30}: {} ({})".format(
                ik, iv, tick_to_timestamp(iv, clock_speed_hz)))
        elif 'Marker word' in ik:
            print("{:<30}: {}".format(ik, hex(iv)))
        elif ik == 'Detector':
            print("{:<30}: {}".format(ik, DETECTOR[iv]))
        elif ik == 'Source ID subsystem':
            print("{:<30}: {}".format(ik, SUBSYSTEM[iv]))
        else:
            print("{:<30}: {}".format(ik, iv))
def print_fragment_header(data_array, clock_speed_hz):
    frh = unpack_header(data_array, "Fragment Header",
                        FRAGMENT_HEADER_VERSION)
    print_header_dict(frh, clock_speed_hz)


def print_trigger_record_header(data_array, clock_speed_hz, k_list_components):
    trh = unpack_header(data_array, "TriggerRecord Header",
                        TRIGGER_RECORD_HEADER_VERSION)
    print_header_dict(trh, clock_speed_hz)
    if k_list_components:
        comp_keys = DATA_FORMAT["Component Request"]["keys"]
        comp_unpack_string = DATA_FORMAT["Component Request"]["unpack string"]
        # Component requests are packed back to back after the 64-byte header.
        for i_values in struct.iter_unpack(comp_unpack_string,
                                           data_array[64:]):
            i_comp = dict(zip(comp_keys, i_values))
            print_header_dict(i_comp, clock_speed_hz)
def print_header(data_array, record_type, clock_speed_hz, k_list_components):
    if record_type == "TriggerRecord":
        print_trigger_record_header(data_array, clock_speed_hz,
                                    k_list_components)
    elif record_type == "TimeSlice":
        tsh = unpack_header(data_array, "TimeSlice Header",
                            TIME_SLICE_HEADER_VERSION)
        print_header_dict(tsh, clock_speed_hz)
    else:
        print(f"Error: Record Type {record_type} is not supported.")
def parse_args():
    parser = argparse.ArgumentParser(
        description='Python script to parse DUNE-DAQ HDF5 output files.')
    parser.add_argument('-f', '--file-name',
                        help='path to HDF5 file',
                        required=True)
    parser.add_argument('-b', '--binary-output',
                        help='convert to the specified binary file')
    parser.add_argument('-p', '--print-out', action='append',
                        choices=['header', 'fragment', 'both', 'attributes',
                                 'all'],
                        help='''select which part of data to be displayed, this
                        option can be repeated multiple times, "-p both" is
                        equivalent to "-p header -p fragment", "-p all" is
                        equivalent to "-p attributes -p header -p fragment"''')
    parser.add_argument('-c', '--check-fragments',
                        help='''check if fragments written in trigger record
                        matches expected number in trigger record header''',
                        action='store_true')
    parser.add_argument('-l', '--list-components',
                        help='''list components in trigger record header, used
                        with "--print-out header" or "--print-out both", not
                        applicable to TimeSlice data''', action='store_true')
    parser.add_argument('-n', '--num-of-records', type=int,
                        help='specify number of records to be parsed',
                        default=0)
    parser.add_argument('-s', '--speed-of-clock', type=float,
                        help='''specify clock speed in Hz, default is
                        62500000.0 (62.5MHz)''',
                        default=62500000.0)
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s 2.0')
    return parser.parse_args()
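
# Typical invocations (a sketch; the script and input file names here are
# assumptions):
#
#     python3 hdf5_dump.py -f swtest_run000001.hdf5 -p header -n 2
#     python3 hdf5_dump.py -f swtest_run000001.hdf5 -p both -l
#     python3 hdf5_dump.py -f swtest_run000001.hdf5 -c
#     python3 hdf5_dump.py -f swtest_run000001.hdf5 -b record.bin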
def main():
    args = parse_args()
    if args.print_out is None and args.check_fragments is False and \
            args.binary_output is None:
        print("Error: use at least one of the following options:")
        print("    -p, --print-out {header, fragment, both, attributes, all}")
        print("    -c, --check-fragments")
        print("    -b, --binary-output")
        return

    h5 = DAQDataFile(args.file_name)

    if args.binary_output is not None:
        h5.convert_to_binary(args.binary_output, args.num_of_records)
    if args.print_out is not None:
        h5.set_clock_speed_hz(args.speed_of_clock)
        h5.printout(args.print_out, args.num_of_records, args.list_components)
    if args.check_fragments:
        h5.check_fragments(args.num_of_records)


if __name__ == "__main__":
    main()
# Function and method index:
#     Record.__call__(self, name, dset)
#     DAQDataFile.printout(self, k_header_type, k_nrecords, k_list_components=False)
#     DAQDataFile.convert_to_binary(self, binary_file, k_nrecords)
#     DAQDataFile.set_clock_speed_hz(self, k_clock_speed_hz)
#     DAQDataFile.check_fragments(self, k_nrecords)
#     unpack_header(data_array, entry_type, required_version=0)
#     print_header_dict(hdict, clock_speed_hz)
#     print_header(data_array, record_type, clock_speed_hz, k_list_components)
#     tick_to_timestamp(ticks, clock_speed_hz)
#     print_fragment_header(data_array, clock_speed_hz)
#     print_trigger_record_header(data_array, clock_speed_hz, k_list_components)