import argparse
import datetime
import os
import struct
import sys

import h5py

import daqdataformats
import detdataformats

FILELAYOUT_MIN_VERSION = 4
FILELAYOUT_MAX_VERSION = 7

TRIGGER_RECORD_HEADER_VERSION = 5
FRAGMENT_HEADER_VERSION = 6
TIME_SLICE_HEADER_VERSION = 2
25 "keys": [
'Marker word',
'Version',
'TimeSlice number',
26 'Run number',
"Padding",
27 'Source ID version',
'Source ID subsystem',
'Source ID'],
29 "unpack string":
'<2IQ2I2HI'
32 "TriggerRecord Header": {
33 "keys": [
'Marker word',
'Version',
'Trigger number',
34 'Trigger timestamp',
'No. of requested components',
'Run number',
35 'Status bits',
'Trigger type',
'Sequence number',
36 'Max sequence num',
'Padding',
37 'Source ID version',
'Source ID subsystem',
'Source ID'],
39 "unpack string":
'<2I3Q2IQ2HI2HI'
43 "keys": [
'Marker word',
'Version',
'Fragment size',
'Trigger number',
44 'Trigger timestamp',
'Window begin',
'Window end',
'Run number',
45 'Status bits',
'Fragment type',
'Sequence number',
47 'Source ID version',
'Source ID subsystem',
'Source ID'],
49 "unpack string":
'<2I5Q3I4HI'
53 "keys": [
'Component request version',
'Padding',
54 'Source ID version',
'Source ID subsystem',
'Source ID',
55 'Begin time',
'End time'],
57 "unpack string":
"<2I2HI2Q"
class DAQDataFile:

    def __init__(self, name):
        self.name = name
        self.records = []
        self.record_type = None
        self.clock_speed_hz = 0.0
        if os.path.exists(self.name):
            try:
                self.h5file = h5py.File(self.name, 'r')
            except OSError:
                sys.exit(f"ERROR: file \"{self.name}\" couldn't be opened; is it an HDF5 file?")
        else:
            sys.exit(f"ERROR: HDF5 file \"{self.name}\" is not found!")
        # Read the attribute with .get() so a missing attribute cannot raise
        # KeyError before the version range is checked.
        observed_filelayout_version = self.h5file.attrs.get('filelayout_version')
        if observed_filelayout_version is not None and \
                FILELAYOUT_MIN_VERSION <= observed_filelayout_version <= FILELAYOUT_MAX_VERSION:
            print(f"INFO: input file \"{self.name}\" matches the supported file layout versions: {FILELAYOUT_MIN_VERSION} <= {observed_filelayout_version} <= {FILELAYOUT_MAX_VERSION}")
        else:
            sys.exit(f"ERROR: this script expects a file layout version between {FILELAYOUT_MIN_VERSION} and {FILELAYOUT_MAX_VERSION} but this wasn't confirmed in the HDF5 file \"{self.name}\", version={observed_filelayout_version}")
        if 'record_type' in self.h5file.attrs.keys():
            self.record_type = self.h5file.attrs['record_type']
        # Walk every top-level group and collect its header and fragment
        # dataset paths with the Record visitor defined below.
        for i in self.h5file.keys():
            record = self.Record()
            record.path = i
            self.h5file[i].visititems(record)
            self.records.append(record)
    def convert_to_binary(self, binary_file, k_nrecords):
        with open(binary_file, 'wb') as bf:
            for n, i in enumerate(self.records):
                if n >= k_nrecords and k_nrecords > 0:
                    break
                dset = self.h5file[i.header]
                idata_array = bytearray(dset[:])
                bf.write(idata_array)
                for j in i.fragments:
                    dset = self.h5file[j]
                    jdata_array = bytearray(dset[:])
                    bf.write(jdata_array)
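    # Note: the binary file is a plain concatenation of each record's header
    # bytes followed by its fragment bytes, in file order; no extra framing
    # or index is written.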
    def printout(self, k_header_type, k_nrecords, k_list_components=False):
        k_header_type = set(k_header_type)
        if not {"attributes", "all"}.isdisjoint(k_header_type):
            banner_str = " File Attributes "
            print(banner_str.center(80, '='))
            for k in self.h5file.attrs.keys():
                print("{:<30}: {}".format(k, self.h5file.attrs[k]))
        for n, i in enumerate(self.records):
            if n >= k_nrecords and k_nrecords > 0:
                break
            if not {"attributes", "all"}.isdisjoint(k_header_type):
                banner_str = " Trigger Record Attributes "
                print(banner_str.center(80, '='))
                for k in self.h5file[i.path].attrs.keys():
                    print("{:<30}: {}".format(k, self.h5file[i.path].attrs[k]))
            if not {"header", "both", "all"}.isdisjoint(k_header_type):
                dset = self.h5file[i.header]
                data_array = bytearray(dset[:])
                banner_str = f" {self.record_type} Header "
                print(banner_str.center(80, '='))
                print('{:<30}:\t{}'.format("Path", i.path))
                print('{:<30}:\t{}'.format("Size", dset.shape))
                print('{:<30}:\t{}'.format("Data type", dset.dtype))
                print_header(data_array, self.record_type,
                             self.clock_speed_hz, k_list_components)
            if not {"fragment", "both", "all"}.isdisjoint(k_header_type):
                for j in i.fragments:
                    dset = self.h5file[j]
                    data_array = bytearray(dset[:])
                    banner_str = " Fragment Header "
                    print(banner_str.center(80, '-'))
                    print('{:<30}:\t{}'.format("Path", j))
                    print('{:<30}:\t{}'.format("Size", dset.shape))
                    print('{:<30}:\t{}'.format("Data type", dset.dtype))
                    print_fragment_header(data_array, self.clock_speed_hz)
    def check_fragments(self, k_nrecords):
        if self.record_type != "TriggerRecord":
            print("Check fragments only works on TriggerRecord data.")
            return
        report = []
        for n, i in enumerate(self.records):
            if n >= k_nrecords and k_nrecords > 0:
                break
            dset = self.h5file[i.header]
            data_array = bytearray(dset[:])
            (trh_version, ) = struct.unpack('<I', data_array[4:8])
            if trh_version != TRIGGER_RECORD_HEADER_VERSION:
                raise ValueError(f"Invalid TriggerRecord Header format version: expected {TRIGGER_RECORD_HEADER_VERSION} and found {trh_version}")
            # Trigger number, trigger timestamp, and the expected number of
            # requested components occupy bytes 8-32 of the header; the
            # sequence number is the 16-bit word at bytes 48-50.
            (h, j, k) = struct.unpack('<3Q', data_array[8:32])
            (s, ) = struct.unpack('<H', data_array[48:50])
            nf = len(i.fragments)
            empty_frag_count = 0
            for frag in i.fragments:
                frag_dset = self.h5file[frag]
                frag_data = bytearray(frag_dset[:])
                (frag_version, ) = struct.unpack('<I', frag_data[4:8])
                if frag_version != FRAGMENT_HEADER_VERSION:
                    raise ValueError(f"Invalid Fragment Header format version: expected {FRAGMENT_HEADER_VERSION} and found {frag_version}")
                (frag_size, ) = struct.unpack('<Q', frag_data[8:16])
                if frag_size <= 72:
                    empty_frag_count += 1
            report.append((h, s, k, nf, nf - k, empty_frag_count))
        print("{:-^80}".format("Column Definitions"))
        print("i: Trigger record number;")
        print("s: Sequence number;")
        print("N_frag_exp: expected no. of fragments stored in header;")
        print("N_frag_act: no. of fragments written in trigger record;")
        print("N_diff: N_frag_act - N_frag_exp")
        print("N_frag_empty: no. of empty fragments (size <= 72)")
        print("{:-^80}".format("Column Definitions"))
        print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(
            "i", "s", "N_frag_exp", "N_frag_act", "N_diff", "N_frag_empty"))
        for row in report:
            print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(*row))
    def set_clock_speed_hz(self, k_clock_speed_hz):
        self.clock_speed_hz = k_clock_speed_hz

    class Record:
        """Visitor for h5py's visititems(): collects the header dataset path
        and the fragment dataset paths of one record."""

        def __init__(self):
            self.path = None
            self.header = None
            self.fragments = []

        def __call__(self, name, dset):
            if isinstance(dset, h5py.Dataset):
                if "TR_Builder" in name:
                    self.header = dset.name
                else:
                    self.fragments.append(dset.name)
def tick_to_timestamp(ticks, clock_speed_hz):
    # Convert a timestamp in clock ticks to seconds since the Unix epoch.
    seconds = float(ticks) / clock_speed_hz
    try:
        return datetime.datetime.fromtimestamp(seconds)
    except (ValueError, OverflowError, OSError):
        return "InvalidDateString"
def unpack_header(data_array, entry_type, required_version=0):
    values = struct.unpack(DATA_FORMAT[entry_type]["unpack string"],
                           data_array[:DATA_FORMAT[entry_type]["size"]])
    if required_version > 0 and len(values) >= 2 and \
            values[1] != required_version:
        raise ValueError(f"Invalid {entry_type} format version: expected {required_version} and found {values[1]}")
    header = dict(zip(DATA_FORMAT[entry_type]["keys"], values))
    return header
def print_header_dict(hdict, clock_speed_hz):
    filtered_list = ['Padding', 'Source ID version',
                     'Component request version']
    for ik, iv in hdict.items():
        if any(map(ik.__contains__, filtered_list)):
            continue
        elif "time" in ik or "begin" in ik or "end" in ik:
            print("{:<30}: {} ({})".format(
                ik, iv, tick_to_timestamp(iv, clock_speed_hz)))
        elif ik in ('Marker word', 'Status bits'):
            print("{:<30}: {}".format(ik, hex(iv)))
        elif ik == 'Detector':
            subdet = detdataformats.DetID.Subdetector(iv)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print("{:<30}: {}".format(ik, det_name))
        elif ik == 'Source ID subsystem':
            subsys = daqdataformats.SourceID.Subsystem(iv)
            subsys_name = daqdataformats.SourceID.subsystem_to_string(subsys)
            print("{:<30}: {}".format(ik, subsys_name))
        else:
            print("{:<30}: {}".format(ik, iv))
def print_fragment_header(data_array, clock_speed_hz):
    print_header_dict(
        unpack_header(data_array, "Fragment Header", FRAGMENT_HEADER_VERSION),
        clock_speed_hz)


def print_trigger_record_header(data_array, clock_speed_hz,
                                k_list_components):
    print_header_dict(
        unpack_header(data_array, "TriggerRecord Header",
                      TRIGGER_RECORD_HEADER_VERSION),
        clock_speed_hz)
    if k_list_components:
        comp_keys = DATA_FORMAT["Component Request"]["keys"]
        comp_unpack_string = DATA_FORMAT["Component Request"]["unpack string"]
        # Component requests are packed back-to-back after the 64-byte
        # trigger record header.
        for i_values in struct.iter_unpack(comp_unpack_string,
                                           data_array[64:]):
            i_comp = dict(zip(comp_keys, i_values))
            print_header_dict(i_comp, clock_speed_hz)
def print_header(data_array, record_type, clock_speed_hz, k_list_components):
    if record_type == "TriggerRecord":
        print_trigger_record_header(data_array, clock_speed_hz,
                                    k_list_components)
    elif record_type == "TimeSlice":
        print_header_dict(
            unpack_header(data_array, "TimeSlice Header",
                          TIME_SLICE_HEADER_VERSION),
            clock_speed_hz)
    else:
        print(f"Error: Record Type {record_type} is not supported.")
def parse_args():
    parser = argparse.ArgumentParser(
        description='Python script to parse DUNE-DAQ HDF5 output files.')
    parser.add_argument('-f', '--file-name',
                        help='path to HDF5 file', required=True)
    parser.add_argument('-b', '--binary-output',
                        help='convert to the specified binary file')
    parser.add_argument('-p', '--print-out', action='append',
                        choices=['header', 'fragment', 'both', 'attributes',
                                 'all'],
                        help='''select which part of data to be displayed; this
                        option can be repeated multiple times, "-p both" is
                        equivalent to "-p header -p fragment", "-p all" is
                        equivalent to "-p attributes -p header -p fragment"''')
    parser.add_argument('-c', '--check-fragments', action='store_true',
                        help='''check if the number of fragments written in a
                        trigger record matches the expected number in the
                        trigger record header''')
    parser.add_argument('-l', '--list-components', action='store_true',
                        help='''list components in trigger record header, used
                        with "--print-out header" or "--print-out both", not
                        applicable to TimeSlice data''')
    parser.add_argument('-n', '--num-of-records', type=int, default=0,
                        help='specify number of records to be parsed')
    parser.add_argument('-s', '--speed-of-clock', type=float,
                        default=62500000.0,
                        help='''specify clock speed in Hz, default is
                        62500000.0 (62.5MHz)''')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s 2.0')
    return parser.parse_args()
def main(args):
    if args.print_out is None and args.check_fragments is False and \
            args.binary_output is None:
        print("Error: use at least one of the following options:")
        print("    -p, --print-out {header, fragment, both, attributes, all}")
        print("    -c, --check-fragments")
        print("    -b, --binary-output")
        sys.exit(1)
    h5 = DAQDataFile(args.file_name)

    if args.binary_output is not None:
        h5.convert_to_binary(args.binary_output, args.num_of_records)
    if args.print_out is not None:
        h5.set_clock_speed_hz(args.speed_of_clock)
        h5.printout(args.print_out, args.num_of_records, args.list_components)
    if args.check_fragments:
        h5.check_fragments(args.num_of_records)
if __name__ == "__main__":
    args = parse_args()
    main(args)
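# Example invocations (script and file names are illustrative, not confirmed
# by the source):
#   python3 hdf5_dump.py -f swtest_run000001_0000.hdf5 -p header
#   python3 hdf5_dump.py -f swtest_run000001_0000.hdf5 -p both -n 5 -l
#   python3 hdf5_dump.py -f swtest_run000001_0000.hdf5 -c
#   python3 hdf5_dump.py -f swtest_run000001_0000.hdf5 -b output.bin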