DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
hdf5_dump.DAQDataFile Class Reference

Classes

class  Record
 

Public Member Functions

 __init__ (self, name)
 
 __del__ (self)
 
 set_clock_speed_hz (self, k_clock_speed_hz)
 
 convert_to_binary (self, binary_file, k_nrecords)
 
 printout (self, k_header_type, k_nrecords, k_list_components=False)
 
 check_fragments (self, k_nrecords)
 

Public Attributes

 name = name
 
 h5file = h5py.File(self.name, 'r')
 
str record_type = 'TriggerRecord'
 
float clock_speed_hz = 50000000.0
 
list records = []
 
 records (spurious duplicate entry — auto-generated from the loop condition "if n >= k_nrecords and k_nrecords > 0:"; see "list records = []" above)
 

Detailed Description

Definition at line 69 of file hdf5_dump.py.

Constructor & Destructor Documentation

◆ __init__()

hdf5_dump.DAQDataFile.__init__ ( self,
name )

Definition at line 70 of file hdf5_dump.py.

70 def __init__(self, name):
71 self.name = name
72 if os.path.exists(self.name):
73 try:
74 self.h5file = h5py.File(self.name, 'r')
75 except OSError:
76 sys.exit(f"ERROR: file \"{self.name}\" couldn't be opened; is it an HDF5 file?")
77 else:
78 sys.exit(f"ERROR: HDF5 file \"{self.name}\" is not found!")
79 # Assume HDf5 files without file attributes field "record_type"
80 # are old data files which only contain "TriggerRecord" data.
81 self.record_type = 'TriggerRecord'
82 self.clock_speed_hz = 50000000.0
83 self.records = []
84 observed_filelayout_version = self.h5file.attrs['filelayout_version']
85 if 'filelayout_version' in self.h5file.attrs.keys() and \
86 observed_filelayout_version >= FILELAYOUT_MIN_VERSION and \
87 observed_filelayout_version <= FILELAYOUT_MAX_VERSION:
88 print(f"INFO: input file \"{self.name}\" matches the supported file layout versions: {FILELAYOUT_MIN_VERSION} <= {observed_filelayout_version} <= {FILELAYOUT_MAX_VERSION}")
89 else:
90 sys.exit(f"ERROR: this script expects a file layout version between {FILELAYOUT_MIN_VERSION} and {FILELAYOUT_MAX_VERSION} but this wasn't confirmed in the HDF5 file \"{self.name}\", version={observed_filelayout_version}")
91 if 'record_type' in self.h5file.attrs.keys():
92 self.record_type = self.h5file.attrs['record_type']
93 for i in self.h5file.keys():
94 record = self.Record()
95 record.path = i
96 self.h5file[i].visititems(record)
97 self.records.append(record)
98

◆ __del__()

hdf5_dump.DAQDataFile.__del__ ( self)

Definition at line 99 of file hdf5_dump.py.

99 def __del__(self):
100 try:
101 self.h5file.close()
102 except:
103 pass # OK if the file was never opened
104

Member Function Documentation

◆ check_fragments()

hdf5_dump.DAQDataFile.check_fragments ( self,
k_nrecords )

Definition at line 163 of file hdf5_dump.py.

def check_fragments(self, k_nrecords):
    """Cross-check each TriggerRecord header against its stored fragments.

    For at most k_nrecords records (all records when k_nrecords <= 0),
    prints one table row per record: trigger number, sequence number,
    expected fragment count (from the header), actual fragment count,
    their difference, and how many fragments are empty (size <= 72 bytes).

    Raises ValueError when a header or fragment format version does not
    match the versions this script understands.
    """
    if self.record_type != "TriggerRecord":
        print("Check fragments only works on TriggerRecord data.")
        return
    report = []
    for n, rec in enumerate(self.records):
        if 0 < k_nrecords <= n:
            break
        dset = self.h5file[rec.header]
        data_array = bytearray(dset[:])
        (trh_version, ) = struct.unpack('<I', data_array[4:8])
        if trh_version != TRIGGER_RECORD_HEADER_VERSION:
            raise ValueError(f"Invalid TriggerRecord Header format version: expected {TRIGGER_RECORD_HEADER_VERSION} and found {trh_version}")
        # Three 64-bit fields at bytes 8-32; the middle one is not used
        # by this check (original unpacked it into an unused variable).
        (trig_num, _, n_frag_exp) = struct.unpack('<3Q', data_array[8:32])
        (seq_num, ) = struct.unpack('<H', data_array[48:50])
        n_frag_act = len(rec.fragments)
        empty_frag_count = 0
        for frag in rec.fragments:
            frag_data = bytearray(self.h5file[frag][:])
            (frag_version, ) = struct.unpack('<I', frag_data[4:8])
            if frag_version != FRAGMENT_HEADER_VERSION:
                raise ValueError(f"Invalid Fragment Header format version: expected {FRAGMENT_HEADER_VERSION} and found {frag_version}")
            (frag_size, ) = struct.unpack('<Q', frag_data[8:16])
            if frag_size <= 72:
                empty_frag_count += 1
        report.append((trig_num, seq_num, n_frag_exp, n_frag_act,
                       n_frag_act - n_frag_exp, empty_frag_count))
    print("{:-^80}".format("Column Definitions"))
    print("i: Trigger record number;")
    print("s: Sequence number;")
    print("N_frag_exp: expected no. of fragments stored in header;")
    print("N_frag_act: no. of fragments written in trigger record;")
    print("N_diff: N_frag_act - N_frag_exp")
    print("N_frag_empty: no. of empty fragments (size <= 72)")
    print("{:-^80}".format("Column Definitions"))
    print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(
        "i", "s", "N_frag_exp", "N_frag_act", "N_diff", "N_frag_empty"))
    for row in report:
        print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(*row))
    return

◆ convert_to_binary()

hdf5_dump.DAQDataFile.convert_to_binary ( self,
binary_file,
k_nrecords )

Definition at line 108 of file hdf5_dump.py.

def convert_to_binary(self, binary_file, k_nrecords):
    """Dump raw record bytes (header first, then each fragment) to a file.

    Writes at most k_nrecords records when k_nrecords > 0, otherwise all
    of them, in the order they appear in self.records.
    """
    with open(binary_file, 'wb') as out:
        for count, record in enumerate(self.records):
            if 0 < k_nrecords <= count:
                break
            out.write(bytearray(self.h5file[record.header][:]))
            for frag_path in record.fragments:
                out.write(bytearray(self.h5file[frag_path][:]))
    return

◆ printout()

hdf5_dump.DAQDataFile.printout ( self,
k_header_type,
k_nrecords,
k_list_components = False )

Definition at line 124 of file hdf5_dump.py.

def printout(self, k_header_type, k_nrecords, k_list_components=False):
    """Print selected pieces of header information for records in the file.

    k_header_type is an iterable of selectors: "attributes" prints file
    and per-record HDF5 attributes, "header" record headers, "fragment"
    fragment headers, "both" headers and fragments, "all" everything.
    At most k_nrecords records are printed when k_nrecords > 0.
    """
    wanted = set(k_header_type)
    show_attrs = not wanted.isdisjoint({"attributes", "all"})
    show_header = not wanted.isdisjoint({"header", "both", "all"})
    show_fragments = not wanted.isdisjoint({"fragment", "both", "all"})
    if show_attrs:
        print(" File Attributes ".center(80, '='))
        for key in self.h5file.attrs.keys():
            print("{:<30}: {}".format(key, self.h5file.attrs[key]))
    for count, record in enumerate(self.records):
        if 0 < k_nrecords <= count:
            break
        if show_attrs:
            print(" Trigger Record Attributes ".center(80, '='))
            rec_attrs = self.h5file[record.path].attrs
            for key in rec_attrs.keys():
                print("{:<30}: {}".format(key, rec_attrs[key]))
        if show_header:
            dset = self.h5file[record.header]
            payload = bytearray(dset[:])
            print(f" {self.record_type} Header ".center(80, '='))
            print('{:<30}:\t{}'.format("Path", record.path))
            print('{:<30}:\t{}'.format("Size", dset.shape))
            print('{:<30}:\t{}'.format("Data type", dset.dtype))
            print_header(payload, self.record_type, self.clock_speed_hz,
                         k_list_components)
        if show_fragments:
            for frag_path in record.fragments:
                dset = self.h5file[frag_path]
                payload = bytearray(dset[:])
                print(" Fragment Header ".center(80, '-'))
                print('{:<30}:\t{}'.format("Path", frag_path))
                print('{:<30}:\t{}'.format("Size", dset.shape))
                print('{:<30}:\t{}'.format("Data type", dset.dtype))
                print_fragment_header(payload, self.clock_speed_hz)
    return

◆ set_clock_speed_hz()

hdf5_dump.DAQDataFile.set_clock_speed_hz ( self,
k_clock_speed_hz )

Definition at line 105 of file hdf5_dump.py.

def set_clock_speed_hz(self, k_clock_speed_hz):
    """Override the clock speed (Hz) used when converting timestamps."""
    self.clock_speed_hz = k_clock_speed_hz

Member Data Documentation

◆ clock_speed_hz

hdf5_dump.DAQDataFile.clock_speed_hz = 50000000.0

Definition at line 82 of file hdf5_dump.py.

◆ h5file

hdf5_dump.DAQDataFile.h5file = h5py.File(self.name, 'r')

Definition at line 74 of file hdf5_dump.py.

◆ name

hdf5_dump.DAQDataFile.name = name

Definition at line 71 of file hdf5_dump.py.

◆ record_type

hdf5_dump.DAQDataFile.record_type = 'TriggerRecord'

Definition at line 81 of file hdf5_dump.py.

◆ records [1/2]

list hdf5_dump.DAQDataFile.records = []

Definition at line 83 of file hdf5_dump.py.

◆ records [2/2]

hdf5_dump.DAQDataFile.records (spurious entry — auto-generated from the loop condition "if n >= k_nrecords and k_nrecords > 0:" at line 111; see records [1/2])

Definition at line 111 of file hdf5_dump.py.


The documentation for this class was generated from the following file: