DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
hdf5_dump.DAQDataFile Class Reference

Classes

class  Record
 

Public Member Functions

 __init__ (self, name)
 
 __del__ (self)
 
 set_clock_speed_hz (self, k_clock_speed_hz)
 
 convert_to_binary (self, binary_file, k_nrecords)
 
 printout (self, k_header_type, k_nrecords, k_list_components=False)
 
 check_fragments (self, k_nrecords)
 

Public Attributes

 name = name
 
 h5file = h5py.File(self.name, 'r')
 
str record_type = 'TriggerRecord'
 
float clock_speed_hz = 50000000.0
 
list records = []
 
int records = k_nrecords and k_nrecords > 0:
 

Detailed Description

Definition at line 62 of file hdf5_dump.py.

Constructor & Destructor Documentation

◆ __init__()

hdf5_dump.DAQDataFile.__init__ ( self,
name )

Definition at line 63 of file hdf5_dump.py.

63 def __init__(self, name):
64 self.name = name
65 if os.path.exists(self.name):
66 try:
67 self.h5file = h5py.File(self.name, 'r')
68 except OSError:
69 sys.exit(f"ERROR: file \"{self.name}\" couldn't be opened; is it an HDF5 file?")
70 else:
71 sys.exit(f"ERROR: HDF5 file \"{self.name}\" is not found!")
72 # Assume HDf5 files without file attributes field "record_type"
73 # are old data files which only contain "TriggerRecord" data.
74 self.record_type = 'TriggerRecord'
75 self.clock_speed_hz = 50000000.0
76 self.records = []
77 observed_filelayout_version = self.h5file.attrs['filelayout_version']
78 if 'filelayout_version' in self.h5file.attrs.keys() and \
79 observed_filelayout_version >= FILELAYOUT_MIN_VERSION and \
80 observed_filelayout_version <= FILELAYOUT_MAX_VERSION:
81 print(f"INFO: input file \"{self.name}\" matches the supported file layout versions: {FILELAYOUT_MIN_VERSION} <= {observed_filelayout_version} <= {FILELAYOUT_MAX_VERSION}")
82 else:
83 sys.exit(f"ERROR: this script expects a file layout version between {FILELAYOUT_MIN_VERSION} and {FILELAYOUT_MAX_VERSION} but this wasn't confirmed in the HDF5 file \"{self.name}\", version={observed_filelayout_version}")
84 if 'record_type' in self.h5file.attrs.keys():
85 self.record_type = self.h5file.attrs['record_type']
86 for i in self.h5file.keys():
87 record = self.Record()
88 record.path = i
89 self.h5file[i].visititems(record)
90 self.records.append(record)
91

◆ __del__()

hdf5_dump.DAQDataFile.__del__ ( self)

Definition at line 92 of file hdf5_dump.py.

92 def __del__(self):
93 try:
94 self.h5file.close()
95 except:
96 pass # OK if the file was never opened
97

Member Function Documentation

◆ check_fragments()

hdf5_dump.DAQDataFile.check_fragments ( self,
k_nrecords )

Definition at line 156 of file hdf5_dump.py.

def check_fragments(self, k_nrecords):
    """Report expected vs. actual fragment counts for each trigger record.

    Only meaningful for TriggerRecord data; for other record types a short
    notice is printed and nothing else happens. A positive *k_nrecords*
    caps how many records are inspected. Raises ValueError when a header
    or fragment version does not match the supported format version.
    """
    if self.record_type != "TriggerRecord":
        print("Check fragments only works on TriggerRecord data.")
        return
    report = []
    for count, rec in enumerate(self.records):
        # A non-positive k_nrecords means "no limit".
        if 0 < k_nrecords <= count:
            break
        header_bytes = bytearray(self.h5file[rec.header][:])
        (trh_version, ) = struct.unpack('<I', header_bytes[4:8])
        if trh_version != TRIGGER_RECORD_HEADER_VERSION:
            raise ValueError(f"Invalid TriggerRecord Header format version: expected {TRIGGER_RECORD_HEADER_VERSION} and found {trh_version}")
        # Three 8-byte fields; the middle one is unused here.
        trig_num, _, n_expected = struct.unpack('<3Q', header_bytes[8:32])
        (seq_num, ) = struct.unpack('<H', header_bytes[48:50])
        n_actual = len(rec.fragments)
        empty_frag_count = 0
        for frag in rec.fragments:
            frag_bytes = bytearray(self.h5file[frag][:])
            (frag_version, ) = struct.unpack('<I', frag_bytes[4:8])
            if frag_version != FRAGMENT_HEADER_VERSION:
                raise ValueError(f"Invalid Fragment Header format version: expected {FRAGMENT_HEADER_VERSION} and found {frag_version}")
            (frag_size, ) = struct.unpack('<Q', frag_bytes[8:16])
            # A fragment of 72 bytes or fewer is header-only, i.e. empty.
            if frag_size <= 72:
                empty_frag_count += 1
        report.append((trig_num, seq_num, n_expected, n_actual,
                       n_actual - n_expected, empty_frag_count))
    print("{:-^80}".format("Column Definitions"))
    print("i: Trigger record number;")
    print("s: Sequence number;")
    print("N_frag_exp: expected no. of fragments stored in header;")
    print("N_frag_act: no. of fragments written in trigger record;")
    print("N_diff: N_frag_act - N_frag_exp")
    print("N_frag_empty: no. of empty fragments (size <= 72)")
    print("{:-^80}".format("Column Definitions"))
    print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(
        "i", "s", "N_frag_exp", "N_frag_act", "N_diff", "N_frag_empty"))
    for row in report:
        print("{:^10}{:^10}{:^15}{:^15}{:^10}{:^12}".format(*row))
    return

◆ convert_to_binary()

hdf5_dump.DAQDataFile.convert_to_binary ( self,
binary_file,
k_nrecords )

Definition at line 101 of file hdf5_dump.py.

def convert_to_binary(self, binary_file, k_nrecords):
    """Dump raw record bytes (header first, then each fragment) to a file.

    Parameters:
        binary_file: destination path, opened in 'wb' mode.
        k_nrecords: positive value caps how many records are written;
            zero or negative writes every record.
    """
    with open(binary_file, 'wb') as out:
        for count, rec in enumerate(self.records):
            # Stop once the requested number of records has been written.
            if 0 < k_nrecords <= count:
                break
            out.write(bytearray(self.h5file[rec.header][:]))
            for frag_path in rec.fragments:
                out.write(bytearray(self.h5file[frag_path][:]))
    return

◆ printout()

hdf5_dump.DAQDataFile.printout ( self,
k_header_type,
k_nrecords,
k_list_components = False )

Definition at line 117 of file hdf5_dump.py.

def printout(self, k_header_type, k_nrecords, k_list_components=False):
    """Pretty-print file/record attributes and decoded headers to stdout.

    Parameters:
        k_header_type: collection of selectors — 'attributes', 'header',
            'fragment', 'both' (header + fragment) and 'all'.
        k_nrecords: positive value caps how many records are shown;
            zero or negative shows every record.
        k_list_components: forwarded to print_header to also list the
            record's components.
    """
    selected = set(k_header_type)
    want_attrs = not {"attributes", "all"}.isdisjoint(selected)
    want_header = not {"header", "both", "all"}.isdisjoint(selected)
    want_fragment = not {"fragment", "both", "all"}.isdisjoint(selected)
    if want_attrs:
        print(" File Attributes ".center(80, '='))
        for key in self.h5file.attrs.keys():
            print("{:<30}: {}".format(key, self.h5file.attrs[key]))
    for count, rec in enumerate(self.records):
        if 0 < k_nrecords <= count:
            break
        if want_attrs:
            print(" Trigger Record Attributes ".center(80, '='))
            for key in self.h5file[rec.path].attrs.keys():
                print("{:<30}: {}".format(key, self.h5file[rec.path].attrs[key]))
        if want_header:
            dset = self.h5file[rec.header]
            payload = bytearray(dset[:])
            print(f" {self.record_type} Header ".center(80, '='))
            print('{:<30}:\t{}'.format("Path", rec.path))
            print('{:<30}:\t{}'.format("Size", dset.shape))
            print('{:<30}:\t{}'.format("Data type", dset.dtype))
            print_header(payload, self.record_type, self.clock_speed_hz,
                         k_list_components)
        if want_fragment:
            for frag_path in rec.fragments:
                dset = self.h5file[frag_path]
                payload = bytearray(dset[:])
                print(" Fragment Header ".center(80, '-'))
                print('{:<30}:\t{}'.format("Path", frag_path))
                print('{:<30}:\t{}'.format("Size", dset.shape))
                print('{:<30}:\t{}'.format("Data type", dset.dtype))
                print_fragment_header(payload, self.clock_speed_hz)
    return

◆ set_clock_speed_hz()

hdf5_dump.DAQDataFile.set_clock_speed_hz ( self,
k_clock_speed_hz )

Definition at line 98 of file hdf5_dump.py.

def set_clock_speed_hz(self, k_clock_speed_hz):
    """Override the clock frequency (Hz) used to convert timestamps."""
    self.clock_speed_hz = k_clock_speed_hz

Member Data Documentation

◆ clock_speed_hz

hdf5_dump.DAQDataFile.clock_speed_hz = 50000000.0

Definition at line 75 of file hdf5_dump.py.

◆ h5file

hdf5_dump.DAQDataFile.h5file = h5py.File(self.name, 'r')

Definition at line 67 of file hdf5_dump.py.

◆ name

hdf5_dump.DAQDataFile.name = name

Definition at line 64 of file hdf5_dump.py.

◆ record_type

hdf5_dump.DAQDataFile.record_type = 'TriggerRecord'

Definition at line 74 of file hdf5_dump.py.

◆ records [1/2]

list hdf5_dump.DAQDataFile.records = []

Definition at line 76 of file hdf5_dump.py.

◆ records [2/2]

int hdf5_dump.DAQDataFile.records = k_nrecords and k_nrecords > 0:

Definition at line 104 of file hdf5_dump.py.


The documentation for this class was generated from the following file: