DUNE-DAQ
DUNE Trigger and Data Acquisition software
TCReader.py
"""
Reader class for TC data.
"""
from .HDF5Reader import HDF5Reader

import daqdataformats  # noqa: F401 : Not used, but needed to recognize formats.
import trgdataformats

import numpy as np
from numpy.typing import NDArray


class TCReader(HDF5Reader):
    """
    Class that reads a given HDF5 data file and can
    process the TC fragments within.

    Loading fragments appends to :self.tc_data: and :self.ta_data:.
    NumPy dtypes of :self.tc_data: and :self.ta_data: are available
    as :TCReader.tc_dt: and :TCReader.ta_dt:.

    TC reading can print information relevant to the loading process
    by specifying the verbosity level: 0 for errors only, 1 for
    warnings, 2 for all information.
    """
    # TC data type
    tc_dt = np.dtype([
        ('algorithm', trgdataformats.TriggerCandidateData.Algorithm),
        ('detid', np.uint16),
        ('num_tas', np.uint64),  # Greedy
        ('time_candidate', np.uint64),
        ('time_end', np.uint64),
        ('time_start', np.uint64),
        ('type', trgdataformats.TriggerCandidateData.Type),
        ('version', np.uint16),
        ('trigger_number', np.uint64)
    ])

    # TA data type
    ta_dt = np.dtype([
        ('adc_integral', np.uint64),
        ('adc_peak', np.uint64),
        ('algorithm', trgdataformats.TriggerActivityData.Algorithm),
        ('channel_end', np.int32),
        ('channel_peak', np.int32),
        ('channel_start', np.int32),
        ('detid', np.uint16),
        ('time_activity', np.uint64),
        ('time_end', np.uint64),
        ('time_peak', np.uint64),
        ('time_start', np.uint64),
        ('type', trgdataformats.TriggerActivityData.Type),
        ('version', np.uint16)
    ])
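
    # Note: tc_dt and ta_dt are NumPy structured dtypes, so loaded data supports
    # field access by name, e.g. self.tc_data['time_candidate'] or
    # self.ta_data[0]['adc_integral'] (once at least one TC has been read).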

    def __init__(self, filename: str, verbosity: int = 0, batch_mode: bool = False) -> None:
        """
        Loads a given HDF5 file.

        Parameters:
            filename (str): HDF5 file to open.
            verbosity (int): Verbose level. 0: Only errors. 1: Warnings. 2: All.
            batch_mode (bool): Batch mode flag passed through to HDF5Reader.

        Returns nothing.
        """
        super().__init__(filename, verbosity, batch_mode)
        self.tc_data = np.array([], dtype=self.tc_dt)  # Will concatenate new TCs
        self.ta_data = []  # ta_data[i] will be a np.ndarray of TAs from the i-th TC
        return None

    def __getitem__(self, key: int | str) -> NDArray[tc_dt]:
        return self.tc_data[key]

    def __setitem__(self, key: int | str, value: NDArray[tc_dt]) -> None:
        self.tc_data[key] = value
        return

    def __len__(self) -> int:
        return len(self.tc_data)

    def _filter_fragment_paths(self) -> None:
        """ Filter the fragment paths for TCs. """
        fragment_paths = []

        # TC fragment paths contain their name in the path.
        for path in self._fragment_paths:
            if "Trigger_Candidate" in path:
                fragment_paths.append(path)

        self._fragment_paths = fragment_paths
        return None

    def read_fragment(self, fragment_path: str) -> NDArray:
        """
        Read from the given data fragment path.

        Returns a np.ndarray of the last TC that was read and appends
        all TCs in the fragment to :self.tc_data:.
        """
        if self._verbosity >= 2:
            print("="*60)
            print(f"INFO: Reading from the path\n{fragment_path}")

        fragment = self._h5_file.get_frag(fragment_path)
        fragment_data_size = fragment.get_data_size()
        trigger_number = fragment.get_trigger_number()

        if fragment_data_size == 0:  # Empty fragment
            self._num_empty += 1
            if self._verbosity >= 1:
                print(
                    self._FAIL_TEXT_COLOR  # Assumed color attribute from HDF5Reader.
                    + self._BOLD_TEXT
                    + "WARNING: Empty fragment."
                    + self._END_TEXT_COLOR
                )
                print("="*60)
            return np.array([], dtype=self.tc_dt)

        tc_idx = 0  # Debugging output.
        byte_idx = 0  # Variable TC sizing, must do a while loop.
        while byte_idx < fragment_data_size:
            if self._verbosity >= 2:
                print(f"INFO: Fragment Index: {tc_idx}.")
                tc_idx += 1
                print(f"INFO: Byte Index / Frag Size: {byte_idx} / {fragment_data_size}")

            # Process TC data
            tc_datum = trgdataformats.TriggerCandidate(fragment.get_data(byte_idx))
            np_tc_datum = np.array([(
                tc_datum.data.algorithm,
                tc_datum.data.detid,
                tc_datum.n_inputs(),
                tc_datum.data.time_candidate,
                tc_datum.data.time_end,
                tc_datum.data.time_start,
                tc_datum.data.type,
                tc_datum.data.version,
                trigger_number)],
                dtype=self.tc_dt)

            self.tc_data = np.hstack((self.tc_data, np_tc_datum))

            byte_idx += tc_datum.sizeof()
            if self._verbosity >= 2:
                print(f"INFO: Upcoming byte index: {byte_idx}.")

            # Process TA data
            np_ta_data = np.zeros(np_tc_datum['num_tas'], dtype=self.ta_dt)
            for ta_idx, ta in enumerate(tc_datum):
                np_ta_data[ta_idx] = np.array([(
                    ta.adc_integral,
                    ta.adc_peak,
                    ta.algorithm,
                    ta.channel_end,
                    ta.channel_peak,
                    ta.channel_start,
                    np.uint16(ta.detid),
                    ta.time_activity,
                    ta.time_end,
                    ta.time_peak,
                    ta.time_start,
                    ta.type,
                    ta.version)],
                    dtype=self.ta_dt)
            self.ta_data.append(np_ta_data)  # Jagged array

        if self._verbosity >= 2:
            print("INFO: Finished reading.")
            print("="*60)
        return np_tc_datum
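
    # Note on read_fragment: TCs are variable-sized, so the payload is walked by
    # byte offset using TriggerCandidate.sizeof(); each TC extends self.tc_data,
    # and its TAs are appended to self.ta_data as one ndarray per TC (jagged).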

    def clear_data(self) -> None:
        self.tc_data = np.array([], dtype=self.tc_dt)
        self.ta_data = []
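

# Minimal usage sketch (illustrative only, not part of the reader API): the file
# name below is hypothetical, and it is assumed that HDF5Reader populates the
# internal _fragment_paths list (filtered to TC fragments above) at construction.
if __name__ == "__main__":
    reader = TCReader("example_tc_run.hdf5", verbosity=1)  # Hypothetical file name.
    for path in reader._fragment_paths:  # Assumed: filled and filtered at init.
        reader.read_fragment(path)
    print(f"Read {len(reader)} TCs.")
    if len(reader) > 0:
        print(reader['time_candidate'])  # Field access through __getitem__.
        print(len(reader.ta_data[0]))    # Number of TAs in the first TC.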