DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
daphne_decoder.py
Go to the documentation of this file.
1#!/usr/bin/env python3
2"""
3Created on: 17/05/2023
4
5Author: Vitaliy Popov
6
7Description: Script checks PDS data and prints some of the ADC stats.
8
9"""
10
11
12from hdf5libs import HDF5RawDataFile
13
14import daqdataformats
15import detdataformats
16import fddetdataformats
17from daqdataformats import FragmentType
19import detchannelmaps
20
21import click
22import datetime
23import time
24import numpy as np
25import time
26#import matplotlib.pyplot as plt
27
class bcolors:
    """ANSI escape sequences for colourising terminal output."""

    HEADER    = "\033[95m"   # bright magenta
    OKBLUE    = "\033[94m"
    OKCYAN    = "\033[96m"
    OKGREEN   = "\033[92m"
    WARNING   = "\033[93m"   # yellow
    FAIL      = "\033[91m"   # red
    ENDC      = "\033[0m"    # reset all attributes
    BOLD      = "\033[1m"
    UNDERLINE = "\033[4m"
38
39dmodes = {3 : "Self-triggered", 13 : "Streaming"}
40
def print_links(pds_geo_ids):
    """Print a crate/slot/link summary table for a list of PDS geo IDs.

    Each geo ID packs hardware coordinates into bit fields:
    [63:48] = link, [47:32] = slot, [31:16] = crate, [15:0] = detector id.
    """
    print("-" * 60)
    split = " " * 6 + "|" + " " * 6

    # Links grouped by slot; the list index is the slot number.
    # NOTE(review): assumes slots 0-3 only — a det_slot > 3 raises IndexError.
    geo_data = [[] for _ in range(4)]
    crate_of_slot = {}

    for gid in pds_geo_ids:
        det_link = 0xffff & (gid >> 48)
        det_slot = 0xffff & (gid >> 32)
        det_crate = 0xffff & (gid >> 16)
        geo_data[det_slot].append(det_link)
        # Remember the crate seen for this slot. This fixes a NameError: the
        # original printed 'geo_info.det_crate', but 'geo_info' was only
        # assigned on a commented-out line.
        crate_of_slot[det_slot] = det_crate

    for slot, links in enumerate(geo_data):
        if links:
            print(f"\t{crate_of_slot[slot]:3}{split}{slot:3}{split}{links}")
61
62
@click.command()
@click.argument('filename', type=click.Path(exists=True))
@click.option('--det', default='HD_PDS', help='Subdetector string (default: HD_PDS)')
@click.option('--nrecords', '-n', default=-1, help='How many Trigger Records to process (default: all)')
@click.option('--nskip', default=0, help='How many Trigger Records to skip (default: 0)')
@click.option('--summary', is_flag=True, help="Print checks summary (currently broken?)")
@click.option('--check_ts', is_flag=True, help="Print timestamps check (works for streaming data)")
@click.option('--adc_stats', is_flag=True, help="Print adc stats (works for streaming data)")
@click.option('--print_frame_timestamps', is_flag=True, help="Print individual frame timestamps (can be very verbose)")
def main(filename, det, nrecords, nskip, adc_stats, check_ts, summary, print_frame_timestamps):
    """Scan DAPHNE (PDS) fragments in an HDF5 raw-data file and print per-channel stats.

    For every selected trigger record a table row is printed per channel
    (crate/slot/link/fragment type/channel), optionally extended with ADC
    mean/std (--adc_stats) and timestamp-spacing checks (--check_ts).
    """
    h5_file = HDF5RawDataFile(filename)
    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return

    # Normalize nskip/nrecords into a slice upper bound; -1 means "to the end".
    if nrecords > 0:
        if (nskip + nrecords) > len(records):
            nrecords = -1
        else:
            nrecords = nskip + nrecords

    if nrecords == -1:
        records_to_process = records[nskip:]
    else:
        records_to_process = records[nskip:nrecords]

    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    for r in records_to_process:

        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r, detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            return

        trigger_stamps = []
        stamp_begin = []
        timelines = []  # NOTE(review): never populated, so the summary timeline check below is vacuous

        active_channels = 0
        n_channels = 0

        headline = f"{'CRATE':^10} {'SLOT':^10} {'LINK':^10} {'Fragment Type':^15} {'CHANNEL':^10} "

        if adc_stats:
            headline += f" {'MEAN':^10} {'Std.dev.':^10}"

        if check_ts:
            headline += f" {'TS stats':^17} {'TS Check':^18}"

        # Convert the 62.5 MHz DTS trigger timestamp to seconds since epoch
        # (the original variable name said "nsec" but the value is seconds).
        trg_ts_sec = float(h5_file.get_frag(r, pds_geo_ids[0]).get_trigger_timestamp()) / 62500000.0
        trg_time_string = datetime.datetime.fromtimestamp(trg_ts_sec)

        print("-"*114)
        print(f"{'RECORD':>50}: {r[0]:<15} {str(trg_time_string):^26}")
        print("-"*114)
        print(headline)
        print("-"*114)

        scanned_channels = 0
        tslot = -1  # last slot printed; used to insert a blank line between slots

        for gid in pds_geo_ids:

            frag = h5_file.get_frag(r, gid)
            # Geo-ID bit layout: [63:48]=link, [47:32]=slot, [31:16]=crate, [15:0]=detector id.
            det_link = 0xffff & (gid >> 48)
            det_slot = 0xffff & (gid >> 32)
            det_crate = 0xffff & (gid >> 16)
            det_id = 0xffff & gid
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            fragType = frag.get_header().fragment_type

            # NOTE(review): the np_array_* unpackers are not imported in this
            # file; presumably they come from rawdatautils.unpack.daphne — confirm.
            if fragType == FragmentType.kDAPHNE.value:
                # Self-triggered data.
                first_frame = fddetdataformats.DAPHNEFrame(frag.get_data())
                timestamps = np_array_timestamp(frag)
                adcs = np_array_adc(frag)
                channels = np_array_channels(frag)
                n_channels = len(np.unique(channels))

            elif fragType == FragmentType.kDAPHNEStream.value:
                # Streaming data: adcs is indexed [:, channel] below.
                first_frame = fddetdataformats.DAPHNEStreamFrame(frag.get_data())
                timestamps = np_array_timestamp_stream(frag)
                adcs = np_array_adc_stream(frag)
                channels = np_array_channels_stream(frag)[0]
                n_channels = len(np.unique(channels))

            trigger_stamps.append(frag.get_trigger_timestamp())

            # Print the frame's DAQ header once (the original printed the same
            # two lines twice by accident).
            daq_header = first_frame.get_daqheader()
            print(daq_header, daq_header.version)

            ts_status = f"{bcolors.FAIL}{'Problems':^20}{bcolors.ENDC}"

            for ch_num in range(n_channels):
                scanned_channels += 1
                line = f"{det_crate:^10} {det_slot:^10} {det_link:^10} {dmodes[fragType] :^15} {channels[ch_num]:^10} "

                # NOTE(review): whole-array mean, so every channel of an active
                # fragment is counted as "active" — confirm this is intended.
                if np.mean(adcs[:]) > 10:
                    active_channels += 1

                if adc_stats:
                    if fragType == FragmentType.kDAPHNE.value:
                        line += f"{np.mean(adcs[:]):^10.2f} {np.std(adcs[:]):^10.2f} "
                    else:
                        line += f"{np.mean(adcs[:, ch_num]):^10.2f} {np.std(adcs[:, ch_num]):^10.2f} "

                if check_ts:
                    # Uniform spacing (std dev < 2 ticks) counts as a healthy timeline.
                    delta = np.diff(timestamps)
                    line += f"{np.mean(delta):>8.1f}/{np.std(delta):<8.1f}"

                    if np.std(delta) < 2:
                        ts_status = f"{bcolors.OKGREEN}{'OK':^18}{bcolors.ENDC}"

                    line += ts_status

                print(line)

            if (print_frame_timestamps):
                temp_channels = np_array_channels_stream(frag)
                temp_dashes_string = "-"*110
                print(f"        {temp_dashes_string}")
                print("        --> Frame timestamp details <--")
                print("        Index     Channel     DTS Timestamp (ticks)     DTS Timestamp (time string)")
                print(f"        {temp_dashes_string}")
                loop_counter = 0
                for idx in range(len(timestamps)):
                    # Streaming frames carry 64 samples each; print one line per frame.
                    if fragType == FragmentType.kDAPHNEStream.value and (idx % 64) != 0:
                        continue
                    ts_sec = float(timestamps[idx])/62500000.0
                    time_string = datetime.datetime.fromtimestamp(ts_sec)
                    if fragType == FragmentType.kDAPHNEStream.value:
                        print(f'        {(idx/64):>5}     {temp_channels[loop_counter]}     {timestamps[idx]:>20}     {str(time_string):<26}')
                    else:
                        print(f'        {idx:>5}     {channels[idx]:>5}     {timestamps[idx]:>20}     {str(time_string):<26}')
                    loop_counter += 1
                print()

            # Blank separator line each time we move on to a new slot.
            if tslot == det_slot:
                continue
            else:
                tslot = det_slot
                print("")

    print(f"Number of active/total channels \t-- {active_channels:>20}/{scanned_channels}\n")

    if summary:

        print("-"*80)
        print(f"{'SUMMARY':^80}")
        print("-"*80)
        print(f"Processed records \t - \t {len(records_to_process)} \n")
        print("-"*60)
        print(f"\t crates \t slots \t\t links")
        print_links(pds_geo_ids)
        print("-"*60)
        # 'timelines' is always empty (see above); np.all([]) is True, so this
        # currently always reports OK — matches the "currently broken?" help text.
        if (np.all(timelines) == 1):
            print(f"Timelines \t - \t {bcolors.OKGREEN} OK {bcolors.ENDC} \n")
        else:
            print(f"Timelines \t - \t {bcolors.FAIL} PROBLEMS {bcolors.ENDC} \n")

    print(f"{'Processing finished': ^80}")
240
# Script entry point: click parses the command line and invokes main().
if __name__ == '__main__':
    main()
243
244
245
246
247