# DUNE-DAQ — DUNE Trigger and Data Acquisition software
# daphne_decoder.py
1#!/usr/bin/env python3
2"""
3Created on: 17/05/2023
4
5Author: Vitaliy Popov
6
7Description: Script checks PDS data and prints some of the ADC stats.
8
9"""
10
11
12from hdf5libs import HDF5RawDataFile
13
14import daqdataformats
15import detdataformats
16import fddetdataformats
17from daqdataformats import FragmentType
19from rawdatautils.unpack.utils import *
20import detchannelmaps
21
22import click
23import datetime
24import time
25import numpy as np
26import time
27
28from rawdatautils.unpack.dataclasses import dts_to_datetime
29
30
31#import matplotlib.pyplot as plt
32
class bcolors:
    """ANSI terminal escape sequences for colored/styled console output.

    Use as e.g. ``bcolors.WARNING + "text" + bcolors.ENDC``; ENDC resets
    all styling.
    """
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
43
# DAPHNE acquisition-mode codes mapped to human-readable names.
# NOTE(review): not referenced elsewhere in this file as visible here.
dmodes = {3 : "Self-triggered", 13 : "Streaming"}
45
def print_links(pds_geo_ids):
    """Print a "crate | slot | links" summary table for a list of PDS geo ids.

    Each 64-bit geo id packs four 16-bit fields, most- to least-significant:
    link, slot, crate, detector id. Links are grouped by slot and one table
    row is printed per occupied slot.

    Parameters
    ----------
    pds_geo_ids : iterable of int
        64-bit geo ids as returned by HDF5RawDataFile.get_geo_ids_for_subdetector.
    """
    print("-" * 60)
    separator = " " * 6 + "|" + " " * 6

    # Links grouped by slot number (assumes slots 0-3 — TODO confirm upper bound).
    links_per_slot = [[] for _ in range(4)]
    # Fix: the original printed `geo_info.det_crate`, but `geo_info` was never
    # defined (its assignment was commented out) -> NameError. Track the crate
    # parsed from the geo ids instead.
    det_crate = None
    for gid in pds_geo_ids:
        det_link = 0xffff & (gid >> 48)
        det_slot = 0xffff & (gid >> 32)
        det_crate = 0xffff & (gid >> 16)
        links_per_slot[det_slot].append(det_link)

    for slot, links in enumerate(links_per_slot):
        if links:
            print(f"\t{det_crate:3}{separator}{slot:3}{separator}{links}")
66
67
@click.command()
@click.argument('filename', type=click.Path(exists=True))
@click.option('--det', default='HD_PDS', help='Subdetector string (default: HD_PDS)')
@click.option('--nrecords', '-n', default=-1, help='How many Trigger Records to process (default: all)')
@click.option('--nskip', default=0, help='How many Trigger Records to skip (default: 0)')
@click.option('--channel-map', default=None, help="Channel map to load (default: None)")
@click.option('--adc-stats', is_flag=True, help="Print adc stats (works for streaming data)")
@click.option('--print-wvfm-samples', default=0, help='How many samples in each waveform to print.')
@click.option('--print-tp-info', is_flag=True, help='Print TP info from DAPHNFrame.')

def main(filename, det, nrecords, nskip, channel_map, adc_stats, print_wvfm_samples, print_tp_info):
    """Decode DAPHNE (PDS) fragments from an HDF5 raw-data file and print stats.

    Iterates over the selected trigger records, unpacks each DAPHNE fragment
    with the unpacker matching its fragment type (streaming vs self-triggered,
    classic vs Ethernet readout), and prints fragment/DAQ/detector headers
    plus the optional per-channel ADC stats, waveform samples, and TP
    (trigger-primitive) information.
    """

    h5_file = HDF5RawDataFile(filename)
    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return

    # Convert (nskip, nrecords) into a slice end index; -1 means "to the end".
    if nrecords > 0:
        if (nskip+nrecords) > len(records):
            nrecords = -1
        else:
            nrecords=nskip+nrecords

    records_to_process = records[nskip:] if nrecords==-1 else records[nskip:nrecords]

    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    # One unpacker per (readout flavor x trigger mode) combination; the right
    # one is selected per fragment below from its fragment type.
    unpacker_stream = DAPHNEStreamUnpacker(channel_map=channel_map,ana_data_prescale=1,wvfm_data_prescale=1)
    unpacker_slftrg = DAPHNEUnpacker(channel_map=channel_map,ana_data_prescale=1,wvfm_data_prescale=1)
    unpacker_eth_stream = DAPHNEEthStreamUnpacker(channel_map=channel_map,ana_data_prescale=1,wvfm_data_prescale=1)
    unpacker_eth_slftrg = DAPHNEEthUnpacker(channel_map=channel_map,ana_data_prescale=1,wvfm_data_prescale=1)


    #have channel numbers per geoid in here
    # NOTE(review): ch_map and offline_ch_num_dict are built but never read
    # below — possibly leftover from an earlier revision; confirm before removing.
    ch_map = None
    if channel_map is not None:
        ch_map = detchannelmaps.make_pds_map(channel_map)
        offline_ch_num_dict = {}

    for r in records_to_process:

        # NOTE(review): {r[0],r[1]} is a single f-string field holding a tuple,
        # so this prints "((N, M))" with doubled parentheses — confirm intended.
        print(f'Processing (Record Number,Sequence Number)=({r[0],r[1]})')
        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r,detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            return

        for gid in pds_geo_ids:

            # Unpack the 64-bit geo id: [stream | slot | crate | det_id],
            # 16 bits each, most- to least-significant.
            det_stream = 0xffff & (gid >> 48);
            det_slot = 0xffff & (gid >> 32);
            det_crate = 0xffff & (gid >> 16);
            det_id = 0xffff & gid;
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print(f'\tProcessing gid {gid}: ',
                  f'subdetector {det_name}, '
                  f'crate {det_crate}, '
                  f'slot {det_slot}, '
                  f'stream {det_stream}')

            frag = h5_file.get_frag(r,gid)
            fragType = frag.get_header().fragment_type
            fragType_string = daqdataformats.fragment_type_to_string(daqdataformats.FragmentType(fragType))

            # The fragment type encodes both the trigger mode and the readout
            # flavor; derive two flags and pick the matching unpacker.
            is_selftrigger = (fragType==FragmentType.kDAPHNE.value or fragType==FragmentType.kDAPHNEEth.value)
            is_eth = (fragType==FragmentType.kDAPHNEEth.value or fragType==FragmentType.kDAPHNEEthStream.value)

            unpacker = None
            if is_eth:
                unpacker = unpacker_eth_slftrg if is_selftrigger else unpacker_eth_stream
            else:
                unpacker = unpacker_slftrg if is_selftrigger else unpacker_stream



            #get and print fragment header
            frag_header = unpacker.get_frh_data(frag)[0]
            print('\t',frag_header)

            n_frames = unpacker.get_n_obj(frag)
            print(f'\tFound {n_frames} {fragType_string} frames in this fragment.')
            if n_frames==0:
                continue

            daq_header_data = unpacker.get_daq_header_data(frag)
            det_header_data = unpacker.get_det_header_data(frag)

            for i_daqh, daqh in enumerate(daq_header_data):
                print(f'\tDAQ header {i_daqh}:\n\t\t',daq_header_data[i_daqh])
                print(det_header_data)
                # Some unpackers may provide no (or fewer) detector headers
                # than DAQ headers; skip the detector print in that case.
                if det_header_data is None or len(det_header_data)<(i_daqh+1): continue
                print(f'\tDAPHNE header info {i_daqh}:\n\t\t',det_header_data[i_daqh])

            pds_ana_data, pds_wvfm_data = unpacker.get_det_data_all(frag)

            if adc_stats:
                for pds_ana in pds_ana_data:
                    # Self-triggered data has a per-object timestamp attribute;
                    # streaming data does not, so print channel only.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_ana.channel}, timestamp {pds_ana.timestamp_dts} adc stats:')
                    else:
                        print(f'\t\tPDS channel {pds_ana.channel} adc stats:')
                    print('\t\t',pds_ana)

            if print_wvfm_samples:
                for pds_wvfm in pds_wvfm_data:
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamp_dts} ({dts_to_datetime(pds_wvfm.timestamp_dts)}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    else:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamps[0]} ({dts_to_datetime(pds_wvfm.timestamps[0])}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    # Print only the first print_wvfm_samples samples of each waveform.
                    for i_sample in range(print_wvfm_samples):
                        print(f'\t\t\t {i_sample:>5}: ts={pds_wvfm.timestamps[i_sample]:<25.0f} val={pds_wvfm.adcs[i_sample]}')

            if print_tp_info:
                if not is_selftrigger:
                    print(f'--print-tp-info called, but fragment is not of self-trigger type. Skipping...')
                elif is_eth:
                    print(f'--print-tp-info not currently supported for DAPHNEEthFrame. Skipping...')
                else:
                    print(f'--PRINTING TP INFO--')
                    # channel -> set of absolute TP timestamps, used to flag
                    # duplicate TPs seen across frames.
                    dict_tp_ch_ts = {}
                    for i_f in range(n_frames):
                        # Re-parse the raw fragment payload frame-by-frame at
                        # fixed DAPHNEFrame-sized offsets.
                        frame = fddetdataformats.DAPHNEFrame(frag.get_data(i_f*fddetdataformats.DAPHNEFrame.sizeof()))
                        print(f'\tAnalyzing Frame {i_f}: TS={frame.get_timestamp()} DAPHNE_CH={frame.get_channel()}')
                        peaks_data = frame.get_peaks_data()

                        # A DAPHNE frame carries up to 5 peak slots; count those flagged as found.
                        n_tps = 0
                        for i_p in range(5):
                            if peaks_data.is_found(i_p): n_tps+=1
                        print(f'\t\tFound {n_tps} TPs:')
                        if frame.get_channel() not in dict_tp_ch_ts.keys():
                            dict_tp_ch_ts[frame.get_channel()] = set()
                        # NOTE(review): iterating range(n_tps) assumes the found
                        # peaks occupy the first n_tps slots; if found slots can
                        # be sparse, range(5) would be needed — confirm.
                        for i_p in range(n_tps):
                            if not peaks_data.is_found(i_p): continue
                            print(f'\t\t\tTP Peak {i_p} at ts={peaks_data.get_sample_start(i_p)}, ',
                                  f'adc_integral={peaks_data.get_adc_integral(i_p)}, '
                                  f'adc_max={peaks_data.get_adc_max(i_p)}, ',
                                  f't_over_baseline={peaks_data.get_samples_over_baseline(i_p)}, ',
                                  f'n_subpeaks={peaks_data.get_num_subpeaks(i_p)}')
                            # Absolute TP time = frame timestamp + in-frame sample start.
                            if (frame.get_timestamp()+peaks_data.get_sample_start(i_p)) in dict_tp_ch_ts[frame.get_channel()]:
                                print (f"\t\t\t\t=====PREVIOUSLY FOUND TP w/ CH={frame.get_channel()}, TS={frame.get_timestamp()+peaks_data.get_sample_start(i_p)}")
                            else:
                                dict_tp_ch_ts[frame.get_channel()].add(frame.get_timestamp()+peaks_data.get_sample_start(i_p))

    print(f"{'Processing finished': ^80}")
216
if __name__ == '__main__':
    # Click parses the command-line arguments itself; no explicit args passed.
    # Fix: removed two stray trailing calls (print_links(pds_geo_ids) and
    # main(filename, ...)) that referenced undefined module-level names and
    # would raise NameError on import.
    main()