def main(filename, det, nrecords, nskip, channel_map, adc_stats, print_wvfm_samples, print_tp_info):
    """Dump PDS (DAPHNE) fragment contents from a DUNE-DAQ HDF5 raw data file.

    Walks the selected trigger records, finds every geo id belonging to the
    requested subdetector, and prints fragment headers plus (optionally)
    per-channel ADC statistics, waveform samples, and self-trigger TP/peak
    information.

    Parameters:
        filename: path to the HDF5 raw data file.
        det: subdetector name string understood by detdataformats.DetID.
        nrecords: number of records to process; <=0 (or more than available
            after nskip) means "all remaining records".
        nskip: number of leading records to skip.
        channel_map: PDS channel-map name handed to the unpackers, or None.
        adc_stats: if truthy, print per-channel ADC statistics.
        print_wvfm_samples: number of waveform samples to print (0 disables).
        print_tp_info: if truthy, print TP/peak info for self-triggered frames.
    """
    h5_file = HDF5RawDataFile(filename)
    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return

    # Turn (nskip, nrecords) into an exclusive end index; -1 means "to the end".
    if nrecords > 0:
        if (nskip + nrecords) > len(records):
            nrecords = -1
        else:
            nrecords = nskip + nrecords

    records_to_process = records[nskip:] if nrecords == -1 else records[nskip:nrecords]

    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    # One unpacker per fragment flavor: streaming vs. self-triggered DAPHNE.
    unpacker_stream = DAPHNEStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_slftrg = DAPHNEUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)

    # NOTE(review): ch_map is built but not read in this function; kept because
    # make_pds_map may validate the map name — confirm before removing.
    ch_map = None
    if channel_map is not None:
        ch_map = detchannelmaps.make_pds_map(channel_map)

    for r in records_to_process:

        # BUGFIX: braces split so the two ids print as separate fields
        # ("(1,2)") instead of a single tuple repr ("((1, 2))").
        print(f'Processing (Record Number,Sequence Number)=({r[0]},{r[1]})')
        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r, detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            return

        for gid in pds_geo_ids:

            # Decode the packed 64-bit geo id: 16-bit fields
            # [stream | slot | crate | det_id], most significant first.
            det_stream = 0xffff & (gid >> 48)
            det_slot = 0xffff & (gid >> 32)
            det_crate = 0xffff & (gid >> 16)
            det_id = 0xffff & gid
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print(f'\tProcessing gid {gid}: ',
                  f'subdetector {det_name}, '
                  f'crate {det_crate}, '
                  f'slot {det_slot}, '
                  f'stream {det_stream}')

            frag = h5_file.get_frag(r, gid)
            fragType = frag.get_header().fragment_type
            fragType_string = daqdataformats.fragment_type_to_string(daqdataformats.FragmentType(fragType))

            is_selftrigger = (fragType == FragmentType.kDAPHNE.value)

            unpacker = unpacker_slftrg if is_selftrigger else unpacker_stream

            frag_header = unpacker.get_frh_data(frag)[0]
            print('\t', frag_header)

            n_frames = unpacker.get_n_obj(frag)
            print(f'\tFound {n_frames} {fragType_string} frames in this fragment.')
            if n_frames == 0:
                continue

            daq_header_data = unpacker.get_daq_header_data(frag)
            det_header_data = unpacker.get_det_header_data(frag)

            for i_daqh, daqh in enumerate(daq_header_data):
                print(f'\tDAQ header {i_daqh}:\n\t\t', daqh)
                # Detector headers may be absent or shorter than the DAQ headers.
                # (BUGFIX: dropped a debug print that re-dumped the whole
                # det_header_data list on every iteration.)
                if det_header_data is None or len(det_header_data) < (i_daqh + 1):
                    continue
                print(f'\tDAPHNE header info {i_daqh}:\n\t\t', det_header_data[i_daqh])

            pds_ana_data, pds_wvfm_data = unpacker.get_det_data_all(frag)

            if adc_stats:
                for pds_ana in pds_ana_data:
                    # Self-triggered hits carry their own timestamp; streams do not.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_ana.channel}, timestamp {pds_ana.timestamp_dts} adc stats:')
                    else:
                        print(f'\t\tPDS channel {pds_ana.channel} adc stats:')
                    print('\t\t', pds_ana)

            if print_wvfm_samples:
                for pds_wvfm in pds_wvfm_data:
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamp_dts} ({dts_to_datetime(pds_wvfm.timestamp_dts)}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    else:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamps[0]} ({dts_to_datetime(pds_wvfm.timestamps[0])}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    # BUGFIX: clamp to the samples actually present so asking for
                    # more samples than the waveform holds no longer raises IndexError.
                    for i_sample in range(min(print_wvfm_samples, len(pds_wvfm.timestamps))):
                        print(f'\t\t\t {i_sample:>5}: ts={pds_wvfm.timestamps[i_sample]:<25.0f} val={pds_wvfm.adcs[i_sample]}')

            if print_tp_info:
                if not is_selftrigger:
                    print(f'--print-tp-info called, but fragment is not kDAPHNE. Skipping...')
                else:
                    print(f'--PRINTING TP INFO--')
                    # channel -> set of absolute TP timestamps, for duplicate detection.
                    dict_tp_ch_ts = {}
                    for i_f in range(n_frames):
                        frame = fddetdataformats.DAPHNEFrame(frag.get_data(i_f * fddetdataformats.DAPHNEFrame.sizeof()))
                        print(f'\tAnalyzing Frame {i_f}: TS={frame.get_timestamp()} DAPHNE_CH={frame.get_channel()}')
                        peaks_data = frame.get_peaks_data()

                        # A frame carries up to 5 peak slots; count the occupied ones.
                        n_tps = sum(1 for i_p in range(5) if peaks_data.is_found(i_p))
                        print(f'\t\tFound {n_tps} TPs:')
                        dict_tp_ch_ts.setdefault(frame.get_channel(), set())
                        # BUGFIX: scan all 5 peak slots instead of only the first
                        # n_tps, so peaks sitting in higher slots are not dropped
                        # (the counting loop above already checks all 5).
                        for i_p in range(5):
                            if not peaks_data.is_found(i_p):
                                continue
                            print(f'\t\t\tTP Peak {i_p} at ts={peaks_data.get_sample_start(i_p)}, ',
                                  f'adc_integral={peaks_data.get_adc_integral(i_p)}, '
                                  f'adc_max={peaks_data.get_adc_max(i_p)}, ',
                                  f't_over_baseline={peaks_data.get_samples_over_baseline(i_p)}, ',
                                  f'n_subpeaks={peaks_data.get_num_subpeaks(i_p)}')
                            tp_ts = frame.get_timestamp() + peaks_data.get_sample_start(i_p)
                            if tp_ts in dict_tp_ch_ts[frame.get_channel()]:
                                print(f"\t\t\t\t=====PREVIOUSLY FOUND TP w/ CH={frame.get_channel()}, TS={tp_ts}")
                            else:
                                dict_tp_ch_ts[frame.get_channel()].add(tp_ts)

    print(f"{'Processing finished': ^80}")
206
int main(int argc, char **argv)