def main(filename, det, nrecords, nskip, channel_map, adc_stats, print_wvfm_samples, print_tp_info):
    """Dump PDS (DAPHNE) fragment contents from a DUNE-DAQ HDF5 raw-data file.

    Args:
        filename: path to the HDF5 raw-data file.
        det: subdetector name string (resolved via DetID.string_to_subdetector).
        nrecords: number of records to process; <= 0 means "all remaining".
        nskip: number of leading records to skip.
        channel_map: PDS channel-map name handed to the unpackers, or None.
        adc_stats: if truthy, print per-channel ADC statistics.
        print_wvfm_samples: if truthy, print up to this many waveform samples
            per channel (clamped to the waveform length).
        print_tp_info: if truthy, dump trigger-primitive (peak) info for
            self-triggered, non-Ethernet DAPHNE frames.
    """
    h5_file = HDF5RawDataFile(filename)
    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return

    # Translate (nskip, nrecords) into a slice endpoint; -1 means "to the end".
    if nrecords > 0:
        if (nskip + nrecords) > len(records):
            nrecords = -1
        else:
            nrecords = nskip + nrecords

    records_to_process = records[nskip:] if nrecords == -1 else records[nskip:nrecords]

    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    # One unpacker per frame flavor: (streaming vs self-triggered) x (legacy vs Ethernet).
    unpacker_stream = DAPHNEStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_slftrg = DAPHNEUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_eth_stream = DAPHNEEthStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_eth_slftrg = DAPHNEEthUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)

    # Build the map purely to validate the channel-map name up front; the
    # returned object was never read anywhere below, so it is not kept.
    if channel_map is not None:
        detchannelmaps.make_pds_map(channel_map)

    for r in records_to_process:

        # BUGFIX: was f'...({r[0],r[1]})', which formats the *tuple*
        # (r[0], r[1]) and printed '=((rec, seq))'; format each field.
        print(f'Processing (Record Number,Sequence Number)=({r[0]},{r[1]})')
        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r, detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            return

        for gid in pds_geo_ids:

            # The 64-bit geo ID packs (stream, slot, crate, det_id) in 16-bit fields.
            det_stream = 0xffff & (gid >> 48)
            det_slot = 0xffff & (gid >> 32)
            det_crate = 0xffff & (gid >> 16)
            det_id = 0xffff & gid
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print(f'\tProcessing gid {gid}: ',
                  f'subdetector {det_name}, '
                  f'crate {det_crate}, '
                  f'slot {det_slot}, '
                  f'stream {det_stream}')

            frag = h5_file.get_frag(r, gid)
            fragType = frag.get_header().fragment_type
            fragType_string = daqdataformats.fragment_type_to_string(daqdataformats.FragmentType(fragType))

            is_selftrigger = fragType in (FragmentType.kDAPHNE.value, FragmentType.kDAPHNEEth.value)
            is_eth = fragType in (FragmentType.kDAPHNEEth.value, FragmentType.kDAPHNEEthStream.value)

            # Pick the unpacker matching the fragment's frame flavor.
            if is_eth:
                unpacker = unpacker_eth_slftrg if is_selftrigger else unpacker_eth_stream
            else:
                unpacker = unpacker_slftrg if is_selftrigger else unpacker_stream

            frag_header = unpacker.get_frh_data(frag)[0]
            print('\t', frag_header)

            n_frames = unpacker.get_n_obj(frag)
            print(f'\tFound {n_frames} {fragType_string} frames in this fragment.')
            if n_frames == 0:
                continue

            daq_header_data = unpacker.get_daq_header_data(frag)
            det_header_data = unpacker.get_det_header_data(frag)

            # NOTE(review): a leftover debug `print(det_header_data)` that dumped
            # the entire header list once per DAQ header was removed here.
            for i_daqh, daqh in enumerate(daq_header_data):
                print(f'\tDAQ header {i_daqh}:\n\t\t', daqh)
                # Some unpackers provide no (or fewer) detector headers; guard the lookup.
                if det_header_data is None or len(det_header_data) < (i_daqh + 1):
                    continue
                print(f'\tDAPHNE header info {i_daqh}:\n\t\t', det_header_data[i_daqh])

            pds_ana_data, pds_wvfm_data = unpacker.get_det_data_all(frag)

            if adc_stats:
                for pds_ana in pds_ana_data:
                    # Self-triggered frames carry a single per-trigger timestamp.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_ana.channel}, timestamp {pds_ana.timestamp_dts} adc stats:')
                    else:
                        print(f'\t\tPDS channel {pds_ana.channel} adc stats:')
                    print('\t\t', pds_ana)

            if print_wvfm_samples:
                for pds_wvfm in pds_wvfm_data:
                    # BUGFIX: clamp to the waveform length so requesting more
                    # samples than exist no longer raises IndexError.
                    n_show = min(print_wvfm_samples, len(pds_wvfm.timestamps))
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamp_dts} ({dts_to_datetime(pds_wvfm.timestamp_dts)}), waveform ({n_show}/{len(pds_wvfm.timestamps)} samples):')
                    else:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamps[0]} ({dts_to_datetime(pds_wvfm.timestamps[0])}), waveform ({n_show}/{len(pds_wvfm.timestamps)} samples):')
                    for i_sample in range(n_show):
                        print(f'\t\t\t {i_sample:>5}: ts={pds_wvfm.timestamps[i_sample]:<25.0f} val={pds_wvfm.adcs[i_sample]}')

            if print_tp_info:
                if not is_selftrigger:
                    print('--print-tp-info called, but fragment is not of self-trigger type. Skipping...')
                elif is_eth:
                    print('--print-tp-info not currently supported for DAPHNEEthFrame. Skipping...')
                else:
                    print('--PRINTING TP INFO--')
                    # channel -> set of absolute peak timestamps, used to flag duplicate TPs.
                    dict_tp_ch_ts = {}
                    for i_f in range(n_frames):
                        frame = fddetdataformats.DAPHNEFrame(frag.get_data(i_f * fddetdataformats.DAPHNEFrame.sizeof()))
                        print(f'\tAnalyzing Frame {i_f}: TS={frame.get_timestamp()} DAPHNE_CH={frame.get_channel()}')
                        peaks_data = frame.get_peaks_data()

                        # A DAPHNE frame carries up to 5 peak slots; count the occupied ones.
                        n_tps = sum(1 for i_p in range(5) if peaks_data.is_found(i_p))
                        print(f'\t\tFound {n_tps} TPs:')
                        ch_ts = dict_tp_ch_ts.setdefault(frame.get_channel(), set())
                        # BUGFIX: was `range(n_tps)`, which only scanned the first
                        # n_tps slots and silently dropped any found peak sitting
                        # at a slot index >= n_tps; scan all 5 slots instead.
                        for i_p in range(5):
                            if not peaks_data.is_found(i_p):
                                continue
                            print(f'\t\t\tTP Peak {i_p} at ts={peaks_data.get_sample_start(i_p)}, ',
                                  f'adc_integral={peaks_data.get_adc_integral(i_p)}, '
                                  f'adc_max={peaks_data.get_adc_max(i_p)}, ',
                                  f't_over_baseline={peaks_data.get_samples_over_baseline(i_p)}, ',
                                  f'n_subpeaks={peaks_data.get_num_subpeaks(i_p)}')
                            tp_ts = frame.get_timestamp() + peaks_data.get_sample_start(i_p)
                            if tp_ts in ch_ts:
                                print(f"\t\t\t\t=====PREVIOUSLY FOUND TP w/ CH={frame.get_channel()}, TS={tp_ts}")
                            else:
                                ch_ts.add(tp_ts)

    print(f"{'Processing finished': ^80}")
int main(int argc, char *argv[])