def main(filename, det, nrecords, nskip, channel_map, adc_stats, print_wvfm_samples, print_tp_info):
    """Dump PDS (DAPHNE) fragment contents from a DUNE-DAQ HDF5 raw-data file.

    Iterates over trigger/timeslice records, locates geo-ids for the requested
    subdetector, unpacks each fragment with the unpacker matching its fragment
    type (self-trigger vs. stream, Ethernet vs. legacy), and prints headers,
    per-channel ADC statistics, waveform samples, and self-trigger TP info.

    Parameters
    ----------
    filename : str
        Path to the HDF5 raw-data file.
    det : str
        Subdetector name understood by ``detdataformats.DetID.string_to_subdetector``.
    nrecords : int
        Number of records to process; ``-1`` means "all remaining after nskip".
    nskip : int
        Number of leading records to skip.
    channel_map : str or None
        PDS channel-map name; passed to the unpackers and ``detchannelmaps``.
    adc_stats : bool
        If true, print per-channel ADC statistics.
    print_wvfm_samples : int
        If non-zero, print this many waveform samples per channel.
    print_tp_info : bool
        If true, print trigger-primitive (peak) info for self-trigger frames.
    """
    # NOTE(review): the original line constructing the file handle was lost in
    # the corrupted source; HDF5RawDataFile(filename) is the standard entry
    # point for this API -- confirm against the original script.
    h5_file = HDF5RawDataFile(filename)

    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return

    # Normalize nrecords into an absolute end index (or -1 for "to the end").
    # NOTE(review): the guard around this arithmetic was lost; without an
    # nrecords != -1 check the -1 sentinel would be corrupted, so it is
    # restored here -- confirm against the original script.
    if nrecords != -1:
        if (nskip + nrecords) > len(records):
            nrecords = -1
        else:
            nrecords = nskip + nrecords

    records_to_process = records[nskip:] if nrecords == -1 else records[nskip:nrecords]
    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    # One unpacker per (readout mode x transport) combination; the right one
    # is selected per fragment from its fragment type below.
    unpacker_stream = DAPHNEStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_slftrg = DAPHNEUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    # NOTE(review): the line constructing the Ethernet-stream unpacker was
    # lost; it is referenced below, so it is reconstructed here -- confirm the
    # correct class name in rawdatautils.
    unpacker_eth_stream = DAPHNEEthStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_eth_slftrg = DAPHNEEthUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)

    if channel_map is not None:
        ch_map = detchannelmaps.make_pds_map(channel_map)
        offline_ch_num_dict = {}

    # Tracks (per DAPHNE channel) the absolute timestamps of TPs already seen,
    # so duplicates across frames can be flagged.
    # NOTE(review): initialization line was lost in the corrupted source; it
    # must exist before the record loop for the lookups below to work.
    dict_tp_ch_ts = {}

    for r in records_to_process:
        print(f'Processing (Record Number,Sequence Number)=({r[0],r[1]})')
        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r, detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            # NOTE(review): the original exit statement was lost; the message
            # says "Exiting", so we stop here -- confirm (could be sys.exit).
            return

        for gid in pds_geo_ids:
            # Decode the packed 64-bit geo-id: [stream|slot|crate|det_id],
            # 16 bits each from high to low.
            det_stream = 0xffff & (gid >> 48)
            det_slot = 0xffff & (gid >> 32)
            det_crate = 0xffff & (gid >> 16)
            det_id = 0xffff & gid
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            # NOTE(review): one f-string piece (original line 129, presumably
            # the slot field) was lost from this print -- confirm.
            print(f'\tProcessing gid {gid}: ',
                  f'subdetector {det_name}, '
                  f'crate {det_crate}, '
                  f'slot {det_slot}, '
                  f'stream {det_stream}')

            frag = h5_file.get_frag(r, gid)
            fragType = frag.get_header().fragment_type
            fragType_string = daqdataformats.fragment_type_to_string(daqdataformats.FragmentType(fragType))

            is_selftrigger = (fragType == FragmentType.kDAPHNE.value
                              or fragType == FragmentType.kDAPHNEEth.value)
            is_eth = (fragType == FragmentType.kDAPHNEEth.value
                      or fragType == FragmentType.kDAPHNEEthStream.value)

            # NOTE(review): the branch selecting between the two assignments
            # was lost; is_eth is the only sensible discriminator -- confirm.
            if is_eth:
                unpacker = unpacker_eth_slftrg if is_selftrigger else unpacker_eth_stream
            else:
                unpacker = unpacker_slftrg if is_selftrigger else unpacker_stream

            frag_header = unpacker.get_frh_data(frag)[0]
            print('\t', frag_header)

            n_frames = unpacker.get_n_obj(frag)
            print(f'\tFound {n_frames} {fragType_string} frames in this fragment.')

            daq_header_data = unpacker.get_daq_header_data(frag)
            det_header_data = unpacker.get_det_header_data(frag)

            for i_daqh, daqh in enumerate(daq_header_data):
                print(f'\tDAQ header {i_daqh}:\n\t\t', daq_header_data[i_daqh])
                print(det_header_data)
                # Detector headers may be absent or shorter than the DAQ
                # header list; skip rather than index out of range.
                if det_header_data is None or len(det_header_data) < (i_daqh + 1):
                    continue
                print(f'\tDAPHNE header info {i_daqh}:\n\t\t', det_header_data[i_daqh])

            pds_ana_data, pds_wvfm_data = unpacker.get_det_data_all(frag)

            if adc_stats:
                for pds_ana in pds_ana_data:
                    # NOTE(review): the condition choosing between these two
                    # prints was lost; self-trigger data carries a per-channel
                    # DTS timestamp while stream data does not -- confirm.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_ana.channel}, timestamp {pds_ana.timestamp_dts} adc stats:')
                    else:
                        print(f'\t\tPDS channel {pds_ana.channel} adc stats:')
                    print('\t\t', pds_ana)

            if print_wvfm_samples:
                for pds_wvfm in pds_wvfm_data:
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamp_dts} ({dts_to_datetime(pds_wvfm.timestamp_dts)}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    else:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamps[0]} ({dts_to_datetime(pds_wvfm.timestamps[0])}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    for i_sample in range(print_wvfm_samples):
                        print(f'\t\t\t {i_sample:>5}: ts={pds_wvfm.timestamps[i_sample]:<25.0f} val={pds_wvfm.adcs[i_sample]}')

            if print_tp_info:
                if not is_selftrigger:
                    print(f'--print-tp-info called, but fragment is not of self-trigger type. Skipping...')
                elif is_eth:
                    print(f'--print-tp-info not currently supported for DAPHNEEthFrame. Skipping...')
                else:
                    print(f'--PRINTING TP INFO--')
                    for i_f in range(n_frames):
                        frame = fddetdataformats.DAPHNEFrame(frag.get_data(i_f * fddetdataformats.DAPHNEFrame.sizeof()))
                        print(f'\tAnalyzing Frame {i_f}: TS={frame.get_timestamp()} DAPHNE_CH={frame.get_channel()}')
                        peaks_data = frame.get_peaks_data()
                        # Count how many peak slots actually hold a found TP.
                        # NOTE(review): the loop bound over peak slots was lost
                        # in the corrupted source -- confirm the accessor for
                        # the number of peak descriptors per frame.
                        n_tps = 0
                        for i_p in range(peaks_data.num_peaks):
                            if peaks_data.is_found(i_p):
                                n_tps += 1
                        print(f'\t\tFound {n_tps} TPs:')
                        if frame.get_channel() not in dict_tp_ch_ts.keys():
                            dict_tp_ch_ts[frame.get_channel()] = set()
                        for i_p in range(n_tps):
                            if not peaks_data.is_found(i_p):
                                continue
                            print(f'\t\t\tTP Peak {i_p} at ts={peaks_data.get_sample_start(i_p)}, ',
                                  f'adc_integral={peaks_data.get_adc_integral(i_p)}, '
                                  f'adc_max={peaks_data.get_adc_max(i_p)}, ',
                                  f't_over_baseline={peaks_data.get_samples_over_baseline(i_p)}, ',
                                  f'n_subpeaks={peaks_data.get_num_subpeaks(i_p)}')
                            # Flag TPs whose absolute timestamp was already
                            # seen on this channel in an earlier frame.
                            if (frame.get_timestamp() + peaks_data.get_sample_start(i_p)) in dict_tp_ch_ts[frame.get_channel()]:
                                print(f"\t\t\t\t=====PREVIOUSLY FOUND TP w/ CH={frame.get_channel()}, TS={frame.get_timestamp()+peaks_data.get_sample_start(i_p)}")
                            dict_tp_ch_ts[frame.get_channel()].add(frame.get_timestamp() + peaks_data.get_sample_start(i_p))

    print(f"{'Processing finished': ^80}")