49 split =
" "*6 +
"|" +
" "*6
50 geo_data = [[]
for i
in range(4)]
51 for gid
in pds_geo_ids:
53 det_link = 0xffff & (gid >> 48);
54 det_slot = 0xffff & (gid >> 32);
55 det_crate = 0xffff & (gid >> 16);
56 det_id = 0xffff & gid;
57 subdet = detdataformats.DetID.Subdetector(det_id)
58 det_name = detdataformats.DetID.subdetector_to_string(subdet)
59 geo_data[det_slot].append(det_link)
62 for i
in range(len(geo_data)):
63 if len(geo_data[i])>0:
64 print(f
"\t{geo_info.det_crate:3}{split}{i:3}{split}{geo_data[i]}")
@click.argument('filename', type=click.Path(exists=True))
@click.option('--det', default='HD_PDS', help='Subdetector string (default: HD_PDS)')
@click.option('--nrecords', '-n', default=-1, help='How many Trigger Records to process (default: all)')
@click.option('--nskip', default=0, help='How many Trigger Records to skip (default: 0)')
@click.option('--channel-map', default=None, help="Channel map to load (default: None)")
@click.option('--adc-stats', is_flag=True, help="Print adc stats (works for streaming data)")
@click.option('--print-wvfm-samples', default=0, help='How many samples in each waveform to print.')
@click.option('--print-tp-info', is_flag=True, help='Print TP info from DAPHNFrame.')
def main(filename, det, nrecords, nskip, channel_map, adc_stats, print_wvfm_samples, print_tp_info):
    """Dump PDS (DAPHNE) fragment contents from a DUNE DAQ raw HDF5 file.

    Walks the requested trigger records, locates geo IDs for the chosen
    subdetector, and for each fragment prints headers and (optionally) per
    channel ADC stats, waveform samples, and trigger-primitive (TP) info.

    NOTE(review): this block was recovered from a mangled paste with some
    source lines missing.  Statements tagged 'reconstructed' below were
    re-created from context and must be checked against the original script.
    """
    # reconstructed: the file handle must exist before get_all_record_ids().
    h5_file = HDF5RawDataFile(filename)

    records = h5_file.get_all_record_ids()

    if nskip > len(records):
        print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
        return  # reconstructed exit path -- original may have used sys.exit()

    if nrecords != -1:
        # reconstructed: turn the (count) option into an absolute end index,
        # clamping to "all remaining" when the request runs past the end.
        if (nskip + nrecords) > len(records):
            nrecords = -1
        else:
            nrecords = nskip + nrecords

    records_to_process = records[nskip:] if nrecords == -1 else records[nskip:nrecords]
    print(f'Will process {len(records_to_process)} of {len(records)} records.')

    # One unpacker per framing flavour; selected per fragment below.
    unpacker_stream = DAPHNEStreamUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)
    unpacker_slftrg = DAPHNEUnpacker(channel_map=channel_map, ana_data_prescale=1, wvfm_data_prescale=1)

    if channel_map is not None:
        ch_map = detchannelmaps.make_pds_map(channel_map)
        offline_ch_num_dict = {}

    # reconstructed: per-channel sets of absolute TP timestamps, used below to
    # flag duplicate TPs across frames.
    dict_tp_ch_ts = {}

    for r in records_to_process:
        print(f'Processing (Record Number,Sequence Number)=({r[0],r[1]})')
        pds_geo_ids = list(h5_file.get_geo_ids_for_subdetector(r, detdataformats.DetID.string_to_subdetector(det)))

        if len(pds_geo_ids) == 0:
            print(f"Record {r} has no data for {det}. Exiting..")
            return  # reconstructed exit path

        for gid in pds_geo_ids:
            # Unpack the 64-bit geo id: [stream|slot|crate|det_id], 16 bits each.
            det_stream = 0xffff & (gid >> 48)
            det_slot = 0xffff & (gid >> 32)
            det_crate = 0xffff & (gid >> 16)
            det_id = 0xffff & gid
            subdet = detdataformats.DetID.Subdetector(det_id)
            det_name = detdataformats.DetID.subdetector_to_string(subdet)
            print(f'\tProcessing gid {gid}: ',
                  f'subdetector {det_name}, '
                  f'crate {det_crate}, '
                  # reconstructed: a line (likely 'slot {det_slot}') is missing
                  # here in the source -- confirm against the original.
                  f'slot {det_slot}, '
                  f'stream {det_stream}')

            frag = h5_file.get_frag(r, gid)
            fragType = frag.get_header().fragment_type
            fragType_string = daqdataformats.fragment_type_to_string(daqdataformats.FragmentType(fragType))

            # kDAPHNE fragments are self-triggered; anything else is streaming.
            is_selftrigger = (fragType == FragmentType.kDAPHNE.value)

            unpacker = unpacker_slftrg if is_selftrigger else unpacker_stream

            frag_header = unpacker.get_frh_data(frag)[0]
            print('\t', frag_header)

            n_frames = unpacker.get_n_obj(frag)
            print(f'\tFound {n_frames} {fragType_string} frames in this fragment.')

            daq_header_data = unpacker.get_daq_header_data(frag)
            det_header_data = unpacker.get_det_header_data(frag)

            for i_daqh, daqh in enumerate(daq_header_data):
                print(f'\tDAQ header {i_daqh}:\n\t\t', daq_header_data[i_daqh])
                print(det_header_data)
                # Detector headers may be absent or shorter than DAQ headers.
                if det_header_data is None or len(det_header_data) < (i_daqh + 1):
                    continue
                print(f'\tDAPHNE header info {i_daqh}:\n\t\t', det_header_data[i_daqh])

            pds_ana_data, pds_wvfm_data = unpacker.get_det_data_all(frag)

            if adc_stats:  # reconstructed guard (missing source line)
                for pds_ana in pds_ana_data:
                    # reconstructed branch: self-trigger records carry a
                    # per-channel timestamp, streaming records do not.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_ana.channel}, timestamp {pds_ana.timestamp_dts} adc stats:')
                    else:
                        print(f'\t\tPDS channel {pds_ana.channel} adc stats:')
                    print('\t\t', pds_ana)

            if print_wvfm_samples:
                for pds_wvfm in pds_wvfm_data:
                    # reconstructed branch: same self-trigger/streaming split
                    # as above for the waveform banner line.
                    if is_selftrigger:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamp_dts} ({dts_to_datetime(pds_wvfm.timestamp_dts)}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    else:
                        print(f'\t\tPDS channel {pds_wvfm.channel}, timestamp {pds_wvfm.timestamps[0]} ({dts_to_datetime(pds_wvfm.timestamps[0])}), waveform ({print_wvfm_samples}/{len(pds_wvfm.timestamps)} samples):')
                    for i_sample in range(print_wvfm_samples):
                        print(f'\t\t\t {i_sample:>5}: ts={pds_wvfm.timestamps[i_sample]:<25.0f} val={pds_wvfm.adcs[i_sample]}')

            if print_tp_info:  # reconstructed guard (missing source line)
                if not is_selftrigger:
                    # fix: dropped pointless f-prefix on placeholder-free strings
                    print('--print-tp-info called, but fragment is not kDAPHNE. Skipping...')
                else:
                    print('--PRINTING TP INFO--')
                    for i_f in range(n_frames):
                        frame = fddetdataformats.DAPHNEFrame(frag.get_data(i_f * fddetdataformats.DAPHNEFrame.sizeof()))
                        print(f'\tAnalyzing Frame {i_f}: TS={frame.get_timestamp()} DAPHNE_CH={frame.get_channel()}')
                        peaks_data = frame.get_peaks_data()
                        # reconstructed: the counter init and loop header were
                        # lost in the paste; the original counted found peaks
                        # over every peak slot -- TODO confirm the upper bound
                        # against the DAPHNEFrame::PeaksData API.
                        n_tps = 0
                        for i_p in range(peaks_data.n_peaks):
                            if peaks_data.is_found(i_p):
                                n_tps += 1
                        print(f'\t\tFound {n_tps} TPs:')
                        if frame.get_channel() not in dict_tp_ch_ts.keys():
                            dict_tp_ch_ts[frame.get_channel()] = set()
                        for i_p in range(n_tps):
                            if not peaks_data.is_found(i_p):
                                continue
                            print(f'\t\t\tTP Peak {i_p} at ts={peaks_data.get_sample_start(i_p)}, ',
                                  f'adc_integral={peaks_data.get_adc_integral(i_p)}, '
                                  f'adc_max={peaks_data.get_adc_max(i_p)}, ',
                                  f't_over_baseline={peaks_data.get_samples_over_baseline(i_p)}, ',
                                  f'n_subpeaks={peaks_data.get_num_subpeaks(i_p)}')
                            tp_ts = frame.get_timestamp() + peaks_data.get_sample_start(i_p)
                            if tp_ts in dict_tp_ch_ts[frame.get_channel()]:
                                print(f"\t\t\t\t=====PREVIOUSLY FOUND TP w/ CH={frame.get_channel()}, TS={frame.get_timestamp()+peaks_data.get_sample_start(i_p)}")
                            dict_tp_ch_ts[frame.get_channel()].add(tp_ts)

    print(f"{'Processing finished': ^80}")