DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
tpdecoder.py
Go to the documentation of this file.
1#!/usr/bin/env python3
2
3from hdf5libs import HDF5RawDataFile
4import h5py
5
6import daqdataformats
7import trgdataformats
8import detchannelmaps
9
10import click
11import time
12import numpy as np
13import sys
14
15from rawdatautils.unpack.dataclasses import TriggerPrimitiveData
16
@click.command()
@click.argument('filenames', nargs=-1, type=click.Path(exists=True))
@click.option('--nrecords', '-n', default=-1, help='How many records to process (default: all)')
@click.option('--nskip', default=0, help='How many records to skip (default: 0)')
@click.option('--channel-map', default=None, help="Channel map to load (default: None)")
def main(filenames, nrecords, nskip, channel_map):
    """Dump every TriggerPrimitive found in the trigger fragments of HDF5 raw-data files.

    For each input file: select the requested record range, then for every
    trigger-subsystem source ID whose fragment is of type kTriggerPrimitive,
    unpack each TP and print it as a TriggerPrimitiveData row.

    Parameters (via click):
        filenames   -- one or more existing HDF5 raw-data files.
        nrecords    -- number of records to process per file; -1 means all.
        nskip       -- number of leading records to skip per file.
        channel_map -- optional channel-map name ('TPC'/'PDS' variants) used to
                       resolve plane/APA from the offline channel number.
    """
    for filename in filenames:
        h5_file = HDF5RawDataFile(filename)
        records = h5_file.get_all_record_ids()

        if nskip > len(records):
            print(f'Requested records to skip {nskip} is greater than number of records {len(records)}. Exiting...')
            return

        # BUGFIX: the original mutated `nrecords` (nrecords = nskip + nrecords)
        # inside this per-file loop, corrupting the option value for every
        # subsequent file. Compute the slice per file without mutation instead.
        records_to_process = _select_records(records, nskip, nrecords)
        print(f'Will process {len(records_to_process)} of {len(records)} records.')

        # Channel numbers per geoid live in here (None if no map requested).
        ch_map = _load_channel_map(channel_map)

        with h5py.File(h5_file.get_file_name(), 'r') as f:
            record_type = f.attrs["record_type"]
            print(f'Record type is {record_type}')

        for r in records_to_process:
            # BUGFIX: the original f-string embedded the tuple {r[0],r[1]},
            # printing '=((n, s))'; format the two fields separately.
            print(f'Processing (Record Number,Sequence Number)=({r[0]},{r[1]})')

            for sid in h5_file.get_source_ids(r):
                # Only trigger-subsystem sources can hold TP fragments.
                if sid.subsystem != daqdataformats.SourceID.Subsystem.kTrigger:
                    continue

                frag = h5_file.get_frag(r, sid)
                if frag.get_fragment_type() != daqdataformats.FragmentType.kTriggerPrimitive:
                    continue

                _print_fragment_tps(frag, ch_map)

    # end file loop
    print(f'Processed all requested records')


def _select_records(records, nskip, nrecords):
    """Return the sub-list of records to process.

    Skips the first `nskip` entries, then takes up to `nrecords` of the rest
    (all of them when nrecords < 0). Relies on Python slice clamping, so a
    range extending past the end simply truncates.
    """
    if nrecords < 0:
        return records[nskip:]
    return records[nskip:nskip + nrecords]


def _load_channel_map(channel_map):
    """Build a detector channel map from its name; None if no/unknown name."""
    if channel_map is None:
        return None
    if 'TPC' in channel_map:
        return detchannelmaps.make_tpc_map(channel_map)
    if 'PDS' in channel_map:
        return detchannelmaps.make_pds_map(channel_map)
    return None


def _print_fragment_tps(frag, ch_map):
    """Unpack every TriggerPrimitive in `frag` and print it as TriggerPrimitiveData.

    Exits the process with an error if a TP's on-disk version does not match
    the version this software was built against.
    """
    print(f'Fragment (run,trigger,sequence)=({frag.get_run_number()},{frag.get_trigger_number()},{frag.get_sequence_number()})')

    # Hoist the per-TP size out of the loop; it is constant for the run.
    tp_size = trgdataformats.TriggerPrimitive.sizeof()
    n_tps = int(frag.get_data_size() / tp_size)
    print(f'Found {n_tps} TPs in fragment.')

    for i in range(n_tps):
        tp = trgdataformats.TriggerPrimitive(frag.get_data(i * tp_size))
        if tp.version != trgdataformats.TriggerPrimitive.s_trigger_primitive_version:
            sys.exit(f"ERROR: the TP data structure version found in the data ({tp.version}) does not match the version expected by this version of the software ({trgdataformats.TriggerPrimitive.s_trigger_primitive_version}). Please use a version of the software that matches the data.")

        plane = -1
        apa = "Unknown"
        # detid 3 and 10 are the TPC detector IDs this decoder maps — presumably
        # HD TPC and VD bottom TPC; TODO confirm against detdataformats DetID.
        if ch_map is not None and (tp.detid == 3 or tp.detid == 10):
            plane = ch_map.get_plane_from_offline_channel(tp.channel)
            apa = ch_map.get_tpc_element_from_offline_channel(tp.channel)

        tpd = TriggerPrimitiveData(run=frag.get_run_number(),
                                   trigger=frag.get_trigger_number(),
                                   sequence=frag.get_sequence_number(),
                                   src_id=frag.get_element_id().id,
                                   time_start=tp.time_start,
                                   samples_to_peak=tp.samples_to_peak,
                                   samples_over_threshold=tp.samples_over_threshold,
                                   channel=tp.channel,
                                   plane=plane,
                                   apa=apa,
                                   adc_integral=tp.adc_integral,
                                   adc_peak=tp.adc_peak,
                                   detid=tp.detid,
                                   flag=tp.flag,
                                   id_ta=-1)
        print(tpd)
110
111
# Script entry point: click parses sys.argv and supplies main()'s arguments.
if __name__ == '__main__':
    main()
main(filenames, nrecords, nskip, channel_map)
Definition tpdecoder.py:23