DUNE-DAQ
DUNE Trigger and Data Acquisition software
testapp_noreadout_confgen.py
# Set moo schema search path
from dunedaq.env import get_moo_model_path
import moo.io
moo.io.default_load_path = get_moo_model_path()

# Load configuration types
import moo.otypes

moo.otypes.load_types('rcif/cmd.jsonnet')
moo.otypes.load_types('appfwk/cmd.jsonnet')
moo.otypes.load_types('appfwk/app.jsonnet')

moo.otypes.load_types('trigemu/triggerdecisionemulator.jsonnet')
moo.otypes.load_types('trigemu/faketimesyncsource.jsonnet')
moo.otypes.load_types('dfmodules/requestgenerator.jsonnet')
moo.otypes.load_types('dfmodules/fragmentreceiver.jsonnet')
moo.otypes.load_types('dfmodules/datawriter.jsonnet')
moo.otypes.load_types('dfmodules/hdf5datastore.jsonnet')
moo.otypes.load_types('dfmodules/fakedataprod.jsonnet')

# Import new types
import dunedaq.cmdlib.cmd as basecmd # AddressedCmd,
import dunedaq.rcif.cmd as rccmd # AddressedCmd,
import dunedaq.appfwk.cmd as cmd # AddressedCmd,
import dunedaq.appfwk.app as app # AddressedCmd,
import dunedaq.trigemu.triggerdecisionemulator as tde
import dunedaq.trigemu.faketimesyncsource as ftss
import dunedaq.dfmodules.requestgenerator as rqg
import dunedaq.dfmodules.fragmentreceiver as ffr
import dunedaq.dfmodules.datawriter as dw
import dunedaq.dfmodules.hdf5datastore as hdf5ds
import dunedaq.dfmodules.fakedataprod as fdp

from appfwk.utils import mcmd, mrccmd, mspec

import json
import math
# Time to wait on pop()
QUEUE_POP_WAIT_MS = 100
# Local clock speed (Hz)
CLOCK_SPEED_HZ = 62500000

def generate(
        NUMBER_OF_DATA_PRODUCERS=2,
        DATA_RATE_SLOWDOWN_FACTOR=1,
        RUN_NUMBER=333,
        TRIGGER_RATE_HZ=1.0,
        DATA_FILE="./frames.bin",
        OUTPUT_PATH=".",
        DISABLE_OUTPUT=False,
        TOKEN_COUNT=10
):

    trigger_interval_ticks = math.floor((1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR)
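    # Worked example with the defaults above: TRIGGER_RATE_HZ=1.0,
    # CLOCK_SPEED_HZ=62500000 and DATA_RATE_SLOWDOWN_FACTOR=1 give
    # floor((1/1.0) * 62500000/1) = 62500000 ticks between triggers, i.e. one
    # trigger per second of the 62.5 MHz clock. With a slowdown factor of 10 the
    # interval becomes 6250000 ticks, which is still roughly one trigger per
    # wall-clock second on the correspondingly slowed-down clock.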

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
        app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="trigger_decision_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="trigger_record_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="data_fragments_q", kind='FollyMPMCQueue', capacity=100),
    ] + [
        app.QueueSpec(inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=20)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]


    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst))
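    # Note on the queue kinds used above (assumed from the folly-based queue
    # implementations in appfwk): 'FollySPSCQueue' is single-producer/
    # single-consumer, 'FollyMPMCQueue' is multi-producer/multi-consumer. The
    # MPMC kind is used where several modules feed the same queue, e.g. every
    # fake data producer pushes into data_fragments_q.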


    mod_specs = [
        mspec("tde", "TriggerDecisionEmulator", [
            app.QueueInfo(name="time_sync_source", inst="time_sync_q", dir="input"),
            app.QueueInfo(name="token_source", inst="token_q", dir="input"),
            app.QueueInfo(name="trigger_decision_sink", inst="trigger_decision_q", dir="output"),
        ]),

        mspec("rqg", "RequestGenerator", [
            app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_q", dir="input"),
            app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"),
        ] + [
            app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output")
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),

        mspec("ffr", "FragmentReceiver", [
            app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"),
            app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"),
            app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"),
        ]),

        mspec("datawriter", "DataWriterModule", [
            app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"),
            app.QueueInfo(name="token_output_queue", inst="token_q", dir="output"),
        ]),

        mspec("fake_timesync_source", "FakeTimeSyncSource", [
            app.QueueInfo(name="time_sync_sink", inst="time_sync_q", dir="output"),
        ]),

    ] + [

        mspec(f"fakedataprod_{idx}", "FakeDataProdModule", [
            app.QueueInfo(name="data_request_input_queue", inst=f"data_requests_{idx}", dir="input"),
            app.QueueInfo(name="data_fragment_output_queue", inst="data_fragments_q", dir="output"),
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]
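    # Data flow implied by the wiring above: the TriggerDecisionEmulator ("tde")
    # emits trigger decisions; the RequestGenerator ("rqg") turns each decision
    # into per-link data requests and forwards a bookkeeping copy; each fake data
    # producer answers its request with a fragment on data_fragments_q; the
    # FragmentReceiver ("ffr") assembles fragments into trigger records; and the
    # data writer stores the records and returns tokens that throttle "tde".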

    init_specs = app.Init(queues=queue_specs, modules=mod_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rccmd.RCCommand(
        id=basecmd.CmdId("init"),
        entry_state="NONE",
        exit_state="INITIAL",
        data=init_specs
    )
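    # For orientation, the full init command built here serializes to roughly
    # this envelope (a sketch based only on the constructor arguments above; the
    # exact layout is defined by the rcif/cmd.jsonnet schema):
    #   {"id": "init", "entry_state": "NONE", "exit_state": "INITIAL",
    #    "data": {"modules": [...], "queues": [...]}}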


    confcmd = mrccmd("conf", "INITIAL", "CONFIGURED", [
        ("tde", tde.ConfParams(
            links=[idx for idx in range(NUMBER_OF_DATA_PRODUCERS)],
            min_links_in_request=NUMBER_OF_DATA_PRODUCERS,
            max_links_in_request=NUMBER_OF_DATA_PRODUCERS,
            min_readout_window_ticks=1200,
            max_readout_window_ticks=1200,
            trigger_window_offset=1000,
            # The delay is set to put the trigger well within the latency buffer
            trigger_delay_ticks=math.floor(2 * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR),
            # We divide the trigger interval by
            # DATA_RATE_SLOWDOWN_FACTOR so the triggers are still
            # emitted per (wall-clock) second, rather than being
            # spaced out further
            trigger_interval_ticks=trigger_interval_ticks,
            clock_frequency_hz=CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR
        )),
        ("rqg", rqg.ConfParams(
            map=rqg.mapgeoidqueue([
                rqg.geoidinst(apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS)
            ])
        )),
        ("ffr", ffr.ConfParams(
            general_queue_timeout=QUEUE_POP_WAIT_MS
        )),
        ("datawriter", dw.ConfParams(
            initial_token_count=TOKEN_COUNT,
            data_store_parameters=hdf5ds.ConfParams(
                name="data_store",
                # type = "HDF5DataStore", # default
                directory_path=OUTPUT_PATH, # default
                # mode = "all-per-file", # default
                max_file_size_bytes=1073741834,
                disable_unique_filename_suffix=False,
                filename_parameters=hdf5ds.HDF5DataStoreFileNameParams(
                    overall_prefix="fake_minidaqapp",
                    # digits_for_run_number = 6, # default
                    file_index_prefix="file"
                ),
                file_layout_parameters=hdf5ds.HDF5DataStoreFileLayoutParams(
                    trigger_record_name_prefix="TriggerRecord",
                    digits_for_trigger_number=5,
                )
            )
        )),
        ("fake_timesync_source", ftss.ConfParams(
            sync_interval_ticks=(CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR),
            clock_frequency_hz=(CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR),
        )),
    ] + [
        (f"fakedataprod_{idx}", fdp.ConfParams(
            temporarily_hacked_link_number=idx
        )) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ])

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rccmd.StartParams(run=RUN_NUMBER, disable_data_storage=DISABLE_OUTPUT)
    startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [
        ("datawriter", startpars),
        ("ffr", startpars),
        ("fakedataprod_.*", startpars),
        ("rqg", startpars),
        ("fake_timesync_source", startpars),
        ("tde", startpars),
    ])
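    # The first element of each pair addresses module instances by name;
    # "fakedataprod_.*" is presumably treated as a regular expression so that a
    # single entry reaches every fake data producer.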

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("="*80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [
        ("fake_timesync_source", None),
        ("tde", None),
        ("rqg", None),
        ("fakedataprod_.*", None),
        ("ffr", None),
        ("datawriter", None),
    ])

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("="*80 + "\nStop\n\n", jstr)

    pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [
        ("", None)
    ])

    jstr = json.dumps(pausecmd.pod(), indent=4, sort_keys=True)
    print("="*80 + "\nPause\n\n", jstr)

    resumecmd = mrccmd("resume", "RUNNING", "RUNNING", [
        ("tde", tde.ResumeParams(
            trigger_interval_ticks=trigger_interval_ticks
        ))
    ])

    jstr = json.dumps(resumecmd.pod(), indent=4, sort_keys=True)
    print("="*80 + "\nResume\n\n", jstr)

    scrapcmd = mcmd("scrap", [
        ("", None)
    ])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("="*80 + "\nScrap\n\n", jstr)

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd]
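    # Together these commands describe a full run-control cycle: init
    # (NONE -> INITIAL), conf (INITIAL -> CONFIGURED), start (CONFIGURED ->
    # RUNNING), pause/resume while RUNNING, stop (RUNNING -> CONFIGURED), and
    # scrap, which presumably tears the configuration back down.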

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr

if __name__ == '__main__':
    # Add -h as default help option
    CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

    import click

    @click.command(context_settings=CONTEXT_SETTINGS)
    @click.option('-n', '--number-of-data-producers', default=2)
    @click.option('-s', '--data-rate-slowdown-factor', default=1)
    @click.option('-r', '--run-number', default=333)
    @click.option('-t', '--trigger-rate-hz', default=1.0)
    @click.option('-d', '--data-file', type=click.Path(), default='./frames.bin')
    @click.option('-o', '--output-path', type=click.Path(), default='.')
    @click.option('-c', '--token-count', default=10)
    @click.option('--disable-data-storage', is_flag=True)
    @click.argument('json_file', type=click.Path(), default='testapp_noreadout.json')
    def cli(number_of_data_producers, data_rate_slowdown_factor, run_number, trigger_rate_hz, data_file, output_path, token_count, disable_data_storage, json_file):
        """
        JSON_FILE: Output JSON configuration file.
        """

        with open(json_file, 'w') as f:
            f.write(generate(
                NUMBER_OF_DATA_PRODUCERS=number_of_data_producers,
                DATA_RATE_SLOWDOWN_FACTOR=data_rate_slowdown_factor,
                RUN_NUMBER=run_number,
                TRIGGER_RATE_HZ=trigger_rate_hz,
                DATA_FILE=data_file,
                OUTPUT_PATH=output_path,
                DISABLE_OUTPUT=disable_data_storage,
                TOKEN_COUNT=token_count
            ))

        print(f"'{json_file}' generation completed.")

    cli()

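# Example invocation (hypothetical file and path names, not taken from the
# source):
#
#   python testapp_noreadout_confgen.py -n 4 -t 2.0 -o /tmp my_test_config.json
#
# This would write the JSON command sequence for four fake data producers and a
# 2 Hz trigger rate to my_test_config.json, with HDF5 output directed to /tmp.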