DUNE-DAQ
DUNE Trigger and Data Acquisition software
Loading...
Searching...
No Matches
testapp_noreadout_confgen Namespace Reference

Functions

 generate (NUMBER_OF_DATA_PRODUCERS=2, DATA_RATE_SLOWDOWN_FACTOR=1, RUN_NUMBER=333, TRIGGER_RATE_HZ=1.0, DATA_FILE="./frames.bin", OUTPUT_PATH=".", DISABLE_OUTPUT=False, TOKEN_COUNT=10)
 
 cli (number_of_data_producers, data_rate_slowdown_factor, run_number, trigger_rate_hz, data_file, output_path, token_count, disable_data_storage, json_file)
 

Variables

 default_load_path
 
int QUEUE_POP_WAIT_MS = 100;
 
int CLOCK_SPEED_HZ = 62500000;
 
 CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
 

Function Documentation

◆ cli()

testapp_noreadout_confgen.cli ( number_of_data_producers,
data_rate_slowdown_factor,
run_number,
trigger_rate_hz,
data_file,
output_path,
token_count,
disable_data_storage,
json_file )
  DATA_FILE: Input raw data file.
  JSON_FILE: Output json configuration file.

Definition at line 254 of file testapp_noreadout_confgen.py.

254 def cli(number_of_data_producers, data_rate_slowdown_factor, run_number, trigger_rate_hz, data_file, output_path, token_count, disable_data_storage, json_file):
255 """
256 DATA_FILE: Input raw data file.
257 JSON_FILE: Output json configuration file.
258 """
259
260 with open(json_file, 'w') as f:
261 f.write(generate(
262 NUMBER_OF_DATA_PRODUCERS = number_of_data_producers,
263 DATA_RATE_SLOWDOWN_FACTOR = data_rate_slowdown_factor,
264 RUN_NUMBER = run_number,
265 TRIGGER_RATE_HZ = trigger_rate_hz,
266 DATA_FILE = data_file,
267 OUTPUT_PATH = output_path,
268 DISABLE_OUTPUT = disable_data_storage,
269 TOKEN_COUNT = token_count
270 ))
271
272 print(f"'{json_file}' generation completed.")
273

◆ generate()

testapp_noreadout_confgen.generate ( NUMBER_OF_DATA_PRODUCERS = 2,
DATA_RATE_SLOWDOWN_FACTOR = 1,
RUN_NUMBER = 333,
TRIGGER_RATE_HZ = 1.0,
DATA_FILE = "./frames.bin",
OUTPUT_PATH = ".",
DISABLE_OUTPUT = False,
TOKEN_COUNT = 10 )

Definition at line 43 of file testapp_noreadout_confgen.py.

52 ):
53
54 trigger_interval_ticks = math.floor((1/TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR)
55
56 # Define modules and queues
57 queue_bare_specs = [
58 app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
59 app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=20),
60 app.QueueSpec(inst="trigger_decision_q", kind='FollySPSCQueue', capacity=20),
61 app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=20),
62 app.QueueSpec(inst="trigger_record_q", kind='FollySPSCQueue', capacity=20),
63 app.QueueSpec(inst="data_fragments_q", kind='FollyMPMCQueue', capacity=100),
64 ] + [
65 app.QueueSpec(inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=20)
66 for idx in range(NUMBER_OF_DATA_PRODUCERS)
67 ]
68
69
70 # Only needed to reproduce the same order as when using jsonnet
71 queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst))
72
73
74 mod_specs = [
75 mspec("tde", "TriggerDecisionEmulator", [
76 app.QueueInfo(name="time_sync_source", inst="time_sync_q", dir="input"),
77 app.QueueInfo(name="token_source", inst="token_q", dir="input"),
78 app.QueueInfo(name="trigger_decision_sink", inst="trigger_decision_q", dir="output"),
79 ]),
80
81 mspec("rqg", "RequestGenerator", [
82 app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_q", dir="input"),
83 app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"),
84 ] + [
85 app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output")
86 for idx in range(NUMBER_OF_DATA_PRODUCERS)
87 ]),
88
89 mspec("ffr", "FragmentReceiver", [
90 app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"),
91 app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"),
92 app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"),
93 ]),
94
95 mspec("datawriter", "DataWriterModule", [
96 app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"),
97 app.QueueInfo(name="token_output_queue", inst="token_q", dir="output"),
98 ]),
99
100 mspec("fake_timesync_source", "FakeTimeSyncSource", [
101 app.QueueInfo(name="time_sync_sink", inst="time_sync_q", dir="output"),
102 ]),
103
104 ] + [
105
106 mspec(f"fakedataprod_{idx}", "FakeDataProdModule", [
107 app.QueueInfo(name="data_request_input_queue", inst=f"data_requests_{idx}", dir="input"),
108 app.QueueInfo(name="data_fragment_output_queue", inst="data_fragments_q", dir="output"),
109 ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
110 ]
111
112 init_specs = app.Init(queues=queue_specs, modules=mod_specs)
113
114 jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
115 print(jstr)
116
117 initcmd = rccmd.RCCommand(
118 id=basecmd.CmdId("init"),
119 entry_state="NONE",
120 exit_state="INITIAL",
121 data=init_specs
122 )
123
124
125 confcmd = mrccmd("conf", "INITIAL", "CONFIGURED",[
126 ("tde", tde.ConfParams(
127 links=[idx for idx in range(NUMBER_OF_DATA_PRODUCERS)],
128 min_links_in_request=NUMBER_OF_DATA_PRODUCERS,
129 max_links_in_request=NUMBER_OF_DATA_PRODUCERS,
130 min_readout_window_ticks=1200,
131 max_readout_window_ticks=1200,
132 trigger_window_offset=1000,
133 # The delay is set to put the trigger well within the latency buff
134 trigger_delay_ticks=math.floor( 2* CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR),
135 # We divide the trigger interval by
136 # DATA_RATE_SLOWDOWN_FACTOR so the triggers are still
137 # emitted per (wall-clock) second, rather than being
138 # spaced out further
139 trigger_interval_ticks=trigger_interval_ticks,
140 clock_frequency_hz=CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR
141 )),
142 ("rqg", rqg.ConfParams(
143 map=rqg.mapgeoidqueue([
144 rqg.geoidinst(apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS)
145 ])
146 )),
147 ("ffr", ffr.ConfParams(
148 general_queue_timeout=QUEUE_POP_WAIT_MS
149 )),
150 ("datawriter", dw.ConfParams(
151 initial_token_count=TOKEN_COUNT,
152 data_store_parameters=hdf5ds.ConfParams(
153 name="data_store",
154 # type = "HDF5DataStore", # default
155 directory_path = OUTPUT_PATH, # default
156 # mode = "all-per-file", # default
157 max_file_size_bytes = 1073741834,
158 disable_unique_filename_suffix = False,
159 filename_parameters = hdf5ds.HDF5DataStoreFileNameParams(
160 overall_prefix = "fake_minidaqapp",
161 # digits_for_run_number = 6, #default
162 file_index_prefix = "file"
163 ),
164 file_layout_parameters = hdf5ds.HDF5DataStoreFileLayoutParams(
165 trigger_record_name_prefix= "TriggerRecord",
166 digits_for_trigger_number = 5,
167 )
168 )
169 )),
170 ("fake_timesync_source", ftss.ConfParams(
171 sync_interval_ticks = (CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR),
172 clock_frequency_hz = (CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR),
173 )),
174 ] + [
175 (f"fakedataprod_{idx}", fdp.ConfParams(
176 temporarily_hacked_link_number = idx
177 )) for idx in range(NUMBER_OF_DATA_PRODUCERS)
178 ])
179
180 jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
181 print(jstr)
182
183 startpars = rccmd.StartParams(run=RUN_NUMBER, disable_data_storage=DISABLE_OUTPUT)
184 startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [
185 ("datawriter", startpars),
186 ("ffr", startpars),
187 ("fakedataprod_.*", startpars),
188 ("rqg", startpars),
189 ("fake_timesync_source", startpars),
190 ("tde", startpars),
191 ])
192
193 jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
194 print("="*80+"\nStart\n\n", jstr)
195
196 stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [
197 ("fake_timesync_source", None),
198 ("tde", None),
199 ("rqg", None),
200 ("fakedataprod_.*", None),
201 ("ffr", None),
202 ("datawriter", None),
203 ])
204
205 jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
206 print("="*80+"\nStop\n\n", jstr)
207
208 pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [
209 ("", None)
210 ])
211
212 jstr = json.dumps(pausecmd.pod(), indent=4, sort_keys=True)
213 print("="*80+"\nPause\n\n", jstr)
214
215 resumecmd = mrccmd("resume", "RUNNING", "RUNNING", [
216 ("tde", tde.ResumeParams(
217 trigger_interval_ticks=trigger_interval_ticks
218 ))
219 ])
220
221 jstr = json.dumps(resumecmd.pod(), indent=4, sort_keys=True)
222 print("="*80+"\nResume\n\n", jstr)
223
224 scrapcmd = mcmd("scrap", [
225 ("", None)
226 ])
227
228 jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
229 print("="*80+"\nScrap\n\n", jstr)
230
231 # Create a list of commands
232 cmd_seq = [initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd]
233
234 # Print them as json (to be improved/moved out)
235 jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
236 return jstr
237

Variable Documentation

◆ CLOCK_SPEED_HZ

int testapp_noreadout_confgen.CLOCK_SPEED_HZ = 62500000;

Definition at line 41 of file testapp_noreadout_confgen.py.

◆ CONTEXT_SETTINGS

testapp_noreadout_confgen.CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

Definition at line 240 of file testapp_noreadout_confgen.py.

◆ default_load_path

testapp_noreadout_confgen.default_load_path

Definition at line 4 of file testapp_noreadout_confgen.py.

◆ QUEUE_POP_WAIT_MS

int testapp_noreadout_confgen.QUEUE_POP_WAIT_MS = 100;

Definition at line 39 of file testapp_noreadout_confgen.py.