242 """
243 Drives the processing and plotting.
244 """
245
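    # Unpack the parsed command-line arguments.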
    args = parse()
    filename = args.filename
    verbosity = args.verbose
    start_frag = args.start_frag
    end_frag = args.end_frag
    no_anomaly = args.no_anomaly
    seconds = args.seconds
    overwrite = args.overwrite
    batch_mode = args.batch_mode

    linear = args.linear
    log = args.log

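    # If neither scale was requested, plot both linear and log.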
    if (not linear) and (not log):
        linear = True
        log = True

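    # Build the TP reader for the input file. The reader class is an assumption
    # (trgtools.TPReader); it must provide get_fragment_paths(),
    # read_fragment()/read_all_fragments(), and the tp_data structured array used below.
    data = trgtools.TPReader(filename, verbosity)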

    if len(data.get_fragment_paths()) == 0:
        print("File doesn't contain any TriggerPrimitive fragments.")
        return 1

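    # Read all fragments by default, otherwise only the requested slice.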
    if start_frag == 0 and end_frag == -1:
        data.read_all_fragments()
    else:
        if end_frag == 0:
            frag_paths = data.get_fragment_paths()[start_frag:]
        else:
            frag_paths = data.get_fragment_paths()[start_frag:end_frag]

        for path in frag_paths:
            data.read_fragment(path)

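    # Find a new save name or overwrite an existing one.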
    save_name = find_save_name(data.run_id, data.file_index, overwrite)

    print(f"Number of TPs: {data.tp_data.shape[0]}")

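    # Prepare the text file that will hold the descriptive statistics, unless disabled.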
    if not no_anomaly:
        anomaly_filename = f"{save_name}.txt"
        if verbosity >= 2:
            print(f"Writing descriptive statistics to {anomaly_filename}.")
        if os.path.isfile(anomaly_filename):
            # Start from a clean statistics file.
            os.remove(anomaly_filename)

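    # Label time-like axes in seconds or ticks, depending on the seconds option.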
    time_label = "Time (s)" if seconds else "Time (Ticks)"

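    # Histogram titles and styling per TP field, keyed by the field's dtype name.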
    plot_hist_dict = {
        'adc_integral': {
            'title': "ADC Integral Histogram",
            'xlabel': "ADC Integral",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'adc_peak': {
            'title': "ADC Peak Histogram",
            'xlabel': "ADC Count",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'channel': {
            'title': "Channel Histogram",
            'xlabel': "Channel Number",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'detid': {
            'title': "Detector ID Histogram",
            'xlabel': "Detector IDs",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log'),
            'use_integer_xticks': True
        },
        'flag': {
            'title': "Flag Histogram",
            'xlabel': "Flags",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log'),
            'use_integer_xticks': True
        },
        'samples_over_threshold': {
            'title': "Samples Over Threshold Histogram",
            'xlabel': time_label,
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'samples_to_peak': {
            'title': "Samples To Peak Histogram",
            'xlabel': time_label,
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'time_start': {
            'title': "Relative Time Start Histogram",
            'xlabel': time_label,
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log')
        },
        'version': {
            'title': "Version Histogram",
            'xlabel': "Versions",
            'ylabel': "Count",
            'linear': linear,
            'linear_style': dict(color='#63ACBE', alpha=0.6, label='Linear'),
            'log': log,
            'log_style': dict(color='#EE442F', alpha=0.6, label='Log'),
            'use_integer_xticks': True
        }
    }

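    # All plots are collected into a single PDF.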
    pdf_plotter = PDFPlotter(save_name)

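    # Histogram every TP field; time fields are shifted to start at zero
    # (and optionally converted to seconds).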
    for tp_key in data.tp_data.dtype.names:
        if 'time' in tp_key:
            time = data.tp_data[tp_key]
            if seconds:
                time = time * TICK_TO_SEC_SCALE
            min_time = np.min(time)
            pdf_plotter.plot_histogram(time - min_time, plot_hist_dict[tp_key])
            if not no_anomaly:
                write_summary_stats(time - min_time, anomaly_filename, tp_key)
            continue

        pdf_plotter.plot_histogram(data.tp_data[tp_key], plot_hist_dict[tp_key])
        if not no_anomaly:
            write_summary_stats(data.tp_data[tp_key], anomaly_filename, tp_key)

    pdf = pdf_plotter.get_pdf()

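    # Append the two-dimensional comparison plots to the same PDF.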
    plot_pdf_sot_vs_channel(data.tp_data, pdf)

    plot_pdf_adc_integral_vs_peak(data.tp_data, pdf, verbosity)

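    # Close the PDF.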
    pdf_plotter.close()

    return None
