DUNE-DAQ
DUNE Trigger and Data Acquisition software
run_check.py
#!/usr/bin/env python3

import os
import re

import click
from rich.console import Console
from os.path import join

import dfmodules.data_file_checks as data_file_checks
import integrationtest.log_file_checks as log_file_checks

# Add -h as a default help option
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

console = Console()
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('-t', '--skip-tps', is_flag=True, help="Skip check for TPSet Fragments in output")
@click.option('-n', '--number-of-data-producers', default=2, help="Number of links in the output")
@click.option('-l', '--check-for-logfile-errors', default=True, help="Whether to check for errors in the log files")
@click.option('-d', '--run_duration', default=60, help="Duration of each run, in seconds")
@click.option('--trigger-rate', default=1, help="The trigger rate, in Hz")
@click.argument('run_dir', type=click.Path(exists=True), default=os.curdir)
def cli(skip_tps, number_of_data_producers, check_for_logfile_errors, run_duration, trigger_rate, run_dir):

    # Collect the log and raw-data files produced in the run directory.
    # Note: isfile() must be given the joined path, not the bare filename,
    # or the listing breaks whenever run_dir is not the current directory.
    dirfiles = [join(run_dir, f) for f in os.listdir(run_dir) if os.path.isfile(join(run_dir, f))]
    log_files = [f for f in dirfiles if "log_" in f]
    data_files = [f for f in dirfiles if ".hdf5" in f]

    current_td_match = 0
    current_run_match = 0
    run_trigger_counts = {}

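    # Scan the trigger-application logs for per-run trigger-decision (TD)
    # counts. The regexes below expect lines of the following form
    # (illustrative examples inferred from the patterns, not copied from
    # any particular log):
    #   Sent 60 TDs.
    #   End of run 12345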
    for f in log_files:
        if "log_trigger" in f:
            with open(f) as log_file:
                for line in log_file:
                    match_tds = re.search(r"Sent (\d+) TDs\.", line)
                    if match_tds is not None:
                        current_td_match = int(match_tds.group(1))
                    match_run = re.search(r"End of run (\d+)", line)
                    if match_run is not None:
                        current_run_match = int(match_run.group(1))

                    # Once both a TD count and a run number have been seen,
                    # record the pair and reset the sentinels for the next run.
                    if current_td_match != 0 and current_run_match != 0:
                        run_trigger_counts[current_run_match] = current_td_match
                        current_run_match = 0
                        current_td_match = 0

    print(run_trigger_counts)
    expected_event_count = run_duration * trigger_rate
    expected_event_count_tolerance = 2

    wib1_frag_hsi_trig_params = {"fragment_type_description": "WIB",
                                 "hdf5_detector_group": "TPC", "hdf5_region_prefix": "APA",
                                 "expected_fragment_count": number_of_data_producers,
                                 "min_size_bytes": 37200, "max_size_bytes": 37200}
    wib1_frag_multi_trig_params = {"fragment_type_description": "WIB",
                                   "hdf5_detector_group": "TPC", "hdf5_region_prefix": "APA",
                                   "expected_fragment_count": number_of_data_producers,
                                   "min_size_bytes": 80, "max_size_bytes": 37200}
    rawtp_frag_params = {"fragment_type_description": "Raw TP",
                         "hdf5_detector_group": "TPC", "hdf5_region_prefix": "TP_APA",
                         "expected_fragment_count": number_of_data_producers,
                         "min_size_bytes": 80, "max_size_bytes": 80}
    triggertp_frag_params = {"fragment_type_description": "Trigger TP",
                             "hdf5_detector_group": "Trigger", "hdf5_region_prefix": "Region",
                             "expected_fragment_count": number_of_data_producers,
                             "min_size_bytes": 80, "max_size_bytes": 80}

    def test_log_files():
        if check_for_logfile_errors:
            # Check that there are no warnings or errors in the log files
            log_file_checks.logs_are_error_free(log_files)

    def test_data_file():
        local_expected_event_count = expected_event_count
        local_event_count_tolerance = expected_event_count_tolerance
        fragment_check_list = []
        if not skip_tps:
            # When TP checks are enabled, additional TP-triggered records are
            # expected on top of the randomly triggered ones (e.g. with 2 data
            # producers and a 60 s run this adds 270 * 2 * 60 / 100 = 324 events).
            local_expected_event_count += 270 * number_of_data_producers * run_duration / 100
            local_event_count_tolerance += 10 * number_of_data_producers * run_duration / 100
            fragment_check_list.append(wib1_frag_multi_trig_params)
            fragment_check_list.append(rawtp_frag_params)
            fragment_check_list.append(triggertp_frag_params)
        if len(fragment_check_list) == 0:
            fragment_check_list.append(wib1_frag_hsi_trig_params)

        for data_file_name in data_files:
            data_file = data_file_checks.DataFile(data_file_name)
            data_file_checks.sanity_check(data_file)
            data_file_checks.check_file_attributes(data_file)
            data_file_checks.check_event_count(data_file, local_expected_event_count, local_event_count_tolerance)
            for fragment_params in fragment_check_list:
                data_file_checks.check_fragment_count(data_file, fragment_params)
                data_file_checks.check_fragment_sizes(data_file, fragment_params)

    test_log_files()
    test_data_file()

if __name__ == '__main__':
    try:
        cli(show_default=True, standalone_mode=True)
    except Exception:
        console.print_exception()
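For quick local iteration, the command can also be driven from Python with click's built-in test runner instead of the shell. The sketch below is illustrative only: the module name run_check and the run directory path are assumptions, and the directory must already exist because the run_dir argument is declared with click.Path(exists=True).

# Hypothetical driver using click.testing.CliRunner; "run_check" and
# "/tmp/example_run_dir" are placeholder names for illustration only.
from click.testing import CliRunner

from run_check import cli

runner = CliRunner()
result = runner.invoke(cli, ["--number-of-data-producers", "2",
                             "--run_duration", "60",
                             "--trigger-rate", "1",
                             "/tmp/example_run_dir"])  # must be an existing directory
print(result.exit_code)
print(result.output)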