diff --git a/scripts/analysis/parse-logs.py b/scripts/analysis/parse-logs.py
index ec3dcc96430fb63fcc511fbac0d8d3c025e7ca26..6db8e19cac93321f4dc30b42de722fda2713ad98 100755
--- a/scripts/analysis/parse-logs.py
+++ b/scripts/analysis/parse-logs.py
@@ -32,7 +32,6 @@ from transformer.output_raw_throughput import OutputRawThroughputTransformer
 from transformer.print_warnings import PrintWarningsFilter
 from transformer.resources import ResourceLogFilter
 from transformer.server import ServerLogFilter
-
 # Mass processing:
 # mv */* . && find . -iname '*.DS_Store' -delete && \
 # ../heterogeneous-smr/scripts/group-results.py . . rezk \
@@ -99,7 +98,8 @@ def setup_group_transformers(scenario: str) -> Iterable[Transformer]:
     ]
 
 
-def walk_folders(folder_name, relative_time_range):
+def walk_folders(folder_name: str, relative_time_range: Optional[Tuple[float, float]],
+                 use_min_log_start: bool = False):
     # split into groups
     groups = defaultdict(lambda: [])
     for root, dirs, _ in os.walk(folder_name):
@@ -124,7 +124,7 @@ def walk_folders(folder_name, relative_time_range):
         for dirname in sorted(dirnames):
             scenario, client_count = parse_folder_name(dirname)
             log_bundle, used_time_range = analyze_folder(os.path.join(folder_name, dirname), scenario, client_count,
-                                                         relative_time_range)
+                                                         relative_time_range, use_min_log_start)
             bundle.extend(log_bundle)
             if effective_time_range is None:
                 effective_time_range = used_time_range
@@ -159,11 +159,12 @@ def parse_folder_name(name: str) -> Tuple[Optional[str], int]:
 
 
 def analyze_folder(path: str, scenario: str, client_count: int,
-                   relative_time_range: Optional[Tuple[float, float]] = None) -> \
+                   relative_time_range: Optional[Tuple[float, float]] = None,
+                   use_min_log_start: bool = False) -> \
         Tuple[Iterable[LogData], Tuple[float, float]]:
     parsers = setup_parsers()
     logs = parse_logs(parsers, path, scenario, client_count)
-    log_base_time, auto_start_time, auto_end_time = auto_range(logs)
+    log_base_time, auto_start_time, auto_end_time = auto_range(logs, use_min_log_start)
     if relative_time_range is None:
         print("Auto range {} {}".format(auto_start_time, auto_end_time))
         for log in logs:
@@ -204,10 +205,20 @@ def parse_logs(parsers: Iterable[Parser], path: str, scenario: str, client_count
     return logs
 
 
-def auto_range(logs: List[LogData]) -> Tuple[float, float, float]:
+def is_auto_range_log(log: LogData) -> bool:
+    if "channel-micro" in log.scenario:
+        return log.logtype == "server"
+    return log.logtype == "client"
+
+
+def auto_range(logs: List[LogData], use_min_log_start: bool = False) -> Tuple[float, float, float]:
     base = float("inf")
     start = 0
     end = -base
+    merge_func = max
+    if use_min_log_start:
+        start = float("inf")
+        merge_func = min
 
     for log in logs:
         if not is_auto_range_log(log):
@@ -217,10 +228,13 @@ def auto_range(logs: List[LogData]) -> Tuple[float, float, float]:
             base = min(base, entry["timestamp"])
             end = max(end, entry["timestamp"])
 
-        log_start = find_start(log)
-        if log_start is None:
-            raise LogError("Failed to find start in log {}".format(log.source_path))
-        start = max(start, log_start)
+        if log.logtype == "client":
+            log_start = find_start(log)
+            if log_start is None:
+                raise LogError("Failed to find start in log {}".format(log.source_path))
+            start = merge_func(start, log_start)
+        else:
+            start = merge_func(start, base + 5)
 
     return base, start - base, end - AUTO_RANGE_SLACK_AT_END - base
 
@@ -261,10 +275,14 @@ def run():
     parser = argparse.ArgumentParser(description='Parse, check and process results')
     parser.add_argument('--range', type=int, nargs=2,
                         default=None, help='Use range between start and end from results', metavar=('START', 'END'))
+    parser.add_argument('--min-log-start', help="Use the minimal log start as range limit instead of the maximum. "
+                                                "This is useful when not all clients are active at the same time",
+                        action="store_true")
     args = parser.parse_args()
 
     relative_time_range = args.range
-    walk_folders(".", relative_time_range)
+    use_min_log_start = args.min_log_start
+    walk_folders(".", relative_time_range, use_min_log_start)
 
 
 if __name__ == '__main__':
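For illustration, a minimal runnable sketch of the start-time merging that the new --min-log-start flag toggles inside auto_range; merge_start and the timestamps below are hypothetical stand-ins for this note only, not helpers from the patch:

# Default: the measured range starts once *all* clients are active (max of the client starts).
# With --min-log-start it begins at the *first* active client (min), which matches the help
# text above for runs where not all clients are active at the same time.
def merge_start(client_starts, use_min_log_start=False):
    merge_func = min if use_min_log_start else max
    start = float("inf") if use_min_log_start else 0
    for log_start in client_starts:
        start = merge_func(start, log_start)
    return start

# Hypothetical client start timestamps (seconds); one client joins much later.
print(merge_start([12.0, 30.0, 31.0]))                          # 31.0 -> default behaviour
print(merge_start([12.0, 30.0, 31.0], use_min_log_start=True))  # 12.0 -> --min-log-start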