Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

kunit: tool: refactoring printing logic into kunit_printer.py

Context:
* kunit_kernel.py is importing kunit_parser.py just to use the
print_with_timestamp() function
* the parser is directly printing to stdout, which will become an issue
if we ever try to run multiple kernels in parallel

This patch introduces a kunit_printer.py file and migrates callers of
kunit_parser.print_with_timestamp() to call
kunit_printer.stdout.print_with_timestamp() instead.

Future changes:
If we want to support showing results for parallel runs, we could then
create new Printers that don't directly write to stdout and refactor
the code to pass around these Printer objects.

Signed-off-by: Daniel Latypov <dlatypov@google.com>
Reviewed-by: David Gow <davidgow@google.com>
Reviewed-by: Brendan Higgins <brendanhiggins@google.com>
Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>

Authored by Daniel Latypov and committed by Shuah Khan
e756dbeb 8a04930f

+82 -58
+9 -8
tools/testing/kunit/kunit.py
··· 22 22 import kunit_json 23 23 import kunit_kernel 24 24 import kunit_parser 25 + from kunit_printer import stdout 25 26 26 27 class KunitStatus(Enum): 27 28 SUCCESS = auto() ··· 73 72 74 73 def config_tests(linux: kunit_kernel.LinuxSourceTree, 75 74 request: KunitConfigRequest) -> KunitResult: 76 - kunit_parser.print_with_timestamp('Configuring KUnit Kernel ...') 75 + stdout.print_with_timestamp('Configuring KUnit Kernel ...') 77 76 78 77 config_start = time.time() 79 78 success = linux.build_reconfig(request.build_dir, request.make_options) ··· 86 85 87 86 def build_tests(linux: kunit_kernel.LinuxSourceTree, 88 87 request: KunitBuildRequest) -> KunitResult: 89 - kunit_parser.print_with_timestamp('Building KUnit Kernel ...') 88 + stdout.print_with_timestamp('Building KUnit Kernel ...') 90 89 91 90 build_start = time.time() 92 91 success = linux.build_kernel(request.alltests, ··· 159 158 test_counts = kunit_parser.TestCounts() 160 159 exec_time = 0.0 161 160 for i, filter_glob in enumerate(filter_globs): 162 - kunit_parser.print_with_timestamp('Starting KUnit Kernel ({}/{})...'.format(i+1, len(filter_globs))) 161 + stdout.print_with_timestamp('Starting KUnit Kernel ({}/{})...'.format(i+1, len(filter_globs))) 163 162 164 163 test_start = time.time() 165 164 run_result = linux.run_kernel( ··· 222 221 else: 223 222 with open(request.json, 'w') as f: 224 223 f.write(json_str) 225 - kunit_parser.print_with_timestamp("Test results stored in %s" % 224 + stdout.print_with_timestamp("Test results stored in %s" % 226 225 os.path.abspath(request.json)) 227 226 228 227 if test_result.status != kunit_parser.TestStatus.SUCCESS: ··· 246 245 247 246 run_end = time.time() 248 247 249 - kunit_parser.print_with_timestamp(( 248 + stdout.print_with_timestamp(( 250 249 'Elapsed time: %.3fs total, %.3fs configuring, %.3fs ' + 251 250 'building, %.3fs running\n') % ( 252 251 run_end - run_start, ··· 447 446 request = KunitConfigRequest(build_dir=cli_args.build_dir, 448 447 
make_options=cli_args.make_options) 449 448 result = config_tests(linux, request) 450 - kunit_parser.print_with_timestamp(( 449 + stdout.print_with_timestamp(( 451 450 'Elapsed time: %.3fs\n') % ( 452 451 result.elapsed_time)) 453 452 if result.status != KunitStatus.SUCCESS: ··· 459 458 jobs=cli_args.jobs, 460 459 alltests=cli_args.alltests) 461 460 result = config_and_build_tests(linux, request) 462 - kunit_parser.print_with_timestamp(( 461 + stdout.print_with_timestamp(( 463 462 'Elapsed time: %.3fs\n') % ( 464 463 result.elapsed_time)) 465 464 if result.status != KunitStatus.SUCCESS: ··· 475 474 kernel_args=cli_args.kernel_args, 476 475 run_isolated=cli_args.run_isolated) 477 476 result = exec_tests(linux, exec_request) 478 - kunit_parser.print_with_timestamp(( 477 + stdout.print_with_timestamp(( 479 478 'Elapsed time: %.3fs\n') % (result.elapsed_time)) 480 479 if result.status != KunitStatus.SUCCESS: 481 480 sys.exit(1)
+4 -4
tools/testing/kunit/kunit_kernel.py
··· 18 18 from typing import Iterator, List, Optional, Tuple 19 19 20 20 import kunit_config 21 - import kunit_parser 21 + from kunit_printer import stdout 22 22 import qemu_config 23 23 24 24 KCONFIG_PATH = '.config' ··· 138 138 super().__init__(linux_arch='um', cross_compile=cross_compile) 139 139 140 140 def make_allyesconfig(self, build_dir: str, make_options) -> None: 141 - kunit_parser.print_with_timestamp( 141 + stdout.print_with_timestamp( 142 142 'Enabling all CONFIGs for UML...') 143 143 command = ['make', 'ARCH=um', 'O=' + build_dir, 'allyesconfig'] 144 144 if make_options: ··· 148 148 stdout=subprocess.DEVNULL, 149 149 stderr=subprocess.STDOUT) 150 150 process.wait() 151 - kunit_parser.print_with_timestamp( 151 + stdout.print_with_timestamp( 152 152 'Disabling broken configs to run KUnit tests...') 153 153 154 154 with open(get_kconfig_path(build_dir), 'a') as config: 155 155 with open(BROKEN_ALLCONFIG_PATH, 'r') as disable: 156 156 config.write(disable.read()) 157 - kunit_parser.print_with_timestamp( 157 + stdout.print_with_timestamp( 158 158 'Starting Kernel with all configs takes a few minutes...') 159 159 160 160 def start(self, params: List[str], build_dir: str) -> subprocess.Popen:
+19 -44
tools/testing/kunit/kunit_parser.py
··· 13 13 import re 14 14 import sys 15 15 16 - import datetime 17 16 from enum import Enum, auto 18 17 from typing import Iterable, Iterator, List, Optional, Tuple 18 + 19 + from kunit_printer import stdout 19 20 20 21 class Test: 21 22 """ ··· 56 55 def add_error(self, error_message: str) -> None: 57 56 """Records an error that occurred while parsing this test.""" 58 57 self.counts.errors += 1 59 - print_with_timestamp(red('[ERROR]') + f' Test: {self.name}: {error_message}') 58 + stdout.print_with_timestamp(stdout.red('[ERROR]') + f' Test: {self.name}: {error_message}') 60 59 61 60 class TestStatus(Enum): 62 61 """An enumeration class to represent the status of a test.""" ··· 462 461 463 462 DIVIDER = '=' * 60 464 463 465 - RESET = '\033[0;0m' 466 - 467 - def red(text: str) -> str: 468 - """Returns inputted string with red color code.""" 469 - if not sys.stdout.isatty(): 470 - return text 471 - return '\033[1;31m' + text + RESET 472 - 473 - def yellow(text: str) -> str: 474 - """Returns inputted string with yellow color code.""" 475 - if not sys.stdout.isatty(): 476 - return text 477 - return '\033[1;33m' + text + RESET 478 - 479 - def green(text: str) -> str: 480 - """Returns inputted string with green color code.""" 481 - if not sys.stdout.isatty(): 482 - return text 483 - return '\033[1;32m' + text + RESET 484 - 485 - ANSI_LEN = len(red('')) 486 - 487 - def print_with_timestamp(message: str) -> None: 488 - """Prints message with timestamp at beginning.""" 489 - print('[%s] %s' % (datetime.datetime.now().strftime('%H:%M:%S'), message)) 490 - 491 464 def format_test_divider(message: str, len_message: int) -> str: 492 465 """ 493 466 Returns string with message centered in fixed width divider. 
··· 504 529 message += ' (1 subtest)' 505 530 else: 506 531 message += f' ({test.expected_count} subtests)' 507 - print_with_timestamp(format_test_divider(message, len(message))) 532 + stdout.print_with_timestamp(format_test_divider(message, len(message))) 508 533 509 534 def print_log(log: Iterable[str]) -> None: 510 535 """Prints all strings in saved log for test in yellow.""" 511 536 for m in log: 512 - print_with_timestamp(yellow(m)) 537 + stdout.print_with_timestamp(stdout.yellow(m)) 513 538 514 539 def format_test_result(test: Test) -> str: 515 540 """ ··· 526 551 String containing formatted test result 527 552 """ 528 553 if test.status == TestStatus.SUCCESS: 529 - return green('[PASSED] ') + test.name 554 + return stdout.green('[PASSED] ') + test.name 530 555 if test.status == TestStatus.SKIPPED: 531 - return yellow('[SKIPPED] ') + test.name 556 + return stdout.yellow('[SKIPPED] ') + test.name 532 557 if test.status == TestStatus.NO_TESTS: 533 - return yellow('[NO TESTS RUN] ') + test.name 558 + return stdout.yellow('[NO TESTS RUN] ') + test.name 534 559 if test.status == TestStatus.TEST_CRASHED: 535 560 print_log(test.log) 536 - return red('[CRASHED] ') + test.name 561 + return stdout.red('[CRASHED] ') + test.name 537 562 print_log(test.log) 538 - return red('[FAILED] ') + test.name 563 + return stdout.red('[FAILED] ') + test.name 539 564 540 565 def print_test_result(test: Test) -> None: 541 566 """ ··· 547 572 Parameters: 548 573 test - Test object representing current test being printed 549 574 """ 550 - print_with_timestamp(format_test_result(test)) 575 + stdout.print_with_timestamp(format_test_result(test)) 551 576 552 577 def print_test_footer(test: Test) -> None: 553 578 """ ··· 560 585 test - Test object representing current test being printed 561 586 """ 562 587 message = format_test_result(test) 563 - print_with_timestamp(format_test_divider(message, 564 - len(message) - ANSI_LEN)) 588 + stdout.print_with_timestamp(format_test_divider(message, 
589 + len(message) - stdout.color_len())) 565 590 566 591 def print_summary_line(test: Test) -> None: 567 592 """ ··· 578 603 test - Test object representing current test being printed 579 604 """ 580 605 if test.status == TestStatus.SUCCESS: 581 - color = green 606 + color = stdout.green 582 607 elif test.status in (TestStatus.SKIPPED, TestStatus.NO_TESTS): 583 - color = yellow 608 + color = stdout.yellow 584 609 else: 585 - color = red 586 - print_with_timestamp(color(f'Testing complete. {test.counts}')) 610 + color = stdout.red 611 + stdout.print_with_timestamp(color(f'Testing complete. {test.counts}')) 587 612 588 613 # Other methods: 589 614 ··· 737 762 Return: 738 763 Test - the main test object with all subtests. 739 764 """ 740 - print_with_timestamp(DIVIDER) 765 + stdout.print_with_timestamp(DIVIDER) 741 766 lines = extract_tap_lines(kernel_output) 742 767 test = Test() 743 768 if not lines: ··· 748 773 test = parse_test(lines, 0, []) 749 774 if test.status != TestStatus.NO_TESTS: 750 775 test.status = test.counts.get_status() 751 - print_with_timestamp(DIVIDER) 776 + stdout.print_with_timestamp(DIVIDER) 752 777 print_summary_line(test) 753 778 return test
+48
tools/testing/kunit/kunit_printer.py
··· 1 + #!/usr/bin/env python3 2 + # SPDX-License-Identifier: GPL-2.0 3 + # 4 + # Utilities for printing and coloring output. 5 + # 6 + # Copyright (C) 2022, Google LLC. 7 + # Author: Daniel Latypov <dlatypov@google.com> 8 + 9 + import datetime 10 + import sys 11 + import typing 12 + 13 + _RESET = '\033[0;0m' 14 + 15 + class Printer: 16 + """Wraps a file object, providing utilities for coloring output, etc.""" 17 + 18 + def __init__(self, output: typing.IO): 19 + self._output = output 20 + self._use_color = output.isatty() 21 + 22 + def print(self, message: str) -> None: 23 + print(message, file=self._output) 24 + 25 + def print_with_timestamp(self, message: str) -> None: 26 + ts = datetime.datetime.now().strftime('%H:%M:%S') 27 + self.print(f'[{ts}] {message}') 28 + 29 + def _color(self, code: str, text: str) -> str: 30 + if not self._use_color: 31 + return text 32 + return code + text + _RESET 33 + 34 + def red(self, text: str) -> str: 35 + return self._color('\033[1;31m', text) 36 + 37 + def yellow(self, text: str) -> str: 38 + return self._color('\033[1;33m', text) 39 + 40 + def green(self, text: str) -> str: 41 + return self._color('\033[1;32m', text) 42 + 43 + def color_len(self) -> int: 44 + """Returns the length of the color escape codes.""" 45 + return len(self.red('')) 46 + 47 + # Provides a default instance that prints to stdout 48 + stdout = Printer(sys.stdout)
+2 -2
tools/testing/kunit/kunit_tool_test.py
··· 222 222 223 223 def test_no_kunit_output(self): 224 224 crash_log = test_data_path('test_insufficient_memory.log') 225 - print_mock = mock.patch('builtins.print').start() 225 + print_mock = mock.patch('kunit_printer.Printer.print').start() 226 226 with open(crash_log) as file: 227 227 result = kunit_parser.parse_run_tests( 228 228 kunit_parser.extract_tap_lines(file.readlines())) ··· 500 500 with open(path) as file: 501 501 all_passed_log = file.readlines() 502 502 503 - self.print_mock = mock.patch('builtins.print').start() 503 + self.print_mock = mock.patch('kunit_printer.Printer.print').start() 504 504 self.addCleanup(mock.patch.stopall) 505 505 506 506 self.mock_linux_init = mock.patch.object(kunit_kernel, 'LinuxSourceTree').start()