Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

Merge tag 'linux-kselftest-kunit-5.15-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/shuah/linux-kselftest

Pull KUnit updates from Shuah Khan:
"This KUnit update for Linux 5.15-rc1 adds new features and tests:

Tool:

- support for '--kernel_args' to allow setting module params

- support for '--raw_output' option to show just the kunit output
during make

Tests:

- new KUnit tests for checksums and timestamps

- Print test statistics on failure

- Integrates UBSAN into the KUnit testing framework, so that KUnit
tests fail whenever UBSAN reports undefined behavior"

* tag 'linux-kselftest-kunit-5.15-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/shuah/linux-kselftest:
kunit: Print test statistics on failure
kunit: tool: make --raw_output support only showing kunit output
kunit: tool: add --kernel_args to allow setting module params
kunit: ubsan integration
fat: Add KUnit tests for checksums and timestamps

+398 -24
+6 -3
Documentation/dev-tools/kunit/kunit-tool.rst
··· 114 114 115 115 ./tools/testing/kunit/kunit.py run --raw_output 116 116 117 - .. note:: 118 - The raw output from test runs may contain other, non-KUnit kernel log 119 - lines. 117 + The raw output from test runs may contain other, non-KUnit kernel log 118 + lines. You can see just KUnit output with ``--raw_output=kunit``: 119 + 120 + .. code-block:: bash 121 + 122 + ./tools/testing/kunit/kunit.py run --raw_output=kunit 120 123 121 124 If you have KUnit results in their raw TAP format, you can parse them and print 122 125 the human-readable summary with the ``parse`` command for kunit_tool. This
+10
Documentation/dev-tools/kunit/running_tips.rst
··· 80 80 automagically, but tests could theoretically depend on incompatible 81 81 options, so handling that would be tricky. 82 82 83 + Setting kernel commandline parameters 84 + ------------------------------------- 85 + 86 + You can use ``--kernel_args`` to pass arbitrary kernel arguments, e.g. 87 + 88 + .. code-block:: bash 89 + 90 + $ ./tools/testing/kunit/kunit.py run --kernel_args=param=42 --kernel_args=param2=false 91 + 92 + 83 93 Generating code coverage reports under UML 84 94 ------------------------------------------ 85 95
+5
fs/fat/.kunitconfig
··· 1 + CONFIG_KUNIT=y 2 + CONFIG_FAT_FS=y 3 + CONFIG_MSDOS_FS=y 4 + CONFIG_VFAT_FS=y 5 + CONFIG_FAT_KUNIT_TEST=y
+13 -1
fs/fat/Kconfig
··· 77 77 78 78 config FAT_DEFAULT_CODEPAGE 79 79 int "Default codepage for FAT" 80 - depends on MSDOS_FS || VFAT_FS 80 + depends on FAT_FS 81 81 default 437 82 82 help 83 83 This option should be set to the codepage of your FAT filesystems. ··· 115 115 Say Y if you use UTF-8 encoding for file names, N otherwise. 116 116 117 117 See <file:Documentation/filesystems/vfat.rst> for more information. 118 + 119 + config FAT_KUNIT_TEST 120 + tristate "Unit Tests for FAT filesystems" if !KUNIT_ALL_TESTS 121 + depends on KUNIT && FAT_FS 122 + default KUNIT_ALL_TESTS 123 + help 124 + This builds the FAT KUnit tests 125 + 126 + For more information on KUnit and unit tests in general, please refer 127 + to the KUnit documentation in Documentation/dev-tools/kunit 128 + 129 + If unsure, say N
+2
fs/fat/Makefile
··· 10 10 fat-y := cache.o dir.o fatent.o file.o inode.o misc.o nfs.o 11 11 vfat-y := namei_vfat.o 12 12 msdos-y := namei_msdos.o 13 + 14 + obj-$(CONFIG_FAT_KUNIT_TEST) += fat_test.o
+196
fs/fat/fat_test.c
··· 1 + // SPDX-License-Identifier: GPL-2.0 2 + /* 3 + * KUnit tests for FAT filesystems. 4 + * 5 + * Copyright (C) 2020 Google LLC. 6 + * Author: David Gow <davidgow@google.com> 7 + */ 8 + 9 + #include <kunit/test.h> 10 + 11 + #include "fat.h" 12 + 13 + static void fat_checksum_test(struct kunit *test) 14 + { 15 + /* With no extension. */ 16 + KUNIT_EXPECT_EQ(test, fat_checksum("VMLINUX "), (u8)44); 17 + /* With 3-letter extension. */ 18 + KUNIT_EXPECT_EQ(test, fat_checksum("README TXT"), (u8)115); 19 + /* With short (1-letter) extension. */ 20 + KUNIT_EXPECT_EQ(test, fat_checksum("ABCDEFGHA "), (u8)98); 21 + } 22 + 23 + struct fat_timestamp_testcase { 24 + const char *name; 25 + struct timespec64 ts; 26 + __le16 time; 27 + __le16 date; 28 + u8 cs; 29 + int time_offset; 30 + }; 31 + 32 + static struct fat_timestamp_testcase time_test_cases[] = { 33 + { 34 + .name = "Earliest possible UTC (1980-01-01 00:00:00)", 35 + .ts = {.tv_sec = 315532800LL, .tv_nsec = 0L}, 36 + .time = cpu_to_le16(0), 37 + .date = cpu_to_le16(33), 38 + .cs = 0, 39 + .time_offset = 0, 40 + }, 41 + { 42 + .name = "Latest possible UTC (2107-12-31 23:59:58)", 43 + .ts = {.tv_sec = 4354819198LL, .tv_nsec = 0L}, 44 + .time = cpu_to_le16(49021), 45 + .date = cpu_to_le16(65439), 46 + .cs = 0, 47 + .time_offset = 0, 48 + }, 49 + { 50 + .name = "Earliest possible (UTC-11) (== 1979-12-31 13:00:00 UTC)", 51 + .ts = {.tv_sec = 315493200LL, .tv_nsec = 0L}, 52 + .time = cpu_to_le16(0), 53 + .date = cpu_to_le16(33), 54 + .cs = 0, 55 + .time_offset = 11 * 60, 56 + }, 57 + { 58 + .name = "Latest possible (UTC+11) (== 2108-01-01 10:59:58 UTC)", 59 + .ts = {.tv_sec = 4354858798LL, .tv_nsec = 0L}, 60 + .time = cpu_to_le16(49021), 61 + .date = cpu_to_le16(65439), 62 + .cs = 0, 63 + .time_offset = -11 * 60, 64 + }, 65 + { 66 + .name = "Leap Day / Year (1996-02-29 00:00:00)", 67 + .ts = {.tv_sec = 825552000LL, .tv_nsec = 0L}, 68 + .time = cpu_to_le16(0), 69 + .date = cpu_to_le16(8285), 70 + .cs = 0, 71 + 
.time_offset = 0, 72 + }, 73 + { 74 + .name = "Year 2000 is leap year (2000-02-29 00:00:00)", 75 + .ts = {.tv_sec = 951782400LL, .tv_nsec = 0L}, 76 + .time = cpu_to_le16(0), 77 + .date = cpu_to_le16(10333), 78 + .cs = 0, 79 + .time_offset = 0, 80 + }, 81 + { 82 + .name = "Year 2100 not leap year (2100-03-01 00:00:00)", 83 + .ts = {.tv_sec = 4107542400LL, .tv_nsec = 0L}, 84 + .time = cpu_to_le16(0), 85 + .date = cpu_to_le16(61537), 86 + .cs = 0, 87 + .time_offset = 0, 88 + }, 89 + { 90 + .name = "Leap year + timezone UTC+1 (== 2004-02-29 00:30:00 UTC)", 91 + .ts = {.tv_sec = 1078014600LL, .tv_nsec = 0L}, 92 + .time = cpu_to_le16(48064), 93 + .date = cpu_to_le16(12380), 94 + .cs = 0, 95 + .time_offset = -60, 96 + }, 97 + { 98 + .name = "Leap year + timezone UTC-1 (== 2004-02-29 23:30:00 UTC)", 99 + .ts = {.tv_sec = 1078097400LL, .tv_nsec = 0L}, 100 + .time = cpu_to_le16(960), 101 + .date = cpu_to_le16(12385), 102 + .cs = 0, 103 + .time_offset = 60, 104 + }, 105 + { 106 + .name = "VFAT odd-second resolution (1999-12-31 23:59:59)", 107 + .ts = {.tv_sec = 946684799LL, .tv_nsec = 0L}, 108 + .time = cpu_to_le16(49021), 109 + .date = cpu_to_le16(10143), 110 + .cs = 100, 111 + .time_offset = 0, 112 + }, 113 + { 114 + .name = "VFAT 10ms resolution (1980-01-01 00:00:00:0010)", 115 + .ts = {.tv_sec = 315532800LL, .tv_nsec = 10000000L}, 116 + .time = cpu_to_le16(0), 117 + .date = cpu_to_le16(33), 118 + .cs = 1, 119 + .time_offset = 0, 120 + }, 121 + }; 122 + 123 + static void time_testcase_desc(struct fat_timestamp_testcase *t, 124 + char *desc) 125 + { 126 + strscpy(desc, t->name, KUNIT_PARAM_DESC_SIZE); 127 + } 128 + 129 + KUNIT_ARRAY_PARAM(fat_time, time_test_cases, time_testcase_desc); 130 + 131 + static void fat_time_fat2unix_test(struct kunit *test) 132 + { 133 + static struct msdos_sb_info fake_sb; 134 + struct timespec64 ts; 135 + struct fat_timestamp_testcase *testcase = 136 + (struct fat_timestamp_testcase *)test->param_value; 137 + 138 + fake_sb.options.tz_set = 1; 
139 + fake_sb.options.time_offset = testcase->time_offset; 140 + 141 + fat_time_fat2unix(&fake_sb, &ts, 142 + testcase->time, 143 + testcase->date, 144 + testcase->cs); 145 + KUNIT_EXPECT_EQ_MSG(test, 146 + testcase->ts.tv_sec, 147 + ts.tv_sec, 148 + "Timestamp mismatch (seconds)\n"); 149 + KUNIT_EXPECT_EQ_MSG(test, 150 + testcase->ts.tv_nsec, 151 + ts.tv_nsec, 152 + "Timestamp mismatch (nanoseconds)\n"); 153 + } 154 + 155 + static void fat_time_unix2fat_test(struct kunit *test) 156 + { 157 + static struct msdos_sb_info fake_sb; 158 + __le16 date, time; 159 + u8 cs; 160 + struct fat_timestamp_testcase *testcase = 161 + (struct fat_timestamp_testcase *)test->param_value; 162 + 163 + fake_sb.options.tz_set = 1; 164 + fake_sb.options.time_offset = testcase->time_offset; 165 + 166 + fat_time_unix2fat(&fake_sb, &testcase->ts, 167 + &time, &date, &cs); 168 + KUNIT_EXPECT_EQ_MSG(test, 169 + le16_to_cpu(testcase->time), 170 + le16_to_cpu(time), 171 + "Time mismatch\n"); 172 + KUNIT_EXPECT_EQ_MSG(test, 173 + le16_to_cpu(testcase->date), 174 + le16_to_cpu(date), 175 + "Date mismatch\n"); 176 + KUNIT_EXPECT_EQ_MSG(test, 177 + testcase->cs, 178 + cs, 179 + "Centisecond mismatch\n"); 180 + } 181 + 182 + static struct kunit_case fat_test_cases[] = { 183 + KUNIT_CASE(fat_checksum_test), 184 + KUNIT_CASE_PARAM(fat_time_fat2unix_test, fat_time_gen_params), 185 + KUNIT_CASE_PARAM(fat_time_unix2fat_test, fat_time_gen_params), 186 + {}, 187 + }; 188 + 189 + static struct kunit_suite fat_test_suite = { 190 + .name = "fat_test", 191 + .test_cases = fat_test_cases, 192 + }; 193 + 194 + kunit_test_suites(&fat_test_suite); 195 + 196 + MODULE_LICENSE("GPL v2");
+3
fs/fat/misc.c
··· 230 230 } 231 231 } 232 232 233 + /* Export fat_time_fat2unix() for the fat_test KUnit tests. */ 234 + EXPORT_SYMBOL_GPL(fat_time_fat2unix); 235 + 233 236 /* Convert linear UNIX date to a FAT time/date pair. */ 234 237 void fat_time_unix2fat(struct msdos_sb_info *sbi, struct timespec64 *ts, 235 238 __le16 *time, __le16 *date, u8 *time_cs)
+109
lib/kunit/test.c
··· 10 10 #include <kunit/test-bug.h> 11 11 #include <linux/kernel.h> 12 12 #include <linux/kref.h> 13 + #include <linux/moduleparam.h> 13 14 #include <linux/sched/debug.h> 14 15 #include <linux/sched.h> 15 16 ··· 51 50 } 52 51 EXPORT_SYMBOL_GPL(__kunit_fail_current_test); 53 52 #endif 53 + 54 + /* 55 + * KUnit statistic mode: 56 + * 0 - disabled 57 + * 1 - only when there is more than one subtest 58 + * 2 - enabled 59 + */ 60 + static int kunit_stats_enabled = 1; 61 + module_param_named(stats_enabled, kunit_stats_enabled, int, 0644); 62 + MODULE_PARM_DESC(stats_enabled, 63 + "Print test stats: never (0), only for multiple subtests (1), or always (2)"); 64 + 65 + struct kunit_result_stats { 66 + unsigned long passed; 67 + unsigned long skipped; 68 + unsigned long failed; 69 + unsigned long total; 70 + }; 71 + 72 + static bool kunit_should_print_stats(struct kunit_result_stats stats) 73 + { 74 + if (kunit_stats_enabled == 0) 75 + return false; 76 + 77 + if (kunit_stats_enabled == 2) 78 + return true; 79 + 80 + return (stats.total > 1); 81 + } 82 + 83 + static void kunit_print_test_stats(struct kunit *test, 84 + struct kunit_result_stats stats) 85 + { 86 + if (!kunit_should_print_stats(stats)) 87 + return; 88 + 89 + kunit_log(KERN_INFO, test, 90 + KUNIT_SUBTEST_INDENT 91 + "# %s: pass:%lu fail:%lu skip:%lu total:%lu", 92 + test->name, 93 + stats.passed, 94 + stats.failed, 95 + stats.skipped, 96 + stats.total); 97 + } 54 98 55 99 /* 56 100 * Append formatted message to log, size of which is limited to ··· 439 393 test_case->status = KUNIT_SUCCESS; 440 394 } 441 395 396 + static void kunit_print_suite_stats(struct kunit_suite *suite, 397 + struct kunit_result_stats suite_stats, 398 + struct kunit_result_stats param_stats) 399 + { 400 + if (kunit_should_print_stats(suite_stats)) { 401 + kunit_log(KERN_INFO, suite, 402 + "# %s: pass:%lu fail:%lu skip:%lu total:%lu", 403 + suite->name, 404 + suite_stats.passed, 405 + suite_stats.failed, 406 + suite_stats.skipped, 407 + 
suite_stats.total); 408 + } 409 + 410 + if (kunit_should_print_stats(param_stats)) { 411 + kunit_log(KERN_INFO, suite, 412 + "# Totals: pass:%lu fail:%lu skip:%lu total:%lu", 413 + param_stats.passed, 414 + param_stats.failed, 415 + param_stats.skipped, 416 + param_stats.total); 417 + } 418 + } 419 + 420 + static void kunit_update_stats(struct kunit_result_stats *stats, 421 + enum kunit_status status) 422 + { 423 + switch (status) { 424 + case KUNIT_SUCCESS: 425 + stats->passed++; 426 + break; 427 + case KUNIT_SKIPPED: 428 + stats->skipped++; 429 + break; 430 + case KUNIT_FAILURE: 431 + stats->failed++; 432 + break; 433 + } 434 + 435 + stats->total++; 436 + } 437 + 438 + static void kunit_accumulate_stats(struct kunit_result_stats *total, 439 + struct kunit_result_stats add) 440 + { 441 + total->passed += add.passed; 442 + total->skipped += add.skipped; 443 + total->failed += add.failed; 444 + total->total += add.total; 445 + } 446 + 442 447 int kunit_run_tests(struct kunit_suite *suite) 443 448 { 444 449 char param_desc[KUNIT_PARAM_DESC_SIZE]; 445 450 struct kunit_case *test_case; 451 + struct kunit_result_stats suite_stats = { 0 }; 452 + struct kunit_result_stats total_stats = { 0 }; 446 453 447 454 kunit_print_subtest_start(suite); 448 455 449 456 kunit_suite_for_each_test_case(suite, test_case) { 450 457 struct kunit test = { .param_value = NULL, .param_index = 0 }; 458 + struct kunit_result_stats param_stats = { 0 }; 451 459 test_case->status = KUNIT_SKIPPED; 452 460 453 461 if (test_case->generate_params) { ··· 531 431 test.param_value = test_case->generate_params(test.param_value, param_desc); 532 432 test.param_index++; 533 433 } 434 + 435 + kunit_update_stats(&param_stats, test.status); 436 + 534 437 } while (test.param_value); 438 + 439 + kunit_print_test_stats(&test, param_stats); 535 440 536 441 kunit_print_ok_not_ok(&test, true, test_case->status, 537 442 kunit_test_case_num(suite, test_case), 538 443 test_case->name, 539 444 test.status_comment); 445 
+ 446 + kunit_update_stats(&suite_stats, test_case->status); 447 + kunit_accumulate_stats(&total_stats, param_stats); 540 448 } 541 449 450 + kunit_print_suite_stats(suite, suite_stats, total_stats); 542 451 kunit_print_subtest_end(suite); 543 452 544 453 return 0;
+3
lib/ubsan.c
··· 14 14 #include <linux/types.h> 15 15 #include <linux/sched.h> 16 16 #include <linux/uaccess.h> 17 + #include <kunit/test-bug.h> 17 18 18 19 #include "ubsan.h" 19 20 ··· 142 141 "========================================\n"); 143 142 pr_err("UBSAN: %s in %s:%d:%d\n", reason, loc->file_name, 144 143 loc->line & LINE_MASK, loc->column & COLUMN_MASK); 144 + 145 + kunit_fail_current_test("%s in %s", reason, loc->file_name); 145 146 } 146 147 147 148 static void ubsan_epilogue(void)
+27 -9
tools/testing/kunit/kunit.py
··· 16 16 17 17 from collections import namedtuple 18 18 from enum import Enum, auto 19 + from typing import Iterable 19 20 20 21 import kunit_config 21 22 import kunit_json ··· 31 30 ['jobs', 'build_dir', 'alltests', 32 31 'make_options']) 33 32 KunitExecRequest = namedtuple('KunitExecRequest', 34 - ['timeout', 'build_dir', 'alltests', 'filter_glob']) 33 + ['timeout', 'build_dir', 'alltests', 34 + 'filter_glob', 'kernel_args']) 35 35 KunitParseRequest = namedtuple('KunitParseRequest', 36 36 ['raw_output', 'input_data', 'build_dir', 'json']) 37 37 KunitRequest = namedtuple('KunitRequest', ['raw_output','timeout', 'jobs', 38 38 'build_dir', 'alltests', 'filter_glob', 39 - 'json', 'make_options']) 39 + 'kernel_args', 'json', 'make_options']) 40 40 41 41 KernelDirectoryPath = sys.argv[0].split('tools/testing/kunit/')[0] 42 42 ··· 96 94 kunit_parser.print_with_timestamp('Starting KUnit Kernel ...') 97 95 test_start = time.time() 98 96 result = linux.run_kernel( 97 + args=request.kernel_args, 99 98 timeout=None if request.alltests else request.timeout, 100 99 filter_glob=request.filter_glob, 101 100 build_dir=request.build_dir) ··· 115 112 'Tests not Parsed.') 116 113 117 114 if request.raw_output: 118 - kunit_parser.raw_output(request.input_data) 115 + output: Iterable[str] = request.input_data 116 + if request.raw_output == 'all': 117 + pass 118 + elif request.raw_output == 'kunit': 119 + output = kunit_parser.extract_tap_lines(output) 120 + else: 121 + print(f'Unknown --raw_output option "{request.raw_output}"', file=sys.stderr) 122 + for line in output: 123 + print(line.rstrip()) 124 + 119 125 else: 120 126 test_result = kunit_parser.parse_run_tests(request.input_data) 121 127 parse_end = time.time() ··· 145 133 return KunitResult(KunitStatus.SUCCESS, test_result, 146 134 parse_end - parse_start) 147 135 148 - 149 136 def run_tests(linux: kunit_kernel.LinuxSourceTree, 150 137 request: KunitRequest) -> KunitResult: 151 138 run_start = time.time() ··· 163 152 return 
build_result 164 153 165 154 exec_request = KunitExecRequest(request.timeout, request.build_dir, 166 - request.alltests, request.filter_glob) 155 + request.alltests, request.filter_glob, 156 + request.kernel_args) 167 157 exec_result = exec_tests(linux, exec_request) 168 158 if exec_result.status != KunitStatus.SUCCESS: 169 159 return exec_result ··· 190 178 parser.add_argument('--build_dir', 191 179 help='As in the make command, it specifies the build ' 192 180 'directory.', 193 - type=str, default='.kunit', metavar='build_dir') 181 + type=str, default='.kunit', metavar='build_dir') 194 182 parser.add_argument('--make_options', 195 183 help='X=Y make option, can be repeated.', 196 184 action='append') ··· 250 238 nargs='?', 251 239 default='', 252 240 metavar='filter_glob') 241 + parser.add_argument('--kernel_args', 242 + help='Kernel command-line parameters. Maybe be repeated', 243 + action='append') 253 244 254 245 def add_parse_opts(parser) -> None: 255 - parser.add_argument('--raw_output', help='don\'t format output from kernel', 256 - action='store_true') 246 + parser.add_argument('--raw_output', help='If set don\'t format output from kernel. ' 247 + 'If set to --raw_output=kunit, filters to just KUnit output.', 248 + type=str, nargs='?', const='all', default=None) 257 249 parser.add_argument('--json', 258 250 nargs='?', 259 251 help='Stores test results in a JSON, and either ' ··· 325 309 cli_args.build_dir, 326 310 cli_args.alltests, 327 311 cli_args.filter_glob, 312 + cli_args.kernel_args, 328 313 cli_args.json, 329 314 cli_args.make_options) 330 315 result = run_tests(linux, request) ··· 380 363 exec_request = KunitExecRequest(cli_args.timeout, 381 364 cli_args.build_dir, 382 365 cli_args.alltests, 383 - cli_args.filter_glob) 366 + cli_args.filter_glob, 367 + cli_args.kernel_args) 384 368 exec_result = exec_tests(linux, exec_request) 385 369 parse_request = KunitParseRequest(cli_args.raw_output, 386 370 exec_result.result,
+1 -5
tools/testing/kunit/kunit_parser.py
··· 106 106 yield line_num, line[prefix_len:] 107 107 return LineStream(lines=isolate_kunit_output(kernel_output)) 108 108 109 - def raw_output(kernel_output) -> None: 110 - for line in kernel_output: 111 - print(line.rstrip()) 112 - 113 109 DIVIDER = '=' * 60 114 110 115 111 RESET = '\033[0;0m' ··· 133 137 for m in log: 134 138 print_with_timestamp(m) 135 139 136 - TAP_ENTRIES = re.compile(r'^(TAP|[\s]*ok|[\s]*not ok|[\s]*[0-9]+\.\.[0-9]+|[\s]*#).*$') 140 + TAP_ENTRIES = re.compile(r'^(TAP|[\s]*ok|[\s]*not ok|[\s]*[0-9]+\.\.[0-9]+|[\s]*# (Subtest:|.*: kunit test case crashed!)).*$') 137 141 138 142 def consume_non_diagnostic(lines: LineStream) -> None: 139 143 while lines and not TAP_ENTRIES.match(lines.peek()):
+23 -6
tools/testing/kunit/kunit_tool_test.py
··· 356 356 self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 0) 357 357 self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1) 358 358 self.linux_source_mock.run_kernel.assert_called_once_with( 359 - build_dir='.kunit', filter_glob='', timeout=300) 359 + args=None, build_dir='.kunit', filter_glob='', timeout=300) 360 360 self.print_mock.assert_any_call(StrContains('Testing complete.')) 361 361 362 362 def test_run_passes_args_pass(self): ··· 364 364 self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1) 365 365 self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1) 366 366 self.linux_source_mock.run_kernel.assert_called_once_with( 367 - build_dir='.kunit', filter_glob='', timeout=300) 367 + args=None, build_dir='.kunit', filter_glob='', timeout=300) 368 368 self.print_mock.assert_any_call(StrContains('Testing complete.')) 369 369 370 370 def test_exec_passes_args_fail(self): ··· 399 399 self.assertNotEqual(call, mock.call(StrContains('Testing complete.'))) 400 400 self.assertNotEqual(call, mock.call(StrContains(' 0 tests run'))) 401 401 402 + def test_run_raw_output_kunit(self): 403 + self.linux_source_mock.run_kernel = mock.Mock(return_value=[]) 404 + kunit.main(['run', '--raw_output=kunit'], self.linux_source_mock) 405 + self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1) 406 + self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1) 407 + for call in self.print_mock.call_args_list: 408 + self.assertNotEqual(call, mock.call(StrContains('Testing complete.'))) 409 + self.assertNotEqual(call, mock.call(StrContains(' 0 tests run'))) 410 + 402 411 def test_exec_timeout(self): 403 412 timeout = 3453 404 413 kunit.main(['exec', '--timeout', str(timeout)], self.linux_source_mock) 405 414 self.linux_source_mock.run_kernel.assert_called_once_with( 406 - build_dir='.kunit', filter_glob='', timeout=timeout) 415 + args=None, build_dir='.kunit', filter_glob='', timeout=timeout) 407 416 
self.print_mock.assert_any_call(StrContains('Testing complete.')) 408 417 409 418 def test_run_timeout(self): ··· 420 411 kunit.main(['run', '--timeout', str(timeout)], self.linux_source_mock) 421 412 self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1) 422 413 self.linux_source_mock.run_kernel.assert_called_once_with( 423 - build_dir='.kunit', filter_glob='', timeout=timeout) 414 + args=None, build_dir='.kunit', filter_glob='', timeout=timeout) 424 415 self.print_mock.assert_any_call(StrContains('Testing complete.')) 425 416 426 417 def test_run_builddir(self): ··· 428 419 kunit.main(['run', '--build_dir=.kunit'], self.linux_source_mock) 429 420 self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1) 430 421 self.linux_source_mock.run_kernel.assert_called_once_with( 431 - build_dir=build_dir, filter_glob='', timeout=300) 422 + args=None, build_dir=build_dir, filter_glob='', timeout=300) 432 423 self.print_mock.assert_any_call(StrContains('Testing complete.')) 433 424 434 425 def test_config_builddir(self): ··· 445 436 build_dir = '.kunit' 446 437 kunit.main(['exec', '--build_dir', build_dir], self.linux_source_mock) 447 438 self.linux_source_mock.run_kernel.assert_called_once_with( 448 - build_dir=build_dir, filter_glob='', timeout=300) 439 + args=None, build_dir=build_dir, filter_glob='', timeout=300) 449 440 self.print_mock.assert_any_call(StrContains('Testing complete.')) 450 441 451 442 @mock.patch.object(kunit_kernel, 'LinuxSourceTree') ··· 469 460 arch='um', 470 461 cross_compile=None, 471 462 qemu_config_path=None) 463 + 464 + def test_run_kernel_args(self): 465 + kunit.main(['run', '--kernel_args=a=1', '--kernel_args=b=2'], self.linux_source_mock) 466 + self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1) 467 + self.linux_source_mock.run_kernel.assert_called_once_with( 468 + args=['a=1','b=2'], build_dir='.kunit', filter_glob='', timeout=300) 469 + self.print_mock.assert_any_call(StrContains('Testing 
complete.')) 470 + 472 471 473 472 if __name__ == '__main__': 474 473 unittest.main()