// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/seq_file.h>

static bool early_memtest_done;
static phys_addr_t early_memtest_bad_size;

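/*
 * Test patterns, presumably chosen to exercise stuck-at and coupling
 * faults: all zeros, all ones, alternating bits, and walking bit
 * groups plus their complements. The last entry spells "LINUXrlz"
 * when stored little-endian.
 */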
static u64 patterns[] __initdata = {
	/* The first entry has to be 0 to leave memtest with zeroed memory */
	0,
	0xffffffffffffffffULL,
	0x5555555555555555ULL,
	0xaaaaaaaaaaaaaaaaULL,
	0x1111111111111111ULL,
	0x2222222222222222ULL,
	0x4444444444444444ULL,
	0x8888888888888888ULL,
	0x3333333333333333ULL,
	0x6666666666666666ULL,
	0x9999999999999999ULL,
	0xccccccccccccccccULL,
	0x7777777777777777ULL,
	0xbbbbbbbbbbbbbbbbULL,
	0xddddddddddddddddULL,
	0xeeeeeeeeeeeeeeeeULL,
	0x7a6c7258554e494cULL, /* yeah ;-) */
};

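/*
 * Log a faulty range, reserve it in memblock so it is never handed to
 * the page allocator, and account its size for reporting via
 * memtest_report_meminfo().
 */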
static void __init reserve_bad_mem(u64 pattern, phys_addr_t start_bad, phys_addr_t end_bad)
{
	pr_info(" %016llx bad mem addr %pa - %pa reserved\n",
		cpu_to_be64(pattern), &start_bad, &end_bad);
	memblock_reserve(start_bad, end_bad - start_bad);
	early_memtest_bad_size += (end_bad - start_bad);
}

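/*
 * Test one physical range against one pattern: fill every naturally
 * aligned word with the pattern, read everything back, and coalesce
 * runs of consecutive failing words into single reserved ranges.
 */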
static void __init memtest(u64 pattern, phys_addr_t start_phys, phys_addr_t size)
{
	u64 *p, *start, *end;
	phys_addr_t start_bad, last_bad;
	phys_addr_t start_phys_aligned;
	const size_t incr = sizeof(pattern);

	start_phys_aligned = ALIGN(start_phys, incr);
	start = __va(start_phys_aligned);
	end = start + (size - (start_phys_aligned - start_phys)) / incr;
	start_bad = 0;
	last_bad = 0;

	for (p = start; p < end; p++)
		WRITE_ONCE(*p, pattern);

	for (p = start; p < end; p++, start_phys_aligned += incr) {
		if (READ_ONCE(*p) == pattern)
			continue;
		if (start_phys_aligned == last_bad + incr) {
			last_bad += incr;
			continue;
		}
		if (start_bad)
			reserve_bad_mem(pattern, start_bad, last_bad + incr);
		start_bad = last_bad = start_phys_aligned;
	}
	if (start_bad)
		reserve_bad_mem(pattern, start_bad, last_bad + incr);

	early_memtest_done = true;
}

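/*
 * Apply one pattern to every free memblock range, clamped to the
 * [start, end) window requested by the caller.
 */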
static void __init do_one_pass(u64 pattern, phys_addr_t start, phys_addr_t end)
{
	u64 i;
	phys_addr_t this_start, this_end;

	for_each_free_mem_range(i, NUMA_NO_NODE, MEMBLOCK_NONE, &this_start,
				&this_end, NULL) {
		this_start = clamp(this_start, start, end);
		this_end = clamp(this_end, start, end);
		if (this_start < this_end) {
			pr_info(" %pa - %pa pattern %016llx\n",
				&this_start, &this_end, cpu_to_be64(pattern));
			memtest(pattern, this_start, this_end - this_start);
		}
	}
}

/* default is disabled */
static unsigned int memtest_pattern __initdata;

static int __init parse_memtest(char *arg)
{
	int ret = 0;

	if (arg)
		ret = kstrtouint(arg, 0, &memtest_pattern);
	else
		memtest_pattern = ARRAY_SIZE(patterns);

	return ret;
}

early_param("memtest", parse_memtest);

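/*
 * Entry point, typically invoked from arch-specific early init code.
 * "memtest=N" on the command line runs N passes, counting down from
 * patterns[N-1] (modulo the table size) to patterns[0]; patterns[0]
 * is 0, so tested memory is left zeroed. A bare "memtest" runs every
 * pattern once. The loop condition relies on the unsigned wrap of
 * --i past zero to terminate.
 */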
void __init early_memtest(phys_addr_t start, phys_addr_t end)
{
	unsigned int i;
	unsigned int idx = 0;

	if (!memtest_pattern)
		return;

	pr_info("early_memtest: # of tests: %u\n", memtest_pattern);
	for (i = memtest_pattern-1; i < UINT_MAX; --i) {
		idx = i % ARRAY_SIZE(patterns);
		do_one_pass(patterns[idx], start, end);
	}
}

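/*
 * Report how much memory the early test found bad, presumably for the
 * /proc/meminfo show path. A nonzero total smaller than 1 kB is
 * rounded up so real failures never read as zero; nothing is printed
 * unless a test actually ran.
 */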
void memtest_report_meminfo(struct seq_file *m)
{
	unsigned long early_memtest_bad_size_kb;

	if (!IS_ENABLED(CONFIG_PROC_FS))
		return;

	if (!early_memtest_done)
		return;

	early_memtest_bad_size_kb = early_memtest_bad_size >> 10;
	if (early_memtest_bad_size && !early_memtest_bad_size_kb)
		early_memtest_bad_size_kb = 1;
	/* When 0 is reported, it means there actually was a successful test */
	seq_printf(m, "EarlyMemtestBad: %5lu kB\n", early_memtest_bad_size_kb);
}