#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>
#include <linux/memblock.h>

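/*
 * Patterns written to and then read back from every word under test.
 * The walking-bit and inverted-bit values are meant to exercise each
 * data bit individually.
 */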
static u64 patterns[] __initdata = {
	/* The first entry has to be 0 to leave memtest with zeroed memory */
	0,
	0xffffffffffffffffULL,
	0x5555555555555555ULL,
	0xaaaaaaaaaaaaaaaaULL,
	0x1111111111111111ULL,
	0x2222222222222222ULL,
	0x4444444444444444ULL,
	0x8888888888888888ULL,
	0x3333333333333333ULL,
	0x6666666666666666ULL,
	0x9999999999999999ULL,
	0xccccccccccccccccULL,
	0x7777777777777777ULL,
	0xbbbbbbbbbbbbbbbbULL,
	0xddddddddddddddddULL,
	0xeeeeeeeeeeeeeeeeULL,
	0x7a6c7258554e494cULL, /* yeah ;-) ("LINUXrlz" in little-endian ASCII) */
};

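/*
 * Record a range that failed verification: log it and reserve it in
 * memblock so it is never handed to the page allocator.
 */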
static void __init reserve_bad_mem(u64 pattern, phys_addr_t start_bad, phys_addr_t end_bad)
{
	pr_info(" %016llx bad mem addr %pa - %pa reserved\n",
		cpu_to_be64(pattern), &start_bad, &end_bad);
	memblock_reserve(start_bad, end_bad - start_bad);
}

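/*
 * Write one pattern across [start_phys, start_phys + size), then read it
 * back word by word.  Mismatching words are merged into contiguous bad
 * ranges and handed to reserve_bad_mem().
 */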
static void __init memtest(u64 pattern, phys_addr_t start_phys, phys_addr_t size)
{
	u64 *p, *start, *end;
	phys_addr_t start_bad, last_bad;
	phys_addr_t start_phys_aligned;
	const size_t incr = sizeof(pattern);

	start_phys_aligned = ALIGN(start_phys, incr);
	start = __va(start_phys_aligned);
	end = start + (size - (start_phys_aligned - start_phys)) / incr;
	start_bad = 0;
	last_bad = 0;

	for (p = start; p < end; p++)
		*p = pattern;

	for (p = start; p < end; p++, start_phys_aligned += incr) {
		if (*p == pattern)
			continue;
		if (start_phys_aligned == last_bad + incr) {
			last_bad += incr;
			continue;
		}
		if (start_bad)
			reserve_bad_mem(pattern, start_bad, last_bad + incr);
		start_bad = last_bad = start_phys_aligned;
	}
	if (start_bad)
		reserve_bad_mem(pattern, start_bad, last_bad + incr);
}

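/*
 * Run a single pattern over every free memblock range that overlaps
 * [start, end), clamping each range to the requested window.
 */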
static void __init do_one_pass(u64 pattern, phys_addr_t start, phys_addr_t end)
{
	u64 i;
	phys_addr_t this_start, this_end;

	for_each_free_mem_range(i, NUMA_NO_NODE, MEMBLOCK_NONE, &this_start,
				&this_end, NULL) {
		this_start = clamp(this_start, start, end);
		this_end = clamp(this_end, start, end);
		if (this_start < this_end) {
			pr_info(" %pa - %pa pattern %016llx\n",
				&this_start, &this_end, cpu_to_be64(pattern));
			memtest(pattern, this_start, this_end - this_start);
		}
	}
}

/* default is disabled */
static unsigned int memtest_pattern __initdata;

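/*
 * "memtest=N" on the kernel command line requests N test passes; a bare
 * "memtest" runs one pass per pattern in the table above.
 */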
static int __init parse_memtest(char *arg)
{
	int ret = 0;

	if (arg)
		ret = kstrtouint(arg, 0, &memtest_pattern);
	else
		memtest_pattern = ARRAY_SIZE(patterns);

	return ret;
}

early_param("memtest", parse_memtest);

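/*
 * Run the requested number of test passes over the physical range
 * [start, end).  Called from early arch setup code while memblock still
 * owns all free memory.
 */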
void __init early_memtest(phys_addr_t start, phys_addr_t end)
{
	unsigned int i;
	unsigned int idx = 0;

	if (!memtest_pattern)
		return;

	pr_info("early_memtest: # of tests: %u\n", memtest_pattern);
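	/*
	 * Count down so that the last pass uses patterns[0] and leaves the
	 * tested memory zeroed; the loop ends when the unsigned counter
	 * wraps past zero to UINT_MAX.
	 */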
	for (i = memtest_pattern-1; i < UINT_MAX; --i) {
		idx = i % ARRAY_SIZE(patterns);
		do_one_pass(patterns[idx], start, end);
	}
}