#include <linux/kernel.h>
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/mm.h>
#include <linux/smp.h>
#include <linux/init.h>
#include <linux/pfn.h>
#include <linux/memblock.h>

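/*
 * Test values written to and read back from every free memory word:
 * all zeroes, all ones, and alternating/walking bit patterns, the
 * usual suspects for exposing stuck or cross-coupled bits.
 */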
static u64 patterns[] __initdata = {
	/* The first entry has to be 0 to leave memtest with zeroed memory */
	0,
	0xffffffffffffffffULL,
	0x5555555555555555ULL,
	0xaaaaaaaaaaaaaaaaULL,
	0x1111111111111111ULL,
	0x2222222222222222ULL,
	0x4444444444444444ULL,
	0x8888888888888888ULL,
	0x3333333333333333ULL,
	0x6666666666666666ULL,
	0x9999999999999999ULL,
	0xccccccccccccccccULL,
	0x7777777777777777ULL,
	0xbbbbbbbbbbbbbbbbULL,
	0xddddddddddddddddULL,
	0xeeeeeeeeeeeeeeeeULL,
	0x7a6c7258554e494cULL, /* yeah ;-) */
};

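/*
 * Log a failing physical range and reserve it in memblock so it is
 * never handed out as usable memory.
 */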
static void __init reserve_bad_mem(u64 pattern, u64 start_bad, u64 end_bad)
{
	printk(KERN_INFO "  %016llx bad mem addr %010llx - %010llx reserved\n",
	       (unsigned long long) pattern,
	       (unsigned long long) start_bad,
	       (unsigned long long) end_bad);
	memblock_reserve(start_bad, end_bad - start_bad);
}

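/*
 * Fill the pattern-aligned part of [start_phys, start_phys + size) with
 * @pattern, read it back, and reserve every contiguous range that does
 * not compare equal.
 */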
static void __init memtest(u64 pattern, u64 start_phys, u64 size)
{
	u64 *p, *start, *end;
	u64 start_bad, last_bad;
	u64 start_phys_aligned;
	const size_t incr = sizeof(pattern);

	start_phys_aligned = ALIGN(start_phys, incr);
	start = __va(start_phys_aligned);
	end = start + (size - (start_phys_aligned - start_phys)) / incr;
	start_bad = 0;
	last_bad = 0;

	for (p = start; p < end; p++)
		*p = pattern;

	for (p = start; p < end; p++, start_phys_aligned += incr) {
		if (*p == pattern)
			continue;
		if (start_phys_aligned == last_bad + incr) {
			last_bad += incr;
			continue;
		}
		if (start_bad)
			reserve_bad_mem(pattern, start_bad, last_bad + incr);
		start_bad = last_bad = start_phys_aligned;
	}
	if (start_bad)
		reserve_bad_mem(pattern, start_bad, last_bad + incr);
}

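/*
 * Apply one pattern to each free memblock range, clamped to
 * [start, end).
 */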
static void __init do_one_pass(u64 pattern, u64 start, u64 end)
{
	u64 i;
	phys_addr_t this_start, this_end;

	for_each_free_mem_range(i, NUMA_NO_NODE, &this_start, &this_end, NULL) {
		this_start = clamp_t(phys_addr_t, this_start, start, end);
		this_end = clamp_t(phys_addr_t, this_end, start, end);
		if (this_start < this_end) {
			printk(KERN_INFO "  %010llx - %010llx pattern %016llx\n",
			       (unsigned long long)this_start,
			       (unsigned long long)this_end,
			       (unsigned long long)cpu_to_be64(pattern));
			memtest(pattern, this_start, this_end - this_start);
		}
	}
}

/* default is disabled */
static int memtest_pattern __initdata;

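/*
 * "memtest=<N>" on the kernel command line requests N test passes;
 * a bare "memtest" runs one pass for every entry in patterns[].
 */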
static int __init parse_memtest(char *arg)
{
	if (arg)
		memtest_pattern = simple_strtoul(arg, NULL, 0);
	else
		memtest_pattern = ARRAY_SIZE(patterns);

	return 0;
}

early_param("memtest", parse_memtest);

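/*
 * Run the requested passes from the highest pattern index down to 0,
 * so patterns[0] (all zeroes) is written last and the tested memory is
 * handed over zeroed.
 */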
void __init early_memtest(unsigned long start, unsigned long end)
{
	unsigned int i;
	unsigned int idx = 0;

	if (!memtest_pattern)
		return;

	printk(KERN_INFO "early_memtest: # of tests: %d\n", memtest_pattern);
	for (i = memtest_pattern-1; i < UINT_MAX; --i) {
		idx = i % ARRAY_SIZE(patterns);
		do_one_pass(patterns[idx], start, end);
	}
}