#ifndef __ASM_MEMORY_MODEL_H
#define __ASM_MEMORY_MODEL_H

#ifndef __ASSEMBLY__

#if defined(CONFIG_FLATMEM)

#ifndef ARCH_PFN_OFFSET
#define ARCH_PFN_OFFSET		(0UL)
#endif

#elif defined(CONFIG_DISCONTIGMEM)

#ifndef arch_pfn_to_nid
#define arch_pfn_to_nid(pfn)	pfn_to_nid(pfn)
#endif

#ifndef arch_local_page_offset
#define arch_local_page_offset(pfn, nid)	\
	((pfn) - NODE_DATA(nid)->node_start_pfn)
#endif

#endif /* CONFIG_DISCONTIGMEM */

/*
 * Supports three memory models: FLATMEM, DISCONTIGMEM and SPARSEMEM
 * (with an optional virtually contiguous mem_map under SPARSEMEM_VMEMMAP).
 */
#if defined(CONFIG_FLATMEM)

#define __pfn_to_page(pfn)	(mem_map + ((pfn) - ARCH_PFN_OFFSET))
#define __page_to_pfn(page)	((unsigned long)((page) - mem_map) + \
				 ARCH_PFN_OFFSET)
#elif defined(CONFIG_DISCONTIGMEM)

#define __pfn_to_page(pfn)			\
({	unsigned long __pfn = (pfn);		\
	unsigned long __nid = arch_pfn_to_nid(__pfn);  \
	NODE_DATA(__nid)->node_mem_map + arch_local_page_offset(__pfn, __nid);\
})

#define __page_to_pfn(pg)						\
({	const struct page *__pg = (pg);					\
	struct pglist_data *__pgdat = NODE_DATA(page_to_nid(__pg));	\
	(unsigned long)(__pg - __pgdat->node_mem_map) +			\
	 __pgdat->node_start_pfn;					\
})

#elif defined(CONFIG_SPARSEMEM_VMEMMAP)

/* memmap is virtually contiguous. */
#define __pfn_to_page(pfn)	(vmemmap + (pfn))
#define __page_to_pfn(page)	(unsigned long)((page) - vmemmap)

#elif defined(CONFIG_SPARSEMEM)
/*
 * Note: section's mem_map is encoded to reflect its start_pfn.
 * section[i].section_mem_map == mem_map's address - start_pfn;
 */
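/*
 * For illustration (a sketch, not part of the interface): if a section's
 * mem_map starts at struct page pointer M and the section's first pfn is S,
 * the section stores the pre-biased pointer M - S, so decoding a pfn P that
 * falls inside the section needs no subtraction at lookup time:
 *
 *	__pfn_to_page(P)  == (M - S) + P  == &M[P - S]
 *	__page_to_pfn(pg) == pg - (M - S) == (pg - M) + S
 *
 * (M, S and P are hypothetical names used only in this example.)
 */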
#define __page_to_pfn(pg)					\
({	const struct page *__pg = (pg);				\
	int __sec = page_to_section(__pg);			\
	(unsigned long)(__pg - __section_mem_map_addr(__nr_to_section(__sec)));	\
})

#define __pfn_to_page(pfn)				\
({	unsigned long __pfn = (pfn);			\
	struct mem_section *__sec = __pfn_to_section(__pfn);	\
	__section_mem_map_addr(__sec) + __pfn;		\
})
#endif /* CONFIG_FLATMEM/DISCONTIGMEM/SPARSEMEM */

/*
 * Convert a physical address to a Page Frame Number and back.
 */
#define	__phys_to_pfn(paddr)	((unsigned long)((paddr) >> PAGE_SHIFT))
#define	__pfn_to_phys(pfn)	PFN_PHYS(pfn)
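/*
 * Worked example (illustrative only, assuming PAGE_SHIFT == 12, i.e. 4 KiB
 * pages): __phys_to_pfn(0x12345678) == 0x12345678 >> 12 == 0x12345, and
 * __pfn_to_phys(0x12345) == 0x12345 << PAGE_SHIFT == 0x12345000, the
 * page-aligned base of the original physical address.
 */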

#define page_to_pfn __page_to_pfn
#define pfn_to_page __pfn_to_page
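/*
 * Typical round trip through the generic definitions (a sketch; "pfn" is
 * just an example variable, not part of this header):
 *
 *	struct page *page = pfn_to_page(pfn);
 *	unsigned long back = page_to_pfn(page);		(back == pfn)
 *
 * Architectures that include this header get pfn_to_page()/page_to_pfn()
 * implemented by whichever memory model was selected above.
 */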

#endif /* __ASSEMBLY__ */

#endif /* __ASM_MEMORY_MODEL_H */