/* copy_page.S: UltraSparc optimized copy page.
 *
 * Copyright (C) 1996, 1998, 1999, 2000, 2004 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997 Jakub Jelinek (jakub@redhat.com)
 */

#include <asm/visasm.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/spitfire.h>
#include <asm/head.h>

/* What we used to do was lock a TLB entry into a specific
 * TLB slot, clear the page with interrupts disabled, then
 * restore the original TLB entry. This was great for
 * disturbing the TLB as little as possible, but it meant
 * we had to keep interrupts disabled for a long time.
 *
 * Now, we simply use the normal TLB loading mechanism,
 * and this makes the cpu choose a slot all by itself.
 * Then we do a normal TLB flush on exit. We need only
 * disable preemption during the clear.
 */
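
/* Roughly, in C-like terms, the code below does:
 *
 *	preempt_count++;
 *	with interrupts briefly disabled, install two temporary
 *	    PAGE_KERNEL_LOCKED mappings for dest and src in the
 *	    TLBTEMP_BASE area (offset by the D-cache alias bit of vaddr);
 *	copy PAGE_SIZE bytes through those mappings with VIS block
 *	    loads/stores;
 *	demap the two temporary translations;
 *	preempt_count--;
 *
 * This is only a sketch of the control flow, not the exact code.
 */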

#define	DCACHE_SIZE	(PAGE_SIZE * 2)

#if (PAGE_SHIFT == 13) || (PAGE_SHIFT == 19)
#define PAGE_SIZE_REM	0x80
#elif (PAGE_SHIFT == 16) || (PAGE_SHIFT == 22)
#define PAGE_SIZE_REM	0x100
#else
#error Wrong PAGE_SHIFT specified
#endif

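/* TOUCH copies one 64-byte block (eight double registers) into
 * %f48-%f62, the staging bank that is written out with a single
 * block store.
 */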
#define	TOUCH(reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7)	\
	fmovd	%reg0, %f48;	fmovd	%reg1, %f50;		\
	fmovd	%reg2, %f52;	fmovd	%reg3, %f54;		\
	fmovd	%reg4, %f56;	fmovd	%reg5, %f58;		\
	fmovd	%reg6, %f60;	fmovd	%reg7, %f62;

	.text

	.align		32
	.globl		copy_user_page
	.type		copy_user_page,#function
copy_user_page:	/* %o0=dest, %o1=src, %o2=vaddr */
	lduw		[%g6 + TI_PRE_COUNT], %o4
	sethi		%uhi(PAGE_OFFSET), %g2
	sethi		%hi(PAGE_SIZE), %o3

	sllx		%g2, 32, %g2
	sethi		%hi(PAGE_KERNEL_LOCKED), %g3

	ldx		[%g3 + %lo(PAGE_KERNEL_LOCKED)], %g3
	sub		%o0, %g2, %g1		! dest paddr

	sub		%o1, %g2, %g2		! src paddr

	and		%o2, %o3, %o0		! vaddr D-cache alias bit
	or		%g1, %g3, %g1		! dest TTE data

	or		%g2, %g3, %g2		! src TTE data
	sethi		%hi(TLBTEMP_BASE), %o3

	sethi		%hi(DCACHE_SIZE), %o1
	add		%o0, %o3, %o0		! dest TTE vaddr

	add		%o4, 1, %o2
	add		%o0, %o1, %o1		! src TTE vaddr

	/* Disable preemption. */
	mov		TLB_TAG_ACCESS, %g3
	stw		%o2, [%g6 + TI_PRE_COUNT]

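	/* Interrupts are turned off across the two TLB loads below so
	 * nothing can run in between and disturb TLB_TAG_ACCESS before
	 * the matching data has been pushed into the D-TLB.
	 */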
	/* Load TLB entries. */
	rdpr		%pstate, %o2
	wrpr		%o2, PSTATE_IE, %pstate
	stxa		%o0, [%g3] ASI_DMMU
	stxa		%g1, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	stxa		%o1, [%g3] ASI_DMMU
	stxa		%g2, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	wrpr		%o2, 0x0, %pstate

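	/* On Cheetah this branch is overwritten with a nop by
	 * cheetah_patch_copy_page() below, so we fall through into the
	 * block-prefetching copy loop at 1:.  Everyone else takes the
	 * branch to the generic VIS copy at 9:.
	 */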
cheetah_copy_page_insn:
	ba,pt		%xcc, 9f
	 nop

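	/* Cheetah path: a software-pipelined copy.  Each 64-byte block is
	 * loaded with ldd into %f0-%f14, staged into %f16-%f30, and then
	 * written back with a single 64-byte stda, while prefetches pull
	 * the source several cache lines ahead.
	 */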
1:
	VISEntryHalf
	membar		#StoreLoad | #StoreStore | #LoadStore
	sethi		%hi((PAGE_SIZE/64)-2), %o2
	mov		%o0, %g1
	prefetch	[%o1 + 0x000], #one_read
	or		%o2, %lo((PAGE_SIZE/64)-2), %o2
	prefetch	[%o1 + 0x040], #one_read
	prefetch	[%o1 + 0x080], #one_read
	prefetch	[%o1 + 0x0c0], #one_read
	ldd		[%o1 + 0x000], %f0
	prefetch	[%o1 + 0x100], #one_read
	ldd		[%o1 + 0x008], %f2
	prefetch	[%o1 + 0x140], #one_read
	ldd		[%o1 + 0x010], %f4
	prefetch	[%o1 + 0x180], #one_read
	fmovd		%f0, %f16
	ldd		[%o1 + 0x018], %f6
	fmovd		%f2, %f18
	ldd		[%o1 + 0x020], %f8
	fmovd		%f4, %f20
	ldd		[%o1 + 0x028], %f10
	fmovd		%f6, %f22
	ldd		[%o1 + 0x030], %f12
	fmovd		%f8, %f24
	ldd		[%o1 + 0x038], %f14
	fmovd		%f10, %f26
	ldd		[%o1 + 0x040], %f0
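	/* Main loop: each iteration stores the 64 bytes already staged in
	 * %f16-%f30 and loads/stages the next block, (PAGE_SIZE/64)-2
	 * times in total; the two remaining blocks are drained after the
	 * loop.
	 */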
1:	ldd		[%o1 + 0x048], %f2
	fmovd		%f12, %f28
	ldd		[%o1 + 0x050], %f4
	fmovd		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fmovd		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fmovd		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fmovd		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fmovd		%f6, %f22
	ldd		[%o1 + 0x078], %f14
	fmovd		%f8, %f24
	ldd		[%o1 + 0x080], %f0
	prefetch	[%o1 + 0x180], #one_read
	fmovd		%f10, %f26
	subcc		%o2, 1, %o2
	add		%o0, 0x40, %o0
	bne,pt		%xcc, 1b
	 add		%o1, 0x40, %o1

	ldd		[%o1 + 0x048], %f2
	fmovd		%f12, %f28
	ldd		[%o1 + 0x050], %f4
	fmovd		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fmovd		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fmovd		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fmovd		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fmovd		%f6, %f22
	add		%o0, 0x40, %o0
	ldd		[%o1 + 0x078], %f14
	fmovd		%f8, %f24
	fmovd		%f10, %f26
	fmovd		%f12, %f28
	fmovd		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	membar		#Sync
	VISExitHalf
	ba,pt		%xcc, 5f
	 nop

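	/* Generic (pre-Cheetah) path: 64-byte block loads with ldda and
	 * block stores through %asi.  If TI_FAULT_CODE is non-zero the
	 * stores go through ASI_BLK_COMMIT_P instead of ASI_BLK_P.  The
	 * loop below rotates through three register banks (%f0-%f14,
	 * %f16-%f30, %f32-%f46), copying each freshly loaded bank into
	 * %f48-%f62 via TOUCH before storing it.
	 */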
9:
	VISEntry
	ldub		[%g6 + TI_FAULT_CODE], %g3
	mov		%o0, %g1
	cmp		%g3, 0
	rd		%asi, %g3
	be,a,pt		%icc, 1f
	 wr		%g0, ASI_BLK_P, %asi
	wr		%g0, ASI_BLK_COMMIT_P, %asi
1:	ldda		[%o1] ASI_BLK_P, %f0
	add		%o1, 0x40, %o1
	ldda		[%o1] ASI_BLK_P, %f16
	add		%o1, 0x40, %o1
	sethi		%hi(PAGE_SIZE), %o2
1:	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f32, f34, f36, f38, f40, f42, f44, f46)
	ldda		[%o1] ASI_BLK_P, %f16
	stda		%f48, [%o0] %asi
	sub		%o2, 0x40, %o2
	add		%o1, 0x40, %o1
	cmp		%o2, PAGE_SIZE_REM
	bne,pt		%xcc, 1b
	 add		%o0, 0x40, %o0
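	/* The loop above stops PAGE_SIZE_REM bytes short of a full page;
	 * the code below finishes off the blocks still sitting in the FP
	 * registers (the larger page sizes need two extra load/store
	 * groups first).
	 */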
#if (PAGE_SHIFT == 16) || (PAGE_SHIFT == 22)
	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	membar		#Sync
	stda		%f32, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f0, [%o0] %asi
#else
	membar		#Sync
	stda		%f0, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f16, [%o0] %asi
#endif
	membar		#Sync
	wr		%g3, 0x0, %asi
	VISExit

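	/* Common exit: kick the two temporary translations out of the
	 * D-TLB and restore the preemption count taken on entry.
	 */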
5:
	stxa		%g0, [%g1] ASI_DMMU_DEMAP
	membar		#Sync

	sethi		%hi(DCACHE_SIZE), %g2
	stxa		%g0, [%g1 + %g2] ASI_DMMU_DEMAP
	membar		#Sync

	retl
	 stw		%o4, [%g6 + TI_PRE_COUNT]

	.size		copy_user_page, .-copy_user_page

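	/* Patch helper for Cheetah cpus: overwrites the branch at
	 * cheetah_copy_page_insn with a nop so copy_user_page falls
	 * through into the Cheetah-optimized loop, then flushes the
	 * patched instruction.
	 */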
	.globl		cheetah_patch_copy_page
cheetah_patch_copy_page:
	sethi		%hi(0x01000000), %o1	! NOP
	sethi		%hi(cheetah_copy_page_insn), %o0
	or		%o0, %lo(cheetah_copy_page_insn), %o0
	stw		%o1, [%o0]
	membar		#StoreStore
	flush		%o0
	retl
	 nop