/* linux/arch/sparc/lib/memset.S: Sparc optimized memset, bzero and clear_user code
 * Copyright (C) 1991,1996 Free Software Foundation
 * Copyright (C) 1996,1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 *
 * Calls to memset return the initial %o0. Calls to bzero return 0, if ok, and
 * the number of bytes not yet set if an exception occurs and we were called as
 * clear_user.
 */

#include <asm/ptrace.h>

/* Work around cpp -rob */
/* The ALLOC/EXECINSTR indirection keeps cpp (run with -rob, see above)
 * from mangling the literal "#alloc" / "#execinstr" section flags.
 */
#define ALLOC #alloc
#define EXECINSTR #execinstr

/* EX(x, y, a, b): emit the single store instruction "x y" at local
 * label 98 and register an exception-table fixup for it.  If the store
 * faults, the fixup at 99 branches to handler 30 (in .fixup, below);
 * the branch delay slot executes "a b, %o0" so that %o0 holds the
 * number of bytes not yet set at the point of the fault.  The
 * __ex_table entry is a (faulting-insn, fixup-code) address pair.
 */
#define EX(x,y,a,b) 				\
98: 	x,y;					\
	.section .fixup,ALLOC,EXECINSTR;	\
	.align	4;				\
99:	ba 30f;					\
	 a, b, %o0;				\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	98b, 99b;			\
	.text;					\
	.align	4

/* EXT(start, end, handler): register one ranged exception-table entry
 * covering every instruction in [start, end).  The 0 in the second
 * word distinguishes this range form from the pairwise entries emitted
 * by EX above (NOTE(review): interpreted by the sparc32 fault lookup
 * code, not visible in this file -- confirm against arch/sparc/mm).
 */
#define EXT(start,end,handler) 			\
	.section __ex_table,ALLOC;		\
	.align	4;				\
	.word	start, 0, end, handler;		\
	.text;					\
	.align	4

/* Please don't change these macros, unless you change the logic
 * in the .fixup section below as well.
 * Store 64 bytes at (BASE + OFFSET) using value SOURCE.
 * SOURCE must be an even-numbered register: each "std" writes the
 * SOURCE/SOURCE+1 register pair, i.e. 8 bytes per instruction.
 */
#define ZERO_BIG_BLOCK(base, offset, source) 	\
	std	source, [base + offset + 0x00];	\
	std	source, [base + offset + 0x08];	\
	std	source, [base + offset + 0x10];	\
	std	source, [base + offset + 0x18];	\
	std	source, [base + offset + 0x20];	\
	std	source, [base + offset + 0x28];	\
	std	source, [base + offset + 0x30];	\
	std	source, [base + offset + 0x38];

/* Store 64 bytes with "std", at ascending addresses from
 * [base - offset - 0x38] up to [base - offset].  The tail code below
 * computes a jump *into* two back-to-back expansions of this macro so
 * that only the required number of trailing doublewords is stored.
 */
#define ZERO_LAST_BLOCKS(base, offset, source)	\
	std	source, [base - offset - 0x38];	\
	std	source, [base - offset - 0x30];	\
	std	source, [base - offset - 0x28];	\
	std	source, [base - offset - 0x20];	\
	std	source, [base - offset - 0x18];	\
	std	source, [base - offset - 0x10];	\
	std	source, [base - offset - 0x08];	\
	std	source, [base - offset - 0x00];

/* ------------------------------------------------------------------
 * memset(ptr, c, n):  %o0 = ptr, %o1 = c, %o2 = n; returns ptr.
 * __bzero(ptr, n):    %o0 = ptr, %o1 = n; returns 0, or -- via the
 *                     .fixup code below -- the number of bytes not yet
 *                     set, when a store faults and we were called as
 *                     clear_user.
 *
 * Register roles on the shared path (label 1: onward):
 *   %o0 = current destination     %o1 = bytes remaining
 *   %g3 = fill byte replicated into all 4 bytes of a word
 *   %g2 = copy of %g3, so "std %g2" writes the %g2/%g3 pair (8 bytes)
 *   %g1 = original ptr (memset return value)
 *   %g4 = 1 when entered via memset, 0 when entered via __bzero
 * ------------------------------------------------------------------ */
	.text
	.align	4

	.globl	__bzero_begin
__bzero_begin:

	.globl	__bzero
	.globl	memset
	.globl	__memset_start, __memset_end
__memset_start:
memset:
	mov	%o0, %g1		! remember ptr for the return value
	mov	1, %g4			! flag: entered via memset
	and	%o1, 0xff, %g3		! replicate c into all 4 bytes of %g3
	sll	%g3, 8, %g2
	or	%g3, %g2, %g3
	sll	%g3, 16, %g2
	or	%g3, %g2, %g3
	b	1f
	 mov	%o2, %o1		! shared code keeps the count in %o1
3:
	/* Word-align %o0: misalignment is in %o2 (1..3), so store
	 * 4 - %o2 leading bytes (1st stb sits in the delay slot). */
	cmp	%o2, 3
	be	2f
	 EX(stb	%g3, [%o0], sub %o1, 0)

	cmp	%o2, 2
	be	2f
	 EX(stb	%g3, [%o0 + 0x01], sub %o1, 1)

	EX(stb	%g3, [%o0 + 0x02], sub %o1, 2)
2:
	sub	%o2, 4, %o2		! %o2 = -(bytes just stored)
	add	%o1, %o2, %o1		! count -= bytes stored
	b	4f
	 sub	%o0, %o2, %o0		! ptr += bytes stored

__bzero:
	clr	%g4			! flag: entered via __bzero
	mov	%g0, %g3		! fill pattern is zero
1:
	cmp	%o1, 7
	bleu	7f			! tiny area: take the byte loop
	 andcc	%o0, 3, %o2		! %o2 = ptr & 3 (misalignment)

	bne	3b			! word-align first if needed
4:
	andcc	%o0, 4, %g0		! ptr word aligned here; 8-aligned too?

	be	2f
	 mov	%g3, %g2		! set up the %g2/%g3 pair for std

	EX(st	%g3, [%o0], sub %o1, 0)	! one word to reach 8-byte alignment
	sub	%o1, 4, %o1
	add	%o0, 4, %o0
2:
	andcc	%o1, 0xffffff80, %o3	! Now everything is 8 aligned and o1 is len to run
	be	9f
	 andcc	%o1, 0x78, %o2		! %o2 = middle chunk bytes (whole stds)
10:
	ZERO_BIG_BLOCK(%o0, 0x00, %g2)
	subcc	%o3, 128, %o3
	ZERO_BIG_BLOCK(%o0, 0x40, %g2)
11:
	EXT(10b, 11b, 20f)		! a fault in the 128-byte loop -> 20:
	bne	10b
	 add	%o0, 128, %o0

	orcc	%o2, %g0, %g0		! any 8..120 byte middle chunk left?
9:
	be	13f
	 andcc	%o1, 7, %o1		! %o1 = trailing odd bytes (0..7)

	/* Jump backward into the 16 std's that end at 13:.  Each std is
	 * a 4-byte instruction storing 8 bytes, so half the byte count
	 * (%o2 >> 1) is the distance back from 13f. */
	srl	%o2, 1, %o3
	set	13f, %o4
	sub	%o4, %o3, %o4
	jmp	%o4
	 add	%o0, %o2, %o0		! ptr past the chunk; stds reach back

12:
	ZERO_LAST_BLOCKS(%o0, 0x48, %g2)
	ZERO_LAST_BLOCKS(%o0, 0x08, %g2)
13:
	be	8f			! no odd bytes left -> done
	 andcc	%o1, 4, %g0

	be	1f
	 andcc	%o1, 2, %g0

	EX(st	%g3, [%o0], and %o1, 7)	! trailing word
	add	%o0, 4, %o0
1:
	be	1f
	 andcc	%o1, 1, %g0

	EX(sth	%g3, [%o0], and %o1, 3)	! trailing halfword
	add	%o0, 2, %o0
1:
	bne,a	8f
	 EX(stb	%g3, [%o0], and %o1, 1)	! annulled: stb only if byte needed
8:
	b	0f
	 nop
7:
	/* Short (<= 7 byte) area; cc still from "andcc %o0, 3" above. */
	be	13b			! already word aligned -> tail code
	 orcc	%o1, 0, %g0		! (delay slot: set cc on the length)

	be	0f			! zero length -> done
8:
	add	%o0, 1, %o0
	subcc	%o1, 1, %o1
	bne	8b
	 EX(stb	%g3, [%o0 - 1], add %o1, 1)
0:
	andcc	%g4, 1, %g0		! which entry point was used?
	be	5f
	 nop
	retl
	 mov	%g1, %o0		! memset: return the original ptr
5:
	retl
	 clr	%o0			! bzero/clear_user success: return 0
__memset_end:
| 180 | |
| 181 | .section .fixup,#alloc,#execinstr |
| 182 | .align 4 |
| 183 | 20: |
| 184 | cmp %g2, 8 |
| 185 | bleu 1f |
| 186 | and %o1, 0x7f, %o1 |
| 187 | sub %g2, 9, %g2 |
| 188 | add %o3, 64, %o3 |
| 189 | 1: |
| 190 | sll %g2, 3, %g2 |
| 191 | add %o3, %o1, %o0 |
| 192 | b 30f |
| 193 | sub %o0, %g2, %o0 |
| 194 | 21: |
| 195 | mov 8, %o0 |
| 196 | and %o1, 7, %o1 |
| 197 | sub %o0, %g2, %o0 |
| 198 | sll %o0, 3, %o0 |
| 199 | b 30f |
| 200 | add %o0, %o1, %o0 |
| 201 | 30: |
| 202 | /* %o4 is faulting address, %o5 is %pc where fault occurred */ |
| 203 | save %sp, -104, %sp |
| 204 | mov %i5, %o0 |
| 205 | mov %i7, %o1 |
| 206 | call lookup_fault |
| 207 | mov %i4, %o2 |
| 208 | ret |
| 209 | restore |
| 210 | |
| 211 | .globl __bzero_end |
| 212 | __bzero_end: |