/*
 * strncpy_from_user.S: Sparc64 strncpy from userspace.
 *
 * Copyright (C) 1997, 1999 Jakub Jelinek (jj@ultra.linux.cz)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/errno.h>

	.data
	.align	8
0:	.xword	0x0101010101010101
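	/* Note (descriptive, not from the original source): 0b above holds
	 * 0x01 in every byte; shifted left by 7 it becomes the per-byte 0x80
	 * mask used by the fast path below to screen a doubleword for a
	 * possible NUL byte.
	 */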

	.text

	/* Must return:
	 *
	 * -EFAULT		for an exception
	 * count		if we hit the buffer limit
	 * bytes copied		if we hit a null byte
	 * (without the null byte)
	 *
	 * This implementation assumes:
	 * %o1 is 8 aligned => !(%o2 & 7)
	 * %o0 is 8 aligned (if not, it will be slooooow, but will work)
	 *
	 * This is optimized for the common case:
	 * in my stats, 90% of src are 8 aligned (even on sparc32)
	 * and average length is 18 or so.
	 */

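	/* A rough C sketch of the contract above, for reference only; this is
	 * not the implementation, and the helper name is illustrative:
	 *
	 *	long strncpy_from_user_sketch(char *dst, const char __user *src,
	 *				      long count)
	 *	{
	 *		long i;
	 *
	 *		for (i = 0; i < count; i++) {
	 *			char c;
	 *
	 *			if (get_user(c, src + i))
	 *				return -EFAULT;
	 *			dst[i] = c;
	 *			if (!c)
	 *				return i;	// length without the NUL
	 *		}
	 *		return count;			// hit the buffer limit
	 *	}
	 */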
ENTRY(__strncpy_from_user)
	/* %o0=dest, %o1=src, %o2=count */
	andcc	%o1, 7, %g0		! IEU1	Group
	bne,pn	%icc, 30f		! CTI
	 add	%o0, %o2, %g3		! IEU0
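	/* Fast path: the source is doubleword aligned.  Copy 8 bytes per
	 * iteration and screen each doubleword for a NUL byte with the
	 * (x - 0x0101...) & 0x8080... test; a hit (which can be a false
	 * positive for bytes >= 0x81) is resolved byte by byte at 5f.
	 */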
60:	ldxa	[%o1] %asi, %g1		! Load	Group
	brlez,pn %o2, 10f		! CTI
	 mov	%o0, %o3		! IEU0
50:	sethi	%hi(0b), %o4		! IEU0	Group
	ldx	[%o4 + %lo(0b)], %o4	! Load
	sllx	%o4, 7, %o5		! IEU1	Group
1:	sub	%g1, %o4, %g2		! IEU0	Group
	stx	%g1, [%o0]		! Store
	add	%o0, 8, %o0		! IEU1
	andcc	%g2, %o5, %g0		! IEU1	Group
	bne,pn	%xcc, 5f		! CTI
	 add	%o1, 8, %o1		! IEU0
	cmp	%o0, %g3		! IEU1	Group
	bl,a,pt	%xcc, 1b		! CTI
61:	 ldxa	[%o1] %asi, %g1		! Load
10:	retl				! CTI	Group
	 mov	%o2, %o0		! IEU0
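	/* The doubleword just stored may contain a NUL.  Examine it 16 bits
	 * at a time, from the most significant byte down, to find the first
	 * NUL and return the length copied so far; if the screen above was a
	 * false positive, fall through to 2: and resume the copy loop.
	 */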
5:	srlx	%g2, 32, %g7		! IEU0	Group
	sethi	%hi(0xff00), %o4	! IEU1
	andcc	%g7, %o5, %g0		! IEU1	Group
	be,pn	%icc, 2f		! CTI
	 or	%o4, %lo(0xff00), %o4	! IEU0
	srlx	%g1, 48, %g7		! IEU0	Group
	andcc	%g7, %o4, %g0		! IEU1	Group
	be,pn	%icc, 50f		! CTI
	 andcc	%g7, 0xff, %g0		! IEU1	Group
	be,pn	%icc, 51f		! CTI
	 srlx	%g1, 32, %g7		! IEU0
	andcc	%g7, %o4, %g0		! IEU1	Group
	be,pn	%icc, 52f		! CTI
	 andcc	%g7, 0xff, %g0		! IEU1	Group
	be,pn	%icc, 53f		! CTI
2:	 andcc	%g2, %o5, %g0		! IEU1	Group
	be,pn	%icc, 2f		! CTI
	 srl	%g1, 16, %g7		! IEU0
	andcc	%g7, %o4, %g0		! IEU1	Group
	be,pn	%icc, 54f		! CTI
	 andcc	%g7, 0xff, %g0		! IEU1	Group
	be,pn	%icc, 55f		! CTI
	 andcc	%g1, %o4, %g0		! IEU1	Group
	be,pn	%icc, 56f		! CTI
	 andcc	%g1, 0xff, %g0		! IEU1	Group
	be,a,pn	%icc, 57f		! CTI
	 sub	%o0, %o3, %o0		! IEU0
2:	cmp	%o0, %g3		! IEU1	Group
	bl,a,pt	%xcc, 50b		! CTI
62:	 ldxa	[%o1] %asi, %g1		! Load
	retl				! CTI	Group
	 mov	%o2, %o0		! IEU0
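	/* Return stubs: %o0 - %o3 is the number of bytes stored so far (a
	 * multiple of 8); subtract 8..1 according to which byte of the last
	 * doubleword was the NUL, giving the string length without the
	 * terminator.
	 */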
50:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 8, %o0
51:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 7, %o0
52:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 6, %o0
53:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 5, %o0
54:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 4, %o0
55:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 3, %o0
56:	sub	%o0, %o3, %o0
	retl
	 sub	%o0, 2, %o0
57:	retl
	 sub	%o0, 1, %o0
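	/* Slow path: the source is not doubleword aligned.  Copy one byte at
	 * a time; %o3 counts up from -count to 0, so the buffer-limit check
	 * is a simple addcc, and on a NUL the copied length is count + %o3.
	 */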
30:	brlez,pn %o2, 3f
	 sub	%g0, %o2, %o3
	add	%o0, %o2, %o0
63:	lduba	[%o1] %asi, %o4
1:	add	%o1, 1, %o1
	brz,pn	%o4, 2f
	 stb	%o4, [%o0 + %o3]
	addcc	%o3, 1, %o3
	bne,pt	%xcc, 1b
64:	 lduba	[%o1] %asi, %o4
3:	retl
	 mov	%o2, %o0
2:	retl
	 add	%o2, %o3, %o0
ENDPROC(__strncpy_from_user)

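	/* Exception table: a fault on any of the user-space loads at 60-64
	 * transfers control to __retl_efault, which returns -EFAULT.
	 */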
	.section __ex_table,"a"
	.align	4
	.word	60b, __retl_efault
	.word	61b, __retl_efault
	.word	62b, __retl_efault
	.word	63b, __retl_efault
	.word	64b, __retl_efault
	.previous