/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
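	/* Each routine below is a compare-and-swap retry loop.
	 * Roughly, in C (an illustrative sketch only, not the
	 * real interface):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old + i;
	 *	} while (cas(ptr, old, new) != old);
	 *
	 * where cas() stands in for the cas/casx instruction: it
	 * stores new only if *ptr still equals old, and always
	 * returns the prior memory contents.
	 */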
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1		/* load 32-bit old value */
	add	%g1, %o0, %g7		/* new = old + increment */
	cas	[%o1], %g1, %g7		/* store %g7 iff *%o1 == %g1; %g7 gets old */
	cmp	%g1, %g7
	bne,pn	%icc, 2f		/* cas lost the race: back off, retry */
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add, .-atomic_add

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub, .-atomic_sub
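	/* BACKOFF_SETUP and BACKOFF_SPIN come from asm/backoff.h.
	 * As a sketch: BACKOFF_SETUP seeds a spin count, and on a
	 * failed cas BACKOFF_SPIN busy-waits for that count (growing
	 * it exponentially up to a limit) before branching back to
	 * the retry label, which reduces cache-line ping-pong under
	 * contention.
	 */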

	/* On SMP we need memory barriers to ensure correct
	 * memory operation ordering; on uniprocessor these
	 * compile away to nothing.
	 */
#ifdef CONFIG_SMP

#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
#define ATOMIC_POST_BARRIER	\
	ba,pt %xcc, 80b;	\
	membar #StoreLoad | #StoreStore

80:	retl
	 nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
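	/* Note: ATOMIC_POST_BARRIER branches back to the shared
	 * "80:" return stub above, with the membar in the branch's
	 * delay slot so it executes before the retl.  This keeps
	 * the membar out of the retl delay slot itself, which is
	 * reportedly problematic on some early UltraSPARC chips.
	 */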

	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7		/* delay slot: recompute new value from old */
	sra	%g7, 0, %o0		/* sign-extend 32-bit result into 64-bit %o0 */
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub_ret, .-atomic_sub_ret
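	/* The atomic64_* variants below follow the same pattern,
	 * but operate on 64-bit values: ldx/casx instead of
	 * lduw/cas, with the branches testing %xcc (64-bit
	 * condition codes) rather than %icc.
	 */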

	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 add	%g7, %o0, %g7
	mov	%g7, %o0		/* full 64-bit result, no sign-extension needed */
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub_ret, .-atomic64_sub_ret