/* $Id: dec_and_lock.S,v 1.5 2001/11/18 00:12:56 davem Exp $
 * dec_and_lock.S: Sparc64 version of "atomic_dec_and_lock()"
 * using cas and ldstub instructions.
 *
 * Copyright (C) 2000 David S. Miller (davem@redhat.com)
 */
#include <linux/config.h>
#include <asm/thread_info.h>

	.text
	.align	64

	/* CAS basically works like this:
	 *
	 * void CAS(MEM, REG1, REG2)
	 * {
	 *	START_ATOMIC();
	 *	if (*(MEM) == REG1) {
	 *		TMP = *(MEM);
	 *		*(MEM) = REG2;
	 *		REG2 = TMP;
	 *	} else
	 *		REG2 = *(MEM);
	 *	END_ATOMIC();
	 * }
	 */
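
	/* For orientation, a rough C-level sketch of what the routine
	 * below does.  The lock type, memory barriers and preempt_count
	 * bookkeeping are elided; cas() is assumed to return the old
	 * memory value, like CAS() above; spin_lock()/spin_unlock()
	 * stand in for the ldstub/stb lock handling.
	 *
	 *	int _atomic_dec_and_lock(int *counter, LOCK *lock)
	 *	{
	 *	again:
	 *		old = *counter;
	 *		if (old - 1 != 0) {
	 *			if (cas(counter, old, old - 1) != old)
	 *				goto again;
	 *			return 0;
	 *		}
	 *		spin_lock(lock);
	 *		while (old - 1 == 0) {
	 *			if (cas(counter, old, 0) == old)
	 *				return 1;	(lock stays held)
	 *			old = *counter;
	 *		}
	 *		spin_unlock(lock);
	 *		goto again;
	 *	}
	 */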

	.globl	_atomic_dec_and_lock
_atomic_dec_and_lock:	/* %o0 = counter, %o1 = lock */
loop1:	lduw	[%o0], %g2		/* Load counter, compute counter - 1.  */
	subcc	%g2, 1, %g7
	be,pn	%icc, start_to_zero	/* Dropping to zero, we will need the lock.  */
	 nop
nzero:	cas	[%o0], %g2, %g7		/* Store the decremented value if unchanged.  */
	cmp	%g2, %g7
	bne,pn	%icc, loop1		/* Lost a race, start over.  */
	 mov	0, %g1			/* Return value 0: lock not taken.  */

out:
	membar	#StoreLoad | #StoreStore
	retl
	 mov	%g1, %o0		/* Return %g1 (0 or 1).  */
start_to_zero:
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3	/* Bump preempt_count before taking the lock.  */
	add	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif
to_zero:
	ldstub	[%o1], %g3		/* Try to grab the lock byte.  */
	brnz,pn	%g3, spin_on_lock	/* Already held, go spin.  */
	 membar	#StoreLoad | #StoreStore
loop2:	cas	[%o0], %g2, %g7	/* ASSERT(g7 == 0) */
	cmp	%g2, %g7

	be,pt	%icc, out		/* Counter hit zero, return 1 with lock held.  */
	 mov	1, %g1
	lduw	[%o0], %g2		/* CAS failed, reload and re-check.  */
	subcc	%g2, 1, %g7
	be,pn	%icc, loop2		/* Still dropping to zero, retry under the lock.  */
	 nop
	membar	#StoreStore | #LoadStore
	stb	%g0, [%o1]		/* Not hitting zero anymore, release the lock.  */
#ifdef CONFIG_PREEMPT
	ldsw	[%g6 + TI_PRE_COUNT], %g3	/* Drop preempt_count again.  */
	sub	%g3, 1, %g3
	stw	%g3, [%g6 + TI_PRE_COUNT]
#endif

	b,pt	%xcc, nzero		/* Back to the non-zero fast path.  */
	 nop
spin_on_lock:
	ldub	[%o1], %g3		/* Spin with loads only until the lock clears,  */
	brnz,pt	%g3, spin_on_lock
	 membar	#LoadLoad
	ba,pt	%xcc, to_zero		/* then retry the ldstub.  */
	 nop
	nop