#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H

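/*
 * Atomic operations for SH CPUs that implement atomicity by roll-back
 * ("gRB", selected by CONFIG_GUSA_RB), using gUSA-style critical
 * sections.
 *
 * Each sequence below wraps its load/modify/store in a LOGIN/LOGOUT
 * pair: r0 is pointed at the end of the region (label 1:), the stack
 * pointer is saved in r1, and r15 is loaded with the negative size of
 * the region (-6 bytes here: three 16-bit instructions).  While r15 is
 * negative the kernel treats the sequence as in progress; if the task
 * is interrupted before LOGOUT, execution is rolled back to the start
 * of the region (r0 + r15) and the whole load/modify/store is retried.
 * That makes the sequence effectively atomic on these uniprocessor
 * parts without locking or disabling interrupts.
 */
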
#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
}									\

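/* As ATOMIC_OP(), but also return the new value. */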
#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int tmp;							\
									\
	__asm__ __volatile__ (						\
		"   .align 2              \n\t"				\
		"   mova    1f,   r0      \n\t" /* r0 = end point */	\
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */	\
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */	\
		"   mov.l  @%1,   %0      \n\t" /* load old value */	\
		" " #op "   %2,   %0      \n\t" /* $op */		\
		"   mov.l   %0,   @%1     \n\t" /* store new value */	\
		"1: mov     r1,   r15     \n\t" /* LOGOUT */		\
		: "=&r" (tmp),						\
		  "+r"  (v)						\
		: "r"   (i)						\
		: "memory" , "r0", "r1");				\
									\
	return tmp;							\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

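/*
 * Instantiate atomic_add()/atomic_add_return() and
 * atomic_sub()/atomic_sub_return() from the templates above.
 */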
ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	int tmp;
	unsigned int _mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (_mask)
		: "memory" , "r0", "r1");
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

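/*
 * Illustrative use of the mask helpers above (example only, not part
 * of this file): keeping flag bits in an atomic_t.
 *
 *	atomic_t flags = ATOMIC_INIT(0);
 *
 *	atomic_set_mask(0x01, &flags);		flags is now 0x01
 *	atomic_clear_mask(0x01, &flags);	flags is now 0x00
 */
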
#endif /* __ASM_SH_ATOMIC_GRB_H */