/*
 * cmpxchg.h -- forked from asm/atomic.h with this copyright:
 *
 * Copyright 2010 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 *
 */

#ifndef _ASM_TILE_CMPXCHG_H
#define _ASM_TILE_CMPXCHG_H

#ifndef __ASSEMBLY__

#include <asm/barrier.h>

/* Nonexistent functions intended to cause compile errors. */
extern void __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");

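/*
 * Illustrative note: the declarations above are never defined, so any
 * xchg()/cmpxchg() whose compile-time size check fails leaves a call to
 * one of them behind, and __compiletime_error() turns that call into a
 * build error.  A hypothetical caller, not part of this header:
 *
 *	short flag;
 *	xchg(&flag, 1);		fails with "Bad argument size for xchg"
 */
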
#ifndef __tilegx__

/* Note the _atomic_xxx() routines include a final mb(). */
int _atomic_xchg(int *ptr, int n);
int _atomic_xchg_add(int *v, int i);
int _atomic_xchg_add_unless(int *v, int a, int u);
int _atomic_cmpxchg(int *ptr, int o, int n);
long long _atomic64_xchg(long long *v, long long n);
long long _atomic64_xchg_add(long long *v, long long i);
long long _atomic64_xchg_add_unless(long long *v, long long a, long long u);
long long _atomic64_cmpxchg(long long *v, long long o, long long n);

#define xchg(ptr, n)						\
	({							\
		if (sizeof(*(ptr)) != 4)			\
			__xchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic_xchg((int *)(ptr), (int)(n)); \
	})

#define cmpxchg(ptr, o, n)					\
	({							\
		if (sizeof(*(ptr)) != 4)			\
			__cmpxchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic_cmpxchg((int *)(ptr), (int)(o), \
						(int)(n));	\
	})

#define xchg64(ptr, n)						\
	({							\
		if (sizeof(*(ptr)) != 8)			\
			__xchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic64_xchg((long long *)(ptr), \
					       (long long)(n));	\
	})

#define cmpxchg64(ptr, o, n)					\
	({							\
		if (sizeof(*(ptr)) != 8)			\
			__cmpxchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic64_cmpxchg((long long *)(ptr), \
						  (long long)(o), (long long)(n)); \
	})

#else

#define xchg(ptr, n)						\
	({							\
		typeof(*(ptr)) __x;				\
		smp_mb();					\
		switch (sizeof(*(ptr))) {			\
		case 4:						\
			__x = (typeof(__x))(unsigned long)	\
				__insn_exch4((ptr),		\
					     (u32)(unsigned long)(n)); \
			break;					\
		case 8:						\
			__x = (typeof(__x))			\
				__insn_exch((ptr), (unsigned long)(n)); \
			break;					\
		default:					\
			__xchg_called_with_bad_pointer();	\
			break;					\
		}						\
		smp_mb();					\
		__x;						\
	})

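/*
 * The tilegx cmpexch/cmpexch4 instructions take the value to compare
 * against from the CMPEXCH_VALUE special-purpose register, so the
 * expected old value is staged there with mtspr before the exchange
 * instruction is issued.
 */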
#define cmpxchg(ptr, o, n)					\
	({							\
		typeof(*(ptr)) __x;				\
		__insn_mtspr(SPR_CMPEXCH_VALUE, (unsigned long)(o)); \
		smp_mb();					\
		switch (sizeof(*(ptr))) {			\
		case 4:						\
			__x = (typeof(__x))(unsigned long)	\
				__insn_cmpexch4((ptr),		\
						(u32)(unsigned long)(n)); \
			break;					\
		case 8:						\
			__x = (typeof(__x))__insn_cmpexch((ptr), \
							  (long long)(n)); \
			break;					\
		default:					\
			__cmpxchg_called_with_bad_pointer();	\
			break;					\
		}						\
		smp_mb();					\
		__x;						\
	})

#define xchg64 xchg
#define cmpxchg64 cmpxchg

#endif

#define tas(ptr) xchg((ptr), 1)
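
/*
 * Usage sketch (illustrative only; the names below are hypothetical and
 * not part of this header): cmpxchg() is normally wrapped in a retry
 * loop, and tas() behaves as a test-and-set returning the previous value.
 *
 *	int old, new;
 *	do {				atomically bump *counter
 *		old = *counter;
 *		new = old + 1;
 *	} while (cmpxchg(counter, old, new) != old);
 *
 *	if (tas(&flag) == 0)
 *		...we were the caller that set the flag from 0 to 1...
 */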

#endif /* __ASSEMBLY__ */

#endif /* _ASM_TILE_CMPXCHG_H */