/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */

#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <asm/system.h>
#include <asm/atomic.h>

#ifdef CONFIG_SMP
raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __RAW_SPIN_LOCK_UNLOCKED
};
#endif
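
/*
 * Note on the locking scheme (a sketch, not part of the original file):
 * parisc has no atomic read-modify-write instruction beyond LDCW, so the
 * asm/atomic.h helpers hash the target address to one of the spinlocks
 * above.  In kernels of this era the mapping looked roughly like:
 *
 *	#define ATOMIC_HASH(a) \
 *		(&__atomic_hash[(((unsigned long)(a)) / L1_CACHE_BYTES) \
 *				& (ATOMIC_HASH_SIZE - 1)])
 *
 * _atomic_spin_lock_irqsave(ptr, flags) then disables interrupts and
 * takes ATOMIC_HASH(ptr), so all "atomic" updates whose addresses hash
 * to the same bucket are serialized against each other.
 */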

#ifdef __LP64__
/* 64-bit exchange: store x into *ptr and return the previous value. */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long temp, flags;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return temp;
}
#endif
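
/*
 * Callers normally reach these helpers through the xchg() macro in
 * asm/system.h, which dispatches on sizeof(*ptr).  An illustrative
 * sketch of that dispatch (not a verbatim copy of the header):
 *
 *	switch (size) {
 *	case 8: return __xchg64(x, (unsigned long *) ptr);
 *	case 4: return __xchg32((int) x, (int *) ptr);
 *	case 1: return __xchg8((char) x, (char *) ptr);
 *	}
 */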

/* 32-bit exchange: store x into *ptr and return the previous value. */
unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}

/* 8-bit exchange: store x into *ptr and return the previous value. */
unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}
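
/*
 * Note (not in the original file): because temp is declared long and
 * *ptr is read through a signed type, a negative value is sign-extended
 * to the width of long before the final unsigned cast.  That is certain
 * for the int case above; for the char case it additionally depends on
 * whether plain char is signed on the target.  This widening behaviour
 * is exactly what the XXX comments are questioning.
 */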

#ifdef __LP64__
/* 64-bit compare-and-exchange: set *ptr to new only if it equals old;
 * returns the value actually read, so callers can detect failure. */
unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}
#endif

/* 32-bit compare-and-exchange: set *ptr to new only if it equals old;
 * returns the value actually read, so callers can detect failure. */
unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)prev;
}
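
/*
 * Typical caller pattern (an illustrative sketch, not part of this
 * file): since cmpxchg returns the value actually read, lock-free
 * updates retry until the compare succeeds.
 *
 *	unsigned int cur, old = *counter;
 *	while ((cur = __cmpxchg_u32(counter, old, old + 1)) != old)
 *		old = cur;
 */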