/* SMP caching definitions
 *
 * Copyright (C) 2010 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */


/*
 * Operation requests for smp_cache_call().
 *
 * One of smp_icache_ops and one of smp_dcache_ops can be OR'd together.
 */
enum smp_icache_ops {
	SMP_ICACHE_NOP			= 0x0000,
	SMP_ICACHE_INV			= 0x0001,
	SMP_ICACHE_INV_RANGE		= 0x0002,
};
#define SMP_ICACHE_OP_MASK		0x0003

enum smp_dcache_ops {
	SMP_DCACHE_NOP			= 0x0000,
	SMP_DCACHE_INV			= 0x0004,
	SMP_DCACHE_INV_RANGE		= 0x0008,
	SMP_DCACHE_FLUSH		= 0x000c,
	SMP_DCACHE_FLUSH_RANGE		= 0x0010,
	SMP_DCACHE_FLUSH_INV		= 0x0014,
	SMP_DCACHE_FLUSH_INV_RANGE	= 0x0018,
};
#define SMP_DCACHE_OP_MASK		0x001c

#define SMP_IDCACHE_INV_FLUSH		(SMP_ICACHE_INV | SMP_DCACHE_FLUSH)
#define SMP_IDCACHE_INV_FLUSH_RANGE	(SMP_ICACHE_INV_RANGE | SMP_DCACHE_FLUSH_RANGE)
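
/*
 * Usage sketch: the icache ops (SMP_ICACHE_OP_MASK bits) and the dcache ops
 * (SMP_DCACHE_OP_MASK bits) occupy disjoint bit ranges, so one of each may
 * be OR'd into a single request, for example:
 *
 *	smp_cache_call(SMP_ICACHE_INV_RANGE | SMP_DCACHE_FLUSH_RANGE,
 *		       start, end);
 *
 * which is the combination named by SMP_IDCACHE_INV_FLUSH_RANGE above.
 */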

/*
 * cache-smp.c
 */
#ifdef CONFIG_SMP
extern spinlock_t smp_cache_lock;

/* Ask the other CPUs to perform the cache operation requested in opr_mask,
 * applied over the addr..end region for the *_RANGE operations.
 */
extern void smp_cache_call(unsigned long opr_mask,
			   unsigned long addr, unsigned long end);

/* Take the cache lock and disable local IRQs, returning the saved IRQ state */
static inline unsigned long smp_lock_cache(void)
	__acquires(&smp_cache_lock)
{
	unsigned long flags;

	spin_lock_irqsave(&smp_cache_lock, flags);
	return flags;
}

/* Drop the cache lock and restore the IRQ state saved by smp_lock_cache() */
static inline void smp_unlock_cache(unsigned long flags)
	__releases(&smp_cache_lock)
{
	spin_unlock_irqrestore(&smp_cache_lock, flags);
}

#else
/* On UP there are no other CPUs to notify, so these are no-ops */
static inline unsigned long smp_lock_cache(void) { return 0; }
static inline void smp_unlock_cache(unsigned long flags) {}
static inline void smp_cache_call(unsigned long opr_mask,
				  unsigned long addr, unsigned long end)
{
}
#endif /* CONFIG_SMP */
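
/*
 * Usage sketch (illustrative assumptions): a caller in the cache-smp code is
 * expected to take the lock, perform the operation on the local CPU's cache,
 * ask the other CPUs to do the same via smp_cache_call(), then drop the lock.
 * local_dcache_flush_range() below stands in for whichever local flush
 * primitive the architecture provides; it is assumed, not defined here.
 *
 *	unsigned long flags;
 *
 *	flags = smp_lock_cache();
 *	local_dcache_flush_range(start, end);
 *	smp_cache_call(SMP_DCACHE_FLUSH_RANGE, start, end);
 *	smp_unlock_cache(flags);
 */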