/*
 * include/asm-blackfin/cache.h
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT 5
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES L1_CACHE_BYTES

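/*
 * Usage note (illustrative sketch, not part of the original header):
 * aligning frequently written shared or per-CPU data to L1_CACHE_BYTES
 * keeps unrelated fields from sharing one 32-byte line (false sharing).
 * The struct name below is hypothetical, for illustration only:
 *
 *	struct example_stats {
 *		unsigned long rx_packets;
 *		unsigned long tx_packets;
 *	} __attribute__((__aligned__(L1_CACHE_BYTES)));
 */
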
#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline-aligned data into L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned \
	__attribute__((__aligned__(L1_CACHE_BYTES), \
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

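/*
 * Usage note (illustrative sketch, not part of the original header):
 * with CONFIG_CACHELINE_ALIGNED_L1 enabled on a non-SMP build, an object
 * marked __cacheline_aligned is aligned to L1_CACHE_BYTES and emitted into
 * the .data_l1.cacheline_aligned section, which the linker script places
 * in on-chip L1 data memory. The variable below is hypothetical:
 *
 *	static struct example_state {
 *		int count;
 *	} example_state __cacheline_aligned;
 */
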
/*
 * largest L1 cache line shift which this arch supports
 */
#define L1_CACHE_SHIFT_MAX 5

#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT) && \
    defined(CONFIG_BFIN_DCACHE)
#define __ARCH_SYNC_CORE_DCACHE
#ifndef __ASSEMBLY__
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

void resync_core_dcache(void);
#endif
#endif
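/*
 * Usage note (illustrative sketch, assumptions labelled): on SMP parts
 * whose L1 data caches are not hardware-coherent, coherence is maintained
 * in software. A plausible pattern, shown with a hypothetical shared
 * datum, is for the writing core to publish its update with
 * smp_mark_barrier() and for a reading core to call smp_check_barrier()
 * before touching the data, so stale cached copies are discarded:
 *
 *	example_shared.value = new_value;	writer core
 *	smp_mark_barrier();
 *
 *	smp_check_barrier();			reader core
 *	use(example_shared.value);
 */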


#endif