/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Place __cacheline_aligned data in L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned				\
	  __attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif	/* CONFIG_CACHELINE_ALIGNED_L1 */

#endif	/* CONFIG_SMP */

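/*
 * Illustrative sketch (not part of the original header): on a UP build
 * with CONFIG_CACHELINE_ALIGNED_L1 enabled, a variable marked
 * __cacheline_aligned is both aligned to L1_CACHE_BYTES and placed in
 * the ".data_l1.cacheline_aligned" section, e.g.:
 *
 *	static struct foo_stats stats __cacheline_aligned;
 *
 * "struct foo_stats" and "stats" are hypothetical names used only for
 * this example.
 */
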
/*
 * largest L1 cache line shift which this arch supports
 */
#define L1_CACHE_SHIFT_MAX 5

#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
#ifndef __ASSEMBLY__
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

void resync_core_dcache(void);
void resync_core_icache(void);
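
/*
 * Usage sketch (an assumption about intent, not stated in this header):
 * on SMP parts without hardware cache coherency, a writer core calls
 * smp_mark_barrier() after updating shared data, and a reader core calls
 * smp_check_barrier() before consuming it so that a stale L1 data cache
 * can be brought back in sync (see resync_core_dcache()).  For example:
 *
 *	shared->value = new_value;	writer core
 *	smp_mark_barrier();
 *
 *	smp_check_barrier();		reader core
 *	use(shared->value);
 *
 * "shared", "new_value" and "use()" are hypothetical names used only
 * for illustration.
 */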
#endif	/* __ASSEMBLY__ */
#endif	/* CONFIG_SMP && !CONFIG_BFIN_CACHE_COHERENT */


#endif	/* __ARCH_BLACKFIN_CACHE_H */