/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
7 #ifndef __ARCH_BLACKFIN_CACHE_H
8 #define __ARCH_BLACKFIN_CACHE_H
/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

/* DMA buffers must span whole cache lines so flush/invalidate of a
 * line never touches unrelated neighboring data. */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

/*
 * Default (no-op) alignment markers; __cacheline_aligned is
 * conditionally redefined below for CONFIG_CACHELINE_ALIGNED_L1.
 * NOTE(review): upstream wraps these defaults in #ifdef CONFIG_SMP /
 * #else — those conditional lines are not visible in this chunk,
 * confirm against the full file.
 */
#define __cacheline_aligned
#define ____cacheline_aligned
/*
 * Put cacheline_aligned data into L1 data memory: align to a cache
 * line and place it in the dedicated .data_l1 section.
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned \
	__attribute__((__aligned__(L1_CACHE_BYTES), \
		__section__(".data_l1.cacheline_aligned")))
#endif
/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5
41 #if defined(CONFIG_SMP) && \
42 !defined(CONFIG_BFIN_CACHE_COHERENT)
43 # if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
44 # define __ARCH_SYNC_CORE_ICACHE
46 # if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
47 # define __ARCH_SYNC_CORE_DCACHE
50 asmlinkage void __raw_smp_mark_barrier_asm(void);
51 asmlinkage void __raw_smp_check_barrier_asm(void);
/* Record a barrier point for this core by delegating to the asm helper. */
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
/* Check the previously marked barrier state via the asm helper. */
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}
/*
 * Resynchronize this core's data/instruction cache with memory after
 * another core has modified cacheable data; implemented elsewhere in
 * the arch SMP code.
 */
void resync_core_dcache(void);
void resync_core_icache(void);