/*
 * include/asm-parisc/cache.h
 */

#ifndef __ARCH_PARISC_CACHE_H
#define __ARCH_PARISC_CACHE_H

#include <linux/config.h>

#ifndef __ASSEMBLY__
/*
 * PA 2.0 processors have 64-byte cachelines; PA 1.1 processors have
 * 32-byte cachelines. The default configuration is not for SMP anyway,
 * so if you're building for SMP, you should select the appropriate
 * processor type. There is a potential livelock danger when running
 * a machine with this value set too small, but it's more probable you'll
 * just ruin performance.
 */
#ifdef CONFIG_PA20
#define L1_CACHE_BYTES 64
#else
#define L1_CACHE_BYTES 32
#endif

#define L1_CACHE_ALIGN(x) (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
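
/*
 * L1_CACHE_ALIGN() rounds a byte count up to the next cacheline
 * boundary; e.g. with L1_CACHE_BYTES == 64, L1_CACHE_ALIGN(100) == 128
 * and L1_CACHE_ALIGN(64) == 64.
 */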

#define SMP_CACHE_BYTES L1_CACHE_BYTES

#define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
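
/*
 * __cacheline_aligned aligns a definition on a cacheline boundary,
 * e.g. (hypothetical declaration):
 *
 *	static int hot_counter __cacheline_aligned;
 */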

extern void flush_data_cache_local(void); /* flushes local data-cache only */
extern void flush_instruction_cache_local(void); /* flushes local code-cache only */
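/*
 * On SMP kernels flush_data_cache() flushes the data cache on all
 * processors; on UP builds it is simply an alias for the local flush.
 */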
#ifdef CONFIG_SMP
extern void flush_data_cache(void); /* flushes data-cache only (all processors) */
#else
#define flush_data_cache flush_data_cache_local
#define flush_instruction_cache flush_instruction_cache_local
#endif

extern void cache_init(void); /* initializes cache-flushing */
extern void flush_all_caches(void); /* flush everything (tlb & cache) */
extern int get_cache_info(char *);
extern void flush_user_icache_range_asm(unsigned long, unsigned long);
extern void flush_kernel_icache_range_asm(unsigned long, unsigned long);
extern void flush_user_dcache_range_asm(unsigned long, unsigned long);
extern void flush_kernel_dcache_range_asm(unsigned long, unsigned long);
extern void flush_kernel_dcache_page(void *);
extern void flush_kernel_icache_page(void *);
extern void disable_sr_hashing(void); /* turns off space register hashing */
extern void disable_sr_hashing_asm(int); /* low level support for above */
extern void free_sid(unsigned long);
unsigned long alloc_sid(void);

struct seq_file;
extern void show_cache_info(struct seq_file *m);

extern int split_tlb;
extern int dcache_stride;
extern int icache_stride;
extern struct pdc_cache_info cache_info;
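
/*
 * TLB purge helpers: pdtlb()/pitlb() issue the PA-RISC "purge data TLB"
 * and "purge instruction TLB" instructions for the entry covering addr,
 * using space register %sr1; pdtlb_kernel() purges a data TLB entry for
 * a kernel address without going through %sr1.
 */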
#define pdtlb(addr)         asm volatile("pdtlb 0(%%sr1,%0)" : : "r" (addr));
#define pitlb(addr)         asm volatile("pitlb 0(%%sr1,%0)" : : "r" (addr));
#define pdtlb_kernel(addr)  asm volatile("pdtlb 0(%0)" : : "r" (addr));

#endif /* ! __ASSEMBLY__ */

/* Classes of processor wrt: disabling space register hashing */

#define SRHASH_PCXST    0   /* pcxs, pcxt, pcxt_ */
#define SRHASH_PCXL     1   /* pcxl */
#define SRHASH_PA20     2   /* pcxu, pcxu_, pcxw, pcxw_ */

#endif