/*
 * Contains register definitions common to PowerPC 8xx CPUs.
 */
#ifndef _ASM_POWERPC_REG_8xx_H
#define _ASM_POWERPC_REG_8xx_H

#include <asm/mmu.h>

/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.
 */
#define SPRN_IC_CST	560	/* Instruction cache control/status */
#define SPRN_IC_ADR	561	/* Address needed for some commands */
#define SPRN_IC_DAT	562	/* Read-only data register */
#define SPRN_DC_CST	568	/* Data cache control/status */
#define SPRN_DC_ADR	569	/* Address needed for some commands */
#define SPRN_DC_DAT	570	/* Read-only data register */

/* Misc Debug */
#define SPRN_DPDR	630
#define SPRN_MI_CAM	816
#define SPRN_MI_RAM0	817
#define SPRN_MI_RAM1	818
#define SPRN_MD_CAM	824
#define SPRN_MD_RAM0	825
#define SPRN_MD_RAM1	826

/* Special MSR manipulation registers */
#define SPRN_EIE	80	/* External interrupt enable (EE=1, RI=1) */
#define SPRN_EID	81	/* External interrupt disable (EE=0, RI=1) */
#define SPRN_NRI	82	/* Non recoverable interrupt (EE=0, RI=0) */

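/*
 * Illustration only, not part of this header: as the comments above note,
 * an mtspr to one of these SPRs tweaks MSR[EE]/MSR[RI] directly, so
 * external interrupts could be masked and re-enabled with e.g.
 *
 *	mtspr(SPRN_EID, 0);	(EE=0, RI=1)
 *	mtspr(SPRN_EIE, 0);	(EE=1, RI=1)
 *
 * assuming the usual mtspr() accessor; the written value itself is
 * presumably ignored by these special registers.
 */
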
/* Debug registers */
#define SPRN_CMPA	144
#define SPRN_COUNTA	150
#define SPRN_CMPE	152
#define SPRN_CMPF	153
#define SPRN_LCTRL1	156
#define SPRN_LCTRL2	157
#define SPRN_ICTRL	158
#define SPRN_BAR	159

/* Commands.  Only the first few are available to the instruction cache.
*/
#define IDC_ENABLE	0x02000000	/* Cache enable */
#define IDC_DISABLE	0x04000000	/* Cache disable */
#define IDC_LDLCK	0x06000000	/* Load and lock */
#define IDC_UNLINE	0x08000000	/* Unlock line */
#define IDC_UNALL	0x0a000000	/* Unlock all */
#define IDC_INVALL	0x0c000000	/* Invalidate all */

#define DC_FLINE	0x0e000000	/* Flush data cache line */
#define DC_SFWT		0x01000000	/* Set forced writethrough mode */
#define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
#define DC_SLES		0x05000000	/* Set little endian swap mode */
#define DC_CLES		0x07000000	/* Clear little endian swap mode */

/* Status.
*/
#define IDC_ENABLED	0x80000000	/* Cache is enabled */
#define IDC_CERR1	0x00200000	/* Cache error 1 */
#define IDC_CERR2	0x00100000	/* Cache error 2 */
#define IDC_CERR3	0x00080000	/* Cache error 3 */

#define DC_DFWT		0x40000000	/* Data cache is forced write through */
#define DC_LES		0x20000000	/* Caches are little endian mode */

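/*
 * Sketch only, not taken from the kernel sources: the command values above
 * are written to the corresponding control/status SPR, and the status bits
 * are read back from the same register, so invalidating and then enabling
 * the data cache might look like
 *
 *	mtspr(SPRN_DC_CST, IDC_INVALL);
 *	mtspr(SPRN_DC_CST, IDC_ENABLE);
 *	if (mfspr(SPRN_DC_CST) & IDC_ENABLED)
 *		(cache is on)
 *
 * assuming the usual mtspr()/mfspr() accessors.
 */
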
#ifdef CONFIG_8xx_CPU6
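/*
 * On silicon affected by the CPU6 erratum, an mtspr to certain SPRs is
 * expected to be preceded by a dummy store and reload of a per-register
 * magic value (see the MPC860 CPU6 erratum documentation for the
 * authoritative sequence).  do_mtspr_cpu6() below emits that store/load
 * immediately before the real mtspr, and the mtspr() wrapper maps each
 * known SPR to its magic value, falling back to a plain mtspr otherwise.
 */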
#define do_mtspr_cpu6(rn, rn_addr, v)					\
	do {								\
		int _reg_cpu6 = rn_addr, _tmp_cpu6;			\
		asm volatile("stw %0, %1;"				\
			     "lwz %0, %1;"				\
			     "mtspr " __stringify(rn) ",%2" :		\
			     : "r" (_reg_cpu6), "m" (_tmp_cpu6),	\
			       "r" ((unsigned long)(v))			\
			     : "memory");				\
	} while (0)

#define do_mtspr(rn, v)	asm volatile("mtspr " __stringify(rn) ",%0" :	\
				     : "r" ((unsigned long)(v))		\
				     : "memory")
#define mtspr(rn, v) \
	do {								\
		if (rn == SPRN_IMMR)					\
			do_mtspr_cpu6(rn, 0x3d30, v);			\
		else if (rn == SPRN_IC_CST)				\
			do_mtspr_cpu6(rn, 0x2110, v);			\
		else if (rn == SPRN_IC_ADR)				\
			do_mtspr_cpu6(rn, 0x2310, v);			\
		else if (rn == SPRN_IC_DAT)				\
			do_mtspr_cpu6(rn, 0x2510, v);			\
		else if (rn == SPRN_DC_CST)				\
			do_mtspr_cpu6(rn, 0x3110, v);			\
		else if (rn == SPRN_DC_ADR)				\
			do_mtspr_cpu6(rn, 0x3310, v);			\
		else if (rn == SPRN_DC_DAT)				\
			do_mtspr_cpu6(rn, 0x3510, v);			\
		else if (rn == SPRN_MI_CTR)				\
			do_mtspr_cpu6(rn, 0x2180, v);			\
		else if (rn == SPRN_MI_AP)				\
			do_mtspr_cpu6(rn, 0x2580, v);			\
		else if (rn == SPRN_MI_EPN)				\
			do_mtspr_cpu6(rn, 0x2780, v);			\
		else if (rn == SPRN_MI_TWC)				\
			do_mtspr_cpu6(rn, 0x2b80, v);			\
		else if (rn == SPRN_MI_RPN)				\
			do_mtspr_cpu6(rn, 0x2d80, v);			\
		else if (rn == SPRN_MI_CAM)				\
			do_mtspr_cpu6(rn, 0x2190, v);			\
		else if (rn == SPRN_MI_RAM0)				\
			do_mtspr_cpu6(rn, 0x2390, v);			\
		else if (rn == SPRN_MI_RAM1)				\
			do_mtspr_cpu6(rn, 0x2590, v);			\
		else if (rn == SPRN_MD_CTR)				\
			do_mtspr_cpu6(rn, 0x3180, v);			\
		else if (rn == SPRN_M_CASID)				\
			do_mtspr_cpu6(rn, 0x3380, v);			\
		else if (rn == SPRN_MD_AP)				\
			do_mtspr_cpu6(rn, 0x3580, v);			\
		else if (rn == SPRN_MD_EPN)				\
			do_mtspr_cpu6(rn, 0x3780, v);			\
		else if (rn == SPRN_M_TWB)				\
			do_mtspr_cpu6(rn, 0x3980, v);			\
		else if (rn == SPRN_MD_TWC)				\
			do_mtspr_cpu6(rn, 0x3b80, v);			\
		else if (rn == SPRN_MD_RPN)				\
			do_mtspr_cpu6(rn, 0x3d80, v);			\
		else if (rn == SPRN_M_TW)				\
			do_mtspr_cpu6(rn, 0x3f80, v);			\
		else if (rn == SPRN_MD_CAM)				\
			do_mtspr_cpu6(rn, 0x3190, v);			\
		else if (rn == SPRN_MD_RAM0)				\
			do_mtspr_cpu6(rn, 0x3390, v);			\
		else if (rn == SPRN_MD_RAM1)				\
			do_mtspr_cpu6(rn, 0x3590, v);			\
		else if (rn == SPRN_DEC)				\
			do_mtspr_cpu6(rn, 0x2c00, v);			\
		else if (rn == SPRN_TBWL)				\
			do_mtspr_cpu6(rn, 0x3880, v);			\
		else if (rn == SPRN_TBWU)				\
			do_mtspr_cpu6(rn, 0x3a80, v);			\
		else if (rn == SPRN_DPDR)				\
			do_mtspr_cpu6(rn, 0x2d30, v);			\
		else							\
			do_mtspr(rn, v);				\
	} while (0)
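
/*
 * Usage sketch (illustrative, not from the kernel sources): with
 * CONFIG_8xx_CPU6 selected, a call such as
 *
 *	mtspr(SPRN_DC_CST, IDC_INVALL);
 *
 * effectively selects do_mtspr_cpu6(SPRN_DC_CST, 0x3110, IDC_INVALL),
 * i.e. the erratum sequence above; SPRs without an entry in the table
 * fall back to a plain do_mtspr().
 */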
#endif

#endif /* _ASM_POWERPC_REG_8xx_H */