/*
 * arch/powerpc/include/asm/reg_8xx.h
 *
 * Contains register definitions common to PowerPC 8xx CPUs.
 */
#ifndef _ASM_POWERPC_REG_8xx_H
#define _ASM_POWERPC_REG_8xx_H

#include <asm/mmu-8xx.h>

/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.
 */
#define SPRN_IC_CST 560 /* Instruction cache control/status */
#define SPRN_IC_ADR 561 /* Address needed for some commands */
#define SPRN_IC_DAT 562 /* Read-only data register */
#define SPRN_DC_CST 568 /* Data cache control/status */
#define SPRN_DC_ADR 569 /* Address needed for some commands */
#define SPRN_DC_DAT 570 /* Read-only data register */

/* Misc Debug */
#define SPRN_DPDR 630
#define SPRN_MI_CAM 816
#define SPRN_MI_RAM0 817
#define SPRN_MI_RAM1 818
#define SPRN_MD_CAM 824
#define SPRN_MD_RAM0 825
#define SPRN_MD_RAM1 826

/* Commands. Only the first few are available to the instruction cache.
 */
#define IDC_ENABLE 0x02000000 /* Cache enable */
#define IDC_DISABLE 0x04000000 /* Cache disable */
#define IDC_LDLCK 0x06000000 /* Load and lock */
#define IDC_UNLINE 0x08000000 /* Unlock line */
#define IDC_UNALL 0x0a000000 /* Unlock all */
#define IDC_INVALL 0x0c000000 /* Invalidate all */

#define DC_FLINE 0x0e000000 /* Flush data cache line */
#define DC_SFWT 0x01000000 /* Set forced writethrough mode */
#define DC_CFWT 0x03000000 /* Clear forced writethrough mode */
#define DC_SLES 0x05000000 /* Set little endian swap mode */
#define DC_CLES 0x07000000 /* Clear little endian swap mode */

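/*
 * Illustrative usage sketch (not part of the original header): commands
 * are issued by writing them to the control/status SPRs with mtspr(),
 * normally provided by <asm/reg.h> (or by the CPU6-safe macro further
 * down when CONFIG_8xx_CPU6 is set).  These example_8xx_* helpers are
 * hypothetical caller-side code, not header content; exact sequencing
 * and sync requirements should be checked against the MPC8xx reference
 * manual.
 */
static inline void example_8xx_icache_inval_all(void)
{
	mtspr(SPRN_IC_CST, IDC_INVALL);	/* invalidate the whole I-cache */
}

static inline void example_8xx_dcache_flush_line(unsigned long addr)
{
	mtspr(SPRN_DC_ADR, addr);	/* address used by the line command */
	mtspr(SPRN_DC_CST, DC_FLINE);	/* flush that data cache line */
}
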
/* Status.
 */
#define IDC_ENABLED 0x80000000 /* Cache is enabled */
#define IDC_CERR1 0x00200000 /* Cache error 1 */
#define IDC_CERR2 0x00100000 /* Cache error 2 */
#define IDC_CERR3 0x00080000 /* Cache error 3 */

#define DC_DFWT 0x40000000 /* Data cache is forced write through */
#define DC_LES 0x20000000 /* Caches are little endian mode */

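/*
 * Illustrative usage sketch (not part of the original header): status
 * bits are read back with mfspr() from <asm/reg.h>.  The IDC_ prefix
 * suggests bits common to both caches, while DC_* bits are data-cache
 * specific.  The example_8xx_* helpers are hypothetical caller-side
 * code.
 */
static inline int example_8xx_dcache_enabled(void)
{
	return (mfspr(SPRN_DC_CST) & IDC_ENABLED) != 0;
}

static inline int example_8xx_dcache_forced_writethrough(void)
{
	return (mfspr(SPRN_DC_CST) & DC_DFWT) != 0;
}
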
#ifdef CONFIG_8xx_CPU6
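/*
 * CONFIG_8xx_CPU6 builds work around the CPU6 silicon erratum reported
 * on some MPC860 parts: before writing an affected SPR, a per-register
 * constant (the rn_addr values in the mtspr() table below, e.g. 0x3d30
 * for SPRN_IMMR) is stored to a temporary and immediately loaded back,
 * and only then is the real mtspr issued.  do_mtspr_cpu6() performs
 * that store/load/mtspr sequence; mtspr() picks the constant for each
 * affected SPR and falls back to a plain mtspr for everything else.
 */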
#define do_mtspr_cpu6(rn, rn_addr, v) \
	do { \
		int _reg_cpu6 = rn_addr, _tmp_cpu6; \
		asm volatile("stw %0, %1;" \
			     "lwz %0, %1;" \
			     "mtspr " __stringify(rn) ",%2" : \
			     : "r" (_reg_cpu6), "m"(_tmp_cpu6), \
			       "r" ((unsigned long)(v)) \
			     : "memory"); \
	} while (0)

#define do_mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : \
				     : "r" ((unsigned long)(v)) \
				     : "memory")
#define mtspr(rn, v) \
	do { \
		if (rn == SPRN_IMMR) \
			do_mtspr_cpu6(rn, 0x3d30, v); \
		else if (rn == SPRN_IC_CST) \
			do_mtspr_cpu6(rn, 0x2110, v); \
		else if (rn == SPRN_IC_ADR) \
			do_mtspr_cpu6(rn, 0x2310, v); \
		else if (rn == SPRN_IC_DAT) \
			do_mtspr_cpu6(rn, 0x2510, v); \
		else if (rn == SPRN_DC_CST) \
			do_mtspr_cpu6(rn, 0x3110, v); \
		else if (rn == SPRN_DC_ADR) \
			do_mtspr_cpu6(rn, 0x3310, v); \
		else if (rn == SPRN_DC_DAT) \
			do_mtspr_cpu6(rn, 0x3510, v); \
		else if (rn == SPRN_MI_CTR) \
			do_mtspr_cpu6(rn, 0x2180, v); \
		else if (rn == SPRN_MI_AP) \
			do_mtspr_cpu6(rn, 0x2580, v); \
		else if (rn == SPRN_MI_EPN) \
			do_mtspr_cpu6(rn, 0x2780, v); \
		else if (rn == SPRN_MI_TWC) \
			do_mtspr_cpu6(rn, 0x2b80, v); \
		else if (rn == SPRN_MI_RPN) \
			do_mtspr_cpu6(rn, 0x2d80, v); \
		else if (rn == SPRN_MI_CAM) \
			do_mtspr_cpu6(rn, 0x2190, v); \
		else if (rn == SPRN_MI_RAM0) \
			do_mtspr_cpu6(rn, 0x2390, v); \
		else if (rn == SPRN_MI_RAM1) \
			do_mtspr_cpu6(rn, 0x2590, v); \
		else if (rn == SPRN_MD_CTR) \
			do_mtspr_cpu6(rn, 0x3180, v); \
		else if (rn == SPRN_M_CASID) \
			do_mtspr_cpu6(rn, 0x3380, v); \
		else if (rn == SPRN_MD_AP) \
			do_mtspr_cpu6(rn, 0x3580, v); \
		else if (rn == SPRN_MD_EPN) \
			do_mtspr_cpu6(rn, 0x3780, v); \
		else if (rn == SPRN_M_TWB) \
			do_mtspr_cpu6(rn, 0x3980, v); \
		else if (rn == SPRN_MD_TWC) \
			do_mtspr_cpu6(rn, 0x3b80, v); \
		else if (rn == SPRN_MD_RPN) \
			do_mtspr_cpu6(rn, 0x3d80, v); \
		else if (rn == SPRN_M_TW) \
			do_mtspr_cpu6(rn, 0x3f80, v); \
		else if (rn == SPRN_MD_CAM) \
			do_mtspr_cpu6(rn, 0x3190, v); \
		else if (rn == SPRN_MD_RAM0) \
			do_mtspr_cpu6(rn, 0x3390, v); \
		else if (rn == SPRN_MD_RAM1) \
			do_mtspr_cpu6(rn, 0x3590, v); \
		else if (rn == SPRN_DEC) \
			do_mtspr_cpu6(rn, 0x2c00, v); \
		else if (rn == SPRN_TBWL) \
			do_mtspr_cpu6(rn, 0x3880, v); \
		else if (rn == SPRN_TBWU) \
			do_mtspr_cpu6(rn, 0x3a80, v); \
		else if (rn == SPRN_DPDR) \
			do_mtspr_cpu6(rn, 0x2d30, v); \
		else \
			do_mtspr(rn, v); \
	} while (0)
#endif

#endif /* _ASM_POWERPC_REG_8xx_H */