/*
 * Copyright 2004-2008 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#include <linux/linkage.h>
#include <asm/blackfin.h>
#include <mach/irq.h>
#include <asm/dpmc.h>

.section .l1.text
/*
 * Enter the Blackfin "sleep" power mode: stop the core clock (set the
 * STOPCK-style bit 3 in PLL_CTL) and IDLE until a wakeup interrupt,
 * then restore full-speed operation.
 *
 * In:  R0/R1/R2 = wakeup-enable words, passed straight through to the
 *      first _set_sic_iwr call (caller chooses the wakeup sources).
 * Uses R7 as a scratch callee-saved reg; saved/restored on the stack.
 */
ENTRY(_sleep_mode)
	[--SP] = (R7:4, P5:3);		/* preserve callee-saved regs */
	[--SP] = RETS;

	call _set_sic_iwr;		/* program caller's wakeup sources */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R1 = W[P0](z);
	BITSET (R1, 3);			/* PLL_CTL bit 3: stop CCLK (enter sleep on IDLE) */
	W[P0] = R1.L;

	CLI R2;				/* PLL reprogramming sequence must be atomic */
	SSYNC;
	IDLE;				/* sleep here until a wakeup event */
	STI R2;

	call _test_pll_locked;		/* wait for PLL to relock after wakeup */

	R0 = IWR_ENABLE(0);		/* now only allow PLL-wakeup event 0 */
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R7 = w[p0](z);
	BITCLR (R7, 3);			/* re-enable CCLK */
	BITCLR (R7, 5);			/* clear bit 5 as well (PLL power-down style bit — see HRM) */
	w[p0] = R7.L;
	IDLE;				/* let the PLL transition take effect */

	bfin_init_pm_bench_cycles;	/* PM benchmark hook (macro from asm/dpmc.h) */

	call _test_pll_locked;

	RETS = [SP++];
	(R7:4, P5:3) = [SP++];
	RTS;
ENDPROC(_sleep_mode)
1394f032 | 54 | |
/*
 * This func never returns as it puts the part into hibernate, and
 * is only called from do_hibernate, so we don't bother saving or
 * restoring any of the normal C runtime state.  When we wake up,
 * the entry point will be in do_hibernate and not here.
 *
 * We accept just one argument -- the value to write to VR_CTL.
 */

ENTRY(_hibernate_mode)
	/* Save/setup the regs we need early for minor pipeline optimization */
	R4 = R0;			/* stash VR_CTL value; R0 is clobbered below */

	P3.H = hi(VR_CTL);
	P3.L = lo(VR_CTL);
	/* Disable all wakeup sources */
	R0 = IWR_DISABLE_ALL;
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;
	call _set_sic_iwr;
	call _set_dram_srfs;		/* put DRAM into self-refresh before power drops */
	SSYNC;

	/* Finally, we climb into our cave to hibernate */
	W[P3] = R4.L;			/* write caller's value to VR_CTL -> regulator off */

	bfin_init_pm_bench_cycles;	/* PM benchmark hook (macro from asm/dpmc.h) */

	CLI R2;
	IDLE;				/* power is cut here; we never execute past this */
.Lforever:
	jump .Lforever;			/* safety net: spin if IDLE ever falls through */
ENDPROC(_hibernate_mode)
1394f032 | 88 | |
/*
 * Enter "sleep deeper": DRAM into self-refresh, VCO/SCLK dropped to
 * minimum, core voltage lowered, CCLK stopped, then IDLE.  On wakeup
 * everything (voltage, PLL multiplier/divider, DRAM) is restored and
 * we return to the caller.
 *
 * In:  R0/R1/R2 = wakeup-enable words; parked in P3/P4/P5 and handed
 *      to _set_sic_iwr just before the final IDLE.
 */
ENTRY(_sleep_deeper)
	[--SP] = (R7:4, P5:3);		/* preserve callee-saved regs */
	[--SP] = RETS;

	CLI R4;				/* interrupts off; R4 holds old IMASK for STI below */

	P3 = R0;			/* park wakeup words; R0-R2 get reused */
	P4 = R1;
	P5 = R2;

	R0 = IWR_ENABLE(0);		/* while reprogramming, only PLL event 0 wakes us */
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;
	call _set_dram_srfs;		/* Set SDRAM Self Refresh */

	P0.H = hi(PLL_DIV);
	P0.L = lo(PLL_DIV);
	R6 = W[P0](z);			/* R6 = saved PLL_DIV for restore */
	R0.L = 0xF;
	W[P0] = R0.l;			/* Set Max VCO to SCLK divider */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R5 = W[P0](z);			/* R5 = saved PLL_CTL for restore */
	R0.L = (CONFIG_MIN_VCO_HZ/CONFIG_CLKIN_HZ) << 9;
	W[P0] = R0.l;			/* Set Min CLKIN to VCO multiplier */

	SSYNC;
	IDLE;				/* PLL programming takes effect during IDLE */

	call _test_pll_locked;

	P0.H = hi(VR_CTL);
	P0.L = lo(VR_CTL);
	R7 = W[P0](z);			/* R7 = saved VR_CTL for restore */
	R1 = 0x6;
	R1 <<= 16;			/* build deposit descriptor: field of 6 bits at pos 16? — per DEPOSIT semantics */
	R2 = 0x0404(Z);
	R1 = R1|R2;

	R2 = DEPOSIT(R7, R1);		/* merge new voltage field into old VR_CTL */
	W[P0] = R2;			/* Set Min Core Voltage */

	SSYNC;
	IDLE;				/* wait out the regulator transition */

	call _test_pll_locked;

	R0 = P3;			/* restore caller's wakeup words */
	R1 = P4;
	R3 = P5;			/* NOTE(review): _set_sic_iwr consumes R0/R1/R2 —
					 * writing P5 to R3 looks unintended; confirm whether
					 * this should be R2 = P5 (third IWR word is dropped). */
	call _set_sic_iwr;		/* Set Awake from IDLE */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	R0 = W[P0](z);
	BITSET (R0, 3);
	W[P0] = R0.L;			/* Turn CCLK OFF */
	SSYNC;
	IDLE;				/* === the actual deep sleep happens here === */

	call _test_pll_locked;

	R0 = IWR_ENABLE(0);		/* back to PLL-event-only wakeups for the restore path */
	R1 = IWR_DISABLE_ALL;
	R2 = IWR_DISABLE_ALL;

	call _set_sic_iwr;		/* Set Awake from IDLE PLL */

	P0.H = hi(VR_CTL);
	P0.L = lo(VR_CTL);
	W[P0]= R7;			/* restore original core voltage */

	SSYNC;
	IDLE;

	bfin_init_pm_bench_cycles;	/* PM benchmark hook (macro from asm/dpmc.h) */

	call _test_pll_locked;

	P0.H = hi(PLL_DIV);
	P0.L = lo(PLL_DIV);
	W[P0]= R6;			/* Restore CCLK and SCLK divider */

	P0.H = hi(PLL_CTL);
	P0.L = lo(PLL_CTL);
	w[p0] = R5;			/* Restore VCO multiplier */
	IDLE;
	call _test_pll_locked;

	call _unset_dram_srfs;		/* SDRAM Self Refresh Off */

	STI R4;				/* re-enable interrupts saved by CLI above */

	RETS = [SP++];
	(R7:4, P5:3) = [SP++];
	RTS;
ENDPROC(_sleep_deeper)
1efc80b5 | 189 | |
fb5f0049 SZ |
190 | ENTRY(_set_dram_srfs) |
191 | /* set the dram to self refresh mode */ | |
1efc80b5 MH |
192 | SSYNC; |
193 | #if defined(EBIU_RSTCTL) /* DDR */ | |
fb5f0049 SZ |
194 | P0.H = hi(EBIU_RSTCTL); |
195 | P0.L = lo(EBIU_RSTCTL); | |
196 | R2 = [P0]; | |
1efc80b5 MH |
197 | BITSET(R2, 3); /* SRREQ enter self-refresh mode */ |
198 | [P0] = R2; | |
199 | SSYNC; | |
200 | 1: | |
201 | R2 = [P0]; | |
202 | CC = BITTST(R2, 4); | |
203 | if !CC JUMP 1b; | |
204 | #else /* SDRAM */ | |
1394f032 | 205 | P0.L = lo(EBIU_SDGCTL); |
1efc80b5 | 206 | P0.H = hi(EBIU_SDGCTL); |
9e770f77 MF |
207 | P1.L = lo(EBIU_SDSTAT); |
208 | P1.H = hi(EBIU_SDSTAT); | |
209 | ||
1394f032 | 210 | R2 = [P0]; |
1efc80b5 | 211 | BITSET(R2, 24); /* SRFS enter self-refresh mode */ |
1394f032 | 212 | [P0] = R2; |
1efc80b5 MH |
213 | SSYNC; |
214 | ||
1efc80b5 | 215 | 1: |
9e770f77 | 216 | R2 = w[P1]; |
1efc80b5 MH |
217 | SSYNC; |
218 | cc = BITTST(R2, 1); /* SDSRA poll self-refresh status */ | |
219 | if !cc jump 1b; | |
220 | ||
fb5f0049 | 221 | R2 = [P0]; |
1efc80b5 MH |
222 | BITCLR(R2, 0); /* SCTLE disable CLKOUT */ |
223 | [P0] = R2; | |
fb5f0049 | 224 | #endif |
1394f032 | 225 | RTS; |
1a8caeeb | 226 | ENDPROC(_set_dram_srfs) |
1efc80b5 | 227 | |
fb5f0049 SZ |
228 | ENTRY(_unset_dram_srfs) |
229 | /* set the dram out of self refresh mode */ | |
9e770f77 | 230 | |
1efc80b5 | 231 | #if defined(EBIU_RSTCTL) /* DDR */ |
fb5f0049 SZ |
232 | P0.H = hi(EBIU_RSTCTL); |
233 | P0.L = lo(EBIU_RSTCTL); | |
234 | R2 = [P0]; | |
1efc80b5 MH |
235 | BITCLR(R2, 3); /* clear SRREQ bit */ |
236 | [P0] = R2; | |
237 | #elif defined(EBIU_SDGCTL) /* SDRAM */ | |
9e770f77 MF |
238 | /* release CLKOUT from self-refresh */ |
239 | P0.L = lo(EBIU_SDGCTL); | |
1394f032 | 240 | P0.H = hi(EBIU_SDGCTL); |
9e770f77 | 241 | |
1394f032 | 242 | R2 = [P0]; |
1efc80b5 MH |
243 | BITSET(R2, 0); /* SCTLE enable CLKOUT */ |
244 | [P0] = R2 | |
245 | SSYNC; | |
246 | ||
9e770f77 | 247 | /* release SDRAM from self-refresh */ |
1efc80b5 MH |
248 | R2 = [P0]; |
249 | BITCLR(R2, 24); /* clear SRFS bit */ | |
250 | [P0] = R2 | |
fb5f0049 | 251 | #endif |
9e770f77 | 252 | |
1efc80b5 | 253 | SSYNC; |
1394f032 | 254 | RTS; |
1a8caeeb | 255 | ENDPROC(_unset_dram_srfs) |
1394f032 BW |
256 | |
/*
 * Program the SIC interrupt-wakeup registers.
 *
 * In:  R0 = IWR0 word, R1 = IWR1 word, R2 = IWR2 word (R1/R2 only
 *      consumed on parts that have the multi-register SIC).
 * Clobbers P0.
 */
ENTRY(_set_sic_iwr)
#ifdef SIC_IWR0
	P0.H = hi(SYSMMR_BASE);		/* one base pointer, offset per register */
	P0.L = lo(SYSMMR_BASE);
	[P0 + (SIC_IWR0 - SYSMMR_BASE)] = R0;
	[P0 + (SIC_IWR1 - SYSMMR_BASE)] = R1;
# ifdef SIC_IWR2
	[P0 + (SIC_IWR2 - SYSMMR_BASE)] = R2;
# endif
#else
	P0.H = hi(SIC_IWR);		/* single-IWR parts: only R0 is used */
	P0.L = lo(SIC_IWR);
	[P0] = R0;
#endif

	SSYNC;
	RTS;
ENDPROC(_set_sic_iwr)
1394f032 | 275 | |
1394f032 BW |
/*
 * Busy-wait until the PLL reports lock: spins reading PLL_STAT until
 * bit 5 is set.  Clobbers P0, R0, CC.
 */
ENTRY(_test_pll_locked)
	P0.H = hi(PLL_STAT);
	P0.L = lo(PLL_STAT);
1:
	R0 = W[P0] (Z);
	CC = BITTST(R0,5);		/* bit 5: PLL locked status */
	IF !CC JUMP 1b;
	RTS;
ENDPROC(_test_pll_locked)
1efc80b5 MH |
285 | |
.section .text
/*
 * Full hibernate entry point (lives in normal .text, unlike the L1
 * routines above).  Saves CPU regs and MMRs via the bfin_*_save
 * macros, stores a resume magic/address/SP triple at address 0, then
 * calls _hibernate_mode (which never returns).  On power-up the boot
 * path jumps back to .Lpm_resume_here, where everything is restored.
 *
 * In:  R0 = VR_CTL value, forwarded to _hibernate_mode via M3/R0.
 */
ENTRY(_do_hibernate)
	bfin_cpu_reg_save;
	bfin_sys_mmr_save;
	bfin_core_mmr_save;

	/* Setup args to hibernate mode early for pipeline optimization */
	R0 = M3;			/* presumably M3 was loaded with the VR_CTL value by the save macros — confirm */
	P1.H = _hibernate_mode;
	P1.L = _hibernate_mode;

	/* Save Magic, return address and Stack Pointer */
	P0 = 0;				/* resume block lives at address 0 */
	R1.H = 0xDEAD;			/* Hibernate Magic */
	R1.L = 0xBEEF;
	R2.H = .Lpm_resume_here;
	R2.L = .Lpm_resume_here;
	[P0++] = R1;			/* Store Hibernate Magic */
	[P0++] = R2;			/* Save Return Address */
	[P0++] = SP;			/* Save Stack Pointer */

	/* Must use an indirect call as we need to jump to L1 */
	call (P1);			/* Goodbye */

.Lpm_resume_here:

	bfin_core_mmr_restore;
	bfin_sys_mmr_restore;
	bfin_cpu_reg_restore;

	[--sp] = RETI;			/* Clear Global Interrupt Disable */
	SP += 4;

	RTS;
ENDPROC(_do_hibernate)