drivers/net/wireless/ath/ath9k/hw.c
1 /*
2 * Copyright (c) 2008-2009 Atheros Communications Inc.
3 *
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17 #include <linux/io.h>
18 #include <asm/unaligned.h>
19
20 #include "hw.h"
21 #include "rc.h"
22 #include "initvals.h"
23
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
27
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
32 u32 reg, u32 value);
33 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
34 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
35
36 MODULE_AUTHOR("Atheros Communications");
37 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
38 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
39 MODULE_LICENSE("Dual BSD/GPL");
40
41 static int __init ath9k_init(void)
42 {
43 return 0;
44 }
45 module_init(ath9k_init);
46
47 static void __exit ath9k_exit(void)
48 {
49 return;
50 }
51 module_exit(ath9k_exit);
52
53 /********************/
54 /* Helper Functions */
55 /********************/
56
57 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
58 {
59 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
60
61 if (!ah->curchan) /* should really check for CCK instead */
62 return clks / ATH9K_CLOCK_RATE_CCK;
63 if (conf->channel->band == IEEE80211_BAND_2GHZ)
64 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
65
66 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
67 }
68
69 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
70 {
71 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
72
73 if (conf_is_ht40(conf))
74 return ath9k_hw_mac_usec(ah, clks) / 2;
75 else
76 return ath9k_hw_mac_usec(ah, clks);
77 }
78
79 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
80 {
81 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
82
83 if (!ah->curchan) /* should really check for CCK instead */
84 return usecs * ATH9K_CLOCK_RATE_CCK;
85 if (conf->channel->band == IEEE80211_BAND_2GHZ)
86 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
87 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
88 }
89
90 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
91 {
92 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
93
94 if (conf_is_ht40(conf))
95 return ath9k_hw_mac_clks(ah, usecs) * 2;
96 else
97 return ath9k_hw_mac_clks(ah, usecs);
98 }
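/*
 * Worked example (illustrative only, not driver logic): on a 2 GHz
 * channel the MAC clock is taken as 44 MHz (ATH9K_CLOCK_RATE_2GHZ_OFDM),
 * so ath9k_hw_mac_clks(ah, 10) yields 10 * 44 = 440 clocks, and
 * ath9k_hw_mac_to_clks(ah, 10) yields 880 clocks when the channel is
 * HT40; the usec helpers above perform the inverse division.
 */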
99
100 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
101 {
102 int i;
103
104 BUG_ON(timeout < AH_TIME_QUANTUM);
105
106 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
107 if ((REG_READ(ah, reg) & mask) == val)
108 return true;
109
110 udelay(AH_TIME_QUANTUM);
111 }
112
113 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
114 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
115 timeout, reg, REG_READ(ah, reg), mask, val);
116
117 return false;
118 }
119 EXPORT_SYMBOL(ath9k_hw_wait);
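/*
 * Typical use, as seen later in this file: poll a register until a
 * field reaches an expected value, e.g.
 *	ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT);
 * which spins in AH_TIME_QUANTUM steps for up to 'timeout' microseconds.
 */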
120
121 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
122 {
123 u32 retval;
124 int i;
125
126 for (i = 0, retval = 0; i < n; i++) {
127 retval = (retval << 1) | (val & 1);
128 val >>= 1;
129 }
130 return retval;
131 }
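/*
 * Example: ath9k_hw_reverse_bits(0x2d, 8) walks the low 8 bits LSB
 * first, turning 0b00101101 into 0b10110100 (0xb4).
 */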
132
133 bool ath9k_get_channel_edges(struct ath_hw *ah,
134 u16 flags, u16 *low,
135 u16 *high)
136 {
137 struct ath9k_hw_capabilities *pCap = &ah->caps;
138
139 if (flags & CHANNEL_5GHZ) {
140 *low = pCap->low_5ghz_chan;
141 *high = pCap->high_5ghz_chan;
142 return true;
143 }
144 if ((flags & CHANNEL_2GHZ)) {
145 *low = pCap->low_2ghz_chan;
146 *high = pCap->high_2ghz_chan;
147 return true;
148 }
149 return false;
150 }
151
152 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
153 const struct ath_rate_table *rates,
154 u32 frameLen, u16 rateix,
155 bool shortPreamble)
156 {
157 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
158 u32 kbps;
159
160 kbps = rates->info[rateix].ratekbps;
161
162 if (kbps == 0)
163 return 0;
164
165 switch (rates->info[rateix].phy) {
166 case WLAN_RC_PHY_CCK:
167 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
168 if (shortPreamble && rates->info[rateix].short_preamble)
169 phyTime >>= 1;
170 numBits = frameLen << 3;
171 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
172 break;
173 case WLAN_RC_PHY_OFDM:
174 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
175 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
176 numBits = OFDM_PLCP_BITS + (frameLen << 3);
177 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
178 txTime = OFDM_SIFS_TIME_QUARTER
179 + OFDM_PREAMBLE_TIME_QUARTER
180 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
181 } else if (ah->curchan &&
182 IS_CHAN_HALF_RATE(ah->curchan)) {
183 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
184 numBits = OFDM_PLCP_BITS + (frameLen << 3);
185 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
186 txTime = OFDM_SIFS_TIME_HALF +
187 OFDM_PREAMBLE_TIME_HALF
188 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
189 } else {
190 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
191 numBits = OFDM_PLCP_BITS + (frameLen << 3);
192 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
193 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
194 + (numSymbols * OFDM_SYMBOL_TIME);
195 }
196 break;
197 default:
198 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
199 "Unknown phy %u (rate ix %u)\n",
200 rates->info[rateix].phy, rateix);
201 txTime = 0;
202 break;
203 }
204
205 return txTime;
206 }
207 EXPORT_SYMBOL(ath9k_hw_computetxtime);
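/*
 * Worked example (assuming the usual OFDM constants: 22 PLCP bits,
 * 4 us symbols, 16 us SIFS, 20 us preamble): a 1500-byte frame at
 * 54 Mbps gives bitsPerSymbol = 216, numBits = 22 + 12000 = 12022,
 * numSymbols = DIV_ROUND_UP(12022, 216) = 56, so
 * txTime = 16 + 20 + 56 * 4 = 260 us.
 */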
208
209 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
210 struct ath9k_channel *chan,
211 struct chan_centers *centers)
212 {
213 int8_t extoff;
214
215 if (!IS_CHAN_HT40(chan)) {
216 centers->ctl_center = centers->ext_center =
217 centers->synth_center = chan->channel;
218 return;
219 }
220
221 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
222 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
223 centers->synth_center =
224 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
225 extoff = 1;
226 } else {
227 centers->synth_center =
228 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
229 extoff = -1;
230 }
231
232 centers->ctl_center =
233 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
234 /* 25 MHz spacing is supported by hw but not on upper layers */
235 centers->ext_center =
236 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
237 }
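/*
 * Example: for an HT40+ channel whose control channel is 2412 MHz
 * (with HT40_CHANNEL_CENTER_SHIFT of 10 MHz), synth_center = 2422,
 * ctl_center = 2412 and ext_center = 2432.
 */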
238
239 /******************/
240 /* Chip Revisions */
241 /******************/
242
243 static void ath9k_hw_read_revisions(struct ath_hw *ah)
244 {
245 u32 val;
246
247 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
248
249 if (val == 0xFF) {
250 val = REG_READ(ah, AR_SREV);
251 ah->hw_version.macVersion =
252 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
253 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
254 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
255 } else {
256 if (!AR_SREV_9100(ah))
257 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
258
259 ah->hw_version.macRev = val & AR_SREV_REVISION;
260
261 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
262 ah->is_pciexpress = true;
263 }
264 }
265
266 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
267 {
268 u32 val;
269 int i;
270
271 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
272
273 for (i = 0; i < 8; i++)
274 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
275 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
276 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
277
278 return ath9k_hw_reverse_bits(val, 8);
279 }
280
281 /************************************/
282 /* HW Attach, Detach, Init Routines */
283 /************************************/
284
285 static void ath9k_hw_disablepcie(struct ath_hw *ah)
286 {
287 if (AR_SREV_9100(ah))
288 return;
289
290 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
291 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
292 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
293 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
294 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
295 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
296 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
297 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
298 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
299
300 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
301 }
302
303 static bool ath9k_hw_chip_test(struct ath_hw *ah)
304 {
305 struct ath_common *common = ath9k_hw_common(ah);
306 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
307 u32 regHold[2];
308 u32 patternData[4] = { 0x55555555,
309 0xaaaaaaaa,
310 0x66666666,
311 0x99999999 };
312 int i, j;
313
314 for (i = 0; i < 2; i++) {
315 u32 addr = regAddr[i];
316 u32 wrData, rdData;
317
318 regHold[i] = REG_READ(ah, addr);
319 for (j = 0; j < 0x100; j++) {
320 wrData = (j << 16) | j;
321 REG_WRITE(ah, addr, wrData);
322 rdData = REG_READ(ah, addr);
323 if (rdData != wrData) {
324 ath_print(common, ATH_DBG_FATAL,
325 "address test failed "
326 "addr: 0x%08x - wr:0x%08x != "
327 "rd:0x%08x\n",
328 addr, wrData, rdData);
329 return false;
330 }
331 }
332 for (j = 0; j < 4; j++) {
333 wrData = patternData[j];
334 REG_WRITE(ah, addr, wrData);
335 rdData = REG_READ(ah, addr);
336 if (wrData != rdData) {
337 ath_print(common, ATH_DBG_FATAL,
338 "address test failed "
339 "addr: 0x%08x - wr:0x%08x != "
340 "rd:0x%08x\n",
341 addr, wrData, rdData);
342 return false;
343 }
344 }
345 REG_WRITE(ah, regAddr[i], regHold[i]);
346 }
347 udelay(100);
348
349 return true;
350 }
351
352 static const char *ath9k_hw_devname(u16 devid)
353 {
354 switch (devid) {
355 case AR5416_DEVID_PCI:
356 return "Atheros 5416";
357 case AR5416_DEVID_PCIE:
358 return "Atheros 5418";
359 case AR9160_DEVID_PCI:
360 return "Atheros 9160";
361 case AR5416_AR9100_DEVID:
362 return "Atheros 9100";
363 case AR9280_DEVID_PCI:
364 case AR9280_DEVID_PCIE:
365 return "Atheros 9280";
366 case AR9285_DEVID_PCIE:
367 return "Atheros 9285";
368 case AR5416_DEVID_AR9287_PCI:
369 case AR5416_DEVID_AR9287_PCIE:
370 return "Atheros 9287";
371 }
372
373 return NULL;
374 }
375
376 static void ath9k_hw_init_config(struct ath_hw *ah)
377 {
378 int i;
379
380 ah->config.dma_beacon_response_time = 2;
381 ah->config.sw_beacon_response_time = 10;
382 ah->config.additional_swba_backoff = 0;
383 ah->config.ack_6mb = 0x0;
384 ah->config.cwm_ignore_extcca = 0;
385 ah->config.pcie_powersave_enable = 0;
386 ah->config.pcie_clock_req = 0;
387 ah->config.pcie_waen = 0;
388 ah->config.analog_shiftreg = 1;
389 ah->config.ht_enable = 1;
390 ah->config.ofdm_trig_low = 200;
391 ah->config.ofdm_trig_high = 500;
392 ah->config.cck_trig_high = 200;
393 ah->config.cck_trig_low = 100;
394 ah->config.enable_ani = 1;
395 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
396 ah->config.antenna_switch_swap = 0;
397
398 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
399 ah->config.spurchans[i][0] = AR_NO_SPUR;
400 ah->config.spurchans[i][1] = AR_NO_SPUR;
401 }
402
403 ah->config.intr_mitigation = true;
404
405 /*
406 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
407 * _and_ only on multiprocessor (SMP/HT) systems.
408 * This means we use it for all AR5416 devices, and the few
409 * minor PCI AR9280 devices out there.
410 *
411 * Serialization is required because these devices do not handle
412 * two concurrent reads/writes well, due to the latency involved.
413 * While one read/write is still being processed by the hardware,
414 * another read/write can be issued on another CPU; if that happens
415 * the hardware can get stuck in a loop. We prevent this by
416 * serializing reads and writes.
417 *
418 * This issue is not present on PCI-Express devices or pre-AR5416
419 * devices (legacy, 802.11abg).
420 */
421 if (num_possible_cpus() > 1)
422 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
423 }
425
426 static void ath9k_hw_init_defaults(struct ath_hw *ah)
427 {
428 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
429
430 regulatory->country_code = CTRY_DEFAULT;
431 regulatory->power_limit = MAX_RATE_POWER;
432 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
433
434 ah->hw_version.magic = AR5416_MAGIC;
435 ah->hw_version.subvendorid = 0;
436
437 ah->ah_flags = 0;
438 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
439 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
440 if (!AR_SREV_9100(ah))
441 ah->ah_flags = AH_USE_EEPROM;
442
443 ah->atim_window = 0;
444 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
445 ah->beacon_interval = 100;
446 ah->enable_32kHz_clock = DONT_USE_32KHZ;
447 ah->slottime = (u32) -1;
448 ah->acktimeout = (u32) -1;
449 ah->ctstimeout = (u32) -1;
450 ah->globaltxtimeout = (u32) -1;
451
452 ah->gbeacon_rate = 0;
453
454 ah->power_mode = ATH9K_PM_UNDEFINED;
455 }
456
457 static int ath9k_hw_rf_claim(struct ath_hw *ah)
458 {
459 u32 val;
460
461 REG_WRITE(ah, AR_PHY(0), 0x00000007);
462
463 val = ath9k_hw_get_radiorev(ah);
464 switch (val & AR_RADIO_SREV_MAJOR) {
465 case 0:
466 val = AR_RAD5133_SREV_MAJOR;
467 break;
468 case AR_RAD5133_SREV_MAJOR:
469 case AR_RAD5122_SREV_MAJOR:
470 case AR_RAD2133_SREV_MAJOR:
471 case AR_RAD2122_SREV_MAJOR:
472 break;
473 default:
474 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
475 "Radio Chip Rev 0x%02X not supported\n",
476 val & AR_RADIO_SREV_MAJOR);
477 return -EOPNOTSUPP;
478 }
479
480 ah->hw_version.analog5GhzRev = val;
481
482 return 0;
483 }
484
485 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
486 {
487 struct ath_common *common = ath9k_hw_common(ah);
488 u32 sum;
489 int i;
490 u16 eeval;
491
492 sum = 0;
493 for (i = 0; i < 3; i++) {
494 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
495 sum += eeval;
496 common->macaddr[2 * i] = eeval >> 8;
497 common->macaddr[2 * i + 1] = eeval & 0xff;
498 }
499 if (sum == 0 || sum == 0xffff * 3)
500 return -EADDRNOTAVAIL;
501
502 return 0;
503 }
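/*
 * The EEPROM stores the MAC address as three big-endian 16-bit words,
 * e.g. 0x0013, 0x7412, 0x3456 yields 00:13:74:12:34:56 (an illustrative
 * value). A sum of 0 or 3 * 0xffff means the EEPROM is blank or erased,
 * hence -EADDRNOTAVAIL.
 */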
504
505 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
506 {
507 u32 rxgain_type;
508
509 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
510 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
511
512 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
513 INIT_INI_ARRAY(&ah->iniModesRxGain,
514 ar9280Modes_backoff_13db_rxgain_9280_2,
515 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
516 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
517 INIT_INI_ARRAY(&ah->iniModesRxGain,
518 ar9280Modes_backoff_23db_rxgain_9280_2,
519 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
520 else
521 INIT_INI_ARRAY(&ah->iniModesRxGain,
522 ar9280Modes_original_rxgain_9280_2,
523 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
524 } else {
525 INIT_INI_ARRAY(&ah->iniModesRxGain,
526 ar9280Modes_original_rxgain_9280_2,
527 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
528 }
529 }
530
531 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
532 {
533 u32 txgain_type;
534
535 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
536 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
537
538 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
539 INIT_INI_ARRAY(&ah->iniModesTxGain,
540 ar9280Modes_high_power_tx_gain_9280_2,
541 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
542 else
543 INIT_INI_ARRAY(&ah->iniModesTxGain,
544 ar9280Modes_original_tx_gain_9280_2,
545 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
546 } else {
547 INIT_INI_ARRAY(&ah->iniModesTxGain,
548 ar9280Modes_original_tx_gain_9280_2,
549 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
550 }
551 }
552
553 static int ath9k_hw_post_init(struct ath_hw *ah)
554 {
555 int ecode;
556
557 if (!ath9k_hw_chip_test(ah))
558 return -ENODEV;
559
560 ecode = ath9k_hw_rf_claim(ah);
561 if (ecode != 0)
562 return ecode;
563
564 ecode = ath9k_hw_eeprom_init(ah);
565 if (ecode != 0)
566 return ecode;
567
568 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
569 "Eeprom VER: %d, REV: %d\n",
570 ah->eep_ops->get_eeprom_ver(ah),
571 ah->eep_ops->get_eeprom_rev(ah));
572
573 if (!AR_SREV_9280_10_OR_LATER(ah)) {
574 ecode = ath9k_hw_rf_alloc_ext_banks(ah);
575 if (ecode) {
576 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
577 "Failed allocating banks for "
578 "external radio\n");
579 return ecode;
580 }
581 }
582
583 if (!AR_SREV_9100(ah)) {
584 ath9k_hw_ani_setup(ah);
585 ath9k_hw_ani_init(ah);
586 }
587
588 return 0;
589 }
590
591 static bool ath9k_hw_devid_supported(u16 devid)
592 {
593 switch (devid) {
594 case AR5416_DEVID_PCI:
595 case AR5416_DEVID_PCIE:
596 case AR5416_AR9100_DEVID:
597 case AR9160_DEVID_PCI:
598 case AR9280_DEVID_PCI:
599 case AR9280_DEVID_PCIE:
600 case AR9285_DEVID_PCIE:
601 case AR5416_DEVID_AR9287_PCI:
602 case AR5416_DEVID_AR9287_PCIE:
603 case AR9271_USB:
604 return true;
605 default:
606 break;
607 }
608 return false;
609 }
610
611 static bool ath9k_hw_macversion_supported(u32 macversion)
612 {
613 switch (macversion) {
614 case AR_SREV_VERSION_5416_PCI:
615 case AR_SREV_VERSION_5416_PCIE:
616 case AR_SREV_VERSION_9160:
617 case AR_SREV_VERSION_9100:
618 case AR_SREV_VERSION_9280:
619 case AR_SREV_VERSION_9285:
620 case AR_SREV_VERSION_9287:
621 case AR_SREV_VERSION_9271:
622 return true;
623 default:
624 break;
625 }
626 return false;
627 }
628
629 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
630 {
631 if (AR_SREV_9160_10_OR_LATER(ah)) {
632 if (AR_SREV_9280_10_OR_LATER(ah)) {
633 ah->iq_caldata.calData = &iq_cal_single_sample;
634 ah->adcgain_caldata.calData =
635 &adc_gain_cal_single_sample;
636 ah->adcdc_caldata.calData =
637 &adc_dc_cal_single_sample;
638 ah->adcdc_calinitdata.calData =
639 &adc_init_dc_cal;
640 } else {
641 ah->iq_caldata.calData = &iq_cal_multi_sample;
642 ah->adcgain_caldata.calData =
643 &adc_gain_cal_multi_sample;
644 ah->adcdc_caldata.calData =
645 &adc_dc_cal_multi_sample;
646 ah->adcdc_calinitdata.calData =
647 &adc_init_dc_cal;
648 }
649 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
650 }
651 }
652
653 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
654 {
655 if (AR_SREV_9271(ah)) {
656 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271,
657 ARRAY_SIZE(ar9271Modes_9271), 6);
658 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271,
659 ARRAY_SIZE(ar9271Common_9271), 2);
660 INIT_INI_ARRAY(&ah->iniModes_9271_1_0_only,
661 ar9271Modes_9271_1_0_only,
662 ARRAY_SIZE(ar9271Modes_9271_1_0_only), 6);
663 return;
664 }
665
666 if (AR_SREV_9287_11_OR_LATER(ah)) {
667 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
668 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
669 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
670 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
671 if (ah->config.pcie_clock_req)
672 INIT_INI_ARRAY(&ah->iniPcieSerdes,
673 ar9287PciePhy_clkreq_off_L1_9287_1_1,
674 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
675 else
676 INIT_INI_ARRAY(&ah->iniPcieSerdes,
677 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
678 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
679 2);
680 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
681 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
682 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
683 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
684 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
685
686 if (ah->config.pcie_clock_req)
687 INIT_INI_ARRAY(&ah->iniPcieSerdes,
688 ar9287PciePhy_clkreq_off_L1_9287_1_0,
689 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
690 else
691 INIT_INI_ARRAY(&ah->iniPcieSerdes,
692 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
693 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
694 2);
695 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
696
697
698 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
699 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
700 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
701 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
702
703 if (ah->config.pcie_clock_req) {
704 INIT_INI_ARRAY(&ah->iniPcieSerdes,
705 ar9285PciePhy_clkreq_off_L1_9285_1_2,
706 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
707 } else {
708 INIT_INI_ARRAY(&ah->iniPcieSerdes,
709 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
710 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
711 2);
712 }
713 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
714 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
715 ARRAY_SIZE(ar9285Modes_9285), 6);
716 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
717 ARRAY_SIZE(ar9285Common_9285), 2);
718
719 if (ah->config.pcie_clock_req) {
720 INIT_INI_ARRAY(&ah->iniPcieSerdes,
721 ar9285PciePhy_clkreq_off_L1_9285,
722 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
723 } else {
724 INIT_INI_ARRAY(&ah->iniPcieSerdes,
725 ar9285PciePhy_clkreq_always_on_L1_9285,
726 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
727 }
728 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
729 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
730 ARRAY_SIZE(ar9280Modes_9280_2), 6);
731 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
732 ARRAY_SIZE(ar9280Common_9280_2), 2);
733
734 if (ah->config.pcie_clock_req) {
735 INIT_INI_ARRAY(&ah->iniPcieSerdes,
736 ar9280PciePhy_clkreq_off_L1_9280,
737 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280), 2);
738 } else {
739 INIT_INI_ARRAY(&ah->iniPcieSerdes,
740 ar9280PciePhy_clkreq_always_on_L1_9280,
741 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
742 }
743 INIT_INI_ARRAY(&ah->iniModesAdditional,
744 ar9280Modes_fast_clock_9280_2,
745 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
746 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
747 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
748 ARRAY_SIZE(ar9280Modes_9280), 6);
749 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
750 ARRAY_SIZE(ar9280Common_9280), 2);
751 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
752 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
753 ARRAY_SIZE(ar5416Modes_9160), 6);
754 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
755 ARRAY_SIZE(ar5416Common_9160), 2);
756 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
757 ARRAY_SIZE(ar5416Bank0_9160), 2);
758 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
759 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
760 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
761 ARRAY_SIZE(ar5416Bank1_9160), 2);
762 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
763 ARRAY_SIZE(ar5416Bank2_9160), 2);
764 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
765 ARRAY_SIZE(ar5416Bank3_9160), 3);
766 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
767 ARRAY_SIZE(ar5416Bank6_9160), 3);
768 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
769 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
770 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
771 ARRAY_SIZE(ar5416Bank7_9160), 2);
772 if (AR_SREV_9160_11(ah)) {
773 INIT_INI_ARRAY(&ah->iniAddac,
774 ar5416Addac_91601_1,
775 ARRAY_SIZE(ar5416Addac_91601_1), 2);
776 } else {
777 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
778 ARRAY_SIZE(ar5416Addac_9160), 2);
779 }
780 } else if (AR_SREV_9100_OR_LATER(ah)) {
781 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
782 ARRAY_SIZE(ar5416Modes_9100), 6);
783 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
784 ARRAY_SIZE(ar5416Common_9100), 2);
785 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
786 ARRAY_SIZE(ar5416Bank0_9100), 2);
787 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
788 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
789 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
790 ARRAY_SIZE(ar5416Bank1_9100), 2);
791 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
792 ARRAY_SIZE(ar5416Bank2_9100), 2);
793 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
794 ARRAY_SIZE(ar5416Bank3_9100), 3);
795 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
796 ARRAY_SIZE(ar5416Bank6_9100), 3);
797 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
798 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
799 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
800 ARRAY_SIZE(ar5416Bank7_9100), 2);
801 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
802 ARRAY_SIZE(ar5416Addac_9100), 2);
803 } else {
804 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
805 ARRAY_SIZE(ar5416Modes), 6);
806 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
807 ARRAY_SIZE(ar5416Common), 2);
808 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
809 ARRAY_SIZE(ar5416Bank0), 2);
810 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
811 ARRAY_SIZE(ar5416BB_RfGain), 3);
812 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
813 ARRAY_SIZE(ar5416Bank1), 2);
814 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
815 ARRAY_SIZE(ar5416Bank2), 2);
816 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
817 ARRAY_SIZE(ar5416Bank3), 3);
818 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
819 ARRAY_SIZE(ar5416Bank6), 3);
820 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
821 ARRAY_SIZE(ar5416Bank6TPC), 3);
822 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
823 ARRAY_SIZE(ar5416Bank7), 2);
824 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
825 ARRAY_SIZE(ar5416Addac), 2);
826 }
827 }
828
829 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
830 {
831 if (AR_SREV_9287_11_OR_LATER(ah))
832 INIT_INI_ARRAY(&ah->iniModesRxGain,
833 ar9287Modes_rx_gain_9287_1_1,
834 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
835 else if (AR_SREV_9287_10(ah))
836 INIT_INI_ARRAY(&ah->iniModesRxGain,
837 ar9287Modes_rx_gain_9287_1_0,
838 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
839 else if (AR_SREV_9280_20(ah))
840 ath9k_hw_init_rxgain_ini(ah);
841
842 if (AR_SREV_9287_11_OR_LATER(ah)) {
843 INIT_INI_ARRAY(&ah->iniModesTxGain,
844 ar9287Modes_tx_gain_9287_1_1,
845 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
846 } else if (AR_SREV_9287_10(ah)) {
847 INIT_INI_ARRAY(&ah->iniModesTxGain,
848 ar9287Modes_tx_gain_9287_1_0,
849 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
850 } else if (AR_SREV_9280_20(ah)) {
851 ath9k_hw_init_txgain_ini(ah);
852 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
853 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
854
855 /* txgain table */
856 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
857 INIT_INI_ARRAY(&ah->iniModesTxGain,
858 ar9285Modes_high_power_tx_gain_9285_1_2,
859 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
860 } else {
861 INIT_INI_ARRAY(&ah->iniModesTxGain,
862 ar9285Modes_original_tx_gain_9285_1_2,
863 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
864 }
865
866 }
867 }
868
869 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
870 {
871 u32 i, j;
872
873 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
874 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
875
876 /* EEPROM Fixup */
877 for (i = 0; i < ah->iniModes.ia_rows; i++) {
878 u32 reg = INI_RA(&ah->iniModes, i, 0);
879
880 for (j = 1; j < ah->iniModes.ia_columns; j++) {
881 u32 val = INI_RA(&ah->iniModes, i, j);
882
883 INI_RA(&ah->iniModes, i, j) =
884 ath9k_hw_ini_fixup(ah,
885 &ah->eeprom.def,
886 reg, val);
887 }
888 }
889 }
890 }
891
892 int ath9k_hw_init(struct ath_hw *ah)
893 {
894 struct ath_common *common = ath9k_hw_common(ah);
895 int r = 0;
896
897 if (!ath9k_hw_devid_supported(ah->hw_version.devid)) {
898 ath_print(common, ATH_DBG_FATAL,
899 "Unsupported device ID: 0x%0x\n",
900 ah->hw_version.devid);
901 return -EOPNOTSUPP;
902 }
903
904 ath9k_hw_init_defaults(ah);
905 ath9k_hw_init_config(ah);
906
907 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
908 ath_print(common, ATH_DBG_FATAL,
909 "Couldn't reset chip\n");
910 return -EIO;
911 }
912
913 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
914 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
915 return -EIO;
916 }
917
918 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
919 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
920 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
921 ah->config.serialize_regmode =
922 SER_REG_MODE_ON;
923 } else {
924 ah->config.serialize_regmode =
925 SER_REG_MODE_OFF;
926 }
927 }
928
929 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
930 ah->config.serialize_regmode);
931
932 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
933 ath_print(common, ATH_DBG_FATAL,
934 "Mac Chip Rev 0x%02x.%x is not supported by "
935 "this driver\n", ah->hw_version.macVersion,
936 ah->hw_version.macRev);
937 return -EOPNOTSUPP;
938 }
939
940 if (AR_SREV_9100(ah)) {
941 ah->iq_caldata.calData = &iq_cal_multi_sample;
942 ah->supp_cals = IQ_MISMATCH_CAL;
943 ah->is_pciexpress = false;
944 }
945
946 if (AR_SREV_9271(ah))
947 ah->is_pciexpress = false;
948
949 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
950
951 ath9k_hw_init_cal_settings(ah);
952
953 ah->ani_function = ATH9K_ANI_ALL;
954 if (AR_SREV_9280_10_OR_LATER(ah))
955 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
956
957 ath9k_hw_init_mode_regs(ah);
958
959 if (ah->is_pciexpress)
960 ath9k_hw_configpcipowersave(ah, 0, 0);
961 else
962 ath9k_hw_disablepcie(ah);
963
964 /* Support for Japan ch.14 (2484) spread */
965 if (AR_SREV_9287_11_OR_LATER(ah)) {
966 INIT_INI_ARRAY(&ah->iniCckfirNormal,
967 ar9287Common_normal_cck_fir_coeff_92871_1,
968 ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
969 INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
970 ar9287Common_japan_2484_cck_fir_coeff_92871_1,
971 ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);
972 }
973
974 r = ath9k_hw_post_init(ah);
975 if (r)
976 return r;
977
978 ath9k_hw_init_mode_gain_regs(ah);
979 ath9k_hw_fill_cap_info(ah);
980 ath9k_hw_init_11a_eeprom_fix(ah);
981
982 r = ath9k_hw_init_macaddr(ah);
983 if (r) {
984 ath_print(common, ATH_DBG_FATAL,
985 "Failed to initialize MAC address\n");
986 return r;
987 }
988
989 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
990 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
991 else
992 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
993
994 ath9k_init_nfcal_hist_buffer(ah);
995
996 common->state = ATH_HW_INITIALIZED;
997
998 return 0;
999 }
EXPORT_SYMBOL(ath9k_hw_init);
1000
1001 static void ath9k_hw_init_bb(struct ath_hw *ah,
1002 struct ath9k_channel *chan)
1003 {
1004 u32 synthDelay;
1005
1006 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1007 if (IS_CHAN_B(chan))
1008 synthDelay = (4 * synthDelay) / 22;
1009 else
1010 synthDelay /= 10;
1011
1012 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1013
1014 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1015 }
1016
1017 static void ath9k_hw_init_qos(struct ath_hw *ah)
1018 {
1019 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1020 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1021
1022 REG_WRITE(ah, AR_QOS_NO_ACK,
1023 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1024 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1025 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1026
1027 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1028 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1029 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1030 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1031 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1032 }
1033
1034 static void ath9k_hw_change_target_baud(struct ath_hw *ah, u32 freq, u32 baud)
1035 {
1036 u32 lcr;
1037 u32 baud_divider = freq * 1000 * 1000 / 16 / baud;
1038
1039 lcr = REG_READ(ah, 0x5100c);
1040 lcr |= 0x80;
1041
1042 REG_WRITE(ah, 0x5100c, lcr);
1043 REG_WRITE(ah, 0x51004, (baud_divider >> 8));
1044 REG_WRITE(ah, 0x51000, (baud_divider & 0xff));
1045
1046 lcr &= ~0x80;
1047 REG_WRITE(ah, 0x5100c, lcr);
1048 }
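/*
 * Numeric sketch (assuming AR9271_CORE_CLOCK is 117 MHz and
 * AR9271_TARGET_BAUD_RATE is 19200): baud_divider =
 * 117000000 / 16 / 19200 = 380. The sequence above follows the usual
 * 16550-style divisor latch access: set bit 7 of the LCR (DLAB), write
 * the divisor high/low bytes, then clear DLAB again.
 */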
1049
1050 static void ath9k_hw_init_pll(struct ath_hw *ah,
1051 struct ath9k_channel *chan)
1052 {
1053 u32 pll;
1054
1055 if (AR_SREV_9100(ah)) {
1056 if (chan && IS_CHAN_5GHZ(chan))
1057 pll = 0x1450;
1058 else
1059 pll = 0x1458;
1060 } else {
1061 if (AR_SREV_9280_10_OR_LATER(ah)) {
1062 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1063
1064 if (chan && IS_CHAN_HALF_RATE(chan))
1065 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1066 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1067 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1068
1069 if (chan && IS_CHAN_5GHZ(chan)) {
1070 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1071
1072
1073 if (AR_SREV_9280_20(ah)) {
1074 if (((chan->channel % 20) == 0)
1075 || ((chan->channel % 10) == 0))
1076 pll = 0x2850;
1077 else
1078 pll = 0x142c;
1079 }
1080 } else {
1081 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1082 }
1083
1084 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1085
1086 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1087
1088 if (chan && IS_CHAN_HALF_RATE(chan))
1089 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1090 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1091 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1092
1093 if (chan && IS_CHAN_5GHZ(chan))
1094 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1095 else
1096 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1097 } else {
1098 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1099
1100 if (chan && IS_CHAN_HALF_RATE(chan))
1101 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1102 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1103 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1104
1105 if (chan && IS_CHAN_5GHZ(chan))
1106 pll |= SM(0xa, AR_RTC_PLL_DIV);
1107 else
1108 pll |= SM(0xb, AR_RTC_PLL_DIV);
1109 }
1110 }
1111 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1112
1113 /* Switch the core clock for ar9271 to 117 MHz */
1114 if (AR_SREV_9271(ah)) {
1115 if ((pll == 0x142c) || (pll == 0x2850)) {
1116 udelay(500);
1117 /* set CLKOBS to output AHB clock */
1118 REG_WRITE(ah, 0x7020, 0xe);
1119 /*
1120 * 0x304: 117 MHz, ahb_ratio: 1x1
1121 * 0x306: 40 MHz, ahb_ratio: 1x1
1122 */
1123 REG_WRITE(ah, 0x50040, 0x304);
1124 /*
1125 * adjust the baud divisor to keep the targeted baud rate
1126 * at the new core clock.
1127 */
1128 ath9k_hw_change_target_baud(ah, AR9271_CORE_CLOCK,
1129 AR9271_TARGET_BAUD_RATE);
1130 }
1131 }
1132
1133 udelay(RTC_PLL_SETTLE_DELAY);
1134
1135 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1136 }
1137
1138 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1139 {
1140 int rx_chainmask, tx_chainmask;
1141
1142 rx_chainmask = ah->rxchainmask;
1143 tx_chainmask = ah->txchainmask;
1144
1145 switch (rx_chainmask) {
1146 case 0x5:
1147 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1148 AR_PHY_SWAP_ALT_CHAIN);
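/* fall through */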
1149 case 0x3:
1150 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1151 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1152 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1153 break;
1154 }
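/* fall through */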
1155 case 0x1:
1156 case 0x2:
1157 case 0x7:
1158 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1159 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1160 break;
1161 default:
1162 break;
1163 }
1164
1165 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1166 if (tx_chainmask == 0x5) {
1167 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1168 AR_PHY_SWAP_ALT_CHAIN);
1169 }
1170 if (AR_SREV_9100(ah))
1171 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1172 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1173 }
1174
1175 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1176 enum nl80211_iftype opmode)
1177 {
1178 ah->mask_reg = AR_IMR_TXERR |
1179 AR_IMR_TXURN |
1180 AR_IMR_RXERR |
1181 AR_IMR_RXORN |
1182 AR_IMR_BCNMISC;
1183
1184 if (ah->config.intr_mitigation)
1185 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1186 else
1187 ah->mask_reg |= AR_IMR_RXOK;
1188
1189 ah->mask_reg |= AR_IMR_TXOK;
1190
1191 if (opmode == NL80211_IFTYPE_AP)
1192 ah->mask_reg |= AR_IMR_MIB;
1193
1194 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1195 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1196
1197 if (!AR_SREV_9100(ah)) {
1198 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1199 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1200 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1201 }
1202 }
1203
1204 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1205 {
1206 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1207 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1208 "bad ack timeout %u\n", us);
1209 ah->acktimeout = (u32) -1;
1210 return false;
1211 } else {
1212 REG_RMW_FIELD(ah, AR_TIME_OUT,
1213 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1214 ah->acktimeout = us;
1215 return true;
1216 }
1217 }
1218
1219 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1220 {
1221 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1222 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1223 "bad cts timeout %u\n", us);
1224 ah->ctstimeout = (u32) -1;
1225 return false;
1226 } else {
1227 REG_RMW_FIELD(ah, AR_TIME_OUT,
1228 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1229 ah->ctstimeout = us;
1230 return true;
1231 }
1232 }
1233
1234 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1235 {
1236 if (tu > 0xFFFF) {
1237 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1238 "bad global tx timeout %u\n", tu);
1239 ah->globaltxtimeout = (u32) -1;
1240 return false;
1241 } else {
1242 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1243 ah->globaltxtimeout = tu;
1244 return true;
1245 }
1246 }
1247
1248 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1249 {
1250 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1251 ah->misc_mode);
1252
1253 if (ah->misc_mode != 0)
1254 REG_WRITE(ah, AR_PCU_MISC,
1255 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1256 if (ah->slottime != (u32) -1)
1257 ath9k_hw_setslottime(ah, ah->slottime);
1258 if (ah->acktimeout != (u32) -1)
1259 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1260 if (ah->ctstimeout != (u32) -1)
1261 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1262 if (ah->globaltxtimeout != (u32) -1)
1263 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1264 }
1265
1266 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1267 {
1268 return vendorid == ATHEROS_VENDOR_ID ?
1269 ath9k_hw_devname(devid) : NULL;
1270 }
1271
1272 void ath9k_hw_detach(struct ath_hw *ah)
1273 {
1274 struct ath_common *common = ath9k_hw_common(ah);
1275
1276 if (common->state <= ATH_HW_INITIALIZED)
1277 goto free_hw;
1278
1279 if (!AR_SREV_9100(ah))
1280 ath9k_hw_ani_disable(ah);
1281
1282 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1283
1284 free_hw:
1285 ath9k_hw_rf_free(ah);
1286 kfree(ah);
1287 ah = NULL;
1288 }
1289 EXPORT_SYMBOL(ath9k_hw_detach);
1290
1291 /*******/
1292 /* INI */
1293 /*******/
1294
1295 static void ath9k_hw_override_ini(struct ath_hw *ah,
1296 struct ath9k_channel *chan)
1297 {
1298 u32 val;
1299
1300 if (AR_SREV_9271(ah)) {
1301 /*
1302 * Enable spectral scan as a workaround for stuck-beacon
1303 * issues on AR9271 1.0. The stuck-beacon issue is not seen
1304 * on AR9271 1.1.
1305 */
1306 if (AR_SREV_9271_10(ah)) {
1307 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) |
1308 AR_PHY_SPECTRAL_SCAN_ENABLE;
1309 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1310 } else if (AR_SREV_9271_11(ah))
1312 /*
1313 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1314 * present on AR9271 1.1
1315 */
1316 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1317 return;
1318 }
1319
1320 /*
1321 * Set the RX_ABORT and RX_DIS bits, and clear them only after
1322 * RXE is set for MAC. This prevents frames with corrupted
1323 * descriptor status.
1324 */
1325 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1326
1327 if (AR_SREV_9280_10_OR_LATER(ah)) {
1328 val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1329 (~AR_PCU_MISC_MODE2_HWWAR1);
1330
1331 if (AR_SREV_9287_10_OR_LATER(ah))
1332 val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1333
1334 REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1335 }
1336
1337 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1338 AR_SREV_9280_10_OR_LATER(ah))
1339 return;
1340 /*
1341 * Disable BB clock gating
1342 * Necessary to avoid issues on AR5416 2.0
1343 */
1344 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1345 }
1346
1347 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1348 struct ar5416_eeprom_def *pEepData,
1349 u32 reg, u32 value)
1350 {
1351 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1352 struct ath_common *common = ath9k_hw_common(ah);
1353
1354 switch (ah->hw_version.devid) {
1355 case AR9280_DEVID_PCI:
1356 if (reg == 0x7894) {
1357 ath_print(common, ATH_DBG_EEPROM,
1358 "ini VAL: %x EEPROM: %x\n", value,
1359 (pBase->version & 0xff));
1360
1361 if ((pBase->version & 0xff) > 0x0a) {
1362 ath_print(common, ATH_DBG_EEPROM,
1363 "PWDCLKIND: %d\n",
1364 pBase->pwdclkind);
1365 value &= ~AR_AN_TOP2_PWDCLKIND;
1366 value |= AR_AN_TOP2_PWDCLKIND &
1367 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1368 } else {
1369 ath_print(common, ATH_DBG_EEPROM,
1370 "PWDCLKIND Earlier Rev\n");
1371 }
1372
1373 ath_print(common, ATH_DBG_EEPROM,
1374 "final ini VAL: %x\n", value);
1375 }
1376 break;
1377 }
1378
1379 return value;
1380 }
1381
1382 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1383 struct ar5416_eeprom_def *pEepData,
1384 u32 reg, u32 value)
1385 {
1386 if (ah->eep_map == EEP_MAP_4KBITS)
1387 return value;
1388 else
1389 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1390 }
1391
1392 static void ath9k_olc_init(struct ath_hw *ah)
1393 {
1394 u32 i;
1395
1396 if (OLC_FOR_AR9287_10_LATER) {
1397 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1398 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1399 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1400 AR9287_AN_TXPC0_TXPCMODE,
1401 AR9287_AN_TXPC0_TXPCMODE_S,
1402 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1403 udelay(100);
1404 } else {
1405 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1406 ah->originalGain[i] =
1407 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1408 AR_PHY_TX_GAIN);
1409 ah->PDADCdelta = 0;
1410 }
1411 }
1412
1413 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1414 struct ath9k_channel *chan)
1415 {
1416 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1417
1418 if (IS_CHAN_B(chan))
1419 ctl |= CTL_11B;
1420 else if (IS_CHAN_G(chan))
1421 ctl |= CTL_11G;
1422 else
1423 ctl |= CTL_11A;
1424
1425 return ctl;
1426 }
1427
1428 static int ath9k_hw_process_ini(struct ath_hw *ah,
1429 struct ath9k_channel *chan)
1430 {
1431 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1432 int i, regWrites = 0;
1433 struct ieee80211_channel *channel = chan->chan;
1434 u32 modesIndex, freqIndex;
1435
1436 switch (chan->chanmode) {
1437 case CHANNEL_A:
1438 case CHANNEL_A_HT20:
1439 modesIndex = 1;
1440 freqIndex = 1;
1441 break;
1442 case CHANNEL_A_HT40PLUS:
1443 case CHANNEL_A_HT40MINUS:
1444 modesIndex = 2;
1445 freqIndex = 1;
1446 break;
1447 case CHANNEL_G:
1448 case CHANNEL_G_HT20:
1449 case CHANNEL_B:
1450 modesIndex = 4;
1451 freqIndex = 2;
1452 break;
1453 case CHANNEL_G_HT40PLUS:
1454 case CHANNEL_G_HT40MINUS:
1455 modesIndex = 3;
1456 freqIndex = 2;
1457 break;
1458
1459 default:
1460 return -EINVAL;
1461 }
1462
1463 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1464 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1465 ah->eep_ops->set_addac(ah, chan);
1466
1467 if (AR_SREV_5416_22_OR_LATER(ah)) {
1468 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1469 } else {
1470 struct ar5416IniArray temp;
1471 u32 addacSize =
1472 sizeof(u32) * ah->iniAddac.ia_rows *
1473 ah->iniAddac.ia_columns;
1474
1475 memcpy(ah->addac5416_21,
1476 ah->iniAddac.ia_array, addacSize);
1477
1478 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1479
1480 temp.ia_array = ah->addac5416_21;
1481 temp.ia_columns = ah->iniAddac.ia_columns;
1482 temp.ia_rows = ah->iniAddac.ia_rows;
1483 REG_WRITE_ARRAY(&temp, 1, regWrites);
1484 }
1485
1486 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1487
1488 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1489 u32 reg = INI_RA(&ah->iniModes, i, 0);
1490 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1491
1492 REG_WRITE(ah, reg, val);
1493
1494 if (reg >= 0x7800 && reg < 0x78a0
1495 && ah->config.analog_shiftreg) {
1496 udelay(100);
1497 }
1498
1499 DO_DELAY(regWrites);
1500 }
1501
1502 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1503 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1504
1505 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1506 AR_SREV_9287_10_OR_LATER(ah))
1507 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1508
1509 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1510 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1511 u32 val = INI_RA(&ah->iniCommon, i, 1);
1512
1513 REG_WRITE(ah, reg, val);
1514
1515 if (reg >= 0x7800 && reg < 0x78a0
1516 && ah->config.analog_shiftreg) {
1517 udelay(100);
1518 }
1519
1520 DO_DELAY(regWrites);
1521 }
1522
1523 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1524
1525 if (AR_SREV_9271_10(ah))
1526 REG_WRITE_ARRAY(&ah->iniModes_9271_1_0_only,
1527 modesIndex, regWrites);
1528
1529 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1530 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1531 regWrites);
1532 }
1533
1534 ath9k_hw_override_ini(ah, chan);
1535 ath9k_hw_set_regs(ah, chan);
1536 ath9k_hw_init_chain_masks(ah);
1537
1538 if (OLC_FOR_AR9280_20_LATER)
1539 ath9k_olc_init(ah);
1540
1541 ah->eep_ops->set_txpower(ah, chan,
1542 ath9k_regd_get_ctl(regulatory, chan),
1543 channel->max_antenna_gain * 2,
1544 channel->max_power * 2,
1545 min((u32) MAX_RATE_POWER,
1546 (u32) regulatory->power_limit));
1547
1548 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1549 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
1550 "ar5416SetRfRegs failed\n");
1551 return -EIO;
1552 }
1553
1554 return 0;
1555 }
1556
1557 /****************************************/
1558 /* Reset and Channel Switching Routines */
1559 /****************************************/
1560
1561 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1562 {
1563 u32 rfMode = 0;
1564
1565 if (chan == NULL)
1566 return;
1567
1568 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1569 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1570
1571 if (!AR_SREV_9280_10_OR_LATER(ah))
1572 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1573 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1574
1575 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1576 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1577
1578 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1579 }
1580
1581 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1582 {
1583 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1584 }
1585
1586 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1587 {
1588 u32 regval;
1589
1590 /*
1591 * set AHB_MODE not to do cacheline prefetches
1592 */
1593 regval = REG_READ(ah, AR_AHB_MODE);
1594 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1595
1596 /*
1597 * let mac dma reads be in 128 byte chunks
1598 */
1599 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1600 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1601
1602 /*
1603 * Restore TX Trigger Level to its pre-reset value.
1604 * The initial value depends on whether aggregation is enabled, and is
1605 * adjusted whenever underruns are detected.
1606 */
1607 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1608
1609 /*
1610 * let mac dma writes be in 128 byte chunks
1611 */
1612 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1613 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1614
1615 /*
1616 * Setup receive FIFO threshold to hold off TX activities
1617 */
1618 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1619
1620 /*
1621 * reduce the number of usable entries in PCU TXBUF to avoid
1622 * wrap around issues.
1623 */
1624 if (AR_SREV_9285(ah)) {
1625 /* For AR9285 the number of FIFOs is reduced to half,
1626 * so set the usable TX buffer size to half as well to
1627 * avoid data/delimiter underruns.
1628 */
1629 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1630 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1631 } else if (!AR_SREV_9271(ah)) {
1632 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1633 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1634 }
1635 }
1636
1637 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1638 {
1639 u32 val;
1640
1641 val = REG_READ(ah, AR_STA_ID1);
1642 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1643 switch (opmode) {
1644 case NL80211_IFTYPE_AP:
1645 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1646 | AR_STA_ID1_KSRCH_MODE);
1647 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1648 break;
1649 case NL80211_IFTYPE_ADHOC:
1650 case NL80211_IFTYPE_MESH_POINT:
1651 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1652 | AR_STA_ID1_KSRCH_MODE);
1653 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1654 break;
1655 case NL80211_IFTYPE_STATION:
1656 case NL80211_IFTYPE_MONITOR:
1657 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1658 break;
1659 }
1660 }
1661
1662 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1663 u32 coef_scaled,
1664 u32 *coef_mantissa,
1665 u32 *coef_exponent)
1666 {
1667 u32 coef_exp, coef_man;
1668
1669 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1670 if ((coef_scaled >> coef_exp) & 0x1)
1671 break;
1672
1673 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1674
1675 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1676
1677 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1678 *coef_exponent = coef_exp - 16;
1679 }
1680
1681 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1682 struct ath9k_channel *chan)
1683 {
1684 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1685 u32 clockMhzScaled = 0x64000000;
1686 struct chan_centers centers;
1687
1688 if (IS_CHAN_HALF_RATE(chan))
1689 clockMhzScaled = clockMhzScaled >> 1;
1690 else if (IS_CHAN_QUARTER_RATE(chan))
1691 clockMhzScaled = clockMhzScaled >> 2;
1692
1693 ath9k_hw_get_channel_centers(ah, chan, &centers);
1694 coef_scaled = clockMhzScaled / centers.synth_center;
1695
1696 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1697 &ds_coef_exp);
1698
1699 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1700 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1701 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1702 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1703
1704 coef_scaled = (9 * coef_scaled) / 10;
1705
1706 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1707 &ds_coef_exp);
1708
1709 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1710 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1711 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1712 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
1713 }
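/*
 * Numeric sketch (assuming COEF_SCALE_S is 24): clockMhzScaled of
 * 0x64000000 is 100 << 24, so for a 2412 MHz synth center
 * coef_scaled = (100 << 24) / 2412 = 695572, which the helper above
 * splits into ds_coef_man = 21737 and ds_coef_exp = 3 for the first
 * (AR_PHY_TIMING3) write.
 */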
1714
1715 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1716 {
1717 u32 rst_flags;
1718 u32 tmpReg;
1719
1720 if (AR_SREV_9100(ah)) {
1721 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1722 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1723 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1724 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1725 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1726 }
1727
1728 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1729 AR_RTC_FORCE_WAKE_ON_INT);
1730
1731 if (AR_SREV_9100(ah)) {
1732 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1733 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1734 } else {
1735 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1736 if (tmpReg &
1737 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1738 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1739 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1740 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1741 } else {
1742 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1743 }
1744
1745 rst_flags = AR_RTC_RC_MAC_WARM;
1746 if (type == ATH9K_RESET_COLD)
1747 rst_flags |= AR_RTC_RC_MAC_COLD;
1748 }
1749
1750 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1751 udelay(50);
1752
1753 REG_WRITE(ah, AR_RTC_RC, 0);
1754 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1755 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1756 "RTC stuck in MAC reset\n");
1757 return false;
1758 }
1759
1760 if (!AR_SREV_9100(ah))
1761 REG_WRITE(ah, AR_RC, 0);
1762
1763 if (AR_SREV_9100(ah))
1764 udelay(50);
1765
1766 return true;
1767 }
1768
1769 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1770 {
1771 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1772 AR_RTC_FORCE_WAKE_ON_INT);
1773
1774 if (!AR_SREV_9100(ah))
1775 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1776
1777 REG_WRITE(ah, AR_RTC_RESET, 0);
1778 udelay(2);
1779
1780 if (!AR_SREV_9100(ah))
1781 REG_WRITE(ah, AR_RC, 0);
1782
1783 REG_WRITE(ah, AR_RTC_RESET, 1);
1784
1785 if (!ath9k_hw_wait(ah,
1786 AR_RTC_STATUS,
1787 AR_RTC_STATUS_M,
1788 AR_RTC_STATUS_ON,
1789 AH_WAIT_TIMEOUT)) {
1790 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1791 "RTC not waking up\n");
1792 return false;
1793 }
1794
1795 ath9k_hw_read_revisions(ah);
1796
1797 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1798 }
1799
1800 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1801 {
1802 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1803 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1804
1805 switch (type) {
1806 case ATH9K_RESET_POWER_ON:
1807 return ath9k_hw_set_reset_power_on(ah);
1808 case ATH9K_RESET_WARM:
1809 case ATH9K_RESET_COLD:
1810 return ath9k_hw_set_reset(ah, type);
1811 default:
1812 return false;
1813 }
1814 }
1815
1816 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
1817 {
1818 u32 phymode;
1819 u32 enableDacFifo = 0;
1820
1821 if (AR_SREV_9285_10_OR_LATER(ah))
1822 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1823 AR_PHY_FC_ENABLE_DAC_FIFO);
1824
1825 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1826 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1827
1828 if (IS_CHAN_HT40(chan)) {
1829 phymode |= AR_PHY_FC_DYN2040_EN;
1830
1831 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1832 (chan->chanmode == CHANNEL_G_HT40PLUS))
1833 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1834
1835 }
1836 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1837
1838 ath9k_hw_set11nmac2040(ah);
1839
1840 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1841 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1842 }
1843
1844 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1845 struct ath9k_channel *chan)
1846 {
1847 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1848 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1849 return false;
1850 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1851 return false;
1852
1853 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1854 return false;
1855
1856 ah->chip_fullsleep = false;
1857 ath9k_hw_init_pll(ah, chan);
1858 ath9k_hw_set_rfmode(ah, chan);
1859
1860 return true;
1861 }
1862
1863 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1864 struct ath9k_channel *chan)
1865 {
1866 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1867 struct ath_common *common = ath9k_hw_common(ah);
1868 struct ieee80211_channel *channel = chan->chan;
1869 u32 synthDelay, qnum;
1870
1871 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1872 if (ath9k_hw_numtxpending(ah, qnum)) {
1873 ath_print(common, ATH_DBG_QUEUE,
1874 "Transmit frames pending on "
1875 "queue %d\n", qnum);
1876 return false;
1877 }
1878 }
1879
1880 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1881 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1882 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1883 ath_print(common, ATH_DBG_FATAL,
1884 "Could not kill baseband RX\n");
1885 return false;
1886 }
1887
1888 ath9k_hw_set_regs(ah, chan);
1889
1890 if (AR_SREV_9280_10_OR_LATER(ah)) {
1891 ath9k_hw_ar9280_set_channel(ah, chan);
1892 } else {
1893 if (!(ath9k_hw_set_channel(ah, chan))) {
1894 ath_print(common, ATH_DBG_FATAL,
1895 "Failed to set channel\n");
1896 return false;
1897 }
1898 }
1899
1900 ah->eep_ops->set_txpower(ah, chan,
1901 ath9k_regd_get_ctl(regulatory, chan),
1902 channel->max_antenna_gain * 2,
1903 channel->max_power * 2,
1904 min((u32) MAX_RATE_POWER,
1905 (u32) regulatory->power_limit));
1906
1907 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1908 if (IS_CHAN_B(chan))
1909 synthDelay = (4 * synthDelay) / 22;
1910 else
1911 synthDelay /= 10;
1912
1913 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1914
1915 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1916
1917 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1918 ath9k_hw_set_delta_slope(ah, chan);
1919
1920 if (AR_SREV_9280_10_OR_LATER(ah))
1921 ath9k_hw_9280_spur_mitigate(ah, chan);
1922 else
1923 ath9k_hw_spur_mitigate(ah, chan);
1924
1925 if (!chan->oneTimeCalsDone)
1926 chan->oneTimeCalsDone = true;
1927
1928 return true;
1929 }
1930
1931 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1932 {
1933 int bb_spur = AR_NO_SPUR;
1934 int freq;
1935 int bin, cur_bin;
1936 int bb_spur_off, spur_subchannel_sd;
1937 int spur_freq_sd;
1938 int spur_delta_phase;
1939 int denominator;
1940 int upper, lower, cur_vit_mask;
1941 int tmp, newVal;
1942 int i;
1943 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1944 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1945 };
1946 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1947 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1948 };
1949 int inc[4] = { 0, 100, 0, 0 };
1950 struct chan_centers centers;
1951
1952 int8_t mask_m[123];
1953 int8_t mask_p[123];
1954 int8_t mask_amt;
1955 int tmp_mask;
1956 int cur_bb_spur;
1957 bool is2GHz = IS_CHAN_2GHZ(chan);
1958
1959 memset(&mask_m, 0, sizeof(int8_t) * 123);
1960 memset(&mask_p, 0, sizeof(int8_t) * 123);
1961
1962 ath9k_hw_get_channel_centers(ah, chan, &centers);
1963 freq = centers.synth_center;
1964
1965 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1966 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1967 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1968
1969 if (is2GHz)
1970 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1971 else
1972 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1973
1974 if (AR_NO_SPUR == cur_bb_spur)
1975 break;
1976 cur_bb_spur = cur_bb_spur - freq;
1977
1978 if (IS_CHAN_HT40(chan)) {
1979 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1980 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1981 bb_spur = cur_bb_spur;
1982 break;
1983 }
1984 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1985 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1986 bb_spur = cur_bb_spur;
1987 break;
1988 }
1989 }
1990
1991 if (AR_NO_SPUR == bb_spur) {
1992 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1993 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1994 return;
1995 } else {
1996 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1997 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1998 }
1999
2000 bin = bb_spur * 320;
2001
2002 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2003
2004 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2005 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2006 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2007 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2008 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
2009
2010 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2011 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2012 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2013 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2014 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2015 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
2016
2017 if (IS_CHAN_HT40(chan)) {
2018 if (bb_spur < 0) {
2019 spur_subchannel_sd = 1;
2020 bb_spur_off = bb_spur + 10;
2021 } else {
2022 spur_subchannel_sd = 0;
2023 bb_spur_off = bb_spur - 10;
2024 }
2025 } else {
2026 spur_subchannel_sd = 0;
2027 bb_spur_off = bb_spur;
2028 }
2029
2030 if (IS_CHAN_HT40(chan))
2031 spur_delta_phase =
2032 ((bb_spur * 262144) /
2033 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2034 else
2035 spur_delta_phase =
2036 ((bb_spur * 524288) /
2037 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2038
2039 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
2040 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
2041
2042 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2043 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2044 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2045 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
2046
2047 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
2048 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
2049
2050 cur_bin = -6000;
2051 upper = bin + 100;
2052 lower = bin - 100;
2053
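	/*
	 * Walk the bin range in 100-unit steps and set a pilot/channel mask
	 * bit for every bin that falls within +/-100 of the spur bin.
	 */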
2054 for (i = 0; i < 4; i++) {
2055 int pilot_mask = 0;
2056 int chan_mask = 0;
2057 int bp = 0;
2058 for (bp = 0; bp < 30; bp++) {
2059 if ((cur_bin > lower) && (cur_bin < upper)) {
2060 pilot_mask = pilot_mask | 0x1 << bp;
2061 chan_mask = chan_mask | 0x1 << bp;
2062 }
2063 cur_bin += 100;
2064 }
2065 cur_bin += inc[i];
2066 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2067 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2068 }
2069
2070 cur_vit_mask = 6100;
2071 upper = bin + 120;
2072 lower = bin - 120;
2073
2074 for (i = 0; i < 123; i++) {
2075 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2076
2077 /* workaround for gcc bug #37014 */
2078 volatile int tmp_v = abs(cur_vit_mask - bin);
2079
2080 if (tmp_v < 75)
2081 mask_amt = 1;
2082 else
2083 mask_amt = 0;
2084 if (cur_vit_mask < 0)
2085 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2086 else
2087 mask_p[cur_vit_mask / 100] = mask_amt;
2088 }
2089 cur_vit_mask -= 100;
2090 }
2091
2092 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2093 | (mask_m[48] << 26) | (mask_m[49] << 24)
2094 | (mask_m[50] << 22) | (mask_m[51] << 20)
2095 | (mask_m[52] << 18) | (mask_m[53] << 16)
2096 | (mask_m[54] << 14) | (mask_m[55] << 12)
2097 | (mask_m[56] << 10) | (mask_m[57] << 8)
2098 | (mask_m[58] << 6) | (mask_m[59] << 4)
2099 | (mask_m[60] << 2) | (mask_m[61] << 0);
2100 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2101 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2102
2103 tmp_mask = (mask_m[31] << 28)
2104 | (mask_m[32] << 26) | (mask_m[33] << 24)
2105 | (mask_m[34] << 22) | (mask_m[35] << 20)
2106 | (mask_m[36] << 18) | (mask_m[37] << 16)
2107 | (mask_m[48] << 14) | (mask_m[39] << 12)
2108 | (mask_m[40] << 10) | (mask_m[41] << 8)
2109 | (mask_m[42] << 6) | (mask_m[43] << 4)
2110 | (mask_m[44] << 2) | (mask_m[45] << 0);
2111 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2112 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2113
2114 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2115 | (mask_m[18] << 26) | (mask_m[18] << 24)
2116 | (mask_m[20] << 22) | (mask_m[20] << 20)
2117 | (mask_m[22] << 18) | (mask_m[22] << 16)
2118 | (mask_m[24] << 14) | (mask_m[24] << 12)
2119 | (mask_m[25] << 10) | (mask_m[26] << 8)
2120 | (mask_m[27] << 6) | (mask_m[28] << 4)
2121 | (mask_m[29] << 2) | (mask_m[30] << 0);
2122 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2123 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2124
2125 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2126 | (mask_m[2] << 26) | (mask_m[3] << 24)
2127 | (mask_m[4] << 22) | (mask_m[5] << 20)
2128 | (mask_m[6] << 18) | (mask_m[7] << 16)
2129 | (mask_m[8] << 14) | (mask_m[9] << 12)
2130 | (mask_m[10] << 10) | (mask_m[11] << 8)
2131 | (mask_m[12] << 6) | (mask_m[13] << 4)
2132 | (mask_m[14] << 2) | (mask_m[15] << 0);
2133 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2134 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2135
2136 tmp_mask = (mask_p[15] << 28)
2137 | (mask_p[14] << 26) | (mask_p[13] << 24)
2138 | (mask_p[12] << 22) | (mask_p[11] << 20)
2139 | (mask_p[10] << 18) | (mask_p[9] << 16)
2140 | (mask_p[8] << 14) | (mask_p[7] << 12)
2141 | (mask_p[6] << 10) | (mask_p[5] << 8)
2142 | (mask_p[4] << 6) | (mask_p[3] << 4)
2143 | (mask_p[2] << 2) | (mask_p[1] << 0);
2144 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2145 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2146
2147 tmp_mask = (mask_p[30] << 28)
2148 | (mask_p[29] << 26) | (mask_p[28] << 24)
2149 | (mask_p[27] << 22) | (mask_p[26] << 20)
2150 | (mask_p[25] << 18) | (mask_p[24] << 16)
2151 | (mask_p[23] << 14) | (mask_p[22] << 12)
2152 | (mask_p[21] << 10) | (mask_p[20] << 8)
2153 | (mask_p[19] << 6) | (mask_p[18] << 4)
2154 | (mask_p[17] << 2) | (mask_p[16] << 0);
2155 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2156 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2157
2158 tmp_mask = (mask_p[45] << 28)
2159 | (mask_p[44] << 26) | (mask_p[43] << 24)
2160 | (mask_p[42] << 22) | (mask_p[41] << 20)
2161 | (mask_p[40] << 18) | (mask_p[39] << 16)
2162 | (mask_p[38] << 14) | (mask_p[37] << 12)
2163 | (mask_p[36] << 10) | (mask_p[35] << 8)
2164 | (mask_p[34] << 6) | (mask_p[33] << 4)
2165 | (mask_p[32] << 2) | (mask_p[31] << 0);
2166 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2167 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2168
2169 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2170 | (mask_p[59] << 26) | (mask_p[58] << 24)
2171 | (mask_p[57] << 22) | (mask_p[56] << 20)
2172 | (mask_p[55] << 18) | (mask_p[54] << 16)
2173 | (mask_p[53] << 14) | (mask_p[52] << 12)
2174 | (mask_p[51] << 10) | (mask_p[50] << 8)
2175 | (mask_p[49] << 6) | (mask_p[48] << 4)
2176 | (mask_p[47] << 2) | (mask_p[46] << 0);
2177 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2178 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2179 }
2180
2181 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2182 {
2183 int bb_spur = AR_NO_SPUR;
2184 int bin, cur_bin;
2185 int spur_freq_sd;
2186 int spur_delta_phase;
2187 int denominator;
2188 int upper, lower, cur_vit_mask;
2189 int tmp, new;
2190 int i;
2191 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2192 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2193 };
2194 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2195 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2196 };
2197 int inc[4] = { 0, 100, 0, 0 };
2198
2199 int8_t mask_m[123];
2200 int8_t mask_p[123];
2201 int8_t mask_amt;
2202 int tmp_mask;
2203 int cur_bb_spur;
2204 bool is2GHz = IS_CHAN_2GHZ(chan);
2205
2206 memset(&mask_m, 0, sizeof(int8_t) * 123);
2207 memset(&mask_p, 0, sizeof(int8_t) * 123);
2208
2209 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2210 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2211 if (AR_NO_SPUR == cur_bb_spur)
2212 break;
2213 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2214 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2215 bb_spur = cur_bb_spur;
2216 break;
2217 }
2218 }
2219
2220 if (AR_NO_SPUR == bb_spur)
2221 return;
2222
2223 bin = bb_spur * 32;
2224
2225 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2226 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2227 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2228 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2229 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2230
2231 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2232
2233 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2234 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2235 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2236 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2237 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2238 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2239
2240 spur_delta_phase = ((bb_spur * 524288) / 100) &
2241 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2242
2243 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2244 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2245
2246 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2247 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2248 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2249 REG_WRITE(ah, AR_PHY_TIMING11, new);
2250
2251 cur_bin = -6000;
2252 upper = bin + 100;
2253 lower = bin - 100;
2254
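	/*
	 * Walk the bin range in 100-unit steps and set a pilot/channel mask
	 * bit for every bin that falls within +/-100 of the spur bin.
	 */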
2255 for (i = 0; i < 4; i++) {
2256 int pilot_mask = 0;
2257 int chan_mask = 0;
2258 int bp = 0;
2259 for (bp = 0; bp < 30; bp++) {
2260 if ((cur_bin > lower) && (cur_bin < upper)) {
2261 pilot_mask = pilot_mask | 0x1 << bp;
2262 chan_mask = chan_mask | 0x1 << bp;
2263 }
2264 cur_bin += 100;
2265 }
2266 cur_bin += inc[i];
2267 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2268 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2269 }
2270
2271 cur_vit_mask = 6100;
2272 upper = bin + 120;
2273 lower = bin - 120;
2274
2275 for (i = 0; i < 123; i++) {
2276 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2277
2278 /* workaround for gcc bug #37014 */
2279 volatile int tmp_v = abs(cur_vit_mask - bin);
2280
2281 if (tmp_v < 75)
2282 mask_amt = 1;
2283 else
2284 mask_amt = 0;
2285 if (cur_vit_mask < 0)
2286 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2287 else
2288 mask_p[cur_vit_mask / 100] = mask_amt;
2289 }
2290 cur_vit_mask -= 100;
2291 }
2292
2293 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2294 | (mask_m[48] << 26) | (mask_m[49] << 24)
2295 | (mask_m[50] << 22) | (mask_m[51] << 20)
2296 | (mask_m[52] << 18) | (mask_m[53] << 16)
2297 | (mask_m[54] << 14) | (mask_m[55] << 12)
2298 | (mask_m[56] << 10) | (mask_m[57] << 8)
2299 | (mask_m[58] << 6) | (mask_m[59] << 4)
2300 | (mask_m[60] << 2) | (mask_m[61] << 0);
2301 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2302 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2303
2304 tmp_mask = (mask_m[31] << 28)
2305 | (mask_m[32] << 26) | (mask_m[33] << 24)
2306 | (mask_m[34] << 22) | (mask_m[35] << 20)
2307 | (mask_m[36] << 18) | (mask_m[37] << 16)
2308 | (mask_m[48] << 14) | (mask_m[39] << 12)
2309 | (mask_m[40] << 10) | (mask_m[41] << 8)
2310 | (mask_m[42] << 6) | (mask_m[43] << 4)
2311 | (mask_m[44] << 2) | (mask_m[45] << 0);
2312 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2313 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2314
2315 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2316 | (mask_m[18] << 26) | (mask_m[18] << 24)
2317 | (mask_m[20] << 22) | (mask_m[20] << 20)
2318 | (mask_m[22] << 18) | (mask_m[22] << 16)
2319 | (mask_m[24] << 14) | (mask_m[24] << 12)
2320 | (mask_m[25] << 10) | (mask_m[26] << 8)
2321 | (mask_m[27] << 6) | (mask_m[28] << 4)
2322 | (mask_m[29] << 2) | (mask_m[30] << 0);
2323 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2324 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2325
2326 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2327 | (mask_m[2] << 26) | (mask_m[3] << 24)
2328 | (mask_m[4] << 22) | (mask_m[5] << 20)
2329 | (mask_m[6] << 18) | (mask_m[7] << 16)
2330 | (mask_m[8] << 14) | (mask_m[9] << 12)
2331 | (mask_m[10] << 10) | (mask_m[11] << 8)
2332 | (mask_m[12] << 6) | (mask_m[13] << 4)
2333 | (mask_m[14] << 2) | (mask_m[15] << 0);
2334 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2335 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2336
2337 tmp_mask = (mask_p[15] << 28)
2338 | (mask_p[14] << 26) | (mask_p[13] << 24)
2339 | (mask_p[12] << 22) | (mask_p[11] << 20)
2340 | (mask_p[10] << 18) | (mask_p[9] << 16)
2341 | (mask_p[8] << 14) | (mask_p[7] << 12)
2342 | (mask_p[6] << 10) | (mask_p[5] << 8)
2343 | (mask_p[4] << 6) | (mask_p[3] << 4)
2344 | (mask_p[2] << 2) | (mask_p[1] << 0);
2345 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2346 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2347
2348 tmp_mask = (mask_p[30] << 28)
2349 | (mask_p[29] << 26) | (mask_p[28] << 24)
2350 | (mask_p[27] << 22) | (mask_p[26] << 20)
2351 | (mask_p[25] << 18) | (mask_p[24] << 16)
2352 | (mask_p[23] << 14) | (mask_p[22] << 12)
2353 | (mask_p[21] << 10) | (mask_p[20] << 8)
2354 | (mask_p[19] << 6) | (mask_p[18] << 4)
2355 | (mask_p[17] << 2) | (mask_p[16] << 0);
2356 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2357 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2358
2359 tmp_mask = (mask_p[45] << 28)
2360 | (mask_p[44] << 26) | (mask_p[43] << 24)
2361 | (mask_p[42] << 22) | (mask_p[41] << 20)
2362 | (mask_p[40] << 18) | (mask_p[39] << 16)
2363 | (mask_p[38] << 14) | (mask_p[37] << 12)
2364 | (mask_p[36] << 10) | (mask_p[35] << 8)
2365 | (mask_p[34] << 6) | (mask_p[33] << 4)
2366 | (mask_p[32] << 2) | (mask_p[31] << 0);
2367 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2368 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2369
2370 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2371 | (mask_p[59] << 26) | (mask_p[58] << 24)
2372 | (mask_p[57] << 22) | (mask_p[56] << 20)
2373 | (mask_p[55] << 18) | (mask_p[54] << 16)
2374 | (mask_p[53] << 14) | (mask_p[52] << 12)
2375 | (mask_p[51] << 10) | (mask_p[50] << 8)
2376 | (mask_p[49] << 6) | (mask_p[48] << 4)
2377 | (mask_p[47] << 2) | (mask_p[46] << 0);
2378 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2379 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2380 }
2381
2382 static void ath9k_enable_rfkill(struct ath_hw *ah)
2383 {
2384 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2385 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2386
2387 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2388 AR_GPIO_INPUT_MUX2_RFSILENT);
2389
2390 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2391 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2392 }
2393
2394 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2395 bool bChannelChange)
2396 {
2397 struct ath_common *common = ath9k_hw_common(ah);
2398 u32 saveLedState;
2399 struct ath9k_channel *curchan = ah->curchan;
2400 u32 saveDefAntenna;
2401 u32 macStaId1;
2402 u64 tsf = 0;
2403 int i, rx_chainmask, r;
2404
2405 ah->txchainmask = common->tx_chainmask;
2406 ah->rxchainmask = common->rx_chainmask;
2407
2408 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2409 return -EIO;
2410
2411 if (curchan && !ah->chip_fullsleep)
2412 ath9k_hw_getnf(ah, curchan);
2413
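	/*
	 * Fast channel change: if only the channel differs (same operating
	 * mode flags) and the hardware allows it, skip the full chip reset.
	 */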
2414 if (bChannelChange &&
2415 	    !ah->chip_fullsleep &&
2416 (ah->curchan != NULL) &&
2417 (chan->channel != ah->curchan->channel) &&
2418 ((chan->channelFlags & CHANNEL_ALL) ==
2419 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2420 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2421 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2422
2423 if (ath9k_hw_channel_change(ah, chan)) {
2424 ath9k_hw_loadnf(ah, ah->curchan);
2425 ath9k_hw_start_nfcal(ah);
2426 return 0;
2427 }
2428 }
2429
2430 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2431 if (saveDefAntenna == 0)
2432 saveDefAntenna = 1;
2433
2434 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2435
2436 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2437 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2438 tsf = ath9k_hw_gettsf64(ah);
2439
2440 saveLedState = REG_READ(ah, AR_CFG_LED) &
2441 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2442 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2443
2444 ath9k_hw_mark_phy_inactive(ah);
2445
2446 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2447 REG_WRITE(ah,
2448 AR9271_RESET_POWER_DOWN_CONTROL,
2449 AR9271_RADIO_RF_RST);
2450 udelay(50);
2451 }
2452
2453 if (!ath9k_hw_chip_reset(ah, chan)) {
2454 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
2455 return -EINVAL;
2456 }
2457
2458 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2459 ah->htc_reset_init = false;
2460 REG_WRITE(ah,
2461 AR9271_RESET_POWER_DOWN_CONTROL,
2462 AR9271_GATE_MAC_CTL);
2463 udelay(50);
2464 }
2465
2466 /* Restore TSF */
2467 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2468 ath9k_hw_settsf64(ah, tsf);
2469
2470 if (AR_SREV_9280_10_OR_LATER(ah))
2471 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2472
2473 if (AR_SREV_9287_12_OR_LATER(ah)) {
2474 /* Enable ASYNC FIFO */
2475 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2476 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2477 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2478 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2479 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2480 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2481 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2482 }
2483 r = ath9k_hw_process_ini(ah, chan);
2484 if (r)
2485 return r;
2486
2487 /* Setup MFP options for CCMP */
2488 if (AR_SREV_9280_20_OR_LATER(ah)) {
2489 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2490 * frames when constructing CCMP AAD. */
2491 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2492 0xc7ff);
2493 ah->sw_mgmt_crypto = false;
2494 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2495 /* Disable hardware crypto for management frames */
2496 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2497 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2498 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2499 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2500 ah->sw_mgmt_crypto = true;
2501 } else
2502 ah->sw_mgmt_crypto = true;
2503
2504 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2505 ath9k_hw_set_delta_slope(ah, chan);
2506
2507 if (AR_SREV_9280_10_OR_LATER(ah))
2508 ath9k_hw_9280_spur_mitigate(ah, chan);
2509 else
2510 ath9k_hw_spur_mitigate(ah, chan);
2511
2512 ah->eep_ops->set_board_values(ah, chan);
2513
2514 ath9k_hw_decrease_chain_power(ah, chan);
2515
2516 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
2517 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
2518 | macStaId1
2519 | AR_STA_ID1_RTS_USE_DEF
2520 | (ah->config.
2521 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2522 | ah->sta_id1_defaults);
2523 ath9k_hw_set_operating_mode(ah, ah->opmode);
2524
2525 ath_hw_setbssidmask(common);
2526
2527 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2528
2529 ath9k_hw_write_associd(ah);
2530
2531 REG_WRITE(ah, AR_ISR, ~0);
2532
2533 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2534
2535 if (AR_SREV_9280_10_OR_LATER(ah))
2536 ath9k_hw_ar9280_set_channel(ah, chan);
2537 else
2538 if (!(ath9k_hw_set_channel(ah, chan)))
2539 return -EIO;
2540
2541 for (i = 0; i < AR_NUM_DCU; i++)
2542 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2543
2544 ah->intr_txqs = 0;
2545 for (i = 0; i < ah->caps.total_queues; i++)
2546 ath9k_hw_resettxqueue(ah, i);
2547
2548 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2549 ath9k_hw_init_qos(ah);
2550
2551 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2552 ath9k_enable_rfkill(ah);
2553
2554 ath9k_hw_init_user_settings(ah);
2555
2556 if (AR_SREV_9287_12_OR_LATER(ah)) {
2557 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2558 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2559 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2560 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2561 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2562 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2563
2564 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2565 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2566
2567 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2568 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2569 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2570 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2571 }
2572 if (AR_SREV_9287_12_OR_LATER(ah)) {
2573 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2574 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2575 }
2576
2577 REG_WRITE(ah, AR_STA_ID1,
2578 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2579
2580 ath9k_hw_set_dma(ah);
2581
2582 REG_WRITE(ah, AR_OBS, 8);
2583
2584 if (ah->config.intr_mitigation) {
2585 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2586 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2587 }
2588
2589 ath9k_hw_init_bb(ah, chan);
2590
2591 if (!ath9k_hw_init_cal(ah, chan))
2592 return -EIO;
2593
2594 rx_chainmask = ah->rxchainmask;
2595 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2596 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2597 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2598 }
2599
2600 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2601
2602 /*
2603 	 * For big-endian systems, turn on swapping for descriptors
2604 */
2605 if (AR_SREV_9100(ah)) {
2606 u32 mask;
2607 mask = REG_READ(ah, AR_CFG);
2608 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2609 ath_print(common, ATH_DBG_RESET,
2610 "CFG Byte Swap Set 0x%x\n", mask);
2611 } else {
2612 mask =
2613 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2614 REG_WRITE(ah, AR_CFG, mask);
2615 ath_print(common, ATH_DBG_RESET,
2616 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2617 }
2618 } else {
2619 /* Configure AR9271 target WLAN */
2620 if (AR_SREV_9271(ah))
2621 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2622 #ifdef __BIG_ENDIAN
2623 else
2624 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2625 #endif
2626 }
2627
2628 if (ah->btcoex_hw.enabled)
2629 ath9k_hw_btcoex_enable(ah);
2630
2631 return 0;
2632 }
2633 EXPORT_SYMBOL(ath9k_hw_reset);
2634
2635 /************************/
2636 /* Key Cache Management */
2637 /************************/
2638
2639 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2640 {
2641 u32 keyType;
2642
2643 if (entry >= ah->caps.keycache_size) {
2644 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2645 			  "keycache entry %u out of range\n", entry);
2646 return false;
2647 }
2648
2649 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2650
2651 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2652 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2653 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2654 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2655 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2656 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2657 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2658 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2659
2660 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
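		/* The TKIP MIC key lives in a second key cache entry at offset +64 */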
2661 u16 micentry = entry + 64;
2662
2663 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2664 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2665 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2666 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2667
2668 }
2669
2670 return true;
2671 }
2672 EXPORT_SYMBOL(ath9k_hw_keyreset);
2673
2674 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2675 {
2676 u32 macHi, macLo;
2677
2678 if (entry >= ah->caps.keycache_size) {
2679 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2680 			  "keycache entry %u out of range\n", entry);
2681 return false;
2682 }
2683
2684 if (mac != NULL) {
2685 macHi = (mac[5] << 8) | mac[4];
2686 macLo = (mac[3] << 24) |
2687 (mac[2] << 16) |
2688 (mac[1] << 8) |
2689 mac[0];
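		/*
		 * The key cache MAC registers expect the address shifted
		 * right by one bit, with the low bit of the high half
		 * carried into the low word.
		 */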
2690 macLo >>= 1;
2691 macLo |= (macHi & 1) << 31;
2692 macHi >>= 1;
2693 } else {
2694 macLo = macHi = 0;
2695 }
2696 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2697 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2698
2699 return true;
2700 }
2701 EXPORT_SYMBOL(ath9k_hw_keysetmac);
2702
2703 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2704 const struct ath9k_keyval *k,
2705 const u8 *mac)
2706 {
2707 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2708 struct ath_common *common = ath9k_hw_common(ah);
2709 u32 key0, key1, key2, key3, key4;
2710 u32 keyType;
2711
2712 if (entry >= pCap->keycache_size) {
2713 ath_print(common, ATH_DBG_FATAL,
2714 "keycache entry %u out of range\n", entry);
2715 return false;
2716 }
2717
2718 switch (k->kv_type) {
2719 case ATH9K_CIPHER_AES_OCB:
2720 keyType = AR_KEYTABLE_TYPE_AES;
2721 break;
2722 case ATH9K_CIPHER_AES_CCM:
2723 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2724 ath_print(common, ATH_DBG_ANY,
2725 "AES-CCM not supported by mac rev 0x%x\n",
2726 ah->hw_version.macRev);
2727 return false;
2728 }
2729 keyType = AR_KEYTABLE_TYPE_CCM;
2730 break;
2731 case ATH9K_CIPHER_TKIP:
2732 keyType = AR_KEYTABLE_TYPE_TKIP;
2733 if (ATH9K_IS_MIC_ENABLED(ah)
2734 && entry + 64 >= pCap->keycache_size) {
2735 ath_print(common, ATH_DBG_ANY,
2736 "entry %u inappropriate for TKIP\n", entry);
2737 return false;
2738 }
2739 break;
2740 case ATH9K_CIPHER_WEP:
2741 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2742 ath_print(common, ATH_DBG_ANY,
2743 "WEP key length %u too small\n", k->kv_len);
2744 return false;
2745 }
2746 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2747 keyType = AR_KEYTABLE_TYPE_40;
2748 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2749 keyType = AR_KEYTABLE_TYPE_104;
2750 else
2751 keyType = AR_KEYTABLE_TYPE_128;
2752 break;
2753 case ATH9K_CIPHER_CLR:
2754 keyType = AR_KEYTABLE_TYPE_CLR;
2755 break;
2756 default:
2757 ath_print(common, ATH_DBG_FATAL,
2758 "cipher %u not supported\n", k->kv_type);
2759 return false;
2760 }
2761
2762 key0 = get_unaligned_le32(k->kv_val + 0);
2763 key1 = get_unaligned_le16(k->kv_val + 4);
2764 key2 = get_unaligned_le32(k->kv_val + 6);
2765 key3 = get_unaligned_le16(k->kv_val + 10);
2766 key4 = get_unaligned_le32(k->kv_val + 12);
2767 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2768 key4 &= 0xff;
2769
2770 /*
2771 * Note: Key cache registers access special memory area that requires
2772 * two 32-bit writes to actually update the values in the internal
2773 * memory. Consequently, the exact order and pairs used here must be
2774 * maintained.
2775 */
2776
2777 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2778 u16 micentry = entry + 64;
2779
2780 /*
2781 * Write inverted key[47:0] first to avoid Michael MIC errors
2782 * on frames that could be sent or received at the same time.
2783 * The correct key will be written in the end once everything
2784 * else is ready.
2785 */
2786 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2787 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2788
2789 /* Write key[95:48] */
2790 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2791 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2792
2793 /* Write key[127:96] and key type */
2794 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2795 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2796
2797 /* Write MAC address for the entry */
2798 (void) ath9k_hw_keysetmac(ah, entry, mac);
2799
2800 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2801 /*
2802 * TKIP uses two key cache entries:
2803 * Michael MIC TX/RX keys in the same key cache entry
2804 * (idx = main index + 64):
2805 * key0 [31:0] = RX key [31:0]
2806 * key1 [15:0] = TX key [31:16]
2807 * key1 [31:16] = reserved
2808 * key2 [31:0] = RX key [63:32]
2809 * key3 [15:0] = TX key [15:0]
2810 * key3 [31:16] = reserved
2811 * key4 [31:0] = TX key [63:32]
2812 */
2813 u32 mic0, mic1, mic2, mic3, mic4;
2814
2815 mic0 = get_unaligned_le32(k->kv_mic + 0);
2816 mic2 = get_unaligned_le32(k->kv_mic + 4);
2817 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2818 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2819 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2820
2821 /* Write RX[31:0] and TX[31:16] */
2822 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2823 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2824
2825 /* Write RX[63:32] and TX[15:0] */
2826 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2827 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2828
2829 /* Write TX[63:32] and keyType(reserved) */
2830 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2831 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2832 AR_KEYTABLE_TYPE_CLR);
2833
2834 } else {
2835 /*
2836 * TKIP uses four key cache entries (two for group
2837 * keys):
2838 * Michael MIC TX/RX keys are in different key cache
2839 * entries (idx = main index + 64 for TX and
2840 * main index + 32 + 96 for RX):
2841 * key0 [31:0] = TX/RX MIC key [31:0]
2842 * key1 [31:0] = reserved
2843 * key2 [31:0] = TX/RX MIC key [63:32]
2844 * key3 [31:0] = reserved
2845 * key4 [31:0] = reserved
2846 *
2847 * Upper layer code will call this function separately
2848 * for TX and RX keys when these registers offsets are
2849 * used.
2850 */
2851 u32 mic0, mic2;
2852
2853 mic0 = get_unaligned_le32(k->kv_mic + 0);
2854 mic2 = get_unaligned_le32(k->kv_mic + 4);
2855
2856 /* Write MIC key[31:0] */
2857 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2858 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2859
2860 /* Write MIC key[63:32] */
2861 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2862 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2863
2864 			/* key4 is reserved for this entry; write zero and the clear key type */
2865 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2866 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2867 AR_KEYTABLE_TYPE_CLR);
2868 }
2869
2870 /* MAC address registers are reserved for the MIC entry */
2871 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2872 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2873
2874 /*
2875 * Write the correct (un-inverted) key[47:0] last to enable
2876 * TKIP now that all other registers are set with correct
2877 * values.
2878 */
2879 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2880 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2881 } else {
2882 /* Write key[47:0] */
2883 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2884 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2885
2886 /* Write key[95:48] */
2887 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2888 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2889
2890 /* Write key[127:96] and key type */
2891 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2892 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2893
2894 /* Write MAC address for the entry */
2895 (void) ath9k_hw_keysetmac(ah, entry, mac);
2896 }
2897
2898 return true;
2899 }
2900 EXPORT_SYMBOL(ath9k_hw_set_keycache_entry);
2901
2902 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2903 {
2904 if (entry < ah->caps.keycache_size) {
2905 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2906 if (val & AR_KEYTABLE_VALID)
2907 return true;
2908 }
2909 return false;
2910 }
2911 EXPORT_SYMBOL(ath9k_hw_keyisvalid);
2912
2913 /******************************/
2914 /* Power Management (Chipset) */
2915 /******************************/
2916
2917 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2918 {
2919 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2920 if (setChip) {
2921 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2922 AR_RTC_FORCE_WAKE_EN);
2923 if (!AR_SREV_9100(ah))
2924 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2925
2926 		if (!AR_SREV_5416(ah))
2927 REG_CLR_BIT(ah, (AR_RTC_RESET),
2928 AR_RTC_RESET_EN);
2929 }
2930 }
2931
2932 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2933 {
2934 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2935 if (setChip) {
2936 struct ath9k_hw_capabilities *pCap = &ah->caps;
2937
2938 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2939 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2940 AR_RTC_FORCE_WAKE_ON_INT);
2941 } else {
2942 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2943 AR_RTC_FORCE_WAKE_EN);
2944 }
2945 }
2946 }
2947
2948 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2949 {
2950 u32 val;
2951 int i;
2952
2953 if (setChip) {
2954 if ((REG_READ(ah, AR_RTC_STATUS) &
2955 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2956 			if (!ath9k_hw_set_reset_reg(ah,
2957 						    ATH9K_RESET_POWER_ON)) {
2958 return false;
2959 }
2960 ath9k_hw_init_pll(ah, NULL);
2961 }
2962 if (AR_SREV_9100(ah))
2963 REG_SET_BIT(ah, AR_RTC_RESET,
2964 AR_RTC_RESET_EN);
2965
2966 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2967 AR_RTC_FORCE_WAKE_EN);
2968 udelay(50);
2969
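		/*
		 * Poll the RTC status, re-asserting force wake every 50us,
		 * until it reports ON or POWER_UP_TIME elapses.
		 */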
2970 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2971 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2972 if (val == AR_RTC_STATUS_ON)
2973 break;
2974 udelay(50);
2975 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2976 AR_RTC_FORCE_WAKE_EN);
2977 }
2978 if (i == 0) {
2979 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2980 				  "Failed to wake up in %uus\n",
2981 POWER_UP_TIME / 20);
2982 return false;
2983 }
2984 }
2985
2986 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2987
2988 return true;
2989 }
2990
2991 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2992 {
2993 struct ath_common *common = ath9k_hw_common(ah);
2994 int status = true, setChip = true;
2995 static const char *modes[] = {
2996 "AWAKE",
2997 "FULL-SLEEP",
2998 "NETWORK SLEEP",
2999 "UNDEFINED"
3000 };
3001
3002 if (ah->power_mode == mode)
3003 return status;
3004
3005 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
3006 modes[ah->power_mode], modes[mode]);
3007
3008 switch (mode) {
3009 case ATH9K_PM_AWAKE:
3010 status = ath9k_hw_set_power_awake(ah, setChip);
3011 break;
3012 case ATH9K_PM_FULL_SLEEP:
3013 ath9k_set_power_sleep(ah, setChip);
3014 ah->chip_fullsleep = true;
3015 break;
3016 case ATH9K_PM_NETWORK_SLEEP:
3017 ath9k_set_power_network_sleep(ah, setChip);
3018 break;
3019 default:
3020 ath_print(common, ATH_DBG_FATAL,
3021 "Unknown power mode %u\n", mode);
3022 return false;
3023 }
3024 ah->power_mode = mode;
3025
3026 return status;
3027 }
3028 EXPORT_SYMBOL(ath9k_hw_setpower);
3029
3030 /*
3031 * Helper for ASPM support.
3032 *
3033 * Disable PLL when in L0s as well as receiver clock when in L1.
3034 * This power saving option must be enabled through the SerDes.
3035 *
3036 * Programming the SerDes must go through the same 288 bit serial shift
3037 * register as the other analog registers. Hence the 9 writes.
3038 */
3039 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
3040 {
3041 u8 i;
3042 u32 val;
3043
3044 	if (!ah->is_pciexpress)
3045 return;
3046
3047 /* Do not touch SerDes registers */
3048 if (ah->config.pcie_powersave_enable == 2)
3049 return;
3050
3051 /* Nothing to do on restore for 11N */
3052 if (!restore) {
3053 if (AR_SREV_9280_20_OR_LATER(ah)) {
3054 /*
3055 			 * AR9280 2.0 or later chips use SerDes values from
3056 			 * initvals.h, initialized per chipset during
3057 * ath9k_hw_init()
3058 */
3059 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3060 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3061 INI_RA(&ah->iniPcieSerdes, i, 1));
3062 }
3063 } else if (AR_SREV_9280(ah) &&
3064 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3066 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3067
3068 /* RX shut off when elecidle is asserted */
3069 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3070 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3071 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3072
3073 /* Shut off CLKREQ active in L1 */
3074 if (ah->config.pcie_clock_req)
3075 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3076 else
3077 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3078
3079 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3080 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3081 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3082
3083 /* Load the new settings */
3084 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3085
3086 } else {
3087 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3088 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3089
3090 /* RX shut off when elecidle is asserted */
3091 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3092 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3093 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3094
3095 /*
3096 			 * Ignore the ah->config.pcie_clock_req setting for
3097 * pre-AR9280 11n
3098 */
3099 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3100
3101 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3102 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3103 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3104
3105 /* Load the new settings */
3106 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3107 }
3108
3109 udelay(1000);
3110
3111 /* set bit 19 to allow forcing of pcie core into L1 state */
3112 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3113
3114 		/* Several PCIe messages to ensure proper behaviour */
3115 if (ah->config.pcie_waen) {
3116 val = ah->config.pcie_waen;
3117 if (!power_off)
3118 val &= (~AR_WA_D3_L1_DISABLE);
3119 } else {
3120 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3121 AR_SREV_9287(ah)) {
3122 val = AR9285_WA_DEFAULT;
3123 if (!power_off)
3124 val &= (~AR_WA_D3_L1_DISABLE);
3125 } else if (AR_SREV_9280(ah)) {
3126 /*
3127 				 * On AR9280 chips, bit 22 of 0x4004 needs to be
3128 				 * set, otherwise the card may disappear.
3129 */
3130 val = AR9280_WA_DEFAULT;
3131 if (!power_off)
3132 val &= (~AR_WA_D3_L1_DISABLE);
3133 } else
3134 val = AR_WA_DEFAULT;
3135 }
3136
3137 REG_WRITE(ah, AR_WA, val);
3138 }
3139
3140 if (power_off) {
3141 /*
3142 		 * Set PCIe workaround bits:
3143 		 * bit 14 in the WA register (disable L1) should only
3144 		 * be set when the device enters D3 and cleared again
3145 		 * when the device comes back to D0.
3146 */
3147 if (ah->config.pcie_waen) {
3148 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3149 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3150 } else {
3151 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3152 AR_SREV_9287(ah)) &&
3153 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3154 (AR_SREV_9280(ah) &&
3155 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3156 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3157 }
3158 }
3159 }
3160 }
3161 EXPORT_SYMBOL(ath9k_hw_configpcipowersave);
3162
3163 /**********************/
3164 /* Interrupt Handling */
3165 /**********************/
3166
3167 bool ath9k_hw_intrpend(struct ath_hw *ah)
3168 {
3169 u32 host_isr;
3170
3171 if (AR_SREV_9100(ah))
3172 return true;
3173
3174 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3175 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3176 return true;
3177
3178 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3179 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3180 && (host_isr != AR_INTR_SPURIOUS))
3181 return true;
3182
3183 return false;
3184 }
3185 EXPORT_SYMBOL(ath9k_hw_intrpend);
3186
3187 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3188 {
3189 u32 isr = 0;
3190 u32 mask2 = 0;
3191 struct ath9k_hw_capabilities *pCap = &ah->caps;
3192 u32 sync_cause = 0;
3193 bool fatal_int = false;
3194 struct ath_common *common = ath9k_hw_common(ah);
3195
3196 if (!AR_SREV_9100(ah)) {
3197 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3198 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3199 == AR_RTC_STATUS_ON) {
3200 isr = REG_READ(ah, AR_ISR);
3201 }
3202 }
3203
3204 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3205 AR_INTR_SYNC_DEFAULT;
3206
3207 *masked = 0;
3208
3209 if (!isr && !sync_cause)
3210 return false;
3211 } else {
3212 *masked = 0;
3213 isr = REG_READ(ah, AR_ISR);
3214 }
3215
3216 if (isr) {
3217 if (isr & AR_ISR_BCNMISC) {
3218 u32 isr2;
3219 isr2 = REG_READ(ah, AR_ISR_S2);
3220 if (isr2 & AR_ISR_S2_TIM)
3221 mask2 |= ATH9K_INT_TIM;
3222 if (isr2 & AR_ISR_S2_DTIM)
3223 mask2 |= ATH9K_INT_DTIM;
3224 if (isr2 & AR_ISR_S2_DTIMSYNC)
3225 mask2 |= ATH9K_INT_DTIMSYNC;
3226 if (isr2 & (AR_ISR_S2_CABEND))
3227 mask2 |= ATH9K_INT_CABEND;
3228 if (isr2 & AR_ISR_S2_GTT)
3229 mask2 |= ATH9K_INT_GTT;
3230 if (isr2 & AR_ISR_S2_CST)
3231 mask2 |= ATH9K_INT_CST;
3232 if (isr2 & AR_ISR_S2_TSFOOR)
3233 mask2 |= ATH9K_INT_TSFOOR;
3234 }
3235
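		/*
		 * Read the read-and-clear shadow register; an all-ones
		 * value typically means the device is no longer present.
		 */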
3236 isr = REG_READ(ah, AR_ISR_RAC);
3237 if (isr == 0xffffffff) {
3238 *masked = 0;
3239 return false;
3240 }
3241
3242 *masked = isr & ATH9K_INT_COMMON;
3243
3244 if (ah->config.intr_mitigation) {
3245 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3246 *masked |= ATH9K_INT_RX;
3247 }
3248
3249 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3250 *masked |= ATH9K_INT_RX;
3251 if (isr &
3252 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3253 AR_ISR_TXEOL)) {
3254 u32 s0_s, s1_s;
3255
3256 *masked |= ATH9K_INT_TX;
3257
3258 s0_s = REG_READ(ah, AR_ISR_S0_S);
3259 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3260 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3261
3262 s1_s = REG_READ(ah, AR_ISR_S1_S);
3263 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3264 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3265 }
3266
3267 if (isr & AR_ISR_RXORN) {
3268 ath_print(common, ATH_DBG_INTERRUPT,
3269 "receive FIFO overrun interrupt\n");
3270 }
3271
3272 if (!AR_SREV_9100(ah)) {
3273 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3274 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3275 if (isr5 & AR_ISR_S5_TIM_TIMER)
3276 *masked |= ATH9K_INT_TIM_TIMER;
3277 }
3278 }
3279
3280 *masked |= mask2;
3281 }
3282
3283 if (AR_SREV_9100(ah))
3284 return true;
3285
3286 if (isr & AR_ISR_GENTMR) {
3287 u32 s5_s;
3288
3289 s5_s = REG_READ(ah, AR_ISR_S5_S);
3290 if (isr & AR_ISR_GENTMR) {
3291 ah->intr_gen_timer_trigger =
3292 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3293
3294 ah->intr_gen_timer_thresh =
3295 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3296
3297 if (ah->intr_gen_timer_trigger)
3298 *masked |= ATH9K_INT_GENTIMER;
3299
3300 }
3301 }
3302
3303 if (sync_cause) {
3304 fatal_int =
3305 (sync_cause &
3306 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3307 ? true : false;
3308
3309 if (fatal_int) {
3310 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3311 ath_print(common, ATH_DBG_ANY,
3312 "received PCI FATAL interrupt\n");
3313 }
3314 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3315 ath_print(common, ATH_DBG_ANY,
3316 "received PCI PERR interrupt\n");
3317 }
3318 *masked |= ATH9K_INT_FATAL;
3319 }
3320 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3321 ath_print(common, ATH_DBG_INTERRUPT,
3322 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3323 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3324 REG_WRITE(ah, AR_RC, 0);
3325 *masked |= ATH9K_INT_FATAL;
3326 }
3327 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3328 ath_print(common, ATH_DBG_INTERRUPT,
3329 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3330 }
3331
3332 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3333 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3334 }
3335
3336 return true;
3337 }
3338 EXPORT_SYMBOL(ath9k_hw_getisr);
3339
3340 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3341 {
3342 u32 omask = ah->mask_reg;
3343 u32 mask, mask2;
3344 struct ath9k_hw_capabilities *pCap = &ah->caps;
3345 struct ath_common *common = ath9k_hw_common(ah);
3346
3347 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3348
3349 if (omask & ATH9K_INT_GLOBAL) {
3350 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n");
3351 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3352 (void) REG_READ(ah, AR_IER);
3353 if (!AR_SREV_9100(ah)) {
3354 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3355 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3356
3357 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3358 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3359 }
3360 }
3361
3362 mask = ints & ATH9K_INT_COMMON;
3363 mask2 = 0;
3364
3365 if (ints & ATH9K_INT_TX) {
3366 if (ah->txok_interrupt_mask)
3367 mask |= AR_IMR_TXOK;
3368 if (ah->txdesc_interrupt_mask)
3369 mask |= AR_IMR_TXDESC;
3370 if (ah->txerr_interrupt_mask)
3371 mask |= AR_IMR_TXERR;
3372 if (ah->txeol_interrupt_mask)
3373 mask |= AR_IMR_TXEOL;
3374 }
3375 if (ints & ATH9K_INT_RX) {
3376 mask |= AR_IMR_RXERR;
3377 if (ah->config.intr_mitigation)
3378 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3379 else
3380 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3381 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3382 mask |= AR_IMR_GENTMR;
3383 }
3384
3385 if (ints & (ATH9K_INT_BMISC)) {
3386 mask |= AR_IMR_BCNMISC;
3387 if (ints & ATH9K_INT_TIM)
3388 mask2 |= AR_IMR_S2_TIM;
3389 if (ints & ATH9K_INT_DTIM)
3390 mask2 |= AR_IMR_S2_DTIM;
3391 if (ints & ATH9K_INT_DTIMSYNC)
3392 mask2 |= AR_IMR_S2_DTIMSYNC;
3393 if (ints & ATH9K_INT_CABEND)
3394 mask2 |= AR_IMR_S2_CABEND;
3395 if (ints & ATH9K_INT_TSFOOR)
3396 mask2 |= AR_IMR_S2_TSFOOR;
3397 }
3398
3399 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3400 mask |= AR_IMR_BCNMISC;
3401 if (ints & ATH9K_INT_GTT)
3402 mask2 |= AR_IMR_S2_GTT;
3403 if (ints & ATH9K_INT_CST)
3404 mask2 |= AR_IMR_S2_CST;
3405 }
3406
3407 ath_print(common, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3408 REG_WRITE(ah, AR_IMR, mask);
3409 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3410 AR_IMR_S2_DTIM |
3411 AR_IMR_S2_DTIMSYNC |
3412 AR_IMR_S2_CABEND |
3413 AR_IMR_S2_CABTO |
3414 AR_IMR_S2_TSFOOR |
3415 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3416 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3417 ah->mask_reg = ints;
3418
3419 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3420 if (ints & ATH9K_INT_TIM_TIMER)
3421 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3422 else
3423 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3424 }
3425
3426 if (ints & ATH9K_INT_GLOBAL) {
3427 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n");
3428 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3429 if (!AR_SREV_9100(ah)) {
3430 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3431 AR_INTR_MAC_IRQ);
3432 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3433
3434
3435 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3436 AR_INTR_SYNC_DEFAULT);
3437 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3438 AR_INTR_SYNC_DEFAULT);
3439 }
3440 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3441 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3442 }
3443
3444 return omask;
3445 }
3446 EXPORT_SYMBOL(ath9k_hw_set_interrupts);
3447
3448 /*******************/
3449 /* Beacon Handling */
3450 /*******************/
3451
3452 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3453 {
3454 int flags = 0;
3455
3456 ah->beacon_interval = beacon_period;
3457
3458 switch (ah->opmode) {
3459 case NL80211_IFTYPE_STATION:
3460 case NL80211_IFTYPE_MONITOR:
3461 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3462 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3463 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3464 flags |= AR_TBTT_TIMER_EN;
3465 break;
3466 case NL80211_IFTYPE_ADHOC:
3467 case NL80211_IFTYPE_MESH_POINT:
3468 REG_SET_BIT(ah, AR_TXCFG,
3469 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3470 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3471 TU_TO_USEC(next_beacon +
3472 (ah->atim_window ? ah->
3473 atim_window : 1)));
3474 flags |= AR_NDP_TIMER_EN;
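		/* fall through: adhoc/mesh also use the AP TBTT/DBA/SWBA timers */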
3475 case NL80211_IFTYPE_AP:
3476 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3477 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3478 TU_TO_USEC(next_beacon -
3479 ah->config.
3480 dma_beacon_response_time));
3481 REG_WRITE(ah, AR_NEXT_SWBA,
3482 TU_TO_USEC(next_beacon -
3483 ah->config.
3484 sw_beacon_response_time));
3485 flags |=
3486 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3487 break;
3488 default:
3489 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON,
3490 "%s: unsupported opmode: %d\n",
3491 __func__, ah->opmode);
3492 		return;
3494 }
3495
3496 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3497 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3498 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3499 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3500
3501 beacon_period &= ~ATH9K_BEACON_ENA;
3502 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3503 ath9k_hw_reset_tsf(ah);
3504 }
3505
3506 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3507 }
3508 EXPORT_SYMBOL(ath9k_hw_beaconinit);
3509
3510 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3511 const struct ath9k_beacon_state *bs)
3512 {
3513 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3514 struct ath9k_hw_capabilities *pCap = &ah->caps;
3515 struct ath_common *common = ath9k_hw_common(ah);
3516
3517 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3518
3519 REG_WRITE(ah, AR_BEACON_PERIOD,
3520 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3521 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3522 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3523
3524 REG_RMW_FIELD(ah, AR_RSSI_THR,
3525 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3526
3527 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3528
3529 if (bs->bs_sleepduration > beaconintval)
3530 beaconintval = bs->bs_sleepduration;
3531
3532 dtimperiod = bs->bs_dtimperiod;
3533 if (bs->bs_sleepduration > dtimperiod)
3534 dtimperiod = bs->bs_sleepduration;
3535
3536 if (beaconintval == dtimperiod)
3537 nextTbtt = bs->bs_nextdtim;
3538 else
3539 nextTbtt = bs->bs_nexttbtt;
3540
3541 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3542 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3543 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3544 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3545
3546 REG_WRITE(ah, AR_NEXT_DTIM,
3547 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3548 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3549
3550 REG_WRITE(ah, AR_SLEEP1,
3551 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3552 | AR_SLEEP1_ASSUME_DTIM);
3553
3554 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3555 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3556 else
3557 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3558
3559 REG_WRITE(ah, AR_SLEEP2,
3560 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3561
3562 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3563 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3564
3565 REG_SET_BIT(ah, AR_TIMER_MODE,
3566 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3567 AR_DTIM_TIMER_EN);
3568
3569 /* TSF Out of Range Threshold */
3570 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3571 }
3572 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
3573
3574 /*******************/
3575 /* HW Capabilities */
3576 /*******************/
3577
3578 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3579 {
3580 struct ath9k_hw_capabilities *pCap = &ah->caps;
3581 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3582 struct ath_common *common = ath9k_hw_common(ah);
3583 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
3584
3585 u16 capField = 0, eeval;
3586
3587 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3588 regulatory->current_rd = eeval;
3589
3590 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3591 if (AR_SREV_9285_10_OR_LATER(ah))
3592 eeval |= AR9285_RDEXT_DEFAULT;
3593 regulatory->current_rd_ext = eeval;
3594
3595 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3596
3597 if (ah->opmode != NL80211_IFTYPE_AP &&
3598 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3599 if (regulatory->current_rd == 0x64 ||
3600 regulatory->current_rd == 0x65)
3601 regulatory->current_rd += 5;
3602 else if (regulatory->current_rd == 0x41)
3603 regulatory->current_rd = 0x43;
3604 ath_print(common, ATH_DBG_REGULATORY,
3605 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3606 }
3607
3608 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3609 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3610
3611 if (eeval & AR5416_OPFLAGS_11A) {
3612 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3613 if (ah->config.ht_enable) {
3614 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3615 set_bit(ATH9K_MODE_11NA_HT20,
3616 pCap->wireless_modes);
3617 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3618 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3619 pCap->wireless_modes);
3620 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3621 pCap->wireless_modes);
3622 }
3623 }
3624 }
3625
3626 if (eeval & AR5416_OPFLAGS_11G) {
3627 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3628 if (ah->config.ht_enable) {
3629 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3630 set_bit(ATH9K_MODE_11NG_HT20,
3631 pCap->wireless_modes);
3632 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3633 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3634 pCap->wireless_modes);
3635 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3636 pCap->wireless_modes);
3637 }
3638 }
3639 }
3640
3641 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3642 /*
3643 	 * For AR9271 we will temporarily use the rx chainmask as read from
3644 * the EEPROM.
3645 */
3646 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3647 !(eeval & AR5416_OPFLAGS_11A) &&
3648 !(AR_SREV_9271(ah)))
3649 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3650 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3651 else
3652 /* Use rx_chainmask from EEPROM. */
3653 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3654
3655 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3656 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3657
3658 pCap->low_2ghz_chan = 2312;
3659 pCap->high_2ghz_chan = 2732;
3660
3661 pCap->low_5ghz_chan = 4920;
3662 pCap->high_5ghz_chan = 6100;
3663
3664 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3665 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3666 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3667
3668 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3669 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3670 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3671
3672 if (ah->config.ht_enable)
3673 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3674 else
3675 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3676
3677 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3678 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3679 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3680 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3681
3682 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3683 pCap->total_queues =
3684 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3685 else
3686 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3687
3688 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3689 pCap->keycache_size =
3690 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3691 else
3692 pCap->keycache_size = AR_KEYTABLE_SIZE;
3693
3694 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3695 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3696
3697 if (AR_SREV_9285_10_OR_LATER(ah))
3698 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3699 else if (AR_SREV_9280_10_OR_LATER(ah))
3700 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3701 else
3702 pCap->num_gpio_pins = AR_NUM_GPIO;
3703
3704 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3705 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3706 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3707 } else {
3708 pCap->rts_aggr_limit = (8 * 1024);
3709 }
3710
3711 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3712
3713 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3714 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3715 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3716 ah->rfkill_gpio =
3717 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3718 ah->rfkill_polarity =
3719 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3720
3721 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3722 }
3723 #endif
3724
3725 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3726
3727 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3728 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3729 else
3730 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3731
3732 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3733 pCap->reg_cap =
3734 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3735 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3736 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3737 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3738 } else {
3739 pCap->reg_cap =
3740 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3741 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3742 }
3743
3744 /* Advertise midband for AR5416 with FCC midband set in eeprom */
3745 if (regulatory->current_rd_ext & (1 << REG_EXT_FCC_MIDBAND) &&
3746 AR_SREV_5416(ah))
3747 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3748
3749 pCap->num_antcfg_5ghz =
3750 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3751 pCap->num_antcfg_2ghz =
3752 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3753
3754 if (AR_SREV_9280_10_OR_LATER(ah) &&
3755 ath9k_hw_btcoex_supported(ah)) {
3756 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO;
3757 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3758
3759 if (AR_SREV_9285(ah)) {
3760 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
3761 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3762 } else {
3763 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
3764 }
3765 } else {
3766 btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
3767 }
3768 }
3769
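/*
 * Query a hardware capability. Simple capabilities are answered through
 * the return value; for ATH9K_CAP_TXPOW the requested value is written
 * to *result instead.
 */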
3770 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3771 u32 capability, u32 *result)
3772 {
3773 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3774 switch (type) {
3775 case ATH9K_CAP_CIPHER:
3776 switch (capability) {
3777 case ATH9K_CIPHER_AES_CCM:
3778 case ATH9K_CIPHER_AES_OCB:
3779 case ATH9K_CIPHER_TKIP:
3780 case ATH9K_CIPHER_WEP:
3781 case ATH9K_CIPHER_MIC:
3782 case ATH9K_CIPHER_CLR:
3783 return true;
3784 default:
3785 return false;
3786 }
3787 case ATH9K_CAP_TKIP_MIC:
3788 switch (capability) {
3789 case 0:
3790 return true;
3791 case 1:
3792 return (ah->sta_id1_defaults &
3793 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3794 false;
3795 }
3796 case ATH9K_CAP_TKIP_SPLIT:
3797 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3798 false : true;
3799 case ATH9K_CAP_DIVERSITY:
3800 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3801 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3802 true : false;
3803 case ATH9K_CAP_MCAST_KEYSRCH:
3804 switch (capability) {
3805 case 0:
3806 return true;
3807 case 1:
3808 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3809 return false;
3810 } else {
3811 return (ah->sta_id1_defaults &
3812 AR_STA_ID1_MCAST_KSRCH) ? true :
3813 false;
3814 }
3815 }
3816 return false;
3817 case ATH9K_CAP_TXPOW:
3818 switch (capability) {
3819 case 0:
3820 return 0;
3821 case 1:
3822 *result = regulatory->power_limit;
3823 return 0;
3824 case 2:
3825 *result = regulatory->max_power_level;
3826 return 0;
3827 case 3:
3828 *result = regulatory->tp_scale;
3829 return 0;
3830 }
3831 return false;
3832 case ATH9K_CAP_DS:
3833 return (AR_SREV_9280_20_OR_LATER(ah) &&
3834 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3835 ? false : true;
3836 default:
3837 return false;
3838 }
3839 }
3840 EXPORT_SYMBOL(ath9k_hw_getcapability);
3841
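/*
 * Update a tunable capability: TKIP MIC and multicast key search are
 * cached in sta_id1_defaults for the next reset, while antenna fast
 * diversity is toggled directly in the baseband register.
 */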
3842 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3843 u32 capability, u32 setting, int *status)
3844 {
3845 u32 v;
3846
3847 switch (type) {
3848 case ATH9K_CAP_TKIP_MIC:
3849 if (setting)
3850 ah->sta_id1_defaults |=
3851 AR_STA_ID1_CRPT_MIC_ENABLE;
3852 else
3853 ah->sta_id1_defaults &=
3854 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3855 return true;
3856 case ATH9K_CAP_DIVERSITY:
3857 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3858 if (setting)
3859 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3860 else
3861 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3862 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3863 return true;
3864 case ATH9K_CAP_MCAST_KEYSRCH:
3865 if (setting)
3866 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3867 else
3868 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3869 return true;
3870 default:
3871 return false;
3872 }
3873 }
3874 EXPORT_SYMBOL(ath9k_hw_setcapability);
3875
3876 /****************************/
3877 /* GPIO / RFKILL / Antennae */
3878 /****************************/
3879
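/*
 * Route an output signal type to a GPIO pin. Each MUX register holds up
 * to six 5-bit fields; on early chips MUX1 is updated with a
 * read-shuffle-write sequence instead of a plain read-modify-write.
 */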
3880 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3881 u32 gpio, u32 type)
3882 {
3883 int addr;
3884 u32 gpio_shift, tmp;
3885
3886 if (gpio > 11)
3887 addr = AR_GPIO_OUTPUT_MUX3;
3888 else if (gpio > 5)
3889 addr = AR_GPIO_OUTPUT_MUX2;
3890 else
3891 addr = AR_GPIO_OUTPUT_MUX1;
3892
3893 gpio_shift = (gpio % 6) * 5;
3894
3895 if (AR_SREV_9280_20_OR_LATER(ah) ||
3896 (addr != AR_GPIO_OUTPUT_MUX1)) {
3897 REG_RMW(ah, addr, (type << gpio_shift),
3898 (0x1f << gpio_shift));
3899 } else {
3900 tmp = REG_READ(ah, addr);
3901 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3902 tmp &= ~(0x1f << gpio_shift);
3903 tmp |= (type << gpio_shift);
3904 REG_WRITE(ah, addr, tmp);
3905 }
3906 }
3907
3908 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3909 {
3910 u32 gpio_shift;
3911
3912 BUG_ON(gpio >= ah->caps.num_gpio_pins);
3913
3914 gpio_shift = gpio << 1;
3915
3916 REG_RMW(ah,
3917 AR_GPIO_OE_OUT,
3918 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3919 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3920 }
3921 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
3922
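/*
 * Sample the input level of a GPIO pin. The input-value field sits at a
 * chip-specific position, hence the per-family register decode; invalid
 * pins return 0xffffffff.
 */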
3923 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3924 {
3925 #define MS_REG_READ(x, y) \
3926 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3927
3928 if (gpio >= ah->caps.num_gpio_pins)
3929 return 0xffffffff;
3930
3931 if (AR_SREV_9287_10_OR_LATER(ah))
3932 return MS_REG_READ(AR9287, gpio) != 0;
3933 else if (AR_SREV_9285_10_OR_LATER(ah))
3934 return MS_REG_READ(AR9285, gpio) != 0;
3935 else if (AR_SREV_9280_10_OR_LATER(ah))
3936 return MS_REG_READ(AR928X, gpio) != 0;
3937 else
3938 return MS_REG_READ(AR, gpio) != 0;
3939 }
3940 EXPORT_SYMBOL(ath9k_hw_gpio_get);
3941
3942 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3943 u32 ah_signal_type)
3944 {
3945 u32 gpio_shift;
3946
3947 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3948
3949 gpio_shift = 2 * gpio;
3950
3951 REG_RMW(ah,
3952 AR_GPIO_OE_OUT,
3953 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3954 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3955 }
3956 EXPORT_SYMBOL(ath9k_hw_cfg_output);
3957
3958 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3959 {
3960 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3961 AR_GPIO_BIT(gpio));
3962 }
3963 EXPORT_SYMBOL(ath9k_hw_set_gpio);
3964
3965 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3966 {
3967 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3968 }
3969 EXPORT_SYMBOL(ath9k_hw_getdefantenna);
3970
3971 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3972 {
3973 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3974 }
3975 EXPORT_SYMBOL(ath9k_hw_setantenna);
3976
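/*
 * Apply an antenna selection. On AR9280 this is done by forcing the
 * TX/RX chainmasks for the fixed settings (the original masks are
 * remembered so ATH9K_ANT_VARIABLE can restore them); other chips just
 * record the diversity control setting.
 */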
3977 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3978 enum ath9k_ant_setting settings,
3979 struct ath9k_channel *chan,
3980 u8 *tx_chainmask,
3981 u8 *rx_chainmask,
3982 u8 *antenna_cfgd)
3983 {
3984 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3985
3986 if (AR_SREV_9280(ah)) {
3987 if (!tx_chainmask_cfg) {
3988
3989 tx_chainmask_cfg = *tx_chainmask;
3990 rx_chainmask_cfg = *rx_chainmask;
3991 }
3992
3993 switch (settings) {
3994 case ATH9K_ANT_FIXED_A:
3995 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3996 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3997 *antenna_cfgd = true;
3998 break;
3999 case ATH9K_ANT_FIXED_B:
4000 if (ah->caps.tx_chainmask >
4001 ATH9K_ANTENNA1_CHAINMASK) {
4002 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
4003 }
4004 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
4005 *antenna_cfgd = true;
4006 break;
4007 case ATH9K_ANT_VARIABLE:
4008 *tx_chainmask = tx_chainmask_cfg;
4009 *rx_chainmask = rx_chainmask_cfg;
4010 *antenna_cfgd = true;
4011 break;
4012 default:
4013 break;
4014 }
4015 } else {
4016 ah->config.diversity_control = settings;
4017 }
4018
4019 return true;
4020 }
4021
4022 /*********************/
4023 /* General Operation */
4024 /*********************/
4025
4026 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
4027 {
4028 u32 bits = REG_READ(ah, AR_RX_FILTER);
4029 u32 phybits = REG_READ(ah, AR_PHY_ERR);
4030
4031 if (phybits & AR_PHY_ERR_RADAR)
4032 bits |= ATH9K_RX_FILTER_PHYRADAR;
4033 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
4034 bits |= ATH9K_RX_FILTER_PHYERR;
4035
4036 return bits;
4037 }
4038 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
4039
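/*
 * Program the RX filter. The PHY radar/error filter bits are mirrored
 * into AR_PHY_ERR, and zero-length frame DMA is enabled whenever any
 * PHY errors are being delivered to the host.
 */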
4040 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
4041 {
4042 u32 phybits;
4043
4044 REG_WRITE(ah, AR_RX_FILTER, bits);
4045
4046 phybits = 0;
4047 if (bits & ATH9K_RX_FILTER_PHYRADAR)
4048 phybits |= AR_PHY_ERR_RADAR;
4049 if (bits & ATH9K_RX_FILTER_PHYERR)
4050 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
4051 REG_WRITE(ah, AR_PHY_ERR, phybits);
4052
4053 if (phybits)
4054 REG_WRITE(ah, AR_RXCFG,
4055 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
4056 else
4057 REG_WRITE(ah, AR_RXCFG,
4058 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
4059 }
4060 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
4061
4062 bool ath9k_hw_phy_disable(struct ath_hw *ah)
4063 {
4064 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
4065 return false;
4066
4067 ath9k_hw_init_pll(ah, NULL);
4068 return true;
4069 }
4070 EXPORT_SYMBOL(ath9k_hw_phy_disable);
4071
4072 bool ath9k_hw_disable(struct ath_hw *ah)
4073 {
4074 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
4075 return false;
4076
4077 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
4078 return false;
4079
4080 ath9k_hw_init_pll(ah, NULL);
4081 return true;
4082 }
4083 EXPORT_SYMBOL(ath9k_hw_disable);
4084
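/*
 * Set a new regulatory TX power limit, clamped to MAX_RATE_POWER, and
 * reprogram the per-rate power table for the current channel.
 */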
4085 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
4086 {
4087 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
4088 struct ath9k_channel *chan = ah->curchan;
4089 struct ieee80211_channel *channel = chan->chan;
4090
4091 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
4092
4093 ah->eep_ops->set_txpower(ah, chan,
4094 ath9k_regd_get_ctl(regulatory, chan),
4095 channel->max_antenna_gain * 2,
4096 channel->max_power * 2,
4097 min((u32) MAX_RATE_POWER,
4098 (u32) regulatory->power_limit));
4099 }
4100 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
4101
4102 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4103 {
4104 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN);
4105 }
4106 EXPORT_SYMBOL(ath9k_hw_setmac);
4107
4108 void ath9k_hw_setopmode(struct ath_hw *ah)
4109 {
4110 ath9k_hw_set_operating_mode(ah, ah->opmode);
4111 }
4112 EXPORT_SYMBOL(ath9k_hw_setopmode);
4113
4114 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4115 {
4116 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4117 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4118 }
4119 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
4120
4121 void ath9k_hw_write_associd(struct ath_hw *ah)
4122 {
4123 struct ath_common *common = ath9k_hw_common(ah);
4124
4125 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
4126 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
4127 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4128 }
4129 EXPORT_SYMBOL(ath9k_hw_write_associd);
4130
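/* Return the 64-bit TSF, assembled from the upper and lower 32-bit halves */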
4131 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4132 {
4133 u64 tsf;
4134
4135 tsf = REG_READ(ah, AR_TSF_U32);
4136 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4137
4138 return tsf;
4139 }
4140 EXPORT_SYMBOL(ath9k_hw_gettsf64);
4141
4142 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4143 {
4144 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4145 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4146 }
4147 EXPORT_SYMBOL(ath9k_hw_settsf64);
4148
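/*
 * Force a TSF reset, waiting first for any in-flight TSF write to
 * complete so the reset is not lost.
 */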
4149 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4150 {
4151 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4152 AH_TSF_WRITE_TIMEOUT))
4153 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4154 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4155
4156 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4157 }
4158 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
4159
4160 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4161 {
4162 if (setting)
4163 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4164 else
4165 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4166 }
4167 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
4168
4169 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4170 {
4171 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4172 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4173 "bad slot time %u\n", us);
4174 ah->slottime = (u32) -1;
4175 return false;
4176 } else {
4177 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4178 ah->slottime = us;
4179 return true;
4180 }
4181 }
4182 EXPORT_SYMBOL(ath9k_hw_setslottime);
4183
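/*
 * Select the 20/40 MHz MAC mode: in HT40 operation the control and
 * extension channels are treated as a joined RX-clear indication unless
 * extension-channel CCA is configured to be ignored.
 */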
4184 void ath9k_hw_set11nmac2040(struct ath_hw *ah)
4185 {
4186 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
4187 u32 macmode;
4188
4189 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
4190 macmode = AR_2040_JOINED_RX_CLEAR;
4191 else
4192 macmode = 0;
4193
4194 REG_WRITE(ah, AR_2040_MODE, macmode);
4195 }
4196
4197 /* HW Generic timers configuration */
4198
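/*
 * Entries below AR_FIRST_NDP_TIMER are placeholders for the
 * fixed-function MAC timers; the remaining slots are the generic
 * NDP/NDP2 timers programmed through the generic-timer API below.
 */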
4199 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4200 {
4201 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4202 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4203 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4204 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4205 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4206 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4207 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4208 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4209 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4210 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4211 AR_NDP2_TIMER_MODE, 0x0002},
4212 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4213 AR_NDP2_TIMER_MODE, 0x0004},
4214 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4215 AR_NDP2_TIMER_MODE, 0x0008},
4216 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4217 AR_NDP2_TIMER_MODE, 0x0010},
4218 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4219 AR_NDP2_TIMER_MODE, 0x0020},
4220 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4221 AR_NDP2_TIMER_MODE, 0x0040},
4222 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4223 AR_NDP2_TIMER_MODE, 0x0080}
4224 };
4225
4226 /* HW generic timer primitives */
4227
4228 /* compute and clear index of rightmost 1 */
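/*
 * Isolating the lowest set bit (b &= -b) and multiplying by the de Bruijn
 * constant turns its position into a 5-bit index (the >> 27), which is
 * mapped back to a timer slot through gen_timer_index[].
 */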
4229 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4230 {
4231 u32 b;
4232
4233 b = *mask;
4234 b &= (0-b);
4235 *mask &= ~b;
4236 b *= debruijn32;
4237 b >>= 27;
4238
4239 return timer_table->gen_timer_index[b];
4240 }
4241
4242 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4243 {
4244 return REG_READ(ah, AR_TSF_L32);
4245 }
4246 EXPORT_SYMBOL(ath9k_hw_gettsf32);
4247
4248 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4249 void (*trigger)(void *),
4250 void (*overflow)(void *),
4251 void *arg,
4252 u8 timer_index)
4253 {
4254 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4255 struct ath_gen_timer *timer;
4256
4257 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4258
4259 if (timer == NULL) {
4260 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
4261 "Failed to allocate memory"
4262 "for hw timer[%d]\n", timer_index);
4263 return NULL;
4264 }
4265
4266 /* allocate a hardware generic timer slot */
4267 timer_table->timers[timer_index] = timer;
4268 timer->index = timer_index;
4269 timer->trigger = trigger;
4270 timer->overflow = overflow;
4271 timer->arg = arg;
4272
4273 return timer;
4274 }
4275 EXPORT_SYMBOL(ath_gen_timer_alloc);
4276
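/*
 * Arm a generic timer: mark it active, program its next-fire TSF value
 * (pushed forward if the TSF has already passed it) and period, then
 * unmask its trigger and threshold interrupts.
 */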
4277 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
4278 struct ath_gen_timer *timer,
4279 u32 timer_next,
4280 u32 timer_period)
4281 {
4282 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4283 u32 tsf;
4284
4285 BUG_ON(!timer_period);
4286
4287 set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4288
4289 tsf = ath9k_hw_gettsf32(ah);
4290
4291 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER,
4292 "curent tsf %x period %x"
4293 "timer_next %x\n", tsf, timer_period, timer_next);
4294
4295 /*
4296 * Pull timer_next forward if the current TSF already passed it
4297 * because of software latency
4298 */
4299 if (timer_next < tsf)
4300 timer_next = tsf + timer_period;
4301
4302 /*
4303 * Program generic timer registers
4304 */
4305 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4306 timer_next);
4307 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4308 timer_period);
4309 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4310 gen_tmr_configuration[timer->index].mode_mask);
4311
4312 /* Enable both trigger and thresh interrupt masks */
4313 REG_SET_BIT(ah, AR_IMR_S5,
4314 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4315 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4316 }
4317 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
4318
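/*
 * Disarm a generic timer: clear its mode bit, mask its trigger and
 * threshold interrupts and drop it from the active-timer mask.
 */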
4319 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4320 {
4321 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4322
4323 if ((timer->index < AR_FIRST_NDP_TIMER) ||
4324 (timer->index >= ATH_MAX_GEN_TIMER)) {
4325 return;
4326 }
4327
4328 /* Clear generic timer enable bits. */
4329 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4330 gen_tmr_configuration[timer->index].mode_mask);
4331
4332 /* Disable both trigger and thresh interrupt masks */
4333 REG_CLR_BIT(ah, AR_IMR_S5,
4334 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4335 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4336
4337 clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4338 }
4339 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
4340
4341 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4342 {
4343 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4344
4345 /* free the hardware generic timer slot */
4346 timer_table->timers[timer->index] = NULL;
4347 kfree(timer);
4348 }
4349 EXPORT_SYMBOL(ath_gen_timer_free);
4350
4351 /*
4352 * Generic Timer Interrupts handling
4353 */
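/*
 * Threshold (TSF overflow) events are dispatched first and removed from
 * the trigger mask so a timer does not receive both callbacks for the
 * same interrupt.
 */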
4354 void ath_gen_timer_isr(struct ath_hw *ah)
4355 {
4356 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4357 struct ath_gen_timer *timer;
4358 struct ath_common *common = ath9k_hw_common(ah);
4359 u32 trigger_mask, thresh_mask, index;
4360
4361 /* get hardware generic timer interrupt status */
4362 trigger_mask = ah->intr_gen_timer_trigger;
4363 thresh_mask = ah->intr_gen_timer_thresh;
4364 trigger_mask &= timer_table->timer_mask.val;
4365 thresh_mask &= timer_table->timer_mask.val;
4366
4367 trigger_mask &= ~thresh_mask;
4368
4369 while (thresh_mask) {
4370 index = rightmost_index(timer_table, &thresh_mask);
4371 timer = timer_table->timers[index];
4372 BUG_ON(!timer);
4373 ath_print(common, ATH_DBG_HWTIMER,
4374 "TSF overflow for Gen timer %d\n", index);
4375 timer->overflow(timer->arg);
4376 }
4377
4378 while (trigger_mask) {
4379 index = rightmost_index(timer_table, &trigger_mask);
4380 timer = timer_table->timers[index];
4381 BUG_ON(!timer);
4382 ath_print(common, ATH_DBG_HWTIMER,
4383 "Gen timer[%d] trigger\n", index);
4384 timer->trigger(timer->arg);
4385 }
4386 }
4387 EXPORT_SYMBOL(ath_gen_timer_isr);
4388
4389 static struct {
4390 u32 version;
4391 const char *name;
4392 } ath_mac_bb_names[] = {
4393 /* Devices with external radios */
4394 { AR_SREV_VERSION_5416_PCI, "5416" },
4395 { AR_SREV_VERSION_5416_PCIE, "5418" },
4396 { AR_SREV_VERSION_9100, "9100" },
4397 { AR_SREV_VERSION_9160, "9160" },
4398 /* Single-chip solutions */
4399 { AR_SREV_VERSION_9280, "9280" },
4400 { AR_SREV_VERSION_9285, "9285" },
4401 { AR_SREV_VERSION_9287, "9287" },
4402 { AR_SREV_VERSION_9271, "9271" },
4403 };
4404
4405 /* For devices with external radios */
4406 static struct {
4407 u16 version;
4408 const char *name;
4409 } ath_rf_names[] = {
4410 { 0, "5133" },
4411 { AR_RAD5133_SREV_MAJOR, "5133" },
4412 { AR_RAD5122_SREV_MAJOR, "5122" },
4413 { AR_RAD2133_SREV_MAJOR, "2133" },
4414 { AR_RAD2122_SREV_MAJOR, "2122" }
4415 };
4416
4417 /*
4418 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
4419 */
4420 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
4421 {
4422 int i;
4423
4424 for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
4425 if (ath_mac_bb_names[i].version == mac_bb_version) {
4426 return ath_mac_bb_names[i].name;
4427 }
4428 }
4429
4430 return "????";
4431 }
4432
4433 /*
4434 * Return the RF name. "????" is returned if the RF is unknown.
4435 * Used for devices with external radios.
4436 */
4437 static const char *ath9k_hw_rf_name(u16 rf_version)
4438 {
4439 int i;
4440
4441 for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
4442 if (ath_rf_names[i].version == rf_version) {
4443 return ath_rf_names[i].name;
4444 }
4445 }
4446
4447 return "????";
4448 }
4449
4450 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
4451 {
4452 int used;
4453
4454 /* chipsets >= AR9280 are single-chip */
4455 if (AR_SREV_9280_10_OR_LATER(ah)) {
4456 used = snprintf(hw_name, len,
4457 "Atheros AR%s Rev:%x",
4458 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4459 ah->hw_version.macRev);
4460 } else {
4462 used = snprintf(hw_name, len,
4463 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
4464 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
4465 ah->hw_version.macRev,
4466 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
4467 AR_RADIO_SREV_MAJOR)),
4468 ah->hw_version.phyRev);
4469 }
4470
4471 hw_name[used] = '\0';
4472 }
4473 EXPORT_SYMBOL(ath9k_hw_name);