linux/fpc-iii.git: drivers/net/wireless/ath/ath9k/hw.c
1 /*
2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 #include <linux/io.h>
18 #include <asm/unaligned.h>
19 #include <linux/pci.h>
21 #include "ath9k.h"
22 #include "initvals.h"
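/*
 * Clock ticks per microsecond (i.e. MHz) for CCK and for OFDM on the
 * 2 GHz and 5 GHz bands, used by the usec/clock conversion helpers below.
 */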
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
30 enum ath9k_ht_macmode macmode);
31 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
32 struct ar5416_eeprom_def *pEepData,
33 u32 reg, u32 value);
34 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
35 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
37 /********************/
38 /* Helper Functions */
39 /********************/
41 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
43 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
45 if (!ah->curchan) /* should really check for CCK instead */
46 return clks / ATH9K_CLOCK_RATE_CCK;
47 if (conf->channel->band == IEEE80211_BAND_2GHZ)
48 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
50 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
53 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
55 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
57 if (conf_is_ht40(conf))
58 return ath9k_hw_mac_usec(ah, clks) / 2;
59 else
60 return ath9k_hw_mac_usec(ah, clks);
63 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
65 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
67 if (!ah->curchan) /* should really check for CCK instead */
68 return usecs *ATH9K_CLOCK_RATE_CCK;
69 if (conf->channel->band == IEEE80211_BAND_2GHZ)
70 return usecs *ATH9K_CLOCK_RATE_2GHZ_OFDM;
71 return usecs *ATH9K_CLOCK_RATE_5GHZ_OFDM;
74 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
76 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
78 if (conf_is_ht40(conf))
79 return ath9k_hw_mac_clks(ah, usecs) * 2;
80 else
81 return ath9k_hw_mac_clks(ah, usecs);
85 * Reads and writes share the same lock. We do this to serialize
86 * reads and writes on Atheros 802.11n PCI devices only. This is required
87 * as the FIFO on these devices can only sanely accept 2 requests. After
88 * that the device goes bananas. Serializing the reads/writes prevents this
89 * from happening.
92 void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
94 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
95 unsigned long flags;
96 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
97 iowrite32(val, ah->ah_sc->mem + reg_offset);
98 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
99 } else
100 iowrite32(val, ah->ah_sc->mem + reg_offset);
103 unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
105 u32 val;
106 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
107 unsigned long flags;
108 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
109 val = ioread32(ah->ah_sc->mem + reg_offset);
110 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
111 } else
112 val = ioread32(ah->ah_sc->mem + reg_offset);
113 return val;
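/*
 * Poll 'reg' until (value & mask) == val, sampling every AH_TIME_QUANTUM
 * microseconds for at most 'timeout' microseconds. Returns true once the
 * condition is met, false on timeout.
 */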
116 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
118 int i;
120 BUG_ON(timeout < AH_TIME_QUANTUM);
122 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
123 if ((REG_READ(ah, reg) & mask) == val)
124 return true;
126 udelay(AH_TIME_QUANTUM);
129 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
130 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
131 timeout, reg, REG_READ(ah, reg), mask, val);
133 return false;
136 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
138 u32 retval;
139 int i;
141 for (i = 0, retval = 0; i < n; i++) {
142 retval = (retval << 1) | (val & 1);
143 val >>= 1;
145 return retval;
148 bool ath9k_get_channel_edges(struct ath_hw *ah,
149 u16 flags, u16 *low,
150 u16 *high)
152 struct ath9k_hw_capabilities *pCap = &ah->caps;
154 if (flags & CHANNEL_5GHZ) {
155 *low = pCap->low_5ghz_chan;
156 *high = pCap->high_5ghz_chan;
157 return true;
159 if ((flags & CHANNEL_2GHZ)) {
160 *low = pCap->low_2ghz_chan;
161 *high = pCap->high_2ghz_chan;
162 return true;
164 return false;
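/*
 * Compute the transmit duration, in microseconds, of a frame of frameLen
 * bytes at rate index 'rateix' of 'rates', including SIFS and
 * preamble/PLCP overhead. Handles CCK and OFDM, scaling OFDM timing for
 * half- and quarter-rate channels.
 */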
167 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
168 const struct ath_rate_table *rates,
169 u32 frameLen, u16 rateix,
170 bool shortPreamble)
172 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
173 u32 kbps;
175 kbps = rates->info[rateix].ratekbps;
177 if (kbps == 0)
178 return 0;
180 switch (rates->info[rateix].phy) {
181 case WLAN_RC_PHY_CCK:
182 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
183 if (shortPreamble && rates->info[rateix].short_preamble)
184 phyTime >>= 1;
185 numBits = frameLen << 3;
186 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
187 break;
188 case WLAN_RC_PHY_OFDM:
189 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
190 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
191 numBits = OFDM_PLCP_BITS + (frameLen << 3);
192 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
193 txTime = OFDM_SIFS_TIME_QUARTER
194 + OFDM_PREAMBLE_TIME_QUARTER
195 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
196 } else if (ah->curchan &&
197 IS_CHAN_HALF_RATE(ah->curchan)) {
198 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
199 numBits = OFDM_PLCP_BITS + (frameLen << 3);
200 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
201 txTime = OFDM_SIFS_TIME_HALF +
202 OFDM_PREAMBLE_TIME_HALF
203 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
204 } else {
205 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
206 numBits = OFDM_PLCP_BITS + (frameLen << 3);
207 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
208 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
209 + (numSymbols * OFDM_SYMBOL_TIME);
211 break;
212 default:
213 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
214 "Unknown phy %u (rate ix %u)\n",
215 rates->info[rateix].phy, rateix);
216 txTime = 0;
217 break;
220 return txTime;
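/*
 * Fill 'centers' with the control, extension and synthesizer channel
 * centers for 'chan'. For non-HT40 channels all three equal the channel
 * frequency; for HT40 the synthesizer center is shifted by
 * HT40_CHANNEL_CENTER_SHIFT toward the extension channel.
 */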
223 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
224 struct ath9k_channel *chan,
225 struct chan_centers *centers)
227 int8_t extoff;
229 if (!IS_CHAN_HT40(chan)) {
230 centers->ctl_center = centers->ext_center =
231 centers->synth_center = chan->channel;
232 return;
235 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
236 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
237 centers->synth_center =
238 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
239 extoff = 1;
240 } else {
241 centers->synth_center =
242 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
243 extoff = -1;
246 centers->ctl_center =
247 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
248 centers->ext_center =
249 centers->synth_center + (extoff *
250 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
251 HT40_CHANNEL_CENTER_SHIFT : 15));
254 /******************/
255 /* Chip Revisions */
256 /******************/
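/*
 * Decode AR_SREV to determine the MAC version and revision and whether
 * the device is PCI-Express.
 */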
258 static void ath9k_hw_read_revisions(struct ath_hw *ah)
260 u32 val;
262 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
264 if (val == 0xFF) {
265 val = REG_READ(ah, AR_SREV);
266 ah->hw_version.macVersion =
267 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
268 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
269 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
270 } else {
271 if (!AR_SREV_9100(ah))
272 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
274 ah->hw_version.macRev = val & AR_SREV_REVISION;
276 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
277 ah->is_pciexpress = true;
281 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
283 u32 val;
284 int i;
286 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
288 for (i = 0; i < 8; i++)
289 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
290 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
291 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
293 return ath9k_hw_reverse_bits(val, 8);
296 /************************************/
297 /* HW Attach, Detach, Init Routines */
298 /************************************/
300 static void ath9k_hw_disablepcie(struct ath_hw *ah)
302 if (AR_SREV_9100(ah))
303 return;
305 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
306 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
307 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
308 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
309 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
310 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
311 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
312 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
313 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
315 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
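/*
 * Sanity-check register access: write walking and fixed test patterns to
 * one MAC and one baseband register, verify the read-back, then restore
 * the original register values.
 */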
318 static bool ath9k_hw_chip_test(struct ath_hw *ah)
320 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
321 u32 regHold[2];
322 u32 patternData[4] = { 0x55555555,
323 0xaaaaaaaa,
324 0x66666666,
325 0x99999999 };
326 int i, j;
328 for (i = 0; i < 2; i++) {
329 u32 addr = regAddr[i];
330 u32 wrData, rdData;
332 regHold[i] = REG_READ(ah, addr);
333 for (j = 0; j < 0x100; j++) {
334 wrData = (j << 16) | j;
335 REG_WRITE(ah, addr, wrData);
336 rdData = REG_READ(ah, addr);
337 if (rdData != wrData) {
338 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
339 "address test failed "
340 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
341 addr, wrData, rdData);
342 return false;
345 for (j = 0; j < 4; j++) {
346 wrData = patternData[j];
347 REG_WRITE(ah, addr, wrData);
348 rdData = REG_READ(ah, addr);
349 if (wrData != rdData) {
350 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
351 "address test failed "
352 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
353 addr, wrData, rdData);
354 return false;
357 REG_WRITE(ah, regAddr[i], regHold[i]);
359 udelay(100);
361 return true;
364 static const char *ath9k_hw_devname(u16 devid)
366 switch (devid) {
367 case AR5416_DEVID_PCI:
368 return "Atheros 5416";
369 case AR5416_DEVID_PCIE:
370 return "Atheros 5418";
371 case AR9160_DEVID_PCI:
372 return "Atheros 9160";
373 case AR5416_AR9100_DEVID:
374 return "Atheros 9100";
375 case AR9280_DEVID_PCI:
376 case AR9280_DEVID_PCIE:
377 return "Atheros 9280";
378 case AR9285_DEVID_PCIE:
379 return "Atheros 9285";
380 case AR5416_DEVID_AR9287_PCI:
381 case AR5416_DEVID_AR9287_PCIE:
382 return "Atheros 9287";
385 return NULL;
388 static void ath9k_hw_init_config(struct ath_hw *ah)
390 int i;
392 ah->config.dma_beacon_response_time = 2;
393 ah->config.sw_beacon_response_time = 10;
394 ah->config.additional_swba_backoff = 0;
395 ah->config.ack_6mb = 0x0;
396 ah->config.cwm_ignore_extcca = 0;
397 ah->config.pcie_powersave_enable = 0;
398 ah->config.pcie_clock_req = 0;
399 ah->config.pcie_waen = 0;
400 ah->config.analog_shiftreg = 1;
401 ah->config.ht_enable = 1;
402 ah->config.ofdm_trig_low = 200;
403 ah->config.ofdm_trig_high = 500;
404 ah->config.cck_trig_high = 200;
405 ah->config.cck_trig_low = 100;
406 ah->config.enable_ani = 1;
407 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
408 ah->config.antenna_switch_swap = 0;
410 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
411 ah->config.spurchans[i][0] = AR_NO_SPUR;
412 ah->config.spurchans[i][1] = AR_NO_SPUR;
415 ah->config.intr_mitigation = true;
418 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
419 * _and_ only on non-uniprocessor systems (Multiprocessor/HT).
420 * This means we use it for all AR5416 devices, and the few
421 * minor PCI AR9280 devices out there.
423 * Serialization is required because these devices do not handle
424 * well the case of two concurrent reads/writes due to the latency
425 * involved. During one read/write, another read/write can be issued
426 * on another CPU while the previous read/write may still be working
427 * on our hardware; if we hit this case the hardware gets stuck in a loop.
428 * We prevent this by serializing reads and writes.
430 * This issue is not present on PCI-Express devices or pre-AR5416
431 * devices (legacy, 802.11abg).
433 if (num_possible_cpus() > 1)
434 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
437 static void ath9k_hw_init_defaults(struct ath_hw *ah)
439 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
441 regulatory->country_code = CTRY_DEFAULT;
442 regulatory->power_limit = MAX_RATE_POWER;
443 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
445 ah->hw_version.magic = AR5416_MAGIC;
446 ah->hw_version.subvendorid = 0;
448 ah->ah_flags = 0;
449 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
450 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
451 if (!AR_SREV_9100(ah))
452 ah->ah_flags = AH_USE_EEPROM;
454 ah->atim_window = 0;
455 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
456 ah->beacon_interval = 100;
457 ah->enable_32kHz_clock = DONT_USE_32KHZ;
458 ah->slottime = (u32) -1;
459 ah->acktimeout = (u32) -1;
460 ah->ctstimeout = (u32) -1;
461 ah->globaltxtimeout = (u32) -1;
463 ah->gbeacon_rate = 0;
465 ah->power_mode = ATH9K_PM_UNDEFINED;
468 static int ath9k_hw_rfattach(struct ath_hw *ah)
470 bool rfStatus = false;
471 int ecode = 0;
473 rfStatus = ath9k_hw_init_rf(ah, &ecode);
474 if (!rfStatus) {
475 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
476 "RF setup failed, status: %u\n", ecode);
477 return ecode;
480 return 0;
483 static int ath9k_hw_rf_claim(struct ath_hw *ah)
485 u32 val;
487 REG_WRITE(ah, AR_PHY(0), 0x00000007);
489 val = ath9k_hw_get_radiorev(ah);
490 switch (val & AR_RADIO_SREV_MAJOR) {
491 case 0:
492 val = AR_RAD5133_SREV_MAJOR;
493 break;
494 case AR_RAD5133_SREV_MAJOR:
495 case AR_RAD5122_SREV_MAJOR:
496 case AR_RAD2133_SREV_MAJOR:
497 case AR_RAD2122_SREV_MAJOR:
498 break;
499 default:
500 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
501 "Radio Chip Rev 0x%02X not supported\n",
502 val & AR_RADIO_SREV_MAJOR);
503 return -EOPNOTSUPP;
506 ah->hw_version.analog5GhzRev = val;
508 return 0;
511 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
513 u32 sum;
514 int i;
515 u16 eeval;
517 sum = 0;
518 for (i = 0; i < 3; i++) {
519 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
520 sum += eeval;
521 ah->macaddr[2 * i] = eeval >> 8;
522 ah->macaddr[2 * i + 1] = eeval & 0xff;
524 if (sum == 0 || sum == 0xffff * 3)
525 return -EADDRNOTAVAIL;
527 return 0;
530 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
532 u32 rxgain_type;
534 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
535 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
537 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
538 INIT_INI_ARRAY(&ah->iniModesRxGain,
539 ar9280Modes_backoff_13db_rxgain_9280_2,
540 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
541 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
542 INIT_INI_ARRAY(&ah->iniModesRxGain,
543 ar9280Modes_backoff_23db_rxgain_9280_2,
544 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
545 else
546 INIT_INI_ARRAY(&ah->iniModesRxGain,
547 ar9280Modes_original_rxgain_9280_2,
548 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
549 } else {
550 INIT_INI_ARRAY(&ah->iniModesRxGain,
551 ar9280Modes_original_rxgain_9280_2,
552 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
556 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
558 u32 txgain_type;
560 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
561 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
563 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
564 INIT_INI_ARRAY(&ah->iniModesTxGain,
565 ar9280Modes_high_power_tx_gain_9280_2,
566 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
567 else
568 INIT_INI_ARRAY(&ah->iniModesTxGain,
569 ar9280Modes_original_tx_gain_9280_2,
570 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
571 } else {
572 INIT_INI_ARRAY(&ah->iniModesTxGain,
573 ar9280Modes_original_tx_gain_9280_2,
574 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
578 static int ath9k_hw_post_init(struct ath_hw *ah)
580 int ecode;
582 if (!ath9k_hw_chip_test(ah))
583 return -ENODEV;
585 ecode = ath9k_hw_rf_claim(ah);
586 if (ecode != 0)
587 return ecode;
589 ecode = ath9k_hw_eeprom_init(ah);
590 if (ecode != 0)
591 return ecode;
593 DPRINTF(ah->ah_sc, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
594 ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
596 ecode = ath9k_hw_rfattach(ah);
597 if (ecode != 0)
598 return ecode;
600 if (!AR_SREV_9100(ah)) {
601 ath9k_hw_ani_setup(ah);
602 ath9k_hw_ani_init(ah);
605 return 0;
608 static bool ath9k_hw_devid_supported(u16 devid)
610 switch (devid) {
611 case AR5416_DEVID_PCI:
612 case AR5416_DEVID_PCIE:
613 case AR5416_AR9100_DEVID:
614 case AR9160_DEVID_PCI:
615 case AR9280_DEVID_PCI:
616 case AR9280_DEVID_PCIE:
617 case AR9285_DEVID_PCIE:
618 case AR5416_DEVID_AR9287_PCI:
619 case AR5416_DEVID_AR9287_PCIE:
620 return true;
621 default:
622 break;
624 return false;
627 static bool ath9k_hw_macversion_supported(u32 macversion)
629 switch (macversion) {
630 case AR_SREV_VERSION_5416_PCI:
631 case AR_SREV_VERSION_5416_PCIE:
632 case AR_SREV_VERSION_9160:
633 case AR_SREV_VERSION_9100:
634 case AR_SREV_VERSION_9280:
635 case AR_SREV_VERSION_9285:
636 case AR_SREV_VERSION_9287:
637 return true;
638 /* Not yet */
639 case AR_SREV_VERSION_9271:
640 default:
641 break;
643 return false;
646 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
648 if (AR_SREV_9160_10_OR_LATER(ah)) {
649 if (AR_SREV_9280_10_OR_LATER(ah)) {
650 ah->iq_caldata.calData = &iq_cal_single_sample;
651 ah->adcgain_caldata.calData =
652 &adc_gain_cal_single_sample;
653 ah->adcdc_caldata.calData =
654 &adc_dc_cal_single_sample;
655 ah->adcdc_calinitdata.calData =
656 &adc_init_dc_cal;
657 } else {
658 ah->iq_caldata.calData = &iq_cal_multi_sample;
659 ah->adcgain_caldata.calData =
660 &adc_gain_cal_multi_sample;
661 ah->adcdc_caldata.calData =
662 &adc_dc_cal_multi_sample;
663 ah->adcdc_calinitdata.calData =
664 &adc_init_dc_cal;
666 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
670 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
672 if (AR_SREV_9271(ah)) {
673 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
674 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
675 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
676 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
677 return;
680 if (AR_SREV_9287_11_OR_LATER(ah)) {
681 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
682 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
683 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
684 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
685 if (ah->config.pcie_clock_req)
686 INIT_INI_ARRAY(&ah->iniPcieSerdes,
687 ar9287PciePhy_clkreq_off_L1_9287_1_1,
688 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
689 else
690 INIT_INI_ARRAY(&ah->iniPcieSerdes,
691 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
692 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
694 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
695 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
696 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
697 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
698 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
700 if (ah->config.pcie_clock_req)
701 INIT_INI_ARRAY(&ah->iniPcieSerdes,
702 ar9287PciePhy_clkreq_off_L1_9287_1_0,
703 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
704 else
705 INIT_INI_ARRAY(&ah->iniPcieSerdes,
706 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
707 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
709 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
712 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
713 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
714 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
715 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
717 if (ah->config.pcie_clock_req) {
718 INIT_INI_ARRAY(&ah->iniPcieSerdes,
719 ar9285PciePhy_clkreq_off_L1_9285_1_2,
720 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
721 } else {
722 INIT_INI_ARRAY(&ah->iniPcieSerdes,
723 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
724 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
727 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
728 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
729 ARRAY_SIZE(ar9285Modes_9285), 6);
730 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
731 ARRAY_SIZE(ar9285Common_9285), 2);
733 if (ah->config.pcie_clock_req) {
734 INIT_INI_ARRAY(&ah->iniPcieSerdes,
735 ar9285PciePhy_clkreq_off_L1_9285,
736 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
737 } else {
738 INIT_INI_ARRAY(&ah->iniPcieSerdes,
739 ar9285PciePhy_clkreq_always_on_L1_9285,
740 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
742 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
743 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
744 ARRAY_SIZE(ar9280Modes_9280_2), 6);
745 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
746 ARRAY_SIZE(ar9280Common_9280_2), 2);
748 if (ah->config.pcie_clock_req) {
749 INIT_INI_ARRAY(&ah->iniPcieSerdes,
750 ar9280PciePhy_clkreq_off_L1_9280,
751 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
752 } else {
753 INIT_INI_ARRAY(&ah->iniPcieSerdes,
754 ar9280PciePhy_clkreq_always_on_L1_9280,
755 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
757 INIT_INI_ARRAY(&ah->iniModesAdditional,
758 ar9280Modes_fast_clock_9280_2,
759 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
760 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
761 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
762 ARRAY_SIZE(ar9280Modes_9280), 6);
763 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
764 ARRAY_SIZE(ar9280Common_9280), 2);
765 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
766 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
767 ARRAY_SIZE(ar5416Modes_9160), 6);
768 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
769 ARRAY_SIZE(ar5416Common_9160), 2);
770 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
771 ARRAY_SIZE(ar5416Bank0_9160), 2);
772 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
773 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
774 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
775 ARRAY_SIZE(ar5416Bank1_9160), 2);
776 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
777 ARRAY_SIZE(ar5416Bank2_9160), 2);
778 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
779 ARRAY_SIZE(ar5416Bank3_9160), 3);
780 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
781 ARRAY_SIZE(ar5416Bank6_9160), 3);
782 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
783 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
784 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
785 ARRAY_SIZE(ar5416Bank7_9160), 2);
786 if (AR_SREV_9160_11(ah)) {
787 INIT_INI_ARRAY(&ah->iniAddac,
788 ar5416Addac_91601_1,
789 ARRAY_SIZE(ar5416Addac_91601_1), 2);
790 } else {
791 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
792 ARRAY_SIZE(ar5416Addac_9160), 2);
794 } else if (AR_SREV_9100_OR_LATER(ah)) {
795 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
796 ARRAY_SIZE(ar5416Modes_9100), 6);
797 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
798 ARRAY_SIZE(ar5416Common_9100), 2);
799 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
800 ARRAY_SIZE(ar5416Bank0_9100), 2);
801 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
802 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
803 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
804 ARRAY_SIZE(ar5416Bank1_9100), 2);
805 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
806 ARRAY_SIZE(ar5416Bank2_9100), 2);
807 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
808 ARRAY_SIZE(ar5416Bank3_9100), 3);
809 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
810 ARRAY_SIZE(ar5416Bank6_9100), 3);
811 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
812 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
813 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
814 ARRAY_SIZE(ar5416Bank7_9100), 2);
815 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
816 ARRAY_SIZE(ar5416Addac_9100), 2);
817 } else {
818 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
819 ARRAY_SIZE(ar5416Modes), 6);
820 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
821 ARRAY_SIZE(ar5416Common), 2);
822 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
823 ARRAY_SIZE(ar5416Bank0), 2);
824 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
825 ARRAY_SIZE(ar5416BB_RfGain), 3);
826 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
827 ARRAY_SIZE(ar5416Bank1), 2);
828 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
829 ARRAY_SIZE(ar5416Bank2), 2);
830 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
831 ARRAY_SIZE(ar5416Bank3), 3);
832 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
833 ARRAY_SIZE(ar5416Bank6), 3);
834 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
835 ARRAY_SIZE(ar5416Bank6TPC), 3);
836 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
837 ARRAY_SIZE(ar5416Bank7), 2);
838 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
839 ARRAY_SIZE(ar5416Addac), 2);
843 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
845 if (AR_SREV_9287_11(ah))
846 INIT_INI_ARRAY(&ah->iniModesRxGain,
847 ar9287Modes_rx_gain_9287_1_1,
848 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
849 else if (AR_SREV_9287_10(ah))
850 INIT_INI_ARRAY(&ah->iniModesRxGain,
851 ar9287Modes_rx_gain_9287_1_0,
852 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
853 else if (AR_SREV_9280_20(ah))
854 ath9k_hw_init_rxgain_ini(ah);
856 if (AR_SREV_9287_11(ah)) {
857 INIT_INI_ARRAY(&ah->iniModesTxGain,
858 ar9287Modes_tx_gain_9287_1_1,
859 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
860 } else if (AR_SREV_9287_10(ah)) {
861 INIT_INI_ARRAY(&ah->iniModesTxGain,
862 ar9287Modes_tx_gain_9287_1_0,
863 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
864 } else if (AR_SREV_9280_20(ah)) {
865 ath9k_hw_init_txgain_ini(ah);
866 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
867 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
869 /* txgain table */
870 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
871 INIT_INI_ARRAY(&ah->iniModesTxGain,
872 ar9285Modes_high_power_tx_gain_9285_1_2,
873 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
874 } else {
875 INIT_INI_ARRAY(&ah->iniModesTxGain,
876 ar9285Modes_original_tx_gain_9285_1_2,
877 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
883 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
885 u32 i, j;
887 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
888 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
890 /* EEPROM Fixup */
891 for (i = 0; i < ah->iniModes.ia_rows; i++) {
892 u32 reg = INI_RA(&ah->iniModes, i, 0);
894 for (j = 1; j < ah->iniModes.ia_columns; j++) {
895 u32 val = INI_RA(&ah->iniModes, i, j);
897 INI_RA(&ah->iniModes, i, j) =
898 ath9k_hw_ini_fixup(ah,
899 &ah->eeprom.def,
900 reg, val);
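/*
 * Attach-time initialization: verify the device ID and MAC revision,
 * apply a power-on reset, wake the chip, select the register
 * serialization mode, load the INI tables and EEPROM-derived settings,
 * and read the MAC address.
 */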
906 int ath9k_hw_init(struct ath_hw *ah)
908 int r = 0;
910 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
911 return -EOPNOTSUPP;
913 ath9k_hw_init_defaults(ah);
914 ath9k_hw_init_config(ah);
916 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
917 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't reset chip\n");
918 return -EIO;
921 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
922 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
923 return -EIO;
926 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
927 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
928 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
929 ah->config.serialize_regmode =
930 SER_REG_MODE_ON;
931 } else {
932 ah->config.serialize_regmode =
933 SER_REG_MODE_OFF;
937 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "serialize_regmode is %d\n",
938 ah->config.serialize_regmode);
940 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
941 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
942 "Mac Chip Rev 0x%02x.%x is not supported by "
943 "this driver\n", ah->hw_version.macVersion,
944 ah->hw_version.macRev);
945 return -EOPNOTSUPP;
948 if (AR_SREV_9100(ah)) {
949 ah->iq_caldata.calData = &iq_cal_multi_sample;
950 ah->supp_cals = IQ_MISMATCH_CAL;
951 ah->is_pciexpress = false;
954 if (AR_SREV_9271(ah))
955 ah->is_pciexpress = false;
957 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
959 ath9k_hw_init_cal_settings(ah);
961 ah->ani_function = ATH9K_ANI_ALL;
962 if (AR_SREV_9280_10_OR_LATER(ah))
963 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
965 ath9k_hw_init_mode_regs(ah);
967 if (ah->is_pciexpress)
968 ath9k_hw_configpcipowersave(ah, 0);
969 else
970 ath9k_hw_disablepcie(ah);
972 r = ath9k_hw_post_init(ah);
973 if (r)
974 return r;
976 ath9k_hw_init_mode_gain_regs(ah);
977 ath9k_hw_fill_cap_info(ah);
978 ath9k_hw_init_11a_eeprom_fix(ah);
980 r = ath9k_hw_init_macaddr(ah);
981 if (r) {
982 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
983 "Failed to initialize MAC address\n");
984 return r;
987 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
988 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
989 else
990 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
992 ath9k_init_nfcal_hist_buffer(ah);
994 return 0;
997 static void ath9k_hw_init_bb(struct ath_hw *ah,
998 struct ath9k_channel *chan)
1000 u32 synthDelay;
1002 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1003 if (IS_CHAN_B(chan))
1004 synthDelay = (4 * synthDelay) / 22;
1005 else
1006 synthDelay /= 10;
1008 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1010 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1013 static void ath9k_hw_init_qos(struct ath_hw *ah)
1015 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1016 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1018 REG_WRITE(ah, AR_QOS_NO_ACK,
1019 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1020 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1021 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1023 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1024 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1025 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1026 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1027 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
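/*
 * Program AR_RTC_PLL_CONTROL for the chip family and target channel
 * (2 GHz vs 5 GHz, half/quarter-rate), wait RTC_PLL_SETTLE_DELAY for the
 * PLL to settle, then force the RTC sleep clock to the derived clock.
 */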
1030 static void ath9k_hw_init_pll(struct ath_hw *ah,
1031 struct ath9k_channel *chan)
1033 u32 pll;
1035 if (AR_SREV_9100(ah)) {
1036 if (chan && IS_CHAN_5GHZ(chan))
1037 pll = 0x1450;
1038 else
1039 pll = 0x1458;
1040 } else {
1041 if (AR_SREV_9280_10_OR_LATER(ah)) {
1042 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1044 if (chan && IS_CHAN_HALF_RATE(chan))
1045 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1046 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1047 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1049 if (chan && IS_CHAN_5GHZ(chan)) {
1050 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1053 if (AR_SREV_9280_20(ah)) {
1054 if (((chan->channel % 20) == 0)
1055 || ((chan->channel % 10) == 0))
1056 pll = 0x2850;
1057 else
1058 pll = 0x142c;
1060 } else {
1061 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1064 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1066 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1068 if (chan && IS_CHAN_HALF_RATE(chan))
1069 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1070 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1071 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1073 if (chan && IS_CHAN_5GHZ(chan))
1074 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1075 else
1076 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1077 } else {
1078 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1080 if (chan && IS_CHAN_HALF_RATE(chan))
1081 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1082 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1083 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1085 if (chan && IS_CHAN_5GHZ(chan))
1086 pll |= SM(0xa, AR_RTC_PLL_DIV);
1087 else
1088 pll |= SM(0xb, AR_RTC_PLL_DIV);
1091 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1093 udelay(RTC_PLL_SETTLE_DELAY);
1095 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
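/*
 * Program the RX and TX chain masks, swapping the analog chains where a
 * chainmask of 0x5 requires it, with a workaround that forces all three
 * RX chains on AR9160-and-earlier parts when a 0x3 mask is requested.
 */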
1098 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1100 int rx_chainmask, tx_chainmask;
1102 rx_chainmask = ah->rxchainmask;
1103 tx_chainmask = ah->txchainmask;
1105 switch (rx_chainmask) {
1106 case 0x5:
1107 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1108 AR_PHY_SWAP_ALT_CHAIN);
1109 case 0x3:
1110 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1111 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1112 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1113 break;
1115 case 0x1:
1116 case 0x2:
1117 case 0x7:
1118 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1119 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1120 break;
1121 default:
1122 break;
1125 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1126 if (tx_chainmask == 0x5) {
1127 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1128 AR_PHY_SWAP_ALT_CHAIN);
1130 if (AR_SREV_9100(ah))
1131 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1132 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
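/*
 * Build the primary interrupt mask (TX/RX error, TX OK, beacon-misc, and
 * either mitigated or per-frame RX interrupts), program AR_IMR and the
 * GTT secondary mask, and set up the sync interrupt registers on
 * non-AR9100 parts.
 */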
1135 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1136 enum nl80211_iftype opmode)
1138 ah->mask_reg = AR_IMR_TXERR |
1139 AR_IMR_TXURN |
1140 AR_IMR_RXERR |
1141 AR_IMR_RXORN |
1142 AR_IMR_BCNMISC;
1144 if (ah->config.intr_mitigation)
1145 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1146 else
1147 ah->mask_reg |= AR_IMR_RXOK;
1149 ah->mask_reg |= AR_IMR_TXOK;
1151 if (opmode == NL80211_IFTYPE_AP)
1152 ah->mask_reg |= AR_IMR_MIB;
1154 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1155 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1157 if (!AR_SREV_9100(ah)) {
1158 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1159 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1160 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1164 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1166 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1167 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1168 ah->acktimeout = (u32) -1;
1169 return false;
1170 } else {
1171 REG_RMW_FIELD(ah, AR_TIME_OUT,
1172 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1173 ah->acktimeout = us;
1174 return true;
1178 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1180 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1181 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1182 ah->ctstimeout = (u32) -1;
1183 return false;
1184 } else {
1185 REG_RMW_FIELD(ah, AR_TIME_OUT,
1186 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1187 ah->ctstimeout = us;
1188 return true;
1192 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1194 if (tu > 0xFFFF) {
1195 DPRINTF(ah->ah_sc, ATH_DBG_XMIT,
1196 "bad global tx timeout %u\n", tu);
1197 ah->globaltxtimeout = (u32) -1;
1198 return false;
1199 } else {
1200 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1201 ah->globaltxtimeout = tu;
1202 return true;
1206 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1208 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1209 ah->misc_mode);
1211 if (ah->misc_mode != 0)
1212 REG_WRITE(ah, AR_PCU_MISC,
1213 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1214 if (ah->slottime != (u32) -1)
1215 ath9k_hw_setslottime(ah, ah->slottime);
1216 if (ah->acktimeout != (u32) -1)
1217 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1218 if (ah->ctstimeout != (u32) -1)
1219 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1220 if (ah->globaltxtimeout != (u32) -1)
1221 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1224 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1226 return vendorid == ATHEROS_VENDOR_ID ?
1227 ath9k_hw_devname(devid) : NULL;
1230 void ath9k_hw_detach(struct ath_hw *ah)
1232 if (!AR_SREV_9100(ah))
1233 ath9k_hw_ani_disable(ah);
1235 ath9k_hw_rf_free(ah);
1236 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1237 kfree(ah);
1238 ah = NULL;
1241 /*******/
1242 /* INI */
1243 /*******/
1245 static void ath9k_hw_override_ini(struct ath_hw *ah,
1246 struct ath9k_channel *chan)
1248 u32 val;
1250 if (AR_SREV_9271(ah)) {
1252 * Enable spectral scan as a workaround for issues with stuck
1253 * beacons on AR9271 1.0. The stuck-beacon issue is not seen on
1254 * AR9271 1.1.
1256 if (AR_SREV_9271_10(ah)) {
1257 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1258 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1260 else if (AR_SREV_9271_11(ah))
1262 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1263 * present on AR9271 1.1
1265 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1266 return;
1270 * Set RX_ABORT and RX_DIS, and clear them only after
1271 * RXE is set for the MAC. This prevents frames with corrupted
1272 * descriptor status.
1274 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1277 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1278 AR_SREV_9280_10_OR_LATER(ah))
1279 return;
1281 * Disable BB clock gating
1282 * Necessary to avoid issues on AR5416 2.0
1284 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1287 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1288 struct ar5416_eeprom_def *pEepData,
1289 u32 reg, u32 value)
1291 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1293 switch (ah->hw_version.devid) {
1294 case AR9280_DEVID_PCI:
1295 if (reg == 0x7894) {
1296 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1297 "ini VAL: %x EEPROM: %x\n", value,
1298 (pBase->version & 0xff));
1300 if ((pBase->version & 0xff) > 0x0a) {
1301 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1302 "PWDCLKIND: %d\n",
1303 pBase->pwdclkind);
1304 value &= ~AR_AN_TOP2_PWDCLKIND;
1305 value |= AR_AN_TOP2_PWDCLKIND &
1306 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1307 } else {
1308 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1309 "PWDCLKIND Earlier Rev\n");
1312 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1313 "final ini VAL: %x\n", value);
1315 break;
1318 return value;
1321 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1322 struct ar5416_eeprom_def *pEepData,
1323 u32 reg, u32 value)
1325 if (ah->eep_map == EEP_MAP_4KBITS)
1326 return value;
1327 else
1328 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1331 static void ath9k_olc_init(struct ath_hw *ah)
1333 u32 i;
1335 if (OLC_FOR_AR9287_10_LATER) {
1336 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1337 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1338 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1339 AR9287_AN_TXPC0_TXPCMODE,
1340 AR9287_AN_TXPC0_TXPCMODE_S,
1341 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1342 udelay(100);
1343 } else {
1344 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1345 ah->originalGain[i] =
1346 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1347 AR_PHY_TX_GAIN);
1348 ah->PDADCdelta = 0;
1352 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1353 struct ath9k_channel *chan)
1355 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1357 if (IS_CHAN_B(chan))
1358 ctl |= CTL_11B;
1359 else if (IS_CHAN_G(chan))
1360 ctl |= CTL_11G;
1361 else
1362 ctl |= CTL_11A;
1364 return ctl;
1367 static int ath9k_hw_process_ini(struct ath_hw *ah,
1368 struct ath9k_channel *chan,
1369 enum ath9k_ht_macmode macmode)
1371 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1372 int i, regWrites = 0;
1373 struct ieee80211_channel *channel = chan->chan;
1374 u32 modesIndex, freqIndex;
1376 switch (chan->chanmode) {
1377 case CHANNEL_A:
1378 case CHANNEL_A_HT20:
1379 modesIndex = 1;
1380 freqIndex = 1;
1381 break;
1382 case CHANNEL_A_HT40PLUS:
1383 case CHANNEL_A_HT40MINUS:
1384 modesIndex = 2;
1385 freqIndex = 1;
1386 break;
1387 case CHANNEL_G:
1388 case CHANNEL_G_HT20:
1389 case CHANNEL_B:
1390 modesIndex = 4;
1391 freqIndex = 2;
1392 break;
1393 case CHANNEL_G_HT40PLUS:
1394 case CHANNEL_G_HT40MINUS:
1395 modesIndex = 3;
1396 freqIndex = 2;
1397 break;
1399 default:
1400 return -EINVAL;
1403 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1404 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1405 ah->eep_ops->set_addac(ah, chan);
1407 if (AR_SREV_5416_22_OR_LATER(ah)) {
1408 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1409 } else {
1410 struct ar5416IniArray temp;
1411 u32 addacSize =
1412 sizeof(u32) * ah->iniAddac.ia_rows *
1413 ah->iniAddac.ia_columns;
1415 memcpy(ah->addac5416_21,
1416 ah->iniAddac.ia_array, addacSize);
1418 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1420 temp.ia_array = ah->addac5416_21;
1421 temp.ia_columns = ah->iniAddac.ia_columns;
1422 temp.ia_rows = ah->iniAddac.ia_rows;
1423 REG_WRITE_ARRAY(&temp, 1, regWrites);
1426 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1428 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1429 u32 reg = INI_RA(&ah->iniModes, i, 0);
1430 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1432 REG_WRITE(ah, reg, val);
1434 if (reg >= 0x7800 && reg < 0x78a0
1435 && ah->config.analog_shiftreg) {
1436 udelay(100);
1439 DO_DELAY(regWrites);
1442 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1443 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1445 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1446 AR_SREV_9287_10_OR_LATER(ah))
1447 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1449 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1450 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1451 u32 val = INI_RA(&ah->iniCommon, i, 1);
1453 REG_WRITE(ah, reg, val);
1455 if (reg >= 0x7800 && reg < 0x78a0
1456 && ah->config.analog_shiftreg) {
1457 udelay(100);
1460 DO_DELAY(regWrites);
1463 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1465 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1466 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1467 regWrites);
1470 ath9k_hw_override_ini(ah, chan);
1471 ath9k_hw_set_regs(ah, chan, macmode);
1472 ath9k_hw_init_chain_masks(ah);
1474 if (OLC_FOR_AR9280_20_LATER)
1475 ath9k_olc_init(ah);
1477 ah->eep_ops->set_txpower(ah, chan,
1478 ath9k_regd_get_ctl(regulatory, chan),
1479 channel->max_antenna_gain * 2,
1480 channel->max_power * 2,
1481 min((u32) MAX_RATE_POWER,
1482 (u32) regulatory->power_limit));
1484 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1485 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1486 "ar5416SetRfRegs failed\n");
1487 return -EIO;
1490 return 0;
1493 /****************************************/
1494 /* Reset and Channel Switching Routines */
1495 /****************************************/
1497 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1499 u32 rfMode = 0;
1501 if (chan == NULL)
1502 return;
1504 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1505 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1507 if (!AR_SREV_9280_10_OR_LATER(ah))
1508 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1509 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1511 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1512 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1514 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1517 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1519 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1522 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1524 u32 regval;
1527 * set AHB_MODE not to do cacheline prefetches
1529 regval = REG_READ(ah, AR_AHB_MODE);
1530 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1533 * let mac dma reads be in 128 byte chunks
1535 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1536 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1539 * Restore TX Trigger Level to its pre-reset value.
1540 * The initial value depends on whether aggregation is enabled, and is
1541 * adjusted whenever underruns are detected.
1543 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1546 * let mac dma writes be in 128 byte chunks
1548 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1549 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1552 * Setup receive FIFO threshold to hold off TX activities
1554 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1557 * reduce the number of usable entries in PCU TXBUF to avoid
1558 * wrap around issues.
1560 if (AR_SREV_9285(ah)) {
1561 /* For AR9285 the number of FIFOs is reduced to half,
1562 * so set the usable tx buffer size to half as well to
1563 * avoid data/delimiter underruns.
1565 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1566 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1567 } else if (!AR_SREV_9271(ah)) {
1568 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1569 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1573 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1575 u32 val;
1577 val = REG_READ(ah, AR_STA_ID1);
1578 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1579 switch (opmode) {
1580 case NL80211_IFTYPE_AP:
1581 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1582 | AR_STA_ID1_KSRCH_MODE);
1583 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1584 break;
1585 case NL80211_IFTYPE_ADHOC:
1586 case NL80211_IFTYPE_MESH_POINT:
1587 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1588 | AR_STA_ID1_KSRCH_MODE);
1589 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1590 break;
1591 case NL80211_IFTYPE_STATION:
1592 case NL80211_IFTYPE_MONITOR:
1593 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1594 break;
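/*
 * Split a scaled delta-slope coefficient into the mantissa/exponent pair
 * expected by the baseband timing registers; ath9k_hw_set_delta_slope()
 * below derives the coefficient from the channel's synthesizer center
 * frequency.
 */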
1598 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1599 u32 coef_scaled,
1600 u32 *coef_mantissa,
1601 u32 *coef_exponent)
1603 u32 coef_exp, coef_man;
1605 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1606 if ((coef_scaled >> coef_exp) & 0x1)
1607 break;
1609 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1611 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1613 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1614 *coef_exponent = coef_exp - 16;
1617 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1618 struct ath9k_channel *chan)
1620 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1621 u32 clockMhzScaled = 0x64000000;
1622 struct chan_centers centers;
1624 if (IS_CHAN_HALF_RATE(chan))
1625 clockMhzScaled = clockMhzScaled >> 1;
1626 else if (IS_CHAN_QUARTER_RATE(chan))
1627 clockMhzScaled = clockMhzScaled >> 2;
1629 ath9k_hw_get_channel_centers(ah, chan, &centers);
1630 coef_scaled = clockMhzScaled / centers.synth_center;
1632 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1633 &ds_coef_exp);
1635 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1636 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1637 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1638 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1640 coef_scaled = (9 * coef_scaled) / 10;
1642 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1643 &ds_coef_exp);
1645 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1646 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1647 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1648 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
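/*
 * Perform a warm or cold MAC reset through AR_RTC_RC, then reinitialize
 * the PLL. Returns false if the RTC does not come out of reset in time.
 */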
1651 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1653 u32 rst_flags;
1654 u32 tmpReg;
1656 if (AR_SREV_9100(ah)) {
1657 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1658 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1659 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1660 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1661 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1664 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1665 AR_RTC_FORCE_WAKE_ON_INT);
1667 if (AR_SREV_9100(ah)) {
1668 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1669 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1670 } else {
1671 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1672 if (tmpReg &
1673 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1674 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1675 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1676 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1677 } else {
1678 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1681 rst_flags = AR_RTC_RC_MAC_WARM;
1682 if (type == ATH9K_RESET_COLD)
1683 rst_flags |= AR_RTC_RC_MAC_COLD;
1686 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1687 udelay(50);
1689 REG_WRITE(ah, AR_RTC_RC, 0);
1690 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1691 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
1692 "RTC stuck in MAC reset\n");
1693 return false;
1696 if (!AR_SREV_9100(ah))
1697 REG_WRITE(ah, AR_RC, 0);
1699 ath9k_hw_init_pll(ah, NULL);
1701 if (AR_SREV_9100(ah))
1702 udelay(50);
1704 return true;
1707 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1709 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1710 AR_RTC_FORCE_WAKE_ON_INT);
1712 if (!AR_SREV_9100(ah))
1713 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1715 REG_WRITE(ah, AR_RTC_RESET, 0);
1716 udelay(2);
1718 if (!AR_SREV_9100(ah))
1719 REG_WRITE(ah, AR_RC, 0);
1721 REG_WRITE(ah, AR_RTC_RESET, 1);
1723 if (!ath9k_hw_wait(ah,
1724 AR_RTC_STATUS,
1725 AR_RTC_STATUS_M,
1726 AR_RTC_STATUS_ON,
1727 AH_WAIT_TIMEOUT)) {
1728 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "RTC not waking up\n");
1729 return false;
1732 ath9k_hw_read_revisions(ah);
1734 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1737 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1739 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1740 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1742 switch (type) {
1743 case ATH9K_RESET_POWER_ON:
1744 return ath9k_hw_set_reset_power_on(ah);
1745 case ATH9K_RESET_WARM:
1746 case ATH9K_RESET_COLD:
1747 return ath9k_hw_set_reset(ah, type);
1748 default:
1749 return false;
1753 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1754 enum ath9k_ht_macmode macmode)
1756 u32 phymode;
1757 u32 enableDacFifo = 0;
1759 if (AR_SREV_9285_10_OR_LATER(ah))
1760 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1761 AR_PHY_FC_ENABLE_DAC_FIFO);
1763 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1764 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1766 if (IS_CHAN_HT40(chan)) {
1767 phymode |= AR_PHY_FC_DYN2040_EN;
1769 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1770 (chan->chanmode == CHANNEL_G_HT40PLUS))
1771 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1773 if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1774 phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1776 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1778 ath9k_hw_set11nmac2040(ah, macmode);
1780 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1781 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1784 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1785 struct ath9k_channel *chan)
1787 if (OLC_FOR_AR9280_20_LATER) {
1788 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1789 return false;
1790 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1791 return false;
1793 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1794 return false;
1796 ah->chip_fullsleep = false;
1797 ath9k_hw_init_pll(ah, chan);
1798 ath9k_hw_set_rfmode(ah, chan);
1800 return true;
1803 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1804 struct ath9k_channel *chan,
1805 enum ath9k_ht_macmode macmode)
1807 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1808 struct ieee80211_channel *channel = chan->chan;
1809 u32 synthDelay, qnum;
1811 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1812 if (ath9k_hw_numtxpending(ah, qnum)) {
1813 DPRINTF(ah->ah_sc, ATH_DBG_QUEUE,
1814 "Transmit frames pending on queue %d\n", qnum);
1815 return false;
1819 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1820 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1821 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1822 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1823 "Could not kill baseband RX\n");
1824 return false;
1827 ath9k_hw_set_regs(ah, chan, macmode);
1829 if (AR_SREV_9280_10_OR_LATER(ah)) {
1830 ath9k_hw_ar9280_set_channel(ah, chan);
1831 } else {
1832 if (!(ath9k_hw_set_channel(ah, chan))) {
1833 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1834 "Failed to set channel\n");
1835 return false;
1839 ah->eep_ops->set_txpower(ah, chan,
1840 ath9k_regd_get_ctl(regulatory, chan),
1841 channel->max_antenna_gain * 2,
1842 channel->max_power * 2,
1843 min((u32) MAX_RATE_POWER,
1844 (u32) regulatory->power_limit));
1846 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1847 if (IS_CHAN_B(chan))
1848 synthDelay = (4 * synthDelay) / 22;
1849 else
1850 synthDelay /= 10;
1852 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1854 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1856 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1857 ath9k_hw_set_delta_slope(ah, chan);
1859 if (AR_SREV_9280_10_OR_LATER(ah))
1860 ath9k_hw_9280_spur_mitigate(ah, chan);
1861 else
1862 ath9k_hw_spur_mitigate(ah, chan);
1864 if (!chan->oneTimeCalsDone)
1865 chan->oneTimeCalsDone = true;
1867 return true;
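/*
 * AR9280 spur mitigation: scan the EEPROM spur channels for a spur close
 * to the operating channel and, if one is found, program the spur RSSI
 * thresholds plus the pilot, channel and Viterbi bin masks so the
 * baseband can notch it out.
 */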
1870 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1872 int bb_spur = AR_NO_SPUR;
1873 int freq;
1874 int bin, cur_bin;
1875 int bb_spur_off, spur_subchannel_sd;
1876 int spur_freq_sd;
1877 int spur_delta_phase;
1878 int denominator;
1879 int upper, lower, cur_vit_mask;
1880 int tmp, newVal;
1881 int i;
1882 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1883 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1885 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1886 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1888 int inc[4] = { 0, 100, 0, 0 };
1889 struct chan_centers centers;
1891 int8_t mask_m[123];
1892 int8_t mask_p[123];
1893 int8_t mask_amt;
1894 int tmp_mask;
1895 int cur_bb_spur;
1896 bool is2GHz = IS_CHAN_2GHZ(chan);
1898 memset(&mask_m, 0, sizeof(int8_t) * 123);
1899 memset(&mask_p, 0, sizeof(int8_t) * 123);
1901 ath9k_hw_get_channel_centers(ah, chan, &centers);
1902 freq = centers.synth_center;
1904 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1905 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1906 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1908 if (is2GHz)
1909 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1910 else
1911 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1913 if (AR_NO_SPUR == cur_bb_spur)
1914 break;
1915 cur_bb_spur = cur_bb_spur - freq;
1917 if (IS_CHAN_HT40(chan)) {
1918 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1919 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1920 bb_spur = cur_bb_spur;
1921 break;
1923 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1924 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1925 bb_spur = cur_bb_spur;
1926 break;
1930 if (AR_NO_SPUR == bb_spur) {
1931 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1932 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1933 return;
1934 } else {
1935 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1936 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1939 bin = bb_spur * 320;
1941 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1943 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1944 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1945 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1946 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1947 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1949 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1950 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1951 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1952 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1953 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1954 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1956 if (IS_CHAN_HT40(chan)) {
1957 if (bb_spur < 0) {
1958 spur_subchannel_sd = 1;
1959 bb_spur_off = bb_spur + 10;
1960 } else {
1961 spur_subchannel_sd = 0;
1962 bb_spur_off = bb_spur - 10;
1964 } else {
1965 spur_subchannel_sd = 0;
1966 bb_spur_off = bb_spur;
1969 if (IS_CHAN_HT40(chan))
1970 spur_delta_phase =
1971 ((bb_spur * 262144) /
1972 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1973 else
1974 spur_delta_phase =
1975 ((bb_spur * 524288) /
1976 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1978 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1979 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1981 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1982 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1983 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1984 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1986 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1987 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1989 cur_bin = -6000;
1990 upper = bin + 100;
1991 lower = bin - 100;
1993 for (i = 0; i < 4; i++) {
1994 int pilot_mask = 0;
1995 int chan_mask = 0;
1996 int bp = 0;
1997 for (bp = 0; bp < 30; bp++) {
1998 if ((cur_bin > lower) && (cur_bin < upper)) {
1999 pilot_mask = pilot_mask | 0x1 << bp;
2000 chan_mask = chan_mask | 0x1 << bp;
2002 cur_bin += 100;
2004 cur_bin += inc[i];
2005 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2006 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2009 cur_vit_mask = 6100;
2010 upper = bin + 120;
2011 lower = bin - 120;
2013 for (i = 0; i < 123; i++) {
2014 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2016 /* workaround for gcc bug #37014 */
2017 volatile int tmp_v = abs(cur_vit_mask - bin);
2019 if (tmp_v < 75)
2020 mask_amt = 1;
2021 else
2022 mask_amt = 0;
2023 if (cur_vit_mask < 0)
2024 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2025 else
2026 mask_p[cur_vit_mask / 100] = mask_amt;
2028 cur_vit_mask -= 100;
2031 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2032 | (mask_m[48] << 26) | (mask_m[49] << 24)
2033 | (mask_m[50] << 22) | (mask_m[51] << 20)
2034 | (mask_m[52] << 18) | (mask_m[53] << 16)
2035 | (mask_m[54] << 14) | (mask_m[55] << 12)
2036 | (mask_m[56] << 10) | (mask_m[57] << 8)
2037 | (mask_m[58] << 6) | (mask_m[59] << 4)
2038 | (mask_m[60] << 2) | (mask_m[61] << 0);
2039 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2040 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2042 tmp_mask = (mask_m[31] << 28)
2043 | (mask_m[32] << 26) | (mask_m[33] << 24)
2044 | (mask_m[34] << 22) | (mask_m[35] << 20)
2045 | (mask_m[36] << 18) | (mask_m[37] << 16)
2046 | (mask_m[48] << 14) | (mask_m[39] << 12)
2047 | (mask_m[40] << 10) | (mask_m[41] << 8)
2048 | (mask_m[42] << 6) | (mask_m[43] << 4)
2049 | (mask_m[44] << 2) | (mask_m[45] << 0);
2050 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2051 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2053 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2054 | (mask_m[18] << 26) | (mask_m[18] << 24)
2055 | (mask_m[20] << 22) | (mask_m[20] << 20)
2056 | (mask_m[22] << 18) | (mask_m[22] << 16)
2057 | (mask_m[24] << 14) | (mask_m[24] << 12)
2058 | (mask_m[25] << 10) | (mask_m[26] << 8)
2059 | (mask_m[27] << 6) | (mask_m[28] << 4)
2060 | (mask_m[29] << 2) | (mask_m[30] << 0);
2061 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2062 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2064 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2065 | (mask_m[2] << 26) | (mask_m[3] << 24)
2066 | (mask_m[4] << 22) | (mask_m[5] << 20)
2067 | (mask_m[6] << 18) | (mask_m[7] << 16)
2068 | (mask_m[8] << 14) | (mask_m[9] << 12)
2069 | (mask_m[10] << 10) | (mask_m[11] << 8)
2070 | (mask_m[12] << 6) | (mask_m[13] << 4)
2071 | (mask_m[14] << 2) | (mask_m[15] << 0);
2072 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2073 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2075 tmp_mask = (mask_p[15] << 28)
2076 | (mask_p[14] << 26) | (mask_p[13] << 24)
2077 | (mask_p[12] << 22) | (mask_p[11] << 20)
2078 | (mask_p[10] << 18) | (mask_p[9] << 16)
2079 | (mask_p[8] << 14) | (mask_p[7] << 12)
2080 | (mask_p[6] << 10) | (mask_p[5] << 8)
2081 | (mask_p[4] << 6) | (mask_p[3] << 4)
2082 | (mask_p[2] << 2) | (mask_p[1] << 0);
2083 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2084 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2086 tmp_mask = (mask_p[30] << 28)
2087 | (mask_p[29] << 26) | (mask_p[28] << 24)
2088 | (mask_p[27] << 22) | (mask_p[26] << 20)
2089 | (mask_p[25] << 18) | (mask_p[24] << 16)
2090 | (mask_p[23] << 14) | (mask_p[22] << 12)
2091 | (mask_p[21] << 10) | (mask_p[20] << 8)
2092 | (mask_p[19] << 6) | (mask_p[18] << 4)
2093 | (mask_p[17] << 2) | (mask_p[16] << 0);
2094 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2095 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2097 tmp_mask = (mask_p[45] << 28)
2098 | (mask_p[44] << 26) | (mask_p[43] << 24)
2099 | (mask_p[42] << 22) | (mask_p[41] << 20)
2100 | (mask_p[40] << 18) | (mask_p[39] << 16)
2101 | (mask_p[38] << 14) | (mask_p[37] << 12)
2102 | (mask_p[36] << 10) | (mask_p[35] << 8)
2103 | (mask_p[34] << 6) | (mask_p[33] << 4)
2104 | (mask_p[32] << 2) | (mask_p[31] << 0);
2105 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2106 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2108 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2109 | (mask_p[59] << 26) | (mask_p[58] << 24)
2110 | (mask_p[57] << 22) | (mask_p[56] << 20)
2111 | (mask_p[55] << 18) | (mask_p[54] << 16)
2112 | (mask_p[53] << 14) | (mask_p[52] << 12)
2113 | (mask_p[51] << 10) | (mask_p[50] << 8)
2114 | (mask_p[49] << 6) | (mask_p[48] << 4)
2115 | (mask_p[47] << 2) | (mask_p[46] << 0);
2116 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2117 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2120 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2122 int bb_spur = AR_NO_SPUR;
2123 int bin, cur_bin;
2124 int spur_freq_sd;
2125 int spur_delta_phase;
2126 int denominator;
2127 int upper, lower, cur_vit_mask;
2128 int tmp, new;
2129 int i;
2130 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2131 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2133 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2134 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2136 int inc[4] = { 0, 100, 0, 0 };
2138 int8_t mask_m[123];
2139 int8_t mask_p[123];
2140 int8_t mask_amt;
2141 int tmp_mask;
2142 int cur_bb_spur;
2143 bool is2GHz = IS_CHAN_2GHZ(chan);
2145 memset(&mask_m, 0, sizeof(int8_t) * 123);
2146 memset(&mask_p, 0, sizeof(int8_t) * 123);
2148 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2149 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2150 if (AR_NO_SPUR == cur_bb_spur)
2151 break;
2152 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2153 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2154 bb_spur = cur_bb_spur;
2155 break;
2159 if (AR_NO_SPUR == bb_spur)
2160 return;
2162 bin = bb_spur * 32;
2164 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2165 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2166 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2167 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2168 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2170 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2172 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2173 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2174 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2175 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2176 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2177 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2179 spur_delta_phase = ((bb_spur * 524288) / 100) &
2180 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2182 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2183 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2185 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2186 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2187 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2188 REG_WRITE(ah, AR_PHY_TIMING11, new);
2190 cur_bin = -6000;
2191 upper = bin + 100;
2192 lower = bin - 100;
2194 for (i = 0; i < 4; i++) {
2195 int pilot_mask = 0;
2196 int chan_mask = 0;
2197 int bp = 0;
2198 for (bp = 0; bp < 30; bp++) {
2199 if ((cur_bin > lower) && (cur_bin < upper)) {
2200 pilot_mask = pilot_mask | 0x1 << bp;
2201 chan_mask = chan_mask | 0x1 << bp;
2203 cur_bin += 100;
2205 cur_bin += inc[i];
2206 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2207 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2210 cur_vit_mask = 6100;
2211 upper = bin + 120;
2212 lower = bin - 120;
2214 for (i = 0; i < 123; i++) {
2215 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2217 /* workaround for gcc bug #37014 */
2218 volatile int tmp_v = abs(cur_vit_mask - bin);
2220 if (tmp_v < 75)
2221 mask_amt = 1;
2222 else
2223 mask_amt = 0;
2224 if (cur_vit_mask < 0)
2225 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2226 else
2227 mask_p[cur_vit_mask / 100] = mask_amt;
2229 cur_vit_mask -= 100;
2232 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2233 | (mask_m[48] << 26) | (mask_m[49] << 24)
2234 | (mask_m[50] << 22) | (mask_m[51] << 20)
2235 | (mask_m[52] << 18) | (mask_m[53] << 16)
2236 | (mask_m[54] << 14) | (mask_m[55] << 12)
2237 | (mask_m[56] << 10) | (mask_m[57] << 8)
2238 | (mask_m[58] << 6) | (mask_m[59] << 4)
2239 | (mask_m[60] << 2) | (mask_m[61] << 0);
2240 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2241 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2243 tmp_mask = (mask_m[31] << 28)
2244 | (mask_m[32] << 26) | (mask_m[33] << 24)
2245 | (mask_m[34] << 22) | (mask_m[35] << 20)
2246 | (mask_m[36] << 18) | (mask_m[37] << 16)
2247 | (mask_m[48] << 14) | (mask_m[39] << 12)
2248 | (mask_m[40] << 10) | (mask_m[41] << 8)
2249 | (mask_m[42] << 6) | (mask_m[43] << 4)
2250 | (mask_m[44] << 2) | (mask_m[45] << 0);
2251 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2252 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2254 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2255 | (mask_m[18] << 26) | (mask_m[18] << 24)
2256 | (mask_m[20] << 22) | (mask_m[20] << 20)
2257 | (mask_m[22] << 18) | (mask_m[22] << 16)
2258 | (mask_m[24] << 14) | (mask_m[24] << 12)
2259 | (mask_m[25] << 10) | (mask_m[26] << 8)
2260 | (mask_m[27] << 6) | (mask_m[28] << 4)
2261 | (mask_m[29] << 2) | (mask_m[30] << 0);
2262 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2263 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2265 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2266 | (mask_m[2] << 26) | (mask_m[3] << 24)
2267 | (mask_m[4] << 22) | (mask_m[5] << 20)
2268 | (mask_m[6] << 18) | (mask_m[7] << 16)
2269 | (mask_m[8] << 14) | (mask_m[9] << 12)
2270 | (mask_m[10] << 10) | (mask_m[11] << 8)
2271 | (mask_m[12] << 6) | (mask_m[13] << 4)
2272 | (mask_m[14] << 2) | (mask_m[15] << 0);
2273 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2274 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2276 tmp_mask = (mask_p[15] << 28)
2277 | (mask_p[14] << 26) | (mask_p[13] << 24)
2278 | (mask_p[12] << 22) | (mask_p[11] << 20)
2279 | (mask_p[10] << 18) | (mask_p[9] << 16)
2280 | (mask_p[8] << 14) | (mask_p[7] << 12)
2281 | (mask_p[6] << 10) | (mask_p[5] << 8)
2282 | (mask_p[4] << 6) | (mask_p[3] << 4)
2283 | (mask_p[2] << 2) | (mask_p[1] << 0);
2284 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2285 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2287 tmp_mask = (mask_p[30] << 28)
2288 | (mask_p[29] << 26) | (mask_p[28] << 24)
2289 | (mask_p[27] << 22) | (mask_p[26] << 20)
2290 | (mask_p[25] << 18) | (mask_p[24] << 16)
2291 | (mask_p[23] << 14) | (mask_p[22] << 12)
2292 | (mask_p[21] << 10) | (mask_p[20] << 8)
2293 | (mask_p[19] << 6) | (mask_p[18] << 4)
2294 | (mask_p[17] << 2) | (mask_p[16] << 0);
2295 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2296 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2298 tmp_mask = (mask_p[45] << 28)
2299 | (mask_p[44] << 26) | (mask_p[43] << 24)
2300 | (mask_p[42] << 22) | (mask_p[41] << 20)
2301 | (mask_p[40] << 18) | (mask_p[39] << 16)
2302 | (mask_p[38] << 14) | (mask_p[37] << 12)
2303 | (mask_p[36] << 10) | (mask_p[35] << 8)
2304 | (mask_p[34] << 6) | (mask_p[33] << 4)
2305 | (mask_p[32] << 2) | (mask_p[31] << 0);
2306 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2307 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2309 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2310 | (mask_p[59] << 26) | (mask_p[58] << 24)
2311 | (mask_p[57] << 22) | (mask_p[56] << 20)
2312 | (mask_p[55] << 18) | (mask_p[54] << 16)
2313 | (mask_p[53] << 14) | (mask_p[52] << 12)
2314 | (mask_p[51] << 10) | (mask_p[50] << 8)
2315 | (mask_p[49] << 6) | (mask_p[48] << 4)
2316 | (mask_p[47] << 2) | (mask_p[46] << 0);
2317 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2318 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2321 static void ath9k_enable_rfkill(struct ath_hw *ah)
2323 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2324 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2326 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2327 AR_GPIO_INPUT_MUX2_RFSILENT);
2329 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2330 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2333 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2334 bool bChannelChange)
2336 u32 saveLedState;
2337 struct ath_softc *sc = ah->ah_sc;
2338 struct ath9k_channel *curchan = ah->curchan;
2339 u32 saveDefAntenna;
2340 u32 macStaId1;
2341 int i, rx_chainmask, r;
2343 ah->extprotspacing = sc->ht_extprotspacing;
2344 ah->txchainmask = sc->tx_chainmask;
2345 ah->rxchainmask = sc->rx_chainmask;
2347 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2348 return -EIO;
2350 if (curchan)
2351 ath9k_hw_getnf(ah, curchan);
2353 if (bChannelChange &&
2354 (ah->chip_fullsleep != true) &&
2355 (ah->curchan != NULL) &&
2356 (chan->channel != ah->curchan->channel) &&
2357 ((chan->channelFlags & CHANNEL_ALL) ==
2358 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2359 (!AR_SREV_9280(ah) || (!IS_CHAN_A_5MHZ_SPACED(chan) &&
2360 !IS_CHAN_A_5MHZ_SPACED(ah->curchan)))) {
2362 if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2363 ath9k_hw_loadnf(ah, ah->curchan);
2364 ath9k_hw_start_nfcal(ah);
2365 return 0;
2369 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2370 if (saveDefAntenna == 0)
2371 saveDefAntenna = 1;
2373 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2375 saveLedState = REG_READ(ah, AR_CFG_LED) &
2376 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2377 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2379 ath9k_hw_mark_phy_inactive(ah);
2381 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2382 REG_WRITE(ah,
2383 AR9271_RESET_POWER_DOWN_CONTROL,
2384 AR9271_RADIO_RF_RST);
2385 udelay(50);
2388 if (!ath9k_hw_chip_reset(ah, chan)) {
2389 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Chip reset failed\n");
2390 return -EINVAL;
2393 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2394 ah->htc_reset_init = false;
2395 REG_WRITE(ah,
2396 AR9271_RESET_POWER_DOWN_CONTROL,
2397 AR9271_GATE_MAC_CTL);
2398 udelay(50);
2401 if (AR_SREV_9280_10_OR_LATER(ah))
2402 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2404 if (AR_SREV_9287_12_OR_LATER(ah)) {
2405 /* Enable ASYNC FIFO */
2406 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2407 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2408 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2409 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2410 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2411 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2412 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2414 r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2415 if (r)
2416 return r;
2418 /* Setup MFP options for CCMP */
2419 if (AR_SREV_9280_20_OR_LATER(ah)) {
2420 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2421 * frames when constructing CCMP AAD. */
2422 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2423 0xc7ff);
2424 ah->sw_mgmt_crypto = false;
2425 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2426 /* Disable hardware crypto for management frames */
2427 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2428 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2429 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2430 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2431 ah->sw_mgmt_crypto = true;
2432 } else
2433 ah->sw_mgmt_crypto = true;
2435 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2436 ath9k_hw_set_delta_slope(ah, chan);
2438 if (AR_SREV_9280_10_OR_LATER(ah))
2439 ath9k_hw_9280_spur_mitigate(ah, chan);
2440 else
2441 ath9k_hw_spur_mitigate(ah, chan);
2443 ah->eep_ops->set_board_values(ah, chan);
2445 ath9k_hw_decrease_chain_power(ah, chan);
2447 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2448 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2449 | macStaId1
2450 | AR_STA_ID1_RTS_USE_DEF
2451 | (ah->config.
2452 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2453 | ah->sta_id1_defaults);
2454 ath9k_hw_set_operating_mode(ah, ah->opmode);
2456 REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
2457 REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
2459 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2461 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
2462 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
2463 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2465 REG_WRITE(ah, AR_ISR, ~0);
2467 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2469 if (AR_SREV_9280_10_OR_LATER(ah))
2470 ath9k_hw_ar9280_set_channel(ah, chan);
2471 else
2472 if (!(ath9k_hw_set_channel(ah, chan)))
2473 return -EIO;
2475 for (i = 0; i < AR_NUM_DCU; i++)
2476 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2478 ah->intr_txqs = 0;
2479 for (i = 0; i < ah->caps.total_queues; i++)
2480 ath9k_hw_resettxqueue(ah, i);
2482 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2483 ath9k_hw_init_qos(ah);
2485 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2486 ath9k_enable_rfkill(ah);
2488 ath9k_hw_init_user_settings(ah);
2490 if (AR_SREV_9287_12_OR_LATER(ah)) {
2491 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2492 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2493 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2494 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2495 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2496 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2498 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2499 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2501 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2502 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2503 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2504 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2506 if (AR_SREV_9287_12_OR_LATER(ah)) {
2507 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2508 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2511 REG_WRITE(ah, AR_STA_ID1,
2512 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2514 ath9k_hw_set_dma(ah);
2516 REG_WRITE(ah, AR_OBS, 8);
2518 if (ah->config.intr_mitigation) {
2519 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2520 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2523 ath9k_hw_init_bb(ah, chan);
2525 if (!ath9k_hw_init_cal(ah, chan))
2526 return -EIO;
2528 rx_chainmask = ah->rxchainmask;
2529 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2530 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2531 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2534 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2537 * For big endian systems turn on swapping for descriptors
2539 if (AR_SREV_9100(ah)) {
2540 u32 mask;
2541 mask = REG_READ(ah, AR_CFG);
2542 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2543 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2544 "CFG Byte Swap Set 0x%x\n", mask);
2545 } else {
2546 mask =
2547 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2548 REG_WRITE(ah, AR_CFG, mask);
2549 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2550 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2552 } else {
2553 /* Configure AR9271 target WLAN */
2554 if (AR_SREV_9271(ah))
2555 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2556 #ifdef __BIG_ENDIAN
2557 else
2558 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2559 #endif
2562 if (ah->ah_sc->sc_flags & SC_OP_BTCOEX_ENABLED)
2563 ath9k_hw_btcoex_enable(ah);
2565 return 0;
2568 /************************/
2569 /* Key Cache Management */
2570 /************************/
2572 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2574 u32 keyType;
2576 if (entry >= ah->caps.keycache_size) {
2577 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2578 "keychache entry %u out of range\n", entry);
2579 return false;
2582 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2584 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2585 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2586 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2587 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2588 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2589 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2590 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2591 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2593 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2594 u16 micentry = entry + 64;
2596 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2597 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2598 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2599 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2603 return true;
2606 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2608 u32 macHi, macLo;
2610 if (entry >= ah->caps.keycache_size) {
2611 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2612 "keychache entry %u out of range\n", entry);
2613 return false;
2616 if (mac != NULL) {
2617 macHi = (mac[5] << 8) | mac[4];
2618 macLo = (mac[3] << 24) |
2619 (mac[2] << 16) |
2620 (mac[1] << 8) |
2621 mac[0];
2622 macLo >>= 1;
2623 macLo |= (macHi & 1) << 31;
2624 macHi >>= 1;
2625 } else {
2626 macLo = macHi = 0;
2628 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2629 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2631 return true;
2634 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2635 const struct ath9k_keyval *k,
2636 const u8 *mac)
2638 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2639 u32 key0, key1, key2, key3, key4;
2640 u32 keyType;
2642 if (entry >= pCap->keycache_size) {
2643 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2644 "keycache entry %u out of range\n", entry);
2645 return false;
2648 switch (k->kv_type) {
2649 case ATH9K_CIPHER_AES_OCB:
2650 keyType = AR_KEYTABLE_TYPE_AES;
2651 break;
2652 case ATH9K_CIPHER_AES_CCM:
2653 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2654 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2655 "AES-CCM not supported by mac rev 0x%x\n",
2656 ah->hw_version.macRev);
2657 return false;
2659 keyType = AR_KEYTABLE_TYPE_CCM;
2660 break;
2661 case ATH9K_CIPHER_TKIP:
2662 keyType = AR_KEYTABLE_TYPE_TKIP;
2663 if (ATH9K_IS_MIC_ENABLED(ah)
2664 && entry + 64 >= pCap->keycache_size) {
2665 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2666 "entry %u inappropriate for TKIP\n", entry);
2667 return false;
2669 break;
2670 case ATH9K_CIPHER_WEP:
2671 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2672 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2673 "WEP key length %u too small\n", k->kv_len);
2674 return false;
2676 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2677 keyType = AR_KEYTABLE_TYPE_40;
2678 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2679 keyType = AR_KEYTABLE_TYPE_104;
2680 else
2681 keyType = AR_KEYTABLE_TYPE_128;
2682 break;
2683 case ATH9K_CIPHER_CLR:
2684 keyType = AR_KEYTABLE_TYPE_CLR;
2685 break;
2686 default:
2687 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2688 "cipher %u not supported\n", k->kv_type);
2689 return false;
2692 key0 = get_unaligned_le32(k->kv_val + 0);
2693 key1 = get_unaligned_le16(k->kv_val + 4);
2694 key2 = get_unaligned_le32(k->kv_val + 6);
2695 key3 = get_unaligned_le16(k->kv_val + 10);
2696 key4 = get_unaligned_le32(k->kv_val + 12);
2697 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2698 key4 &= 0xff;
2701 * Note: Key cache registers access special memory area that requires
2702 * two 32-bit writes to actually update the values in the internal
2703 * memory. Consequently, the exact order and pairs used here must be
2704 * maintained.
2707 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2708 u16 micentry = entry + 64;
2711 * Write inverted key[47:0] first to avoid Michael MIC errors
2712 * on frames that could be sent or received at the same time.
2713 * The correct key will be written in the end once everything
2714 * else is ready.
2716 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2717 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2719 /* Write key[95:48] */
2720 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2721 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2723 /* Write key[127:96] and key type */
2724 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2725 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2727 /* Write MAC address for the entry */
2728 (void) ath9k_hw_keysetmac(ah, entry, mac);
2730 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2732 * TKIP uses two key cache entries:
2733 * Michael MIC TX/RX keys in the same key cache entry
2734 * (idx = main index + 64):
2735 * key0 [31:0] = RX key [31:0]
2736 * key1 [15:0] = TX key [31:16]
2737 * key1 [31:16] = reserved
2738 * key2 [31:0] = RX key [63:32]
2739 * key3 [15:0] = TX key [15:0]
2740 * key3 [31:16] = reserved
2741 * key4 [31:0] = TX key [63:32]
2743 u32 mic0, mic1, mic2, mic3, mic4;
2745 mic0 = get_unaligned_le32(k->kv_mic + 0);
2746 mic2 = get_unaligned_le32(k->kv_mic + 4);
2747 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2748 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2749 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2751 /* Write RX[31:0] and TX[31:16] */
2752 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2753 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2755 /* Write RX[63:32] and TX[15:0] */
2756 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2757 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2759 /* Write TX[63:32] and keyType(reserved) */
2760 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2761 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2762 AR_KEYTABLE_TYPE_CLR);
2764 } else {
2766 * TKIP uses four key cache entries (two for group
2767 * keys):
2768 * Michael MIC TX/RX keys are in different key cache
2769 * entries (idx = main index + 64 for TX and
2770 * main index + 32 + 96 for RX):
2771 * key0 [31:0] = TX/RX MIC key [31:0]
2772 * key1 [31:0] = reserved
2773 * key2 [31:0] = TX/RX MIC key [63:32]
2774 * key3 [31:0] = reserved
2775 * key4 [31:0] = reserved
2777 * Upper layer code will call this function separately
2778 * for TX and RX keys when these registers offsets are
2779 * used.
2781 u32 mic0, mic2;
2783 mic0 = get_unaligned_le32(k->kv_mic + 0);
2784 mic2 = get_unaligned_le32(k->kv_mic + 4);
2786 /* Write MIC key[31:0] */
2787 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2788 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2790 /* Write MIC key[63:32] */
2791 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2792 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2794 /* Write TX[63:32] and keyType(reserved) */
2795 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2796 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2797 AR_KEYTABLE_TYPE_CLR);
2800 /* MAC address registers are reserved for the MIC entry */
2801 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2802 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2805 * Write the correct (un-inverted) key[47:0] last to enable
2806 * TKIP now that all other registers are set with correct
2807 * values.
2809 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2810 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2811 } else {
2812 /* Write key[47:0] */
2813 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2814 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2816 /* Write key[95:48] */
2817 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2818 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2820 /* Write key[127:96] and key type */
2821 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2822 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2824 /* Write MAC address for the entry */
2825 (void) ath9k_hw_keysetmac(ah, entry, mac);
2828 return true;
2831 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2833 if (entry < ah->caps.keycache_size) {
2834 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2835 if (val & AR_KEYTABLE_VALID)
2836 return true;
2838 return false;
2841 /******************************/
2842 /* Power Management (Chipset) */
2843 /******************************/
2845 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2847 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2848 if (setChip) {
2849 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2850 AR_RTC_FORCE_WAKE_EN);
2851 if (!AR_SREV_9100(ah))
2852 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2854 REG_CLR_BIT(ah, (AR_RTC_RESET),
2855 AR_RTC_RESET_EN);
2859 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2861 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2862 if (setChip) {
2863 struct ath9k_hw_capabilities *pCap = &ah->caps;
2865 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2866 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2867 AR_RTC_FORCE_WAKE_ON_INT);
2868 } else {
2869 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2870 AR_RTC_FORCE_WAKE_EN);
2875 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2877 u32 val;
2878 int i;
2880 if (setChip) {
2881 if ((REG_READ(ah, AR_RTC_STATUS) &
2882 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2883 if (ath9k_hw_set_reset_reg(ah,
2884 ATH9K_RESET_POWER_ON) != true) {
2885 return false;
2888 if (AR_SREV_9100(ah))
2889 REG_SET_BIT(ah, AR_RTC_RESET,
2890 AR_RTC_RESET_EN);
2892 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2893 AR_RTC_FORCE_WAKE_EN);
2894 udelay(50);
2896 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2897 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2898 if (val == AR_RTC_STATUS_ON)
2899 break;
2900 udelay(50);
2901 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2902 AR_RTC_FORCE_WAKE_EN);
2904 if (i == 0) {
2905 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2906 "Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2907 return false;
2911 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2913 return true;
2916 static bool ath9k_hw_setpower_nolock(struct ath_hw *ah,
2917 enum ath9k_power_mode mode)
2919 int status = true, setChip = true;
2920 static const char *modes[] = {
2921 "AWAKE",
2922 "FULL-SLEEP",
2923 "NETWORK SLEEP",
2924 "UNDEFINED"
2927 if (ah->power_mode == mode)
2928 return status;
2930 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "%s -> %s\n",
2931 modes[ah->power_mode], modes[mode]);
2933 switch (mode) {
2934 case ATH9K_PM_AWAKE:
2935 status = ath9k_hw_set_power_awake(ah, setChip);
2936 break;
2937 case ATH9K_PM_FULL_SLEEP:
2938 ath9k_set_power_sleep(ah, setChip);
2939 ah->chip_fullsleep = true;
2940 break;
2941 case ATH9K_PM_NETWORK_SLEEP:
2942 ath9k_set_power_network_sleep(ah, setChip);
2943 break;
2944 default:
2945 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2946 "Unknown power mode %u\n", mode);
2947 return false;
2949 ah->power_mode = mode;
2951 return status;
2954 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2956 unsigned long flags;
2957 bool ret;
2959 spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags);
2960 ret = ath9k_hw_setpower_nolock(ah, mode);
2961 spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags);
2963 return ret;
2966 void ath9k_ps_wakeup(struct ath_softc *sc)
2968 unsigned long flags;
2970 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2971 if (++sc->ps_usecount != 1)
2972 goto unlock;
2974 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE);
2976 unlock:
2977 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2980 void ath9k_ps_restore(struct ath_softc *sc)
2982 unsigned long flags;
2984 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2985 if (--sc->ps_usecount != 0)
2986 goto unlock;
2988 if (sc->ps_enabled &&
2989 !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON |
2990 SC_OP_WAIT_FOR_CAB |
2991 SC_OP_WAIT_FOR_PSPOLL_DATA |
2992 SC_OP_WAIT_FOR_TX_ACK)))
2993 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP);
2995 unlock:
2996 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
3000 * Helper for ASPM support.
3002 * Disable PLL when in L0s as well as receiver clock when in L1.
3003 * This power saving option must be enabled through the SerDes.
3005 * Programming the SerDes must go through the same 288 bit serial shift
3006 * register as the other analog registers. Hence the 9 writes.
3008 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore)
3010 u8 i;
3012 if (ah->is_pciexpress != true)
3013 return;
3015 /* Do not touch SerDes registers */
3016 if (ah->config.pcie_powersave_enable == 2)
3017 return;
3019 /* Nothing to do on restore for 11N */
3020 if (restore)
3021 return;
3023 if (AR_SREV_9280_20_OR_LATER(ah)) {
3025 * AR9280 2.0 or later chips use SerDes values from the
3026 * initvals.h initialized depending on chipset during
3027 * ath9k_hw_init()
3029 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3030 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3031 INI_RA(&ah->iniPcieSerdes, i, 1));
3033 } else if (AR_SREV_9280(ah) &&
3034 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3035 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3036 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3038 /* RX shut off when elecidle is asserted */
3039 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3040 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3041 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3043 /* Shut off CLKREQ active in L1 */
3044 if (ah->config.pcie_clock_req)
3045 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3046 else
3047 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3049 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3050 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3051 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3053 /* Load the new settings */
3054 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3056 } else {
3057 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3058 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3060 /* RX shut off when elecidle is asserted */
3061 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3062 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3063 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3066 * Ignore ah->ah_config.pcie_clock_req setting for
3067 * pre-AR9280 11n
3069 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3071 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3072 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3073 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3075 /* Load the new settings */
3076 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3079 udelay(1000);
3081 /* set bit 19 to allow forcing of pcie core into L1 state */
3082 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3084 /* Several PCIe massages to ensure proper behaviour */
3085 if (ah->config.pcie_waen) {
3086 REG_WRITE(ah, AR_WA, ah->config.pcie_waen);
3087 } else {
3088 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || AR_SREV_9287(ah))
3089 REG_WRITE(ah, AR_WA, AR9285_WA_DEFAULT);
3091 * On AR9280 chips bit 22 of 0x4004 needs to be set to
3092 * otherwise card may disappear.
3094 else if (AR_SREV_9280(ah))
3095 REG_WRITE(ah, AR_WA, AR9280_WA_DEFAULT);
3096 else
3097 REG_WRITE(ah, AR_WA, AR_WA_DEFAULT);
3101 /**********************/
3102 /* Interrupt Handling */
3103 /**********************/
3105 bool ath9k_hw_intrpend(struct ath_hw *ah)
3107 u32 host_isr;
3109 if (AR_SREV_9100(ah))
3110 return true;
3112 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3113 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3114 return true;
3116 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3117 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3118 && (host_isr != AR_INTR_SPURIOUS))
3119 return true;
3121 return false;
3124 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3126 u32 isr = 0;
3127 u32 mask2 = 0;
3128 struct ath9k_hw_capabilities *pCap = &ah->caps;
3129 u32 sync_cause = 0;
3130 bool fatal_int = false;
3132 if (!AR_SREV_9100(ah)) {
3133 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3134 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3135 == AR_RTC_STATUS_ON) {
3136 isr = REG_READ(ah, AR_ISR);
3140 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3141 AR_INTR_SYNC_DEFAULT;
3143 *masked = 0;
3145 if (!isr && !sync_cause)
3146 return false;
3147 } else {
3148 *masked = 0;
3149 isr = REG_READ(ah, AR_ISR);
3152 if (isr) {
3153 if (isr & AR_ISR_BCNMISC) {
3154 u32 isr2;
3155 isr2 = REG_READ(ah, AR_ISR_S2);
3156 if (isr2 & AR_ISR_S2_TIM)
3157 mask2 |= ATH9K_INT_TIM;
3158 if (isr2 & AR_ISR_S2_DTIM)
3159 mask2 |= ATH9K_INT_DTIM;
3160 if (isr2 & AR_ISR_S2_DTIMSYNC)
3161 mask2 |= ATH9K_INT_DTIMSYNC;
3162 if (isr2 & (AR_ISR_S2_CABEND))
3163 mask2 |= ATH9K_INT_CABEND;
3164 if (isr2 & AR_ISR_S2_GTT)
3165 mask2 |= ATH9K_INT_GTT;
3166 if (isr2 & AR_ISR_S2_CST)
3167 mask2 |= ATH9K_INT_CST;
3168 if (isr2 & AR_ISR_S2_TSFOOR)
3169 mask2 |= ATH9K_INT_TSFOOR;
3172 isr = REG_READ(ah, AR_ISR_RAC);
3173 if (isr == 0xffffffff) {
3174 *masked = 0;
3175 return false;
3178 *masked = isr & ATH9K_INT_COMMON;
3180 if (ah->config.intr_mitigation) {
3181 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3182 *masked |= ATH9K_INT_RX;
3185 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3186 *masked |= ATH9K_INT_RX;
3187 if (isr &
3188 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3189 AR_ISR_TXEOL)) {
3190 u32 s0_s, s1_s;
3192 *masked |= ATH9K_INT_TX;
3194 s0_s = REG_READ(ah, AR_ISR_S0_S);
3195 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3196 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3198 s1_s = REG_READ(ah, AR_ISR_S1_S);
3199 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3200 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3203 if (isr & AR_ISR_RXORN) {
3204 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3205 "receive FIFO overrun interrupt\n");
3208 if (!AR_SREV_9100(ah)) {
3209 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3210 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3211 if (isr5 & AR_ISR_S5_TIM_TIMER)
3212 *masked |= ATH9K_INT_TIM_TIMER;
3216 *masked |= mask2;
3219 if (AR_SREV_9100(ah))
3220 return true;
3222 if (isr & AR_ISR_GENTMR) {
3223 u32 s5_s;
3225 s5_s = REG_READ(ah, AR_ISR_S5_S);
3226 if (isr & AR_ISR_GENTMR) {
3227 ah->intr_gen_timer_trigger =
3228 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3230 ah->intr_gen_timer_thresh =
3231 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3233 if (ah->intr_gen_timer_trigger)
3234 *masked |= ATH9K_INT_GENTIMER;
3239 if (sync_cause) {
3240 fatal_int =
3241 (sync_cause &
3242 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3243 ? true : false;
3245 if (fatal_int) {
3246 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3247 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3248 "received PCI FATAL interrupt\n");
3250 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3251 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3252 "received PCI PERR interrupt\n");
3254 *masked |= ATH9K_INT_FATAL;
3256 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3257 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3258 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3259 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3260 REG_WRITE(ah, AR_RC, 0);
3261 *masked |= ATH9K_INT_FATAL;
3263 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3264 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3265 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3268 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3269 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3272 return true;
3275 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3277 u32 omask = ah->mask_reg;
3278 u32 mask, mask2;
3279 struct ath9k_hw_capabilities *pCap = &ah->caps;
3281 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3283 if (omask & ATH9K_INT_GLOBAL) {
3284 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "disable IER\n");
3285 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3286 (void) REG_READ(ah, AR_IER);
3287 if (!AR_SREV_9100(ah)) {
3288 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3289 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3291 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3292 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3296 mask = ints & ATH9K_INT_COMMON;
3297 mask2 = 0;
3299 if (ints & ATH9K_INT_TX) {
3300 if (ah->txok_interrupt_mask)
3301 mask |= AR_IMR_TXOK;
3302 if (ah->txdesc_interrupt_mask)
3303 mask |= AR_IMR_TXDESC;
3304 if (ah->txerr_interrupt_mask)
3305 mask |= AR_IMR_TXERR;
3306 if (ah->txeol_interrupt_mask)
3307 mask |= AR_IMR_TXEOL;
3309 if (ints & ATH9K_INT_RX) {
3310 mask |= AR_IMR_RXERR;
3311 if (ah->config.intr_mitigation)
3312 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3313 else
3314 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3315 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3316 mask |= AR_IMR_GENTMR;
3319 if (ints & (ATH9K_INT_BMISC)) {
3320 mask |= AR_IMR_BCNMISC;
3321 if (ints & ATH9K_INT_TIM)
3322 mask2 |= AR_IMR_S2_TIM;
3323 if (ints & ATH9K_INT_DTIM)
3324 mask2 |= AR_IMR_S2_DTIM;
3325 if (ints & ATH9K_INT_DTIMSYNC)
3326 mask2 |= AR_IMR_S2_DTIMSYNC;
3327 if (ints & ATH9K_INT_CABEND)
3328 mask2 |= AR_IMR_S2_CABEND;
3329 if (ints & ATH9K_INT_TSFOOR)
3330 mask2 |= AR_IMR_S2_TSFOOR;
3333 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3334 mask |= AR_IMR_BCNMISC;
3335 if (ints & ATH9K_INT_GTT)
3336 mask2 |= AR_IMR_S2_GTT;
3337 if (ints & ATH9K_INT_CST)
3338 mask2 |= AR_IMR_S2_CST;
3341 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3342 REG_WRITE(ah, AR_IMR, mask);
3343 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3344 AR_IMR_S2_DTIM |
3345 AR_IMR_S2_DTIMSYNC |
3346 AR_IMR_S2_CABEND |
3347 AR_IMR_S2_CABTO |
3348 AR_IMR_S2_TSFOOR |
3349 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3350 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3351 ah->mask_reg = ints;
3353 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3354 if (ints & ATH9K_INT_TIM_TIMER)
3355 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3356 else
3357 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3360 if (ints & ATH9K_INT_GLOBAL) {
3361 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "enable IER\n");
3362 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3363 if (!AR_SREV_9100(ah)) {
3364 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3365 AR_INTR_MAC_IRQ);
3366 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3369 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3370 AR_INTR_SYNC_DEFAULT);
3371 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3372 AR_INTR_SYNC_DEFAULT);
3374 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3375 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3378 return omask;
3381 /*******************/
3382 /* Beacon Handling */
3383 /*******************/
3385 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3387 int flags = 0;
3389 ah->beacon_interval = beacon_period;
3391 switch (ah->opmode) {
3392 case NL80211_IFTYPE_STATION:
3393 case NL80211_IFTYPE_MONITOR:
3394 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3395 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3396 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3397 flags |= AR_TBTT_TIMER_EN;
3398 break;
3399 case NL80211_IFTYPE_ADHOC:
3400 case NL80211_IFTYPE_MESH_POINT:
3401 REG_SET_BIT(ah, AR_TXCFG,
3402 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3403 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3404 TU_TO_USEC(next_beacon +
3405 (ah->atim_window ? ah->
3406 atim_window : 1)));
3407 flags |= AR_NDP_TIMER_EN;
3408 case NL80211_IFTYPE_AP:
3409 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3410 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3411 TU_TO_USEC(next_beacon -
3412 ah->config.
3413 dma_beacon_response_time));
3414 REG_WRITE(ah, AR_NEXT_SWBA,
3415 TU_TO_USEC(next_beacon -
3416 ah->config.
3417 sw_beacon_response_time));
3418 flags |=
3419 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3420 break;
3421 default:
3422 DPRINTF(ah->ah_sc, ATH_DBG_BEACON,
3423 "%s: unsupported opmode: %d\n",
3424 __func__, ah->opmode);
3425 return;
3426 break;
3429 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3430 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3431 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3432 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3434 beacon_period &= ~ATH9K_BEACON_ENA;
3435 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3436 beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3437 ath9k_hw_reset_tsf(ah);
3440 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3443 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3444 const struct ath9k_beacon_state *bs)
3446 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3447 struct ath9k_hw_capabilities *pCap = &ah->caps;
3449 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3451 REG_WRITE(ah, AR_BEACON_PERIOD,
3452 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3453 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3454 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3456 REG_RMW_FIELD(ah, AR_RSSI_THR,
3457 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3459 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3461 if (bs->bs_sleepduration > beaconintval)
3462 beaconintval = bs->bs_sleepduration;
3464 dtimperiod = bs->bs_dtimperiod;
3465 if (bs->bs_sleepduration > dtimperiod)
3466 dtimperiod = bs->bs_sleepduration;
3468 if (beaconintval == dtimperiod)
3469 nextTbtt = bs->bs_nextdtim;
3470 else
3471 nextTbtt = bs->bs_nexttbtt;
3473 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3474 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3475 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3476 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3478 REG_WRITE(ah, AR_NEXT_DTIM,
3479 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3480 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3482 REG_WRITE(ah, AR_SLEEP1,
3483 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3484 | AR_SLEEP1_ASSUME_DTIM);
3486 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3487 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3488 else
3489 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3491 REG_WRITE(ah, AR_SLEEP2,
3492 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3494 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3495 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3497 REG_SET_BIT(ah, AR_TIMER_MODE,
3498 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3499 AR_DTIM_TIMER_EN);
3501 /* TSF Out of Range Threshold */
3502 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3505 /*******************/
3506 /* HW Capabilities */
3507 /*******************/
3509 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3511 struct ath9k_hw_capabilities *pCap = &ah->caps;
3512 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3513 struct ath_btcoex_info *btcoex_info = &ah->ah_sc->btcoex_info;
3515 u16 capField = 0, eeval;
3517 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3518 regulatory->current_rd = eeval;
3520 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3521 if (AR_SREV_9285_10_OR_LATER(ah))
3522 eeval |= AR9285_RDEXT_DEFAULT;
3523 regulatory->current_rd_ext = eeval;
3525 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3527 if (ah->opmode != NL80211_IFTYPE_AP &&
3528 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3529 if (regulatory->current_rd == 0x64 ||
3530 regulatory->current_rd == 0x65)
3531 regulatory->current_rd += 5;
3532 else if (regulatory->current_rd == 0x41)
3533 regulatory->current_rd = 0x43;
3534 DPRINTF(ah->ah_sc, ATH_DBG_REGULATORY,
3535 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3538 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3539 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3541 if (eeval & AR5416_OPFLAGS_11A) {
3542 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3543 if (ah->config.ht_enable) {
3544 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3545 set_bit(ATH9K_MODE_11NA_HT20,
3546 pCap->wireless_modes);
3547 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3548 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3549 pCap->wireless_modes);
3550 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3551 pCap->wireless_modes);
3556 if (eeval & AR5416_OPFLAGS_11G) {
3557 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3558 if (ah->config.ht_enable) {
3559 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3560 set_bit(ATH9K_MODE_11NG_HT20,
3561 pCap->wireless_modes);
3562 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3563 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3564 pCap->wireless_modes);
3565 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3566 pCap->wireless_modes);
3571 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3573 * For AR9271 we will temporarilly uses the rx chainmax as read from
3574 * the EEPROM.
3576 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3577 !(eeval & AR5416_OPFLAGS_11A) &&
3578 !(AR_SREV_9271(ah)))
3579 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3580 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3581 else
3582 /* Use rx_chainmask from EEPROM. */
3583 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3585 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3586 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3588 pCap->low_2ghz_chan = 2312;
3589 pCap->high_2ghz_chan = 2732;
3591 pCap->low_5ghz_chan = 4920;
3592 pCap->high_5ghz_chan = 6100;
3594 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3595 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3596 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3598 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3599 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3600 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3602 if (ah->config.ht_enable)
3603 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3604 else
3605 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3607 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3608 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3609 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3610 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3612 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3613 pCap->total_queues =
3614 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3615 else
3616 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3618 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3619 pCap->keycache_size =
3620 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3621 else
3622 pCap->keycache_size = AR_KEYTABLE_SIZE;
3624 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3625 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3627 if (AR_SREV_9285_10_OR_LATER(ah))
3628 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3629 else if (AR_SREV_9280_10_OR_LATER(ah))
3630 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3631 else
3632 pCap->num_gpio_pins = AR_NUM_GPIO;
3634 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3635 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3636 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3637 } else {
3638 pCap->rts_aggr_limit = (8 * 1024);
3641 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3643 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3644 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3645 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3646 ah->rfkill_gpio =
3647 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3648 ah->rfkill_polarity =
3649 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3651 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3653 #endif
3655 if ((ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI) ||
3656 (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE) ||
3657 (ah->hw_version.macVersion == AR_SREV_VERSION_9160) ||
3658 (ah->hw_version.macVersion == AR_SREV_VERSION_9100) ||
3659 (ah->hw_version.macVersion == AR_SREV_VERSION_9280) ||
3660 (ah->hw_version.macVersion == AR_SREV_VERSION_9285))
3661 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3662 else
3663 pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
3665 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3666 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3667 else
3668 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3670 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3671 pCap->reg_cap =
3672 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3673 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3674 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3675 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3676 } else {
3677 pCap->reg_cap =
3678 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3679 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3682 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3684 pCap->num_antcfg_5ghz =
3685 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3686 pCap->num_antcfg_2ghz =
3687 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3689 if (AR_SREV_9280_10_OR_LATER(ah) &&
3690 ath_btcoex_supported(ah->hw_version.subsysid)) {
3691 btcoex_info->btactive_gpio = ATH_BTACTIVE_GPIO;
3692 btcoex_info->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3694 if (AR_SREV_9285(ah)) {
3695 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_3WIRE;
3696 btcoex_info->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3697 } else {
3698 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_2WIRE;
3700 } else {
3701 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_NONE;
3705 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3706 u32 capability, u32 *result)
3708 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3709 switch (type) {
3710 case ATH9K_CAP_CIPHER:
3711 switch (capability) {
3712 case ATH9K_CIPHER_AES_CCM:
3713 case ATH9K_CIPHER_AES_OCB:
3714 case ATH9K_CIPHER_TKIP:
3715 case ATH9K_CIPHER_WEP:
3716 case ATH9K_CIPHER_MIC:
3717 case ATH9K_CIPHER_CLR:
3718 return true;
3719 default:
3720 return false;
3722 case ATH9K_CAP_TKIP_MIC:
3723 switch (capability) {
3724 case 0:
3725 return true;
3726 case 1:
3727 return (ah->sta_id1_defaults &
3728 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3729 false;
3731 case ATH9K_CAP_TKIP_SPLIT:
3732 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3733 false : true;
3734 case ATH9K_CAP_DIVERSITY:
3735 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3736 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3737 true : false;
3738 case ATH9K_CAP_MCAST_KEYSRCH:
3739 switch (capability) {
3740 case 0:
3741 return true;
3742 case 1:
3743 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3744 return false;
3745 } else {
3746 return (ah->sta_id1_defaults &
3747 AR_STA_ID1_MCAST_KSRCH) ? true :
3748 false;
3751 return false;
3752 case ATH9K_CAP_TXPOW:
3753 switch (capability) {
3754 case 0:
3755 return 0;
3756 case 1:
3757 *result = regulatory->power_limit;
3758 return 0;
3759 case 2:
3760 *result = regulatory->max_power_level;
3761 return 0;
3762 case 3:
3763 *result = regulatory->tp_scale;
3764 return 0;
3766 return false;
3767 case ATH9K_CAP_DS:
3768 return (AR_SREV_9280_20_OR_LATER(ah) &&
3769 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3770 ? false : true;
3771 default:
3772 return false;
3776 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3777 u32 capability, u32 setting, int *status)
3779 u32 v;
3781 switch (type) {
3782 case ATH9K_CAP_TKIP_MIC:
3783 if (setting)
3784 ah->sta_id1_defaults |=
3785 AR_STA_ID1_CRPT_MIC_ENABLE;
3786 else
3787 ah->sta_id1_defaults &=
3788 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3789 return true;
3790 case ATH9K_CAP_DIVERSITY:
3791 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3792 if (setting)
3793 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3794 else
3795 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3796 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3797 return true;
3798 case ATH9K_CAP_MCAST_KEYSRCH:
3799 if (setting)
3800 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3801 else
3802 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3803 return true;
3804 default:
3805 return false;
3809 /****************************/
3810 /* GPIO / RFKILL / Antennae */
3811 /****************************/
3813 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3814 u32 gpio, u32 type)
3816 int addr;
3817 u32 gpio_shift, tmp;
3819 if (gpio > 11)
3820 addr = AR_GPIO_OUTPUT_MUX3;
3821 else if (gpio > 5)
3822 addr = AR_GPIO_OUTPUT_MUX2;
3823 else
3824 addr = AR_GPIO_OUTPUT_MUX1;
3826 gpio_shift = (gpio % 6) * 5;
3828 if (AR_SREV_9280_20_OR_LATER(ah)
3829 || (addr != AR_GPIO_OUTPUT_MUX1)) {
3830 REG_RMW(ah, addr, (type << gpio_shift),
3831 (0x1f << gpio_shift));
3832 } else {
3833 tmp = REG_READ(ah, addr);
3834 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3835 tmp &= ~(0x1f << gpio_shift);
3836 tmp |= (type << gpio_shift);
3837 REG_WRITE(ah, addr, tmp);
3841 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3843 u32 gpio_shift;
3845 ASSERT(gpio < ah->caps.num_gpio_pins);
3847 gpio_shift = gpio << 1;
3849 REG_RMW(ah,
3850 AR_GPIO_OE_OUT,
3851 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3852 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3855 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3857 #define MS_REG_READ(x, y) \
3858 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3860 if (gpio >= ah->caps.num_gpio_pins)
3861 return 0xffffffff;
3863 if (AR_SREV_9287_10_OR_LATER(ah))
3864 return MS_REG_READ(AR9287, gpio) != 0;
3865 else if (AR_SREV_9285_10_OR_LATER(ah))
3866 return MS_REG_READ(AR9285, gpio) != 0;
3867 else if (AR_SREV_9280_10_OR_LATER(ah))
3868 return MS_REG_READ(AR928X, gpio) != 0;
3869 else
3870 return MS_REG_READ(AR, gpio) != 0;
3873 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3874 u32 ah_signal_type)
3876 u32 gpio_shift;
3878 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3880 gpio_shift = 2 * gpio;
3882 REG_RMW(ah,
3883 AR_GPIO_OE_OUT,
3884 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3885 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3888 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3890 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3891 AR_GPIO_BIT(gpio));
3894 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3896 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3899 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3901 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3904 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3905 enum ath9k_ant_setting settings,
3906 struct ath9k_channel *chan,
3907 u8 *tx_chainmask,
3908 u8 *rx_chainmask,
3909 u8 *antenna_cfgd)
3911 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3913 if (AR_SREV_9280(ah)) {
3914 if (!tx_chainmask_cfg) {
3916 tx_chainmask_cfg = *tx_chainmask;
3917 rx_chainmask_cfg = *rx_chainmask;
3920 switch (settings) {
3921 case ATH9K_ANT_FIXED_A:
3922 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3923 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3924 *antenna_cfgd = true;
3925 break;
3926 case ATH9K_ANT_FIXED_B:
3927 if (ah->caps.tx_chainmask >
3928 ATH9K_ANTENNA1_CHAINMASK) {
3929 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3931 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3932 *antenna_cfgd = true;
3933 break;
3934 case ATH9K_ANT_VARIABLE:
3935 *tx_chainmask = tx_chainmask_cfg;
3936 *rx_chainmask = rx_chainmask_cfg;
3937 *antenna_cfgd = true;
3938 break;
3939 default:
3940 break;
3942 } else {
3943 ah->config.diversity_control = settings;
3946 return true;
3949 /*********************/
3950 /* General Operation */
3951 /*********************/
3953 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3955 u32 bits = REG_READ(ah, AR_RX_FILTER);
3956 u32 phybits = REG_READ(ah, AR_PHY_ERR);
3958 if (phybits & AR_PHY_ERR_RADAR)
3959 bits |= ATH9K_RX_FILTER_PHYRADAR;
3960 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
3961 bits |= ATH9K_RX_FILTER_PHYERR;
3963 return bits;
3966 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
3968 u32 phybits;
3970 REG_WRITE(ah, AR_RX_FILTER, bits);
3972 phybits = 0;
3973 if (bits & ATH9K_RX_FILTER_PHYRADAR)
3974 phybits |= AR_PHY_ERR_RADAR;
3975 if (bits & ATH9K_RX_FILTER_PHYERR)
3976 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
3977 REG_WRITE(ah, AR_PHY_ERR, phybits);
3979 if (phybits)
3980 REG_WRITE(ah, AR_RXCFG,
3981 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
3982 else
3983 REG_WRITE(ah, AR_RXCFG,
3984 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
3987 bool ath9k_hw_phy_disable(struct ath_hw *ah)
3989 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM);
3992 bool ath9k_hw_disable(struct ath_hw *ah)
3994 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
3995 return false;
3997 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD);
4000 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
4002 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
4003 struct ath9k_channel *chan = ah->curchan;
4004 struct ieee80211_channel *channel = chan->chan;
4006 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
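	/*
	 * The EEPROM set_txpower() op works in half-dBm (and half-dBi for
	 * antenna gain) steps, hence the scaling by two below.
	 */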
4008 ah->eep_ops->set_txpower(ah, chan,
4009 ath9k_regd_get_ctl(regulatory, chan),
4010 channel->max_antenna_gain * 2,
4011 channel->max_power * 2,
4012 min((u32) MAX_RATE_POWER,
4013 (u32) regulatory->power_limit));
4016 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4018 memcpy(ah->macaddr, mac, ETH_ALEN);
4021 void ath9k_hw_setopmode(struct ath_hw *ah)
4023 ath9k_hw_set_operating_mode(ah, ah->opmode);
4026 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4028 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4029 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4032 void ath9k_hw_setbssidmask(struct ath_softc *sc)
4034 REG_WRITE(sc->sc_ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
4035 REG_WRITE(sc->sc_ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
4038 void ath9k_hw_write_associd(struct ath_softc *sc)
4040 REG_WRITE(sc->sc_ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
4041 REG_WRITE(sc->sc_ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
4042 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4045 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4047 u64 tsf;
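	/*
	 * Note: the upper and lower TSF words are read back-to-back without
	 * a retry, so a rollover of the low word between the two reads is
	 * not handled here.
	 */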
4049 tsf = REG_READ(ah, AR_TSF_U32);
4050 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4052 return tsf;
4055 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4057 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4058 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4061 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4063 ath9k_ps_wakeup(ah->ah_sc);
4064 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4065 AH_TSF_WRITE_TIMEOUT))
4066 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
4067 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4069 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4070 ath9k_ps_restore(ah->ah_sc);
4073 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4075 if (setting)
4076 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4077 else
4078 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4081 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4083 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4084 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad slot time %u\n", us);
4085 ah->slottime = (u32) -1;
4086 return false;
4087 } else {
4088 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4089 ah->slottime = us;
4090 return true;
4094 void ath9k_hw_set11nmac2040(struct ath_hw *ah, enum ath9k_ht_macmode mode)
4096 u32 macmode;
4098 if (mode == ATH9K_HT_MACMODE_2040 &&
4099 !ah->config.cwm_ignore_extcca)
4100 macmode = AR_2040_JOINED_RX_CLEAR;
4101 else
4102 macmode = 0;
4104 REG_WRITE(ah, AR_2040_MODE, macmode);
4107 /* HW Generic timers configuration */
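/*
 * The first eight slots below all reference the same NDP timer registers
 * (enable bit 0x0080 in AR_TIMER_MODE), while slots 8-15 each get their own
 * NDP2 next/period registers, spaced 4 bytes apart, and their own enable bit
 * in AR_NDP2_TIMER_MODE (the mode_mask column).
 */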
4109 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4111 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4112 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4113 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4114 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4115 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4116 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4117 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4118 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4119 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4120 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4121 AR_NDP2_TIMER_MODE, 0x0002},
4122 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4123 AR_NDP2_TIMER_MODE, 0x0004},
4124 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4125 AR_NDP2_TIMER_MODE, 0x0008},
4126 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4127 AR_NDP2_TIMER_MODE, 0x0010},
4128 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4129 AR_NDP2_TIMER_MODE, 0x0020},
4130 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4131 AR_NDP2_TIMER_MODE, 0x0040},
4132 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4133 AR_NDP2_TIMER_MODE, 0x0080}
4136 /* HW generic timer primitives */
4138 /* compute and clear index of rightmost 1 */
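/*
 * This is the classic de Bruijn bit scan: b &= -b isolates the lowest set
 * bit, and multiplying that power of two by the de Bruijn constant and
 * shifting right by 27 yields a unique 5-bit value, which the
 * gen_timer_index[] table maps back to the bit position.
 */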
4139 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4141 u32 b;
4143 b = *mask;
4144 b &= (0-b);
4145 *mask &= ~b;
4146 b *= debruijn32;
4147 b >>= 27;
4149 return timer_table->gen_timer_index[b];
4152 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4154 return REG_READ(ah, AR_TSF_L32);
4157 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4158 void (*trigger)(void *),
4159 void (*overflow)(void *),
4160 void *arg,
4161 u8 timer_index)
4163 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4164 struct ath_gen_timer *timer;
4166 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4168 if (timer == NULL) {
4169 printk(KERN_DEBUG "Failed to allocate memory "
4170 "for hw timer[%d]\n", timer_index);
4171 return NULL;
4174 /* allocate a hardware generic timer slot */
4175 timer_table->timers[timer_index] = timer;
4176 timer->index = timer_index;
4177 timer->trigger = trigger;
4178 timer->overflow = overflow;
4179 timer->arg = arg;
4181 return timer;
4184 void ath_gen_timer_start(struct ath_hw *ah,
4185 struct ath_gen_timer *timer,
4186 u32 timer_next, u32 timer_period)
4188 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4189 u32 tsf;
4191 BUG_ON(!timer_period);
4193 set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4195 tsf = ath9k_hw_gettsf32(ah);
4197 DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER, "current tsf %x period %x "
4198 "timer_next %x\n", tsf, timer_period, timer_next);
4201 * Pull timer_next forward if the current TSF already passed it
4202 * because of software latency
4204 if (timer_next < tsf)
4205 timer_next = tsf + timer_period;
4208 * Program generic timer registers
4210 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4211 timer_next);
4212 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4213 timer_period);
4214 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4215 gen_tmr_configuration[timer->index].mode_mask);
4217 /* Enable both trigger and thresh interrupt masks */
4218 REG_SET_BIT(ah, AR_IMR_S5,
4219 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4220 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
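	/*
	 * The first time any generic timer is started, add
	 * ATH9K_INT_GENTIMER to the interrupt mask, quiescing interrupts
	 * around the update.
	 */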
4222 if ((ah->ah_sc->imask & ATH9K_INT_GENTIMER) == 0) {
4223 ath9k_hw_set_interrupts(ah, 0);
4224 ah->ah_sc->imask |= ATH9K_INT_GENTIMER;
4225 ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4229 void ath_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4231 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4233 if ((timer->index < AR_FIRST_NDP_TIMER) ||
4234 (timer->index >= ATH_MAX_GEN_TIMER)) {
4235 return;
4238 /* Clear generic timer enable bits. */
4239 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4240 gen_tmr_configuration[timer->index].mode_mask);
4242 /* Disable both trigger and thresh interrupt masks */
4243 REG_CLR_BIT(ah, AR_IMR_S5,
4244 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4245 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4247 clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4249 /* if no timer is enabled, turn off interrupt mask */
4250 if (timer_table->timer_mask.val == 0) {
4251 ath9k_hw_set_interrupts(ah, 0);
4252 ah->ah_sc->imask &= ~ATH9K_INT_GENTIMER;
4253 ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4257 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4259 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4261 /* free the hardware generic timer slot */
4262 timer_table->timers[timer->index] = NULL;
4263 kfree(timer);
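/*
 * Hypothetical usage sketch (not part of the driver, kept compiled out):
 * a client of the generic timer API would typically pair
 * ath_gen_timer_alloc()/ath_gen_timer_free() with
 * ath_gen_timer_start()/ath_gen_timer_stop() along these lines.  The
 * callback names, the choice of AR_FIRST_NDP_TIMER and the 10000 us
 * period are illustrative assumptions only.
 */
#if 0
static void example_gen_timer_trigger(void *arg)
{
	/* periodic work goes here */
}

static void example_gen_timer_overflow(void *arg)
{
	/* the 32-bit TSF wrapped before the timer fired */
}

static void example_gen_timer_usage(struct ath_hw *ah)
{
	struct ath_gen_timer *timer;

	timer = ath_gen_timer_alloc(ah, example_gen_timer_trigger,
				    example_gen_timer_overflow,
				    ah->ah_sc, AR_FIRST_NDP_TIMER);
	if (!timer)
		return;

	/* first shot 10000 us from now, then every 10000 us */
	ath_gen_timer_start(ah, timer,
			    ath9k_hw_gettsf32(ah) + 10000, 10000);

	/* ... */

	ath_gen_timer_stop(ah, timer);
	ath_gen_timer_free(ah, timer);
}
#endif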
4267 * Generic timer interrupt handling
4269 void ath_gen_timer_isr(struct ath_hw *ah)
4271 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4272 struct ath_gen_timer *timer;
4273 u32 trigger_mask, thresh_mask, index;
4275 /* get hardware generic timer interrupt status */
4276 trigger_mask = ah->intr_gen_timer_trigger;
4277 thresh_mask = ah->intr_gen_timer_thresh;
4278 trigger_mask &= timer_table->timer_mask.val;
4279 thresh_mask &= timer_table->timer_mask.val;
4281 trigger_mask &= ~thresh_mask;
4283 while (thresh_mask) {
4284 index = rightmost_index(timer_table, &thresh_mask);
4285 timer = timer_table->timers[index];
4286 BUG_ON(!timer);
4287 DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER,
4288 "TSF overflow for Gen timer %d\n", index);
4289 timer->overflow(timer->arg);
4292 while (trigger_mask) {
4293 index = rightmost_index(timer_table, &trigger_mask);
4294 timer = timer_table->timers[index];
4295 BUG_ON(!timer);
4296 DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER,
4297 "Gen timer[%d] trigger\n", index);
4298 timer->trigger(timer->arg);
4303 * Primitive to disable ASPM
4305 void ath_pcie_aspm_disable(struct ath_softc *sc)
4307 struct pci_dev *pdev = to_pci_dev(sc->dev);
4308 u8 aspm;
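	/*
	 * Clear the L0s and L1 ASPM enable bits in the PCIe link control
	 * register so the link never enters those power-saving states.
	 */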
4310 pci_read_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, &aspm);
4311 aspm &= ~(ATH_PCIE_CAP_LINK_L0S | ATH_PCIE_CAP_LINK_L1);
4312 pci_write_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, aspm);