drivers/net/wireless/ath/ath9k/hw.c
1 /*
2 * Copyright (c) 2008-2011 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 #include <linux/io.h>
18 #include <linux/slab.h>
19 #include <linux/module.h>
20 #include <linux/time.h>
21 #include <linux/bitops.h>
22 #include <linux/etherdevice.h>
23 #include <linux/gpio.h>
24 #include <asm/unaligned.h>
26 #include "hw.h"
27 #include "hw-ops.h"
28 #include "ar9003_mac.h"
29 #include "ar9003_mci.h"
30 #include "ar9003_phy.h"
31 #include "ath9k.h"
33 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
35 MODULE_AUTHOR("Atheros Communications");
36 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
37 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
38 MODULE_LICENSE("Dual BSD/GPL");
40 static void ath9k_hw_set_clockrate(struct ath_hw *ah)
42 struct ath_common *common = ath9k_hw_common(ah);
43 struct ath9k_channel *chan = ah->curchan;
44 unsigned int clockrate;
46 /* AR9287 v1.3+ uses async FIFO and runs the MAC at 117 MHz */
47 if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah))
48 clockrate = 117;
49 else if (!chan) /* should really check for CCK instead */
50 clockrate = ATH9K_CLOCK_RATE_CCK;
51 else if (IS_CHAN_2GHZ(chan))
52 clockrate = ATH9K_CLOCK_RATE_2GHZ_OFDM;
53 else if (ah->caps.hw_caps & ATH9K_HW_CAP_FASTCLOCK)
54 clockrate = ATH9K_CLOCK_FAST_RATE_5GHZ_OFDM;
55 else
56 clockrate = ATH9K_CLOCK_RATE_5GHZ_OFDM;
58 if (chan) {
59 if (IS_CHAN_HT40(chan))
60 clockrate *= 2;
61 if (IS_CHAN_HALF_RATE(chan))
62 clockrate /= 2;
63 if (IS_CHAN_QUARTER_RATE(chan))
64 clockrate /= 4;
67 common->clockrate = clockrate;
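/*
 * Illustrative example (values assume the usual ath9k clock-rate defines,
 * e.g. ATH9K_CLOCK_RATE_2GHZ_OFDM == 44): on a 2 GHz OFDM HT40 channel the
 * selection above yields clockrate = 44 * 2 = 88 MHz, so converting a 10 us
 * interval with ath9k_hw_mac_to_clks() below gives 10 * 88 = 880 MAC clocks.
 */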
70 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
72 struct ath_common *common = ath9k_hw_common(ah);
74 return usecs * common->clockrate;
77 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
79 int i;
81 BUG_ON(timeout < AH_TIME_QUANTUM);
83 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
84 if ((REG_READ(ah, reg) & mask) == val)
85 return true;
87 udelay(AH_TIME_QUANTUM);
90 ath_dbg(ath9k_hw_common(ah), ANY,
91 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
92 timeout, reg, REG_READ(ah, reg), mask, val);
94 return false;
96 EXPORT_SYMBOL(ath9k_hw_wait);
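/*
 * Typical usage (as in ath9k_hw_set_reset() later in this file): poll a
 * register until the masked value matches, e.g.
 *
 *	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT))
 *		return false;
 *
 * The timeout is given in microseconds and the register is re-read every
 * AH_TIME_QUANTUM microseconds.
 */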
98 void ath9k_hw_synth_delay(struct ath_hw *ah, struct ath9k_channel *chan,
99 int hw_delay)
101 hw_delay /= 10;
103 if (IS_CHAN_HALF_RATE(chan))
104 hw_delay *= 2;
105 else if (IS_CHAN_QUARTER_RATE(chan))
106 hw_delay *= 4;
108 udelay(hw_delay + BASE_ACTIVATE_DELAY);
111 void ath9k_hw_write_array(struct ath_hw *ah, const struct ar5416IniArray *array,
112 int column, unsigned int *writecnt)
114 int r;
116 ENABLE_REGWRITE_BUFFER(ah);
117 for (r = 0; r < array->ia_rows; r++) {
118 REG_WRITE(ah, INI_RA(array, r, 0),
119 INI_RA(array, r, column));
120 DO_DELAY(*writecnt);
122 REGWRITE_BUFFER_FLUSH(ah);
125 void ath9k_hw_read_array(struct ath_hw *ah, u32 array[][2], int size)
127 u32 *tmp_reg_list, *tmp_data;
128 int i;
130 tmp_reg_list = kmalloc_array(size, sizeof(u32), GFP_KERNEL);
131 if (!tmp_reg_list) {
132                 dev_err(ah->dev, "%s: tmp_reg_list: alloc failed\n", __func__);
133 return;
136 tmp_data = kmalloc_array(size, sizeof(u32), GFP_KERNEL);
137 if (!tmp_data) {
138                 dev_err(ah->dev, "%s: tmp_data: alloc failed\n", __func__);
139 goto error_tmp_data;
142 for (i = 0; i < size; i++)
143 tmp_reg_list[i] = array[i][0];
145 REG_READ_MULTI(ah, tmp_reg_list, tmp_data, size);
147 for (i = 0; i < size; i++)
148 array[i][1] = tmp_data[i];
150 kfree(tmp_data);
151 error_tmp_data:
152 kfree(tmp_reg_list);
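/*
 * ath9k_hw_reverse_bits() below returns the low n bits of val in reversed
 * bit order. Small example: reversing the 4-bit value 0b1101 (0xd) gives
 * 0b1011 (0xb).
 */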
155 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
157 u32 retval;
158 int i;
160 for (i = 0, retval = 0; i < n; i++) {
161 retval = (retval << 1) | (val & 1);
162 val >>= 1;
164 return retval;
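/*
 * Worked example for the OFDM branch of ath9k_hw_computetxtime() below,
 * assuming the usual 802.11a timing constants (OFDM_PLCP_BITS = 22,
 * OFDM_SYMBOL_TIME = 4 us, OFDM_PREAMBLE_TIME = 20 us, OFDM_SIFS_TIME = 16 us):
 * for a 1500-byte frame at 54000 kbps on a full-rate channel,
 * bitsPerSymbol = 54000 * 4 / 1000 = 216, numBits = 22 + 12000 = 12022,
 * numSymbols = DIV_ROUND_UP(12022, 216) = 56, so
 * txTime = 16 + 20 + 56 * 4 = 260 us.
 */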
167 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
168 u8 phy, int kbps,
169 u32 frameLen, u16 rateix,
170 bool shortPreamble)
172 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
174 if (kbps == 0)
175 return 0;
177 switch (phy) {
178 case WLAN_RC_PHY_CCK:
179 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
180 if (shortPreamble)
181 phyTime >>= 1;
182 numBits = frameLen << 3;
183 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
184 break;
185 case WLAN_RC_PHY_OFDM:
186 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
187 bitsPerSymbol =
188 ((kbps >> 2) * OFDM_SYMBOL_TIME_QUARTER) / 1000;
189 numBits = OFDM_PLCP_BITS + (frameLen << 3);
190 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
191 txTime = OFDM_SIFS_TIME_QUARTER
192 + OFDM_PREAMBLE_TIME_QUARTER
193 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
194 } else if (ah->curchan &&
195 IS_CHAN_HALF_RATE(ah->curchan)) {
196 bitsPerSymbol =
197 ((kbps >> 1) * OFDM_SYMBOL_TIME_HALF) / 1000;
198 numBits = OFDM_PLCP_BITS + (frameLen << 3);
199 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
200 txTime = OFDM_SIFS_TIME_HALF +
201 OFDM_PREAMBLE_TIME_HALF
202 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
203 } else {
204 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
205 numBits = OFDM_PLCP_BITS + (frameLen << 3);
206 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
207 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
208 + (numSymbols * OFDM_SYMBOL_TIME);
210 break;
211 default:
212 ath_err(ath9k_hw_common(ah),
213 "Unknown phy %u (rate ix %u)\n", phy, rateix);
214 txTime = 0;
215 break;
218 return txTime;
220 EXPORT_SYMBOL(ath9k_hw_computetxtime);
222 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
223 struct ath9k_channel *chan,
224 struct chan_centers *centers)
226 int8_t extoff;
228 if (!IS_CHAN_HT40(chan)) {
229 centers->ctl_center = centers->ext_center =
230 centers->synth_center = chan->channel;
231 return;
234 if (IS_CHAN_HT40PLUS(chan)) {
235 centers->synth_center =
236 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
237 extoff = 1;
238 } else {
239 centers->synth_center =
240 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
241 extoff = -1;
244 centers->ctl_center =
245 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
246         /* 25 MHz spacing is supported by hw but not by the upper layers */
247 centers->ext_center =
248 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
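/*
 * Example of the HT40 math above (assuming HT40_CHANNEL_CENTER_SHIFT is
 * 10 MHz, as in the ath9k headers): for an HT40+ channel whose control
 * channel is 5180 MHz, synth_center = 5190, ctl_center = 5180 and
 * ext_center = 5200 MHz.
 */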
251 /******************/
252 /* Chip Revisions */
253 /******************/
255 static void ath9k_hw_read_revisions(struct ath_hw *ah)
257 u32 val;
259 if (ah->get_mac_revision)
260 ah->hw_version.macRev = ah->get_mac_revision();
262 switch (ah->hw_version.devid) {
263 case AR5416_AR9100_DEVID:
264 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
265 break;
266 case AR9300_DEVID_AR9330:
267 ah->hw_version.macVersion = AR_SREV_VERSION_9330;
268 if (!ah->get_mac_revision) {
269 val = REG_READ(ah, AR_SREV);
270 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
272 return;
273 case AR9300_DEVID_AR9340:
274 ah->hw_version.macVersion = AR_SREV_VERSION_9340;
275 return;
276 case AR9300_DEVID_QCA955X:
277 ah->hw_version.macVersion = AR_SREV_VERSION_9550;
278 return;
279 case AR9300_DEVID_AR953X:
280 ah->hw_version.macVersion = AR_SREV_VERSION_9531;
281 return;
282 case AR9300_DEVID_QCA956X:
283 ah->hw_version.macVersion = AR_SREV_VERSION_9561;
284 return;
287 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
289 if (val == 0xFF) {
290 val = REG_READ(ah, AR_SREV);
291 ah->hw_version.macVersion =
292 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
293 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
295 if (AR_SREV_9462(ah) || AR_SREV_9565(ah))
296 ah->is_pciexpress = true;
297 else
298 ah->is_pciexpress = (val &
299 AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
300 } else {
301 if (!AR_SREV_9100(ah))
302 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
304 ah->hw_version.macRev = val & AR_SREV_REVISION;
306 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
307 ah->is_pciexpress = true;
311 /************************************/
312 /* HW Attach, Detach, Init Routines */
313 /************************************/
315 static void ath9k_hw_disablepcie(struct ath_hw *ah)
317 if (!AR_SREV_5416(ah))
318 return;
320 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
321 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
322 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
323 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
324 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
325 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
326 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
327 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
328 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
330 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
333 /* This should work for all families including legacy */
334 static bool ath9k_hw_chip_test(struct ath_hw *ah)
336 struct ath_common *common = ath9k_hw_common(ah);
337 u32 regAddr[2] = { AR_STA_ID0 };
338 u32 regHold[2];
339 static const u32 patternData[4] = {
340 0x55555555, 0xaaaaaaaa, 0x66666666, 0x99999999
342 int i, j, loop_max;
344 if (!AR_SREV_9300_20_OR_LATER(ah)) {
345 loop_max = 2;
346 regAddr[1] = AR_PHY_BASE + (8 << 2);
347 } else
348 loop_max = 1;
350 for (i = 0; i < loop_max; i++) {
351 u32 addr = regAddr[i];
352 u32 wrData, rdData;
354 regHold[i] = REG_READ(ah, addr);
355 for (j = 0; j < 0x100; j++) {
356 wrData = (j << 16) | j;
357 REG_WRITE(ah, addr, wrData);
358 rdData = REG_READ(ah, addr);
359 if (rdData != wrData) {
360 ath_err(common,
361 "address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
362 addr, wrData, rdData);
363 return false;
366 for (j = 0; j < 4; j++) {
367 wrData = patternData[j];
368 REG_WRITE(ah, addr, wrData);
369 rdData = REG_READ(ah, addr);
370 if (wrData != rdData) {
371 ath_err(common,
372 "address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
373 addr, wrData, rdData);
374 return false;
377 REG_WRITE(ah, regAddr[i], regHold[i]);
379 udelay(100);
381 return true;
384 static void ath9k_hw_init_config(struct ath_hw *ah)
386 struct ath_common *common = ath9k_hw_common(ah);
388 ah->config.dma_beacon_response_time = 1;
389 ah->config.sw_beacon_response_time = 6;
390 ah->config.cwm_ignore_extcca = false;
391 ah->config.analog_shiftreg = 1;
393 ah->config.rx_intr_mitigation = true;
395 if (AR_SREV_9300_20_OR_LATER(ah)) {
396 ah->config.rimt_last = 500;
397 ah->config.rimt_first = 2000;
398 } else {
399 ah->config.rimt_last = 250;
400 ah->config.rimt_first = 700;
403 if (AR_SREV_9462(ah) || AR_SREV_9565(ah))
404 ah->config.pll_pwrsave = 7;
407 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
408 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
409 * This means we use it for all AR5416 devices, and the few
410 * minor PCI AR9280 devices out there.
412 * Serialization is required because these devices do not handle
413 * well the case of two concurrent reads/writes due to the latency
414          * involved. During one read/write, another read/write can be issued
415          * on another CPU while the previous read/write may still be working
416          * on our hardware; if we hit this case, the hardware gets stuck in a loop.
417 * We prevent this by serializing reads and writes.
419 * This issue is not present on PCI-Express devices or pre-AR5416
420 * devices (legacy, 802.11abg).
422 if (num_possible_cpus() > 1)
423 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
425 if (NR_CPUS > 1 && ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
426 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
427 ((AR_SREV_9160(ah) || AR_SREV_9280(ah) || AR_SREV_9287(ah)) &&
428 !ah->is_pciexpress)) {
429 ah->config.serialize_regmode = SER_REG_MODE_ON;
430 } else {
431 ah->config.serialize_regmode = SER_REG_MODE_OFF;
435 ath_dbg(common, RESET, "serialize_regmode is %d\n",
436 ah->config.serialize_regmode);
438 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
439 ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
440 else
441 ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;
444 static void ath9k_hw_init_defaults(struct ath_hw *ah)
446 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
448 regulatory->country_code = CTRY_DEFAULT;
449 regulatory->power_limit = MAX_RATE_POWER;
451 ah->hw_version.magic = AR5416_MAGIC;
452 ah->hw_version.subvendorid = 0;
454 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE |
455 AR_STA_ID1_MCAST_KSRCH;
456 if (AR_SREV_9100(ah))
457 ah->sta_id1_defaults |= AR_STA_ID1_AR9100_BA_FIX;
459 ah->slottime = 9;
460 ah->globaltxtimeout = (u32) -1;
461 ah->power_mode = ATH9K_PM_UNDEFINED;
462 ah->htc_reset_init = true;
464 ah->tpc_enabled = false;
466 ah->ani_function = ATH9K_ANI_ALL;
467 if (!AR_SREV_9300_20_OR_LATER(ah))
468 ah->ani_function &= ~ATH9K_ANI_MRC_CCK;
470 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
471 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
472 else
473 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
476 static void ath9k_hw_init_macaddr(struct ath_hw *ah)
478 struct ath_common *common = ath9k_hw_common(ah);
479 int i;
480 u16 eeval;
481 static const u32 EEP_MAC[] = { EEP_MAC_LSW, EEP_MAC_MID, EEP_MAC_MSW };
483 /* MAC address may already be loaded via ath9k_platform_data */
484 if (is_valid_ether_addr(common->macaddr))
485 return;
487 for (i = 0; i < 3; i++) {
488 eeval = ah->eep_ops->get_eeprom(ah, EEP_MAC[i]);
489 common->macaddr[2 * i] = eeval >> 8;
490 common->macaddr[2 * i + 1] = eeval & 0xff;
493 if (is_valid_ether_addr(common->macaddr))
494 return;
496 ath_err(common, "eeprom contains invalid mac address: %pM\n",
497 common->macaddr);
499 eth_random_addr(common->macaddr);
500 ath_err(common, "random mac address will be used: %pM\n",
501 common->macaddr);
503 return;
506 static int ath9k_hw_post_init(struct ath_hw *ah)
508 struct ath_common *common = ath9k_hw_common(ah);
509 int ecode;
511 if (common->bus_ops->ath_bus_type != ATH_USB) {
512 if (!ath9k_hw_chip_test(ah))
513 return -ENODEV;
516 if (!AR_SREV_9300_20_OR_LATER(ah)) {
517 ecode = ar9002_hw_rf_claim(ah);
518 if (ecode != 0)
519 return ecode;
522 ecode = ath9k_hw_eeprom_init(ah);
523 if (ecode != 0)
524 return ecode;
526 ath_dbg(ath9k_hw_common(ah), CONFIG, "Eeprom VER: %d, REV: %d\n",
527 ah->eep_ops->get_eeprom_ver(ah),
528 ah->eep_ops->get_eeprom_rev(ah));
530 ath9k_hw_ani_init(ah);
533 * EEPROM needs to be initialized before we do this.
534 * This is required for regulatory compliance.
536 if (AR_SREV_9300_20_OR_LATER(ah)) {
537 u16 regdmn = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
538 if ((regdmn & 0xF0) == CTL_FCC) {
539 ah->nf_2g.max = AR_PHY_CCA_MAX_GOOD_VAL_9300_FCC_2GHZ;
540 ah->nf_5g.max = AR_PHY_CCA_MAX_GOOD_VAL_9300_FCC_5GHZ;
544 return 0;
547 static int ath9k_hw_attach_ops(struct ath_hw *ah)
549 if (!AR_SREV_9300_20_OR_LATER(ah))
550 return ar9002_hw_attach_ops(ah);
552 ar9003_hw_attach_ops(ah);
553 return 0;
556 /* Called for all hardware families */
557 static int __ath9k_hw_init(struct ath_hw *ah)
559 struct ath_common *common = ath9k_hw_common(ah);
560 int r = 0;
562 ath9k_hw_read_revisions(ah);
564 switch (ah->hw_version.macVersion) {
565 case AR_SREV_VERSION_5416_PCI:
566 case AR_SREV_VERSION_5416_PCIE:
567 case AR_SREV_VERSION_9160:
568 case AR_SREV_VERSION_9100:
569 case AR_SREV_VERSION_9280:
570 case AR_SREV_VERSION_9285:
571 case AR_SREV_VERSION_9287:
572 case AR_SREV_VERSION_9271:
573 case AR_SREV_VERSION_9300:
574 case AR_SREV_VERSION_9330:
575 case AR_SREV_VERSION_9485:
576 case AR_SREV_VERSION_9340:
577 case AR_SREV_VERSION_9462:
578 case AR_SREV_VERSION_9550:
579 case AR_SREV_VERSION_9565:
580 case AR_SREV_VERSION_9531:
581 case AR_SREV_VERSION_9561:
582 break;
583 default:
584 ath_err(common,
585 "Mac Chip Rev 0x%02x.%x is not supported by this driver\n",
586 ah->hw_version.macVersion, ah->hw_version.macRev);
587 return -EOPNOTSUPP;
591 * Read back AR_WA into a permanent copy and set bits 14 and 17.
592 * We need to do this to avoid RMW of this register. We cannot
593          * read the reg when the chip is asleep.
595 if (AR_SREV_9300_20_OR_LATER(ah)) {
596 ah->WARegVal = REG_READ(ah, AR_WA);
597 ah->WARegVal |= (AR_WA_D3_L1_DISABLE |
598 AR_WA_ASPM_TIMER_BASED_DISABLE);
601 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
602 ath_err(common, "Couldn't reset chip\n");
603 return -EIO;
606 if (AR_SREV_9565(ah)) {
607 ah->WARegVal |= AR_WA_BIT22;
608 REG_WRITE(ah, AR_WA, ah->WARegVal);
611 ath9k_hw_init_defaults(ah);
612 ath9k_hw_init_config(ah);
614 r = ath9k_hw_attach_ops(ah);
615 if (r)
616 return r;
618 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
619 ath_err(common, "Couldn't wakeup chip\n");
620 return -EIO;
623 if (AR_SREV_9271(ah) || AR_SREV_9100(ah) || AR_SREV_9340(ah) ||
624 AR_SREV_9330(ah) || AR_SREV_9550(ah))
625 ah->is_pciexpress = false;
627 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
628 ath9k_hw_init_cal_settings(ah);
630 if (!ah->is_pciexpress)
631 ath9k_hw_disablepcie(ah);
633 r = ath9k_hw_post_init(ah);
634 if (r)
635 return r;
637 ath9k_hw_init_mode_gain_regs(ah);
638 r = ath9k_hw_fill_cap_info(ah);
639 if (r)
640 return r;
642 ath9k_hw_init_macaddr(ah);
643 ath9k_hw_init_hang_checks(ah);
645 common->state = ATH_HW_INITIALIZED;
647 return 0;
650 int ath9k_hw_init(struct ath_hw *ah)
652 int ret;
653 struct ath_common *common = ath9k_hw_common(ah);
655         /* These are all chipsets in the AR5008/AR9001/AR9002/AR9003 hardware families */
656 switch (ah->hw_version.devid) {
657 case AR5416_DEVID_PCI:
658 case AR5416_DEVID_PCIE:
659 case AR5416_AR9100_DEVID:
660 case AR9160_DEVID_PCI:
661 case AR9280_DEVID_PCI:
662 case AR9280_DEVID_PCIE:
663 case AR9285_DEVID_PCIE:
664 case AR9287_DEVID_PCI:
665 case AR9287_DEVID_PCIE:
666 case AR2427_DEVID_PCIE:
667 case AR9300_DEVID_PCIE:
668 case AR9300_DEVID_AR9485_PCIE:
669 case AR9300_DEVID_AR9330:
670 case AR9300_DEVID_AR9340:
671 case AR9300_DEVID_QCA955X:
672 case AR9300_DEVID_AR9580:
673 case AR9300_DEVID_AR9462:
674 case AR9485_DEVID_AR1111:
675 case AR9300_DEVID_AR9565:
676 case AR9300_DEVID_AR953X:
677 case AR9300_DEVID_QCA956X:
678 break;
679 default:
680 if (common->bus_ops->ath_bus_type == ATH_USB)
681 break;
682 ath_err(common, "Hardware device ID 0x%04x not supported\n",
683 ah->hw_version.devid);
684 return -EOPNOTSUPP;
687 ret = __ath9k_hw_init(ah);
688 if (ret) {
689 ath_err(common,
690 "Unable to initialize hardware; initialization status: %d\n",
691 ret);
692 return ret;
695 ath_dynack_init(ah);
697 return 0;
699 EXPORT_SYMBOL(ath9k_hw_init);
701 static void ath9k_hw_init_qos(struct ath_hw *ah)
703 ENABLE_REGWRITE_BUFFER(ah);
705 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
706 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
708 REG_WRITE(ah, AR_QOS_NO_ACK,
709 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
710 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
711 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
713 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
714 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
715 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
716 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
717 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
719 REGWRITE_BUFFER_FLUSH(ah);
722 u32 ar9003_get_pll_sqsum_dvc(struct ath_hw *ah)
724 struct ath_common *common = ath9k_hw_common(ah);
725 int i = 0;
727 REG_CLR_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
728 udelay(100);
729 REG_SET_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
731 while ((REG_READ(ah, PLL4) & PLL4_MEAS_DONE) == 0) {
733 udelay(100);
735 if (WARN_ON_ONCE(i >= 100)) {
736 ath_err(common, "PLL4 measurement not done\n");
737 break;
740 i++;
743 return (REG_READ(ah, PLL3) & SQSUM_DVC_MASK) >> 3;
745 EXPORT_SYMBOL(ar9003_get_pll_sqsum_dvc);
747 static void ath9k_hw_init_pll(struct ath_hw *ah,
748 struct ath9k_channel *chan)
750 u32 pll;
752 pll = ath9k_hw_compute_pll_control(ah, chan);
754 if (AR_SREV_9485(ah) || AR_SREV_9565(ah)) {
755 /* program BB PLL ki and kd value, ki=0x4, kd=0x40 */
756 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
757 AR_CH0_BB_DPLL2_PLL_PWD, 0x1);
758 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
759 AR_CH0_DPLL2_KD, 0x40);
760 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
761 AR_CH0_DPLL2_KI, 0x4);
763 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
764 AR_CH0_BB_DPLL1_REFDIV, 0x5);
765 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
766 AR_CH0_BB_DPLL1_NINI, 0x58);
767 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
768 AR_CH0_BB_DPLL1_NFRAC, 0x0);
770 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
771 AR_CH0_BB_DPLL2_OUTDIV, 0x1);
772 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
773 AR_CH0_BB_DPLL2_LOCAL_PLL, 0x1);
774 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
775 AR_CH0_BB_DPLL2_EN_NEGTRIG, 0x1);
777 /* program BB PLL phase_shift to 0x6 */
778 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
779 AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x6);
781 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
782 AR_CH0_BB_DPLL2_PLL_PWD, 0x0);
783 udelay(1000);
784 } else if (AR_SREV_9330(ah)) {
785 u32 ddr_dpll2, pll_control2, kd;
787 if (ah->is_clk_25mhz) {
788 ddr_dpll2 = 0x18e82f01;
789 pll_control2 = 0xe04a3d;
790 kd = 0x1d;
791 } else {
792 ddr_dpll2 = 0x19e82f01;
793 pll_control2 = 0x886666;
794 kd = 0x3d;
797 /* program DDR PLL ki and kd value */
798 REG_WRITE(ah, AR_CH0_DDR_DPLL2, ddr_dpll2);
800 /* program DDR PLL phase_shift */
801 REG_RMW_FIELD(ah, AR_CH0_DDR_DPLL3,
802 AR_CH0_DPLL3_PHASE_SHIFT, 0x1);
804 REG_WRITE(ah, AR_RTC_PLL_CONTROL,
805 pll | AR_RTC_9300_PLL_BYPASS);
806 udelay(1000);
808 /* program refdiv, nint, frac to RTC register */
809 REG_WRITE(ah, AR_RTC_PLL_CONTROL2, pll_control2);
811 /* program BB PLL kd and ki value */
812 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KD, kd);
813 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KI, 0x06);
815 /* program BB PLL phase_shift */
816 REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
817 AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x1);
818 } else if (AR_SREV_9340(ah) || AR_SREV_9550(ah) || AR_SREV_9531(ah) ||
819 AR_SREV_9561(ah)) {
820 u32 regval, pll2_divint, pll2_divfrac, refdiv;
822 REG_WRITE(ah, AR_RTC_PLL_CONTROL,
823 pll | AR_RTC_9300_SOC_PLL_BYPASS);
824 udelay(1000);
826 REG_SET_BIT(ah, AR_PHY_PLL_MODE, 0x1 << 16);
827 udelay(100);
829 if (ah->is_clk_25mhz) {
830 if (AR_SREV_9531(ah) || AR_SREV_9561(ah)) {
831 pll2_divint = 0x1c;
832 pll2_divfrac = 0xa3d2;
833 refdiv = 1;
834 } else {
835 pll2_divint = 0x54;
836 pll2_divfrac = 0x1eb85;
837 refdiv = 3;
839 } else {
840 if (AR_SREV_9340(ah)) {
841 pll2_divint = 88;
842 pll2_divfrac = 0;
843 refdiv = 5;
844 } else {
845 pll2_divint = 0x11;
846 pll2_divfrac = (AR_SREV_9531(ah) ||
847 AR_SREV_9561(ah)) ?
848 0x26665 : 0x26666;
849 refdiv = 1;
853 regval = REG_READ(ah, AR_PHY_PLL_MODE);
854 if (AR_SREV_9531(ah) || AR_SREV_9561(ah))
855 regval |= (0x1 << 22);
856 else
857 regval |= (0x1 << 16);
858 REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
859 udelay(100);
861 REG_WRITE(ah, AR_PHY_PLL_CONTROL, (refdiv << 27) |
862 (pll2_divint << 18) | pll2_divfrac);
863 udelay(100);
865 regval = REG_READ(ah, AR_PHY_PLL_MODE);
866 if (AR_SREV_9340(ah))
867 regval = (regval & 0x80071fff) |
868 (0x1 << 30) |
869 (0x1 << 13) |
870 (0x4 << 26) |
871 (0x18 << 19);
872 else if (AR_SREV_9531(ah) || AR_SREV_9561(ah)) {
873 regval = (regval & 0x01c00fff) |
874 (0x1 << 31) |
875 (0x2 << 29) |
876 (0xa << 25) |
877 (0x1 << 19);
879 if (AR_SREV_9531(ah))
880 regval |= (0x6 << 12);
881 } else
882 regval = (regval & 0x80071fff) |
883 (0x3 << 30) |
884 (0x1 << 13) |
885 (0x4 << 26) |
886 (0x60 << 19);
887 REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
889 if (AR_SREV_9531(ah) || AR_SREV_9561(ah))
890 REG_WRITE(ah, AR_PHY_PLL_MODE,
891 REG_READ(ah, AR_PHY_PLL_MODE) & 0xffbfffff);
892 else
893 REG_WRITE(ah, AR_PHY_PLL_MODE,
894 REG_READ(ah, AR_PHY_PLL_MODE) & 0xfffeffff);
896 udelay(1000);
899 if (AR_SREV_9565(ah))
900 pll |= 0x40000;
901 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
903 if (AR_SREV_9485(ah) || AR_SREV_9340(ah) || AR_SREV_9330(ah) ||
904 AR_SREV_9550(ah))
905 udelay(1000);
907         /* Switch the core clock for AR9271 to 117 MHz */
908 if (AR_SREV_9271(ah)) {
909 udelay(500);
910 REG_WRITE(ah, 0x50040, 0x304);
913 udelay(RTC_PLL_SETTLE_DELAY);
915 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
918 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
919 enum nl80211_iftype opmode)
921 u32 sync_default = AR_INTR_SYNC_DEFAULT;
922 u32 imr_reg = AR_IMR_TXERR |
923 AR_IMR_TXURN |
924 AR_IMR_RXERR |
925 AR_IMR_RXORN |
926 AR_IMR_BCNMISC;
927 u32 msi_cfg = 0;
929 if (AR_SREV_9340(ah) || AR_SREV_9550(ah) || AR_SREV_9531(ah) ||
930 AR_SREV_9561(ah))
931 sync_default &= ~AR_INTR_SYNC_HOST1_FATAL;
933 if (AR_SREV_9300_20_OR_LATER(ah)) {
934 imr_reg |= AR_IMR_RXOK_HP;
935 if (ah->config.rx_intr_mitigation) {
936 imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
937 msi_cfg |= AR_INTCFG_MSI_RXINTM | AR_INTCFG_MSI_RXMINTR;
938 } else {
939 imr_reg |= AR_IMR_RXOK_LP;
940 msi_cfg |= AR_INTCFG_MSI_RXOK;
942 } else {
943 if (ah->config.rx_intr_mitigation) {
944 imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
945 msi_cfg |= AR_INTCFG_MSI_RXINTM | AR_INTCFG_MSI_RXMINTR;
946 } else {
947 imr_reg |= AR_IMR_RXOK;
948 msi_cfg |= AR_INTCFG_MSI_RXOK;
952 if (ah->config.tx_intr_mitigation) {
953 imr_reg |= AR_IMR_TXINTM | AR_IMR_TXMINTR;
954 msi_cfg |= AR_INTCFG_MSI_TXINTM | AR_INTCFG_MSI_TXMINTR;
955 } else {
956 imr_reg |= AR_IMR_TXOK;
957 msi_cfg |= AR_INTCFG_MSI_TXOK;
960 ENABLE_REGWRITE_BUFFER(ah);
962 REG_WRITE(ah, AR_IMR, imr_reg);
963 ah->imrs2_reg |= AR_IMR_S2_GTT;
964 REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
966 if (ah->msi_enabled) {
967 ah->msi_reg = REG_READ(ah, AR_PCIE_MSI);
968 ah->msi_reg |= AR_PCIE_MSI_HW_DBI_WR_EN;
969 ah->msi_reg &= AR_PCIE_MSI_HW_INT_PENDING_ADDR_MSI_64;
970 REG_WRITE(ah, AR_INTCFG, msi_cfg);
971 ath_dbg(ath9k_hw_common(ah), ANY,
972 "value of AR_INTCFG=0x%X, msi_cfg=0x%X\n",
973 REG_READ(ah, AR_INTCFG), msi_cfg);
976 if (!AR_SREV_9100(ah)) {
977 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
978 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, sync_default);
979 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
982 REGWRITE_BUFFER_FLUSH(ah);
984 if (AR_SREV_9300_20_OR_LATER(ah)) {
985 REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE, 0);
986 REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK, 0);
987 REG_WRITE(ah, AR_INTR_PRIO_SYNC_ENABLE, 0);
988 REG_WRITE(ah, AR_INTR_PRIO_SYNC_MASK, 0);
992 static void ath9k_hw_set_sifs_time(struct ath_hw *ah, u32 us)
994 u32 val = ath9k_hw_mac_to_clks(ah, us - 2);
995 val = min(val, (u32) 0xFFFF);
996 REG_WRITE(ah, AR_D_GBL_IFS_SIFS, val);
999 void ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
1001 u32 val = ath9k_hw_mac_to_clks(ah, us);
1002 val = min(val, (u32) 0xFFFF);
1003 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, val);
1006 void ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1008 u32 val = ath9k_hw_mac_to_clks(ah, us);
1009 val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_ACK));
1010 REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_ACK, val);
1013 void ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1015 u32 val = ath9k_hw_mac_to_clks(ah, us);
1016 val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_CTS));
1017 REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_CTS, val);
1020 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1022 if (tu > 0xFFFF) {
1023 ath_dbg(ath9k_hw_common(ah), XMIT, "bad global tx timeout %u\n",
1024 tu);
1025 ah->globaltxtimeout = (u32) -1;
1026 return false;
1027 } else {
1028 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1029 ah->globaltxtimeout = tu;
1030 return true;
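/*
 * Numeric sketch of the ACK/CTS timeout math in
 * ath9k_hw_init_global_settings() below, for a full-rate 2 GHz channel with
 * coverage_class 0 and the default 9 us slot time: sifstime = 10, so
 * acktimeout = 9 + 10 + 0 = 19 us; the 2 GHz workaround then adds
 * 64 - 10 - 9 = 45 us, giving the 64 us value used by the initvals
 * (and 48 us for the CTS timeout).
 */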
1034 void ath9k_hw_init_global_settings(struct ath_hw *ah)
1036 struct ath_common *common = ath9k_hw_common(ah);
1037 const struct ath9k_channel *chan = ah->curchan;
1038 int acktimeout, ctstimeout, ack_offset = 0;
1039 int slottime;
1040 int sifstime;
1041 int rx_lat = 0, tx_lat = 0, eifs = 0, ack_shift = 0;
1042 u32 reg;
1044 ath_dbg(ath9k_hw_common(ah), RESET, "ah->misc_mode 0x%x\n",
1045 ah->misc_mode);
1047 if (!chan)
1048 return;
1050 if (ah->misc_mode != 0)
1051 REG_SET_BIT(ah, AR_PCU_MISC, ah->misc_mode);
1053 if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1054 rx_lat = 41;
1055 else
1056 rx_lat = 37;
1057 tx_lat = 54;
1059 if (IS_CHAN_5GHZ(chan))
1060 sifstime = 16;
1061 else
1062 sifstime = 10;
1064 if (IS_CHAN_HALF_RATE(chan)) {
1065 eifs = 175;
1066 rx_lat *= 2;
1067 tx_lat *= 2;
1068 if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1069 tx_lat += 11;
1071 sifstime = 32;
1072 ack_offset = 16;
1073 ack_shift = 3;
1074 slottime = 13;
1075 } else if (IS_CHAN_QUARTER_RATE(chan)) {
1076 eifs = 340;
1077 rx_lat = (rx_lat * 4) - 1;
1078 tx_lat *= 4;
1079 if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1080 tx_lat += 22;
1082 sifstime = 64;
1083 ack_offset = 32;
1084 ack_shift = 1;
1085 slottime = 21;
1086 } else {
1087 if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1088 eifs = AR_D_GBL_IFS_EIFS_ASYNC_FIFO;
1089 reg = AR_USEC_ASYNC_FIFO;
1090 } else {
1091 eifs = REG_READ(ah, AR_D_GBL_IFS_EIFS)/
1092 common->clockrate;
1093 reg = REG_READ(ah, AR_USEC);
1095 rx_lat = MS(reg, AR_USEC_RX_LAT);
1096 tx_lat = MS(reg, AR_USEC_TX_LAT);
1098 slottime = ah->slottime;
1101 /* As defined by IEEE 802.11-2007 17.3.8.6 */
1102 slottime += 3 * ah->coverage_class;
1103 acktimeout = slottime + sifstime + ack_offset;
1104 ctstimeout = acktimeout;
1107          * Workaround for early ACK timeouts: add an offset to match the
1108 * initval's 64us ack timeout value. Use 48us for the CTS timeout.
1109 * This was initially only meant to work around an issue with delayed
1110 * BA frames in some implementations, but it has been found to fix ACK
1111 * timeout issues in other cases as well.
1113 if (IS_CHAN_2GHZ(chan) &&
1114 !IS_CHAN_HALF_RATE(chan) && !IS_CHAN_QUARTER_RATE(chan)) {
1115 acktimeout += 64 - sifstime - ah->slottime;
1116 ctstimeout += 48 - sifstime - ah->slottime;
1119 if (ah->dynack.enabled) {
1120 acktimeout = ah->dynack.ackto;
1121 ctstimeout = acktimeout;
1122 slottime = (acktimeout - 3) / 2;
1123 } else {
1124 ah->dynack.ackto = acktimeout;
1127 ath9k_hw_set_sifs_time(ah, sifstime);
1128 ath9k_hw_setslottime(ah, slottime);
1129 ath9k_hw_set_ack_timeout(ah, acktimeout);
1130 ath9k_hw_set_cts_timeout(ah, ctstimeout);
1131 if (ah->globaltxtimeout != (u32) -1)
1132 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1134 REG_WRITE(ah, AR_D_GBL_IFS_EIFS, ath9k_hw_mac_to_clks(ah, eifs));
1135 REG_RMW(ah, AR_USEC,
1136 (common->clockrate - 1) |
1137 SM(rx_lat, AR_USEC_RX_LAT) |
1138 SM(tx_lat, AR_USEC_TX_LAT),
1139 AR_USEC_TX_LAT | AR_USEC_RX_LAT | AR_USEC_USEC);
1141 if (IS_CHAN_HALF_RATE(chan) || IS_CHAN_QUARTER_RATE(chan))
1142 REG_RMW(ah, AR_TXSIFS,
1143 sifstime | SM(ack_shift, AR_TXSIFS_ACK_SHIFT),
1144 (AR_TXSIFS_TIME | AR_TXSIFS_ACK_SHIFT));
1146 EXPORT_SYMBOL(ath9k_hw_init_global_settings);
1148 void ath9k_hw_deinit(struct ath_hw *ah)
1150 struct ath_common *common = ath9k_hw_common(ah);
1152 if (common->state < ATH_HW_INITIALIZED)
1153 return;
1155 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1157 EXPORT_SYMBOL(ath9k_hw_deinit);
1159 /*******/
1160 /* INI */
1161 /*******/
1163 u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, struct ath9k_channel *chan)
1165 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1167 if (IS_CHAN_2GHZ(chan))
1168 ctl |= CTL_11G;
1169 else
1170 ctl |= CTL_11A;
1172 return ctl;
1175 /****************************************/
1176 /* Reset and Channel Switching Routines */
1177 /****************************************/
1179 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1181 struct ath_common *common = ath9k_hw_common(ah);
1182 int txbuf_size;
1184 ENABLE_REGWRITE_BUFFER(ah);
1187 * set AHB_MODE not to do cacheline prefetches
1189 if (!AR_SREV_9300_20_OR_LATER(ah))
1190 REG_SET_BIT(ah, AR_AHB_MODE, AR_AHB_PREFETCH_RD_EN);
1193 * let mac dma reads be in 128 byte chunks
1195 REG_RMW(ah, AR_TXCFG, AR_TXCFG_DMASZ_128B, AR_TXCFG_DMASZ_MASK);
1197 REGWRITE_BUFFER_FLUSH(ah);
1200 * Restore TX Trigger Level to its pre-reset value.
1201 * The initial value depends on whether aggregation is enabled, and is
1202 * adjusted whenever underruns are detected.
1204 if (!AR_SREV_9300_20_OR_LATER(ah))
1205 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1207 ENABLE_REGWRITE_BUFFER(ah);
1210 * let mac dma writes be in 128 byte chunks
1212 REG_RMW(ah, AR_RXCFG, AR_RXCFG_DMASZ_128B, AR_RXCFG_DMASZ_MASK);
1215 * Setup receive FIFO threshold to hold off TX activities
1217 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1219 if (AR_SREV_9300_20_OR_LATER(ah)) {
1220 REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_HP, 0x1);
1221 REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_LP, 0x1);
1223 ath9k_hw_set_rx_bufsize(ah, common->rx_bufsize -
1224 ah->caps.rx_status_len);
1228 * reduce the number of usable entries in PCU TXBUF to avoid
1229 * wrap around issues.
1231 if (AR_SREV_9285(ah)) {
1232                 /* For AR9285 the number of FIFOs is reduced to half.
1233 * So set the usable tx buf size also to half to
1234 * avoid data/delimiter underruns
1236 txbuf_size = AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE;
1237 } else if (AR_SREV_9340_13_OR_LATER(ah)) {
1238 /* Uses fewer entries for AR934x v1.3+ to prevent rx overruns */
1239 txbuf_size = AR_9340_PCU_TXBUF_CTRL_USABLE_SIZE;
1240 } else {
1241 txbuf_size = AR_PCU_TXBUF_CTRL_USABLE_SIZE;
1244 if (!AR_SREV_9271(ah))
1245 REG_WRITE(ah, AR_PCU_TXBUF_CTRL, txbuf_size);
1247 REGWRITE_BUFFER_FLUSH(ah);
1249 if (AR_SREV_9300_20_OR_LATER(ah))
1250 ath9k_hw_reset_txstatus_ring(ah);
1253 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1255 u32 mask = AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC;
1256 u32 set = AR_STA_ID1_KSRCH_MODE;
1258 ENABLE_REG_RMW_BUFFER(ah);
1259 switch (opmode) {
1260 case NL80211_IFTYPE_ADHOC:
1261 if (!AR_SREV_9340_13(ah)) {
1262 set |= AR_STA_ID1_ADHOC;
1263 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1264 break;
1266 /* fall through */
1267 case NL80211_IFTYPE_OCB:
1268 case NL80211_IFTYPE_MESH_POINT:
1269 case NL80211_IFTYPE_AP:
1270 set |= AR_STA_ID1_STA_AP;
1271 /* fall through */
1272 case NL80211_IFTYPE_STATION:
1273 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1274 break;
1275 default:
1276 if (!ah->is_monitoring)
1277 set = 0;
1278 break;
1280 REG_RMW(ah, AR_STA_ID1, set, mask);
1281 REG_RMW_BUFFER_FLUSH(ah);
1284 void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, u32 coef_scaled,
1285 u32 *coef_mantissa, u32 *coef_exponent)
1287 u32 coef_exp, coef_man;
1289 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1290 if ((coef_scaled >> coef_exp) & 0x1)
1291 break;
1293 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1295 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1297 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1298 *coef_exponent = coef_exp - 16;
1301 /* AR9330 WAR:
1302 * call external reset function to reset WMAC if:
1303 * - doing a cold reset
1304 * - we have pending frames in the TX queues.
1306 static bool ath9k_hw_ar9330_reset_war(struct ath_hw *ah, int type)
1308 int i, npend = 0;
1310 for (i = 0; i < AR_NUM_QCU; i++) {
1311 npend = ath9k_hw_numtxpending(ah, i);
1312 if (npend)
1313 break;
1316 if (ah->external_reset &&
1317 (npend || type == ATH9K_RESET_COLD)) {
1318 int reset_err = 0;
1320 ath_dbg(ath9k_hw_common(ah), RESET,
1321 "reset MAC via external reset\n");
1323 reset_err = ah->external_reset();
1324 if (reset_err) {
1325 ath_err(ath9k_hw_common(ah),
1326 "External reset failed, err=%d\n",
1327 reset_err);
1328 return false;
1331 REG_WRITE(ah, AR_RTC_RESET, 1);
1334 return true;
1337 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1339 u32 rst_flags;
1340 u32 tmpReg;
1342 if (AR_SREV_9100(ah)) {
1343 REG_RMW_FIELD(ah, AR_RTC_DERIVED_CLK,
1344 AR_RTC_DERIVED_CLK_PERIOD, 1);
1345 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1348 ENABLE_REGWRITE_BUFFER(ah);
1350 if (AR_SREV_9300_20_OR_LATER(ah)) {
1351 REG_WRITE(ah, AR_WA, ah->WARegVal);
1352 udelay(10);
1355 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1356 AR_RTC_FORCE_WAKE_ON_INT);
1358 if (AR_SREV_9100(ah)) {
1359 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1360 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1361 } else {
1362 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1363 if (AR_SREV_9340(ah))
1364 tmpReg &= AR9340_INTR_SYNC_LOCAL_TIMEOUT;
1365 else
1366 tmpReg &= AR_INTR_SYNC_LOCAL_TIMEOUT |
1367 AR_INTR_SYNC_RADM_CPL_TIMEOUT;
1369 if (tmpReg) {
1370 u32 val;
1371 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1373 val = AR_RC_HOSTIF;
1374 if (!AR_SREV_9300_20_OR_LATER(ah))
1375 val |= AR_RC_AHB;
1376 REG_WRITE(ah, AR_RC, val);
1378 } else if (!AR_SREV_9300_20_OR_LATER(ah))
1379 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1381 rst_flags = AR_RTC_RC_MAC_WARM;
1382 if (type == ATH9K_RESET_COLD)
1383 rst_flags |= AR_RTC_RC_MAC_COLD;
1386 if (AR_SREV_9330(ah)) {
1387 if (!ath9k_hw_ar9330_reset_war(ah, type))
1388 return false;
1391 if (ath9k_hw_mci_is_enabled(ah))
1392 ar9003_mci_check_gpm_offset(ah);
1394         /* DMA HALT added to resolve AR9300 and AR9580 bus errors during
1395          * RTC_RC reg read
1397 if (AR_SREV_9300(ah) || AR_SREV_9580(ah)) {
1398 REG_SET_BIT(ah, AR_CFG, AR_CFG_HALT_REQ);
1399 ath9k_hw_wait(ah, AR_CFG, AR_CFG_HALT_ACK, AR_CFG_HALT_ACK,
1400 20 * AH_WAIT_TIMEOUT);
1401 REG_CLR_BIT(ah, AR_CFG, AR_CFG_HALT_REQ);
1404 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1406 REGWRITE_BUFFER_FLUSH(ah);
1408 if (AR_SREV_9300_20_OR_LATER(ah))
1409 udelay(50);
1410 else if (AR_SREV_9100(ah))
1411 mdelay(10);
1412 else
1413 udelay(100);
1415 REG_WRITE(ah, AR_RTC_RC, 0);
1416 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1417 ath_dbg(ath9k_hw_common(ah), RESET, "RTC stuck in MAC reset\n");
1418 return false;
1421 if (!AR_SREV_9100(ah))
1422 REG_WRITE(ah, AR_RC, 0);
1424 if (AR_SREV_9100(ah))
1425 udelay(50);
1427 return true;
1430 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1432 ENABLE_REGWRITE_BUFFER(ah);
1434 if (AR_SREV_9300_20_OR_LATER(ah)) {
1435 REG_WRITE(ah, AR_WA, ah->WARegVal);
1436 udelay(10);
1439 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1440 AR_RTC_FORCE_WAKE_ON_INT);
1442 if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1443 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1445 REG_WRITE(ah, AR_RTC_RESET, 0);
1447 REGWRITE_BUFFER_FLUSH(ah);
1449 udelay(2);
1451 if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1452 REG_WRITE(ah, AR_RC, 0);
1454 REG_WRITE(ah, AR_RTC_RESET, 1);
1456 if (!ath9k_hw_wait(ah,
1457 AR_RTC_STATUS,
1458 AR_RTC_STATUS_M,
1459 AR_RTC_STATUS_ON,
1460 AH_WAIT_TIMEOUT)) {
1461 ath_dbg(ath9k_hw_common(ah), RESET, "RTC not waking up\n");
1462 return false;
1465 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1468 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1470 bool ret = false;
1472 if (AR_SREV_9300_20_OR_LATER(ah)) {
1473 REG_WRITE(ah, AR_WA, ah->WARegVal);
1474 udelay(10);
1477 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1478 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1480 if (!ah->reset_power_on)
1481 type = ATH9K_RESET_POWER_ON;
1483 switch (type) {
1484 case ATH9K_RESET_POWER_ON:
1485 ret = ath9k_hw_set_reset_power_on(ah);
1486 if (ret)
1487 ah->reset_power_on = true;
1488 break;
1489 case ATH9K_RESET_WARM:
1490 case ATH9K_RESET_COLD:
1491 ret = ath9k_hw_set_reset(ah, type);
1492 break;
1493 default:
1494 break;
1497 return ret;
1500 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1501 struct ath9k_channel *chan)
1503 int reset_type = ATH9K_RESET_WARM;
1505 if (AR_SREV_9280(ah)) {
1506 if (ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
1507 reset_type = ATH9K_RESET_POWER_ON;
1508 else
1509 reset_type = ATH9K_RESET_COLD;
1510 } else if (ah->chip_fullsleep || REG_READ(ah, AR_Q_TXE) ||
1511 (REG_READ(ah, AR_CR) & AR_CR_RXE))
1512 reset_type = ATH9K_RESET_COLD;
1514 if (!ath9k_hw_set_reset_reg(ah, reset_type))
1515 return false;
1517 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1518 return false;
1520 ah->chip_fullsleep = false;
1522 if (AR_SREV_9330(ah))
1523 ar9003_hw_internal_regulator_apply(ah);
1524 ath9k_hw_init_pll(ah, chan);
1526 return true;
1529 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1530 struct ath9k_channel *chan)
1532 struct ath_common *common = ath9k_hw_common(ah);
1533 struct ath9k_hw_capabilities *pCap = &ah->caps;
1534 bool band_switch = false, mode_diff = false;
1535 u8 ini_reloaded = 0;
1536 u32 qnum;
1537 int r;
1539 if (pCap->hw_caps & ATH9K_HW_CAP_FCC_BAND_SWITCH) {
1540 u32 flags_diff = chan->channelFlags ^ ah->curchan->channelFlags;
1541 band_switch = !!(flags_diff & CHANNEL_5GHZ);
1542 mode_diff = !!(flags_diff & ~CHANNEL_HT);
1545 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1546 if (ath9k_hw_numtxpending(ah, qnum)) {
1547 ath_dbg(common, QUEUE,
1548 "Transmit frames pending on queue %d\n", qnum);
1549 return false;
1553 if (!ath9k_hw_rfbus_req(ah)) {
1554 ath_err(common, "Could not kill baseband RX\n");
1555 return false;
1558 if (band_switch || mode_diff) {
1559 ath9k_hw_mark_phy_inactive(ah);
1560 udelay(5);
1562 if (band_switch)
1563 ath9k_hw_init_pll(ah, chan);
1565 if (ath9k_hw_fast_chan_change(ah, chan, &ini_reloaded)) {
1566 ath_err(common, "Failed to do fast channel change\n");
1567 return false;
1571 ath9k_hw_set_channel_regs(ah, chan);
1573 r = ath9k_hw_rf_set_freq(ah, chan);
1574 if (r) {
1575 ath_err(common, "Failed to set channel\n");
1576 return false;
1578 ath9k_hw_set_clockrate(ah);
1579 ath9k_hw_apply_txpower(ah, chan, false);
1581 ath9k_hw_set_delta_slope(ah, chan);
1582 ath9k_hw_spur_mitigate_freq(ah, chan);
1584 if (band_switch || ini_reloaded)
1585 ah->eep_ops->set_board_values(ah, chan);
1587 ath9k_hw_init_bb(ah, chan);
1588 ath9k_hw_rfbus_done(ah);
1590 if (band_switch || ini_reloaded) {
1591 ah->ah_flags |= AH_FASTCC;
1592 ath9k_hw_init_cal(ah, chan);
1593 ah->ah_flags &= ~AH_FASTCC;
1596 return true;
1599 static void ath9k_hw_apply_gpio_override(struct ath_hw *ah)
1601 u32 gpio_mask = ah->gpio_mask;
1602 int i;
1604 for (i = 0; gpio_mask; i++, gpio_mask >>= 1) {
1605 if (!(gpio_mask & 1))
1606 continue;
1608 ath9k_hw_gpio_request_out(ah, i, NULL,
1609 AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
1610 ath9k_hw_set_gpio(ah, i, !!(ah->gpio_val & BIT(i)));
1611 ath9k_hw_gpio_free(ah, i);
1615 void ath9k_hw_check_nav(struct ath_hw *ah)
1617 struct ath_common *common = ath9k_hw_common(ah);
1618 u32 val;
1620 val = REG_READ(ah, AR_NAV);
1621 if (val != 0xdeadbeef && val > 0x7fff) {
1622 ath_dbg(common, BSTUCK, "Abnormal NAV: 0x%x\n", val);
1623 REG_WRITE(ah, AR_NAV, 0);
1626 EXPORT_SYMBOL(ath9k_hw_check_nav);
1628 bool ath9k_hw_check_alive(struct ath_hw *ah)
1630 int count = 50;
1631 u32 reg, last_val;
1633 /* Check if chip failed to wake up */
1634 if (REG_READ(ah, AR_CFG) == 0xdeadbeef)
1635 return false;
1637 if (AR_SREV_9300(ah))
1638 return !ath9k_hw_detect_mac_hang(ah);
1640 if (AR_SREV_9285_12_OR_LATER(ah))
1641 return true;
1643 last_val = REG_READ(ah, AR_OBS_BUS_1);
1644 do {
1645 reg = REG_READ(ah, AR_OBS_BUS_1);
1646 if (reg != last_val)
1647 return true;
1649 udelay(1);
1650 last_val = reg;
1651 if ((reg & 0x7E7FFFEF) == 0x00702400)
1652 continue;
1654 switch (reg & 0x7E000B00) {
1655 case 0x1E000000:
1656 case 0x52000B00:
1657 case 0x18000B00:
1658 continue;
1659 default:
1660 return true;
1662 } while (count-- > 0);
1664 return false;
1666 EXPORT_SYMBOL(ath9k_hw_check_alive);
1668 static void ath9k_hw_init_mfp(struct ath_hw *ah)
1670 /* Setup MFP options for CCMP */
1671 if (AR_SREV_9280_20_OR_LATER(ah)) {
1672 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
1673 * frames when constructing CCMP AAD. */
1674 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
1675 0xc7ff);
1676 if (AR_SREV_9271(ah) || AR_DEVID_7010(ah))
1677 ah->sw_mgmt_crypto_tx = true;
1678 else
1679 ah->sw_mgmt_crypto_tx = false;
1680 ah->sw_mgmt_crypto_rx = false;
1681 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1682 /* Disable hardware crypto for management frames */
1683 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
1684 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
1685 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1686 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
1687 ah->sw_mgmt_crypto_tx = true;
1688 ah->sw_mgmt_crypto_rx = true;
1689 } else {
1690 ah->sw_mgmt_crypto_tx = true;
1691 ah->sw_mgmt_crypto_rx = true;
1695 static void ath9k_hw_reset_opmode(struct ath_hw *ah,
1696 u32 macStaId1, u32 saveDefAntenna)
1698 struct ath_common *common = ath9k_hw_common(ah);
1700 ENABLE_REGWRITE_BUFFER(ah);
1702 REG_RMW(ah, AR_STA_ID1, macStaId1
1703 | AR_STA_ID1_RTS_USE_DEF
1704 | ah->sta_id1_defaults,
1705 ~AR_STA_ID1_SADH_MASK);
1706 ath_hw_setbssidmask(common);
1707 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
1708 ath9k_hw_write_associd(ah);
1709 REG_WRITE(ah, AR_ISR, ~0);
1710 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
1712 REGWRITE_BUFFER_FLUSH(ah);
1714 ath9k_hw_set_operating_mode(ah, ah->opmode);
1717 static void ath9k_hw_init_queues(struct ath_hw *ah)
1719 int i;
1721 ENABLE_REGWRITE_BUFFER(ah);
1723 for (i = 0; i < AR_NUM_DCU; i++)
1724 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
1726 REGWRITE_BUFFER_FLUSH(ah);
1728 ah->intr_txqs = 0;
1729 for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
1730 ath9k_hw_resettxqueue(ah, i);
1734 * For big endian systems turn on swapping for descriptors
1736 static void ath9k_hw_init_desc(struct ath_hw *ah)
1738 struct ath_common *common = ath9k_hw_common(ah);
1740 if (AR_SREV_9100(ah)) {
1741 u32 mask;
1742 mask = REG_READ(ah, AR_CFG);
1743 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
1744 ath_dbg(common, RESET, "CFG Byte Swap Set 0x%x\n",
1745 mask);
1746 } else {
1747 mask = INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
1748 REG_WRITE(ah, AR_CFG, mask);
1749 ath_dbg(common, RESET, "Setting CFG 0x%x\n",
1750 REG_READ(ah, AR_CFG));
1752 } else {
1753 if (common->bus_ops->ath_bus_type == ATH_USB) {
1754 /* Configure AR9271 target WLAN */
1755 if (AR_SREV_9271(ah))
1756 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
1757 else
1758 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1760 #ifdef __BIG_ENDIAN
1761 else if (AR_SREV_9330(ah) || AR_SREV_9340(ah) ||
1762 AR_SREV_9550(ah) || AR_SREV_9531(ah) ||
1763 AR_SREV_9561(ah))
1764 REG_RMW(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB, 0);
1765 else
1766 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1767 #endif
1772 * Fast channel change:
1773 * (Change synthesizer based on channel freq without resetting chip)
1775 static int ath9k_hw_do_fastcc(struct ath_hw *ah, struct ath9k_channel *chan)
1777 struct ath_common *common = ath9k_hw_common(ah);
1778 struct ath9k_hw_capabilities *pCap = &ah->caps;
1779 int ret;
1781 if (AR_SREV_9280(ah) && common->bus_ops->ath_bus_type == ATH_PCI)
1782 goto fail;
1784 if (ah->chip_fullsleep)
1785 goto fail;
1787 if (!ah->curchan)
1788 goto fail;
1790 if (chan->channel == ah->curchan->channel)
1791 goto fail;
1793 if ((ah->curchan->channelFlags | chan->channelFlags) &
1794 (CHANNEL_HALF | CHANNEL_QUARTER))
1795 goto fail;
1798          * If cross-band fcc is not supported, bail out if channelFlags differ.
1800 if (!(pCap->hw_caps & ATH9K_HW_CAP_FCC_BAND_SWITCH) &&
1801 ((chan->channelFlags ^ ah->curchan->channelFlags) & ~CHANNEL_HT))
1802 goto fail;
1804 if (!ath9k_hw_check_alive(ah))
1805 goto fail;
1808          * For AR9462, make sure that the calibration data needed for
1809          * re-use is present.
1811 if (AR_SREV_9462(ah) && (ah->caldata &&
1812 (!test_bit(TXIQCAL_DONE, &ah->caldata->cal_flags) ||
1813 !test_bit(TXCLCAL_DONE, &ah->caldata->cal_flags) ||
1814 !test_bit(RTT_DONE, &ah->caldata->cal_flags))))
1815 goto fail;
1817 ath_dbg(common, RESET, "FastChannelChange for %d -> %d\n",
1818 ah->curchan->channel, chan->channel);
1820 ret = ath9k_hw_channel_change(ah, chan);
1821 if (!ret)
1822 goto fail;
1824 if (ath9k_hw_mci_is_enabled(ah))
1825 ar9003_mci_2g5g_switch(ah, false);
1827 ath9k_hw_loadnf(ah, ah->curchan);
1828 ath9k_hw_start_nfcal(ah, true);
1830 if (AR_SREV_9271(ah))
1831 ar9002_hw_load_ani_reg(ah, chan);
1833 return 0;
1834 fail:
1835 return -EINVAL;
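/*
 * ath9k_hw_get_tsf_offset() below returns the elapsed time in microseconds
 * between two timespec64 values, sampling the raw monotonic clock when cur
 * is NULL. For instance, if 2 ms pass between the saved timestamp and the
 * call, it returns 2000, which ath9k_hw_reset() adds back onto the saved
 * TSF when restoring it after a chip reset.
 */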
1838 u32 ath9k_hw_get_tsf_offset(struct timespec64 *last, struct timespec64 *cur)
1840 struct timespec64 ts;
1841 s64 usec;
1843 if (!cur) {
1844 ktime_get_raw_ts64(&ts);
1845 cur = &ts;
1848 usec = cur->tv_sec * 1000000ULL + cur->tv_nsec / 1000;
1849 usec -= last->tv_sec * 1000000ULL + last->tv_nsec / 1000;
1851 return (u32) usec;
1853 EXPORT_SYMBOL(ath9k_hw_get_tsf_offset);
1855 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
1856 struct ath9k_hw_cal_data *caldata, bool fastcc)
1858 struct ath_common *common = ath9k_hw_common(ah);
1859 u32 saveLedState;
1860 u32 saveDefAntenna;
1861 u32 macStaId1;
1862 struct timespec64 tsf_ts;
1863 u32 tsf_offset;
1864 u64 tsf = 0;
1865 int r;
1866 bool start_mci_reset = false;
1867 bool save_fullsleep = ah->chip_fullsleep;
1869 if (ath9k_hw_mci_is_enabled(ah)) {
1870 start_mci_reset = ar9003_mci_start_reset(ah, chan);
1871 if (start_mci_reset)
1872 return 0;
1875 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1876 return -EIO;
1878 if (ah->curchan && !ah->chip_fullsleep)
1879 ath9k_hw_getnf(ah, ah->curchan);
1881 ah->caldata = caldata;
1882 if (caldata && (chan->channel != caldata->channel ||
1883 chan->channelFlags != caldata->channelFlags)) {
1884 /* Operating channel changed, reset channel calibration data */
1885 memset(caldata, 0, sizeof(*caldata));
1886 ath9k_init_nfcal_hist_buffer(ah, chan);
1887 } else if (caldata) {
1888 clear_bit(PAPRD_PACKET_SENT, &caldata->cal_flags);
1890 ah->noise = ath9k_hw_getchan_noise(ah, chan, chan->noisefloor);
1892 if (fastcc) {
1893 r = ath9k_hw_do_fastcc(ah, chan);
1894 if (!r)
1895 return r;
1898 if (ath9k_hw_mci_is_enabled(ah))
1899 ar9003_mci_stop_bt(ah, save_fullsleep);
1901 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
1902 if (saveDefAntenna == 0)
1903 saveDefAntenna = 1;
1905 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
1907 /* Save TSF before chip reset, a cold reset clears it */
1908 ktime_get_raw_ts64(&tsf_ts);
1909 tsf = ath9k_hw_gettsf64(ah);
1911 saveLedState = REG_READ(ah, AR_CFG_LED) &
1912 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
1913 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
1915 ath9k_hw_mark_phy_inactive(ah);
1917 ah->paprd_table_write_done = false;
1919 /* Only required on the first reset */
1920 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1921 REG_WRITE(ah,
1922 AR9271_RESET_POWER_DOWN_CONTROL,
1923 AR9271_RADIO_RF_RST);
1924 udelay(50);
1927 if (!ath9k_hw_chip_reset(ah, chan)) {
1928 ath_err(common, "Chip reset failed\n");
1929 return -EINVAL;
1932 /* Only required on the first reset */
1933 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1934 ah->htc_reset_init = false;
1935 REG_WRITE(ah,
1936 AR9271_RESET_POWER_DOWN_CONTROL,
1937 AR9271_GATE_MAC_CTL);
1938 udelay(50);
1941 /* Restore TSF */
1942 tsf_offset = ath9k_hw_get_tsf_offset(&tsf_ts, NULL);
1943 ath9k_hw_settsf64(ah, tsf + tsf_offset);
1945 if (AR_SREV_9280_20_OR_LATER(ah))
1946 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
1948 if (!AR_SREV_9300_20_OR_LATER(ah))
1949 ar9002_hw_enable_async_fifo(ah);
1951 r = ath9k_hw_process_ini(ah, chan);
1952 if (r)
1953 return r;
1955 ath9k_hw_set_rfmode(ah, chan);
1957 if (ath9k_hw_mci_is_enabled(ah))
1958 ar9003_mci_reset(ah, false, IS_CHAN_2GHZ(chan), save_fullsleep);
1961 * Some AR91xx SoC devices frequently fail to accept TSF writes
1962 * right after the chip reset. When that happens, write a new
1963 * value after the initvals have been applied.
1965 if (AR_SREV_9100(ah) && (ath9k_hw_gettsf64(ah) < tsf)) {
1966 tsf_offset = ath9k_hw_get_tsf_offset(&tsf_ts, NULL);
1967 ath9k_hw_settsf64(ah, tsf + tsf_offset);
1970 ath9k_hw_init_mfp(ah);
1972 ath9k_hw_set_delta_slope(ah, chan);
1973 ath9k_hw_spur_mitigate_freq(ah, chan);
1974 ah->eep_ops->set_board_values(ah, chan);
1976 ath9k_hw_reset_opmode(ah, macStaId1, saveDefAntenna);
1978 r = ath9k_hw_rf_set_freq(ah, chan);
1979 if (r)
1980 return r;
1982 ath9k_hw_set_clockrate(ah);
1984 ath9k_hw_init_queues(ah);
1985 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
1986 ath9k_hw_ani_cache_ini_regs(ah);
1987 ath9k_hw_init_qos(ah);
1989 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
1990 ath9k_hw_gpio_request_in(ah, ah->rfkill_gpio, "ath9k-rfkill");
1992 ath9k_hw_init_global_settings(ah);
1994 if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1995 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
1996 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
1997 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
1998 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
1999 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2000 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2003 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PRESERVE_SEQNUM);
2005 ath9k_hw_set_dma(ah);
2007 if (!ath9k_hw_mci_is_enabled(ah))
2008 REG_WRITE(ah, AR_OBS, 8);
2010 ENABLE_REG_RMW_BUFFER(ah);
2011 if (ah->config.rx_intr_mitigation) {
2012 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, ah->config.rimt_last);
2013 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, ah->config.rimt_first);
2016 if (ah->config.tx_intr_mitigation) {
2017 REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_LAST, 300);
2018 REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_FIRST, 750);
2020 REG_RMW_BUFFER_FLUSH(ah);
2022 ath9k_hw_init_bb(ah, chan);
2024 if (caldata) {
2025 clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
2026 clear_bit(TXCLCAL_DONE, &caldata->cal_flags);
2028 if (!ath9k_hw_init_cal(ah, chan))
2029 return -EIO;
2031 if (ath9k_hw_mci_is_enabled(ah) && ar9003_mci_end_reset(ah, chan, caldata))
2032 return -EIO;
2034 ENABLE_REGWRITE_BUFFER(ah);
2036 ath9k_hw_restore_chainmask(ah);
2037 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2039 REGWRITE_BUFFER_FLUSH(ah);
2041 ath9k_hw_gen_timer_start_tsf2(ah);
2043 ath9k_hw_init_desc(ah);
2045 if (ath9k_hw_btcoex_is_enabled(ah))
2046 ath9k_hw_btcoex_enable(ah);
2048 if (ath9k_hw_mci_is_enabled(ah))
2049 ar9003_mci_check_bt(ah);
2051 if (AR_SREV_9300_20_OR_LATER(ah)) {
2052 ath9k_hw_loadnf(ah, chan);
2053 ath9k_hw_start_nfcal(ah, true);
2056 if (AR_SREV_9300_20_OR_LATER(ah))
2057 ar9003_hw_bb_watchdog_config(ah);
2059 if (ah->config.hw_hang_checks & HW_PHYRESTART_CLC_WAR)
2060 ar9003_hw_disable_phy_restart(ah);
2062 ath9k_hw_apply_gpio_override(ah);
2064 if (AR_SREV_9565(ah) && common->bt_ant_diversity)
2065 REG_SET_BIT(ah, AR_BTCOEX_WL_LNADIV, AR_BTCOEX_WL_LNADIV_FORCE_ON);
2067 if (ah->hw->conf.radar_enabled) {
2068 /* set HW specific DFS configuration */
2069 ah->radar_conf.ext_channel = IS_CHAN_HT40(chan);
2070 ath9k_hw_set_radar_params(ah);
2073 return 0;
2075 EXPORT_SYMBOL(ath9k_hw_reset);
2077 /******************************/
2078 /* Power Management (Chipset) */
2079 /******************************/
2082 * Notify Power Mgt is disabled in self-generated frames.
2083 * If requested, force chip to sleep.
2085 static void ath9k_set_power_sleep(struct ath_hw *ah)
2087 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2089 if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
2090 REG_CLR_BIT(ah, AR_TIMER_MODE, 0xff);
2091 REG_CLR_BIT(ah, AR_NDP2_TIMER_MODE, 0xff);
2092 REG_CLR_BIT(ah, AR_SLP32_INC, 0xfffff);
2093 /* xxx Required for WLAN only case ? */
2094 REG_WRITE(ah, AR_MCI_INTERRUPT_RX_MSG_EN, 0);
2095 udelay(100);
2099 * Clear the RTC force wake bit to allow the
2100 * mac to go to sleep.
2102 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2104 if (ath9k_hw_mci_is_enabled(ah))
2105 udelay(100);
2107 if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
2108 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2110 /* Shutdown chip. Active low */
2111 if (!AR_SREV_5416(ah) && !AR_SREV_9271(ah)) {
2112 REG_CLR_BIT(ah, AR_RTC_RESET, AR_RTC_RESET_EN);
2113 udelay(2);
2116 /* Clear Bit 14 of AR_WA after putting chip into Full Sleep mode. */
2117 if (AR_SREV_9300_20_OR_LATER(ah))
2118 REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2122  * Notify Power Management is enabled in self-generated
2123  * frames. If requested, set power mode of chip to
2124  * auto/normal. Duration in units of 128us (1/8 TU).
2126 static void ath9k_set_power_network_sleep(struct ath_hw *ah)
2128 struct ath9k_hw_capabilities *pCap = &ah->caps;
2130 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2132 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2133 /* Set WakeOnInterrupt bit; clear ForceWake bit */
2134 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2135 AR_RTC_FORCE_WAKE_ON_INT);
2136 } else {
2138                 /* When the chip goes into network sleep, it can be woken
2139                  * up by an MCI_INT interrupt caused by BT's HW messages
2140                  * (LNA_xxx, CONT_xxx), which can arrive at a very fast
2141                  * rate (~100us). This causes the chip to leave and
2142                  * re-enter network sleep mode frequently, which in
2143                  * consequence makes the WLAN MCI HW generate lots of
2144                  * SYS_WAKING and SYS_SLEEPING messages, keeping the
2145                  * BT CPU too busy to process them.
2147 if (ath9k_hw_mci_is_enabled(ah))
2148 REG_CLR_BIT(ah, AR_MCI_INTERRUPT_RX_MSG_EN,
2149 AR_MCI_INTERRUPT_RX_HW_MSG_MASK);
2150 /*
2151  * Clear the RTC force wake bit to allow the
2152  * mac to go to sleep.
2153  */
2154 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2156 if (ath9k_hw_mci_is_enabled(ah))
2157 udelay(30);
2160 /* Clear Bit 14 of AR_WA after putting chip into Net Sleep mode. */
2161 if (AR_SREV_9300_20_OR_LATER(ah))
2162 REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2165 static bool ath9k_hw_set_power_awake(struct ath_hw *ah)
2167 u32 val;
2168 int i;
2170 /* Set Bits 14 and 17 of AR_WA before powering on the chip. */
2171 if (AR_SREV_9300_20_OR_LATER(ah)) {
2172 REG_WRITE(ah, AR_WA, ah->WARegVal);
2173 udelay(10);
2176 if ((REG_READ(ah, AR_RTC_STATUS) &
2177 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2178 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
2179 return false;
2181 if (!AR_SREV_9300_20_OR_LATER(ah))
2182 ath9k_hw_init_pll(ah, NULL);
2184 if (AR_SREV_9100(ah))
2185 REG_SET_BIT(ah, AR_RTC_RESET,
2186 AR_RTC_RESET_EN);
2188 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2189 AR_RTC_FORCE_WAKE_EN);
2190 if (AR_SREV_9100(ah))
2191 mdelay(10);
2192 else
2193 udelay(50);
2195 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2196 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2197 if (val == AR_RTC_STATUS_ON)
2198 break;
2199 udelay(50);
2200 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2201 AR_RTC_FORCE_WAKE_EN);
2203 if (i == 0) {
2204 ath_err(ath9k_hw_common(ah),
2205 "Failed to wakeup in %uus\n",
2206 POWER_UP_TIME / 20);
2207 return false;
2210 if (ath9k_hw_mci_is_enabled(ah))
2211 ar9003_mci_set_power_awake(ah);
2213 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2215 return true;
2218 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2220 struct ath_common *common = ath9k_hw_common(ah);
2221 int status = true;
2222 static const char *modes[] = {
2223 "AWAKE",
2224 "FULL-SLEEP",
2225 "NETWORK SLEEP",
2226 "UNDEFINED"
2229 if (ah->power_mode == mode)
2230 return status;
2232 ath_dbg(common, RESET, "%s -> %s\n",
2233 modes[ah->power_mode], modes[mode]);
2235 switch (mode) {
2236 case ATH9K_PM_AWAKE:
2237 status = ath9k_hw_set_power_awake(ah);
2238 break;
2239 case ATH9K_PM_FULL_SLEEP:
2240 if (ath9k_hw_mci_is_enabled(ah))
2241 ar9003_mci_set_full_sleep(ah);
2243 ath9k_set_power_sleep(ah);
2244 ah->chip_fullsleep = true;
2245 break;
2246 case ATH9K_PM_NETWORK_SLEEP:
2247 ath9k_set_power_network_sleep(ah);
2248 break;
2249 default:
2250 ath_err(common, "Unknown power mode %u\n", mode);
2251 return false;
2253 ah->power_mode = mode;
2255 /*
2256  * XXX: If this warning never comes up after a while then
2257  * simply keep the ATH_DBG_WARN_ON_ONCE() but make
2258  * ath9k_hw_setpower() return type void.
2259  */
2261 if (!(ah->ah_flags & AH_UNPLUGGED))
2262 ATH_DBG_WARN_ON_ONCE(!status);
2264 return status;
2266 EXPORT_SYMBOL(ath9k_hw_setpower);
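/*
 * A minimal usage sketch: a caller that needs register access while the
 * chip may be asleep can force ATH9K_PM_AWAKE and restore the previous
 * mode afterwards. Only ath9k_hw_setpower(), ah->power_mode and REG_READ()
 * come from this file; the wrapper name below is hypothetical.
 *
 *	static u32 example_read_reg_awake(struct ath_hw *ah, u32 reg)
 *	{
 *		enum ath9k_power_mode old_mode = ah->power_mode;
 *		u32 val;
 *
 *		if (old_mode != ATH9K_PM_AWAKE)
 *			ath9k_hw_setpower(ah, ATH9K_PM_AWAKE);
 *
 *		val = REG_READ(ah, reg);
 *
 *		if (old_mode != ATH9K_PM_AWAKE)
 *			ath9k_hw_setpower(ah, old_mode);
 *
 *		return val;
 *	}
 */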
2268 /*******************/
2269 /* Beacon Handling */
2270 /*******************/
2272 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
2274 int flags = 0;
2276 ENABLE_REGWRITE_BUFFER(ah);
2278 switch (ah->opmode) {
2279 case NL80211_IFTYPE_ADHOC:
2280 REG_SET_BIT(ah, AR_TXCFG,
2281 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
2282 /* fall through */
2283 case NL80211_IFTYPE_MESH_POINT:
2284 case NL80211_IFTYPE_AP:
2285 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, next_beacon);
2286 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, next_beacon -
2287 TU_TO_USEC(ah->config.dma_beacon_response_time));
2288 REG_WRITE(ah, AR_NEXT_SWBA, next_beacon -
2289 TU_TO_USEC(ah->config.sw_beacon_response_time));
2290 flags |=
2291 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
2292 break;
2293 default:
2294 ath_dbg(ath9k_hw_common(ah), BEACON,
2295 "%s: unsupported opmode: %d\n", __func__, ah->opmode);
2296 return;
2298 }
2300 REG_WRITE(ah, AR_BEACON_PERIOD, beacon_period);
2301 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, beacon_period);
2302 REG_WRITE(ah, AR_SWBA_PERIOD, beacon_period);
2304 REGWRITE_BUFFER_FLUSH(ah);
2306 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
2308 EXPORT_SYMBOL(ath9k_hw_beaconinit);
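/*
 * A minimal usage sketch for AP mode, assuming next_beacon and
 * beacon_period are in microseconds (the TU_TO_USEC() offsets applied
 * above suggest that). The wrapper name and the 100 TU interval are
 * only illustrative.
 *
 *	static void example_start_ap_beacons(struct ath_hw *ah)
 *	{
 *		u32 intval = TU_TO_USEC(100);
 *		u32 nexttbtt = ath9k_hw_gettsf32(ah) + intval;
 *
 *		ath9k_hw_beaconinit(ah, nexttbtt, intval);
 *	}
 */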
2310 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
2311 const struct ath9k_beacon_state *bs)
2313 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
2314 struct ath9k_hw_capabilities *pCap = &ah->caps;
2315 struct ath_common *common = ath9k_hw_common(ah);
2317 ENABLE_REGWRITE_BUFFER(ah);
2319 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, bs->bs_nexttbtt);
2320 REG_WRITE(ah, AR_BEACON_PERIOD, bs->bs_intval);
2321 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, bs->bs_intval);
2323 REGWRITE_BUFFER_FLUSH(ah);
2325 REG_RMW_FIELD(ah, AR_RSSI_THR,
2326 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
2328 beaconintval = bs->bs_intval;
2330 if (bs->bs_sleepduration > beaconintval)
2331 beaconintval = bs->bs_sleepduration;
2333 dtimperiod = bs->bs_dtimperiod;
2334 if (bs->bs_sleepduration > dtimperiod)
2335 dtimperiod = bs->bs_sleepduration;
2337 if (beaconintval == dtimperiod)
2338 nextTbtt = bs->bs_nextdtim;
2339 else
2340 nextTbtt = bs->bs_nexttbtt;
2342 ath_dbg(common, BEACON, "next DTIM %u\n", bs->bs_nextdtim);
2343 ath_dbg(common, BEACON, "next beacon %u\n", nextTbtt);
2344 ath_dbg(common, BEACON, "beacon period %u\n", beaconintval);
2345 ath_dbg(common, BEACON, "DTIM period %u\n", dtimperiod);
2347 ENABLE_REGWRITE_BUFFER(ah);
2349 REG_WRITE(ah, AR_NEXT_DTIM, bs->bs_nextdtim - SLEEP_SLOP);
2350 REG_WRITE(ah, AR_NEXT_TIM, nextTbtt - SLEEP_SLOP);
2352 REG_WRITE(ah, AR_SLEEP1,
2353 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
2354 | AR_SLEEP1_ASSUME_DTIM);
2356 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
2357 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
2358 else
2359 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
2361 REG_WRITE(ah, AR_SLEEP2,
2362 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
2364 REG_WRITE(ah, AR_TIM_PERIOD, beaconintval);
2365 REG_WRITE(ah, AR_DTIM_PERIOD, dtimperiod);
2367 REGWRITE_BUFFER_FLUSH(ah);
2369 REG_SET_BIT(ah, AR_TIMER_MODE,
2370 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
2371 AR_DTIM_TIMER_EN);
2373 /* TSF Out of Range Threshold */
2374 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
2376 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
2378 /*******************/
2379 /* HW Capabilities */
2380 /*******************/
2382 static u8 fixup_chainmask(u8 chip_chainmask, u8 eeprom_chainmask)
2384 eeprom_chainmask &= chip_chainmask;
2385 if (eeprom_chainmask)
2386 return eeprom_chainmask;
2387 else
2388 return chip_chainmask;
2389 }
2391 /**
2392  * ath9k_hw_dfs_tested - checks if DFS has been tested with the chipset in use
2393  * @ah: the atheros hardware data structure
2394  *
2395  * We enable DFS support upstream on chipsets which have passed a series
2396  * of tests. The testing requirements are going to be documented. Desired
2397  * test requirements are documented at:
2398  *
2399  * http://wireless.kernel.org/en/users/Drivers/ath9k/dfs
2400  *
2401  * Once a new chipset gets properly tested an individual commit can be used
2402  * to document the testing for DFS for that chipset.
2403  */
2404 static bool ath9k_hw_dfs_tested(struct ath_hw *ah)
2407 switch (ah->hw_version.macVersion) {
2408 /* for temporary testing DFS with 9280 */
2409 case AR_SREV_VERSION_9280:
2410 /* AR9580 will likely be our first target to get testing on */
2411 case AR_SREV_VERSION_9580:
2412 return true;
2413 default:
2414 return false;
2418 static void ath9k_gpio_cap_init(struct ath_hw *ah)
2420 struct ath9k_hw_capabilities *pCap = &ah->caps;
2422 if (AR_SREV_9271(ah)) {
2423 pCap->num_gpio_pins = AR9271_NUM_GPIO;
2424 pCap->gpio_mask = AR9271_GPIO_MASK;
2425 } else if (AR_DEVID_7010(ah)) {
2426 pCap->num_gpio_pins = AR7010_NUM_GPIO;
2427 pCap->gpio_mask = AR7010_GPIO_MASK;
2428 } else if (AR_SREV_9287(ah)) {
2429 pCap->num_gpio_pins = AR9287_NUM_GPIO;
2430 pCap->gpio_mask = AR9287_GPIO_MASK;
2431 } else if (AR_SREV_9285(ah)) {
2432 pCap->num_gpio_pins = AR9285_NUM_GPIO;
2433 pCap->gpio_mask = AR9285_GPIO_MASK;
2434 } else if (AR_SREV_9280(ah)) {
2435 pCap->num_gpio_pins = AR9280_NUM_GPIO;
2436 pCap->gpio_mask = AR9280_GPIO_MASK;
2437 } else if (AR_SREV_9300(ah)) {
2438 pCap->num_gpio_pins = AR9300_NUM_GPIO;
2439 pCap->gpio_mask = AR9300_GPIO_MASK;
2440 } else if (AR_SREV_9330(ah)) {
2441 pCap->num_gpio_pins = AR9330_NUM_GPIO;
2442 pCap->gpio_mask = AR9330_GPIO_MASK;
2443 } else if (AR_SREV_9340(ah)) {
2444 pCap->num_gpio_pins = AR9340_NUM_GPIO;
2445 pCap->gpio_mask = AR9340_GPIO_MASK;
2446 } else if (AR_SREV_9462(ah)) {
2447 pCap->num_gpio_pins = AR9462_NUM_GPIO;
2448 pCap->gpio_mask = AR9462_GPIO_MASK;
2449 } else if (AR_SREV_9485(ah)) {
2450 pCap->num_gpio_pins = AR9485_NUM_GPIO;
2451 pCap->gpio_mask = AR9485_GPIO_MASK;
2452 } else if (AR_SREV_9531(ah)) {
2453 pCap->num_gpio_pins = AR9531_NUM_GPIO;
2454 pCap->gpio_mask = AR9531_GPIO_MASK;
2455 } else if (AR_SREV_9550(ah)) {
2456 pCap->num_gpio_pins = AR9550_NUM_GPIO;
2457 pCap->gpio_mask = AR9550_GPIO_MASK;
2458 } else if (AR_SREV_9561(ah)) {
2459 pCap->num_gpio_pins = AR9561_NUM_GPIO;
2460 pCap->gpio_mask = AR9561_GPIO_MASK;
2461 } else if (AR_SREV_9565(ah)) {
2462 pCap->num_gpio_pins = AR9565_NUM_GPIO;
2463 pCap->gpio_mask = AR9565_GPIO_MASK;
2464 } else if (AR_SREV_9580(ah)) {
2465 pCap->num_gpio_pins = AR9580_NUM_GPIO;
2466 pCap->gpio_mask = AR9580_GPIO_MASK;
2467 } else {
2468 pCap->num_gpio_pins = AR_NUM_GPIO;
2469 pCap->gpio_mask = AR_GPIO_MASK;
2473 int ath9k_hw_fill_cap_info(struct ath_hw *ah)
2475 struct ath9k_hw_capabilities *pCap = &ah->caps;
2476 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2477 struct ath_common *common = ath9k_hw_common(ah);
2479 u16 eeval;
2480 u8 ant_div_ctl1, tx_chainmask, rx_chainmask;
2482 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
2483 regulatory->current_rd = eeval;
2485 if (ah->opmode != NL80211_IFTYPE_AP &&
2486 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
2487 if (regulatory->current_rd == 0x64 ||
2488 regulatory->current_rd == 0x65)
2489 regulatory->current_rd += 5;
2490 else if (regulatory->current_rd == 0x41)
2491 regulatory->current_rd = 0x43;
2492 ath_dbg(common, REGULATORY, "regdomain mapped to 0x%x\n",
2493 regulatory->current_rd);
2496 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
2498 if (eeval & AR5416_OPFLAGS_11A) {
2499 if (ah->disable_5ghz)
2500 ath_warn(common, "disabling 5GHz band\n");
2501 else
2502 pCap->hw_caps |= ATH9K_HW_CAP_5GHZ;
2505 if (eeval & AR5416_OPFLAGS_11G) {
2506 if (ah->disable_2ghz)
2507 ath_warn(common, "disabling 2GHz band\n");
2508 else
2509 pCap->hw_caps |= ATH9K_HW_CAP_2GHZ;
2512 if ((pCap->hw_caps & (ATH9K_HW_CAP_2GHZ | ATH9K_HW_CAP_5GHZ)) == 0) {
2513 ath_err(common, "both bands are disabled\n");
2514 return -EINVAL;
2517 ath9k_gpio_cap_init(ah);
2519 if (AR_SREV_9485(ah) ||
2520 AR_SREV_9285(ah) ||
2521 AR_SREV_9330(ah) ||
2522 AR_SREV_9565(ah))
2523 pCap->chip_chainmask = 1;
2524 else if (!AR_SREV_9280_20_OR_LATER(ah))
2525 pCap->chip_chainmask = 7;
2526 else if (!AR_SREV_9300_20_OR_LATER(ah) ||
2527 AR_SREV_9340(ah) ||
2528 AR_SREV_9462(ah) ||
2529 AR_SREV_9531(ah))
2530 pCap->chip_chainmask = 3;
2531 else
2532 pCap->chip_chainmask = 7;
2534 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
2535 /*
2536  * For AR9271 we will temporarily use the rx chainmask as read from
2537  * the EEPROM.
2538  */
2539 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
2540 !(eeval & AR5416_OPFLAGS_11A) &&
2541 !(AR_SREV_9271(ah)))
2542 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
2543 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
2544 else if (AR_SREV_9100(ah))
2545 pCap->rx_chainmask = 0x7;
2546 else
2547 /* Use rx_chainmask from EEPROM. */
2548 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
2550 pCap->tx_chainmask = fixup_chainmask(pCap->chip_chainmask, pCap->tx_chainmask);
2551 pCap->rx_chainmask = fixup_chainmask(pCap->chip_chainmask, pCap->rx_chainmask);
2552 ah->txchainmask = pCap->tx_chainmask;
2553 ah->rxchainmask = pCap->rx_chainmask;
2555 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
2557 /* enable key search for every frame in an aggregate */
2558 if (AR_SREV_9300_20_OR_LATER(ah))
2559 ah->misc_mode |= AR_PCU_ALWAYS_PERFORM_KEYSEARCH;
2561 common->crypt_caps |= ATH_CRYPT_CAP_CIPHER_AESCCM;
2563 if (ah->hw_version.devid != AR2427_DEVID_PCIE)
2564 pCap->hw_caps |= ATH9K_HW_CAP_HT;
2565 else
2566 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
2568 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah))
2569 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
2570 else
2571 pCap->rts_aggr_limit = (8 * 1024);
2573 #ifdef CONFIG_ATH9K_RFKILL
2574 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
2575 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
2576 ah->rfkill_gpio =
2577 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
2578 ah->rfkill_polarity =
2579 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
2581 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
2583 #endif
2584 if (AR_SREV_9271(ah) || AR_SREV_9300_20_OR_LATER(ah))
2585 pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
2586 else
2587 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
2589 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
2590 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
2591 else
2592 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
2594 if (AR_SREV_9300_20_OR_LATER(ah)) {
2595 pCap->hw_caps |= ATH9K_HW_CAP_EDMA | ATH9K_HW_CAP_FASTCLOCK;
2596 if (!AR_SREV_9330(ah) && !AR_SREV_9485(ah) &&
2597 !AR_SREV_9561(ah) && !AR_SREV_9565(ah))
2598 pCap->hw_caps |= ATH9K_HW_CAP_LDPC;
2600 pCap->rx_hp_qdepth = ATH9K_HW_RX_HP_QDEPTH;
2601 pCap->rx_lp_qdepth = ATH9K_HW_RX_LP_QDEPTH;
2602 pCap->rx_status_len = sizeof(struct ar9003_rxs);
2603 pCap->tx_desc_len = sizeof(struct ar9003_txc);
2604 pCap->txs_len = sizeof(struct ar9003_txs);
2605 } else {
2606 pCap->tx_desc_len = sizeof(struct ath_desc);
2607 if (AR_SREV_9280_20(ah))
2608 pCap->hw_caps |= ATH9K_HW_CAP_FASTCLOCK;
2611 if (AR_SREV_9300_20_OR_LATER(ah))
2612 pCap->hw_caps |= ATH9K_HW_CAP_RAC_SUPPORTED;
2614 if (AR_SREV_9561(ah))
2615 ah->ent_mode = 0x3BDA000;
2616 else if (AR_SREV_9300_20_OR_LATER(ah))
2617 ah->ent_mode = REG_READ(ah, AR_ENT_OTP);
2619 if (AR_SREV_9287_11_OR_LATER(ah) || AR_SREV_9271(ah))
2620 pCap->hw_caps |= ATH9K_HW_CAP_SGI_20;
2622 if (AR_SREV_9285(ah)) {
2623 if (ah->eep_ops->get_eeprom(ah, EEP_MODAL_VER) >= 3) {
2624 ant_div_ctl1 =
2625 ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2626 if ((ant_div_ctl1 & 0x1) && ((ant_div_ctl1 >> 3) & 0x1)) {
2627 pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2628 ath_info(common, "Enable LNA combining\n");
2633 if (AR_SREV_9300_20_OR_LATER(ah)) {
2634 if (ah->eep_ops->get_eeprom(ah, EEP_CHAIN_MASK_REDUCE))
2635 pCap->hw_caps |= ATH9K_HW_CAP_APM;
2638 if (AR_SREV_9330(ah) || AR_SREV_9485(ah) || AR_SREV_9565(ah)) {
2639 ant_div_ctl1 = ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2640 if ((ant_div_ctl1 >> 0x6) == 0x3) {
2641 pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2642 ath_info(common, "Enable LNA combining\n");
2646 if (ath9k_hw_dfs_tested(ah))
2647 pCap->hw_caps |= ATH9K_HW_CAP_DFS;
2649 tx_chainmask = pCap->tx_chainmask;
2650 rx_chainmask = pCap->rx_chainmask;
2651 while (tx_chainmask || rx_chainmask) {
2652 if (tx_chainmask & BIT(0))
2653 pCap->max_txchains++;
2654 if (rx_chainmask & BIT(0))
2655 pCap->max_rxchains++;
2657 tx_chainmask >>= 1;
2658 rx_chainmask >>= 1;
2661 if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
2662 if (!(ah->ent_mode & AR_ENT_OTP_49GHZ_DISABLE))
2663 pCap->hw_caps |= ATH9K_HW_CAP_MCI;
2665 if (AR_SREV_9462_20_OR_LATER(ah))
2666 pCap->hw_caps |= ATH9K_HW_CAP_RTT;
2669 if (AR_SREV_9300_20_OR_LATER(ah) &&
2670 ah->eep_ops->get_eeprom(ah, EEP_PAPRD))
2671 pCap->hw_caps |= ATH9K_HW_CAP_PAPRD;
2673 #ifdef CONFIG_ATH9K_WOW
2674 if (AR_SREV_9462_20_OR_LATER(ah) || AR_SREV_9565_11_OR_LATER(ah))
2675 ah->wow.max_patterns = MAX_NUM_PATTERN;
2676 else
2677 ah->wow.max_patterns = MAX_NUM_PATTERN_LEGACY;
2678 #endif
2680 return 0;
2683 /****************************/
2684 /* GPIO / RFKILL / Antennae */
2685 /****************************/
2687 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah, u32 gpio, u32 type)
2689 int addr;
2690 u32 gpio_shift, tmp;
2692 if (gpio > 11)
2693 addr = AR_GPIO_OUTPUT_MUX3;
2694 else if (gpio > 5)
2695 addr = AR_GPIO_OUTPUT_MUX2;
2696 else
2697 addr = AR_GPIO_OUTPUT_MUX1;
2699 gpio_shift = (gpio % 6) * 5;
2701 if (AR_SREV_9280_20_OR_LATER(ah) ||
2702 (addr != AR_GPIO_OUTPUT_MUX1)) {
2703 REG_RMW(ah, addr, (type << gpio_shift),
2704 (0x1f << gpio_shift));
2705 } else {
2706 tmp = REG_READ(ah, addr);
2707 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
2708 tmp &= ~(0x1f << gpio_shift);
2709 tmp |= (type << gpio_shift);
2710 REG_WRITE(ah, addr, tmp);
2714 /* BSP should set the corresponding MUX register correctly.
2715  */
2716 static void ath9k_hw_gpio_cfg_soc(struct ath_hw *ah, u32 gpio, bool out,
2717 const char *label)
2719 if (ah->caps.gpio_requested & BIT(gpio))
2720 return;
2722 /* may be requested by BSP, free anyway */
2723 gpio_free(gpio);
2725 if (gpio_request_one(gpio, out ? GPIOF_OUT_INIT_LOW : GPIOF_IN, label))
2726 return;
2728 ah->caps.gpio_requested |= BIT(gpio);
2731 static void ath9k_hw_gpio_cfg_wmac(struct ath_hw *ah, u32 gpio, bool out,
2732 u32 ah_signal_type)
2734 u32 gpio_set, gpio_shift = gpio;
2736 if (AR_DEVID_7010(ah)) {
2737 gpio_set = out ?
2738 AR7010_GPIO_OE_AS_OUTPUT : AR7010_GPIO_OE_AS_INPUT;
2739 REG_RMW(ah, AR7010_GPIO_OE, gpio_set << gpio_shift,
2740 AR7010_GPIO_OE_MASK << gpio_shift);
2741 } else if (AR_SREV_SOC(ah)) {
2742 gpio_set = out ? 1 : 0;
2743 REG_RMW(ah, AR_GPIO_OE_OUT, gpio_set << gpio_shift,
2744 gpio_set << gpio_shift);
2745 } else {
2746 gpio_shift = gpio << 1;
2747 gpio_set = out ?
2748 AR_GPIO_OE_OUT_DRV_ALL : AR_GPIO_OE_OUT_DRV_NO;
2749 REG_RMW(ah, AR_GPIO_OE_OUT, gpio_set << gpio_shift,
2750 AR_GPIO_OE_OUT_DRV << gpio_shift);
2752 if (out)
2753 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
2757 static void ath9k_hw_gpio_request(struct ath_hw *ah, u32 gpio, bool out,
2758 const char *label, u32 ah_signal_type)
2760 WARN_ON(gpio >= ah->caps.num_gpio_pins);
2762 if (BIT(gpio) & ah->caps.gpio_mask)
2763 ath9k_hw_gpio_cfg_wmac(ah, gpio, out, ah_signal_type);
2764 else if (AR_SREV_SOC(ah))
2765 ath9k_hw_gpio_cfg_soc(ah, gpio, out, label);
2766 else
2767 WARN_ON(1);
2770 void ath9k_hw_gpio_request_in(struct ath_hw *ah, u32 gpio, const char *label)
2772 ath9k_hw_gpio_request(ah, gpio, false, label, 0);
2774 EXPORT_SYMBOL(ath9k_hw_gpio_request_in);
2776 void ath9k_hw_gpio_request_out(struct ath_hw *ah, u32 gpio, const char *label,
2777 u32 ah_signal_type)
2779 ath9k_hw_gpio_request(ah, gpio, true, label, ah_signal_type);
2781 EXPORT_SYMBOL(ath9k_hw_gpio_request_out);
2783 void ath9k_hw_gpio_free(struct ath_hw *ah, u32 gpio)
2785 if (!AR_SREV_SOC(ah))
2786 return;
2788 WARN_ON(gpio >= ah->caps.num_gpio_pins);
2790 if (ah->caps.gpio_requested & BIT(gpio)) {
2791 gpio_free(gpio);
2792 ah->caps.gpio_requested &= ~BIT(gpio);
2795 EXPORT_SYMBOL(ath9k_hw_gpio_free);
2797 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
2799 u32 val = 0xffffffff;
2801 #define MS_REG_READ(x, y) \
2802 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & BIT(y))
2804 WARN_ON(gpio >= ah->caps.num_gpio_pins);
2806 if (BIT(gpio) & ah->caps.gpio_mask) {
2807 if (AR_SREV_9271(ah))
2808 val = MS_REG_READ(AR9271, gpio);
2809 else if (AR_SREV_9287(ah))
2810 val = MS_REG_READ(AR9287, gpio);
2811 else if (AR_SREV_9285(ah))
2812 val = MS_REG_READ(AR9285, gpio);
2813 else if (AR_SREV_9280(ah))
2814 val = MS_REG_READ(AR928X, gpio);
2815 else if (AR_DEVID_7010(ah))
2816 val = REG_READ(ah, AR7010_GPIO_IN) & BIT(gpio);
2817 else if (AR_SREV_9300_20_OR_LATER(ah))
2818 val = REG_READ(ah, AR_GPIO_IN) & BIT(gpio);
2819 else
2820 val = MS_REG_READ(AR, gpio);
2821 } else if (BIT(gpio) & ah->caps.gpio_requested) {
2822 val = gpio_get_value(gpio) & BIT(gpio);
2823 } else {
2824 WARN_ON(1);
2827 return !!val;
2829 EXPORT_SYMBOL(ath9k_hw_gpio_get);
2831 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
2833 WARN_ON(gpio >= ah->caps.num_gpio_pins);
2835 if (AR_DEVID_7010(ah) || AR_SREV_9271(ah))
2836 val = !val;
2837 else
2838 val = !!val;
2840 if (BIT(gpio) & ah->caps.gpio_mask) {
2841 u32 out_addr = AR_DEVID_7010(ah) ?
2842 AR7010_GPIO_OUT : AR_GPIO_IN_OUT;
2844 REG_RMW(ah, out_addr, val << gpio, BIT(gpio));
2845 } else if (BIT(gpio) & ah->caps.gpio_requested) {
2846 gpio_set_value(gpio, val);
2847 } else {
2848 WARN_ON(1);
2851 EXPORT_SYMBOL(ath9k_hw_set_gpio);
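/*
 * A minimal usage sketch: requesting a WMAC GPIO as a software-controlled
 * output and driving it low (many reference designs wire LEDs active-low,
 * but that is board specific). The pin number and label are made up;
 * AR_GPIO_OUTPUT_MUX_AS_OUTPUT is the plain output mux type.
 *
 *	ath9k_hw_gpio_request_out(ah, 1, "example-led",
 *				  AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
 *	ath9k_hw_set_gpio(ah, 1, 0);
 *
 *	// and once the pin is no longer needed:
 *	ath9k_hw_gpio_free(ah, 1);
 */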
2853 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
2855 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
2857 EXPORT_SYMBOL(ath9k_hw_setantenna);
2859 /*********************/
2860 /* General Operation */
2861 /*********************/
2863 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
2865 u32 bits = REG_READ(ah, AR_RX_FILTER);
2866 u32 phybits = REG_READ(ah, AR_PHY_ERR);
2868 if (phybits & AR_PHY_ERR_RADAR)
2869 bits |= ATH9K_RX_FILTER_PHYRADAR;
2870 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
2871 bits |= ATH9K_RX_FILTER_PHYERR;
2873 return bits;
2875 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
2877 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
2879 u32 phybits;
2881 ENABLE_REGWRITE_BUFFER(ah);
2883 REG_WRITE(ah, AR_RX_FILTER, bits);
2885 phybits = 0;
2886 if (bits & ATH9K_RX_FILTER_PHYRADAR)
2887 phybits |= AR_PHY_ERR_RADAR;
2888 if (bits & ATH9K_RX_FILTER_PHYERR)
2889 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
2890 REG_WRITE(ah, AR_PHY_ERR, phybits);
2892 if (phybits)
2893 REG_SET_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2894 else
2895 REG_CLR_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2897 REGWRITE_BUFFER_FLUSH(ah);
2899 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
2901 bool ath9k_hw_phy_disable(struct ath_hw *ah)
2903 if (ath9k_hw_mci_is_enabled(ah))
2904 ar9003_mci_bt_gain_ctrl(ah);
2906 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
2907 return false;
2909 ath9k_hw_init_pll(ah, NULL);
2910 ah->htc_reset_init = true;
2911 return true;
2913 EXPORT_SYMBOL(ath9k_hw_phy_disable);
2915 bool ath9k_hw_disable(struct ath_hw *ah)
2917 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2918 return false;
2920 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
2921 return false;
2923 ath9k_hw_init_pll(ah, NULL);
2924 return true;
2926 EXPORT_SYMBOL(ath9k_hw_disable);
2928 static int get_antenna_gain(struct ath_hw *ah, struct ath9k_channel *chan)
2930 enum eeprom_param gain_param;
2932 if (IS_CHAN_2GHZ(chan))
2933 gain_param = EEP_ANTENNA_GAIN_2G;
2934 else
2935 gain_param = EEP_ANTENNA_GAIN_5G;
2937 return ah->eep_ops->get_eeprom(ah, gain_param);
2940 void ath9k_hw_apply_txpower(struct ath_hw *ah, struct ath9k_channel *chan,
2941 bool test)
2943 struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2944 struct ieee80211_channel *channel;
2945 int chan_pwr, new_pwr;
2946 u16 ctl = NO_CTL;
2948 if (!chan)
2949 return;
2951 if (!test)
2952 ctl = ath9k_regd_get_ctl(reg, chan);
2954 channel = chan->chan;
2955 chan_pwr = min_t(int, channel->max_power * 2, MAX_RATE_POWER);
2956 new_pwr = min_t(int, chan_pwr, reg->power_limit);
2958 ah->eep_ops->set_txpower(ah, chan, ctl,
2959 get_antenna_gain(ah, chan), new_pwr, test);
2962 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit, bool test)
2964 struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2965 struct ath9k_channel *chan = ah->curchan;
2966 struct ieee80211_channel *channel = chan->chan;
2968 reg->power_limit = min_t(u32, limit, MAX_RATE_POWER);
2969 if (test)
2970 channel->max_power = MAX_RATE_POWER / 2;
2972 ath9k_hw_apply_txpower(ah, chan, test);
2974 if (test)
2975 channel->max_power = DIV_ROUND_UP(reg->max_power_level, 2);
2977 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
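/*
 * The limit is expressed in half-dBm steps (note channel->max_power * 2
 * above), so capping the current channel at 17 dBm would look roughly
 * like this:
 *
 *	ath9k_hw_set_txpowerlimit(ah, 2 * 17, false);
 */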
2979 void ath9k_hw_setopmode(struct ath_hw *ah)
2981 ath9k_hw_set_operating_mode(ah, ah->opmode);
2983 EXPORT_SYMBOL(ath9k_hw_setopmode);
2985 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
2987 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
2988 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
2990 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
2992 void ath9k_hw_write_associd(struct ath_hw *ah)
2994 struct ath_common *common = ath9k_hw_common(ah);
2996 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
2997 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
2998 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
3000 EXPORT_SYMBOL(ath9k_hw_write_associd);
3002 #define ATH9K_MAX_TSF_READ 10
3004 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
3006 u32 tsf_lower, tsf_upper1, tsf_upper2;
3007 int i;
3009 tsf_upper1 = REG_READ(ah, AR_TSF_U32);
3010 for (i = 0; i < ATH9K_MAX_TSF_READ; i++) {
3011 tsf_lower = REG_READ(ah, AR_TSF_L32);
3012 tsf_upper2 = REG_READ(ah, AR_TSF_U32);
3013 if (tsf_upper2 == tsf_upper1)
3014 break;
3015 tsf_upper1 = tsf_upper2;
3016 }
3018 WARN_ON(i == ATH9K_MAX_TSF_READ);
3020 return (((u64)tsf_upper1 << 32) | tsf_lower);
3022 EXPORT_SYMBOL(ath9k_hw_gettsf64);
3024 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
3026 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
3027 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
3029 EXPORT_SYMBOL(ath9k_hw_settsf64);
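/*
 * A minimal sketch of shifting the TSF by an offset (for example when
 * adopting another station's timebase); the 1024 us value is arbitrary:
 *
 *	u64 tsf = ath9k_hw_gettsf64(ah);
 *
 *	ath9k_hw_settsf64(ah, tsf + 1024);
 */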
3031 void ath9k_hw_reset_tsf(struct ath_hw *ah)
3033 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
3034 AH_TSF_WRITE_TIMEOUT))
3035 ath_dbg(ath9k_hw_common(ah), RESET,
3036 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
3038 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
3040 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
3042 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, bool set)
3044 if (set)
3045 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
3046 else
3047 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
3049 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
3051 void ath9k_hw_set11nmac2040(struct ath_hw *ah, struct ath9k_channel *chan)
3053 u32 macmode;
3055 if (IS_CHAN_HT40(chan) && !ah->config.cwm_ignore_extcca)
3056 macmode = AR_2040_JOINED_RX_CLEAR;
3057 else
3058 macmode = 0;
3060 REG_WRITE(ah, AR_2040_MODE, macmode);
3063 /* HW Generic timers configuration */
3065 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
3066 {
3067 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3068 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3069 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3070 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3071 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3072 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3073 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3074 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
3075 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
3076 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
3077 AR_NDP2_TIMER_MODE, 0x0002},
3078 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
3079 AR_NDP2_TIMER_MODE, 0x0004},
3080 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
3081 AR_NDP2_TIMER_MODE, 0x0008},
3082 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
3083 AR_NDP2_TIMER_MODE, 0x0010},
3084 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
3085 AR_NDP2_TIMER_MODE, 0x0020},
3086 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
3087 AR_NDP2_TIMER_MODE, 0x0040},
3088 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
3089 AR_NDP2_TIMER_MODE, 0x0080}
3090 };
3092 /* HW generic timer primitives */
3094 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
3096 return REG_READ(ah, AR_TSF_L32);
3098 EXPORT_SYMBOL(ath9k_hw_gettsf32);
3100 void ath9k_hw_gen_timer_start_tsf2(struct ath_hw *ah)
3102 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3104 if (timer_table->tsf2_enabled) {
3105 REG_SET_BIT(ah, AR_DIRECT_CONNECT, AR_DC_AP_STA_EN);
3106 REG_SET_BIT(ah, AR_RESET_TSF, AR_RESET_TSF2_ONCE);
3110 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
3111 void (*trigger)(void *),
3112 void (*overflow)(void *),
3113 void *arg,
3114 u8 timer_index)
3116 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3117 struct ath_gen_timer *timer;
3119 if ((timer_index < AR_FIRST_NDP_TIMER) ||
3120 (timer_index >= ATH_MAX_GEN_TIMER))
3121 return NULL;
3123 if ((timer_index > AR_FIRST_NDP_TIMER) &&
3124 !AR_SREV_9300_20_OR_LATER(ah))
3125 return NULL;
3127 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
3128 if (timer == NULL)
3129 return NULL;
3131 /* allocate a hardware generic timer slot */
3132 timer_table->timers[timer_index] = timer;
3133 timer->index = timer_index;
3134 timer->trigger = trigger;
3135 timer->overflow = overflow;
3136 timer->arg = arg;
3138 if ((timer_index > AR_FIRST_NDP_TIMER) && !timer_table->tsf2_enabled) {
3139 timer_table->tsf2_enabled = true;
3140 ath9k_hw_gen_timer_start_tsf2(ah);
3143 return timer;
3145 EXPORT_SYMBOL(ath_gen_timer_alloc);
3147 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
3148 struct ath_gen_timer *timer,
3149 u32 timer_next,
3150 u32 timer_period)
3152 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3153 u32 mask = 0;
3155 timer_table->timer_mask |= BIT(timer->index);
3157 /*
3158  * Program generic timer registers
3159  */
3160 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
3161 timer_next);
3162 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
3163 timer_period);
3164 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3165 gen_tmr_configuration[timer->index].mode_mask);
3167 if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
3168 /*
3169  * Starting from AR9462, each generic timer can select which TSF
3170  * to use. But we still follow the old rule: timers 0 - 7 use TSF and
3171  * timers 8 - 15 use TSF2.
3172  */
3173 if ((timer->index < AR_GEN_TIMER_BANK_1_LEN))
3174 REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3175 (1 << timer->index));
3176 else
3177 REG_SET_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3178 (1 << timer->index));
3181 if (timer->trigger)
3182 mask |= SM(AR_GENTMR_BIT(timer->index),
3183 AR_IMR_S5_GENTIMER_TRIG);
3184 if (timer->overflow)
3185 mask |= SM(AR_GENTMR_BIT(timer->index),
3186 AR_IMR_S5_GENTIMER_THRESH);
3188 REG_SET_BIT(ah, AR_IMR_S5, mask);
3190 if ((ah->imask & ATH9K_INT_GENTIMER) == 0) {
3191 ah->imask |= ATH9K_INT_GENTIMER;
3192 ath9k_hw_set_interrupts(ah);
3195 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
3197 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
3199 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3201 /* Clear generic timer enable bits. */
3202 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3203 gen_tmr_configuration[timer->index].mode_mask);
3205 if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
3206 /*
3207  * Need to switch back to TSF if it was using TSF2.
3208  */
3209 if ((timer->index >= AR_GEN_TIMER_BANK_1_LEN)) {
3210 REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3211 (1 << timer->index));
3215 /* Disable both trigger and thresh interrupt masks */
3216 REG_CLR_BIT(ah, AR_IMR_S5,
3217 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3218 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3220 timer_table->timer_mask &= ~BIT(timer->index);
3222 if (timer_table->timer_mask == 0) {
3223 ah->imask &= ~ATH9K_INT_GENTIMER;
3224 ath9k_hw_set_interrupts(ah);
3227 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
3229 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
3231 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3233 /* free the hardware generic timer slot */
3234 timer_table->timers[timer->index] = NULL;
3235 kfree(timer);
3237 EXPORT_SYMBOL(ath_gen_timer_free);
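/*
 * A minimal life-cycle sketch for a HW generic timer. The callback, the
 * timer index (7) and the 10 ms period are only illustrative; indices
 * above AR_FIRST_NDP_TIMER require AR9300 or later, as checked in
 * ath_gen_timer_alloc() above.
 *
 *	static void example_trigger(void *arg)
 *	{
 *		// periodic work, keyed off 'arg', goes here
 *	}
 *
 *	static struct ath_gen_timer *example_start(struct ath_hw *ah)
 *	{
 *		struct ath_gen_timer *timer;
 *
 *		timer = ath_gen_timer_alloc(ah, example_trigger, NULL, ah, 7);
 *		if (!timer)
 *			return NULL;
 *
 *		ath9k_hw_gen_timer_start(ah, timer,
 *					 ath9k_hw_gettsf32(ah) + 10000, 10000);
 *		return timer;
 *	}
 *
 *	// and to tear it down again:
 *	//	ath9k_hw_gen_timer_stop(ah, timer);
 *	//	ath_gen_timer_free(ah, timer);
 */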
3239 /*
3240  * Generic Timer Interrupts handling
3241  */
3242 void ath_gen_timer_isr(struct ath_hw *ah)
3244 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3245 struct ath_gen_timer *timer;
3246 unsigned long trigger_mask, thresh_mask;
3247 unsigned int index;
3249 /* get hardware generic timer interrupt status */
3250 trigger_mask = ah->intr_gen_timer_trigger;
3251 thresh_mask = ah->intr_gen_timer_thresh;
3252 trigger_mask &= timer_table->timer_mask;
3253 thresh_mask &= timer_table->timer_mask;
3255 for_each_set_bit(index, &thresh_mask, ARRAY_SIZE(timer_table->timers)) {
3256 timer = timer_table->timers[index];
3257 if (!timer)
3258 continue;
3259 if (!timer->overflow)
3260 continue;
3262 trigger_mask &= ~BIT(index);
3263 timer->overflow(timer->arg);
3266 for_each_set_bit(index, &trigger_mask, ARRAY_SIZE(timer_table->timers)) {
3267 timer = timer_table->timers[index];
3268 if (!timer)
3269 continue;
3270 if (!timer->trigger)
3271 continue;
3272 timer->trigger(timer->arg);
3275 EXPORT_SYMBOL(ath_gen_timer_isr);
3277 /********/
3278 /* HTC */
3279 /********/
3281 static struct {
3282 u32 version;
3283 const char *name;
3284 } ath_mac_bb_names[] = {
3285 /* Devices with external radios */
3286 { AR_SREV_VERSION_5416_PCI, "5416" },
3287 { AR_SREV_VERSION_5416_PCIE, "5418" },
3288 { AR_SREV_VERSION_9100, "9100" },
3289 { AR_SREV_VERSION_9160, "9160" },
3290 /* Single-chip solutions */
3291 { AR_SREV_VERSION_9280, "9280" },
3292 { AR_SREV_VERSION_9285, "9285" },
3293 { AR_SREV_VERSION_9287, "9287" },
3294 { AR_SREV_VERSION_9271, "9271" },
3295 { AR_SREV_VERSION_9300, "9300" },
3296 { AR_SREV_VERSION_9330, "9330" },
3297 { AR_SREV_VERSION_9340, "9340" },
3298 { AR_SREV_VERSION_9485, "9485" },
3299 { AR_SREV_VERSION_9462, "9462" },
3300 { AR_SREV_VERSION_9550, "9550" },
3301 { AR_SREV_VERSION_9565, "9565" },
3302 { AR_SREV_VERSION_9531, "9531" },
3303 { AR_SREV_VERSION_9561, "9561" },
3304 };
3306 /* For devices with external radios */
3307 static struct {
3308 u16 version;
3309 const char *name;
3310 } ath_rf_names[] = {
3311 { 0, "5133" },
3312 { AR_RAD5133_SREV_MAJOR, "5133" },
3313 { AR_RAD5122_SREV_MAJOR, "5122" },
3314 { AR_RAD2133_SREV_MAJOR, "2133" },
3315 { AR_RAD2122_SREV_MAJOR, "2122" }
3316 };
3318 /*
3319  * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
3320  */
3321 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
3323 int i;
3325 for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
3326 if (ath_mac_bb_names[i].version == mac_bb_version) {
3327 return ath_mac_bb_names[i].name;
3331 return "????";
3332 }
3334 /*
3335  * Return the RF name. "????" is returned if the RF is unknown.
3336  * Used for devices with external radios.
3337  */
3338 static const char *ath9k_hw_rf_name(u16 rf_version)
3340 int i;
3342 for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
3343 if (ath_rf_names[i].version == rf_version) {
3344 return ath_rf_names[i].name;
3348 return "????";
3351 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
3353 int used;
3355 /* chipsets >= AR9280 are single-chip */
3356 if (AR_SREV_9280_20_OR_LATER(ah)) {
3357 used = scnprintf(hw_name, len,
3358 "Atheros AR%s Rev:%x",
3359 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3360 ah->hw_version.macRev);
3361 } else {
3363 used = scnprintf(hw_name, len,
3364 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
3365 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3366 ah->hw_version.macRev,
3367 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev
3368 & AR_RADIO_SREV_MAJOR)),
3369 ah->hw_version.phyRev);
3370 }
3372 hw_name[used] = '\0';
3373 }
3374 EXPORT_SYMBOL(ath9k_hw_name);
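/*
 * A minimal usage sketch, e.g. for a probe-time log message; the buffer
 * size is arbitrary:
 *
 *	char hw_name[64];
 *
 *	ath9k_hw_name(ah, hw_name, sizeof(hw_name));
 *	ath_info(ath9k_hw_common(ah), "%s\n", hw_name);
 */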