/* drivers/net/wireless/ath/ath9k/ar9003_mac.c */

/*
 * Copyright (c) 2010-2011 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
#include <linux/export.h>
#include "hw.h"
#include "ar9003_mac.h"
#include "ar9003_mci.h"
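
/*
 * rx_enable callback: clear AR_CR so that no receive-disable request is
 * left pending and RX DMA can proceed.
 */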
static void ar9003_hw_rx_enable(struct ath_hw *hw)
{
        REG_WRITE(hw, AR_CR, 0);
}
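
/*
 * Fill in a tx control descriptor (struct ar9003_txc): buffer pointers and
 * lengths, the pointer checksum in ctl10, rate series, RTS/CTS, aggregation
 * and per-chain transmit power fields.
 */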
static void
ar9003_set_txdesc(struct ath_hw *ah, void *ds, struct ath_tx_info *i)
{
        struct ar9003_txc *ads = ds;
        int checksum = 0;
        u32 val, ctl12, ctl17;
        u8 desc_len;

        desc_len = ((AR_SREV_9462(ah) || AR_SREV_9565(ah)) ? 0x18 : 0x17);

        val = (ATHEROS_VENDOR_ID << AR_DescId_S) |
              (1 << AR_TxRxDesc_S) |
              (1 << AR_CtrlStat_S) |
              (i->qcu << AR_TxQcuNum_S) | desc_len;

        checksum += val;
        WRITE_ONCE(ads->info, val);

        checksum += i->link;
        WRITE_ONCE(ads->link, i->link);

        checksum += i->buf_addr[0];
        WRITE_ONCE(ads->data0, i->buf_addr[0]);
        checksum += i->buf_addr[1];
        WRITE_ONCE(ads->data1, i->buf_addr[1]);
        checksum += i->buf_addr[2];
        WRITE_ONCE(ads->data2, i->buf_addr[2]);
        checksum += i->buf_addr[3];
        WRITE_ONCE(ads->data3, i->buf_addr[3]);

        checksum += (val = (i->buf_len[0] << AR_BufLen_S) & AR_BufLen);
        WRITE_ONCE(ads->ctl3, val);
        checksum += (val = (i->buf_len[1] << AR_BufLen_S) & AR_BufLen);
        WRITE_ONCE(ads->ctl5, val);
        checksum += (val = (i->buf_len[2] << AR_BufLen_S) & AR_BufLen);
        WRITE_ONCE(ads->ctl7, val);
        checksum += (val = (i->buf_len[3] << AR_BufLen_S) & AR_BufLen);
        WRITE_ONCE(ads->ctl9, val);

        checksum = (u16) (((checksum & 0xffff) + (checksum >> 16)) & 0xffff);
        WRITE_ONCE(ads->ctl10, checksum);

        if (i->is_first || i->is_last) {
                WRITE_ONCE(ads->ctl13, set11nTries(i->rates, 0)
                        | set11nTries(i->rates, 1)
                        | set11nTries(i->rates, 2)
                        | set11nTries(i->rates, 3)
                        | (i->dur_update ? AR_DurUpdateEna : 0)
                        | SM(0, AR_BurstDur));

                WRITE_ONCE(ads->ctl14, set11nRate(i->rates, 0)
                        | set11nRate(i->rates, 1)
                        | set11nRate(i->rates, 2)
                        | set11nRate(i->rates, 3));
        } else {
                WRITE_ONCE(ads->ctl13, 0);
                WRITE_ONCE(ads->ctl14, 0);
        }

        ads->ctl20 = 0;
        ads->ctl21 = 0;
        ads->ctl22 = 0;
        ads->ctl23 = 0;

        ctl17 = SM(i->keytype, AR_EncrType);
        if (!i->is_first) {
                WRITE_ONCE(ads->ctl11, 0);
                WRITE_ONCE(ads->ctl12, i->is_last ? 0 : AR_TxMore);
                WRITE_ONCE(ads->ctl15, 0);
                WRITE_ONCE(ads->ctl16, 0);
                WRITE_ONCE(ads->ctl17, ctl17);
                WRITE_ONCE(ads->ctl18, 0);
                WRITE_ONCE(ads->ctl19, 0);
                return;
        }

        WRITE_ONCE(ads->ctl11, (i->pkt_len & AR_FrameLen)
                | (i->flags & ATH9K_TXDESC_VMF ? AR_VirtMoreFrag : 0)
                | SM(i->txpower[0], AR_XmitPower0)
                | (i->flags & ATH9K_TXDESC_VEOL ? AR_VEOL : 0)
                | (i->keyix != ATH9K_TXKEYIX_INVALID ? AR_DestIdxValid : 0)
                | (i->flags & ATH9K_TXDESC_LOWRXCHAIN ? AR_LowRxChain : 0)
                | (i->flags & ATH9K_TXDESC_CLRDMASK ? AR_ClrDestMask : 0)
                | (i->flags & ATH9K_TXDESC_RTSENA ? AR_RTSEnable :
                   (i->flags & ATH9K_TXDESC_CTSENA ? AR_CTSEnable : 0)));

        ctl12 = (i->keyix != ATH9K_TXKEYIX_INVALID ?
                 SM(i->keyix, AR_DestIdx) : 0)
                | SM(i->type, AR_FrameType)
                | (i->flags & ATH9K_TXDESC_NOACK ? AR_NoAck : 0)
                | (i->flags & ATH9K_TXDESC_EXT_ONLY ? AR_ExtOnly : 0)
                | (i->flags & ATH9K_TXDESC_EXT_AND_CTL ? AR_ExtAndCtl : 0);

        ctl17 |= (i->flags & ATH9K_TXDESC_LDPC ? AR_LDPC : 0);
        switch (i->aggr) {
        case AGGR_BUF_FIRST:
                ctl17 |= SM(i->aggr_len, AR_AggrLen);
                /* fall through */
        case AGGR_BUF_MIDDLE:
                ctl12 |= AR_IsAggr | AR_MoreAggr;
                ctl17 |= SM(i->ndelim, AR_PadDelim);
                break;
        case AGGR_BUF_LAST:
                ctl12 |= AR_IsAggr;
                break;
        case AGGR_BUF_NONE:
                break;
        }

        val = (i->flags & ATH9K_TXDESC_PAPRD) >> ATH9K_TXDESC_PAPRD_S;
        ctl12 |= SM(val, AR_PAPRDChainMask);

        WRITE_ONCE(ads->ctl12, ctl12);
        WRITE_ONCE(ads->ctl17, ctl17);

        WRITE_ONCE(ads->ctl15, set11nPktDurRTSCTS(i->rates, 0)
                | set11nPktDurRTSCTS(i->rates, 1));

        WRITE_ONCE(ads->ctl16, set11nPktDurRTSCTS(i->rates, 2)
                | set11nPktDurRTSCTS(i->rates, 3));

        WRITE_ONCE(ads->ctl18, set11nRateFlags(i->rates, 0)
                | set11nRateFlags(i->rates, 1)
                | set11nRateFlags(i->rates, 2)
                | set11nRateFlags(i->rates, 3)
                | SM(i->rtscts_rate, AR_RTSCTSRate));

        WRITE_ONCE(ads->ctl19, AR_Not_Sounding);

        WRITE_ONCE(ads->ctl20, SM(i->txpower[1], AR_XmitPower1));
        WRITE_ONCE(ads->ctl21, SM(i->txpower[2], AR_XmitPower2));
        WRITE_ONCE(ads->ctl22, SM(i->txpower[3], AR_XmitPower3));
}
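
/*
 * Sum the descriptor's pointer and length words and fold the carry so the
 * result fits the 16-bit AR_TxPtrChkSum field kept in ctl10.
 */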
static u16 ar9003_calc_ptr_chksum(struct ar9003_txc *ads)
{
        int checksum;

        checksum = ads->info + ads->link
                + ads->data0 + ads->ctl3
                + ads->data1 + ads->ctl5
                + ads->data2 + ads->ctl7
                + ads->data3 + ads->ctl9;

        return ((checksum & 0xffff) + (checksum >> 16)) & AR_TxPtrChkSum;
}
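
/*
 * Re-link a queued descriptor: update the link pointer and refresh the
 * pointer checksum in ctl10 so it still matches the descriptor contents.
 */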
static void ar9003_hw_set_desc_link(void *ds, u32 ds_link)
{
        struct ar9003_txc *ads = ds;

        ads->link = ds_link;
        ads->ctl10 &= ~AR_TxPtrChkSum;
        ads->ctl10 |= ar9003_calc_ptr_chksum(ads);
}
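
/*
 * Read the interrupt cause registers and translate the hardware bits into
 * ATH9K_INT_* flags in *masked.  Handles the async causes (MAC, and MCI on
 * chips that have it) as well as the sync (host interface) causes, and
 * returns false when no interrupt of interest is pending.
 */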
static bool ar9003_hw_get_isr(struct ath_hw *ah, enum ath9k_int *masked,
                              u32 *sync_cause_p)
{
        u32 isr = 0;
        u32 mask2 = 0;
        struct ath9k_hw_capabilities *pCap = &ah->caps;
        struct ath_common *common = ath9k_hw_common(ah);
        u32 sync_cause = 0, async_cause, async_mask = AR_INTR_MAC_IRQ;
        bool fatal_int;

        if (ath9k_hw_mci_is_enabled(ah))
                async_mask |= AR_INTR_ASYNC_MASK_MCI;

        async_cause = REG_READ(ah, AR_INTR_ASYNC_CAUSE);

        if (async_cause & async_mask) {
                if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
                                == AR_RTC_STATUS_ON)
                        isr = REG_READ(ah, AR_ISR);
        }

        sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) & AR_INTR_SYNC_DEFAULT;

        *masked = 0;

        if (!isr && !sync_cause && !async_cause)
                return false;

        if (isr) {
                if (isr & AR_ISR_BCNMISC) {
                        u32 isr2;
                        isr2 = REG_READ(ah, AR_ISR_S2);

                        mask2 |= ((isr2 & AR_ISR_S2_TIM) >>
                                  MAP_ISR_S2_TIM);
                        mask2 |= ((isr2 & AR_ISR_S2_DTIM) >>
                                  MAP_ISR_S2_DTIM);
                        mask2 |= ((isr2 & AR_ISR_S2_DTIMSYNC) >>
                                  MAP_ISR_S2_DTIMSYNC);
                        mask2 |= ((isr2 & AR_ISR_S2_CABEND) >>
                                  MAP_ISR_S2_CABEND);
                        mask2 |= ((isr2 & AR_ISR_S2_GTT) <<
                                  MAP_ISR_S2_GTT);
                        mask2 |= ((isr2 & AR_ISR_S2_CST) <<
                                  MAP_ISR_S2_CST);
                        mask2 |= ((isr2 & AR_ISR_S2_TSFOOR) >>
                                  MAP_ISR_S2_TSFOOR);
                        mask2 |= ((isr2 & AR_ISR_S2_BB_WATCHDOG) >>
                                  MAP_ISR_S2_BB_WATCHDOG);

                        if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                                REG_WRITE(ah, AR_ISR_S2, isr2);
                                isr &= ~AR_ISR_BCNMISC;
                        }
                }

                if ((pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED))
                        isr = REG_READ(ah, AR_ISR_RAC);

                if (isr == 0xffffffff) {
                        *masked = 0;
                        return false;
                }

                *masked = isr & ATH9K_INT_COMMON;

                if (ah->config.rx_intr_mitigation)
                        if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
                                *masked |= ATH9K_INT_RXLP;

                if (ah->config.tx_intr_mitigation)
                        if (isr & (AR_ISR_TXMINTR | AR_ISR_TXINTM))
                                *masked |= ATH9K_INT_TX;

                if (isr & (AR_ISR_LP_RXOK | AR_ISR_RXERR))
                        *masked |= ATH9K_INT_RXLP;

                if (isr & AR_ISR_HP_RXOK)
                        *masked |= ATH9K_INT_RXHP;

                if (isr & (AR_ISR_TXOK | AR_ISR_TXERR | AR_ISR_TXEOL)) {
                        *masked |= ATH9K_INT_TX;

                        if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                                u32 s0, s1;
                                s0 = REG_READ(ah, AR_ISR_S0);
                                REG_WRITE(ah, AR_ISR_S0, s0);
                                s1 = REG_READ(ah, AR_ISR_S1);
                                REG_WRITE(ah, AR_ISR_S1, s1);

                                isr &= ~(AR_ISR_TXOK | AR_ISR_TXERR |
                                         AR_ISR_TXEOL);
                        }
                }

                if (isr & AR_ISR_GENTMR) {
                        u32 s5;

                        if (pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)
                                s5 = REG_READ(ah, AR_ISR_S5_S);
                        else
                                s5 = REG_READ(ah, AR_ISR_S5);

                        ah->intr_gen_timer_trigger =
                                MS(s5, AR_ISR_S5_GENTIMER_TRIG);

                        ah->intr_gen_timer_thresh =
                                MS(s5, AR_ISR_S5_GENTIMER_THRESH);

                        if (ah->intr_gen_timer_trigger)
                                *masked |= ATH9K_INT_GENTIMER;

                        if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                                REG_WRITE(ah, AR_ISR_S5, s5);
                                isr &= ~AR_ISR_GENTMR;
                        }
                }

                *masked |= mask2;

                if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                        REG_WRITE(ah, AR_ISR, isr);

                        (void) REG_READ(ah, AR_ISR);
                }

                if (*masked & ATH9K_INT_BB_WATCHDOG)
                        ar9003_hw_bb_watchdog_read(ah);
        }

        if (async_cause & AR_INTR_ASYNC_MASK_MCI)
                ar9003_mci_get_isr(ah, masked);

        if (sync_cause) {
                if (sync_cause_p)
                        *sync_cause_p = sync_cause;
                fatal_int =
                        (sync_cause &
                         (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
                        ? true : false;

                if (fatal_int) {
                        if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
                                ath_dbg(common, ANY,
                                        "received PCI FATAL interrupt\n");
                        }
                        if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
                                ath_dbg(common, ANY,
                                        "received PCI PERR interrupt\n");
                        }
                        *masked |= ATH9K_INT_FATAL;
                }

                if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
                        REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
                        REG_WRITE(ah, AR_RC, 0);
                        *masked |= ATH9K_INT_FATAL;
                }

                if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT)
                        ath_dbg(common, INTERRUPT,
                                "AR_INTR_SYNC_LOCAL_TIMEOUT\n");

                REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
                (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
        }

        return true;
}
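
/*
 * Pop the oldest entry from the tx status ring and convert it into an
 * ath_tx_status.  Returns -EINPROGRESS while the hardware has not completed
 * the entry, -EIO if the descriptor ID does not look like one of ours.
 */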
static int ar9003_hw_proc_txdesc(struct ath_hw *ah, void *ds,
                                 struct ath_tx_status *ts)
{
        struct ar9003_txs *ads;
        u32 status;

        ads = &ah->ts_ring[ah->ts_tail];

        status = READ_ONCE(ads->status8);
        if ((status & AR_TxDone) == 0)
                return -EINPROGRESS;

        ah->ts_tail = (ah->ts_tail + 1) % ah->ts_size;

        if ((MS(ads->ds_info, AR_DescId) != ATHEROS_VENDOR_ID) ||
            (MS(ads->ds_info, AR_TxRxDesc) != 1)) {
                ath_dbg(ath9k_hw_common(ah), XMIT,
                        "Tx Descriptor error %x\n", ads->ds_info);
                memset(ads, 0, sizeof(*ads));
                return -EIO;
        }

        ts->ts_rateindex = MS(status, AR_FinalTxIdx);
        ts->ts_seqnum = MS(status, AR_SeqNum);
        ts->tid = MS(status, AR_TxTid);

        ts->qid = MS(ads->ds_info, AR_TxQcuNum);
        ts->desc_id = MS(ads->status1, AR_TxDescId);
        ts->ts_tstamp = ads->status4;
        ts->ts_status = 0;
        ts->ts_flags = 0;

        if (status & AR_TxOpExceeded)
                ts->ts_status |= ATH9K_TXERR_XTXOP;
        status = READ_ONCE(ads->status2);
        ts->ts_rssi_ctl0 = MS(status, AR_TxRSSIAnt00);
        ts->ts_rssi_ctl1 = MS(status, AR_TxRSSIAnt01);
        ts->ts_rssi_ctl2 = MS(status, AR_TxRSSIAnt02);
        if (status & AR_TxBaStatus) {
                ts->ts_flags |= ATH9K_TX_BA;
                ts->ba_low = ads->status5;
                ts->ba_high = ads->status6;
        }

        status = READ_ONCE(ads->status3);
        if (status & AR_ExcessiveRetries)
                ts->ts_status |= ATH9K_TXERR_XRETRY;
        if (status & AR_Filtered)
                ts->ts_status |= ATH9K_TXERR_FILT;
        if (status & AR_FIFOUnderrun) {
                ts->ts_status |= ATH9K_TXERR_FIFO;
                ath9k_hw_updatetxtriglevel(ah, true);
        }
        if (status & AR_TxTimerExpired)
                ts->ts_status |= ATH9K_TXERR_TIMER_EXPIRED;
        if (status & AR_DescCfgErr)
                ts->ts_flags |= ATH9K_TX_DESC_CFG_ERR;
        if (status & AR_TxDataUnderrun) {
                ts->ts_flags |= ATH9K_TX_DATA_UNDERRUN;
                ath9k_hw_updatetxtriglevel(ah, true);
        }
        if (status & AR_TxDelimUnderrun) {
                ts->ts_flags |= ATH9K_TX_DELIM_UNDERRUN;
                ath9k_hw_updatetxtriglevel(ah, true);
        }
        ts->ts_shortretry = MS(status, AR_RTSFailCnt);
        ts->ts_longretry = MS(status, AR_DataFailCnt);
        ts->ts_virtcol = MS(status, AR_VirtRetryCnt);

        status = READ_ONCE(ads->status7);
        ts->ts_rssi = MS(status, AR_TxRSSICombined);
        ts->ts_rssi_ext0 = MS(status, AR_TxRSSIAnt10);
        ts->ts_rssi_ext1 = MS(status, AR_TxRSSIAnt11);
        ts->ts_rssi_ext2 = MS(status, AR_TxRSSIAnt12);

        memset(ads, 0, sizeof(*ads));

        return 0;
}
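
/*
 * Return the packet duration programmed for rate series @index of a tx
 * control descriptor.
 */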
static int ar9003_hw_get_duration(struct ath_hw *ah, const void *ds, int index)
{
        const struct ar9003_txc *adc = ds;

        switch (index) {
        case 0:
                return MS(READ_ONCE(adc->ctl15), AR_PacketDur0);
        case 1:
                return MS(READ_ONCE(adc->ctl15), AR_PacketDur1);
        case 2:
                return MS(READ_ONCE(adc->ctl16), AR_PacketDur2);
        case 3:
                return MS(READ_ONCE(adc->ctl16), AR_PacketDur3);
        default:
                return 0;
        }
}
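
/* Install the AR9003-family implementations of the MAC callbacks. */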
void ar9003_hw_attach_mac_ops(struct ath_hw *hw)
{
        struct ath_hw_ops *ops = ath9k_hw_ops(hw);

        ops->rx_enable = ar9003_hw_rx_enable;
        ops->set_desc_link = ar9003_hw_set_desc_link;
        ops->get_isr = ar9003_hw_get_isr;
        ops->set_txdesc = ar9003_set_txdesc;
        ops->proc_txdesc = ar9003_hw_proc_txdesc;
        ops->get_duration = ar9003_hw_get_duration;
}
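
/* Program the size of the receive buffers handed to the hardware. */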
void ath9k_hw_set_rx_bufsize(struct ath_hw *ah, u16 buf_size)
{
        REG_WRITE(ah, AR_DATABUF_SIZE, buf_size & AR_DATABUF_SIZE_MASK);
}
EXPORT_SYMBOL(ath9k_hw_set_rx_bufsize);
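
/*
 * Hand an RX buffer's DMA address to either the high-priority or the
 * low-priority RX descriptor pointer register.
 */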
void ath9k_hw_addrxbuf_edma(struct ath_hw *ah, u32 rxdp,
                            enum ath9k_rx_qtype qtype)
{
        if (qtype == ATH9K_RX_QUEUE_HP)
                REG_WRITE(ah, AR_HP_RXDP, rxdp);
        else
                REG_WRITE(ah, AR_LP_RXDP, rxdp);
}
EXPORT_SYMBOL(ath9k_hw_addrxbuf_edma);
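
/*
 * Parse an EDMA RX status descriptor into ath_rx_status.  Returns
 * -EINPROGRESS while the descriptor is not yet complete and -EINVAL if the
 * descriptor ID is not the expected Atheros value.
 */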
int ath9k_hw_process_rxdesc_edma(struct ath_hw *ah, struct ath_rx_status *rxs,
                                 void *buf_addr)
{
        struct ar9003_rxs *rxsp = buf_addr;
        unsigned int phyerr;

        if ((rxsp->status11 & AR_RxDone) == 0)
                return -EINPROGRESS;

        if (MS(rxsp->ds_info, AR_DescId) != 0x168c)
                return -EINVAL;

        if ((rxsp->ds_info & (AR_TxRxDesc | AR_CtrlStat)) != 0)
                return -EINPROGRESS;

        rxs->rs_status = 0;
        rxs->rs_flags = 0;
        rxs->enc_flags = 0;
        rxs->bw = RATE_INFO_BW_20;

        rxs->rs_datalen = rxsp->status2 & AR_DataLen;
        rxs->rs_tstamp = rxsp->status3;

        /* XXX: Keycache */
        rxs->rs_rssi = MS(rxsp->status5, AR_RxRSSICombined);
        rxs->rs_rssi_ctl[0] = MS(rxsp->status1, AR_RxRSSIAnt00);
        rxs->rs_rssi_ctl[1] = MS(rxsp->status1, AR_RxRSSIAnt01);
        rxs->rs_rssi_ctl[2] = MS(rxsp->status1, AR_RxRSSIAnt02);
        rxs->rs_rssi_ext[0] = MS(rxsp->status5, AR_RxRSSIAnt10);
        rxs->rs_rssi_ext[1] = MS(rxsp->status5, AR_RxRSSIAnt11);
        rxs->rs_rssi_ext[2] = MS(rxsp->status5, AR_RxRSSIAnt12);

        if (rxsp->status11 & AR_RxKeyIdxValid)
                rxs->rs_keyix = MS(rxsp->status11, AR_KeyIdx);
        else
                rxs->rs_keyix = ATH9K_RXKEYIX_INVALID;

        rxs->rs_rate = MS(rxsp->status1, AR_RxRate);
        rxs->rs_more = (rxsp->status2 & AR_RxMore) ? 1 : 0;

        rxs->rs_firstaggr = (rxsp->status11 & AR_RxFirstAggr) ? 1 : 0;
        rxs->rs_isaggr = (rxsp->status11 & AR_RxAggr) ? 1 : 0;
        rxs->rs_moreaggr = (rxsp->status11 & AR_RxMoreAggr) ? 1 : 0;
        rxs->rs_antenna = (MS(rxsp->status4, AR_RxAntenna) & 0x7);
        rxs->enc_flags |= (rxsp->status4 & AR_GI) ? RX_ENC_FLAG_SHORT_GI : 0;
        rxs->bw = (rxsp->status4 & AR_2040) ? RATE_INFO_BW_40 : RATE_INFO_BW_20;

        rxs->evm0 = rxsp->status6;
        rxs->evm1 = rxsp->status7;
        rxs->evm2 = rxsp->status8;
        rxs->evm3 = rxsp->status9;
        rxs->evm4 = (rxsp->status10 & 0xffff);

        if (rxsp->status11 & AR_PreDelimCRCErr)
                rxs->rs_flags |= ATH9K_RX_DELIM_CRC_PRE;

        if (rxsp->status11 & AR_PostDelimCRCErr)
                rxs->rs_flags |= ATH9K_RX_DELIM_CRC_POST;

        if (rxsp->status11 & AR_DecryptBusyErr)
                rxs->rs_flags |= ATH9K_RX_DECRYPT_BUSY;

        if ((rxsp->status11 & AR_RxFrameOK) == 0) {
                /*
                 * AR_CRCErr will be set if we're on the last subframe and
                 * AR_PostDelimCRCErr is caught.  In a way this also
                 * guarantees that when (!(AR_CRCErr) && (AR_PostDelimCRCErr))
                 * we cannot possibly be looking at the last subframe.
                 * AR_CRCErr is the CRC of the actual data.
                 */
                if (rxsp->status11 & AR_CRCErr)
                        rxs->rs_status |= ATH9K_RXERR_CRC;
                else if (rxsp->status11 & AR_DecryptCRCErr)
                        rxs->rs_status |= ATH9K_RXERR_DECRYPT;
                else if (rxsp->status11 & AR_MichaelErr)
                        rxs->rs_status |= ATH9K_RXERR_MIC;
                if (rxsp->status11 & AR_PHYErr) {
                        phyerr = MS(rxsp->status11, AR_PHYErrCode);
                        /*
                         * If we reach this point with AR_PostDelimCRCErr set,
                         * it implies we're *not* on the last subframe.  In
                         * that case we already know that the CRC of the frame
                         * was OK, and the MAC would send an ACK for that
                         * subframe, even if we did get a phy error of type
                         * ATH9K_PHYERR_OFDM_RESTART.  This is only applicable
                         * to frames prior to the last subframe.
                         * AR_PostDelimCRCErr is the CRC for the MPDU
                         * delimiter, which contains the 4 reserved bits,
                         * the MPDU length (12 bits), and follows the MPDU
                         * delimiter for an A-MPDU subframe (0x4E = 'N' ASCII).
                         */
                        if ((phyerr == ATH9K_PHYERR_OFDM_RESTART) &&
                            (rxsp->status11 & AR_PostDelimCRCErr)) {
                                rxs->rs_phyerr = 0;
                        } else {
                                rxs->rs_status |= ATH9K_RXERR_PHY;
                                rxs->rs_phyerr = phyerr;
                        }
                }
        }

        if (rxsp->status11 & AR_KeyMiss)
                rxs->rs_status |= ATH9K_RXERR_KEYMISS;

        return 0;
}
EXPORT_SYMBOL(ath9k_hw_process_rxdesc_edma);
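
/*
 * Clear the tx status ring, reset the tail index and reprogram the ring
 * start/end addresses into the QCU status ring registers.
 */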
void ath9k_hw_reset_txstatus_ring(struct ath_hw *ah)
{
        ah->ts_tail = 0;

        memset((void *) ah->ts_ring, 0,
                ah->ts_size * sizeof(struct ar9003_txs));

        ath_dbg(ath9k_hw_common(ah), XMIT,
                "TS Start 0x%x End 0x%x Virt %p, Size %d\n",
                ah->ts_paddr_start, ah->ts_paddr_end,
                ah->ts_ring, ah->ts_size);

        REG_WRITE(ah, AR_Q_STATUS_RING_START, ah->ts_paddr_start);
        REG_WRITE(ah, AR_Q_STATUS_RING_END, ah->ts_paddr_end);
}
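
/*
 * Record the location and size of the tx status ring in ath_hw and hand the
 * ring to the hardware via ath9k_hw_reset_txstatus_ring().
 */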
void ath9k_hw_setup_statusring(struct ath_hw *ah, void *ts_start,
                               u32 ts_paddr_start,
                               u16 size)
{
        ah->ts_paddr_start = ts_paddr_start;
        ah->ts_paddr_end = ts_paddr_start + (size * sizeof(struct ar9003_txs));
        ah->ts_size = size;
        ah->ts_ring = ts_start;

        ath9k_hw_reset_txstatus_ring(ah);
}
EXPORT_SYMBOL(ath9k_hw_setup_statusring);