/*
 * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
/* Designated initializer for one hw radar-detection engine configuration
 * entry (struct mt76x02_radar_specs). Field order mirrors the register
 * programming sequence in mt76x02_dfs_set_bbp_params().
 */
#define RADAR_SPEC(m, len, el, eh, wl, wh,		\
		   w_tolerance, tl, th, t_tolerance,	\
		   bl, bh, event_exp, power_jmp)	\
{							\
	.mode = m,					\
	.avg_len = len,					\
	.e_low = el,					\
	.e_high = eh,					\
	.w_low = wl,					\
	.w_high = wh,					\
	.w_margin = w_tolerance,			\
	.t_low = tl,					\
	.t_high = th,					\
	.t_margin = t_tolerance,			\
	.b_low = bl,					\
	.b_high = bh,					\
	.event_expiration = event_exp,			\
	.pwr_jmp = power_jmp				\
}
39 static const struct mt76x02_radar_specs etsi_radar_specs
[] = {
41 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
42 0x7fffffff, 0x155cc0, 0x19cc),
43 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
44 0x7fffffff, 0x155cc0, 0x19cc),
45 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
46 0x7fffffff, 0x155cc0, 0x19dd),
47 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
48 0x7fffffff, 0x2191c0, 0x15cc),
50 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
51 0x7fffffff, 0x155cc0, 0x19cc),
52 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
53 0x7fffffff, 0x155cc0, 0x19cc),
54 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
55 0x7fffffff, 0x155cc0, 0x19dd),
56 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
57 0x7fffffff, 0x2191c0, 0x15cc),
59 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
60 0x7fffffff, 0x155cc0, 0x19cc),
61 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
62 0x7fffffff, 0x155cc0, 0x19cc),
63 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
64 0x7fffffff, 0x155cc0, 0x19dd),
65 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
66 0x7fffffff, 0x2191c0, 0x15cc)
69 static const struct mt76x02_radar_specs fcc_radar_specs
[] = {
71 RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
72 0x7fffffff, 0xfe808, 0x13dc),
73 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
74 0x7fffffff, 0xfe808, 0x19dd),
75 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
76 0x7fffffff, 0xfe808, 0x12cc),
77 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
78 0x3938700, 0x57bcf00, 0x1289),
80 RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
81 0x7fffffff, 0xfe808, 0x13dc),
82 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
83 0x7fffffff, 0xfe808, 0x19dd),
84 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
85 0x7fffffff, 0xfe808, 0x12cc),
86 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
87 0x3938700, 0x57bcf00, 0x1289),
89 RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
90 0x7fffffff, 0xfe808, 0x16cc),
91 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
92 0x7fffffff, 0xfe808, 0x19dd),
93 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
94 0x7fffffff, 0xfe808, 0x12cc),
95 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
96 0x3938700, 0x57bcf00, 0x1289)
99 static const struct mt76x02_radar_specs jp_w56_radar_specs
[] = {
101 RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
102 0x7fffffff, 0x14c080, 0x13dc),
103 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
104 0x7fffffff, 0x14c080, 0x19dd),
105 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
106 0x7fffffff, 0x14c080, 0x12cc),
107 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
108 0x3938700, 0X57bcf00, 0x1289),
110 RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
111 0x7fffffff, 0x14c080, 0x13dc),
112 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
113 0x7fffffff, 0x14c080, 0x19dd),
114 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
115 0x7fffffff, 0x14c080, 0x12cc),
116 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
117 0x3938700, 0X57bcf00, 0x1289),
119 RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
120 0x7fffffff, 0x14c080, 0x16cc),
121 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
122 0x7fffffff, 0x14c080, 0x19dd),
123 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
124 0x7fffffff, 0x14c080, 0x12cc),
125 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
126 0x3938700, 0X57bcf00, 0x1289)
129 static const struct mt76x02_radar_specs jp_w53_radar_specs
[] = {
131 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
132 0x7fffffff, 0x14c080, 0x16cc),
134 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
135 0x7fffffff, 0x14c080, 0x16cc),
138 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
139 0x7fffffff, 0x14c080, 0x16cc),
141 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
142 0x7fffffff, 0x14c080, 0x16cc),
145 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
146 0x7fffffff, 0x14c080, 0x16cc),
148 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
149 0x7fffffff, 0x14c080, 0x16cc),
154 mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev
*dev
, u8 enable
)
158 data
= (1 << 1) | enable
;
159 mt76_wr(dev
, MT_BBP(DFS
, 36), data
);
162 static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev
*dev
,
163 struct mt76x02_dfs_sequence
*seq
)
165 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
167 list_add(&seq
->head
, &dfs_pd
->seq_pool
);
169 dfs_pd
->seq_stats
.seq_pool_len
++;
170 dfs_pd
->seq_stats
.seq_len
--;
173 static struct mt76x02_dfs_sequence
*
174 mt76x02_dfs_seq_pool_get(struct mt76x02_dev
*dev
)
176 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
177 struct mt76x02_dfs_sequence
*seq
;
179 if (list_empty(&dfs_pd
->seq_pool
)) {
180 seq
= devm_kzalloc(dev
->mt76
.dev
, sizeof(*seq
), GFP_ATOMIC
);
182 seq
= list_first_entry(&dfs_pd
->seq_pool
,
183 struct mt76x02_dfs_sequence
,
185 list_del(&seq
->head
);
186 dfs_pd
->seq_stats
.seq_pool_len
--;
189 dfs_pd
->seq_stats
.seq_len
++;
/* Return how many times @frac fits into @val within @margin tolerance,
 * or 0 if @val is not an (approximate) integer multiple of @frac.
 * Used to check whether a pulse interval is a multiple of a sequence's
 * pulse repetition interval (PRI).
 */
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int remainder, factor;

	if (!frac)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	factor = val / frac;
	remainder = val % frac;

	if (remainder > margin) {
		if ((frac - remainder) <= margin)
			factor++;
		else
			factor = 0;
	}
	return factor;
}
216 static void mt76x02_dfs_detector_reset(struct mt76x02_dev
*dev
)
218 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
219 struct mt76x02_dfs_sequence
*seq
, *tmp_seq
;
222 /* reset hw detector */
223 mt76_wr(dev
, MT_BBP(DFS
, 1), 0xf);
225 /* reset sw detector */
226 for (i
= 0; i
< ARRAY_SIZE(dfs_pd
->event_rb
); i
++) {
227 dfs_pd
->event_rb
[i
].h_rb
= 0;
228 dfs_pd
->event_rb
[i
].t_rb
= 0;
231 list_for_each_entry_safe(seq
, tmp_seq
, &dfs_pd
->sequences
, head
) {
232 list_del_init(&seq
->head
);
233 mt76x02_dfs_seq_pool_put(dev
, seq
);
237 static bool mt76x02_dfs_check_chirp(struct mt76x02_dev
*dev
)
240 u32 current_ts
, delta_ts
;
241 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
243 current_ts
= mt76_rr(dev
, MT_PBF_LIFE_TIMER
);
244 delta_ts
= current_ts
- dfs_pd
->chirp_pulse_ts
;
245 dfs_pd
->chirp_pulse_ts
= current_ts
;
248 if (delta_ts
<= (12 * (1 << 20))) {
249 if (++dfs_pd
->chirp_pulse_cnt
> 8)
252 dfs_pd
->chirp_pulse_cnt
= 1;
258 static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev
*dev
,
259 struct mt76x02_dfs_hw_pulse
*pulse
)
264 data
= (MT_DFS_CH_EN
<< 16) | pulse
->engine
;
265 mt76_wr(dev
, MT_BBP(DFS
, 0), data
);
267 /* reported period */
268 pulse
->period
= mt76_rr(dev
, MT_BBP(DFS
, 19));
271 pulse
->w1
= mt76_rr(dev
, MT_BBP(DFS
, 20));
272 pulse
->w2
= mt76_rr(dev
, MT_BBP(DFS
, 23));
274 /* reported burst number */
275 pulse
->burst
= mt76_rr(dev
, MT_BBP(DFS
, 22));
278 static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev
*dev
,
279 struct mt76x02_dfs_hw_pulse
*pulse
)
283 if (!pulse
->period
|| !pulse
->w1
)
286 switch (dev
->dfs_pd
.region
) {
287 case NL80211_DFS_FCC
:
288 if (pulse
->engine
> 3)
291 if (pulse
->engine
== 3) {
292 ret
= mt76x02_dfs_check_chirp(dev
);
296 /* check short pulse*/
298 ret
= (pulse
->period
>= 2900 &&
299 (pulse
->period
<= 4700 ||
300 pulse
->period
>= 6400) &&
301 (pulse
->period
<= 6800 ||
302 pulse
->period
>= 10200) &&
303 pulse
->period
<= 61600);
304 else if (pulse
->w1
< 130) /* 120 - 130 */
305 ret
= (pulse
->period
>= 2900 &&
306 pulse
->period
<= 61600);
308 ret
= (pulse
->period
>= 3500 &&
309 pulse
->period
<= 10100);
311 case NL80211_DFS_ETSI
:
312 if (pulse
->engine
>= 3)
315 ret
= (pulse
->period
>= 4900 &&
316 (pulse
->period
<= 10200 ||
317 pulse
->period
>= 12400) &&
318 pulse
->period
<= 100100);
321 if (dev
->mt76
.chandef
.chan
->center_freq
>= 5250 &&
322 dev
->mt76
.chandef
.chan
->center_freq
<= 5350) {
324 if (pulse
->w1
<= 130)
325 ret
= (pulse
->period
>= 28360 &&
326 (pulse
->period
<= 28700 ||
327 pulse
->period
>= 76900) &&
328 pulse
->period
<= 76940);
332 if (pulse
->engine
> 3)
335 if (pulse
->engine
== 3) {
336 ret
= mt76x02_dfs_check_chirp(dev
);
340 /* check short pulse*/
342 ret
= (pulse
->period
>= 2900 &&
343 (pulse
->period
<= 4700 ||
344 pulse
->period
>= 6400) &&
345 (pulse
->period
<= 6800 ||
346 pulse
->period
>= 27560) &&
347 (pulse
->period
<= 27960 ||
348 pulse
->period
>= 28360) &&
349 (pulse
->period
<= 28700 ||
350 pulse
->period
>= 79900) &&
351 pulse
->period
<= 80100);
352 else if (pulse
->w1
< 130) /* 120 - 130 */
353 ret
= (pulse
->period
>= 2900 &&
354 (pulse
->period
<= 10100 ||
355 pulse
->period
>= 27560) &&
356 (pulse
->period
<= 27960 ||
357 pulse
->period
>= 28360) &&
358 (pulse
->period
<= 28700 ||
359 pulse
->period
>= 79900) &&
360 pulse
->period
<= 80100);
362 ret
= (pulse
->period
>= 3900 &&
363 pulse
->period
<= 10100);
365 case NL80211_DFS_UNSET
:
373 static bool mt76x02_dfs_fetch_event(struct mt76x02_dev
*dev
,
374 struct mt76x02_dfs_event
*event
)
378 /* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
379 * 2nd: DFS_R37[21:0]: pulse time
380 * 3rd: DFS_R37[11:0]: pulse width
381 * 3rd: DFS_R37[25:16]: phase
382 * 4th: DFS_R37[12:0]: current pwr
383 * 4th: DFS_R37[21:16]: pwr stable counter
385 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
387 data
= mt76_rr(dev
, MT_BBP(DFS
, 37));
388 if (!MT_DFS_CHECK_EVENT(data
))
391 event
->engine
= MT_DFS_EVENT_ENGINE(data
);
392 data
= mt76_rr(dev
, MT_BBP(DFS
, 37));
393 event
->ts
= MT_DFS_EVENT_TIMESTAMP(data
);
394 data
= mt76_rr(dev
, MT_BBP(DFS
, 37));
395 event
->width
= MT_DFS_EVENT_WIDTH(data
);
400 static bool mt76x02_dfs_check_event(struct mt76x02_dev
*dev
,
401 struct mt76x02_dfs_event
*event
)
403 if (event
->engine
== 2) {
404 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
405 struct mt76x02_dfs_event_rb
*event_buff
= &dfs_pd
->event_rb
[1];
409 last_event_idx
= mt76_decr(event_buff
->t_rb
,
410 MT_DFS_EVENT_BUFLEN
);
411 delta_ts
= event
->ts
- event_buff
->data
[last_event_idx
].ts
;
412 if (delta_ts
< MT_DFS_EVENT_TIME_MARGIN
&&
413 event_buff
->data
[last_event_idx
].width
>= 200)
419 static void mt76x02_dfs_queue_event(struct mt76x02_dev
*dev
,
420 struct mt76x02_dfs_event
*event
)
422 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
423 struct mt76x02_dfs_event_rb
*event_buff
;
425 /* add radar event to ring buffer */
426 event_buff
= event
->engine
== 2 ? &dfs_pd
->event_rb
[1]
427 : &dfs_pd
->event_rb
[0];
428 event_buff
->data
[event_buff
->t_rb
] = *event
;
429 event_buff
->data
[event_buff
->t_rb
].fetch_ts
= jiffies
;
431 event_buff
->t_rb
= mt76_incr(event_buff
->t_rb
, MT_DFS_EVENT_BUFLEN
);
432 if (event_buff
->t_rb
== event_buff
->h_rb
)
433 event_buff
->h_rb
= mt76_incr(event_buff
->h_rb
,
434 MT_DFS_EVENT_BUFLEN
);
437 static int mt76x02_dfs_create_sequence(struct mt76x02_dev
*dev
,
438 struct mt76x02_dfs_event
*event
,
441 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
442 struct mt76x02_dfs_sw_detector_params
*sw_params
;
443 u32 width_delta
, with_sum
, factor
, cur_pri
;
444 struct mt76x02_dfs_sequence seq
, *seq_p
;
445 struct mt76x02_dfs_event_rb
*event_rb
;
446 struct mt76x02_dfs_event
*cur_event
;
449 event_rb
= event
->engine
== 2 ? &dfs_pd
->event_rb
[1]
450 : &dfs_pd
->event_rb
[0];
452 i
= mt76_decr(event_rb
->t_rb
, MT_DFS_EVENT_BUFLEN
);
453 end
= mt76_decr(event_rb
->h_rb
, MT_DFS_EVENT_BUFLEN
);
456 cur_event
= &event_rb
->data
[i
];
457 with_sum
= event
->width
+ cur_event
->width
;
459 sw_params
= &dfs_pd
->sw_dpd_params
;
460 switch (dev
->dfs_pd
.region
) {
461 case NL80211_DFS_FCC
:
466 width_delta
= with_sum
>> 3;
468 case NL80211_DFS_ETSI
:
469 if (event
->engine
== 2)
470 width_delta
= with_sum
>> 6;
471 else if (with_sum
< 620)
476 case NL80211_DFS_UNSET
:
481 pri
= event
->ts
- cur_event
->ts
;
482 if (abs(event
->width
- cur_event
->width
) > width_delta
||
483 pri
< sw_params
->min_pri
)
486 if (pri
> sw_params
->max_pri
)
489 seq
.pri
= event
->ts
- cur_event
->ts
;
490 seq
.first_ts
= cur_event
->ts
;
491 seq
.last_ts
= event
->ts
;
492 seq
.engine
= event
->engine
;
495 j
= mt76_decr(i
, MT_DFS_EVENT_BUFLEN
);
497 cur_event
= &event_rb
->data
[j
];
498 cur_pri
= event
->ts
- cur_event
->ts
;
499 factor
= mt76x02_dfs_get_multiple(cur_pri
, seq
.pri
,
500 sw_params
->pri_margin
);
502 seq
.first_ts
= cur_event
->ts
;
506 j
= mt76_decr(j
, MT_DFS_EVENT_BUFLEN
);
508 if (seq
.count
<= cur_len
)
511 seq_p
= mt76x02_dfs_seq_pool_get(dev
);
516 INIT_LIST_HEAD(&seq_p
->head
);
517 list_add(&seq_p
->head
, &dfs_pd
->sequences
);
519 i
= mt76_decr(i
, MT_DFS_EVENT_BUFLEN
);
524 static u16
mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev
*dev
,
525 struct mt76x02_dfs_event
*event
)
527 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
528 struct mt76x02_dfs_sw_detector_params
*sw_params
;
529 struct mt76x02_dfs_sequence
*seq
, *tmp_seq
;
533 sw_params
= &dfs_pd
->sw_dpd_params
;
534 list_for_each_entry_safe(seq
, tmp_seq
, &dfs_pd
->sequences
, head
) {
535 if (event
->ts
> seq
->first_ts
+ MT_DFS_SEQUENCE_WINDOW
) {
536 list_del_init(&seq
->head
);
537 mt76x02_dfs_seq_pool_put(dev
, seq
);
541 if (event
->engine
!= seq
->engine
)
544 pri
= event
->ts
- seq
->last_ts
;
545 factor
= mt76x02_dfs_get_multiple(pri
, seq
->pri
,
546 sw_params
->pri_margin
);
548 seq
->last_ts
= event
->ts
;
550 max_seq_len
= max_t(u16
, max_seq_len
, seq
->count
);
556 static bool mt76x02_dfs_check_detection(struct mt76x02_dev
*dev
)
558 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
559 struct mt76x02_dfs_sequence
*seq
;
561 if (list_empty(&dfs_pd
->sequences
))
564 list_for_each_entry(seq
, &dfs_pd
->sequences
, head
) {
565 if (seq
->count
> MT_DFS_SEQUENCE_TH
) {
566 dfs_pd
->stats
[seq
->engine
].sw_pattern
++;
573 static void mt76x02_dfs_add_events(struct mt76x02_dev
*dev
)
575 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
576 struct mt76x02_dfs_event event
;
579 /* disable debug mode */
580 mt76x02_dfs_set_capture_mode_ctrl(dev
, false);
581 for (i
= 0; i
< MT_DFS_EVENT_LOOP
; i
++) {
582 if (!mt76x02_dfs_fetch_event(dev
, &event
))
585 if (dfs_pd
->last_event_ts
> event
.ts
)
586 mt76x02_dfs_detector_reset(dev
);
587 dfs_pd
->last_event_ts
= event
.ts
;
589 if (!mt76x02_dfs_check_event(dev
, &event
))
592 seq_len
= mt76x02_dfs_add_event_to_sequence(dev
, &event
);
593 mt76x02_dfs_create_sequence(dev
, &event
, seq_len
);
595 mt76x02_dfs_queue_event(dev
, &event
);
597 mt76x02_dfs_set_capture_mode_ctrl(dev
, true);
600 static void mt76x02_dfs_check_event_window(struct mt76x02_dev
*dev
)
602 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
603 struct mt76x02_dfs_event_rb
*event_buff
;
604 struct mt76x02_dfs_event
*event
;
607 for (i
= 0; i
< ARRAY_SIZE(dfs_pd
->event_rb
); i
++) {
608 event_buff
= &dfs_pd
->event_rb
[i
];
610 while (event_buff
->h_rb
!= event_buff
->t_rb
) {
611 event
= &event_buff
->data
[event_buff
->h_rb
];
614 if (time_is_after_jiffies(event
->fetch_ts
+
615 MT_DFS_EVENT_WINDOW
))
617 event_buff
->h_rb
= mt76_incr(event_buff
->h_rb
,
618 MT_DFS_EVENT_BUFLEN
);
623 static void mt76x02_dfs_tasklet(unsigned long arg
)
625 struct mt76x02_dev
*dev
= (struct mt76x02_dev
*)arg
;
626 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
630 if (test_bit(MT76_SCANNING
, &dev
->mt76
.state
))
633 if (time_is_before_jiffies(dfs_pd
->last_sw_check
+
634 MT_DFS_SW_TIMEOUT
)) {
637 dfs_pd
->last_sw_check
= jiffies
;
639 mt76x02_dfs_add_events(dev
);
640 radar_detected
= mt76x02_dfs_check_detection(dev
);
641 if (radar_detected
) {
642 /* sw detector rx radar pattern */
643 ieee80211_radar_detected(dev
->mt76
.hw
);
644 mt76x02_dfs_detector_reset(dev
);
648 mt76x02_dfs_check_event_window(dev
);
651 engine_mask
= mt76_rr(dev
, MT_BBP(DFS
, 1));
652 if (!(engine_mask
& 0xf))
655 for (i
= 0; i
< MT_DFS_NUM_ENGINES
; i
++) {
656 struct mt76x02_dfs_hw_pulse pulse
;
658 if (!(engine_mask
& (1 << i
)))
662 mt76x02_dfs_get_hw_pulse(dev
, &pulse
);
664 if (!mt76x02_dfs_check_hw_pulse(dev
, &pulse
)) {
665 dfs_pd
->stats
[i
].hw_pulse_discarded
++;
669 /* hw detector rx radar pattern */
670 dfs_pd
->stats
[i
].hw_pattern
++;
671 ieee80211_radar_detected(dev
->mt76
.hw
);
672 mt76x02_dfs_detector_reset(dev
);
677 /* reset hw detector */
678 mt76_wr(dev
, MT_BBP(DFS
, 1), 0xf);
681 mt76x02_irq_enable(dev
, MT_INT_GPTIMER
);
684 static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev
*dev
)
686 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
688 switch (dev
->dfs_pd
.region
) {
689 case NL80211_DFS_FCC
:
690 dfs_pd
->sw_dpd_params
.max_pri
= MT_DFS_FCC_MAX_PRI
;
691 dfs_pd
->sw_dpd_params
.min_pri
= MT_DFS_FCC_MIN_PRI
;
692 dfs_pd
->sw_dpd_params
.pri_margin
= MT_DFS_PRI_MARGIN
;
694 case NL80211_DFS_ETSI
:
695 dfs_pd
->sw_dpd_params
.max_pri
= MT_DFS_ETSI_MAX_PRI
;
696 dfs_pd
->sw_dpd_params
.min_pri
= MT_DFS_ETSI_MIN_PRI
;
697 dfs_pd
->sw_dpd_params
.pri_margin
= MT_DFS_PRI_MARGIN
<< 2;
700 dfs_pd
->sw_dpd_params
.max_pri
= MT_DFS_JP_MAX_PRI
;
701 dfs_pd
->sw_dpd_params
.min_pri
= MT_DFS_JP_MIN_PRI
;
702 dfs_pd
->sw_dpd_params
.pri_margin
= MT_DFS_PRI_MARGIN
;
704 case NL80211_DFS_UNSET
:
710 static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev
*dev
)
712 const struct mt76x02_radar_specs
*radar_specs
;
716 switch (dev
->mt76
.chandef
.width
) {
717 case NL80211_CHAN_WIDTH_40
:
718 shift
= MT_DFS_NUM_ENGINES
;
720 case NL80211_CHAN_WIDTH_80
:
721 shift
= 2 * MT_DFS_NUM_ENGINES
;
728 switch (dev
->dfs_pd
.region
) {
729 case NL80211_DFS_FCC
:
730 radar_specs
= &fcc_radar_specs
[shift
];
732 case NL80211_DFS_ETSI
:
733 radar_specs
= &etsi_radar_specs
[shift
];
736 if (dev
->mt76
.chandef
.chan
->center_freq
>= 5250 &&
737 dev
->mt76
.chandef
.chan
->center_freq
<= 5350)
738 radar_specs
= &jp_w53_radar_specs
[shift
];
740 radar_specs
= &jp_w56_radar_specs
[shift
];
742 case NL80211_DFS_UNSET
:
747 data
= (MT_DFS_VGA_MASK
<< 16) |
748 (MT_DFS_PWR_GAIN_OFFSET
<< 12) |
749 (MT_DFS_PWR_DOWN_TIME
<< 8) |
750 (MT_DFS_SYM_ROUND
<< 4) |
751 (MT_DFS_DELTA_DELAY
& 0xf);
752 mt76_wr(dev
, MT_BBP(DFS
, 2), data
);
754 data
= (MT_DFS_RX_PE_MASK
<< 16) | MT_DFS_PKT_END_MASK
;
755 mt76_wr(dev
, MT_BBP(DFS
, 3), data
);
757 for (i
= 0; i
< MT_DFS_NUM_ENGINES
; i
++) {
758 /* configure engine */
759 mt76_wr(dev
, MT_BBP(DFS
, 0), i
);
761 /* detection mode + avg_len */
762 data
= ((radar_specs
[i
].avg_len
& 0x1ff) << 16) |
763 (radar_specs
[i
].mode
& 0xf);
764 mt76_wr(dev
, MT_BBP(DFS
, 4), data
);
767 data
= ((radar_specs
[i
].e_high
& 0x0fff) << 16) |
768 (radar_specs
[i
].e_low
& 0x0fff);
769 mt76_wr(dev
, MT_BBP(DFS
, 5), data
);
772 mt76_wr(dev
, MT_BBP(DFS
, 7), radar_specs
[i
].t_low
);
773 mt76_wr(dev
, MT_BBP(DFS
, 9), radar_specs
[i
].t_high
);
776 mt76_wr(dev
, MT_BBP(DFS
, 11), radar_specs
[i
].b_low
);
777 mt76_wr(dev
, MT_BBP(DFS
, 13), radar_specs
[i
].b_high
);
780 data
= ((radar_specs
[i
].w_high
& 0x0fff) << 16) |
781 (radar_specs
[i
].w_low
& 0x0fff);
782 mt76_wr(dev
, MT_BBP(DFS
, 14), data
);
785 data
= (radar_specs
[i
].w_margin
<< 16) |
786 radar_specs
[i
].t_margin
;
787 mt76_wr(dev
, MT_BBP(DFS
, 15), data
);
789 /* dfs event expiration */
790 mt76_wr(dev
, MT_BBP(DFS
, 17), radar_specs
[i
].event_expiration
);
793 mt76_wr(dev
, MT_BBP(DFS
, 30), radar_specs
[i
].pwr_jmp
);
797 mt76_wr(dev
, MT_BBP(DFS
, 1), 0xf);
798 mt76_wr(dev
, MT_BBP(DFS
, 36), 0x3);
800 /* enable detection*/
801 mt76_wr(dev
, MT_BBP(DFS
, 0), MT_DFS_CH_EN
<< 16);
802 mt76_wr(dev
, MT_BBP(IBI
, 11), 0x0c350001);
805 void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev
*dev
)
807 u32 agc_r8
, agc_r4
, val_r8
, val_r4
, dfs_r31
;
809 agc_r8
= mt76_rr(dev
, MT_BBP(AGC
, 8));
810 agc_r4
= mt76_rr(dev
, MT_BBP(AGC
, 4));
812 val_r8
= (agc_r8
& 0x00007e00) >> 9;
813 val_r4
= agc_r4
& ~0x1f000000;
814 val_r4
+= (((val_r8
+ 1) >> 1) << 24);
815 mt76_wr(dev
, MT_BBP(AGC
, 4), val_r4
);
817 dfs_r31
= FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN
, val_r4
);
819 dfs_r31
-= (agc_r8
& 0x00000038) >> 3;
820 dfs_r31
= (dfs_r31
<< 16) | 0x00000307;
821 mt76_wr(dev
, MT_BBP(DFS
, 31), dfs_r31
);
823 if (is_mt76x2(dev
)) {
824 mt76_wr(dev
, MT_BBP(DFS
, 32), 0x00040071);
826 /* disable hw detector */
827 mt76_wr(dev
, MT_BBP(DFS
, 0), 0);
828 /* enable hw detector */
829 mt76_wr(dev
, MT_BBP(DFS
, 0), MT_DFS_CH_EN
<< 16);
832 EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc
);
834 void mt76x02_dfs_init_params(struct mt76x02_dev
*dev
)
836 struct cfg80211_chan_def
*chandef
= &dev
->mt76
.chandef
;
838 if ((chandef
->chan
->flags
& IEEE80211_CHAN_RADAR
) &&
839 dev
->dfs_pd
.region
!= NL80211_DFS_UNSET
) {
840 mt76x02_dfs_init_sw_detector(dev
);
841 mt76x02_dfs_set_bbp_params(dev
);
842 /* enable debug mode */
843 mt76x02_dfs_set_capture_mode_ctrl(dev
, true);
845 mt76x02_irq_enable(dev
, MT_INT_GPTIMER
);
846 mt76_rmw_field(dev
, MT_INT_TIMER_EN
,
847 MT_INT_TIMER_EN_GP_TIMER_EN
, 1);
849 /* disable hw detector */
850 mt76_wr(dev
, MT_BBP(DFS
, 0), 0);
851 /* clear detector status */
852 mt76_wr(dev
, MT_BBP(DFS
, 1), 0xf);
853 if (mt76_chip(&dev
->mt76
) == 0x7610 ||
854 mt76_chip(&dev
->mt76
) == 0x7630)
855 mt76_wr(dev
, MT_BBP(IBI
, 11), 0xfde8081);
857 mt76_wr(dev
, MT_BBP(IBI
, 11), 0);
859 mt76x02_irq_disable(dev
, MT_INT_GPTIMER
);
860 mt76_rmw_field(dev
, MT_INT_TIMER_EN
,
861 MT_INT_TIMER_EN_GP_TIMER_EN
, 0);
864 EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params
);
866 void mt76x02_dfs_init_detector(struct mt76x02_dev
*dev
)
868 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
870 INIT_LIST_HEAD(&dfs_pd
->sequences
);
871 INIT_LIST_HEAD(&dfs_pd
->seq_pool
);
872 dfs_pd
->region
= NL80211_DFS_UNSET
;
873 dfs_pd
->last_sw_check
= jiffies
;
874 tasklet_init(&dfs_pd
->dfs_tasklet
, mt76x02_dfs_tasklet
,
879 mt76x02_dfs_set_domain(struct mt76x02_dev
*dev
,
880 enum nl80211_dfs_regions region
)
882 struct mt76x02_dfs_pattern_detector
*dfs_pd
= &dev
->dfs_pd
;
884 mutex_lock(&dev
->mt76
.mutex
);
885 if (dfs_pd
->region
!= region
) {
886 tasklet_disable(&dfs_pd
->dfs_tasklet
);
888 dev
->ed_monitor
= dev
->ed_monitor_enabled
&&
889 region
== NL80211_DFS_ETSI
;
890 mt76x02_edcca_init(dev
, true);
892 dfs_pd
->region
= region
;
893 mt76x02_dfs_init_params(dev
);
894 tasklet_enable(&dfs_pd
->dfs_tasklet
);
896 mutex_unlock(&dev
->mt76
.mutex
);
899 void mt76x02_regd_notifier(struct wiphy
*wiphy
,
900 struct regulatory_request
*request
)
902 struct ieee80211_hw
*hw
= wiphy_to_ieee80211_hw(wiphy
);
903 struct mt76x02_dev
*dev
= hw
->priv
;
905 mt76x02_dfs_set_domain(dev
, request
->dfs_region
);