// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
 */

#include "mt76x02.h"

#define RADAR_SPEC(m, len, el, eh, wl, wh,		\
		   w_tolerance, tl, th, t_tolerance,	\
		   bl, bh, event_exp, power_jmp)	\
{							\
	.mode = m,					\
	.avg_len = len,					\
	.e_low = el,					\
	.e_high = eh,					\
	.w_low = wl,					\
	.w_high = wh,					\
	.w_margin = w_tolerance,			\
	.t_low = tl,					\
	.t_high = th,					\
	.t_margin = t_tolerance,			\
	.b_low = bl,					\
	.b_high = bh,					\
	.event_expiration = event_exp,			\
	.pwr_jmp = power_jmp				\
}

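/*
 * Each regulatory table below carries three groups of MT_DFS_NUM_ENGINES
 * entries, selected by mt76x02_dfs_set_bbp_params() for 20, 40 and 80 MHz
 * channels respectively. The numeric thresholds are vendor tuning values
 * and are not derived anywhere in this file.
 */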
static const struct mt76x02_radar_specs etsi_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc)
};

static const struct mt76x02_radar_specs fcc_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0xfe808, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w56_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
};

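/*
 * Toggle event capture through BBP DFS register 36: bit 1 is always set,
 * while bit 0 presumably gates the event FIFO that is drained by
 * mt76x02_dfs_fetch_event().
 */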
static void
mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable)
{
	u32 data;

	data = (1 << 1) | enable;
	mt76_wr(dev, MT_BBP(DFS, 36), data);
}

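/*
 * Software detector bookkeeping: active pulse sequences live on
 * dfs_pd->sequences, while retired sequence objects are recycled through
 * dfs_pd->seq_pool so the detection tasklet avoids allocating in the
 * common case.
 */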
static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_sequence *seq)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	list_add(&seq->head, &dfs_pd->seq_pool);

	dfs_pd->seq_stats.seq_pool_len++;
	dfs_pd->seq_stats.seq_len--;
}

static struct mt76x02_dfs_sequence *
mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->seq_pool)) {
		seq = devm_kzalloc(dev->mt76.dev, sizeof(*seq), GFP_ATOMIC);
	} else {
		seq = list_first_entry(&dfs_pd->seq_pool,
				       struct mt76x02_dfs_sequence,
				       head);
		list_del(&seq->head);
		dfs_pd->seq_stats.seq_pool_len--;
	}
	if (seq)
		dfs_pd->seq_stats.seq_len++;

	return seq;
}

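/*
 * Return how many times 'frac' fits into 'val' within +/- 'margin':
 * 0 when there is no clean integer multiple, 1 when the two values match
 * within the margin. Used to check whether a pulse interval is a multiple
 * of an existing sequence PRI.
 */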
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int remainder, factor;

	if (!frac || !val)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	factor = val / frac;
	remainder = val % frac;

	if (remainder > margin) {
		if ((frac - remainder) <= margin)
			factor++;
		else
			factor = 0;
	}

	return factor;
}

static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	int i;

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

	/* reset sw detector */
	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		dfs_pd->event_rb[i].h_rb = 0;
		dfs_pd->event_rb[i].t_rb = 0;
	}

	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		list_del_init(&seq->head);
		mt76x02_dfs_seq_pool_put(dev, seq);
	}
}

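/*
 * Long-pulse (chirp) radar check: chirp events arriving within roughly
 * 12 s of each other (12 * 2^20 ticks of the PBF life timer) are counted,
 * and the pattern is reported once more than eight have accumulated.
 */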
static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev)
{
	bool ret = false;
	u32 current_ts, delta_ts;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER);
	delta_ts = current_ts - dfs_pd->chirp_pulse_ts;
	dfs_pd->chirp_pulse_ts = current_ts;

	/* 12 sec */
	if (delta_ts <= (12 * (1 << 20))) {
		if (++dfs_pd->chirp_pulse_cnt > 8)
			ret = true;
	} else {
		dfs_pd->chirp_pulse_cnt = 1;
	}

	return ret;
}

static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_hw_pulse *pulse)
{
	u32 data;

	/* select channel */
	data = (MT_DFS_CH_EN << 16) | pulse->engine;
	mt76_wr(dev, MT_BBP(DFS, 0), data);

	/* reported period */
	pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));

	/* reported width */
	pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
	pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));

	/* reported burst number */
	pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
}

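/*
 * Validate a pulse reported by a hardware engine against the period/width
 * ranges allowed in the current regulatory region; out-of-range pulses are
 * treated as false detections and discarded by the caller.
 */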
static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_hw_pulse *pulse)
{
	bool ret = false;

	if (!pulse->period || !pulse->w1)
		return false;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 10200) &&
			       pulse->period <= 61600);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       pulse->period <= 61600);
		else
			ret = (pulse->period >= 3500 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_ETSI:
		if (pulse->engine >= 3)
			break;

		ret = (pulse->period >= 4900 &&
		       (pulse->period <= 10200 ||
			pulse->period >= 12400) &&
		       pulse->period <= 100100);
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350) {
			/* JPW53 */
			if (pulse->w1 <= 130)
				ret = (pulse->period >= 28360 &&
				       (pulse->period <= 28700 ||
					pulse->period >= 76900) &&
				       pulse->period <= 76940);
			break;
		}

		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 10100 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else
			ret = (pulse->period >= 3900 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_UNSET:
	default:
		return false;
	}

	return ret;
}

static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	u32 data;

	/* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
	 * 2nd: DFS_R37[21:0]: pulse time
	 * 3rd: DFS_R37[11:0]: pulse width
	 * 3rd: DFS_R37[25:16]: phase
	 * 4th: DFS_R37[12:0]: current pwr
	 * 4th: DFS_R37[21:16]: pwr stable counter
	 *
	 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
	 */
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	if (!MT_DFS_CHECK_EVENT(data))
		return false;

	event->engine = MT_DFS_EVENT_ENGINE(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->ts = MT_DFS_EVENT_TIMESTAMP(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->width = MT_DFS_EVENT_WIDTH(data);

	return true;
}

static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	if (event->engine == 2) {
		struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
		struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1];
		u16 last_event_idx;
		u32 delta_ts;

		last_event_idx = mt76_decr(event_buff->t_rb,
					   MT_DFS_EVENT_BUFLEN);
		delta_ts = event->ts - event_buff->data[last_event_idx].ts;
		if (delta_ts < MT_DFS_EVENT_TIME_MARGIN &&
		    event_buff->data[last_event_idx].width >= 200)
			return false;
	}
	return true;
}

static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;

	/* add radar event to ring buffer */
	event_buff = event->engine == 2 ? &dfs_pd->event_rb[1]
					: &dfs_pd->event_rb[0];
	event_buff->data[event_buff->t_rb] = *event;
	event_buff->data[event_buff->t_rb].fetch_ts = jiffies;

	event_buff->t_rb = mt76_incr(event_buff->t_rb, MT_DFS_EVENT_BUFLEN);
	if (event_buff->t_rb == event_buff->h_rb)
		event_buff->h_rb = mt76_incr(event_buff->h_rb,
					     MT_DFS_EVENT_BUFLEN);
}

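/*
 * Walk the per-engine event ring buffer backwards from the newest entry and
 * try to start a new sequence ending at 'event': the gap to an older event
 * provides a candidate PRI, and earlier events whose distance from 'event'
 * is a multiple of that PRI extend the sequence. Only sequences longer than
 * 'cur_len' (the best match found so far) are stored.
 */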
static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_event *event,
				       u16 cur_len)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	u32 width_delta, with_sum, factor, cur_pri;
	struct mt76x02_dfs_sequence seq, *seq_p;
	struct mt76x02_dfs_event_rb *event_rb;
	struct mt76x02_dfs_event *cur_event;
	int i, j, end, pri;

	event_rb = event->engine == 2 ? &dfs_pd->event_rb[1]
				      : &dfs_pd->event_rb[0];

	i = mt76_decr(event_rb->t_rb, MT_DFS_EVENT_BUFLEN);
	end = mt76_decr(event_rb->h_rb, MT_DFS_EVENT_BUFLEN);

	while (i != end) {
		cur_event = &event_rb->data[i];
		with_sum = event->width + cur_event->width;

		sw_params = &dfs_pd->sw_dpd_params;
		switch (dev->mt76.region) {
		case NL80211_DFS_FCC:
		case NL80211_DFS_JP:
			if (with_sum < 600)
				width_delta = 8;
			else
				width_delta = with_sum >> 3;
			break;
		case NL80211_DFS_ETSI:
			if (event->engine == 2)
				width_delta = with_sum >> 6;
			else if (with_sum < 620)
				width_delta = 24;
			else
				width_delta = 8;
			break;
		case NL80211_DFS_UNSET:
		default:
			return -EINVAL;
		}

		pri = event->ts - cur_event->ts;
		if (abs(event->width - cur_event->width) > width_delta ||
		    pri < sw_params->min_pri)
			goto next;

		if (pri > sw_params->max_pri)
			break;

		seq.pri = event->ts - cur_event->ts;
		seq.first_ts = cur_event->ts;
		seq.last_ts = event->ts;
		seq.engine = event->engine;
		seq.count = 2;

		j = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
		while (j != end) {
			cur_event = &event_rb->data[j];
			cur_pri = event->ts - cur_event->ts;
			factor = mt76x02_dfs_get_multiple(cur_pri, seq.pri,
						sw_params->pri_margin);
			if (factor > 0) {
				seq.first_ts = cur_event->ts;
				seq.count++;
			}

			j = mt76_decr(j, MT_DFS_EVENT_BUFLEN);
		}
		if (seq.count <= cur_len)
			goto next;

		seq_p = mt76x02_dfs_seq_pool_get(dev);
		if (!seq_p)
			return -ENOMEM;

		*seq_p = seq;
		INIT_LIST_HEAD(&seq_p->head);
		list_add(&seq_p->head, &dfs_pd->sequences);
next:
		i = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
	}

	return 0;
}

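/*
 * Try to extend the existing sequences with a new event: sequences whose
 * window (MT_DFS_SEQUENCE_WINDOW from their first pulse) has expired are
 * recycled, and an event is appended when its distance from the sequence
 * tail is a multiple of the sequence PRI within pri_margin. Returns the
 * longest matching sequence length.
 */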
static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev,
					     struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	u16 max_seq_len = 0;
	u32 factor, pri;

	sw_params = &dfs_pd->sw_dpd_params;
	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) {
			list_del_init(&seq->head);
			mt76x02_dfs_seq_pool_put(dev, seq);
			continue;
		}

		if (event->engine != seq->engine)
			continue;

		pri = event->ts - seq->last_ts;
		factor = mt76x02_dfs_get_multiple(pri, seq->pri,
						  sw_params->pri_margin);
		if (factor > 0) {
			seq->last_ts = event->ts;
			seq->count++;
			max_seq_len = max_t(u16, max_seq_len, seq->count);
		}
	}

	return max_seq_len;
}

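/*
 * A software radar pattern is reported once any tracked sequence grows
 * beyond MT_DFS_SEQUENCE_TH pulses.
 */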
static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->sequences))
		return false;

	list_for_each_entry(seq, &dfs_pd->sequences, head) {
		if (seq->count > MT_DFS_SEQUENCE_TH) {
			dfs_pd->stats[seq->engine].sw_pattern++;
			return true;
		}
	}
	return false;
}

static void mt76x02_dfs_add_events(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event event;
	int i, seq_len;

	/* disable debug mode */
	mt76x02_dfs_set_capture_mode_ctrl(dev, false);
	for (i = 0; i < MT_DFS_EVENT_LOOP; i++) {
		if (!mt76x02_dfs_fetch_event(dev, &event))
			break;

		if (dfs_pd->last_event_ts > event.ts)
			mt76x02_dfs_detector_reset(dev);
		dfs_pd->last_event_ts = event.ts;

		if (!mt76x02_dfs_check_event(dev, &event))
			continue;

		seq_len = mt76x02_dfs_add_event_to_sequence(dev, &event);
		mt76x02_dfs_create_sequence(dev, &event, seq_len);

		mt76x02_dfs_queue_event(dev, &event);
	}
	mt76x02_dfs_set_capture_mode_ctrl(dev, true);
}

static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;
	struct mt76x02_dfs_event *event;
	int i;

	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		event_buff = &dfs_pd->event_rb[i];

		while (event_buff->h_rb != event_buff->t_rb) {
			event = &event_buff->data[event_buff->h_rb];

			/* sorted list */
			if (time_is_after_jiffies(event->fetch_ts +
						  MT_DFS_EVENT_WINDOW))
				break;
			event_buff->h_rb = mt76_incr(event_buff->h_rb,
						     MT_DFS_EVENT_BUFLEN);
		}
	}
}

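/*
 * GP timer tasklet: run the software pattern detector on the queued events
 * (at most once every MT_DFS_SW_TIMEOUT), then poll the hardware engines for
 * pending pulses. Either detector reports radar via
 * ieee80211_radar_detected() and resets the detector state; when nothing is
 * detected the GP timer interrupt is re-armed before returning.
 */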
static void mt76x02_dfs_tasklet(unsigned long arg)
{
	struct mt76x02_dev *dev = (struct mt76x02_dev *)arg;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	u32 engine_mask;
	int i;

	if (test_bit(MT76_SCANNING, &dev->mt76.state))
		goto out;

	if (time_is_before_jiffies(dfs_pd->last_sw_check +
				   MT_DFS_SW_TIMEOUT)) {
		bool radar_detected;

		dfs_pd->last_sw_check = jiffies;

		mt76x02_dfs_add_events(dev);
		radar_detected = mt76x02_dfs_check_detection(dev);
		if (radar_detected) {
			/* sw detector rx radar pattern */
			ieee80211_radar_detected(dev->mt76.hw);
			mt76x02_dfs_detector_reset(dev);

			return;
		}
		mt76x02_dfs_check_event_window(dev);
	}

	engine_mask = mt76_rr(dev, MT_BBP(DFS, 1));
	if (!(engine_mask & 0xf))
		goto out;

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		struct mt76x02_dfs_hw_pulse pulse;

		if (!(engine_mask & (1 << i)))
			continue;

		pulse.engine = i;
		mt76x02_dfs_get_hw_pulse(dev, &pulse);

		if (!mt76x02_dfs_check_hw_pulse(dev, &pulse)) {
			dfs_pd->stats[i].hw_pulse_discarded++;
			continue;
		}

		/* hw detector rx radar pattern */
		dfs_pd->stats[i].hw_pattern++;
		ieee80211_radar_detected(dev->mt76.hw);
		mt76x02_dfs_detector_reset(dev);

		return;
	}

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

out:
	mt76x02_irq_enable(dev, MT_INT_GPTIMER);
}

static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_ETSI:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2;
		break;
	case NL80211_DFS_JP:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_UNSET:
	default:
		break;
	}
}

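/*
 * Program the hardware detection engines for the current region and channel
 * width: 'shift' selects the 20/40/80 MHz block of entries in the region's
 * spec table, and each engine is then loaded with its mode, energy, period,
 * burst, width and margin thresholds.
 */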
static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
{
	const struct mt76x02_radar_specs *radar_specs;
	u8 i, shift;
	u32 data;

	switch (dev->mt76.chandef.width) {
	case NL80211_CHAN_WIDTH_40:
		shift = MT_DFS_NUM_ENGINES;
		break;
	case NL80211_CHAN_WIDTH_80:
		shift = 2 * MT_DFS_NUM_ENGINES;
		break;
	default:
		shift = 0;
		break;
	}

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		radar_specs = &fcc_radar_specs[shift];
		break;
	case NL80211_DFS_ETSI:
		radar_specs = &etsi_radar_specs[shift];
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350)
			radar_specs = &jp_w53_radar_specs[shift];
		else
			radar_specs = &jp_w56_radar_specs[shift];
		break;
	case NL80211_DFS_UNSET:
	default:
		return;
	}

	data = (MT_DFS_VGA_MASK << 16) |
	       (MT_DFS_PWR_GAIN_OFFSET << 12) |
	       (MT_DFS_PWR_DOWN_TIME << 8) |
	       (MT_DFS_SYM_ROUND << 4) |
	       (MT_DFS_DELTA_DELAY & 0xf);
	mt76_wr(dev, MT_BBP(DFS, 2), data);

	data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
	mt76_wr(dev, MT_BBP(DFS, 3), data);

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		/* configure engine */
		mt76_wr(dev, MT_BBP(DFS, 0), i);

		/* detection mode + avg_len */
		data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
		       (radar_specs[i].mode & 0xf);
		mt76_wr(dev, MT_BBP(DFS, 4), data);

		/* dfs energy */
		data = ((radar_specs[i].e_high & 0x0fff) << 16) |
		       (radar_specs[i].e_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 5), data);

		/* dfs period */
		mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
		mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);

		/* dfs burst */
		mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
		mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);

		/* dfs width */
		data = ((radar_specs[i].w_high & 0x0fff) << 16) |
		       (radar_specs[i].w_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 14), data);

		/* dfs margins */
		data = (radar_specs[i].w_margin << 16) |
		       radar_specs[i].t_margin;
		mt76_wr(dev, MT_BBP(DFS, 15), data);

		/* dfs event expiration */
		mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);

		/* dfs pwr adj */
		mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
	}

	/* reset status */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
	mt76_wr(dev, MT_BBP(DFS, 36), 0x3);

	/* enable detection */
	mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
}

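/*
 * Re-derive the DFS gain word (DFS R31) from the current AGC configuration
 * so detection keeps tracking gain changes; the AGC R4/R8 bit fields used
 * here appear to follow the vendor calibration layout.
 */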
void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
{
	u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;

	agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
	agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));

	val_r8 = (agc_r8 & 0x00007e00) >> 9;
	val_r4 = agc_r4 & ~0x1f000000;
	val_r4 += (((val_r8 + 1) >> 1) << 24);
	mt76_wr(dev, MT_BBP(AGC, 4), val_r4);

	dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
	dfs_r31 += val_r8;
	dfs_r31 -= (agc_r8 & 0x00000038) >> 3;
	dfs_r31 = (dfs_r31 << 16) | 0x00000307;
	mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);

	if (is_mt76x2(dev)) {
		mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* enable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);

void mt76x02_dfs_init_params(struct mt76x02_dev *dev)
{
	struct cfg80211_chan_def *chandef = &dev->mt76.chandef;

	if ((chandef->chan->flags & IEEE80211_CHAN_RADAR) &&
	    dev->mt76.region != NL80211_DFS_UNSET) {
		mt76x02_dfs_init_sw_detector(dev);
		mt76x02_dfs_set_bbp_params(dev);
		/* enable debug mode */
		mt76x02_dfs_set_capture_mode_ctrl(dev, true);

		mt76x02_irq_enable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 1);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* clear detector status */
		mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
		if (mt76_chip(&dev->mt76) == 0x7610 ||
		    mt76_chip(&dev->mt76) == 0x7630)
			mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081);
		else
			mt76_wr(dev, MT_BBP(IBI, 11), 0);

		mt76x02_irq_disable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 0);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params);

void mt76x02_dfs_init_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	INIT_LIST_HEAD(&dfs_pd->sequences);
	INIT_LIST_HEAD(&dfs_pd->seq_pool);
	dev->mt76.region = NL80211_DFS_UNSET;
	dfs_pd->last_sw_check = jiffies;
	tasklet_init(&dfs_pd->dfs_tasklet, mt76x02_dfs_tasklet,
		     (unsigned long)dev);
}

static void
mt76x02_dfs_set_domain(struct mt76x02_dev *dev,
		       enum nl80211_dfs_regions region)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	mutex_lock(&dev->mt76.mutex);
	if (dev->mt76.region != region) {
		tasklet_disable(&dfs_pd->dfs_tasklet);

		dev->ed_monitor = dev->ed_monitor_enabled &&
				  region == NL80211_DFS_ETSI;
		mt76x02_edcca_init(dev);

		dev->mt76.region = region;
		mt76x02_dfs_init_params(dev);
		tasklet_enable(&dfs_pd->dfs_tasklet);
	}
	mutex_unlock(&dev->mt76.mutex);
}

void mt76x02_regd_notifier(struct wiphy *wiphy,
			   struct regulatory_request *request)
{
	struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy);
	struct mt76x02_dev *dev = hw->priv;

	mt76x02_dfs_set_domain(dev, request->dfs_region);
}
EXPORT_SYMBOL_GPL(mt76x02_regd_notifier);