// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
 */

#include "mt76x02.h"

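/*
 * RADAR_SPEC() expands a positional parameter list into a struct
 * mt76x02_radar_specs initializer.  The tables below hold one entry per
 * hardware pulse engine, grouped by channel width (20/40/80 MHz), for
 * each DFS regulatory domain.
 */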
#define RADAR_SPEC(m, len, el, eh, wl, wh,		\
		   w_tolerance, tl, th, t_tolerance,	\
		   bl, bh, event_exp, power_jmp)	\
{							\
	.mode = m,					\
	.avg_len = len,					\
	.e_low = el,					\
	.e_high = eh,					\
	.w_low = wl,					\
	.w_high = wh,					\
	.w_margin = w_tolerance,			\
	.t_low = tl,					\
	.t_high = th,					\
	.t_margin = t_tolerance,			\
	.b_low = bl,					\
	.b_high = bh,					\
	.event_expiration = event_exp,			\
	.pwr_jmp = power_jmp				\
}

static const struct mt76x02_radar_specs etsi_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc)
};

static const struct mt76x02_radar_specs fcc_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0xfe808, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w56_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 }
};

static void
mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable)
{
	u32 data;

	data = (1 << 1) | enable;
	mt76_wr(dev, MT_BBP(DFS, 36), data);
}

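/*
 * The software pattern detector keeps a pool of struct
 * mt76x02_dfs_sequence entries so sequences can be recycled instead of
 * being allocated from atomic context on every radar event.
 */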
static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_sequence *seq)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	list_add(&seq->head, &dfs_pd->seq_pool);

	dfs_pd->seq_stats.seq_pool_len++;
	dfs_pd->seq_stats.seq_len--;
}

static struct mt76x02_dfs_sequence *
mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->seq_pool)) {
		seq = devm_kzalloc(dev->mt76.dev, sizeof(*seq), GFP_ATOMIC);
	} else {
		seq = list_first_entry(&dfs_pd->seq_pool,
				       struct mt76x02_dfs_sequence,
				       head);
		list_del(&seq->head);
		dfs_pd->seq_stats.seq_pool_len--;
	}
	if (seq)
		dfs_pd->seq_stats.seq_len++;

	return seq;
}

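/*
 * Return how many times 'frac' fits into 'val', provided the remainder
 * stays within 'margin'; return 0 when 'val' is not an integer multiple
 * of 'frac' within that tolerance.
 */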
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int remainder, factor;

	if (!frac)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	factor = val / frac;
	remainder = val % frac;

	if (remainder > margin) {
		if ((frac - remainder) <= margin)
			factor++;
		else
			factor = 0;
	}
	return factor;
}

static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	int i;

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

	/* reset sw detector */
	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		dfs_pd->event_rb[i].h_rb = 0;
		dfs_pd->event_rb[i].t_rb = 0;
	}

	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		list_del_init(&seq->head);
		mt76x02_dfs_seq_pool_put(dev, seq);
	}
}

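/*
 * Chirp radar check used by engine 3 in the FCC and JP domains: count
 * consecutive chirp pulses and flag a detection once more than eight
 * pulses arrive without exceeding the 12 second gap threshold.
 */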
static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev)
{
	bool ret = false;
	u32 current_ts, delta_ts;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER);
	delta_ts = current_ts - dfs_pd->chirp_pulse_ts;
	dfs_pd->chirp_pulse_ts = current_ts;

	/* 12 sec */
	if (delta_ts <= (12 * (1 << 20))) {
		if (++dfs_pd->chirp_pulse_cnt > 8)
			ret = true;
	} else {
		dfs_pd->chirp_pulse_cnt = 1;
	}

	return ret;
}

static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_hw_pulse *pulse)
{
	u32 data;

	/* select channel */
	data = (MT_DFS_CH_EN << 16) | pulse->engine;
	mt76_wr(dev, MT_BBP(DFS, 0), data);

	/* reported period */
	pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));

	/* reported width */
	pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
	pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));

	/* reported burst number */
	pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
}

static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_hw_pulse *pulse)
{
	bool ret = false;

	if (!pulse->period || !pulse->w1)
		return false;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 10200) &&
			       pulse->period <= 61600);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       pulse->period <= 61600);
		else
			ret = (pulse->period >= 3500 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_ETSI:
		if (pulse->engine >= 3)
			break;

		ret = (pulse->period >= 4900 &&
		       (pulse->period <= 10200 ||
			pulse->period >= 12400) &&
		       pulse->period <= 100100);
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350) {
			/* JPW53 */
			if (pulse->w1 <= 130)
				ret = (pulse->period >= 28360 &&
				       (pulse->period <= 28700 ||
					pulse->period >= 76900) &&
				       pulse->period <= 76940);
			break;
		}

		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 10100 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else
			ret = (pulse->period >= 3900 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_UNSET:
	default:
		return false;
	}

	return ret;
}

static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	u32 data;

	/* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
	 * 2nd: DFS_R37[21:0]: pulse time
	 * 3rd: DFS_R37[11:0]: pulse width
	 * 3rd: DFS_R37[25:16]: phase
	 * 4th: DFS_R37[12:0]: current pwr
	 * 4th: DFS_R37[21:16]: pwr stable counter
	 *
	 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
	 */
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	if (!MT_DFS_CHECK_EVENT(data))
		return false;

	event->engine = MT_DFS_EVENT_ENGINE(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->ts = MT_DFS_EVENT_TIMESTAMP(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->width = MT_DFS_EVENT_WIDTH(data);

	return true;
}

static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	if (event->engine == 2) {
		struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
		struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1];
		u16 last_event_idx;
		u32 delta_ts;

		last_event_idx = mt76_decr(event_buff->t_rb,
					   MT_DFS_EVENT_BUFLEN);
		delta_ts = event->ts - event_buff->data[last_event_idx].ts;
		if (delta_ts < MT_DFS_EVENT_TIME_MARGIN &&
		    event_buff->data[last_event_idx].width >= 200)
			return false;
	}
	return true;
}

static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;

	/* add radar event to ring buffer */
	event_buff = event->engine == 2 ? &dfs_pd->event_rb[1]
					: &dfs_pd->event_rb[0];
	event_buff->data[event_buff->t_rb] = *event;
	event_buff->data[event_buff->t_rb].fetch_ts = jiffies;

	event_buff->t_rb = mt76_incr(event_buff->t_rb, MT_DFS_EVENT_BUFLEN);
	if (event_buff->t_rb == event_buff->h_rb)
		event_buff->h_rb = mt76_incr(event_buff->h_rb,
					     MT_DFS_EVENT_BUFLEN);
}

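/*
 * Walk the event ring buffer backwards from the newest entry and try to
 * build new pulse sequences ending in 'event', using the region specific
 * width and PRI tolerances.  Sequences longer than 'cur_len' are added to
 * the active sequence list.
 */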
static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_event *event,
				       u16 cur_len)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	u32 width_delta, with_sum, factor, cur_pri;
	struct mt76x02_dfs_sequence seq, *seq_p;
	struct mt76x02_dfs_event_rb *event_rb;
	struct mt76x02_dfs_event *cur_event;
	int i, j, end, pri;

	event_rb = event->engine == 2 ? &dfs_pd->event_rb[1]
				      : &dfs_pd->event_rb[0];

	i = mt76_decr(event_rb->t_rb, MT_DFS_EVENT_BUFLEN);
	end = mt76_decr(event_rb->h_rb, MT_DFS_EVENT_BUFLEN);

	while (i != end) {
		cur_event = &event_rb->data[i];
		with_sum = event->width + cur_event->width;

		sw_params = &dfs_pd->sw_dpd_params;
		switch (dev->mt76.region) {
		case NL80211_DFS_FCC:
		case NL80211_DFS_JP:
			if (with_sum < 600)
				width_delta = 8;
			else
				width_delta = with_sum >> 3;
			break;
		case NL80211_DFS_ETSI:
			if (event->engine == 2)
				width_delta = with_sum >> 6;
			else if (with_sum < 620)
				width_delta = 24;
			else
				width_delta = 8;
			break;
		case NL80211_DFS_UNSET:
		default:
			return -EINVAL;
		}

		pri = event->ts - cur_event->ts;
		if (abs(event->width - cur_event->width) > width_delta ||
		    pri < sw_params->min_pri)
			goto next;

		if (pri > sw_params->max_pri)
			break;

		seq.pri = event->ts - cur_event->ts;
		seq.first_ts = cur_event->ts;
		seq.last_ts = event->ts;
		seq.engine = event->engine;
		seq.count = 2;

		j = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
		while (j != end) {
			cur_event = &event_rb->data[j];
			cur_pri = event->ts - cur_event->ts;
			factor = mt76x02_dfs_get_multiple(cur_pri, seq.pri,
						sw_params->pri_margin);
			if (factor > 0) {
				seq.first_ts = cur_event->ts;
				seq.count++;
			}

			j = mt76_decr(j, MT_DFS_EVENT_BUFLEN);
		}
		if (seq.count <= cur_len)
			goto next;

		seq_p = mt76x02_dfs_seq_pool_get(dev);
		if (!seq_p)
			return -ENOMEM;

		*seq_p = seq;
		INIT_LIST_HEAD(&seq_p->head);
		list_add(&seq_p->head, &dfs_pd->sequences);
next:
		i = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
	}
	return 0;
}

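/*
 * Drop sequences that have fallen outside the detection window, then try
 * to extend the remaining ones with 'event'.  Returns the length of the
 * longest sequence the event was added to, or 0 if none matched.
 */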
static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev,
					     struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	u16 max_seq_len = 0;
	u32 factor, pri;

	sw_params = &dfs_pd->sw_dpd_params;
	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) {
			list_del_init(&seq->head);
			mt76x02_dfs_seq_pool_put(dev, seq);
			continue;
		}

		if (event->engine != seq->engine)
			continue;

		pri = event->ts - seq->last_ts;
		factor = mt76x02_dfs_get_multiple(pri, seq->pri,
						  sw_params->pri_margin);
		if (factor > 0) {
			seq->last_ts = event->ts;
			seq->count++;
			max_seq_len = max_t(u16, max_seq_len, seq->count);
		}
	}
	return max_seq_len;
}

static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->sequences))
		return false;

	list_for_each_entry(seq, &dfs_pd->sequences, head) {
		if (seq->count > MT_DFS_SEQUENCE_TH) {
			dfs_pd->stats[seq->engine].sw_pattern++;
			return true;
		}
	}
	return false;
}

static void mt76x02_dfs_add_events(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event event;
	int i, seq_len;

	/* disable debug mode */
	mt76x02_dfs_set_capture_mode_ctrl(dev, false);
	for (i = 0; i < MT_DFS_EVENT_LOOP; i++) {
		if (!mt76x02_dfs_fetch_event(dev, &event))
			break;

		if (dfs_pd->last_event_ts > event.ts)
			mt76x02_dfs_detector_reset(dev);
		dfs_pd->last_event_ts = event.ts;

		if (!mt76x02_dfs_check_event(dev, &event))
			continue;

		seq_len = mt76x02_dfs_add_event_to_sequence(dev, &event);
		mt76x02_dfs_create_sequence(dev, &event, seq_len);

		mt76x02_dfs_queue_event(dev, &event);
	}
	mt76x02_dfs_set_capture_mode_ctrl(dev, true);
}

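/*
 * Expire events that have been sitting in the ring buffers for longer
 * than MT_DFS_EVENT_WINDOW by advancing the head pointers.
 */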
static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;
	struct mt76x02_dfs_event *event;
	int i;

	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		event_buff = &dfs_pd->event_rb[i];

		while (event_buff->h_rb != event_buff->t_rb) {
			event = &event_buff->data[event_buff->h_rb];

			/* sorted list */
			if (time_is_after_jiffies(event->fetch_ts +
						  MT_DFS_EVENT_WINDOW))
				break;
			event_buff->h_rb = mt76_incr(event_buff->h_rb,
						     MT_DFS_EVENT_BUFLEN);
		}
	}
}

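/*
 * DFS tasklet: periodically run the software pattern detector on queued
 * events, then poll the hardware pulse engines, and report a radar
 * detection to mac80211 whenever either detector matches a pattern.
 */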
static void mt76x02_dfs_tasklet(unsigned long arg)
{
	struct mt76x02_dev *dev = (struct mt76x02_dev *)arg;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	u32 engine_mask;
	int i;

	if (test_bit(MT76_SCANNING, &dev->mt76.state))
		goto out;

	if (time_is_before_jiffies(dfs_pd->last_sw_check +
				   MT_DFS_SW_TIMEOUT)) {
		bool radar_detected;

		dfs_pd->last_sw_check = jiffies;

		mt76x02_dfs_add_events(dev);
		radar_detected = mt76x02_dfs_check_detection(dev);
		if (radar_detected) {
			/* sw detector rx radar pattern */
			ieee80211_radar_detected(dev->mt76.hw);
			mt76x02_dfs_detector_reset(dev);

			return;
		}
		mt76x02_dfs_check_event_window(dev);
	}

	engine_mask = mt76_rr(dev, MT_BBP(DFS, 1));
	if (!(engine_mask & 0xf))
		goto out;

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		struct mt76x02_dfs_hw_pulse pulse;

		if (!(engine_mask & (1 << i)))
			continue;

		pulse.engine = i;
		mt76x02_dfs_get_hw_pulse(dev, &pulse);

		if (!mt76x02_dfs_check_hw_pulse(dev, &pulse)) {
			dfs_pd->stats[i].hw_pulse_discarded++;
			continue;
		}

		/* hw detector rx radar pattern */
		dfs_pd->stats[i].hw_pattern++;
		ieee80211_radar_detected(dev->mt76.hw);
		mt76x02_dfs_detector_reset(dev);

		return;
	}

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

out:
	mt76x02_irq_enable(dev, MT_INT_GPTIMER);
}

static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_ETSI:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2;
		break;
	case NL80211_DFS_JP:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_UNSET:
	default:
		break;
	}
}

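/*
 * Program the per-engine radar detection thresholds for the current
 * regulatory domain and channel width into the baseband DFS registers,
 * then enable hardware detection.
 */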
static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
{
	const struct mt76x02_radar_specs *radar_specs;
	u8 i, shift;
	u32 data;

	switch (dev->mt76.chandef.width) {
	case NL80211_CHAN_WIDTH_40:
		shift = MT_DFS_NUM_ENGINES;
		break;
	case NL80211_CHAN_WIDTH_80:
		shift = 2 * MT_DFS_NUM_ENGINES;
		break;
	default:
		shift = 0;
		break;
	}

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		radar_specs = &fcc_radar_specs[shift];
		break;
	case NL80211_DFS_ETSI:
		radar_specs = &etsi_radar_specs[shift];
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350)
			radar_specs = &jp_w53_radar_specs[shift];
		else
			radar_specs = &jp_w56_radar_specs[shift];
		break;
	case NL80211_DFS_UNSET:
	default:
		return;
	}

	data = (MT_DFS_VGA_MASK << 16) |
	       (MT_DFS_PWR_GAIN_OFFSET << 12) |
	       (MT_DFS_PWR_DOWN_TIME << 8) |
	       (MT_DFS_SYM_ROUND << 4) |
	       (MT_DFS_DELTA_DELAY & 0xf);
	mt76_wr(dev, MT_BBP(DFS, 2), data);

	data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
	mt76_wr(dev, MT_BBP(DFS, 3), data);

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		/* configure engine */
		mt76_wr(dev, MT_BBP(DFS, 0), i);

		/* detection mode + avg_len */
		data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
		       (radar_specs[i].mode & 0xf);
		mt76_wr(dev, MT_BBP(DFS, 4), data);

		/* dfs energy */
		data = ((radar_specs[i].e_high & 0x0fff) << 16) |
		       (radar_specs[i].e_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 5), data);

		/* dfs period */
		mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
		mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);

		/* dfs burst */
		mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
		mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);

		/* dfs width */
		data = ((radar_specs[i].w_high & 0x0fff) << 16) |
		       (radar_specs[i].w_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 14), data);

		/* dfs margins */
		data = (radar_specs[i].w_margin << 16) |
		       radar_specs[i].t_margin;
		mt76_wr(dev, MT_BBP(DFS, 15), data);

		/* dfs event expiration */
		mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);

		/* dfs pwr adj */
		mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
	}

	/* reset status */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
	mt76_wr(dev, MT_BBP(DFS, 36), 0x3);

	/* enable detection */
	mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
}

void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
{
	u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;

	agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
	agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));

	val_r8 = (agc_r8 & 0x00007e00) >> 9;
	val_r4 = agc_r4 & ~0x1f000000;
	val_r4 += (((val_r8 + 1) >> 1) << 24);
	mt76_wr(dev, MT_BBP(AGC, 4), val_r4);

	dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
	dfs_r31 += val_r8;
	dfs_r31 -= (agc_r8 & 0x00000038) >> 3;
	dfs_r31 = (dfs_r31 << 16) | 0x00000307;
	mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);

	if (is_mt76x2(dev)) {
		mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* enable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);

void mt76x02_dfs_init_params(struct mt76x02_dev *dev)
{
	struct cfg80211_chan_def *chandef = &dev->mt76.chandef;

	if ((chandef->chan->flags & IEEE80211_CHAN_RADAR) &&
	    dev->mt76.region != NL80211_DFS_UNSET) {
		mt76x02_dfs_init_sw_detector(dev);
		mt76x02_dfs_set_bbp_params(dev);
		/* enable debug mode */
		mt76x02_dfs_set_capture_mode_ctrl(dev, true);

		mt76x02_irq_enable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 1);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* clear detector status */
		mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
		if (mt76_chip(&dev->mt76) == 0x7610 ||
		    mt76_chip(&dev->mt76) == 0x7630)
			mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081);
		else
			mt76_wr(dev, MT_BBP(IBI, 11), 0);

		mt76x02_irq_disable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 0);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params);

void mt76x02_dfs_init_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	INIT_LIST_HEAD(&dfs_pd->sequences);
	INIT_LIST_HEAD(&dfs_pd->seq_pool);
	dev->mt76.region = NL80211_DFS_UNSET;
	dfs_pd->last_sw_check = jiffies;
	tasklet_init(&dfs_pd->dfs_tasklet, mt76x02_dfs_tasklet,
		     (unsigned long)dev);
}

static void
mt76x02_dfs_set_domain(struct mt76x02_dev *dev,
		       enum nl80211_dfs_regions region)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	mutex_lock(&dev->mt76.mutex);
	if (dev->mt76.region != region) {
		tasklet_disable(&dfs_pd->dfs_tasklet);

		dev->ed_monitor = dev->ed_monitor_enabled &&
				  region == NL80211_DFS_ETSI;
		mt76x02_edcca_init(dev);

		dev->mt76.region = region;
		mt76x02_dfs_init_params(dev);
		tasklet_enable(&dfs_pd->dfs_tasklet);
	}
	mutex_unlock(&dev->mt76.mutex);
}

void mt76x02_regd_notifier(struct wiphy *wiphy,
			   struct regulatory_request *request)
{
	struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy);
	struct mt76x02_dev *dev = hw->priv;

	mt76x02_dfs_set_domain(dev, request->dfs_region);
}