1// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2//
3// This file is provided under a dual BSD/GPLv2 license. When using or
4// redistributing this file, you may do so under either license.
5//
6// Copyright(c) 2018 Intel Corporation
7//
8// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9// Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10// Rander Wang <rander.wang@intel.com>
11// Keyon Jie <yang.jie@linux.intel.com>
12//
13
14/*
15 * Hardware interface for generic Intel audio DSP HDA IP
16 */
17
18#include <sound/hdaudio_ext.h>
19#include <sound/hda_register.h>
20#include <sound/sof.h>
21#include <trace/events/sof_intel.h>
22#include "../ops.h"
23#include "../sof-audio.h"
24#include "../ipc4-priv.h"
25#include "hda.h"
26
27int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS;
28module_param_named(position_quirk, sof_hda_position_quirk, int, 0444);
29MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk");
30EXPORT_SYMBOL_NS(sof_hda_position_quirk, "SND_SOC_SOF_INTEL_HDA_COMMON");
31
32#define HDA_LTRP_GB_VALUE_US 95
33
34static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
35{
36 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
37 return "Playback";
38 else
39 return "Capture";
40}
41
42static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
43{
44 struct snd_soc_pcm_runtime *rtd;
45
46 if (hstream->substream)
47 rtd = snd_soc_substream_to_rtd(hstream->substream);
48 else if (hstream->cstream)
49 rtd = hstream->cstream->private_data;
50 else
51 /* Non audio DMA user, like dma-trace */
52 return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
53 hda_hstream_direction_str(hstream),
54 hstream->stream_tag);
55
56 return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
57 rtd->dai_link->name, hda_hstream_direction_str(hstream),
58 hstream->stream_tag);
59}
60
61/*
62 * set up BDL entries for a stream
63 */
64static int hda_setup_bdle(struct snd_sof_dev *sdev,
65 struct snd_dma_buffer *dmab,
66 struct hdac_stream *hstream,
67 struct sof_intel_dsp_bdl **bdlp,
68 int offset, int size, int ioc)
69{
70 struct hdac_bus *bus = sof_to_bus(sdev);
71 struct sof_intel_dsp_bdl *bdl = *bdlp;
72
73 while (size > 0) {
74 dma_addr_t addr;
75 int chunk;
76
77 if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
78 dev_err(sdev->dev, "error: stream frags exceeded\n");
79 return -EINVAL;
80 }
81
82 addr = snd_sgbuf_get_addr(dmab, offset);
83 /* program BDL addr */
84 bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
85 bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
86 /* program BDL size */
87 chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
88 /* one BDLE should not cross 4K boundary */
89 if (bus->align_bdle_4k) {
90 u32 remain = 0x1000 - (offset & 0xfff);
91
92 if (chunk > remain)
93 chunk = remain;
94 }
95 bdl->size = cpu_to_le32(chunk);
96 /* only program IOC when the whole segment is processed */
97 size -= chunk;
98 bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
99 bdl++;
100 hstream->frags++;
101 offset += chunk;
102 }
103
104 *bdlp = bdl;
105 return offset;
106}
107
108/*
109 * set up Buffer Descriptor List (BDL) for host memory transfer
110 * BDL describes the location of the individual buffers and is little endian.
111 */
112int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
113 struct snd_dma_buffer *dmab,
114 struct hdac_stream *hstream)
115{
116 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
117 struct sof_intel_dsp_bdl *bdl;
118 int i, offset, period_bytes, periods;
119 int remain, ioc;
120
121 period_bytes = hstream->period_bytes;
122 dev_dbg(sdev->dev, "period_bytes: %#x, bufsize: %#x\n", period_bytes,
123 hstream->bufsize);
124
125 if (!period_bytes) {
126 unsigned int chunk_size;
127
128 chunk_size = snd_sgbuf_get_chunk_size(dmab, 0, hstream->bufsize);
129
130 period_bytes = hstream->bufsize;
131
132 /*
133 * The HDA spec requires the LVI value to be at least one
134 * before the DMA operation can begin. This means that at
135 * least two BDLEs must be present for the transfer.
136 *
137 * If the buffer is not a single continuous area,
138 * hda_setup_bdle() creates a BDLE for each segment and the
139 * requirement is met naturally. If the memory is a single
140 * continuous area, force it to be split into two 'periods'
141 * so that hda_setup_bdle() programs at least two BDLEs.
142 *
143 * Note: period_bytes == 0 can only happen for firmware or
144 * library loading. The data size is 4K aligned, which ensures
145 * that the second chunk's start address will be 128-byte
146 * aligned. A worked example follows this function.
147 */
148 if (chunk_size == hstream->bufsize)
149 period_bytes /= 2;
150 }
151
152 periods = hstream->bufsize / period_bytes;
153
154 dev_dbg(sdev->dev, "periods: %d\n", periods);
155
156 remain = hstream->bufsize % period_bytes;
157 if (remain)
158 periods++;
159
160 /* program the initial BDL entries */
161 bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
162 offset = 0;
163 hstream->frags = 0;
164
165 /*
166 * set IOC if position IPC is not used
167 * and period wakeup is needed.
168 */
169 ioc = hda->no_ipc_position ?
170 !hstream->no_period_wakeup : 0;
171
172 for (i = 0; i < periods; i++) {
173 if (i == (periods - 1) && remain)
174 /* set the last small entry */
175 offset = hda_setup_bdle(sdev, dmab,
176 hstream, &bdl, offset,
177 remain, 0);
178 else
179 offset = hda_setup_bdle(sdev, dmab,
180 hstream, &bdl, offset,
181 period_bytes, ioc);
182 }
183
184 return offset;
185}
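/*
 * Worked example for the period_bytes == 0 path above (illustrative only,
 * assuming bus->align_bdle_4k is not set): a 64 KiB physically contiguous
 * firmware image yields chunk_size == bufsize, so period_bytes is forced to
 * 32 KiB. hda_setup_bdle() then programs two 32 KiB BDLEs, hstream->frags
 * ends up as 2, and the LVI programmed later (frags - 1) is 1, which meets
 * the "at least one" requirement described in the comment above.
 */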
186
187int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
188 struct hdac_ext_stream *hext_stream,
189 int enable, u32 size)
190{
191 struct hdac_stream *hstream = &hext_stream->hstream;
192 u32 mask;
193
194 if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
195 dev_err(sdev->dev, "error: address of spib capability is NULL\n");
196 return -EINVAL;
197 }
198
199 mask = (1 << hstream->index);
200
201 /* enable/disable SPIB for the stream */
202 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
203 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
204 enable << hstream->index);
205
206 /* set the SPIB value */
207 sof_io_write(sdev, hstream->spib_addr, size);
208
209 return 0;
210}
211
212/* get next unused stream */
213struct hdac_ext_stream *
214hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
215{
216 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
217 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
218 struct hdac_bus *bus = sof_to_bus(sdev);
219 struct sof_intel_hda_stream *hda_stream;
220 struct hdac_ext_stream *hext_stream = NULL;
221 struct hdac_stream *s;
222
223 spin_lock_irq(&bus->reg_lock);
224
225 /* get an unused stream */
226 list_for_each_entry(s, &bus->stream_list, list) {
227 if (s->direction == direction && !s->opened) {
228 hext_stream = stream_to_hdac_ext_stream(s);
229 hda_stream = container_of(hext_stream,
230 struct sof_intel_hda_stream,
231 hext_stream);
232 /* check if the host DMA channel is reserved */
233 if (hda_stream->host_reserved)
234 continue;
235
236 s->opened = true;
237 break;
238 }
239 }
240
241 spin_unlock_irq(&bus->reg_lock);
242
243 /* stream found ? */
244 if (!hext_stream) {
245 dev_err(sdev->dev, "error: no free %s streams\n", snd_pcm_direction_name(direction));
246 return hext_stream;
247 }
248
249 hda_stream->flags = flags;
250
251 /*
252 * Prevent DMI Link L1 entry for streams that don't support it.
253 * Workaround to address a known issue with host DMA that results
254 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
255 */
256 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
257 !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
258 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
259 HDA_VS_INTEL_EM2,
260 HDA_VS_INTEL_EM2_L1SEN, 0);
261 hda->l1_disabled = true;
262 }
263
264 return hext_stream;
265}
266
267/* free a stream */
268int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
269{
270 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
271 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
272 struct hdac_bus *bus = sof_to_bus(sdev);
273 struct sof_intel_hda_stream *hda_stream;
274 struct hdac_ext_stream *hext_stream;
275 struct hdac_stream *s;
276 bool dmi_l1_enable = true;
277 bool found = false;
278
279 spin_lock_irq(&bus->reg_lock);
280
281 /*
282 * close stream matching the stream tag and check if there are any open streams
283 * that are DMI L1 incompatible.
284 */
285 list_for_each_entry(s, &bus->stream_list, list) {
286 hext_stream = stream_to_hdac_ext_stream(s);
287 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);
288
289 if (!s->opened)
290 continue;
291
292 if (s->direction == direction && s->stream_tag == stream_tag) {
293 s->opened = false;
294 found = true;
295 } else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
296 dmi_l1_enable = false;
297 }
298 }
299
300 spin_unlock_irq(&bus->reg_lock);
301
302 /* Enable DMI L1 if permitted */
303 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
304 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
305 HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
306 hda->l1_disabled = false;
307 }
308
309 if (!found) {
310 dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
311 __func__, stream_tag);
312 return -ENODEV;
313 }
314
315 return 0;
316}
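/*
 * Illustrative usage sketch (not part of this file's call flow; the caller's
 * error handling and locking are omitted): a host DMA channel is typically
 * claimed with hda_dsp_stream_get() before hw_params and later released by
 * its stream tag:
 *
 *	hext_stream = hda_dsp_stream_get(sdev, SNDRV_PCM_STREAM_PLAYBACK,
 *					 SOF_HDA_STREAM_DMI_L1_COMPATIBLE);
 *	if (!hext_stream)
 *		return -ENODEV;
 *	...
 *	hda_dsp_stream_put(sdev, SNDRV_PCM_STREAM_PLAYBACK,
 *			   hext_stream->hstream.stream_tag);
 *
 * Passing SOF_HDA_STREAM_DMI_L1_COMPATIBLE keeps DMI L1 enabled; omitting it
 * makes hda_dsp_stream_get() disable L1 on pre-ACE hardware.
 */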
317
318static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
319{
320 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
321 int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
322 u32 val;
323
324 /* enter stream reset */
325 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
326 SOF_STREAM_SD_OFFSET_CRST);
327 do {
328 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
329 if (val & SOF_STREAM_SD_OFFSET_CRST)
330 break;
331 } while (--timeout);
332 if (timeout == 0) {
333 dev_err(sdev->dev, "timeout waiting for stream reset\n");
334 return -ETIMEDOUT;
335 }
336
337 timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
338
339 /* exit stream reset and wait to read a zero before reading any other register */
340 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);
341
342 /* wait for hardware to report that stream is out of reset */
343 udelay(3);
344 do {
345 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
346 if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
347 break;
348 } while (--timeout);
349 if (timeout == 0) {
350 dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
351 return -ETIMEDOUT;
352 }
353
354 return 0;
355}
356
357int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
358 struct hdac_ext_stream *hext_stream, int cmd)
359{
360 struct hdac_stream *hstream = &hext_stream->hstream;
361 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
362 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
363 int ret = 0;
364 u32 run;
365
366 /* cmd must be for audio stream */
367 switch (cmd) {
368 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
369 if (!sdev->dspless_mode_selected)
370 break;
371 fallthrough;
372 case SNDRV_PCM_TRIGGER_START:
373 if (hstream->running)
374 break;
375
376 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
377 1 << hstream->index,
378 1 << hstream->index);
379
380 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
381 sd_offset,
382 SOF_HDA_SD_CTL_DMA_START |
383 SOF_HDA_CL_DMA_SD_INT_MASK,
384 SOF_HDA_SD_CTL_DMA_START |
385 SOF_HDA_CL_DMA_SD_INT_MASK);
386
387 ret = snd_sof_dsp_read_poll_timeout(sdev,
388 HDA_DSP_HDA_BAR,
389 sd_offset, run,
390 ((run & dma_start) == dma_start),
391 HDA_DSP_REG_POLL_INTERVAL_US,
392 HDA_DSP_STREAM_RUN_TIMEOUT);
393
394 if (ret >= 0)
395 hstream->running = true;
396
397 break;
398 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
399 if (!sdev->dspless_mode_selected)
400 break;
401 fallthrough;
402 case SNDRV_PCM_TRIGGER_SUSPEND:
403 case SNDRV_PCM_TRIGGER_STOP:
404 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
405 sd_offset,
406 SOF_HDA_SD_CTL_DMA_START |
407 SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
408
409 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
410 sd_offset, run,
411 !(run & dma_start),
412 HDA_DSP_REG_POLL_INTERVAL_US,
413 HDA_DSP_STREAM_RUN_TIMEOUT);
414
415 if (ret >= 0) {
416 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
417 sd_offset + SOF_HDA_ADSP_REG_SD_STS,
418 SOF_HDA_CL_DMA_SD_INT_MASK);
419
420 hstream->running = false;
421 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
422 SOF_HDA_INTCTL,
423 1 << hstream->index, 0x0);
424 }
425 break;
426 default:
427 dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
428 return -EINVAL;
429 }
430
431 if (ret < 0) {
432 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
433
434 dev_err(sdev->dev,
435 "%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
436 __func__, cmd, stream_name ? stream_name : "unknown stream");
437 kfree(stream_name);
438 }
439
440 return ret;
441}
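/*
 * Summary of the trigger handling above: START (and PAUSE_RELEASE in DSP-less
 * mode) enables the stream interrupt in INTCTL, sets the RUN and interrupt
 * enable bits in SD_CTL and polls until the controller reports RUN=1.
 * STOP, SUSPEND (and PAUSE_PUSH in DSP-less mode) clear those bits, poll for
 * RUN=0, acknowledge the bits in SD_STS and mask the stream interrupt again.
 * In normal DSP mode, PAUSE_PUSH/PAUSE_RELEASE are no-ops here.
 */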
442
443/* minimal recommended programming for ICCMAX stream */
444int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
445 struct snd_dma_buffer *dmab,
446 struct snd_pcm_hw_params *params)
447{
448 struct hdac_stream *hstream = &hext_stream->hstream;
449 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
450 int ret;
451 u32 mask = 0x1 << hstream->index;
452
453 if (!hext_stream) {
454 dev_err(sdev->dev, "error: no stream available\n");
455 return -ENODEV;
456 }
457
458 if (!dmab) {
459 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
460 return -ENODEV;
461 }
462
463 if (hstream->posbuf)
464 *hstream->posbuf = 0;
465
466 /* reset BDL address */
467 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
468 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
469 0x0);
470 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
471 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
472 0x0);
473
474 hstream->frags = 0;
475
476 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
477 if (ret < 0) {
478 dev_err(sdev->dev, "error: set up of BDL failed\n");
479 return ret;
480 }
481
482 /* program BDL address */
483 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
484 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
485 (u32)hstream->bdl.addr);
486 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
487 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
488 upper_32_bits(hstream->bdl.addr));
489
490 /* program cyclic buffer length */
491 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
492 sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
493 hstream->bufsize);
494
495 /* program last valid index */
496 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
497 sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
498 0xffff, (hstream->frags - 1));
499
500 /* decouple host and link DMA, enable DSP features */
501 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
502 mask, mask);
503
504 /* Follow HW recommendation to set the guardband value to 95us during FW boot */
505 snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
506 HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);
507
508 /* start DMA */
509 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
510 SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);
511
512 return 0;
513}
514
515/*
516 * prepare common hdac register settings, for both the code loader
517 * and normal streams.
518 */
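/*
 * Rough order of operations in hda_dsp_stream_hw_params() below:
 *  1. decouple host and link DMA via PPCTL (skipped in DSP-less mode)
 *  2. stop the DMA and clear the stream status, reset the stream descriptor,
 *     then stop/clear again once the reset has completed
 *  3. build the BDL and program stream tag, CBL, format (with the PROCEN
 *     couple/decouple quirk on affected platforms), LVI and the BDL pointers
 *  4. enable the global position buffer (DPLBASE/DPUBASE) if not yet enabled
 *  5. enable the stream interrupt bits and, for playback, read the FIFO size
 */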
519int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
520 struct hdac_ext_stream *hext_stream,
521 struct snd_dma_buffer *dmab,
522 struct snd_pcm_hw_params *params)
523{
524 const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
525 struct hdac_bus *bus = sof_to_bus(sdev);
526 struct hdac_stream *hstream;
527 int sd_offset, ret;
528 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
529 u32 mask;
530 u32 run;
531
532 if (!hext_stream) {
533 dev_err(sdev->dev, "error: no stream available\n");
534 return -ENODEV;
535 }
536
537 if (!dmab) {
538 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
539 return -ENODEV;
540 }
541
542 hstream = &hext_stream->hstream;
543 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
544 mask = BIT(hstream->index);
545
546 /* decouple host and link DMA if the DSP is used */
547 if (!sdev->dspless_mode_selected)
548 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
549 mask, mask);
550
551 /* clear stream status */
552 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
553 SOF_HDA_CL_DMA_SD_INT_MASK |
554 SOF_HDA_SD_CTL_DMA_START, 0);
555
556 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
557 sd_offset, run,
558 !(run & dma_start),
559 HDA_DSP_REG_POLL_INTERVAL_US,
560 HDA_DSP_STREAM_RUN_TIMEOUT);
561
562 if (ret < 0) {
563 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
564
565 dev_err(sdev->dev,
566 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
567 __func__, stream_name ? stream_name : "unknown stream");
568 kfree(stream_name);
569 return ret;
570 }
571
572 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
573 sd_offset + SOF_HDA_ADSP_REG_SD_STS,
574 SOF_HDA_CL_DMA_SD_INT_MASK,
575 SOF_HDA_CL_DMA_SD_INT_MASK);
576
577 /* stream reset */
578 ret = hda_dsp_stream_reset(sdev, hstream);
579 if (ret < 0)
580 return ret;
581
582 if (hstream->posbuf)
583 *hstream->posbuf = 0;
584
585 /* reset BDL address */
586 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
587 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
588 0x0);
589 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
590 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
591 0x0);
592
593 /* clear stream status */
594 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
595 SOF_HDA_CL_DMA_SD_INT_MASK |
596 SOF_HDA_SD_CTL_DMA_START, 0);
597
598 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
599 sd_offset, run,
600 !(run & dma_start),
601 HDA_DSP_REG_POLL_INTERVAL_US,
602 HDA_DSP_STREAM_RUN_TIMEOUT);
603
604 if (ret < 0) {
605 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
606
607 dev_err(sdev->dev,
608 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
609 __func__, stream_name ? stream_name : "unknown stream");
610 kfree(stream_name);
611 return ret;
612 }
613
614 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
615 sd_offset + SOF_HDA_ADSP_REG_SD_STS,
616 SOF_HDA_CL_DMA_SD_INT_MASK,
617 SOF_HDA_CL_DMA_SD_INT_MASK);
618
619 hstream->frags = 0;
620
621 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
622 if (ret < 0) {
623 dev_err(sdev->dev, "error: set up of BDL failed\n");
624 return ret;
625 }
626
627 /* program stream tag to set up stream descriptor for DMA */
628 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
629 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
630 hstream->stream_tag <<
631 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
632
633 /* program cyclic buffer length */
634 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
635 sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
636 hstream->bufsize);
637
638 /*
639 * Recommended hardware programming sequence for HDAudio DMA format
640 * on earlier platforms - this is not needed on newer platforms
641 *
642 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
643 * for corresponding stream index before the time of writing
644 * format to SDxFMT register.
645 * 2. Write SDxFMT
646 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
647 * enable decoupled mode
648 */
649
650 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
651 /* couple host and link DMA, disable DSP features */
652 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
653 mask, 0);
654
655 /* program stream format */
656 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
657 sd_offset +
658 SOF_HDA_ADSP_REG_SD_FORMAT,
659 0xffff, hstream->format_val);
660
661 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
662 /* decouple host and link DMA, enable DSP features */
663 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
664 mask, mask);
665
666 /* program last valid index */
667 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
668 sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
669 0xffff, (hstream->frags - 1));
670
671 /* program BDL address */
672 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
673 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
674 (u32)hstream->bdl.addr);
675 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
676 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
677 upper_32_bits(hstream->bdl.addr));
678
679 /* enable position buffer, if needed */
680 if (bus->use_posbuf && bus->posbuf.addr &&
681 !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
682 & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
683 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
684 upper_32_bits(bus->posbuf.addr));
685 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
686 (u32)bus->posbuf.addr |
687 SOF_HDA_ADSP_DPLBASE_ENABLE);
688 }
689
690 /* set interrupt enable bits */
691 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
692 SOF_HDA_CL_DMA_SD_INT_MASK,
693 SOF_HDA_CL_DMA_SD_INT_MASK);
694
695 /* read FIFO size */
696 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
697 hstream->fifo_size =
698 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
699 sd_offset +
700 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
701 hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
702 hstream->fifo_size += 1;
703 } else {
704 hstream->fifo_size = 0;
705 }
706
707 return ret;
708}
709
710int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
711 struct snd_pcm_substream *substream)
712{
713 struct hdac_stream *hstream = substream->runtime->private_data;
714 struct hdac_ext_stream *hext_stream = container_of(hstream,
715 struct hdac_ext_stream,
716 hstream);
717 int ret;
718
719 ret = hda_dsp_stream_reset(sdev, hstream);
720 if (ret < 0)
721 return ret;
722
723 if (!sdev->dspless_mode_selected) {
724 struct hdac_bus *bus = sof_to_bus(sdev);
725 u32 mask = BIT(hstream->index);
726
727 spin_lock_irq(&bus->reg_lock);
728 /* couple host and link DMA if link DMA channel is idle */
729 if (!hext_stream->link_locked)
730 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
731 SOF_HDA_REG_PP_PPCTL, mask, 0);
732 spin_unlock_irq(&bus->reg_lock);
733 }
734
735 hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
736
737 hstream->substream = NULL;
738
739 return 0;
740}
741EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
742
743bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
744{
745 struct hdac_bus *bus = sof_to_bus(sdev);
746 bool ret = false;
747 u32 status;
748
749 /* This function can be called from irq thread context, so use spin_lock_irq */
750 spin_lock_irq(&bus->reg_lock);
751
752 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
753
754 trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);
755
756 /* if the register is inaccessible, ignore it */
757 if (status != 0xffffffff)
758 ret = true;
759
760 spin_unlock_irq(&bus->reg_lock);
761
762 return ret;
763}
764EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, "SND_SOC_SOF_INTEL_HDA_COMMON");
765
766static void
767hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
768{
769 u64 buffer_size = hstream->bufsize;
770 u64 prev_pos, pos, num_bytes;
771
772 div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
773 pos = hda_dsp_stream_get_position(hstream, direction, false);
774
775 if (pos < prev_pos)
776 num_bytes = (buffer_size - prev_pos) + pos;
777 else
778 num_bytes = pos - prev_pos;
779
780 hstream->curr_pos += num_bytes;
781}
782
783static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
784{
785 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
786 struct hdac_stream *s;
787 bool active = false;
788 u32 sd_status;
789
790 list_for_each_entry(s, &bus->stream_list, list) {
791 if (status & BIT(s->index) && s->opened) {
792 sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
793
794 trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);
795
796 writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
797
798 active = true;
799 if (!s->running)
800 continue;
801 if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
802 continue;
803 if (!s->substream && !s->cstream) {
804 /*
805 * when no substream is found, the DMA may be used for code loading
806 * or data transfers, which can rely on wait_for_completion()
807 */
808 struct sof_intel_hda_stream *hda_stream;
809 struct hdac_ext_stream *hext_stream;
810
811 hext_stream = stream_to_hdac_ext_stream(s);
812 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
813 hext_stream);
814
815 complete(&hda_stream->ioc);
816 continue;
817 }
818
819 /* Inform ALSA only if the IPC position is not used */
820 if (s->substream && sof_hda->no_ipc_position) {
821 snd_sof_pcm_period_elapsed(s->substream);
822 } else if (s->cstream) {
823 hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
824 snd_compr_fragment_elapsed(s->cstream);
825 }
826 }
827 }
828
829 return active;
830}
831
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN)
			active |= hda_codec_check_rirb_status(sdev);
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, "SND_SOC_SOF_INTEL_HDA_COMMON");

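/*
 * Probe-time stream setup: read GCAP to discover the stream count, allocate
 * the shared position buffer, the CORB/RIRB ring buffer and one
 * sof_intel_hda_stream (with its BDL) per hardware stream descriptor, and
 * add each stream to the bus stream list.
 */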
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
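	/*
	 * Per the HDA specification, GCAP bits 15:12 hold the number of output
	 * (playback) stream descriptors and bits 11:8 the number of input
	 * (capture) stream descriptors; e.g. a hypothetical gcap value of
	 * 0x9701 would decode to 9 playback and 7 capture streams.
	 */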
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture and playback streams */
	for (i = 0; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;
		init_completion(&hda_stream->ioc);

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB? */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->opened = false;
		hstream->running = false;

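		/*
		 * Per the HDA register layout, the input (capture) stream
		 * descriptors come first, followed by the output (playback)
		 * ones; stream tags are 1-based and numbered per direction.
		 */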
		if (i < num_capture) {
			hstream->stream_tag = i + 1;
			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
		} else {
			hstream->stream_tag = i - num_capture + 1;
			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
		}

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

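		/*
		 * Each stream owns an 8-byte slot in the shared DMA position
		 * buffer; the lower 32 bits hold the current buffer position.
		 */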
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	/* store stream count from GCAP required for CHAIN_DMA */
	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
		struct sof_ipc4_fw_data *ipc4_data = sdev->private;

		ipc4_data->num_playback_streams = num_playback;
		ipc4_data->num_capture_streams = num_capture;
	}

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_init, "SND_SOC_SOF_INTEL_HDA_COMMON");

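/*
 * Undo hda_dsp_stream_init(): release the position and CORB/RIRB buffers and
 * free each stream's BDL and backing structure. The _safe list iterator is
 * required because entries are removed while walking the list.
 */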
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDAudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}
EXPORT_SYMBOL_NS(hda_dsp_stream_free, "SND_SOC_SOF_INTEL_HDA_COMMON");

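/*
 * Return the current DMA position as a byte offset into the stream buffer
 * (despite the snd_pcm_uframes_t return type); callers convert to frames
 * where needed. The position source depends on sof_hda_position_quirk.
 */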
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It is not clear, for example, why the flow differs between
		 * capture and playback: the only information that matters is
		 * the traffic class in use, and since all SOF-enabled platforms
		 * support only VC0, the workaround was likely unnecessary
		 * and quite possibly wrong.
		 */

		/*
		 * DPIB/posbuf position mode:
		 * For playback, use the DPIB register from HDA space, which
		 * reflects the data actually transferred.
		 * For capture, use the position buffer, as DPIB is not
		 * accurate enough: its update may complete before the data
		 * is written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture streams we need additional workarounds
			 * to fix the position:
			 *
			 * 1. Wait at least 20us after the interrupt (IOC) is
			 *    generated before reading the position buffer, to
			 *    make sure the update happens on a frame boundary,
			 *    i.e. 20.833us for 48kHz.
			 * 2. Perform a dummy read of the DPIB register to
			 *    flush the DMA position value.
			 * 3. Read the DMA position from the position buffer.
			 *    The readback value should now be >= the period
			 *    boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * This is the recommended option when VC1 traffic is disabled.
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While it isn't needed for SOF platforms, it is added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, "SND_SOC_SOF_INTEL_HDA_COMMON");

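/*
 * Combine two 32-bit register reads into a 64-bit value,
 * e.g. merge_u64(0x00000001, 0x00000002) == 0x0000000100000002.
 */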
#define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l))

/**
 * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
 * @sdev: SOF device
 * @component: ASoC component
 * @substream: PCM substream
 *
 * Returns the raw Linear Link Position value
 */
u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
			   struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	u32 llp_l, llp_u;

	/*
	 * The pplc_addr has been calculated during probe in
	 * hda_dsp_stream_init():
	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
	 *	       SOF_HDA_PPLC_BASE +
	 *	       SOF_HDA_PPLC_MULTI * total_stream +
	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
	 *
	 * Use this pre-calculated address to avoid repeated re-calculation.
	 */
	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);

	/* Compensate the LLP counter with the saved offset */
	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
		return merge_u64(llp_u, llp_l) -
		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);

	return merge_u64(llp_u, llp_l);
}
EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, "SND_SOC_SOF_INTEL_HDA_COMMON");

/**
 * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
 * @sdev: SOF device
 * @component: ASoC component
 * @substream: PCM substream
 *
 * Returns the raw Linear DMA Position value
 */
u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
			   struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	u32 ldp_l, ldp_u;

	/*
	 * The pphc_addr has been calculated during probe in
	 * hda_dsp_stream_init():
	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
	 *	       SOF_HDA_PPHC_BASE +
	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
	 *
	 * Use this pre-calculated address to avoid repeated re-calculation.
	 */
	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);

	return ((u64)ldp_u << 32) | ldp_l;
}
EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, "SND_SOC_SOF_INTEL_HDA_COMMON");