// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//          Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//          Rander Wang <rander.wang@intel.com>
//          Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include <trace/events/sof_intel.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

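/* guardband value (in us) written to the HDA_VS_INTEL_LTRP register during FW boot */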
#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

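/*
 * Build a short description of the stream for error messages. The string is
 * allocated with kasprintf(), so it may be NULL and callers must kfree() it.
 */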
static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = snd_soc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * Set up one of the BDL entries for a stream. Returns the updated buffer
 * offset on success or a negative error code.
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC only if the position is not reported via IPC and
	 * period wakeups are requested
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

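/*
 * SPIB (Software Position In Buffer) support: enable or disable the
 * capability for one stream and program the software position value that is
 * reported to the DMA engine.
 */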
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hstream->spib_addr, size);

	return 0;
}

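/*
 * Stream ownership: a stream returned by hda_dsp_stream_get() is marked
 * opened under bus->reg_lock and must be released with hda_dsp_stream_put().
 */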
/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not
	 * needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

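/*
 * Stream reset handshake: set the CRST bit and wait for it to read back as 1,
 * then clear it and wait for the controller to report 0 before touching any
 * other stream register.
 */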
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

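	/*
	 * Note: PAUSE_PUSH/PAUSE_RELEASE only touch the host DMA RUN bit in
	 * dspless mode; when the DSP is in use they are assumed to be handled
	 * by the firmware pipeline, so the cases below simply break.
	 */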
	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_stream *hstream;
	int sd_offset;
	u32 mask;
	int ret;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset, ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

	/* decouple host and link DMA if the DSP is used */
	if (!sdev->dspless_mode_selected)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 * for corresponding stream index before the time of writing
	 * format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 * enable decoupled mode
	 */

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_SD_FORMAT,
				0xffff, hstream->format_val);

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							    struct hdac_ext_stream,
							    hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		spin_lock_irq(&bus->reg_lock);
		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
		spin_unlock_irq(&bus->reg_lock);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called at irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if Register inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

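/*
 * Update the compress stream byte counter: compute how far the DMA advanced
 * since the last call, handling wrap-around of the circular buffer.
 */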
static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

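/*
 * Called from the irq thread with bus->reg_lock held: ack the per-stream
 * status bits and report whether any stream still had work pending.
 */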
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN)
			active |= hda_codec_check_rirb_status(sdev);

		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

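	/*
	 * GCAP bits 11:8 give the number of input (capture) streams and
	 * bits 15:12 the number of output (playback) streams, matching the
	 * shifts used below.
	 */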
	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture and playback streams */
	for (i = 0; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->opened = false;
		hstream->running = false;

		if (i < num_capture) {
			hstream->stream_tag = i + 1;
			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
		} else {
			hstream->stream_tag = i - num_capture + 1;
			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
		}

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDaudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}

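/*
 * Return the current DMA position for the stream. @can_sleep selects whether
 * the capture-path workaround below may usleep before reading the position
 * buffer; callers in atomic context pass false.
 */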
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback, the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture stream, we need more workaround to fix the
			 * position incorrect issue:
			 *
			 * 1. Wait at least 20us before reading position buffer after
			 * the interrupt generated(IOC), to make sure position update
			 * happens on frame boundary i.e. 20.833uSec for 48KHz.
			 * 2. Perform a dummy Read to DPIB register to flush DMA
			 * position value.
			 * 3. Read the DMA Position from posbuf. Now the readback
			 * value should be >= period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}