Loading...
1// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2//
3// This file is provided under a dual BSD/GPLv2 license. When using or
4// redistributing this file, you may do so under either license.
5//
6// Copyright(c) 2018 Intel Corporation. All rights reserved.
7//
8// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9// Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10// Rander Wang <rander.wang@intel.com>
11// Keyon Jie <yang.jie@linux.intel.com>
12//
13
14/*
15 * Hardware interface for generic Intel audio DSP HDA IP
16 */
17
18#include <sound/hdaudio_ext.h>
19#include <sound/hda_register.h>
20#include <sound/sof.h>
21#include <trace/events/sof_intel.h>
22#include "../ops.h"
23#include "../sof-audio.h"
24#include "hda.h"
25
26#define HDA_LTRP_GB_VALUE_US 95
27
28static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
29{
30 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
31 return "Playback";
32 else
33 return "Capture";
34}
35
36static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
37{
38 struct snd_soc_pcm_runtime *rtd;
39
40 if (hstream->substream)
41 rtd = snd_soc_substream_to_rtd(hstream->substream);
42 else if (hstream->cstream)
43 rtd = hstream->cstream->private_data;
44 else
45 /* Non audio DMA user, like dma-trace */
46 return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
47 hda_hstream_direction_str(hstream),
48 hstream->stream_tag);
49
50 return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
51 rtd->dai_link->name, hda_hstream_direction_str(hstream),
52 hstream->stream_tag);
53}
54
/*
 * set up one of BDL entries for a stream
 *
 * Appends Buffer Descriptor List entries covering @size bytes of @dmab
 * starting at @offset, advancing *@bdlp past the entries written and
 * incrementing hstream->frags per entry. When @ioc is set, the IOC
 * (interrupt-on-completion) flag is raised only on the final entry of
 * the segment. Returns the new buffer offset, or -EINVAL when the
 * hardware BDL entry limit would be exceeded.
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr (descriptor fields are little endian) */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size: limited to the contiguous chunk at offset */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}
101
102/*
103 * set up Buffer Descriptor List (BDL) for host memory transfer
104 * BDL describes the location of the individual buffers and is little endian.
105 */
106int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
107 struct snd_dma_buffer *dmab,
108 struct hdac_stream *hstream)
109{
110 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
111 struct sof_intel_dsp_bdl *bdl;
112 int i, offset, period_bytes, periods;
113 int remain, ioc;
114
115 period_bytes = hstream->period_bytes;
116 dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
117 if (!period_bytes)
118 period_bytes = hstream->bufsize;
119
120 periods = hstream->bufsize / period_bytes;
121
122 dev_dbg(sdev->dev, "periods:%d\n", periods);
123
124 remain = hstream->bufsize % period_bytes;
125 if (remain)
126 periods++;
127
128 /* program the initial BDL entries */
129 bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
130 offset = 0;
131 hstream->frags = 0;
132
133 /*
134 * set IOC if don't use position IPC
135 * and period_wakeup needed.
136 */
137 ioc = hda->no_ipc_position ?
138 !hstream->no_period_wakeup : 0;
139
140 for (i = 0; i < periods; i++) {
141 if (i == (periods - 1) && remain)
142 /* set the last small entry */
143 offset = hda_setup_bdle(sdev, dmab,
144 hstream, &bdl, offset,
145 remain, 0);
146 else
147 offset = hda_setup_bdle(sdev, dmab,
148 hstream, &bdl, offset,
149 period_bytes, ioc);
150 }
151
152 return offset;
153}
154
155int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
156 struct hdac_ext_stream *hext_stream,
157 int enable, u32 size)
158{
159 struct hdac_stream *hstream = &hext_stream->hstream;
160 u32 mask;
161
162 if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
163 dev_err(sdev->dev, "error: address of spib capability is NULL\n");
164 return -EINVAL;
165 }
166
167 mask = (1 << hstream->index);
168
169 /* enable/disable SPIB for the stream */
170 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
171 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
172 enable << hstream->index);
173
174 /* set the SPIB value */
175 sof_io_write(sdev, hstream->spib_addr, size);
176
177 return 0;
178}
179
/*
 * get next unused stream
 *
 * Scans the bus stream list under reg_lock for an unopened stream of the
 * requested direction whose host DMA channel is not reserved, and marks it
 * opened. Returns the extended stream, or NULL when none is free. On
 * pre-ACE hardware, DMI L1 entry is additionally disabled for streams that
 * are not L1-compatible (host DMA xrun workaround).
 */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	/* reg_lock serializes stream allocation against the irq/put paths */
	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	/* remember flags so hda_dsp_stream_put() can re-evaluate L1 policy */
	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}
236
/*
 * free a stream
 *
 * Marks the stream matching @direction/@stream_tag as unopened and, on
 * pre-ACE hardware, re-enables DMI L1 if no remaining open stream is
 * L1-incompatible. Returns 0, or -ENODEV when no matching open stream
 * was found.
 */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}
287
/*
 * Reset one stream descriptor: assert CRST, wait for the hardware to
 * acknowledge it, deassert CRST, then wait for the stream to leave reset.
 * Returns 0 on success or -ETIMEDOUT if either handshake times out.
 */
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	/* poll until the hardware reflects CRST as set */
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}
326
/*
 * Start or stop the host DMA for a stream in response to a PCM trigger.
 *
 * START (and PAUSE_RELEASE in DSP-less mode) enables the stream interrupt,
 * sets the DMA RUN bit and polls until the hardware reports it running.
 * STOP/SUSPEND (and PAUSE_PUSH in DSP-less mode) clears RUN, polls for the
 * DMA to stop, then clears stream status and the interrupt enable.
 * Returns 0 on success, a negative errno on poll timeout or unknown cmd.
 */
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		/* with a DSP, pause/release is handled by firmware, not host DMA */
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		/* enable the per-stream interrupt */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		/* set RUN and unmask stream descriptor interrupts */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		/* wait for the hardware to report the DMA as running */
		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		/* with a DSP, pause/push is handled by firmware, not host DMA */
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		/* clear RUN and mask stream descriptor interrupts */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		/* wait for the DMA to actually stop */
		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			/* clear any latched stream status bits */
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			/* disable the per-stream interrupt */
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}
412
413/* minimal recommended programming for ICCMAX stream */
414int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
415 struct snd_dma_buffer *dmab,
416 struct snd_pcm_hw_params *params)
417{
418 struct hdac_stream *hstream = &hext_stream->hstream;
419 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
420 int ret;
421 u32 mask = 0x1 << hstream->index;
422
423 if (!hext_stream) {
424 dev_err(sdev->dev, "error: no stream available\n");
425 return -ENODEV;
426 }
427
428 if (!dmab) {
429 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
430 return -ENODEV;
431 }
432
433 if (hstream->posbuf)
434 *hstream->posbuf = 0;
435
436 /* reset BDL address */
437 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
438 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
439 0x0);
440 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
441 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
442 0x0);
443
444 hstream->frags = 0;
445
446 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
447 if (ret < 0) {
448 dev_err(sdev->dev, "error: set up of BDL failed\n");
449 return ret;
450 }
451
452 /* program BDL address */
453 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
454 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
455 (u32)hstream->bdl.addr);
456 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
457 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
458 upper_32_bits(hstream->bdl.addr));
459
460 /* program cyclic buffer length */
461 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
462 sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
463 hstream->bufsize);
464
465 /* program last valid index */
466 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
467 sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
468 0xffff, (hstream->frags - 1));
469
470 /* decouple host and link DMA, enable DSP features */
471 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
472 mask, mask);
473
474 /* Follow HW recommendation to set the guardband value to 95us during FW boot */
475 snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
476 HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);
477
478 /* start DMA */
479 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
480 SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);
481
482 return 0;
483}
484
485/*
486 * prepare for common hdac registers settings, for both code loader
487 * and normal stream.
488 */
489int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
490 struct hdac_ext_stream *hext_stream,
491 struct snd_dma_buffer *dmab,
492 struct snd_pcm_hw_params *params)
493{
494 const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
495 struct hdac_bus *bus = sof_to_bus(sdev);
496 struct hdac_stream *hstream;
497 int sd_offset, ret;
498 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
499 u32 mask;
500 u32 run;
501
502 if (!hext_stream) {
503 dev_err(sdev->dev, "error: no stream available\n");
504 return -ENODEV;
505 }
506
507 if (!dmab) {
508 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
509 return -ENODEV;
510 }
511
512 hstream = &hext_stream->hstream;
513 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
514 mask = BIT(hstream->index);
515
516 /* decouple host and link DMA if the DSP is used */
517 if (!sdev->dspless_mode_selected)
518 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
519 mask, mask);
520
521 /* clear stream status */
522 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
523 SOF_HDA_CL_DMA_SD_INT_MASK |
524 SOF_HDA_SD_CTL_DMA_START, 0);
525
526 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
527 sd_offset, run,
528 !(run & dma_start),
529 HDA_DSP_REG_POLL_INTERVAL_US,
530 HDA_DSP_STREAM_RUN_TIMEOUT);
531
532 if (ret < 0) {
533 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
534
535 dev_err(sdev->dev,
536 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
537 __func__, stream_name ? stream_name : "unknown stream");
538 kfree(stream_name);
539 return ret;
540 }
541
542 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
543 sd_offset + SOF_HDA_ADSP_REG_SD_STS,
544 SOF_HDA_CL_DMA_SD_INT_MASK,
545 SOF_HDA_CL_DMA_SD_INT_MASK);
546
547 /* stream reset */
548 ret = hda_dsp_stream_reset(sdev, hstream);
549 if (ret < 0)
550 return ret;
551
552 if (hstream->posbuf)
553 *hstream->posbuf = 0;
554
555 /* reset BDL address */
556 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
557 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
558 0x0);
559 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
560 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
561 0x0);
562
563 /* clear stream status */
564 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
565 SOF_HDA_CL_DMA_SD_INT_MASK |
566 SOF_HDA_SD_CTL_DMA_START, 0);
567
568 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
569 sd_offset, run,
570 !(run & dma_start),
571 HDA_DSP_REG_POLL_INTERVAL_US,
572 HDA_DSP_STREAM_RUN_TIMEOUT);
573
574 if (ret < 0) {
575 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
576
577 dev_err(sdev->dev,
578 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
579 __func__, stream_name ? stream_name : "unknown stream");
580 kfree(stream_name);
581 return ret;
582 }
583
584 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
585 sd_offset + SOF_HDA_ADSP_REG_SD_STS,
586 SOF_HDA_CL_DMA_SD_INT_MASK,
587 SOF_HDA_CL_DMA_SD_INT_MASK);
588
589 hstream->frags = 0;
590
591 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
592 if (ret < 0) {
593 dev_err(sdev->dev, "error: set up of BDL failed\n");
594 return ret;
595 }
596
597 /* program stream tag to set up stream descriptor for DMA */
598 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
599 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
600 hstream->stream_tag <<
601 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
602
603 /* program cyclic buffer length */
604 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
605 sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
606 hstream->bufsize);
607
608 /*
609 * Recommended hardware programming sequence for HDAudio DMA format
610 * on earlier platforms - this is not needed on newer platforms
611 *
612 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
613 * for corresponding stream index before the time of writing
614 * format to SDxFMT register.
615 * 2. Write SDxFMT
616 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
617 * enable decoupled mode
618 */
619
620 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
621 /* couple host and link DMA, disable DSP features */
622 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
623 mask, 0);
624
625 /* program stream format */
626 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
627 sd_offset +
628 SOF_HDA_ADSP_REG_SD_FORMAT,
629 0xffff, hstream->format_val);
630
631 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
632 /* decouple host and link DMA, enable DSP features */
633 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
634 mask, mask);
635
636 /* program last valid index */
637 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
638 sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
639 0xffff, (hstream->frags - 1));
640
641 /* program BDL address */
642 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
643 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
644 (u32)hstream->bdl.addr);
645 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
646 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
647 upper_32_bits(hstream->bdl.addr));
648
649 /* enable position buffer, if needed */
650 if (bus->use_posbuf && bus->posbuf.addr &&
651 !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
652 & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
653 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
654 upper_32_bits(bus->posbuf.addr));
655 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
656 (u32)bus->posbuf.addr |
657 SOF_HDA_ADSP_DPLBASE_ENABLE);
658 }
659
660 /* set interrupt enable bits */
661 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
662 SOF_HDA_CL_DMA_SD_INT_MASK,
663 SOF_HDA_CL_DMA_SD_INT_MASK);
664
665 /* read FIFO size */
666 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
667 hstream->fifo_size =
668 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
669 sd_offset +
670 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
671 hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
672 hstream->fifo_size += 1;
673 } else {
674 hstream->fifo_size = 0;
675 }
676
677 return ret;
678}
679
/*
 * Tear down a stream after PCM hw_free: reset the stream descriptor,
 * re-couple host and link DMA (when the link side is idle and a DSP is in
 * use), disable SPIB and detach the substream. Returns 0 or a negative
 * errno from the stream reset.
 */
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							   struct hdac_ext_stream,
							   hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		/* reg_lock protects PPCTL against concurrent stream setup */
		spin_lock_irq(&bus->reg_lock);
		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
		spin_unlock_irq(&bus->reg_lock);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}
711
712bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
713{
714 struct hdac_bus *bus = sof_to_bus(sdev);
715 bool ret = false;
716 u32 status;
717
718 /* The function can be called at irq thread, so use spin_lock_irq */
719 spin_lock_irq(&bus->reg_lock);
720
721 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
722
723 trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);
724
725 /* if Register inaccessible, ignore it.*/
726 if (status != 0xffffffff)
727 ret = true;
728
729 spin_unlock_irq(&bus->reg_lock);
730
731 return ret;
732}
733
734static void
735hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
736{
737 u64 buffer_size = hstream->bufsize;
738 u64 prev_pos, pos, num_bytes;
739
740 div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
741 pos = hda_dsp_stream_get_position(hstream, direction, false);
742
743 if (pos < prev_pos)
744 num_bytes = (buffer_size - prev_pos) + pos;
745 else
746 num_bytes = pos - prev_pos;
747
748 hstream->curr_pos += num_bytes;
749}
750
/*
 * Service per-stream interrupt status: for each open stream flagged in
 * @status, read and acknowledge its SD status register and, when a buffer
 * completed and positions are not reported over IPC, notify ALSA
 * (period elapsed / compress fragment elapsed). Returns true if any
 * flagged stream was handled. Caller holds bus->reg_lock.
 */
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			/* write-1-to-clear: ack exactly the bits we saw */
			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			/* skip streams with no user, not running, or no buffer-complete */
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only in case not do that with IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}
784
785irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
786{
787 struct snd_sof_dev *sdev = context;
788 struct hdac_bus *bus = sof_to_bus(sdev);
789 bool active;
790 u32 status;
791 int i;
792
793 /*
794 * Loop 10 times to handle missed interrupts caused by
795 * unsolicited responses from the codec
796 */
797 for (i = 0, active = true; i < 10 && active; i++) {
798 spin_lock_irq(&bus->reg_lock);
799
800 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
801
802 /* check streams */
803 active = hda_dsp_stream_check(bus, status);
804
805 /* check and clear RIRB interrupt */
806 if (status & AZX_INT_CTRL_EN) {
807 active |= hda_codec_check_rirb_status(sdev);
808 }
809 spin_unlock_irq(&bus->reg_lock);
810 }
811
812 return IRQ_HANDLED;
813}
814
815int hda_dsp_stream_init(struct snd_sof_dev *sdev)
816{
817 struct hdac_bus *bus = sof_to_bus(sdev);
818 struct hdac_ext_stream *hext_stream;
819 struct hdac_stream *hstream;
820 struct pci_dev *pci = to_pci_dev(sdev->dev);
821 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
822 int sd_offset;
823 int i, num_playback, num_capture, num_total, ret;
824 u32 gcap;
825
826 gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
827 dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
828
829 /* get stream count from GCAP */
830 num_capture = (gcap >> 8) & 0x0f;
831 num_playback = (gcap >> 12) & 0x0f;
832 num_total = num_playback + num_capture;
833
834 dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
835 num_playback, num_capture);
836
837 if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
838 dev_err(sdev->dev, "error: too many playback streams %d\n",
839 num_playback);
840 return -EINVAL;
841 }
842
843 if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
844 dev_err(sdev->dev, "error: too many capture streams %d\n",
845 num_playback);
846 return -EINVAL;
847 }
848
849 /*
850 * mem alloc for the position buffer
851 * TODO: check position buffer update
852 */
853 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
854 SOF_HDA_DPIB_ENTRY_SIZE * num_total,
855 &bus->posbuf);
856 if (ret < 0) {
857 dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
858 return -ENOMEM;
859 }
860
861 /*
862 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
863 * HDAudio codecs
864 */
865 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
866 PAGE_SIZE, &bus->rb);
867 if (ret < 0) {
868 dev_err(sdev->dev, "error: RB alloc failed\n");
869 return -ENOMEM;
870 }
871
872 /* create capture and playback streams */
873 for (i = 0; i < num_total; i++) {
874 struct sof_intel_hda_stream *hda_stream;
875
876 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
877 GFP_KERNEL);
878 if (!hda_stream)
879 return -ENOMEM;
880
881 hda_stream->sdev = sdev;
882
883 hext_stream = &hda_stream->hext_stream;
884
885 if (sdev->bar[HDA_DSP_PP_BAR]) {
886 hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
887 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
888
889 hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
890 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
891 SOF_HDA_PPLC_INTERVAL * i;
892 }
893
894 hstream = &hext_stream->hstream;
895
896 /* do we support SPIB */
897 if (sdev->bar[HDA_DSP_SPIB_BAR]) {
898 hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
899 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
900 SOF_HDA_SPIB_SPIB;
901
902 hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
903 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
904 SOF_HDA_SPIB_MAXFIFO;
905 }
906
907 hstream->bus = bus;
908 hstream->sd_int_sta_mask = 1 << i;
909 hstream->index = i;
910 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
911 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
912 hstream->opened = false;
913 hstream->running = false;
914
915 if (i < num_capture) {
916 hstream->stream_tag = i + 1;
917 hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
918 } else {
919 hstream->stream_tag = i - num_capture + 1;
920 hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
921 }
922
923 /* mem alloc for stream BDL */
924 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
925 HDA_DSP_BDL_SIZE, &hstream->bdl);
926 if (ret < 0) {
927 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
928 return -ENOMEM;
929 }
930
931 hstream->posbuf = (__le32 *)(bus->posbuf.area +
932 (hstream->index) * 8);
933
934 list_add_tail(&hstream->list, &bus->stream_list);
935 }
936
937 /* store total stream count (playback + capture) from GCAP */
938 sof_hda->stream_max = num_total;
939
940 return 0;
941}
942
943void hda_dsp_stream_free(struct snd_sof_dev *sdev)
944{
945 struct hdac_bus *bus = sof_to_bus(sdev);
946 struct hdac_stream *s, *_s;
947 struct hdac_ext_stream *hext_stream;
948 struct sof_intel_hda_stream *hda_stream;
949
950 /* free position buffer */
951 if (bus->posbuf.area)
952 snd_dma_free_pages(&bus->posbuf);
953
954 /* free CORB/RIRB buffer - only used for HDaudio codecs */
955 if (bus->rb.area)
956 snd_dma_free_pages(&bus->rb);
957
958 list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
959 /* TODO: decouple */
960
961 /* free bdl buffer */
962 if (s->bdl.area)
963 snd_dma_free_pages(&s->bdl);
964 list_del(&s->list);
965 hext_stream = stream_to_hdac_ext_stream(s);
966 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
967 hext_stream);
968 devm_kfree(sdev->dev, hda_stream);
969 }
970}
971
/*
 * Read the current DMA position for a stream in bytes, according to the
 * platform's position-reporting quirk (DPIB register, posbuf DDR update,
 * or the legacy Skylake mixed scheme). @can_sleep permits the 20us settle
 * delay used by the legacy capture workaround. A position at or beyond
 * bufsize is folded to 0.
 */
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback, the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture stream, we need more workaround to fix the
			 * position incorrect issue:
			 *
			 * 1. Wait at least 20us before reading position buffer after
			 * the interrupt generated(IOC), to make sure position update
			 * happens on frame boundary i.e. 20.833uSec for 48KHz.
			 * 2. Perform a dummy Read to DPIB register to flush DMA
			 * position value.
			 * 3. Read the DMA Position from posbuf. Now the readback
			 * value should be >= period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	/* fold out-of-range readings back to the buffer start */
	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
1// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
2//
3// This file is provided under a dual BSD/GPLv2 license. When using or
4// redistributing this file, you may do so under either license.
5//
6// Copyright(c) 2018 Intel Corporation. All rights reserved.
7//
8// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9// Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10// Rander Wang <rander.wang@intel.com>
11// Keyon Jie <yang.jie@linux.intel.com>
12//
13
14/*
15 * Hardware interface for generic Intel audio DSP HDA IP
16 */
17
18#include <linux/pm_runtime.h>
19#include <sound/hdaudio_ext.h>
20#include <sound/hda_register.h>
21#include <sound/sof.h>
22#include "../ops.h"
23#include "hda.h"
24
25/*
26 * set up one of BDL entries for a stream
27 */
28static int hda_setup_bdle(struct snd_sof_dev *sdev,
29 struct snd_dma_buffer *dmab,
30 struct hdac_stream *stream,
31 struct sof_intel_dsp_bdl **bdlp,
32 int offset, int size, int ioc)
33{
34 struct hdac_bus *bus = sof_to_bus(sdev);
35 struct sof_intel_dsp_bdl *bdl = *bdlp;
36
37 while (size > 0) {
38 dma_addr_t addr;
39 int chunk;
40
41 if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
42 dev_err(sdev->dev, "error: stream frags exceeded\n");
43 return -EINVAL;
44 }
45
46 addr = snd_sgbuf_get_addr(dmab, offset);
47 /* program BDL addr */
48 bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
49 bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
50 /* program BDL size */
51 chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
52 /* one BDLE should not cross 4K boundary */
53 if (bus->align_bdle_4k) {
54 u32 remain = 0x1000 - (offset & 0xfff);
55
56 if (chunk > remain)
57 chunk = remain;
58 }
59 bdl->size = cpu_to_le32(chunk);
60 /* only program IOC when the whole segment is processed */
61 size -= chunk;
62 bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
63 bdl++;
64 stream->frags++;
65 offset += chunk;
66
67 dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
68 stream->frags, chunk);
69 }
70
71 *bdlp = bdl;
72 return offset;
73}
74
75/*
76 * set up Buffer Descriptor List (BDL) for host memory transfer
77 * BDL describes the location of the individual buffers and is little endian.
78 */
79int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
80 struct snd_dma_buffer *dmab,
81 struct hdac_stream *stream)
82{
83 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
84 struct sof_intel_dsp_bdl *bdl;
85 int i, offset, period_bytes, periods;
86 int remain, ioc;
87
88 period_bytes = stream->period_bytes;
89 dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
90 if (!period_bytes)
91 period_bytes = stream->bufsize;
92
93 periods = stream->bufsize / period_bytes;
94
95 dev_dbg(sdev->dev, "periods:%d\n", periods);
96
97 remain = stream->bufsize % period_bytes;
98 if (remain)
99 periods++;
100
101 /* program the initial BDL entries */
102 bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
103 offset = 0;
104 stream->frags = 0;
105
106 /*
107 * set IOC if don't use position IPC
108 * and period_wakeup needed.
109 */
110 ioc = hda->no_ipc_position ?
111 !stream->no_period_wakeup : 0;
112
113 for (i = 0; i < periods; i++) {
114 if (i == (periods - 1) && remain)
115 /* set the last small entry */
116 offset = hda_setup_bdle(sdev, dmab,
117 stream, &bdl, offset,
118 remain, 0);
119 else
120 offset = hda_setup_bdle(sdev, dmab,
121 stream, &bdl, offset,
122 period_bytes, ioc);
123 }
124
125 return offset;
126}
127
128int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
129 struct hdac_ext_stream *stream,
130 int enable, u32 size)
131{
132 struct hdac_stream *hstream = &stream->hstream;
133 u32 mask;
134
135 if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
136 dev_err(sdev->dev, "error: address of spib capability is NULL\n");
137 return -EINVAL;
138 }
139
140 mask = (1 << hstream->index);
141
142 /* enable/disable SPIB for the stream */
143 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
144 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
145 enable << hstream->index);
146
147 /* set the SPIB value */
148 sof_io_write(sdev, stream->spib_addr, size);
149
150 return 0;
151}
152
153/* get next unused stream */
154struct hdac_ext_stream *
155hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
156{
157 struct hdac_bus *bus = sof_to_bus(sdev);
158 struct sof_intel_hda_stream *hda_stream;
159 struct hdac_ext_stream *stream = NULL;
160 struct hdac_stream *s;
161
162 spin_lock_irq(&bus->reg_lock);
163
164 /* get an unused stream */
165 list_for_each_entry(s, &bus->stream_list, list) {
166 if (s->direction == direction && !s->opened) {
167 stream = stream_to_hdac_ext_stream(s);
168 hda_stream = container_of(stream,
169 struct sof_intel_hda_stream,
170 hda_stream);
171 /* check if the host DMA channel is reserved */
172 if (hda_stream->host_reserved)
173 continue;
174
175 s->opened = true;
176 break;
177 }
178 }
179
180 spin_unlock_irq(&bus->reg_lock);
181
182 /* stream found ? */
183 if (!stream)
184 dev_err(sdev->dev, "error: no free %s streams\n",
185 direction == SNDRV_PCM_STREAM_PLAYBACK ?
186 "playback" : "capture");
187
188 /*
189 * Disable DMI Link L1 entry when capture stream is opened.
190 * Workaround to address a known issue with host DMA that results
191 * in xruns during pause/release in capture scenarios.
192 */
193 if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
194 if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
195 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
196 HDA_VS_INTEL_EM2,
197 HDA_VS_INTEL_EM2_L1SEN, 0);
198
199 return stream;
200}
201
202/* free a stream */
203int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
204{
205 struct hdac_bus *bus = sof_to_bus(sdev);
206 struct hdac_stream *s;
207 bool active_capture_stream = false;
208 bool found = false;
209
210 spin_lock_irq(&bus->reg_lock);
211
212 /*
213 * close stream matching the stream tag
214 * and check if there are any open capture streams.
215 */
216 list_for_each_entry(s, &bus->stream_list, list) {
217 if (!s->opened)
218 continue;
219
220 if (s->direction == direction && s->stream_tag == stream_tag) {
221 s->opened = false;
222 found = true;
223 } else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
224 active_capture_stream = true;
225 }
226 }
227
228 spin_unlock_irq(&bus->reg_lock);
229
230 /* Enable DMI L1 entry if there are no capture streams open */
231 if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
232 if (!active_capture_stream)
233 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
234 HDA_VS_INTEL_EM2,
235 HDA_VS_INTEL_EM2_L1SEN,
236 HDA_VS_INTEL_EM2_L1SEN);
237
238 if (!found) {
239 dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
240 return -ENODEV;
241 }
242
243 return 0;
244}
245
246int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
247 struct hdac_ext_stream *stream, int cmd)
248{
249 struct hdac_stream *hstream = &stream->hstream;
250 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
251 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
252 int ret;
253 u32 run;
254
255 /* cmd must be for audio stream */
256 switch (cmd) {
257 case SNDRV_PCM_TRIGGER_RESUME:
258 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
259 case SNDRV_PCM_TRIGGER_START:
260 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
261 1 << hstream->index,
262 1 << hstream->index);
263
264 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
265 sd_offset,
266 SOF_HDA_SD_CTL_DMA_START |
267 SOF_HDA_CL_DMA_SD_INT_MASK,
268 SOF_HDA_SD_CTL_DMA_START |
269 SOF_HDA_CL_DMA_SD_INT_MASK);
270
271 ret = snd_sof_dsp_read_poll_timeout(sdev,
272 HDA_DSP_HDA_BAR,
273 sd_offset, run,
274 ((run & dma_start) == dma_start),
275 HDA_DSP_REG_POLL_INTERVAL_US,
276 HDA_DSP_STREAM_RUN_TIMEOUT);
277
278 if (ret)
279 return ret;
280
281 hstream->running = true;
282 break;
283 case SNDRV_PCM_TRIGGER_SUSPEND:
284 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
285 case SNDRV_PCM_TRIGGER_STOP:
286 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
287 sd_offset,
288 SOF_HDA_SD_CTL_DMA_START |
289 SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
290
291 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
292 sd_offset, run,
293 !(run & dma_start),
294 HDA_DSP_REG_POLL_INTERVAL_US,
295 HDA_DSP_STREAM_RUN_TIMEOUT);
296
297 if (ret)
298 return ret;
299
300 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
301 SOF_HDA_ADSP_REG_CL_SD_STS,
302 SOF_HDA_CL_DMA_SD_INT_MASK);
303
304 hstream->running = false;
305 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
306 1 << hstream->index, 0x0);
307 break;
308 default:
309 dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
310 return -EINVAL;
311 }
312
313 return 0;
314}
315
316/*
317 * prepare for common hdac registers settings, for both code loader
318 * and normal stream.
319 */
320int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
321 struct hdac_ext_stream *stream,
322 struct snd_dma_buffer *dmab,
323 struct snd_pcm_hw_params *params)
324{
325 struct hdac_bus *bus = sof_to_bus(sdev);
326 struct hdac_stream *hstream = &stream->hstream;
327 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
328 int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
329 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
330 u32 val, mask;
331 u32 run;
332
333 if (!stream) {
334 dev_err(sdev->dev, "error: no stream available\n");
335 return -ENODEV;
336 }
337
338 /* decouple host and link DMA */
339 mask = 0x1 << hstream->index;
340 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
341 mask, mask);
342
343 if (!dmab) {
344 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
345 return -ENODEV;
346 }
347
348 /* clear stream status */
349 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
350 SOF_HDA_CL_DMA_SD_INT_MASK |
351 SOF_HDA_SD_CTL_DMA_START, 0);
352
353 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
354 sd_offset, run,
355 !(run & dma_start),
356 HDA_DSP_REG_POLL_INTERVAL_US,
357 HDA_DSP_STREAM_RUN_TIMEOUT);
358
359 if (ret)
360 return ret;
361
362 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
363 sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
364 SOF_HDA_CL_DMA_SD_INT_MASK,
365 SOF_HDA_CL_DMA_SD_INT_MASK);
366
367 /* stream reset */
368 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
369 0x1);
370 udelay(3);
371 do {
372 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
373 sd_offset);
374 if (val & 0x1)
375 break;
376 } while (--timeout);
377 if (timeout == 0) {
378 dev_err(sdev->dev, "error: stream reset failed\n");
379 return -ETIMEDOUT;
380 }
381
382 timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
383 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
384 0x0);
385
386 /* wait for hardware to report that stream is out of reset */
387 udelay(3);
388 do {
389 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
390 sd_offset);
391 if ((val & 0x1) == 0)
392 break;
393 } while (--timeout);
394 if (timeout == 0) {
395 dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
396 return -ETIMEDOUT;
397 }
398
399 if (hstream->posbuf)
400 *hstream->posbuf = 0;
401
402 /* reset BDL address */
403 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
404 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
405 0x0);
406 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
407 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
408 0x0);
409
410 /* clear stream status */
411 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
412 SOF_HDA_CL_DMA_SD_INT_MASK |
413 SOF_HDA_SD_CTL_DMA_START, 0);
414
415 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
416 sd_offset, run,
417 !(run & dma_start),
418 HDA_DSP_REG_POLL_INTERVAL_US,
419 HDA_DSP_STREAM_RUN_TIMEOUT);
420
421 if (ret)
422 return ret;
423
424 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
425 sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
426 SOF_HDA_CL_DMA_SD_INT_MASK,
427 SOF_HDA_CL_DMA_SD_INT_MASK);
428
429 hstream->frags = 0;
430
431 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
432 if (ret < 0) {
433 dev_err(sdev->dev, "error: set up of BDL failed\n");
434 return ret;
435 }
436
437 /* program stream tag to set up stream descriptor for DMA */
438 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
439 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
440 hstream->stream_tag <<
441 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
442
443 /* program cyclic buffer length */
444 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
445 sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
446 hstream->bufsize);
447
448 /*
449 * Recommended hardware programming sequence for HDAudio DMA format
450 *
451 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
452 * for corresponding stream index before the time of writing
453 * format to SDxFMT register.
454 * 2. Write SDxFMT
455 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
456 * enable decoupled mode
457 */
458
459 /* couple host and link DMA, disable DSP features */
460 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
461 mask, 0);
462
463 /* program stream format */
464 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
465 sd_offset +
466 SOF_HDA_ADSP_REG_CL_SD_FORMAT,
467 0xffff, hstream->format_val);
468
469 /* decouple host and link DMA, enable DSP features */
470 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
471 mask, mask);
472
473 /* program last valid index */
474 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
475 sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
476 0xffff, (hstream->frags - 1));
477
478 /* program BDL address */
479 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
480 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
481 (u32)hstream->bdl.addr);
482 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
483 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
484 upper_32_bits(hstream->bdl.addr));
485
486 /* enable position buffer */
487 if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
488 & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
489 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
490 upper_32_bits(bus->posbuf.addr));
491 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
492 (u32)bus->posbuf.addr |
493 SOF_HDA_ADSP_DPLBASE_ENABLE);
494 }
495
496 /* set interrupt enable bits */
497 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
498 SOF_HDA_CL_DMA_SD_INT_MASK,
499 SOF_HDA_CL_DMA_SD_INT_MASK);
500
501 /* read FIFO size */
502 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
503 hstream->fifo_size =
504 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
505 sd_offset +
506 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
507 hstream->fifo_size &= 0xffff;
508 hstream->fifo_size += 1;
509 } else {
510 hstream->fifo_size = 0;
511 }
512
513 return ret;
514}
515
516int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
517 struct snd_pcm_substream *substream)
518{
519 struct hdac_stream *stream = substream->runtime->private_data;
520 struct hdac_ext_stream *link_dev = container_of(stream,
521 struct hdac_ext_stream,
522 hstream);
523 struct hdac_bus *bus = sof_to_bus(sdev);
524 u32 mask = 0x1 << stream->index;
525
526 spin_lock_irq(&bus->reg_lock);
527 /* couple host and link DMA if link DMA channel is idle */
528 if (!link_dev->link_locked)
529 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
530 SOF_HDA_REG_PP_PPCTL, mask, 0);
531 spin_unlock_irq(&bus->reg_lock);
532
533 return 0;
534}
535
536irqreturn_t hda_dsp_stream_interrupt(int irq, void *context)
537{
538 struct hdac_bus *bus = context;
539 int ret = IRQ_WAKE_THREAD;
540 u32 status;
541
542 spin_lock(&bus->reg_lock);
543
544 status = snd_hdac_chip_readl(bus, INTSTS);
545 dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);
546
547 /* Register inaccessible, ignore it.*/
548 if (status == 0xffffffff)
549 ret = IRQ_NONE;
550
551 spin_unlock(&bus->reg_lock);
552
553 return ret;
554}
555
556static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
557{
558 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
559 struct hdac_stream *s;
560 bool active = false;
561 u32 sd_status;
562
563 list_for_each_entry(s, &bus->stream_list, list) {
564 if (status & BIT(s->index) && s->opened) {
565 sd_status = snd_hdac_stream_readb(s, SD_STS);
566
567 dev_vdbg(bus->dev, "stream %d status 0x%x\n",
568 s->index, sd_status);
569
570 snd_hdac_stream_writeb(s, SD_STS, sd_status);
571
572 active = true;
573 if (!s->substream ||
574 !s->running ||
575 (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
576 continue;
577
578 /* Inform ALSA only in case not do that with IPC */
579 if (sof_hda->no_ipc_position)
580 snd_sof_pcm_period_elapsed(s->substream);
581 }
582 }
583
584 return active;
585}
586
/*
 * Threaded half of the stream interrupt handler: repeatedly read the
 * interrupt status, service stream status bits and the RIRB, until no
 * activity remains (or an iteration cap is hit).
 */
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct hdac_bus *bus = context;
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		/* snapshot the global interrupt status */
		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/* codec responses arrived: keep looping */
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}
627
628int hda_dsp_stream_init(struct snd_sof_dev *sdev)
629{
630 struct hdac_bus *bus = sof_to_bus(sdev);
631 struct hdac_ext_stream *stream;
632 struct hdac_stream *hstream;
633 struct pci_dev *pci = to_pci_dev(sdev->dev);
634 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
635 int sd_offset;
636 int i, num_playback, num_capture, num_total, ret;
637 u32 gcap;
638
639 gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
640 dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
641
642 /* get stream count from GCAP */
643 num_capture = (gcap >> 8) & 0x0f;
644 num_playback = (gcap >> 12) & 0x0f;
645 num_total = num_playback + num_capture;
646
647 dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
648 num_playback, num_capture);
649
650 if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
651 dev_err(sdev->dev, "error: too many playback streams %d\n",
652 num_playback);
653 return -EINVAL;
654 }
655
656 if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
657 dev_err(sdev->dev, "error: too many capture streams %d\n",
658 num_playback);
659 return -EINVAL;
660 }
661
662 /*
663 * mem alloc for the position buffer
664 * TODO: check position buffer update
665 */
666 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
667 SOF_HDA_DPIB_ENTRY_SIZE * num_total,
668 &bus->posbuf);
669 if (ret < 0) {
670 dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
671 return -ENOMEM;
672 }
673
674#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
675 /* mem alloc for the CORB/RIRB ringbuffers */
676 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
677 PAGE_SIZE, &bus->rb);
678 if (ret < 0) {
679 dev_err(sdev->dev, "error: RB alloc failed\n");
680 return -ENOMEM;
681 }
682#endif
683
684 /* create capture streams */
685 for (i = 0; i < num_capture; i++) {
686 struct sof_intel_hda_stream *hda_stream;
687
688 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
689 GFP_KERNEL);
690 if (!hda_stream)
691 return -ENOMEM;
692
693 hda_stream->sdev = sdev;
694
695 stream = &hda_stream->hda_stream;
696
697 stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
698 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
699
700 stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
701 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
702 SOF_HDA_PPLC_INTERVAL * i;
703
704 /* do we support SPIB */
705 if (sdev->bar[HDA_DSP_SPIB_BAR]) {
706 stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
707 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
708 SOF_HDA_SPIB_SPIB;
709
710 stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
711 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
712 SOF_HDA_SPIB_MAXFIFO;
713 }
714
715 hstream = &stream->hstream;
716 hstream->bus = bus;
717 hstream->sd_int_sta_mask = 1 << i;
718 hstream->index = i;
719 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
720 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
721 hstream->stream_tag = i + 1;
722 hstream->opened = false;
723 hstream->running = false;
724 hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
725
726 /* memory alloc for stream BDL */
727 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
728 HDA_DSP_BDL_SIZE, &hstream->bdl);
729 if (ret < 0) {
730 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
731 return -ENOMEM;
732 }
733 hstream->posbuf = (__le32 *)(bus->posbuf.area +
734 (hstream->index) * 8);
735
736 list_add_tail(&hstream->list, &bus->stream_list);
737 }
738
739 /* create playback streams */
740 for (i = num_capture; i < num_total; i++) {
741 struct sof_intel_hda_stream *hda_stream;
742
743 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
744 GFP_KERNEL);
745 if (!hda_stream)
746 return -ENOMEM;
747
748 hda_stream->sdev = sdev;
749
750 stream = &hda_stream->hda_stream;
751
752 /* we always have DSP support */
753 stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
754 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
755
756 stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
757 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
758 SOF_HDA_PPLC_INTERVAL * i;
759
760 /* do we support SPIB */
761 if (sdev->bar[HDA_DSP_SPIB_BAR]) {
762 stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
763 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
764 SOF_HDA_SPIB_SPIB;
765
766 stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
767 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
768 SOF_HDA_SPIB_MAXFIFO;
769 }
770
771 hstream = &stream->hstream;
772 hstream->bus = bus;
773 hstream->sd_int_sta_mask = 1 << i;
774 hstream->index = i;
775 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
776 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
777 hstream->stream_tag = i - num_capture + 1;
778 hstream->opened = false;
779 hstream->running = false;
780 hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
781
782 /* mem alloc for stream BDL */
783 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
784 HDA_DSP_BDL_SIZE, &hstream->bdl);
785 if (ret < 0) {
786 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
787 return -ENOMEM;
788 }
789
790 hstream->posbuf = (__le32 *)(bus->posbuf.area +
791 (hstream->index) * 8);
792
793 list_add_tail(&hstream->list, &bus->stream_list);
794 }
795
796 /* store total stream count (playback + capture) from GCAP */
797 sof_hda->stream_max = num_total;
798
799 return 0;
800}
801
/* release all stream DMA memory allocated by hda_dsp_stream_init() */
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free the CORB/RIRB ringbuffer memory */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free the per-stream BDL buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		/* recover the containing SOF stream and free it early */
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}