// SPDX-License-Identifier: GPL-2.0
//
// Freescale ASRC ALSA SoC Platform (DMA) driver
//
// Copyright (C) 2014 Freescale Semiconductor, Inc.
//
// Author: Nicolin Chen <nicoleotsuka@gmail.com>

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/dma/imx-dma.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>

#include "fsl_asrc_common.h"

#define FSL_ASRC_DMABUF_SIZE	(256 * 1024)

static struct snd_pcm_hardware snd_imx_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max = FSL_ASRC_DMABUF_SIZE,
	.period_bytes_min = 128,
	.period_bytes_max = 65535, /* Limited by SDMA engine */
	.periods_min = 2,
	.periods_max = 255,
	.fifo_size = 0,
};

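/*
 * dmaengine filter callback used with __dma_request_channel() below: accept
 * only general-purpose i.MX DMA channels and hand them the imx_dma_data that
 * carries the Front-End and Back-End DMA request lines for the DEV_TO_DEV
 * transfer.
 */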
static bool filter(struct dma_chan *chan, void *param)
{
	if (!imx_dma_is_general_purpose(chan))
		return false;

	chan->private = param;

	return true;
}

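/* Period-elapsed callback for the Front-End cyclic transfer */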
static void fsl_asrc_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	pair->pos += snd_pcm_lib_period_bytes(substream);
	if (pair->pos >= snd_pcm_lib_buffer_bytes(substream))
		pair->pos = 0;

	snd_pcm_period_elapsed(substream);
}

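/*
 * Indexing convention: dir is OUT for playback and IN for capture, so
 * dma_chan[!dir]/desc[!dir] refer to the Front-End channel that moves data
 * between memory and the ASRC FIFO, while dma_chan[dir]/desc[dir] refer to
 * the Back-End DEV_TO_DEV channel between the ASRC and the peripheral FIFO.
 */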
static int fsl_asrc_dma_prepare_and_submit(struct snd_pcm_substream *substream,
					   struct snd_soc_component *component)
{
	u8 dir = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? OUT : IN;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct device *dev = component->dev;
	unsigned long flags = DMA_CTRL_ACK;

	/* Prepare and submit Front-End DMA channel */
	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	pair->pos = 0;
	pair->desc[!dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[!dir], runtime->dma_addr,
			snd_pcm_lib_buffer_bytes(substream),
			snd_pcm_lib_period_bytes(substream),
			dir == OUT ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM, flags);
	if (!pair->desc[!dir]) {
		dev_err(dev, "failed to prepare slave DMA for Front-End\n");
		return -ENOMEM;
	}

	pair->desc[!dir]->callback = fsl_asrc_dma_complete;
	pair->desc[!dir]->callback_param = substream;

	dmaengine_submit(pair->desc[!dir]);

	/* Prepare and submit Back-End DMA channel */
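	/*
	 * The buffer address and lengths passed here (0xffff, 64, 64) appear
	 * to be placeholders: for a DEV_TO_DEV cyclic transfer the real FIFO
	 * addresses come from the slave config set up in hw_params(), and the
	 * transfer is paced by the hardware DMA requests.
	 */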
	pair->desc[dir] = dmaengine_prep_dma_cyclic(
			pair->dma_chan[dir], 0xffff, 64, 64, DMA_DEV_TO_DEV, 0);
	if (!pair->desc[dir]) {
		dev_err(dev, "failed to prepare slave DMA for Back-End\n");
		return -ENOMEM;
	}

	dmaengine_submit(pair->desc[dir]);

	return 0;
}

static int fsl_asrc_dma_trigger(struct snd_soc_component *component,
				struct snd_pcm_substream *substream, int cmd)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		ret = fsl_asrc_dma_prepare_and_submit(substream, component);
		if (ret)
			return ret;
		dma_async_issue_pending(pair->dma_chan[IN]);
		dma_async_issue_pending(pair->dma_chan[OUT]);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		dmaengine_terminate_async(pair->dma_chan[OUT]);
		dmaengine_terminate_async(pair->dma_chan[IN]);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

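/*
 * hw_params() wires up both halves of an ASRC pair: the Front-End dmaengine
 * channel that moves audio between memory and one ASRC FIFO, and the Back-End
 * DEV_TO_DEV channel that moves the rate-converted stream between the other
 * ASRC FIFO and the peripheral FIFO.
 */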
static int fsl_asrc_dma_hw_params(struct snd_soc_component *component,
				  struct snd_pcm_substream *substream,
				  struct snd_pcm_hw_params *params)
{
	enum dma_slave_buswidth buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	enum sdma_peripheral_type be_peripheral_type = IMX_DMATYPE_SSI;
	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_dmaengine_dai_dma_data *dma_params_fe = NULL;
	struct snd_dmaengine_dai_dma_data *dma_params_be = NULL;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct dma_chan *tmp_chan = NULL, *be_chan = NULL;
	struct snd_soc_component *component_be = NULL;
	struct fsl_asrc *asrc = pair->asrc;
	struct dma_slave_config config_fe = {}, config_be = {};
	struct sdma_peripheral_config audio_config;
	enum asrc_pair_index index = pair->index;
	struct device *dev = component->dev;
	struct device_node *of_dma_node;
	int stream = substream->stream;
	struct imx_dma_data *tmp_data;
	struct snd_soc_dpcm *dpcm;
	struct device *dev_be;
	u8 dir = tx ? OUT : IN;
	dma_cap_mask_t mask;
	int ret, width;

	/* Fetch the Back-End dma_data from DPCM */
	for_each_dpcm_be(rtd, stream, dpcm) {
		struct snd_soc_pcm_runtime *be = dpcm->be;
		struct snd_pcm_substream *substream_be;
		struct snd_soc_dai *dai = snd_soc_rtd_to_cpu(be, 0);

		if (dpcm->fe != rtd)
			continue;

		substream_be = snd_soc_dpcm_get_substream(be, stream);
		dma_params_be = snd_soc_dai_get_dma_data(dai, substream_be);
		dev_be = dai->dev;
		break;
	}

	if (!dma_params_be) {
		dev_err(dev, "failed to get the substream of Back-End\n");
		return -EINVAL;
	}

	/* Override dma_data of the Front-End and config its dmaengine */
	dma_params_fe = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream);
	dma_params_fe->addr = asrc->paddr + asrc->get_fifo_addr(!dir, index);
	dma_params_fe->maxburst = dma_params_be->maxburst;

	pair->dma_chan[!dir] = asrc->get_dma_channel(pair, !dir);
	if (!pair->dma_chan[!dir]) {
		dev_err(dev, "failed to request DMA channel\n");
		return -EINVAL;
	}

	ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &config_fe);
	if (ret) {
		dev_err(dev, "failed to prepare DMA config for Front-End\n");
		return ret;
	}

	ret = dmaengine_slave_config(pair->dma_chan[!dir], &config_fe);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Front-End\n");
		return ret;
	}

	/* Request and config DMA channel for Back-End */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dma_cap_set(DMA_CYCLIC, mask);

	/*
	 * The Back-End device might have already requested a DMA channel,
	 * so try to reuse it first, and request a new one only if there is
	 * none to reuse.
	 */
	component_be = snd_soc_lookup_component_nolocked(dev_be, SND_DMAENGINE_PCM_DRV_NAME);
	if (component_be) {
		be_chan = soc_component_to_pcm(component_be)->chan[substream->stream];
		tmp_chan = be_chan;
	}
	if (!tmp_chan) {
		tmp_chan = dma_request_chan(dev_be, tx ? "tx" : "rx");
		if (IS_ERR(tmp_chan)) {
			dev_err(dev, "failed to request DMA channel for Back-End\n");
			return -EINVAL;
		}
	}

	/*
	 * An EDMA DEV_TO_DEV channel is fixed and bound to the DMA event of
	 * its peripheral, unlike an SDMA channel, which is allocated
	 * dynamically. So there is no need to configure dma_request and
	 * dma_request2; instead, get the dma_chan of the Back-End device
	 * directly via dma_request_chan().
	 */
	if (!asrc->use_edma) {
		/* Get DMA request of Back-End */
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request = tmp_data->dma_request;
		be_peripheral_type = tmp_data->peripheral_type;
		if (!be_chan)
			dma_release_channel(tmp_chan);

		/* Get DMA request of Front-End */
		tmp_chan = asrc->get_dma_channel(pair, dir);
		tmp_data = tmp_chan->private;
		pair->dma_data.dma_request2 = tmp_data->dma_request;
		pair->dma_data.peripheral_type = tmp_data->peripheral_type;
		pair->dma_data.priority = tmp_data->priority;
		dma_release_channel(tmp_chan);

		of_dma_node = pair->dma_chan[!dir]->device->dev->of_node;
		pair->dma_chan[dir] =
			__dma_request_channel(&mask, filter, &pair->dma_data,
					      of_dma_node);
		pair->req_dma_chan = true;
	} else {
		pair->dma_chan[dir] = tmp_chan;
		/* Do not flag to release if we are reusing the Back-End one */
		pair->req_dma_chan = !be_chan;
	}

	if (!pair->dma_chan[dir]) {
		dev_err(dev, "failed to request DMA channel for Back-End\n");
		return -EINVAL;
	}

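	/*
	 * Map the physical width of the ASRC-side sample format onto a DMA
	 * bus width for the Back-End DEV_TO_DEV transfer.
	 */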
	width = snd_pcm_format_physical_width(asrc->asrc_format);
	if (width < 8 || width > 64)
		return -EINVAL;
	else if (width == 8)
		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
	else if (width == 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else if (width == 24)
		buswidth = DMA_SLAVE_BUSWIDTH_3_BYTES;
	else if (width <= 32)
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_8_BYTES;

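	/*
	 * Both ends of the Back-End transfer are device FIFOs (ASRC and
	 * peripheral), so use a symmetric DEV_TO_DEV slave config with the
	 * peripheral's burst size on both sides.
	 */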
	config_be.direction = DMA_DEV_TO_DEV;
	config_be.src_addr_width = buswidth;
	config_be.src_maxburst = dma_params_be->maxburst;
	config_be.dst_addr_width = buswidth;
	config_be.dst_maxburst = dma_params_be->maxburst;

	memset(&audio_config, 0, sizeof(audio_config));
	config_be.peripheral_config = &audio_config;
	config_be.peripheral_size = sizeof(audio_config);

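	/*
	 * Dual-FIFO peripherals (dual-channel SSI, S/PDIF) presumably expose
	 * two data FIFOs per direction, so tell the SDMA script to service
	 * both FIFOs on the peripheral side of the transfer.
	 */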
	if (tx && (be_peripheral_type == IMX_DMATYPE_SSI_DUAL ||
		   be_peripheral_type == IMX_DMATYPE_SPDIF))
		audio_config.n_fifos_dst = 2;
	if (!tx && (be_peripheral_type == IMX_DMATYPE_SSI_DUAL ||
		    be_peripheral_type == IMX_DMATYPE_SPDIF))
		audio_config.n_fifos_src = 2;

	if (tx) {
		config_be.src_addr = asrc->paddr + asrc->get_fifo_addr(OUT, index);
		config_be.dst_addr = dma_params_be->addr;
	} else {
		config_be.dst_addr = asrc->paddr + asrc->get_fifo_addr(IN, index);
		config_be.src_addr = dma_params_be->addr;
	}

	ret = dmaengine_slave_config(pair->dma_chan[dir], &config_be);
	if (ret) {
		dev_err(dev, "failed to config DMA channel for Back-End\n");
		if (pair->req_dma_chan)
			dma_release_channel(pair->dma_chan[dir]);
		return ret;
	}

	return 0;
}

static int fsl_asrc_dma_hw_free(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	u8 dir = tx ? OUT : IN;

	if (pair->dma_chan[!dir])
		dma_release_channel(pair->dma_chan[!dir]);

	/* release dev_to_dev chan if we aren't reusing the Back-End one */
	if (pair->dma_chan[dir] && pair->req_dma_chan)
		dma_release_channel(pair->dma_chan[dir]);

	pair->dma_chan[!dir] = NULL;
	pair->dma_chan[dir] = NULL;

	return 0;
}

static int fsl_asrc_dma_startup(struct snd_soc_component *component,
				struct snd_pcm_substream *substream)
{
	bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_dmaengine_dai_dma_data *dma_data;
	struct device *dev = component->dev;
	struct fsl_asrc *asrc = dev_get_drvdata(dev);
	struct fsl_asrc_pair *pair;
	struct dma_chan *tmp_chan = NULL;
	u8 dir = tx ? OUT : IN;
	bool release_pair = true;
	int ret = 0;

	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(dev, "failed to set pcm hw params periods\n");
		return ret;
	}

	pair = kzalloc(sizeof(*pair) + asrc->pair_priv_size, GFP_KERNEL);
	if (!pair)
		return -ENOMEM;

	pair->asrc = asrc;
	pair->private = (void *)pair + sizeof(struct fsl_asrc_pair);

	runtime->private_data = pair;

	/*
	 * Request a dummy pair, which will be released later.
	 * The request_pair() callback needs a channel count as input; for
	 * this dummy pair, temporarily request a single channel.
	 */
	ret = asrc->request_pair(1, pair);
	if (ret < 0) {
		dev_err(dev, "failed to request asrc pair\n");
		goto req_pair_err;
	}

	/* Request a dummy dma channel, which will be released later. */
	tmp_chan = asrc->get_dma_channel(pair, dir);
	if (!tmp_chan) {
		dev_err(dev, "failed to get dma channel\n");
		ret = -EINVAL;
		goto dma_chan_err;
	}

	dma_data = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream);

	/* Refine snd_imx_hardware according to the capabilities of the DMA channel. */
	ret = snd_dmaengine_pcm_refine_runtime_hwparams(substream,
							dma_data,
							&snd_imx_hardware,
							tmp_chan);
	if (ret < 0) {
		dev_err(dev, "failed to refine runtime hwparams\n");
		goto out;
	}

	release_pair = false;
	snd_soc_set_runtime_hwparams(substream, &snd_imx_hardware);

out:
	dma_release_channel(tmp_chan);

dma_chan_err:
	asrc->release_pair(pair);

req_pair_err:
	if (release_pair)
		kfree(pair);

	return ret;
}

static int fsl_asrc_dma_shutdown(struct snd_soc_component *component,
				 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;
	struct fsl_asrc *asrc;

	if (!pair)
		return 0;

	asrc = pair->asrc;

	if (asrc->pair[pair->index] == pair)
		asrc->pair[pair->index] = NULL;

	kfree(pair);

	return 0;
}

static snd_pcm_uframes_t
fsl_asrc_dma_pcm_pointer(struct snd_soc_component *component,
			 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct fsl_asrc_pair *pair = runtime->private_data;

	return bytes_to_frames(substream->runtime, pair->pos);
}

static int fsl_asrc_dma_pcm_new(struct snd_soc_component *component,
				struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	int ret;

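	/*
	 * The card device may not have a DMA mask set up by its bus; coerce a
	 * 32-bit mask so the preallocated DMA buffer can be mapped against it.
	 */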
	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(card->dev, "failed to set DMA mask\n");
		return ret;
	}

	return snd_pcm_set_fixed_buffer_all(pcm, SNDRV_DMA_TYPE_DEV,
					    card->dev, FSL_ASRC_DMABUF_SIZE);
}

struct snd_soc_component_driver fsl_asrc_component = {
	.name = DRV_NAME,
	.hw_params = fsl_asrc_dma_hw_params,
	.hw_free = fsl_asrc_dma_hw_free,
	.trigger = fsl_asrc_dma_trigger,
	.open = fsl_asrc_dma_startup,
	.close = fsl_asrc_dma_shutdown,
	.pointer = fsl_asrc_dma_pcm_pointer,
	.pcm_construct = fsl_asrc_dma_pcm_new,
	.legacy_dai_naming = 1,
};
EXPORT_SYMBOL_GPL(fsl_asrc_component);