// SPDX-License-Identifier: GPL-2.0
//
// Freescale ASRC ALSA SoC Platform (DMA) driver
//
// Copyright (C) 2014 Freescale Semiconductor, Inc.
//
// Author: Nicolin Chen <nicoleotsuka@gmail.com>

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/dma/imx-dma.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>

#include "fsl_asrc_common.h"

#define FSL_ASRC_DMABUF_SIZE	(256 * 1024)

static struct snd_pcm_hardware snd_imx_hardware = {
        .info = SNDRV_PCM_INFO_INTERLEAVED |
                SNDRV_PCM_INFO_BLOCK_TRANSFER |
                SNDRV_PCM_INFO_MMAP |
                SNDRV_PCM_INFO_MMAP_VALID,
        .buffer_bytes_max = FSL_ASRC_DMABUF_SIZE,
        .period_bytes_min = 128,
        .period_bytes_max = 65535, /* Limited by SDMA engine */
        .periods_min = 2,
        .periods_max = 255,
        .fifo_size = 0,
};

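/*
 * Channel filter used with __dma_request_channel(): accept only
 * general-purpose i.MX DMA channels and hand the imx_dma_data over
 * through chan->private.
 */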
static bool filter(struct dma_chan *chan, void *param)
{
        if (!imx_dma_is_general_purpose(chan))
                return false;

        chan->private = param;

        return true;
}

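/* Front-End cyclic DMA callback: advance the buffer position by one period */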
static void fsl_asrc_dma_complete(void *arg)
{
        struct snd_pcm_substream *substream = arg;
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;

        pair->pos += snd_pcm_lib_period_bytes(substream);
        if (pair->pos >= snd_pcm_lib_buffer_bytes(substream))
                pair->pos = 0;

        snd_pcm_period_elapsed(substream);
}

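/*
 * Prepare and submit cyclic descriptors for both DMA channels of a pair:
 * the Front-End channel moves audio between memory and the ASRC FIFO, the
 * Back-End channel moves it between the ASRC FIFO and the peripheral FIFO.
 */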
static int fsl_asrc_dma_prepare_and_submit(struct snd_pcm_substream *substream,
                                           struct snd_soc_component *component)
{
        u8 dir = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? OUT : IN;
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;
        struct device *dev = component->dev;
        unsigned long flags = DMA_CTRL_ACK;

        /* Prepare and submit Front-End DMA channel */
        if (!substream->runtime->no_period_wakeup)
                flags |= DMA_PREP_INTERRUPT;

        pair->pos = 0;
        pair->desc[!dir] = dmaengine_prep_dma_cyclic(
                        pair->dma_chan[!dir], runtime->dma_addr,
                        snd_pcm_lib_buffer_bytes(substream),
                        snd_pcm_lib_period_bytes(substream),
                        dir == OUT ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM, flags);
        if (!pair->desc[!dir]) {
                dev_err(dev, "failed to prepare slave DMA for Front-End\n");
                return -ENOMEM;
        }

        pair->desc[!dir]->callback = fsl_asrc_dma_complete;
        pair->desc[!dir]->callback_param = substream;

        dmaengine_submit(pair->desc[!dir]);

        /* Prepare and submit Back-End DMA channel */
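        /*
         * The 0xffff address and 64-byte buffer/period act as dummy values
         * here; the real FIFO addresses of this DEV_TO_DEV transfer come
         * from the dmaengine_slave_config() done in hw_params().
         */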
        pair->desc[dir] = dmaengine_prep_dma_cyclic(
                        pair->dma_chan[dir], 0xffff, 64, 64, DMA_DEV_TO_DEV, 0);
        if (!pair->desc[dir]) {
                dev_err(dev, "failed to prepare slave DMA for Back-End\n");
                return -ENOMEM;
        }

        dmaengine_submit(pair->desc[dir]);

        return 0;
}

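/* Start or stop both DMA channels of the pair on ALSA trigger commands */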
static int fsl_asrc_dma_trigger(struct snd_soc_component *component,
                                struct snd_pcm_substream *substream, int cmd)
{
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;
        int ret;

        switch (cmd) {
        case SNDRV_PCM_TRIGGER_START:
        case SNDRV_PCM_TRIGGER_RESUME:
        case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
                ret = fsl_asrc_dma_prepare_and_submit(substream, component);
                if (ret)
                        return ret;
                dma_async_issue_pending(pair->dma_chan[IN]);
                dma_async_issue_pending(pair->dma_chan[OUT]);
                break;
        case SNDRV_PCM_TRIGGER_STOP:
        case SNDRV_PCM_TRIGGER_SUSPEND:
        case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
                dmaengine_terminate_async(pair->dma_chan[OUT]);
                dmaengine_terminate_async(pair->dma_chan[IN]);
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

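/*
 * hw_params: configure the Front-End channel from the ALSA hw params and
 * set up a DEV_TO_DEV Back-End channel between the ASRC FIFO and the
 * Back-End peripheral found through DPCM.
 */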
static int fsl_asrc_dma_hw_params(struct snd_soc_component *component,
                                  struct snd_pcm_substream *substream,
                                  struct snd_pcm_hw_params *params)
{
        enum dma_slave_buswidth buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
        enum sdma_peripheral_type be_peripheral_type = IMX_DMATYPE_SSI;
        struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
        bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
        struct snd_dmaengine_dai_dma_data *dma_params_fe = NULL;
        struct snd_dmaengine_dai_dma_data *dma_params_be = NULL;
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;
        struct dma_chan *tmp_chan = NULL, *be_chan = NULL;
        struct snd_soc_component *component_be = NULL;
        struct fsl_asrc *asrc = pair->asrc;
        struct dma_slave_config config_fe = {}, config_be = {};
        struct sdma_peripheral_config audio_config;
        enum asrc_pair_index index = pair->index;
        struct device *dev = component->dev;
        struct device_node *of_dma_node;
        int stream = substream->stream;
        struct imx_dma_data *tmp_data;
        struct snd_soc_dpcm *dpcm;
        struct device *dev_be;
        u8 dir = tx ? OUT : IN;
        dma_cap_mask_t mask;
        int ret, width;

        /* Fetch the Back-End dma_data from DPCM */
        for_each_dpcm_be(rtd, stream, dpcm) {
                struct snd_soc_pcm_runtime *be = dpcm->be;
                struct snd_pcm_substream *substream_be;
                struct snd_soc_dai *dai = snd_soc_rtd_to_cpu(be, 0);

                if (dpcm->fe != rtd)
                        continue;

                substream_be = snd_soc_dpcm_get_substream(be, stream);
                dma_params_be = snd_soc_dai_get_dma_data(dai, substream_be);
                dev_be = dai->dev;
                break;
        }

        if (!dma_params_be) {
                dev_err(dev, "failed to get the substream of Back-End\n");
                return -EINVAL;
        }

        /* Override dma_data of the Front-End and config its dmaengine */
        dma_params_fe = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream);
        dma_params_fe->addr = asrc->paddr + asrc->get_fifo_addr(!dir, index);
        dma_params_fe->maxburst = dma_params_be->maxburst;

        pair->dma_chan[!dir] = asrc->get_dma_channel(pair, !dir);
        if (!pair->dma_chan[!dir]) {
                dev_err(dev, "failed to request DMA channel\n");
                return -EINVAL;
        }

        ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &config_fe);
        if (ret) {
                dev_err(dev, "failed to prepare DMA config for Front-End\n");
                return ret;
        }

        ret = dmaengine_slave_config(pair->dma_chan[!dir], &config_fe);
        if (ret) {
                dev_err(dev, "failed to config DMA channel for Front-End\n");
                return ret;
        }

        /* Request and config DMA channel for Back-End */
        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);
        dma_cap_set(DMA_CYCLIC, mask);

        /*
         * The Back-End device might have already requested a DMA channel,
         * so try to reuse it first, and then request a new one upon NULL.
         */
        component_be = snd_soc_lookup_component_nolocked(dev_be, SND_DMAENGINE_PCM_DRV_NAME);
        if (component_be) {
                be_chan = soc_component_to_pcm(component_be)->chan[substream->stream];
                tmp_chan = be_chan;
        }
        if (!tmp_chan) {
                tmp_chan = dma_request_chan(dev_be, tx ? "tx" : "rx");
                if (IS_ERR(tmp_chan)) {
                        dev_err(dev, "failed to request DMA channel for Back-End\n");
                        return -EINVAL;
                }
        }

        /*
         * An EDMA DEV_TO_DEV channel is fixed and bound with DMA event of each
         * peripheral, unlike SDMA channel that is allocated dynamically. So no
         * need to configure dma_request and dma_request2, but get dma_chan of
         * Back-End device directly via dma_request_chan.
         */
        if (!asrc->use_edma) {
                /* Get DMA request of Back-End */
                tmp_data = tmp_chan->private;
                pair->dma_data.dma_request = tmp_data->dma_request;
                be_peripheral_type = tmp_data->peripheral_type;
                if (!be_chan)
                        dma_release_channel(tmp_chan);

                /* Get DMA request of Front-End */
                tmp_chan = asrc->get_dma_channel(pair, dir);
                tmp_data = tmp_chan->private;
                pair->dma_data.dma_request2 = tmp_data->dma_request;
                pair->dma_data.peripheral_type = tmp_data->peripheral_type;
                pair->dma_data.priority = tmp_data->priority;
                dma_release_channel(tmp_chan);

                of_dma_node = pair->dma_chan[!dir]->device->dev->of_node;
                pair->dma_chan[dir] =
                        __dma_request_channel(&mask, filter, &pair->dma_data,
                                              of_dma_node);
                pair->req_dma_chan = true;
        } else {
                pair->dma_chan[dir] = tmp_chan;
                /* Do not flag to release if we are reusing the Back-End one */
                pair->req_dma_chan = !be_chan;
        }

        if (!pair->dma_chan[dir]) {
                dev_err(dev, "failed to request DMA channel for Back-End\n");
                return -EINVAL;
        }

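        /* Derive the Back-End DMA bus width from the ASRC output sample format */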
        width = snd_pcm_format_physical_width(asrc->asrc_format);
        if (width < 8 || width > 64)
                return -EINVAL;
        else if (width == 8)
                buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
        else if (width == 16)
                buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
        else if (width == 24)
                buswidth = DMA_SLAVE_BUSWIDTH_3_BYTES;
        else if (width <= 32)
                buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
        else
                buswidth = DMA_SLAVE_BUSWIDTH_8_BYTES;

        config_be.direction = DMA_DEV_TO_DEV;
        config_be.src_addr_width = buswidth;
        config_be.src_maxburst = dma_params_be->maxburst;
        config_be.dst_addr_width = buswidth;
        config_be.dst_maxburst = dma_params_be->maxburst;

        memset(&audio_config, 0, sizeof(audio_config));
        config_be.peripheral_config = &audio_config;
        config_be.peripheral_size = sizeof(audio_config);

        if (tx && (be_peripheral_type == IMX_DMATYPE_SSI_DUAL ||
                   be_peripheral_type == IMX_DMATYPE_SPDIF))
                audio_config.n_fifos_dst = 2;
        if (!tx && (be_peripheral_type == IMX_DMATYPE_SSI_DUAL ||
                    be_peripheral_type == IMX_DMATYPE_SPDIF))
                audio_config.n_fifos_src = 2;

        if (tx) {
                config_be.src_addr = asrc->paddr + asrc->get_fifo_addr(OUT, index);
                config_be.dst_addr = dma_params_be->addr;
        } else {
                config_be.dst_addr = asrc->paddr + asrc->get_fifo_addr(IN, index);
                config_be.src_addr = dma_params_be->addr;
        }

        ret = dmaengine_slave_config(pair->dma_chan[dir], &config_be);
        if (ret) {
                dev_err(dev, "failed to config DMA channel for Back-End\n");
                if (pair->req_dma_chan)
                        dma_release_channel(pair->dma_chan[dir]);
                return ret;
        }

        return 0;
}

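/* Release the DMA channels of the pair; keep a reused Back-End channel alive */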
static int fsl_asrc_dma_hw_free(struct snd_soc_component *component,
                                struct snd_pcm_substream *substream)
{
        bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;
        u8 dir = tx ? OUT : IN;

        if (pair->dma_chan[!dir])
                dma_release_channel(pair->dma_chan[!dir]);

        /* release dev_to_dev chan if we aren't reusing the Back-End one */
        if (pair->dma_chan[dir] && pair->req_dma_chan)
                dma_release_channel(pair->dma_chan[dir]);

        pair->dma_chan[!dir] = NULL;
        pair->dma_chan[dir] = NULL;

        return 0;
}

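/*
 * open: allocate an ASRC pair, attach it to the runtime and refine the
 * PCM hardware constraints against the capabilities of the DMA engine.
 */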
static int fsl_asrc_dma_startup(struct snd_soc_component *component,
                                struct snd_pcm_substream *substream)
{
        bool tx = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
        struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct snd_dmaengine_dai_dma_data *dma_data;
        struct device *dev = component->dev;
        struct fsl_asrc *asrc = dev_get_drvdata(dev);
        struct fsl_asrc_pair *pair;
        struct dma_chan *tmp_chan = NULL;
        u8 dir = tx ? OUT : IN;
        bool release_pair = true;
        int ret = 0;

        ret = snd_pcm_hw_constraint_integer(substream->runtime,
                                            SNDRV_PCM_HW_PARAM_PERIODS);
        if (ret < 0) {
                dev_err(dev, "failed to set pcm hw params periods\n");
                return ret;
        }

        pair = kzalloc(sizeof(*pair) + asrc->pair_priv_size, GFP_KERNEL);
        if (!pair)
                return -ENOMEM;

        pair->asrc = asrc;
        pair->private = (void *)pair + sizeof(struct fsl_asrc_pair);

        runtime->private_data = pair;

        /* Request a dummy pair, which will be released later.
         * The request_pair function needs a channel count as input; for this
         * dummy pair we temporarily request just one channel.
         */
        ret = asrc->request_pair(1, pair);
        if (ret < 0) {
                dev_err(dev, "failed to request asrc pair\n");
                goto req_pair_err;
        }

        /* Request a dummy dma channel, which will be released later. */
        tmp_chan = asrc->get_dma_channel(pair, dir);
        if (!tmp_chan) {
                dev_err(dev, "failed to get dma channel\n");
                ret = -EINVAL;
                goto dma_chan_err;
        }

        dma_data = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream);

        /* Refine the snd_imx_hardware according to caps of DMA. */
        ret = snd_dmaengine_pcm_refine_runtime_hwparams(substream,
                                                        dma_data,
                                                        &snd_imx_hardware,
                                                        tmp_chan);
        if (ret < 0) {
                dev_err(dev, "failed to refine runtime hwparams\n");
                goto out;
        }

        release_pair = false;
        snd_soc_set_runtime_hwparams(substream, &snd_imx_hardware);

out:
        dma_release_channel(tmp_chan);

dma_chan_err:
        asrc->release_pair(pair);

req_pair_err:
        if (release_pair)
                kfree(pair);

        return ret;
}

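/* close: detach the pair from the ASRC context and free it */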
static int fsl_asrc_dma_shutdown(struct snd_soc_component *component,
                                 struct snd_pcm_substream *substream)
{
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;
        struct fsl_asrc *asrc;

        if (!pair)
                return 0;

        asrc = pair->asrc;

        if (asrc->pair[pair->index] == pair)
                asrc->pair[pair->index] = NULL;

        kfree(pair);

        return 0;
}

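/* Report the position tracked by the Front-End DMA completion callback */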
static snd_pcm_uframes_t
fsl_asrc_dma_pcm_pointer(struct snd_soc_component *component,
                         struct snd_pcm_substream *substream)
{
        struct snd_pcm_runtime *runtime = substream->runtime;
        struct fsl_asrc_pair *pair = runtime->private_data;

        return bytes_to_frames(substream->runtime, pair->pos);
}

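/* Set a 32-bit DMA mask and preallocate the DMA buffers for all streams */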
static int fsl_asrc_dma_pcm_new(struct snd_soc_component *component,
                                struct snd_soc_pcm_runtime *rtd)
{
        struct snd_card *card = rtd->card->snd_card;
        struct snd_pcm *pcm = rtd->pcm;
        int ret;

        ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
        if (ret) {
                dev_err(card->dev, "failed to set DMA mask\n");
                return ret;
        }

        return snd_pcm_set_fixed_buffer_all(pcm, SNDRV_DMA_TYPE_DEV,
                                            card->dev, FSL_ASRC_DMABUF_SIZE);
}

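/* ASoC PCM component shared by the fsl_asrc and fsl_easrc platform drivers */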
struct snd_soc_component_driver fsl_asrc_component = {
        .name = DRV_NAME,
        .hw_params = fsl_asrc_dma_hw_params,
        .hw_free = fsl_asrc_dma_hw_free,
        .trigger = fsl_asrc_dma_trigger,
        .open = fsl_asrc_dma_startup,
        .close = fsl_asrc_dma_shutdown,
        .pointer = fsl_asrc_dma_pcm_pointer,
        .pcm_construct = fsl_asrc_dma_pcm_new,
        .legacy_dai_naming = 1,
};
EXPORT_SYMBOL_GPL(fsl_asrc_component);