// SPDX-License-Identifier: GPL-2.0
/*
 * MUSB OTG driver - support for Mentor's DMA controller
 *
 * Copyright 2005 Mentor Graphics Corporation
 * Copyright (C) 2005-2007 by Texas Instruments
 */
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "musb_core.h"
#include "musb_dma.h"

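/*
 * Each DMA channel owns a 0x10-byte register window starting at
 * MUSB_HSDMA_BASE: a 16-bit CONTROL register plus 32-bit ADDRESS and
 * COUNT registers.  The helpers below compute the channel-relative
 * offset and wrap the 32-bit accesses to ADDRESS and COUNT.
 */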
#define MUSB_HSDMA_CHANNEL_OFFSET(_bchannel, _offset)		\
		(MUSB_HSDMA_BASE + (_bchannel << 4) + _offset)

#define musb_read_hsdma_addr(mbase, bchannel)	\
	musb_readl(mbase,	\
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS))

#define musb_write_hsdma_addr(mbase, bchannel, addr) \
	musb_writel(mbase, \
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS), \
		addr)

#define musb_read_hsdma_count(mbase, bchannel)	\
	musb_readl(mbase, \
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT))

#define musb_write_hsdma_count(mbase, bchannel, len) \
	musb_writel(mbase, \
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT), \
		len)

/* control register (16-bit): */
#define MUSB_HSDMA_ENABLE_SHIFT		0
#define MUSB_HSDMA_TRANSMIT_SHIFT	1
#define MUSB_HSDMA_MODE1_SHIFT		2
#define MUSB_HSDMA_IRQENABLE_SHIFT	3
#define MUSB_HSDMA_ENDPOINT_SHIFT	4
#define MUSB_HSDMA_BUSERROR_SHIFT	8
#define MUSB_HSDMA_BURSTMODE_SHIFT	9
#define MUSB_HSDMA_BURSTMODE		(3 << MUSB_HSDMA_BURSTMODE_SHIFT)
#define MUSB_HSDMA_BURSTMODE_UNSPEC	0
#define MUSB_HSDMA_BURSTMODE_INCR4	1
#define MUSB_HSDMA_BURSTMODE_INCR8	2
#define MUSB_HSDMA_BURSTMODE_INCR16	3
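
/*
 * Illustrative only: for a TX channel on endpoint 1 using DMA mode 1
 * and INCR16 bursts, configure_channel() below ends up writing a
 * control word equivalent to
 *
 *	csr = (1 << MUSB_HSDMA_ENABLE_SHIFT)
 *		| (1 << MUSB_HSDMA_TRANSMIT_SHIFT)
 *		| (1 << MUSB_HSDMA_MODE1_SHIFT)
 *		| (1 << MUSB_HSDMA_IRQENABLE_SHIFT)
 *		| (1 << MUSB_HSDMA_ENDPOINT_SHIFT)
 *		| (MUSB_HSDMA_BURSTMODE_INCR16 << MUSB_HSDMA_BURSTMODE_SHIFT);
 */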

#define MUSB_HSDMA_CHANNELS		8

struct musb_dma_controller;

struct musb_dma_channel {
	struct dma_channel		channel;
	struct musb_dma_controller	*controller;
	u32				start_addr;
	u32				len;
	u16				max_packet_sz;
	u8				idx;
	u8				epnum;
	u8				transmit;
};

struct musb_dma_controller {
	struct dma_controller		controller;
	struct musb_dma_channel		channel[MUSB_HSDMA_CHANNELS];
	void				*private_data;
	void __iomem			*base;
	u8				channel_count;
	u8				used_channels;
	int				irq;
};

static void dma_channel_release(struct dma_channel *channel);

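/*
 * Called on controller teardown.  Any channel still marked in
 * used_channels at this point is force-released; getting here with
 * active channels indicates a driver bug, hence the dev_err().
 */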
static void dma_controller_stop(struct musb_dma_controller *controller)
{
	struct musb *musb = controller->private_data;
	struct dma_channel *channel;
	u8 bit;

	if (controller->used_channels != 0) {
		dev_err(musb->controller,
			"Stopping DMA controller while channel active\n");

		for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
			if (controller->used_channels & (1 << bit)) {
				channel = &controller->channel[bit].channel;
				dma_channel_release(channel);

				if (!controller->used_channels)
					break;
			}
		}
	}
}

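/*
 * Hand out the lowest-numbered free channel, mark it in the
 * used_channels bitmask, and bind it to the given hardware endpoint and
 * direction.  The generic struct dma_channel handed back to the caller
 * is embedded in our per-channel state.  Returns NULL if all
 * MUSB_HSDMA_CHANNELS channels are in use.
 */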
static struct dma_channel *dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 transmit)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);
	struct musb_dma_channel *musb_channel = NULL;
	struct dma_channel *channel = NULL;
	u8 bit;

	for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
		if (!(controller->used_channels & (1 << bit))) {
			controller->used_channels |= (1 << bit);
			musb_channel = &(controller->channel[bit]);
			musb_channel->controller = controller;
			musb_channel->idx = bit;
			musb_channel->epnum = hw_ep->epnum;
			musb_channel->transmit = transmit;
			channel = &(musb_channel->channel);
			channel->private_data = musb_channel;
			channel->status = MUSB_DMA_STATUS_FREE;
			channel->max_len = 0x100000;
			/* Tx => mode 1; Rx => mode 0 */
			channel->desired_mode = transmit;
			channel->actual_len = 0;
			break;
		}
	}

	return channel;
}

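/*
 * Return a channel to the free pool: clear the per-channel bookkeeping,
 * drop its bit from used_channels, and mark its status UNKNOWN so a
 * stale handle cannot be programmed again without reallocation.
 */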
static void dma_channel_release(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;

	channel->actual_len = 0;
	musb_channel->start_addr = 0;
	musb_channel->len = 0;

	musb_channel->controller->used_channels &=
		~(1 << musb_channel->idx);

	channel->status = MUSB_DMA_STATUS_UNKNOWN;
}

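/*
 * Program one transfer into the channel's register window: DMA mode,
 * burst mode, endpoint number and direction are folded into the control
 * word, ADDRESS and COUNT are written, and the final control write with
 * the ENABLE bit set starts the transfer.  In mode 1 the length must
 * cover at least one full packet.
 */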
static void configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;
	void __iomem *mbase = controller->base;
	u8 bchannel = musb_channel->idx;
	u16 csr = 0;

	musb_dbg(musb, "%p, pkt_sz %d, addr %pad, len %d, mode %d",
			channel, packet_sz, &dma_addr, len, mode);

	if (mode) {
		csr |= 1 << MUSB_HSDMA_MODE1_SHIFT;
		BUG_ON(len < packet_sz);
	}
	csr |= MUSB_HSDMA_BURSTMODE_INCR16
				<< MUSB_HSDMA_BURSTMODE_SHIFT;

	csr |= (musb_channel->epnum << MUSB_HSDMA_ENDPOINT_SHIFT)
		| (1 << MUSB_HSDMA_ENABLE_SHIFT)
		| (1 << MUSB_HSDMA_IRQENABLE_SHIFT)
		| (musb_channel->transmit
				? (1 << MUSB_HSDMA_TRANSMIT_SHIFT)
				: 0);

	/* address/count */
	musb_write_hsdma_addr(mbase, bchannel, dma_addr);
	musb_write_hsdma_count(mbase, bchannel, len);

	/* control (this should start things) */
	musb_writew(mbase,
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
		csr);
}

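/*
 * Entry point used by the MUSB core through the channel_program method.
 * Returns true when the transfer has been accepted and started, or false
 * so the caller can fall back to PIO - notably for buffers that are not
 * 32-bit aligned on RTL 1.8 and later, where the engine silently masks
 * the low two address bits.
 */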
static int dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;

	musb_dbg(musb, "ep%d-%s pkt_sz %d, dma_addr %pad length %d, mode %d",
		musb_channel->epnum,
		musb_channel->transmit ? "Tx" : "Rx",
		packet_sz, &dma_addr, len, mode);

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	/*
	 * The DMA engine in RTL1.8 and above cannot handle
	 * DMA addresses that are not aligned to a 4 byte boundary.
	 * It ends up masking the last two bits of the address
	 * programmed in DMA_ADDR.
	 *
	 * Fail such DMA transfers, so that the backup PIO mode
	 * can carry out the transfer
	 */
	if ((musb->hwvers >= MUSB_HWVERS_1800) && (dma_addr % 4))
		return false;

	channel->actual_len = 0;
	musb_channel->start_addr = dma_addr;
	musb_channel->len = len;
	musb_channel->max_packet_sz = packet_sz;
	channel->status = MUSB_DMA_STATUS_BUSY;

	configure_channel(channel, packet_sz, mode, dma_addr, len);

	return true;
}

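/*
 * Abort a transfer that is still marked busy.  Per the programming
 * guide, DMAENAB must be cleared in the endpoint CSR before DMAMODE;
 * once the endpoint side is quiesced, the channel's CONTROL, ADDRESS
 * and COUNT registers are zeroed and the channel returns to FREE.
 */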
static int dma_channel_abort(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	void __iomem *mbase = musb_channel->controller->base;
	struct musb *musb = musb_channel->controller->private_data;

	u8 bchannel = musb_channel->idx;
	int offset;
	u16 csr;

	if (channel->status == MUSB_DMA_STATUS_BUSY) {
		if (musb_channel->transmit) {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_TXCSR);

			/*
			 * The programming guide says that we must clear
			 * the DMAENAB bit before the DMAMODE bit...
			 */
			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_TXCSR_AUTOSET | MUSB_TXCSR_DMAENAB);
			musb_writew(mbase, offset, csr);
			csr &= ~MUSB_TXCSR_DMAMODE;
			musb_writew(mbase, offset, csr);
		} else {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_RXCSR);

			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_RXCSR_AUTOCLEAR |
				 MUSB_RXCSR_DMAENAB |
				 MUSB_RXCSR_DMAMODE);
			musb_writew(mbase, offset, csr);
		}

		musb_writew(mbase,
			MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
			0);
		musb_write_hsdma_addr(mbase, bchannel, 0);
		musb_write_hsdma_count(mbase, bchannel, 0);
		channel->status = MUSB_DMA_STATUS_FREE;
	}

	return 0;
}

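/*
 * Shared DMA interrupt handler.  It reads (and clears) the pending
 * channel mask from MUSB_HSDMA_INTR; if that reads back zero, busy
 * channels whose COUNT register has reached zero are treated as pending
 * anyway, to work around missed interrupts.  For each completed channel
 * the transferred length is derived from the ADDRESS register, and for
 * TX channels the handler sets TXPKTRDY whenever the hardware will not
 * do so itself (mode 0, or a short final packet after mode-1 DMA)
 * before signalling completion to the core.
 */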
irqreturn_t dma_controller_irq(int irq, void *private_data)
{
	struct musb_dma_controller *controller = private_data;
	struct musb *musb = controller->private_data;
	struct musb_dma_channel *musb_channel;
	struct dma_channel *channel;

	void __iomem *mbase = controller->base;

	irqreturn_t retval = IRQ_NONE;

	unsigned long flags;

	u8 bchannel;
	u8 int_hsdma;

	u32 addr, count;
	u16 csr;

	spin_lock_irqsave(&musb->lock, flags);

	int_hsdma = musb_clearb(mbase, MUSB_HSDMA_INTR);

	if (!int_hsdma) {
		musb_dbg(musb, "spurious DMA irq");

		for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;
			if (channel->status == MUSB_DMA_STATUS_BUSY) {
				count = musb_read_hsdma_count(mbase, bchannel);

				if (count == 0)
					int_hsdma |= (1 << bchannel);
			}
		}

		musb_dbg(musb, "int_hsdma = 0x%x", int_hsdma);

		if (!int_hsdma)
			goto done;
	}

	for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
		if (int_hsdma & (1 << bchannel)) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;

			csr = musb_readw(mbase,
					MUSB_HSDMA_CHANNEL_OFFSET(bchannel,
							MUSB_HSDMA_CONTROL));

			if (csr & (1 << MUSB_HSDMA_BUSERROR_SHIFT)) {
				musb_channel->channel.status =
					MUSB_DMA_STATUS_BUS_ABORT;
			} else {
				addr = musb_read_hsdma_addr(mbase,
						bchannel);
				channel->actual_len = addr
					- musb_channel->start_addr;

				musb_dbg(musb, "ch %p, 0x%x -> 0x%x (%zu / %d) %s",
					channel, musb_channel->start_addr,
					addr, channel->actual_len,
					musb_channel->len,
					(channel->actual_len
						< musb_channel->len) ?
					"=> reconfig 0" : "=> complete");

				channel->status = MUSB_DMA_STATUS_FREE;

				/* completed */
				if (musb_channel->transmit &&
				    (!channel->desired_mode ||
				    (channel->actual_len %
					    musb_channel->max_packet_sz))) {
					u8  epnum  = musb_channel->epnum;
					int offset = musb->io.ep_offset(epnum,
								MUSB_TXCSR);
					u16 txcsr;

					/*
					 * The programming guide says that we
					 * must clear DMAENAB before DMAMODE.
					 */
					musb_ep_select(mbase, epnum);
					txcsr = musb_readw(mbase, offset);
					if (channel->desired_mode == 1) {
						txcsr &= ~(MUSB_TXCSR_DMAENAB
							| MUSB_TXCSR_AUTOSET);
						musb_writew(mbase, offset, txcsr);
						/* Send out the packet */
						txcsr &= ~MUSB_TXCSR_DMAMODE;
						txcsr |= MUSB_TXCSR_DMAENAB;
					}
					txcsr |= MUSB_TXCSR_TXPKTRDY;
					musb_writew(mbase, offset, txcsr);
				}
				musb_dma_completion(musb, musb_channel->epnum,
						    musb_channel->transmit);
			}
		}
	}

	retval = IRQ_HANDLED;
done:
	spin_unlock_irqrestore(&musb->lock, flags);
	return retval;
}
EXPORT_SYMBOL_GPL(dma_controller_irq);

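/*
 * Counterpart to the creation functions below: stop the controller,
 * release the DMA interrupt if one was requested, and free the state.
 */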
void musbhs_dma_controller_destroy(struct dma_controller *c)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);

	dma_controller_stop(controller);

	if (controller->irq)
		free_irq(controller->irq, c);

	kfree(controller);
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_destroy);

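/*
 * Common constructor shared by the irq and noirq creation paths:
 * allocate the controller state and fill in the dma_controller method
 * pointers used by the MUSB core.
 */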
static struct musb_dma_controller *
dma_controller_alloc(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		return NULL;

	controller->channel_count = MUSB_HSDMA_CHANNELS;
	controller->private_data = musb;
	controller->base = base;

	controller->controller.channel_alloc = dma_channel_allocate;
	controller->controller.channel_release = dma_channel_release;
	controller->controller.channel_program = dma_channel_program;
	controller->controller.channel_abort = dma_channel_abort;
	return controller;
}

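/*
 * Creation path for platforms whose MUSB DMA engine has its own
 * interrupt line, looked up by the name "dma" on the platform device.
 * Glue drivers typically plug this function (and
 * musbhs_dma_controller_destroy above) into their platform ops; see the
 * noirq variant at the end of the file for controllers whose DMA
 * interrupt is multiplexed with the main MUSB interrupt.
 */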
struct dma_controller *
musbhs_dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;
	struct device *dev = musb->controller;
	struct platform_device *pdev = to_platform_device(dev);
	int irq = platform_get_irq_byname(pdev, "dma");

	if (irq <= 0) {
		dev_err(dev, "No DMA interrupt line!\n");
		return NULL;
	}

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	if (request_irq(irq, dma_controller_irq, 0,
			dev_name(musb->controller), controller)) {
		dev_err(dev, "request_irq %d failed!\n", irq);
		musb_dma_controller_destroy(&controller->controller);

		return NULL;
	}

	controller->irq = irq;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create);

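/*
 * Variant for glue layers that demultiplex the DMA interrupt themselves:
 * no interrupt is requested here and controller->irq stays zero, so the
 * destroy path skips free_irq().  The glue is expected to call the
 * exported dma_controller_irq() from its own interrupt handler.
 */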
struct dma_controller *
musbhs_dma_controller_create_noirq(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create_noirq);