#include <linux/module.h>
#include <linux/slab.h>

#define DRIVER_NAME "timb-dma"
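
/* Global DMA control register; writing TIMBDMA_32BIT_ADDR to TIMBDMA_ACR
 * presumably selects 32-bit addressing for the controller.
 */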
#define TIMBDMA_ACR            0x34
#define TIMBDMA_32BIT_ADDR     0x01
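
/* Interrupt status (ISR), pending (IPR) and enable (IER) registers; judging by
 * the (1 << id) masks used further down, each channel owns one bit here.
 */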
#define TIMBDMA_ISR            0x080000
#define TIMBDMA_IPR            0x080004
#define TIMBDMA_IER            0x080008
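
/* Per-channel register instances appear to be spaced TIMBDMA_INSTANCE_OFFSET
 * bytes apart, with the TX registers starting TIMBDMA_INSTANCE_TX_OFFSET bytes
 * into each instance.
 */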
#define TIMBDMA_INSTANCE_OFFSET        0x40
#define TIMBDMA_INSTANCE_TX_OFFSET     0x18
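
/* RX registers, relative to the instance base */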
#define TIMBDMA_OFFS_RX_DHAR   0x00
#define TIMBDMA_OFFS_RX_DLAR   0x04
#define TIMBDMA_OFFS_RX_LR     0x0C
#define TIMBDMA_OFFS_RX_BLR    0x10
#define TIMBDMA_OFFS_RX_ER     0x14
#define TIMBDMA_RX_EN          0x01

#define TIMBDMA_OFFS_RX_BPRR   0x30
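
/* TX registers, relative to the instance base */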
#define TIMBDMA_OFFS_TX_DHAR   0x00
#define TIMBDMA_OFFS_TX_DLAR   0x04
#define TIMBDMA_OFFS_TX_BLR    0x0C
#define TIMBDMA_OFFS_TX_LR     0x14
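
/* Each hardware descriptor element is TIMB_DMA_DESC_SIZE (8) bytes. From the
 * descriptor pack/unpack code below, the layout appears to be: byte 0 control
 * flags, bytes 2-3 the transfer length and bytes 4-7 the bus address, with the
 * multi-byte fields stored little-endian.
 */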
#define TIMB_DMA_DESC_SIZE     8
        return &chan->dev->device;

        return chan2dev(chan)->parent->parent;
        int id = td_chan->chan.chan_id;
static void __td_enable_chan_irq(struct timb_dma_chan *td_chan)

        int id = td_chan->chan.chan_id;

        dev_dbg(chan2dev(&td_chan->chan), "Enabling irq: %d, IER: 0x%x\n", id,
                ier);
        int id = td_chan->chan.chan_id;

        dev_dbg(chan2dev(&td_chan->chan), "Checking irq: %d, td: %p\n", id, td);
        addr = (dma_desc[7] << 24) | (dma_desc[6] << 16) | (dma_desc[5] << 8) |
                dma_desc[4];

        len = (dma_desc[3] << 8) | dma_desc[2];
static void __td_unmap_descs(struct timb_dma_desc *td_desc, bool single)

                __td_unmap_desc(td_chan, descs, single);
                dev_err(chan2dev(&td_chan->chan), "Too big sg element\n");
                dev_err(chan2dev(&td_chan->chan), "Incorrect length: %d\n",
                        sg_dma_len(sg));
        dev_dbg(chan2dev(&td_chan->chan), "desc: %p, addr: 0x%llx\n",
                dma_desc, (unsigned long long)sg_dma_address(sg));
        dma_desc[0] = 0x21 | (last ? 0x02 : 0);

                        "Transfer already ongoing\n");

                "td_chan: %p, chan: %d, membase: %p\n",

                __td_enable_chan_irq(td_chan);
        dev_dbg(chan2dev(&td_chan->chan), "descriptor %u complete\n",
                txd->cookie);

        dma_cookie_complete(txd);

                __td_unmap_descs(td_desc,
        for (i = 0; i < td->dma.chancnt; i++) {
        dev_dbg(chan2dev(&td_chan->chan), "%s: started %u\n",
                __func__, td_desc->txd.cookie);

        __td_start_dma(td_chan);
        spin_lock_bh(&td_chan->lock);
        cookie = dma_cookie_assign(txd);
                dev_dbg(chan2dev(txd->chan), "%s: started %u\n", __func__,
                        txd->cookie);

                __td_start_dma(td_chan);
                dev_dbg(chan2dev(txd->chan), "tx_submit: queued %u\n",
                        txd->cookie);

        spin_unlock_bh(&td_chan->lock);
                dev_err(chan2dev(chan), "Failed to alloc descriptor\n");

                dev_err(chan2dev(chan), "Failed to alloc descriptor\n");
        td_desc->txd.tx_submit = td_tx_submit;

                dev_err(chan2dev(chan), "DMA mapping error: %d\n", err);
        dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc);
        dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc);

        spin_lock_bh(&td_chan->lock);
        spin_unlock_bh(&td_chan->lock);
        spin_lock_bh(&td_chan->lock);

                if (async_tx_test_ack(&td_desc->txd)) {
                dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n",
                        td_desc);

        spin_unlock_bh(&td_chan->lock);
static int td_alloc_chan_resources(struct dma_chan *chan)

        dev_dbg(chan2dev(chan), "%s: entry\n", __func__);

        for (i = 0; i < td_chan->descs; i++) {
471 "Couldnt allocate any descriptors\n");
476 td_desc_put(td_chan, td_desc);
        spin_lock_bh(&td_chan->lock);
        dma_cookie_init(chan);
        spin_unlock_bh(&td_chan->lock);
static void td_free_chan_resources(struct dma_chan *chan)

        dev_dbg(chan2dev(chan), "%s: Entry\n", __func__);
        spin_lock_bh(&td_chan->lock);
        spin_unlock_bh(&td_chan->lock);
                dev_dbg(chan2dev(chan), "%s: Freeing desc: %p\n", __func__,
                        td_desc);
                td_free_desc(td_desc);
        dev_dbg(chan2dev(chan), "%s: Entry\n", __func__);

        ret = dma_cookie_status(chan, cookie, txstate);

        dev_dbg(chan2dev(chan), "%s: exit, ret: %d\n", __func__, ret);
static void td_issue_pending(struct dma_chan *chan)

        dev_dbg(chan2dev(chan), "%s: Entry\n", __func__);
        spin_lock_bh(&td_chan->lock);
                if (__td_dma_done_ack(td_chan))
                        __td_finish(td_chan);

                __td_start_next(td_chan);

        spin_unlock_bh(&td_chan->lock);
        unsigned int desc_usage = 0;

        if (!sgl || !sg_len) {
                dev_err(chan2dev(chan), "%s: No SG list\n", __func__);

                        "Requesting channel in wrong direction\n");
        td_desc = td_desc_get(td_chan);

                dev_err(chan2dev(chan), "Not enough descriptors available\n");

                        dev_err(chan2dev(chan), "No descriptor space\n");
                err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg,
                        i == (sg_len - 1));

                        dev_err(chan2dev(chan), "Failed to update desc: %d\n",
                                err);
                        td_desc_put(td_chan, td_desc);
        return &td_desc->txd;
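
/* Illustrative sketch only (not part of this driver): a dmaengine client would
 * typically drive one of these channels through the generic slave API, e.g.
 *
 *   txd = dmaengine_prep_slave_sg(chan, sgl, sg_len, dir, DMA_PREP_INTERRUPT);
 *   cookie = dmaengine_submit(txd);       (ends up in td_tx_submit())
 *   dma_async_issue_pending(chan);        (ends up in td_issue_pending())
 *
 * Channel request and slave configuration are outside this excerpt.
 */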
        dev_dbg(chan2dev(chan), "%s: Entry\n", __func__);

        spin_lock_bh(&td_chan->lock);

                list_move(&td_desc->desc_node, &td_chan->free_list);

        __td_finish(td_chan);
        spin_unlock_bh(&td_chan->lock);
static void td_tasklet(unsigned long data)

        ipr = isr & __td_ier_mask(td);
        for (i = 0; i < td->dma.chancnt; i++)
                if (ipr & (1 << i)) {
                        spin_lock(&td_chan->lock);
                        __td_finish(td_chan);
                        if (!list_empty(&td_chan->queue))
                                __td_start_next(td_chan);
                        spin_unlock(&td_chan->lock);

        ier = __td_ier_mask(td);
        tasklet_schedule(&td->tasklet);
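
/* The interrupt path defers completion handling to the tasklet: __td_finish()
 * and __td_start_next() run in td_tasklet() under the channel lock.
 */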
        td = kzalloc(sizeof(struct timb_dma) +

                goto err_release_region;
        dev_dbg(&pdev->dev, "Allocated TD: %p\n", td);

                dev_err(&pdev->dev, "Failed to remap I/O memory\n");

                dev_err(&pdev->dev, "Failed to request IRQ\n");
                goto err_tasklet_kill;
        td->dma.device_alloc_chan_resources = td_alloc_chan_resources;
        td->dma.device_free_chan_resources = td_free_chan_resources;
        td->dma.device_tx_status = td_tx_status;
        td->dma.device_issue_pending = td_issue_pending;

        td->dma.device_prep_slave_sg = td_prep_slave_sg;
        td->dma.device_control = td_control;
        INIT_LIST_HEAD(&td->dma.channels);
                if ((i % 2) == pchan->rx) {
                        dev_err(&pdev->dev, "Wrong channel configuration\n");

                td_chan->chan.device = &td->dma;
                dma_cookie_init(&td_chan->chan);

                INIT_LIST_HEAD(&td_chan->queue);
                dev_dbg(&pdev->dev, "Chan: %d, membase: %p\n",
                        i, td_chan->membase);
                dev_err(&pdev->dev, "Failed to register async device\n");

        platform_set_drvdata(pdev, td);

        dev_dbg(&pdev->dev, "Probe result: %d\n", err);
        struct timb_dma *td = platform_get_drvdata(pdev);

        platform_set_drvdata(pdev, NULL);