#include <linux/list.h>
#include <linux/module.h>
#include <linux/slab.h>

#ifdef CONFIG_ARCH_DAVINCI_DA8XX

#define EDMA_MAX_SLOTS          MAX_NR_SG
#define EDMA_DESCRIPTORS        16
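/*
 * edma_execute() - write the next queued descriptor's PaRAM set(s) into the
 * channel's slots and start the transfer.
 */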
static void edma_execute(struct edma_chan *echan)

        echan->edesc = edesc = to_edma_desc(&vdesc->tx);

        for (i = 0; i < edesc->pset_nr; i++) {
                        edesc->pset[i].a_b_cnt,
                        edesc->pset[i].src_dst_bidx,
                        edesc->pset[i].src_dst_cidx,
                        edesc->pset[i].link_bcntrld);
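/*
 * Stop the channel and hand every queued descriptor back to virt-dma for
 * freeing.
 */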
static int edma_terminate_all(struct edma_chan *echan)

        vchan_get_all_descriptors(&echan->vchan, &head);
        spin_unlock_irqrestore(&echan->vchan.lock, flags);
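/* Validate and cache the dma_slave_config used when preparing transfers */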
static int edma_slave_config(struct edma_chan *echan,
        struct dma_slave_config *config)
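/* From the device_control callback: dispatch terminate-all and slave-config */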
        struct edma_chan *echan = to_edma_chan(chan);

                edma_terminate_all(echan);

                ret = edma_slave_config(echan, config);
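/* Slave scatter-gather preparation: one PaRAM set is built per SG entry */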
        unsigned long tx_flags, void *context)
        struct edma_chan *echan = to_edma_chan(chan);

        int src_bidx, dst_bidx, src_cidx, dst_cidx;

        if (unlikely(!echan || !sgl || !sg_len))

                dev_err(dev, "Undefined slave buswidth\n");

                dev_err(dev, "Exceeded max SG segments %d > %d\n",

        edesc = kzalloc(sizeof(*edesc) + sg_len *

                dev_dbg(dev, "Failed to allocate a descriptor\n");

                if (echan->slot[i] < 0) {

                        if (echan->slot[i] < 0) {
                                dev_err(dev, "Failed to allocate slot\n");

                if (ccnt > (SZ_64K - 1)) {
                        dev_err(dev, "Exceeded max SG segment size\n");
                edesc->pset[i].src_dst_bidx = (dst_bidx << 16) | src_bidx;
                edesc->pset[i].src_dst_cidx = (dst_cidx << 16) | src_cidx;

                edesc->pset[i].a_b_cnt = bcnt << 16 | acnt;
                edesc->pset[i].ccnt = ccnt;
                edesc->pset[i].link_bcntrld = 0xffffffff;

        return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
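/*
 * Channel interrupt callback: complete the active descriptor on success,
 * log errors, and start the next queued transfer.
 */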
static void edma_callback(unsigned ch_num, u16 ch_status, void *data)

        struct device *dev = echan->vchan.chan.device->dev;

                dev_dbg(dev, "transfer complete on channel %d\n", ch_num);

                edesc = echan->edesc;

                        vchan_cookie_complete(&edesc->vdesc);

                spin_unlock_irqrestore(&echan->vchan.lock, flags);

                dev_dbg(dev, "transfer error on channel %d\n", ch_num);
static int edma_alloc_chan_resources(struct dma_chan *chan)

        struct edma_chan *echan = to_edma_chan(chan);

        if (a_ch_num != echan->ch_num) {
                dev_err(dev, "failed to allocate requested channel %u:%u\n",

        dev_info(dev, "allocated channel for %u:%u\n",
static void edma_free_chan_resources(struct dma_chan *chan)

        struct edma_chan *echan = to_edma_chan(chan);

        vchan_free_chan_resources(&echan->vchan);

                if (echan->slot[i] >= 0) {
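/* Start newly submitted descriptors if the channel is currently idle */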
static void edma_issue_pending(struct dma_chan *chan)

        struct edma_chan *echan = to_edma_chan(chan);

        if (vchan_issue_pending(&echan->vchan) && !echan->edesc)

        spin_unlock_irqrestore(&echan->vchan.lock, flags);
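/*
 * Estimate a descriptor's byte count from its PaRAM A/B/C counts; the two
 * branches cover AB-synchronized and A-synchronized transfers.
 */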
static size_t edma_desc_size(struct edma_desc *edesc)

                for (size = i = 0; i < edesc->pset_nr; i++)
                        size += (edesc->pset[i].a_b_cnt & 0xffff) *
                                (edesc->pset[i].a_b_cnt >> 16) *

                size = (edesc->pset[0].a_b_cnt & 0xffff) *
                        (edesc->pset[0].a_b_cnt >> 16) +
                        (edesc->pset[0].a_b_cnt & 0xffff) *
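/* From the tx_status callback: report an estimated residue for queued or active descriptors */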
        struct edma_chan *echan = to_edma_chan(chan);

        ret = dma_cookie_status(chan, cookie, txstate);

                txstate->residue = edma_desc_size(to_edma_desc(&vdesc->tx));
        } else if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie) {

                txstate->residue = edma_desc_size(edesc);

        spin_unlock_irqrestore(&echan->vchan.lock, flags);
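/* Per-channel setup: hook the descriptor free routine and init the node list */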
                echan->vchan.desc_free = edma_desc_free;

                INIT_LIST_HEAD(&echan->node);
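/*
 * Probe: allocate the controller state, reserve a dummy PaRAM slot used to
 * terminate transfer chains, and register the dmaengine device.
 */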
                dev_err(&pdev->dev, "Can't allocate controller\n");

                dev_err(&pdev->dev, "Can't allocate PaRAM dummy slot\n");

        platform_set_drvdata(pdev, ecc);

        dev_info(&pdev->dev, "TI EDMA DMA engine driver\n");
                .name = "edma-dma-engine",
        struct edma_chan *echan = to_edma_chan(chan);
        unsigned ch_req = *(unsigned *)param;
        return ch_req == echan->ch_num;
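/*
 * A minimal usage sketch (not from this file): a client driver would
 * typically pair the filter above (exported as edma_filter_fn) with the
 * generic dmaengine API from <linux/dmaengine.h>. The helper name and the
 * way the channel number is chosen are illustrative assumptions only.
 */
static struct dma_chan *example_edma_request(unsigned int ch_num)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        /* The filter compares this value against echan->ch_num */
        return dma_request_channel(mask, edma_filter_fn, &ch_num);
}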
        .name = "edma-dma-engine",

        .name = "edma-dma-engine",
static int edma_init(void)

                        ret = PTR_ERR(pdev0);

                        ret = PTR_ERR(pdev1);
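/* Module exit: unregister the platform device(s) and the platform driver */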
static void __exit edma_exit(void)