20 #include <linux/kernel.h>
22 #include <linux/module.h>
26 #include <linux/slab.h>
/* Offsets matching the fields of a PaRAM (parameter RAM) slot entry. */
33 #define PARM_A_B_CNT 0x08
35 #define PARM_SRC_DST_BIDX 0x10
36 #define PARM_LINK_BCNTRLD 0x14
37 #define PARM_SRC_DST_CIDX 0x18
38 #define PARM_CCNT 0x1c
40 #define PARM_SIZE 0x20
/* Channel-controller (CC) global register offsets. */
67 #define EDMA_REV 0x0000
68 #define EDMA_CCCFG 0x0004
69 #define EDMA_QCHMAP 0x0200
70 #define EDMA_DMAQNUM 0x0240
71 #define EDMA_QDMAQNUM 0x0260
72 #define EDMA_QUETCMAP 0x0280
73 #define EDMA_QUEPRI 0x0284
74 #define EDMA_EMR 0x0300
75 #define EDMA_EMCR 0x0308
76 #define EDMA_QEMR 0x0310
77 #define EDMA_QEMCR 0x0314
78 #define EDMA_CCERR 0x0318
79 #define EDMA_CCERRCLR 0x031c
80 #define EDMA_EEVAL 0x0320
81 #define EDMA_DRAE 0x0340
82 #define EDMA_QRAE 0x0380
83 #define EDMA_QUEEVTENTRY 0x0400
84 #define EDMA_QSTAT 0x0600
85 #define EDMA_QWMTHRA 0x0620
86 #define EDMA_QWMTHRB 0x0624
87 #define EDMA_CCSTAT 0x0640
88 /* NOTE(review): EDMA_ECRH uses an uppercase hex digit (0x100C) unlike the
 * lowercase style used elsewhere in this table — cosmetic inconsistency. */
90 #define EDMA_ECR 0x1008
91 #define EDMA_ECRH 0x100C
/* Shadow-region-0 register window and PaRAM base within the CC. */
92 #define EDMA_SHADOW0 0x2000
93 #define EDMA_PARM 0x4000
/* Byte offset of PaRAM slot @param_no: each slot is 0x20 (32) bytes. */
95 #define PARM_OFFSET(param_no) (EDMA_PARM + ((param_no) << 5))
97 #define EDMA_DCHMAP 0x0100
98 #define CHMAP_EXIST BIT(24)
/* Hardware limits: DMA channels per CC and total PaRAM slots. */
100 #define EDMA_MAX_DMACH 64
101 #define EDMA_MAX_PARAMENTRY 512
107 static inline unsigned int edma_read(
unsigned ctlr,
int offset)
109 return (
unsigned int)
__raw_readl(edmacc_regs_base[ctlr] + offset);
/*
 * edma_write - write @val to the CC register at byte @offset on @ctlr.
 * NOTE(review): the function body is not visible in this extraction —
 * presumably a __raw_writel() mirroring edma_read(); confirm against the
 * complete source before relying on this.
 */
112 static inline void edma_write(
unsigned ctlr,
int offset,
int val)
/*
 * edma_modify - read-modify-write a CC register.
 * NOTE(review): the final parameter line and the lines that combine @and
 * (and presumably an @or mask) into @val are missing from this extraction;
 * only the surrounding read and write-back are visible. Confirm the mask
 * semantics against the complete source.
 */
116 static inline void edma_modify(
unsigned ctlr,
int offset,
unsigned and,
119 unsigned val = edma_read(ctlr, offset);
122 edma_write(ctlr, offset, val);
/*
 * edma_and - AND a mask into a CC register (read, mask, write back).
 * NOTE(review): the line applying @and to @val (between the read and the
 * write-back) is missing from this extraction — confirm against the
 * complete source.
 */
124 static inline void edma_and(
unsigned ctlr,
int offset,
unsigned and)
126 unsigned val = edma_read(ctlr, offset);
128 edma_write(ctlr, offset, val);
/*
 * edma_or - OR bits into a CC register (read, set, write back).
 * NOTE(review): the line applying @or to @val (between the read and the
 * write-back) is missing from this extraction — confirm against the
 * complete source.
 */
130 static inline void edma_or(
unsigned ctlr,
int offset,
unsigned or)
132 unsigned val = edma_read(ctlr, offset);
134 edma_write(ctlr, offset, val);
/*
 * edma_read_array - read element @i of a register array.
 * Array entries are 4 bytes apart, so the element offset is i * 4.
 */
static inline unsigned int edma_read_array(unsigned ctlr, int offset, int i)
{
	return edma_read(ctlr, offset + (i << 2));
}
/*
 * edma_write_array - write element @i of a register array (4-byte stride).
 * NOTE(review): the final parameter line (presumably "unsigned val)") is
 * missing from this extraction; the body clearly uses a @val argument.
 */
140 static inline void edma_write_array(
unsigned ctlr,
int offset,
int i,
143 edma_write(ctlr, offset + (i << 2), val);
/*
 * edma_modify_array - apply edma_modify() to element @i of a register
 * array (entries are 4 bytes apart). @and and @or are forwarded unchanged.
 */
static inline void edma_modify_array(unsigned ctlr, int offset, int i,
		unsigned and, unsigned or)
{
	int elem = offset + (i << 2);

	edma_modify(ctlr, elem, and, or);
}
/*
 * edma_or_array - apply edma_or() to element @i of a register array
 * (entries are 4 bytes apart).
 */
static inline void edma_or_array(unsigned ctlr, int offset, int i, unsigned or)
{
	int elem = offset + (i << 2);

	edma_or(ctlr, elem, or);
}
/*
 * edma_or_array2 - apply edma_or() to a 2-D register array indexed by
 * (@i, @j), 2 entries per row, 4 bytes per entry.
 * NOTE(review): the final parameter line (presumably "unsigned or)") is
 * missing from this extraction; the body clearly uses an @or argument.
 */
154 static inline void edma_or_array2(
unsigned ctlr,
int offset,
int i,
int j,
157 edma_or(ctlr, offset + ((i*2 + j) << 2), or);
/*
 * edma_write_array2 - write a 2-D register array indexed by (@i, @j),
 * 2 entries per row, 4 bytes per entry.
 * NOTE(review): the final parameter line (presumably "unsigned val)") is
 * missing from this extraction; the body clearly uses a @val argument.
 */
159 static inline void edma_write_array2(
unsigned ctlr,
int offset,
int i,
int j,
162 edma_write(ctlr, offset + ((i*2 + j) << 2), val);
/*
 * edma_shadow0_read - read a register through the shadow-region-0 window.
 * NOTE(review): the function body is not visible in this extraction —
 * presumably edma_read() at EDMA_SHADOW0 + offset, by analogy with
 * edma_shadow0_read_array() below; confirm against the complete source.
 */
164 static inline unsigned int edma_shadow0_read(
unsigned ctlr,
int offset)
/*
 * edma_shadow0_read_array - read element @i of a shadow-region-0 register
 * array (4-byte stride, based at EDMA_SHADOW0).
 * NOTE(review): the final parameter line (presumably "int i)") is missing
 * from this extraction; the body clearly uses an @i argument.
 */
168 static inline unsigned int edma_shadow0_read_array(
unsigned ctlr,
int offset,
171 return edma_read(ctlr,
EDMA_SHADOW0 + offset + (i << 2));
/*
 * edma_shadow0_write - write a register through the shadow-region-0 window.
 * NOTE(review): the function body is not visible in this extraction —
 * presumably edma_write() at EDMA_SHADOW0 + offset; confirm against the
 * complete source.
 */
173 static inline void edma_shadow0_write(
unsigned ctlr,
int offset,
unsigned val)
/*
 * edma_shadow0_write_array - write element @i of a shadow-region-0 register
 * array (4-byte stride, based at EDMA_SHADOW0).
 * NOTE(review): the final parameter line (presumably "unsigned val)") is
 * missing from this extraction; the body clearly uses a @val argument.
 */
177 static inline void edma_shadow0_write_array(
unsigned ctlr,
int offset,
int i,
180 edma_write(ctlr,
EDMA_SHADOW0 + offset + (i << 2), val);
/*
 * edma_parm_read - read register @offset of a PaRAM slot (32 bytes/slot).
 * NOTE(review): the final parameter line (presumably "int param_no)") is
 * missing from this extraction; the body clearly uses a @param_no argument.
 */
182 static inline unsigned int edma_parm_read(
unsigned ctlr,
int offset,
185 return edma_read(ctlr,
EDMA_PARM + offset + (param_no << 5));
/*
 * edma_parm_write - write register @offset of PaRAM slot @param_no.
 * NOTE(review): the final parameter line (presumably "unsigned val)") is
 * missing from this extraction; the body clearly uses a @val argument.
 */
187 static inline void edma_parm_write(
unsigned ctlr,
int offset,
int param_no,
190 edma_write(ctlr,
EDMA_PARM + offset + (param_no << 5), val);
192 static inline void edma_parm_modify(
unsigned ctlr,
int offset,
int param_no,
193 unsigned and,
unsigned or)
195 edma_modify(ctlr,
EDMA_PARM + offset + (param_no << 5), and, or);
/*
 * edma_parm_and - apply edma_and() to register @offset of PaRAM slot
 * @param_no.
 * NOTE(review): the final parameter line (presumably "unsigned and)") is
 * missing from this extraction; the body clearly uses an @and argument.
 */
197 static inline void edma_parm_and(
unsigned ctlr,
int offset,
int param_no,
200 edma_and(ctlr,
EDMA_PARM + offset + (param_no << 5), and);
/*
 * edma_parm_or - apply edma_or() to register @offset of PaRAM slot
 * @param_no.
 * NOTE(review): the final parameter line (presumably "unsigned or)") is
 * missing from this extraction; the body clearly uses an @or argument.
 */
202 static inline void edma_parm_or(
unsigned ctlr,
int offset,
int param_no,
205 edma_or(ctlr,
EDMA_PARM + offset + (param_no << 5), or);
/*
 * set_bits - set @len consecutive bits of bitmap @p, starting at bit
 * @offset. Bits are set from the highest (offset + len - 1) downward,
 * matching the original loop order.
 */
static inline void set_bits(int offset, int len, unsigned long *p)
{
	while (len > 0) {
		set_bit(offset + len - 1, p);
		len--;
	}
}
/*
 * clear_bits - clear @len consecutive bits of bitmap @p, starting at bit
 * @offset.
 * NOTE(review): the loop body line (presumably a clear_bit() call mirroring
 * set_bits() above) is missing from this extraction — confirm against the
 * complete source.
 */
214 static inline void clear_bits(
int offset,
int len,
unsigned long *p)
216 for (; len > 0; len--)
/* Number of channel controllers present on this SoC (set at probe time). */
257 static int arch_num_cc;
/* NOTE(review): orphan fragment of a designated initializer — apparently
 * part of the dummy/idle PaRAM set; 0xffff in the link field conventionally
 * terminates a PaRAM link chain. Confirm against the complete source. */
261 .link_bcntrld = 0xffff,
/*
 * map_dmach_queue - map DMA channel @ch_no to an event queue.
 * Each DMAQNUM register holds 8 channels, 4 bits per channel — hence the
 * (ch_no & 0x7) * 4 bit position.
 * NOTE(review): the queue-number parameter line and the register-access
 * lines between the visible fragments are missing from this extraction.
 */
267 static void map_dmach_queue(
unsigned ctlr,
unsigned ch_no,
270 int bit = (ch_no & 0x7) * 4;
278 ~(0x7 << bit), queue_no << bit);
281 static void __init map_queue_tc(
unsigned ctlr,
int queue_no,
int tc_no)
283 int bit = queue_no * 4;
284 edma_modify(ctlr,
EDMA_QUETCMAP, ~(0x7 << bit), ((tc_no & 0x7) << bit));
/*
 * assign_priority_to_queue - set the priority of event queue @queue_no
 * (4 bits per queue in the QUEPRI register).
 * NOTE(review): the priority parameter line and the start of the
 * register-modify call are missing from this extraction.
 */
287 static void __init assign_priority_to_queue(
unsigned ctlr,
int queue_no,
290 int bit = queue_no * 4;
292 ((priority & 0x7) << bit));
/*
 * map_dmach_param - program DCHMAP so each DMA channel maps to the PaRAM
 * slot of the same index (slot offset is i << 5, i.e. i * 32 bytes).
 * NOTE(review): the loop header and the channel-count bound are missing
 * from this extraction.
 */
306 static void __init map_dmach_param(
unsigned ctlr)
310 edma_write_array(ctlr,
EDMA_DCHMAP , i , (i << 5));
/*
 * setup_dma_interrupt - configure completion-interrupt state for channel
 * @lch. The visible fragments touch the shadow-region interrupt-enable
 * clear (IECR), interrupt clear (ICR), and interrupt-enable set (IESR)
 * registers — consistent with disabling the interrupt when no callback is
 * installed and enabling it otherwise.
 * NOTE(review): the return type, callback/data parameters, and the
 * surrounding control flow are missing from this extraction.
 */
314 setup_dma_interrupt(
unsigned lch,
324 edma_shadow0_write_array(ctlr,
SH_IECR, lch >> 5,
331 edma_shadow0_write_array(ctlr,
SH_ICR, lch >> 5,
333 edma_shadow0_write_array(ctlr,
SH_IESR, lch >> 5,
/*
 * irq2ctlr - translate an IRQ number to the controller (0 or 1) whose
 * registered IRQ range contains it.
 * NOTE(review): the return statements (and any fallback for an unmatched
 * IRQ) are missing from this extraction — confirm the error path against
 * the complete source.
 */
338 static int irq2ctlr(
int irq)
340 if (irq >= edma_cc[0]->irq_res_start && irq <= edma_cc[0]->irq_res_end)
342 else if (irq >= edma_cc[1]->irq_res_start &&
343 irq <= edma_cc[1]->irq_res_end)
/*
 * dma_irq_handler - EDMA completion interrupt handler.
 * Visible structure: resolve the controller from the IRQ, read the pending
 * bits (SH_IPR bank 0/1) and the enabled bits (SH_IER), then for each set
 * bit: derive the channel as (bank << 5) | slot, ack it via SH_ICR, and
 * invoke the registered per-channel callback. Finally re-evaluate pending
 * interrupts via SH_IEVAL.
 * NOTE(review): the loop structure, bank-selection logic, and return
 * statements are missing from this extraction; statement ordering below is
 * fragmentary.
 */
354 static irqreturn_t dma_irq_handler(
int irq,
void *data)
361 ctlr = irq2ctlr(irq);
365 dev_dbg(data,
"dma_irq_handler\n");
367 sh_ipr = edma_shadow0_read_array(ctlr,
SH_IPR, 0);
369 sh_ipr = edma_shadow0_read_array(ctlr,
SH_IPR, 1);
372 sh_ier = edma_shadow0_read_array(ctlr,
SH_IER, 1);
375 sh_ier = edma_shadow0_read_array(ctlr,
SH_IER, 0);
383 dev_dbg(data,
"IPR%d %08x\n", bank, sh_ipr);
385 slot =
__ffs(sh_ipr);
386 sh_ipr &= ~(
BIT(slot));
388 if (sh_ier &
BIT(slot)) {
389 channel = (bank << 5) | slot;
/* Ack the completion before invoking the callback. */
391 edma_shadow0_write_array(ctlr,
SH_ICR, bank,
393 if (edma_cc[ctlr]->intr_data[channel].
callback)
396 edma_cc[ctlr]->intr_data[channel].data);
400 edma_shadow0_write(ctlr,
SH_IEVAL, 1);
/*
 * dma_ccerr_handler - EDMA channel-controller error interrupt handler.
 * Visible structure: resolve the controller, bail out early if no error is
 * pending in either EMR bank, then iterate the 32 channels of the bank
 * that has errors: ack each via SH_SECR and invoke the channel's callback.
 * The two "for (i = 0; i < 8; i++)" fragments apparently scan QEMR/CCERR
 * style error registers; the trailing EMR re-checks look like loop-exit
 * conditions.
 * NOTE(review): most of the control flow, the error-status constants
 * passed to callbacks, and the return statements are missing from this
 * extraction.
 */
409 static irqreturn_t dma_ccerr_handler(
int irq,
void *data)
413 unsigned int cnt = 0;
415 ctlr = irq2ctlr(irq);
419 dev_dbg(data,
"dma_ccerr_handler\n");
/* Early exit when no error source is pending. */
421 if ((edma_read_array(ctlr,
EDMA_EMR, 0) == 0) &&
422 (edma_read_array(ctlr,
EDMA_EMR, 1) == 0) &&
429 if (edma_read_array(ctlr,
EDMA_EMR, 0))
431 else if (edma_read_array(ctlr,
EDMA_EMR, 1))
434 dev_dbg(data,
"EMR%d %08x\n", j,
435 edma_read_array(ctlr,
EDMA_EMR, j));
436 for (i = 0; i < 32; i++) {
437 int k = (j << 5) + i;
438 if (edma_read_array(ctlr,
EDMA_EMR, j) &
/* Ack the missed event, then notify the channel owner. */
444 edma_shadow0_write_array(ctlr,
SH_SECR,
446 if (edma_cc[ctlr]->intr_data[k].
451 edma_cc[ctlr]->intr_data
459 for (i = 0; i < 8; i++) {
475 for (i = 0; i < 8; i++) {
484 if ((edma_read_array(ctlr,
EDMA_EMR, 0) == 0) &&
485 (edma_read_array(ctlr,
EDMA_EMR, 1) == 0) &&
/* TC error IRQs are registered but not acted upon when this is false. */
503 #define tc_errs_handled false
/*
 * dma_tc0err_handler - transfer-controller 0 error IRQ: currently only
 * logs via dev_dbg.
 * NOTE(review): the return statement is missing from this extraction.
 */
505 static irqreturn_t dma_tc0err_handler(
int irq,
void *data)
507 dev_dbg(data,
"dma_tc0err_handler\n");
/*
 * dma_tc1err_handler - transfer-controller 1 error IRQ: currently only
 * logs via dev_dbg.
 * NOTE(review): the return statement is missing from this extraction.
 */
511 static irqreturn_t dma_tc1err_handler(
int irq,
void *data)
513 dev_dbg(data,
"dma_tc1err_handler\n");
/*
 * reserve_contiguous_slots - find and claim a contiguous run of PaRAM
 * slots starting at or after @start_slot.
 * Visible structure: scan forward from @start_slot counting free slots
 * until @num_slots are found (or the slot table is exhausted), then mark
 * the winning range [i - num_slots + 1, i] as in use.
 * NOTE(review): the @num_slots parameter line, the in-loop bitmap tests,
 * and the failure/rollback paths are missing from this extraction.
 */
517 static int reserve_contiguous_slots(
int ctlr,
unsigned int id,
519 unsigned int start_slot)
523 int stop_slot = start_slot;
526 for (i = start_slot; i < edma_cc[ctlr]->
num_slots; ++
i) {
530 if (count == num_slots)
556 if (i == edma_cc[ctlr]->num_slots)
566 for (j = i - num_slots + 1; j <= i; ++j)
/* NOTE(review): orphan fragment — apparently from
 * prepare_unused_channel_list(): it inspects platform-device resources
 * and clears the corresponding bits in the edma_unused bitmap so channels
 * claimed by board resources are not handed out. The function's signature
 * and most of its body are missing from this extraction. */
580 (
int)pdev->
resource[i].start >= 0) {
583 edma_cc[ctlr]->edma_unused);
/* One-shot guard: the unused-channel scan runs only on first allocation. */
592 static bool unused_chan_list_done;
/*
 * edma_alloc_channel - allocate a DMA channel (fragment).
 * Visible structure: takes a completion callback of the form
 * void (*)(unsigned channel, u16 ch_status, void *data); on first call it
 * builds the unused-channel list; it then searches each controller's
 * edma_inuse bitmap for a free channel, enables the channel's event in
 * DRAE region 0, loads the dummy PaRAM set, and maps the channel to the
 * requested event queue.
 * NOTE(review): the function signature's first parameters, the bitmap
 * search loop, and all return paths are missing from this extraction.
 */
627 void (*
callback)(
unsigned channel,
u16 ch_status,
void *data),
631 unsigned i,
done = 0, ctlr = 0;
634 if (!unused_chan_list_done) {
641 prepare_unused_channel_list);
645 unused_chan_list_done =
true;
654 for (i = 0; i < arch_num_cc; i++) {
663 edma_cc[i]->edma_inuse)) {
/* Enable the channel's event in shadow region 0 (DRAE). */
682 edma_or_array2(ctlr,
EDMA_DRAE, 0, channel >> 5,
BIT(channel & 0x1f));
687 &dummy_paramset, PARM_SIZE);
693 map_dmach_queue(ctlr, channel, eventq_no);
/* NOTE(review): fragment of edma_free_channel() — tears down the channel
 * interrupt (NULL callback/data), reloads the dummy PaRAM set, and returns
 * the channel to the edma_inuse bitmap. Signature and guards are missing
 * from this extraction. */
721 setup_dma_interrupt(channel,
NULL,
NULL);
725 &dummy_paramset, PARM_SIZE);
726 clear_bit(channel, edma_cc[ctlr]->edma_inuse);
/* NOTE(review): fragment of edma_alloc_slot() — scans for a free PaRAM
 * slot (failing when the scan reaches num_slots), validates an explicitly
 * requested slot against num_slots, and initializes the claimed slot with
 * the dummy PaRAM set. Signature and return paths are missing from this
 * extraction. */
753 edma_cc[ctlr]->num_slots, slot);
754 if (slot == edma_cc[ctlr]->num_slots)
760 slot >= edma_cc[ctlr]->num_slots) {
767 &dummy_paramset, PARM_SIZE);
/* NOTE(review): fragment of edma_free_slot() — validates the slot index,
 * reloads the dummy PaRAM set, and clears the slot's in-use bit. Signature
 * and the rest of the guard expression are missing from this extraction. */
789 slot >= edma_cc[ctlr]->num_slots)
793 &dummy_paramset, PARM_SIZE);
794 clear_bit(slot, edma_cc[ctlr]->edma_inuse);
/* NOTE(review): fragment of edma_alloc_cont_slots() — validates @slot and
 * @count (count must be >= 1 and fit in the slots left over after the
 * channel-mapped ones), then delegates to reserve_contiguous_slots();
 * the two visible calls apparently correspond to the "any slot" and
 * "specific start slot" allocation modes. Signature and the branch
 * conditions are missing from this extraction. */
833 slot >= edma_cc[ctlr]->num_slots))
841 if (count < 1 || count >
842 (edma_cc[ctlr]->num_slots - edma_cc[ctlr]->
num_channels))
847 return reserve_contiguous_slots(ctlr,
id, count,
851 return reserve_contiguous_slots(ctlr,
id, count, slot);
/* NOTE(review): fragment of edma_free_cont_slots() — validates the range,
 * then walks slots [slot, slot + count), reloading the dummy PaRAM set and
 * clearing each slot's in-use bit. Signature, the mapping from i to
 * slot_to_free, and return paths are missing from this extraction. */
874 unsigned ctlr, slot_to_free;
881 slot >= edma_cc[ctlr]->num_slots ||
885 for (i = slot; i < slot +
count; ++
i) {
890 &dummy_paramset, PARM_SIZE);
891 clear_bit(slot_to_free, edma_cc[ctlr]->edma_inuse);
/* NOTE(review): fragment of edma_set_src() — for a valid slot, read-modify-
 * writes PARM_OPT (the modification lines are missing from this extraction)
 * and writes the source address @src_port into PARM_SRC. Signature and the
 * OPT-field bit manipulation are not visible. */
921 if (slot < edma_cc[ctlr]->num_slots) {
922 unsigned int i = edma_parm_read(ctlr,
PARM_OPT, slot);
931 edma_parm_write(ctlr,
PARM_OPT, slot, i);
935 edma_parm_write(ctlr,
PARM_SRC, slot, src_port);
/* NOTE(review): fragment of edma_set_dest() — mirror of edma_set_src():
 * read-modify-write PARM_OPT (modification lines missing from this
 * extraction) and write the destination address @dest_port into PARM_DST.
 * Signature and the OPT-field bit manipulation are not visible. */
959 if (slot < edma_cc[ctlr]->num_slots) {
960 unsigned int i = edma_parm_read(ctlr,
PARM_OPT, slot);
969 edma_parm_write(ctlr,
PARM_OPT, slot, i);
972 edma_parm_write(ctlr,
PARM_DST, slot, dest_port);
/* NOTE(review): fragment of edma_set_src_index() — updates the low 16 bits
 * (mask 0xffff0000 preserved) of the SRC halves of the BIDX and CIDX PaRAM
 * registers with @src_bidx / @src_cidx. The edma_parm_modify() call heads
 * are missing from this extraction. */
1019 if (slot < edma_cc[ctlr]->num_slots) {
1021 0xffff0000, src_bidx);
1023 0xffff0000, src_cidx);
/* NOTE(review): fragment of edma_set_dest_index() — updates the high 16
 * bits (mask 0x0000ffff preserved) of the BIDX and CIDX PaRAM registers
 * with @dest_bidx / @dest_cidx shifted into the DST halves. The
 * edma_parm_modify() call heads are missing from this extraction. */
1045 if (slot < edma_cc[ctlr]->num_slots) {
1047 0x0000ffff, dest_bidx << 16);
1049 0x0000ffff, dest_cidx << 16);
/* NOTE(review): fragment of edma_set_transfer_params() — for a valid slot,
 * writes @bcnt_rld into the upper half of LINK_BCNTRLD (mask 0x0000ffff
 * preserved), branches on @sync_mode (ASYNC vs. the missing alternative,
 * presumably adjusting the SYNCDIM OPT bit — lines not visible), then
 * programs A/B counts ((bcnt << 16) | acnt) and CCNT. Signature is missing
 * from this extraction. */
1092 if (slot < edma_cc[ctlr]->num_slots) {
1094 0x0000ffff, bcnt_rld << 16);
1095 if (sync_mode ==
ASYNC)
1100 edma_parm_write(ctlr,
PARM_A_B_CNT, slot, (bcnt << 16) | acnt);
1101 edma_parm_write(ctlr,
PARM_CCNT, slot, ccnt);
/* NOTE(review): orphan fragments of four separate functions; only their
 * slot-range guard lines survived this extraction. */
/* apparently edma_link(from, to): validate both endpoints. */
1115 unsigned ctlr_from, ctlr_to;
1122 if (from >= edma_cc[ctlr_from]->num_slots)
1124 if (to >= edma_cc[ctlr_to]->num_slots)
/* apparently edma_unlink(from): validate the slot. */
1145 if (from >= edma_cc[ctlr]->num_slots)
/* apparently edma_write_slot(slot, ...): validate the slot. */
1172 if (slot >= edma_cc[ctlr]->num_slots)
/* apparently edma_read_slot(slot, ...): validate the slot. */
1194 if (slot >= edma_cc[ctlr]->num_slots)
/* NOTE(review): fragments of edma_pause() and edma_resume(). Both compute
 * the per-channel bit mask BIT(channel & 0x1f) in bank channel >> 5;
 * pause clears the event enable (SH_EECR), resume sets it (SH_EESR).
 * Signatures and guards are missing from this extraction. */
/* edma_pause: disable further event triggering for the channel. */
1220 unsigned int mask =
BIT(channel & 0x1f);
1222 edma_shadow0_write_array(ctlr,
SH_EECR, channel >> 5, mask);
/* edma_resume: re-enable event triggering for the channel. */
1241 unsigned int mask =
BIT(channel & 0x1f);
1243 edma_shadow0_write_array(ctlr,
SH_EESR, channel >> 5, mask);
/*
 * edma_start - begin a transfer on @channel (fragment).
 * Visible structure: if the channel has no hardware event (bit set in
 * edma_unused), trigger it manually via SH_ESR; otherwise clear any stale
 * state (ECR event clear, EMCR missed-event clear, SH_SECR secondary-event
 * clear) and enable the event via SH_EESR. The SH_ER/SH_EER reads feed
 * debug output.
 * NOTE(review): signature, guards, return paths, and the dev_dbg call
 * heads are missing from this extraction.
 */
1267 int j = channel >> 5;
1268 unsigned int mask =
BIT(channel & 0x1f);
/* Manually-triggered path: channel has no hardware event source. */
1271 if (
test_bit(channel, edma_cc[ctlr]->edma_unused)) {
1273 edma_shadow0_read_array(ctlr,
SH_ESR, j));
1274 edma_shadow0_write_array(ctlr,
SH_ESR, j, mask);
/* Event-triggered path: clear stale events, then enable. */
1280 edma_shadow0_read_array(ctlr,
SH_ER, j));
1282 edma_write_array(ctlr,
EDMA_ECR, j, mask);
1283 edma_write_array(ctlr,
EDMA_EMCR, j, mask);
1285 edma_shadow0_write_array(ctlr,
SH_SECR, j, mask);
1286 edma_shadow0_write_array(ctlr,
SH_EESR, j, mask);
1288 edma_shadow0_read_array(ctlr,
SH_EER, j));
/*
 * edma_stop - halt activity on @channel (fragment).
 * Visible structure: disable the event (SH_EECR), clear any pending event
 * (SH_ECR), clear secondary events (SH_SECR), and clear missed events
 * (EMCR). The SH_EER read feeds debug output.
 * NOTE(review): signature, guards, and dev_dbg call heads are missing from
 * this extraction.
 */
1313 int j = channel >> 5;
1314 unsigned int mask =
BIT(channel & 0x1f);
1316 edma_shadow0_write_array(ctlr,
SH_EECR, j, mask);
1317 edma_shadow0_write_array(ctlr,
SH_ECR, j, mask);
1318 edma_shadow0_write_array(ctlr,
SH_SECR, j, mask);
1319 edma_write_array(ctlr,
EDMA_EMCR, j, mask);
1322 edma_shadow0_read_array(ctlr,
SH_EER, j));
/*
 * edma_clean_channel - scrub error state on @channel (fragment).
 * Visible structure: read EMR for debug output, clear the pending event
 * (SH_ECR), clear the missed-event record (EMCR), and clear the secondary
 * event (SH_SECR).
 * NOTE(review): signature, guards, and the CCERRCLR write seen in
 * comparable drivers are missing from this extraction.
 */
1352 int j = (channel >> 5);
1353 unsigned int mask =
BIT(channel & 0x1f);
1356 edma_read_array(ctlr,
EDMA_EMR, j));
1357 edma_shadow0_write_array(ctlr,
SH_ECR, j, mask);
1359 edma_write_array(ctlr,
EDMA_EMCR, j, mask);
1361 edma_shadow0_write_array(ctlr,
SH_SECR, j, mask);
/*
 * edma_probe - platform-driver probe (large fragment; the function header
 * itself is not visible in this extraction).
 * Visible phases, per controller j:
 *  1. locate the "edma_cc%d" memory resource and map it;
 *  2. initialize every PaRAM slot with the dummy set and mark all
 *     channels unused (edma_unused = all ones);
 *  3. honor board-reserved channels/slots from info[j]->rsv, terminated
 *     by a -1 sentinel pair;
 *  4. request the completion ("edma%d") and error ("edma%d_err") IRQs;
 *  5. map channels to the default queue, queues to TCs, and assign queue
 *     priorities from the platform tables;
 *  6. clear DRAE for every region, then request the TC0/TC1 error IRQs.
 * NOTE(review): error-path cleanup (iounmap/free_irq ordering) and the
 * return statements are missing from this extraction — confirm resource
 * cleanup against the complete source.
 */
1395 int i,
j, off, ln, found = 0;
1397 const s16 (*rsv_chans)[2];
1398 const s16 (*rsv_slots)[2];
/* Phase 1: find and map the controller register window. */
1410 sprintf(res_name,
"edma_cc%d", j);
1413 if (!r[j] || !info[j]) {
1422 len[
j] = resource_size(r[j]);
1425 dev_name(&pdev->
dev));
1432 if (!edmacc_regs_base[j]) {
1452 dev_dbg(&pdev->
dev,
"DMA REG BASE ADDR=%p\n",
1453 edmacc_regs_base[j]);
/* Phase 2: idle PaRAM everywhere; all channels initially unused. */
1457 &dummy_paramset, PARM_SIZE);
1460 memset(edma_cc[j]->edma_unused, 0xff,
1461 sizeof(edma_cc[j]->edma_unused));
/* Phase 3: board-reserved channels ((-1,-1)-terminated list). */
1466 rsv_chans = info[
j]->
rsv->rsv_chans;
1468 for (i = 0; rsv_chans[
i][0] != -1; i++) {
1469 off = rsv_chans[
i][0];
1470 ln = rsv_chans[
i][1];
1472 edma_cc[j]->edma_unused);
/* Board-reserved PaRAM slots ((-1,-1)-terminated list). */
1477 rsv_slots = info[
j]->
rsv->rsv_slots;
1479 for (i = 0; rsv_slots[
i][0] != -1; i++) {
1480 off = rsv_slots[
i][0];
1481 ln = rsv_slots[
i][1];
1483 edma_cc[j]->edma_inuse);
/* Phase 4: completion and CC-error interrupts. */
1488 sprintf(irq_name,
"edma%d", j);
1491 status =
request_irq(irq[j], dma_irq_handler, 0,
"edma",
1494 dev_dbg(&pdev->
dev,
"request_irq %d failed --> %d\n",
1499 sprintf(irq_name,
"edma%d_err", j);
1502 status =
request_irq(err_irq[j], dma_ccerr_handler, 0,
1503 "edma_error", &pdev->
dev);
1505 dev_dbg(&pdev->
dev,
"request_irq %d failed --> %d\n",
1506 err_irq[j], status);
/* Phase 5: queue/TC/priority topology from the platform data. */
1511 map_dmach_queue(j, i, info[j]->default_queue);
1517 for (i = 0; queue_tc_mapping[
i][0] != -1; i++)
1518 map_queue_tc(j, queue_tc_mapping[i][0],
1519 queue_tc_mapping[i][1]);
1522 for (i = 0; queue_priority_mapping[
i][0] != -1; i++)
1523 assign_priority_to_queue(j,
1524 queue_priority_mapping[i][0],
1525 queue_priority_mapping[i][1]);
/* Phase 6: start with no region access (DRAE cleared), then TC IRQs. */
1533 for (i = 0; i < info[
j]->
n_region; i++) {
1534 edma_write_array2(j,
EDMA_DRAE, i, 0, 0x0);
1535 edma_write_array2(j,
EDMA_DRAE, i, 1, 0x0);
1543 "edma_tc0", &pdev->
dev);
1545 dev_dbg(&pdev->
dev,
"request_irq %d failed --> %d\n",
1550 "edma_tc1", &pdev->
dev);
1552 dev_dbg(&pdev->
dev,
"request_irq %d --> %d\n",
/* NOTE(review): orphan fragments — the guarded unmap line apparently
 * belongs to a remove/cleanup path; the ".driver.name" initializer belongs
 * to the platform_driver definition registering this driver as "edma".
 * Their enclosing definitions are missing from this extraction. */
1571 if (edmacc_regs_base[i])
1580 .driver.name =
"edma",
1583 static int __init edma_init(
void)