#include <linux/module.h>
#include <linux/slab.h>

#include <mach/adma.h>

#define to_iop_adma_chan(chan) container_of(chan, struct iop_adma_chan, common)
#define to_iop_adma_device(dev) \
        container_of(dev, struct iop_adma_device, common)
#define tx_to_iop_adma_slot(tx) \
        container_of(tx, struct iop_adma_desc_slot, async_tx)
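/* Descriptor unmap helpers: iop_desc_unmap() handles memcpy/xor
 * descriptors, iop_desc_unmap_pq() the P+Q variants; both read the
 * destination and source addresses back out of the hardware descriptor
 * so the corresponding DMA mappings can be released.
 */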
        dest = iop_desc_get_dest_addr(unmap, iop_chan);
        addr = iop_desc_get_src_addr(unmap, iop_chan, src_cnt);

        dma_addr_t pdest = iop_desc_get_dest_addr(unmap, iop_chan);
        dma_addr_t qdest = iop_desc_get_qdest_addr(unmap, iop_chan);
        if (!(flags & DMA_COMPL_SKIP_SRC_UNMAP)) {
                for (i = 0; i < src_cnt; i++) {
                        addr = iop_desc_get_src_addr(unmap, iop_chan, i);
                if (iop_desc_is_pq(desc))
                        iop_desc_unmap_pq(iop_chan, desc);
                else
                        iop_desc_unmap(iop_chan, desc);
        if (!async_tx_test_ack(&desc->async_tx))
                "\tfree slot: %d slots_per_op: %d\n",
        iop_adma_free_slots(desc);
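/* __iop_adma_slot_cleanup() walks the channel's descriptor chain,
 * completing cookies for finished descriptors and recycling acked
 * slots back to the free pool; callers must hold iop_chan->lock.
 */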
static void __iop_adma_slot_cleanup(struct iop_adma_chan *iop_chan)
        u32 current_desc = iop_chan_get_current_descriptor(iop_chan);
        int busy = iop_chan_is_busy(iop_chan);
                pr_debug("\tcookie: %d slot: %d busy: %d "
                        "this_desc: %#x next_desc: %#x ack: %d\n",
                        iter->async_tx.cookie, iter->idx, busy,
                        iter->async_tx.phys, iop_desc_get_next_desc(iter),
                        async_tx_test_ack(&iter->async_tx));
                if (iter->async_tx.phys == current_desc) {
                        if (busy || iop_desc_get_next_desc(iter))
                        int end_of_chain = 0;
                        u32 zero_sum_result = 0;

                        grp_iter = grp_start;
                        zero_sum_result |=
                                iop_desc_get_zero_result(grp_iter);
                        pr_debug("\titer%d result: %d\n",
                                grp_iter->idx, zero_sum_result);
                        pr_debug("\tgrp_start->xor_check_result: %p\n",
                                grp_start->xor_check_result);
                        grp_iter = grp_start;
                        cookie = iop_adma_run_tx_complete_actions(
                                        grp_iter, iop_chan, cookie);
                        end_of_chain = iop_adma_clean_slot(grp_iter,
                                        iop_chan);
                        *iter->xor_check_result =
                                iop_desc_get_zero_result(iter);
                cookie = iop_adma_run_tx_complete_actions(
                                iter, iop_chan, cookie);
                if (iop_adma_clean_slot(iter, iop_chan))

        pr_debug("\tcompleted cookie %d\n", cookie);

        spin_lock_bh(&iop_chan->lock);
        __iop_adma_slot_cleanup(iop_chan);
        spin_unlock_bh(&iop_chan->lock);
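/* The tasklet runs the same cleanup from interrupt context; it takes
 * the plain spin_lock() rather than the _bh variant since it already
 * executes in bottom-half context.
 */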
static void iop_adma_tasklet(unsigned long data)
        __iop_adma_slot_cleanup(iop_chan);
        spin_unlock(&iop_chan->lock);
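/* iop_adma_alloc_slots(): scan the slot list for a run of num_slots
 * contiguous free descriptors (slots_per_op per hardware operation);
 * if the scan fails, clean completed descriptors and retry.
 */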
        int slots_found, retry = 0;

                if (!slots_found++) {
                if (slots_found == num_slots) {
                                "allocated slot: %d "
                                "(desc %p phys: %#x) slots_per_op %d\n",
                        if (num_slots != slots_per_op)
                iop_desc_clear_next_desc(alloc_start);
                iop_desc_clear_next_desc(alloc_tail);
        __iop_adma_slot_cleanup(iop_chan);
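/* Once enough descriptors have accumulated without an append, kick
 * the hardware so the pending chain is picked up.
 */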
static void iop_adma_check_threshold(struct iop_adma_chan *iop_chan)
                iop_chan_append(iop_chan);
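/* iop_adma_tx_submit(): under the channel lock, assign a cookie,
 * splice the software descriptor's tx_list onto the channel chain and
 * link the new group after the old chain tail in hardware; the
 * read-back BUG_ON also flushes the descriptor write.
 */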
        spin_lock_bh(&iop_chan->lock);
        cookie = dma_cookie_assign(tx);

        list_splice_init(&sw_desc->tx_list,
                         &old_chain_tail->chain_node);
        next_dma = grp_start->async_tx.phys;
        iop_desc_set_next_desc(old_chain_tail, next_dma);
        BUG_ON(iop_desc_get_next_desc(old_chain_tail) != next_dma);
        iop_adma_check_threshold(iop_chan);
        spin_unlock_bh(&iop_chan->lock);

        dev_dbg(iop_chan->device->common.dev, "%s cookie: %d slot: %d\n",
                __func__, sw_desc->async_tx.cookie, sw_desc->idx);
static void iop_chan_start_null_memcpy(struct iop_adma_chan *iop_chan);
static void iop_chan_start_null_xor(struct iop_adma_chan *iop_chan);
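/* Allocate the channel's descriptor resources: carve the coherent
 * dma_desc_pool into slots, wire up each slot's async_tx fields, and
 * prime capable channels with a null operation so the engine has a
 * valid descriptor to chain from.
 */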
static int iop_adma_alloc_chan_resources(struct dma_chan *chan)
                iop_chan->device->pdev->dev.platform_data;

        if (idx == num_descs_in_pool)
                " %d descriptor slots", idx);
                hw_desc = (char *) iop_chan->device->dma_desc_pool_virt;
                slot->async_tx.tx_submit = iop_adma_tx_submit;
                INIT_LIST_HEAD(&slot->tx_list);
                hw_desc = (char *) iop_chan->device->dma_desc_pool;

                spin_lock_bh(&iop_chan->lock);
                spin_unlock_bh(&iop_chan->lock);

                "allocated %d descriptor slots last_used: %p\n",
        if (dma_has_cap(DMA_MEMCPY,
                iop_chan->device->common.cap_mask))
                iop_chan_start_null_memcpy(iop_chan);
        else if (dma_has_cap(DMA_XOR,
                iop_chan->device->common.cap_mask))
                iop_chan_start_null_xor(iop_chan);

        return (idx > 0) ? idx : -ENOMEM;
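/* Prepare a descriptor whose only effect is to raise an end-of-chain
 * interrupt.
 */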
iop_adma_prep_dma_interrupt(struct dma_chan *chan, unsigned long flags)
        spin_lock_bh(&iop_chan->lock);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_interrupt(grp_start, iop_chan);
        spin_unlock_bh(&iop_chan->lock);
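/* Prepare a memcpy descriptor: allocate a slot group sized for len
 * and program the byte count, destination and source into the group
 * head.
 */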
                        dma_addr_t dma_src, size_t len, unsigned long flags)
        spin_lock_bh(&iop_chan->lock);
        slot_cnt = iop_chan_memcpy_slot_count(len, &slots_per_op);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_memcpy(grp_start, flags);
                iop_desc_set_byte_count(grp_start, iop_chan, len);
                iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
                iop_desc_set_memcpy_src_addr(grp_start, dma_src);
        spin_unlock_bh(&iop_chan->lock);
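/* Prepare a memset descriptor: the block-fill value and the
 * destination address are programmed into the group head.
 */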
                        int value, size_t len, unsigned long flags)
        spin_lock_bh(&iop_chan->lock);
        slot_cnt = iop_chan_memset_slot_count(len, &slots_per_op);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_memset(grp_start, flags);
                iop_desc_set_byte_count(grp_start, iop_chan, len);
                iop_desc_set_block_fill_val(grp_start, value);
                iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
        spin_unlock_bh(&iop_chan->lock);
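/* Prepare an xor descriptor over src_cnt source buffers feeding a
 * single destination.
 */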
                        dma_addr_t *dma_src, unsigned int src_cnt, size_t len,
                "%s src_cnt: %d len: %u flags: %lx\n",
                __func__, src_cnt, len, flags);

        spin_lock_bh(&iop_chan->lock);
        slot_cnt = iop_chan_xor_slot_count(len, src_cnt, &slots_per_op);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_xor(grp_start, src_cnt, flags);
                iop_desc_set_byte_count(grp_start, iop_chan, len);
                iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
                iop_desc_set_xor_src_addr(grp_start, src_cnt,
                                          dma_src[src_cnt]);
        spin_unlock_bh(&iop_chan->lock);
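/* Prepare an xor-validate (zero-sum) descriptor: the engine xors the
 * sources and reports through *result whether the parity sum was
 * zero.
 */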
                          unsigned int src_cnt, size_t len, u32 *result,
        dev_dbg(iop_chan->device->common.dev, "%s src_cnt: %d len: %u\n",
                __func__, src_cnt, len);

        spin_lock_bh(&iop_chan->lock);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_zero_sum(grp_start, src_cnt, flags);
                iop_desc_set_zero_sum_byte_count(grp_start, len);
                pr_debug("\t%s: grp_start->xor_check_result: %p\n",
                        __func__, grp_start->xor_check_result);
        spin_unlock_bh(&iop_chan->lock);
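/* Prepare a P+Q (RAID-6) descriptor. For continuation operations the
 * previous P/Q results are fed back in as extra sources, hence the
 * continue_srcs adjustment below.
 */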
                     unsigned int src_cnt, const unsigned char *scf, size_t len,
                "%s src_cnt: %d len: %u flags: %lx\n",
                __func__, src_cnt, len, flags);

        if (dmaf_p_disabled_continue(flags))
                continue_srcs = 1+src_cnt;
        else if (dmaf_continue(flags))
                continue_srcs = 3+src_cnt;
        else
                continue_srcs = 0+src_cnt;
        spin_lock_bh(&iop_chan->lock);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_set_byte_count(g, iop_chan, len);

                if (flags & DMA_PREP_PQ_DISABLE_P)
                        dst[0] = dst[1] & 0x7;

                iop_desc_set_pq_addr(g, dst);

                for (i = 0; i < src_cnt; i++)
                        iop_desc_set_pq_src_addr(g, i, src[i], scf[i]);

                if (dmaf_p_disabled_continue(flags))
                        iop_desc_set_pq_src_addr(g, i++, dst[1], 1);
                else if (dmaf_continue(flags)) {
                        iop_desc_set_pq_src_addr(g, i++, dst[0], 0);
                        iop_desc_set_pq_src_addr(g, i++, dst[1], 1);
                        iop_desc_set_pq_src_addr(g, i++, dst[1], 0);
                }
                iop_desc_init_pq(g, i, flags);
        spin_unlock_bh(&iop_chan->lock);
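/* Prepare a P+Q validate descriptor: the src_cnt data sources plus
 * the existing P and Q blocks are checked, with the outcome reported
 * via pq_check_result.
 */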
                         unsigned int src_cnt, const unsigned char *scf,
        dev_dbg(iop_chan->device->common.dev, "%s src_cnt: %d len: %u\n",
                __func__, src_cnt, len);

        spin_lock_bh(&iop_chan->lock);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                int pq_idx = src_cnt;

                iop_desc_init_pq_zero_sum(g, src_cnt+2, flags);
                pr_debug("\t%s: g->pq_check_result: %p\n",
                        __func__, g->pq_check_result);
                iop_desc_set_pq_zero_sum_addr(g, pq_idx, src);
        spin_unlock_bh(&iop_chan->lock);
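/* Tear down the channel: run a final cleanup pass, return every
 * descriptor slot to the pool, and warn if descriptors are still in
 * flight.
 */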
static void iop_adma_free_chan_resources(struct dma_chan *chan)
        int in_use_descs = 0;

        iop_adma_slot_cleanup(iop_chan);

        spin_lock_bh(&iop_chan->lock);
        dev_dbg(iop_chan->device->common.dev, "%s slots_allocated %d\n",
                __func__, iop_chan->slots_allocated);
        spin_unlock_bh(&iop_chan->lock);

        if (in_use_descs > 1)
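/* Poll channel status: if the cookie is not yet complete, run the
 * cleanup path and check again.
 */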
        ret = dma_cookie_status(chan, cookie, txstate);

        iop_adma_slot_cleanup(iop_chan);

        return dma_cookie_status(chan, cookie, txstate);
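/* Interrupt handlers: the end-of-transfer and end-of-chain handlers
 * ack their event and schedule the cleanup tasklet; the error handler
 * decodes the status register and reports the failure below.
 */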
        iop_adma_device_clear_eot_status(chan);
        iop_adma_device_clear_eoc_status(chan);

        unsigned long status = iop_chan_get_status(chan);
940 "error ( %s%s%s%s%s%s%s)\n",
941 iop_is_err_int_parity(status, chan) ?
"int_parity " :
"",
942 iop_is_err_mcu_abort(status, chan) ?
"mcu_abort " :
"",
943 iop_is_err_int_tabort(status, chan) ?
"int_tabort " :
"",
944 iop_is_err_int_mabort(status, chan) ?
"int_mabort " :
"",
945 iop_is_err_pci_tabort(status, chan) ?
"pci_tabort " :
"",
946 iop_is_err_pci_mabort(status, chan) ?
"pci_mabort " :
"",
947 iop_is_err_split_tx(status, chan) ?
"split_tx " :
"");
949 iop_adma_device_clear_err_status(chan);
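/* issue_pending() simply appends the pending chain to the hardware. */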
static void iop_adma_issue_pending(struct dma_chan *chan)
                iop_chan_append(iop_chan);
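/* Self-test: run a small memcpy through the engine at probe time and
 * verify the result before advertising the capability.
 */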
#define IOP_ADMA_TEST_SIZE 2000

                ((u8 *) src)[i] = (u8)i;
        if (iop_adma_alloc_chan_resources(dma_chan) < 1) {

        tx = iop_adma_prep_dma_memcpy(dma_chan, dest_dma, src_dma,
                                      IOP_ADMA_TEST_SIZE,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

        if (iop_adma_status(dma_chan, cookie, NULL) !=
                        DMA_SUCCESS) {
                        "Self-test copy timed out, disabling\n");
                goto free_resources;
        if (memcmp(src, dest, IOP_ADMA_TEST_SIZE)) {
                        "Self-test copy failed compare, disabling\n");
                goto free_resources;

        iop_adma_free_chan_resources(dma_chan);
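/* Self-test for xor and xor-validate: each source page sets one
 * distinct bit, so after the xor every destination byte must equal
 * cmp_byte.
 */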
#define IOP_ADMA_NUM_SRC_TEST 4

        u32 zero_sum_result;

        if (!xor_srcs[src_idx]) {
                        ptr[i] = (1 << src_idx);
                cmp_byte ^= (u8) (1 << src_idx);
        cmp_word = (cmp_byte << 24) | (cmp_byte << 16) |
                   (cmp_byte << 8) | cmp_byte;
        if (iop_adma_alloc_chan_resources(dma_chan) < 1) {

        tx = iop_adma_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
                                   IOP_ADMA_NUM_SRC_TEST, PAGE_SIZE,
                                   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

        if (iop_adma_status(dma_chan, cookie, NULL) !=
                        DMA_SUCCESS) {
                        "Self-test xor timed out, disabling\n");
                goto free_resources;
                if (ptr[i] != cmp_word) {
                                "Self-test xor failed compare, disabling\n");
                        goto free_resources;

                goto free_resources;

        for (i = 0; i < IOP_ADMA_NUM_SRC_TEST; i++)
                zero_sum_srcs[i] = xor_srcs[i];
        zero_sum_srcs[i] = dest;

        zero_sum_result = 1;
        for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++)
        tx = iop_adma_prep_dma_xor_val(dma_chan, dma_srcs,
                                       IOP_ADMA_NUM_SRC_TEST + 1, PAGE_SIZE,
                                       &zero_sum_result,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

                        "Self-test zero sum timed out, disabling\n");
                goto free_resources;

        if (zero_sum_result != 0) {
                        "Self-test zero sum failed compare, disabling\n");
                goto free_resources;
        tx = iop_adma_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

                        "Self-test memset timed out, disabling\n");
                goto free_resources;

                        "Self-test memset failed compare, disabling\n");
                goto free_resources;
        zero_sum_result = 0;
        for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++)

        tx = iop_adma_prep_dma_xor_val(dma_chan, dma_srcs,
                                       IOP_ADMA_NUM_SRC_TEST + 1, PAGE_SIZE,
                                       &zero_sum_result,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

                        "Self-test non-zero sum timed out, disabling\n");
                goto free_resources;

        if (zero_sum_result != 1) {
                        "Self-test non-zero sum failed compare, disabling\n");
                goto free_resources;
        iop_adma_free_chan_resources(dma_chan);
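/* P+Q self-test (only when the RAID-6 library is available): generate
 * P and Q with the engine, compare against the software raid6 result,
 * then exercise both passing and failing pq-validate runs.
 */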
#ifdef CONFIG_RAID6_PQ

        u32 zero_sum_result;

        if (iop_adma_alloc_chan_resources(dma_chan) < 1) {
        dev = dma_chan->device->dev;
        tx = iop_adma_prep_dma_pq(dma_chan, pq_dest, pq_src,
                                  IOP_ADMA_NUM_SRC_TEST, (u8 *)raid6_gfexp,
                                  PAGE_SIZE, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

        if (iop_adma_status(dma_chan, cookie, NULL) !=
                        DMA_SUCCESS) {
                dev_err(dev, "Self-test pq timed out, disabling\n");
                goto free_resources;
        if (memcmp(pq_sw[IOP_ADMA_NUM_SRC_TEST],
                   page_address(pq_hw[0]), PAGE_SIZE) != 0) {
                dev_err(dev, "Self-test p failed compare, disabling\n");
                goto free_resources;
        if (memcmp(pq_sw[IOP_ADMA_NUM_SRC_TEST+1],
                   page_address(pq_hw[1]), PAGE_SIZE) != 0) {
                dev_err(dev, "Self-test q failed compare, disabling\n");
                goto free_resources;
        for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 2; i++)

        zero_sum_result = ~0;
        tx = iop_adma_prep_dma_pq_val(dma_chan, &pq_src[IOP_ADMA_NUM_SRC_TEST],
                                      pq_src, IOP_ADMA_NUM_SRC_TEST,
                                      raid6_gfexp, PAGE_SIZE, &zero_sum_result,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

        if (iop_adma_status(dma_chan, cookie, NULL) !=
                        DMA_SUCCESS) {
                dev_err(dev, "Self-test pq-zero-sum timed out, disabling\n");
                goto free_resources;

        if (zero_sum_result != 0) {
                dev_err(dev, "Self-test pq-zero-sum failed to validate: %x\n",
                        zero_sum_result);
                goto free_resources;
        memset(pq_sw[i] + 100, 0, 100);
        memset(pq_sw[i+1] + 200, 0, 200);
        for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 2; i++)

        zero_sum_result = 0;
        tx = iop_adma_prep_dma_pq_val(dma_chan, &pq_src[IOP_ADMA_NUM_SRC_TEST],
                                      pq_src, IOP_ADMA_NUM_SRC_TEST,
                                      raid6_gfexp, PAGE_SIZE, &zero_sum_result,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        cookie = iop_adma_tx_submit(tx);
        iop_adma_issue_pending(dma_chan);

        if (iop_adma_status(dma_chan, cookie, NULL) !=
                        DMA_SUCCESS) {
                dev_err(dev, "Self-test !pq-zero-sum timed out, disabling\n");
                goto free_resources;

                dev_err(dev, "Self-test !pq-zero-sum failed to validate: %x\n",
                        zero_sum_result);
                goto free_resources;
        iop_adma_free_chan_resources(dma_chan);
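/* Probe: map the ADMA registers, allocate the coherent descriptor
 * pool, fill in the dmaengine ops for the advertised capabilities,
 * request the eot/eoc/err interrupts and run the self-tests before
 * registering the device.
 */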
        struct dma_chan *chan, *_chan;
                                resource_size(res), pdev->name))

        dev_dbg(&pdev->dev, "%s: allocated descriptor pool virt %p phys %p\n",
                __func__, adev->dma_desc_pool_virt,
                (void *) adev->dma_desc_pool);
        platform_set_drvdata(pdev, adev);

        INIT_LIST_HEAD(&dma_dev->channels);
        dma_dev->dev = &pdev->dev;
        dma_dev->max_xor = iop_adma_get_max_xor();
        dma_dev->device_prep_dma_xor_val =
                iop_adma_prep_dma_xor_val;
        dma_dev->device_prep_dma_pq_val =
                iop_adma_prep_dma_pq_val;
        dma_dev->device_prep_dma_interrupt =
                iop_adma_prep_dma_interrupt;
        iop_chan = kzalloc(sizeof(*iop_chan), GFP_KERNEL);
                                        resource_size(res));
                goto err_free_iop_chan;

        iop_adma_device_clear_err_status(iop_chan);
        for (i = 0; i < 3; i++) {
                irq_handler_t handler[] = { iop_adma_eot_handler,
                                        iop_adma_eoc_handler,
                                        iop_adma_err_handler };
                        goto err_free_iop_chan;
                ret = devm_request_irq(&pdev->dev, irq,
                                        handler[i], 0, pdev->name, iop_chan);
                        goto err_free_iop_chan;
        INIT_LIST_HEAD(&iop_chan->chain);
        iop_chan->common.device = dma_dev;
        dma_cookie_init(&iop_chan->common);
                ret = iop_adma_memcpy_self_test(adev);
                dev_dbg(&pdev->dev, "memcpy self test returned %d\n", ret);
                        goto err_free_iop_chan;

                ret = iop_adma_xor_val_self_test(adev);
                dev_dbg(&pdev->dev, "xor self test returned %d\n", ret);
                        goto err_free_iop_chan;
#ifdef CONFIG_RAID6_PQ
                ret = iop_adma_pq_zero_sum_self_test(adev);
                dev_dbg(&pdev->dev, "pq self test returned %d\n", ret);
                        goto err_free_iop_chan;

                "( %s%s%s%s%s%s%s)\n",
static void iop_chan_start_null_memcpy(struct iop_adma_chan *iop_chan)
        spin_lock_bh(&iop_chan->lock);
        slot_cnt = iop_chan_memcpy_slot_count(0, &slots_per_op);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_init_memcpy(grp_start, 0);
                iop_desc_set_byte_count(grp_start, iop_chan, 0);
                iop_desc_set_dest_addr(grp_start, iop_chan, 0);
                iop_desc_set_memcpy_src_addr(grp_start, 0);

                cookie = dma_cookie_assign(&sw_desc->async_tx);
                iop_chan->common.completed_cookie = cookie - 1;

                BUG_ON(iop_chan_is_busy(iop_chan));

                iop_adma_device_clear_err_status(iop_chan);

                iop_chan_disable(iop_chan);

                iop_chan_set_next_descriptor(iop_chan, sw_desc->async_tx.phys);

                BUG_ON(iop_desc_get_next_desc(sw_desc));

                iop_chan_enable(iop_chan);
                        "failed to allocate null descriptor\n");
        spin_unlock_bh(&iop_chan->lock);
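/* Same priming sequence for xor-capable channels, using a two-source
 * null xor.
 */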
static void iop_chan_start_null_xor(struct iop_adma_chan *iop_chan)
        spin_lock_bh(&iop_chan->lock);
        slot_cnt = iop_chan_xor_slot_count(0, 2, &slots_per_op);
        sw_desc = iop_adma_alloc_slots(iop_chan, slot_cnt, slots_per_op);
                iop_desc_set_byte_count(grp_start, iop_chan, 0);
                iop_desc_set_dest_addr(grp_start, iop_chan, 0);
                iop_desc_set_xor_src_addr(grp_start, 0, 0);
                iop_desc_set_xor_src_addr(grp_start, 1, 0);

                cookie = dma_cookie_assign(&sw_desc->async_tx);
                iop_chan->common.completed_cookie = cookie - 1;

                BUG_ON(iop_chan_is_busy(iop_chan));

                iop_adma_device_clear_err_status(iop_chan);

                iop_chan_disable(iop_chan);

                iop_chan_set_next_descriptor(iop_chan, sw_desc->async_tx.phys);

                BUG_ON(iop_desc_get_next_desc(sw_desc));

                iop_chan_enable(iop_chan);
                        "failed to allocate null descriptor\n");
        spin_unlock_bh(&iop_chan->lock);
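/* Platform driver definition. */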
        .probe = iop_adma_probe,