#include <linux/types.h>
#include <mach/hardware.h>
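
/*
 * Memory copy (DMA) channel registers, given as offsets from the
 * per-channel MMR base.  CCR/CSR are the channel control and status
 * registers, DAR/NDAR the current and next descriptor addresses,
 * PADR/PUADR the PCI (upper) address, LADR the local address, BCR the
 * byte count and DCR the descriptor control word.  The returned
 * addresses are used with __raw_readl()/__raw_writel() style MMIO
 * accesses.
 */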
#define DMA_CCR(chan) (chan->mmr_base + 0x0)
#define DMA_CSR(chan) (chan->mmr_base + 0x4)
#define DMA_DAR(chan) (chan->mmr_base + 0xc)
#define DMA_NDAR(chan) (chan->mmr_base + 0x10)
#define DMA_PADR(chan) (chan->mmr_base + 0x14)
#define DMA_PUADR(chan) (chan->mmr_base + 0x18)
#define DMA_LADR(chan) (chan->mmr_base + 0x1c)
#define DMA_BCR(chan) (chan->mmr_base + 0x20)
#define DMA_DCR(chan) (chan->mmr_base + 0x24)
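
/*
 * Application accelerator unit (AAU) registers, also offsets from
 * chan->mmr_base: ACR/ASR are the accelerator control and status
 * registers, ADAR/ANDAR the current and next descriptor addresses,
 * SAR(n) the first four source addresses, DAR the destination, ABCR
 * the byte count and ADCR the descriptor control word.  AAU_SAR_EDCR()
 * reaches the extended source-address/descriptor-control registers used
 * when an operation has more than four sources; note that it relies on
 * a 'chan' variable being in scope rather than taking one as a
 * parameter.  The AAU_EDCR*_IDX values are the slots occupied by the
 * extended descriptor control words inside a descriptor's src_edc[]
 * array.
 */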
#define AAU_ACR(chan) (chan->mmr_base + 0x0)
#define AAU_ASR(chan) (chan->mmr_base + 0x4)
#define AAU_ADAR(chan) (chan->mmr_base + 0x8)
#define AAU_ANDAR(chan) (chan->mmr_base + 0xc)
#define AAU_SAR(src, chan) (chan->mmr_base + (0x10 + ((src) << 2)))
#define AAU_DAR(chan) (chan->mmr_base + 0x20)
#define AAU_ABCR(chan) (chan->mmr_base + 0x24)
#define AAU_ADCR(chan) (chan->mmr_base + 0x28)
#define AAU_SAR_EDCR(src_edc) (chan->mmr_base + (0x02c + ((src_edc-4) << 2)))
#define AAU_EDCR0_IDX 8
#define AAU_EDCR1_IDX 17
#define AAU_EDCR2_IDX 26
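
/* PQ zero-sum source setup is the same as ordinary PQ source setup. */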
#define iop_desc_set_pq_zero_sum_src_addr iop_desc_set_pq_src_addr

static inline int iop_adma_get_max_xor(void)

        int id = chan->device->id;

static inline void iop_chan_set_next_descriptor(struct iop_adma_chan *chan,

        int id = chan->device->id;
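
/*
 * Channel status "busy" flag plus the per-descriptor transfer limits:
 * operations larger than these byte counts have to be split across
 * multiple descriptor slots by the slot-count helpers below.
 */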
#define IOP_ADMA_STATUS_BUSY (1 << 10)
#define IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT (1024)
#define IOP_ADMA_XOR_MAX_BYTE_COUNT (16 * 1024 * 1024)
#define IOP_ADMA_MAX_BYTE_COUNT (16 * 1024 * 1024)

static inline int iop_chan_is_busy(struct iop_adma_chan *chan)

        return (desc->idx & (num_slots - 1)) ? 0 : 1;
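
/*
 * Slot accounting: a software descriptor can span several hardware
 * descriptor slots.  Each helper returns the total number of slots an
 * operation of 'len' bytes needs and reports, via *slots_per_op, how
 * many slots a single hardware operation consumes.
 */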
static inline int iop_chan_memcpy_slot_count(size_t len, int *slots_per_op)

static inline int iop_chan_memset_slot_count(size_t len, int *slots_per_op)
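
/*
 * An AAU descriptor only has room for a limited number of source
 * addresses; additional sources spill into extended descriptor words,
 * so the XOR slot count is a table lookup keyed by the source count.
 */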
static inline int iop3xx_aau_xor_slot_count(size_t len, int src_cnt,
                                        int *slots_per_op)

        static const char slot_count_table[] = {

        *slots_per_op = slot_count_table[src_cnt - 1];
        return *slots_per_op;
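
/*
 * Interrupt-only descriptors transfer no data, so their slot count is
 * that of a zero-length memcpy on the DMA channels or a zero-length
 * two-source XOR on the AAU.
 */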
        switch (chan->device->id) {

                return iop_chan_memcpy_slot_count(0, slots_per_op);

                return iop3xx_aau_xor_slot_count(0, 2, slots_per_op);

static inline int iop_chan_xor_slot_count(size_t len, int src_cnt,
                                        int *slots_per_op)

        int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

        slot_cnt += *slots_per_op;

        slot_cnt += *slots_per_op;

        int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

        slot_cnt += *slots_per_op;

        slot_cnt += *slots_per_op;

        switch (chan->device->id) {

                return hw_desc.dma->dest_addr;

                return hw_desc.aau->dest_addr;

        switch (chan->device->id) {

                return hw_desc.dma->byte_count;

                return hw_desc.aau->byte_count;
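
/*
 * __desc_idx() maps a logical source index onto the AAU descriptor
 * layout: the first four sources live in src[], later sources live in
 * the extended src_edc[] entries, skipping over the slots reserved for
 * the extended descriptor control words.
 */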
static inline int __desc_idx(int src_idx)

        static const int desc_idx_table[] = { 0, 0, 0, 0,

        return desc_idx_table[src_idx];

        switch (chan->device->id) {

                return hw_desc.dma->src_addr;

                return hw_desc.aau->src[src_idx];

                return hw_desc.aau->src_edc[__desc_idx(src_idx)].src_addr;

static inline void iop3xx_aau_desc_set_src_addr(struct iop3xx_desc_aau *hw_desc,
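
/*
 * Memcpy descriptor control: start from a zeroed control word, enable
 * memory-to-memory transfer and select the PCI bus command used for
 * the transfer (0xe here, presumably the memory read block command).
 */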
        u_desc_ctrl.value = 0;
        u_desc_ctrl.field.mem_to_mem_en = 1;
        u_desc_ctrl.field.pci_transaction = 0xe;
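
/*
 * Memset descriptor control: block command 0x2 selects a memory block
 * fill for the first block, with the destination write enabled.
 */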
        u_desc_ctrl.value = 0;
        u_desc_ctrl.field.blk1_cmd_ctrl = 0x2;
        u_desc_ctrl.field.dest_write_en = 1;
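
/*
 * XOR descriptor control: blk_ctrl is sized to the number of sources
 * (larger values pull in more extended descriptor control words),
 * source-enable bits for the first eight sources go into the control
 * word itself while enables for later sources are accumulated in
 * 'edcr' words, and finally the destination write and the first
 * block's command (0x7) are set.
 */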
        u_desc_ctrl.value = 0;

        u_desc_ctrl.field.blk_ctrl = 0x3;

        for (i = 24; i < src_cnt; i++) {
                edcr |= (1 << shift);

        if (!u_desc_ctrl.field.blk_ctrl) {

                u_desc_ctrl.field.blk_ctrl = 0x3;

        for (i = 16; i < src_cnt; i++) {
                edcr |= (1 << shift);

        if (!u_desc_ctrl.field.blk_ctrl)
                u_desc_ctrl.field.blk_ctrl = 0x2;

        for (i = 8; i < src_cnt; i++) {
                edcr |= (1 << shift);

        for (i = 0; i < src_cnt; i++) {
                u_desc_ctrl.value |= (1 << shift);

        if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
                u_desc_ctrl.field.blk_ctrl = 0x1;

        u_desc_ctrl.field.dest_write_en = 1;
        u_desc_ctrl.field.blk1_cmd_ctrl = 0x7;

        return u_desc_ctrl.value;

        iop3xx_desc_init_xor(desc->hw_desc, src_cnt, flags);
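
/*
 * Zero-sum setup: each slot is initialized like a normal XOR
 * descriptor, then the destination write is suppressed and
 * zero_result_en is set so the AAU only checks whether the XOR of the
 * sources is zero instead of storing it.
 */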
        for (i = 0, j = 0; (slot_cnt -= slots_per_op) >= 0;
             i += slots_per_op, j++) {

                u_desc_ctrl.value = iop3xx_desc_init_xor(iter, src_cnt, flags);
                u_desc_ctrl.field.dest_write_en = 0;
                u_desc_ctrl.field.zero_result_en = 1;
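
/*
 * The next control-word builder repeats the blk_ctrl sizing above but
 * leaves the destination write disabled, producing a null (no-op) XOR
 * descriptor usable purely for dependency/interrupt purposes.
 */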
        u_desc_ctrl.value = 0;

        u_desc_ctrl.field.blk_ctrl = 0x3;

        if (!u_desc_ctrl.field.blk_ctrl) {

                u_desc_ctrl.field.blk_ctrl = 0x3;

        if (!u_desc_ctrl.field.blk_ctrl)
                u_desc_ctrl.field.blk_ctrl = 0x2;

        if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
                u_desc_ctrl.field.blk_ctrl = 0x1;

        u_desc_ctrl.field.dest_write_en = 0;

        switch (chan->device->id) {

                hw_desc.dma->byte_count = byte_count;

                hw_desc.aau->byte_count = byte_count;
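
/*
 * Interrupt (no-op) descriptor: a regular descriptor with a zero byte
 * count and zeroed addresses, used only to raise a completion
 * interrupt.
 */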
        switch (chan->device->id) {

                iop_desc_init_memcpy(desc, 1);
                hw_desc.dma->byte_count = 0;
                hw_desc.dma->dest_addr = 0;
                hw_desc.dma->src_addr = 0;

                hw_desc.aau->byte_count = 0;
                hw_desc.aau->dest_addr = 0;
                hw_desc.aau->src[0] = 0;
                hw_desc.aau->src[1] = 0;

        switch (chan->device->id) {
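
/*
 * For multi-slot XOR/zero-sum operations the source address must be
 * programmed into every hardware slot the operation occupies, hence
 * the per-slot loops below.
 */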
        for (i = 0; (slot_cnt -= slots_per_op) >= 0;

                iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);

        for (i = 0; (slot_cnt -= slots_per_op) >= 0;

                iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);

        return hw_desc.dma->next_desc;

        hw_desc.dma->next_desc = 0;
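
/*
 * iop_chan_append() sets bit 1 (0x2) in the channel control word to
 * resume descriptor fetching after new descriptors have been chained
 * onto the list; the enable/disable helpers manipulate the same
 * channel control register.
 */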
static inline void iop_chan_append(struct iop_adma_chan *chan)

        dma_chan_ctrl |= 0x2;

static inline void iop_chan_disable(struct iop_adma_chan *chan)

static inline void iop_chan_enable(struct iop_adma_chan *chan)

static inline void iop_adma_device_clear_eot_status(struct iop_adma_chan *chan)

static inline void iop_adma_device_clear_eoc_status(struct iop_adma_chan *chan)

static inline void iop_adma_device_clear_err_status(struct iop_adma_chan *chan)
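
/*
 * Status/error acknowledgement: each clear routine reads the channel
 * status register, masks the bits it is acknowledging and writes the
 * value back (the status bits appear to be write-one-to-clear).  The
 * error clear keeps a different set of error bits per channel type.
 */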
        switch (chan->device->id) {

                status &= (1 << 5) | (1 << 3) | (1 << 2) | (1 << 1);

iop_is_err_int_parity(unsigned long status, struct iop_adma_chan *chan)

iop_is_err_mcu_abort(unsigned long status, struct iop_adma_chan *chan)

iop_is_err_int_tabort(unsigned long status, struct iop_adma_chan *chan)

iop_is_err_int_mabort(unsigned long status, struct iop_adma_chan *chan)
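
/*
 * PCI target/master aborts and split-transaction errors are reported
 * only by the DMA channels, so these predicates switch on the device
 * id; the AAU has no PCI interface.
 */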
iop_is_err_pci_tabort(unsigned long status, struct iop_adma_chan *chan)

        switch (chan->device->id) {

iop_is_err_pci_mabort(unsigned long status, struct iop_adma_chan *chan)

        switch (chan->device->id) {

iop_is_err_split_tx(unsigned long status, struct iop_adma_chan *chan)

        switch (chan->device->id) {