#ifndef LINUX_DMAENGINE_H
#define LINUX_DMAENGINE_H

#include <linux/device.h>
#include <linux/uio.h>
#include <linux/bitmap.h>
#include <linux/types.h>

#define DMA_MIN_COOKIE 1
#define DMA_MAX_COOKIE INT_MAX

#define dma_submit_error(cookie) ((cookie) < 0 ? 1 : 0)
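/*
 * Hedged usage sketch (not part of this header): submit a previously prepared
 * descriptor and check the returned cookie with dma_submit_error().  It relies
 * on dmaengine_submit() and struct dma_async_tx_descriptor, both declared
 * further down in this header; the function name and the -EIO return value
 * are illustrative assumptions.
 */
static inline int example_submit_one(struct dma_async_tx_descriptor *desc)
{
        dma_cookie_t cookie = dmaengine_submit(desc);

        if (dma_submit_error(cookie))
                return -EIO;    /* a negative cookie means the submit failed */
        return 0;
}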
static inline const char *dma_chan_name(struct dma_chan *chan)
{
        return dev_name(&chan->dev->device);
}
#ifdef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH

#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH

        spin_lock_bh(&txd->lock);

        spin_unlock_bh(&txd->lock);
#define DMA_HAS_PQ_CONTINUE (1 << 15)
                unsigned int src_cnt, size_t len, unsigned long flags);

                unsigned int src_cnt, const unsigned char *scf,

                unsigned int src_cnt, const unsigned char *scf, size_t len,

                unsigned long flags);
        struct dma_async_tx_descriptor *(*device_prep_dma_sg)(
                struct dma_chan *chan,
                struct scatterlist *dst_sg, unsigned int dst_nents,
                struct scatterlist *src_sg, unsigned int src_nents,
                unsigned long flags);

                unsigned long flags);
static inline int dmaengine_device_control(struct dma_chan *chan,
                                           enum dma_ctrl_cmd cmd,
                                           unsigned long arg)
{
        return chan->device->device_control(chan, cmd, arg);
}
static inline int dmaengine_slave_config(struct dma_chan *chan,
                                         struct dma_slave_config *config)
{
        return dmaengine_device_control(chan, DMA_SLAVE_CONFIG,
                        (unsigned long)config);
}
        return chan->device->device_prep_slave_sg(chan, &sg, 1,
                                                  dir, flags, NULL);

        return chan->device->device_prep_slave_sg(chan, sgl, sg_len,
                                                  dir, flags, NULL);
#ifdef CONFIG_RAPIDIO_DMA_ENGINE
static inline struct dma_async_tx_descriptor *dmaengine_prep_rio_sg(
        struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,
        enum dma_transfer_direction dir, unsigned long flags,
        struct rio_dma_ext *rio_ext)
{
        return chan->device->device_prep_slave_sg(chan, sgl, sg_len,
                                                  dir, flags, rio_ext);
}
#endif
static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
                struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
                size_t period_len, enum dma_transfer_direction dir,
                unsigned long flags)
{
        return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
                                                period_len, dir, flags, NULL);
}
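/*
 * Hedged usage sketch (not part of this header): a minimal slave-DMA transmit
 * path built from the wrappers above; dmaengine_prep_slave_single() is the
 * single-buffer wrapper whose body appears in fragment form above.  The
 * dma_slave_config values, the function name, the error codes, and the
 * assumption that the buffer is already mapped to a dma_addr_t are all
 * illustrative, not requirements of this API.
 */
static inline int example_start_tx(struct dma_chan *chan, dma_addr_t buf,
                                   size_t len, dma_addr_t fifo_addr)
{
        struct dma_slave_config cfg = {
                .direction = DMA_MEM_TO_DEV,
                .dst_addr = fifo_addr,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst = 4,
        };
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        if (dmaengine_slave_config(chan, &cfg))
                return -EINVAL;

        desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -ENOMEM;

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie))
                return -EIO;

        /* nothing starts until the pending queue is flushed to hardware */
        dma_async_issue_pending(chan);
        return 0;
}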
static inline int dmaengine_terminate_all(struct dma_chan *chan)
{
        return dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
}
static inline int dmaengine_pause(struct dma_chan *chan)
{
        return dmaengine_device_control(chan, DMA_PAUSE, 0);
}
static inline int dmaengine_resume(struct dma_chan *chan)
{
        return dmaengine_device_control(chan, DMA_RESUME, 0);
}
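/*
 * Hedged sketch (not part of this header): common driver uses of the control
 * wrappers above.  The function names and the situations described in the
 * comments are illustrative assumptions.
 */
static inline void example_channel_suspend(struct dma_chan *chan)
{
        /* hold the channel quiescent, e.g. across a system suspend */
        dmaengine_pause(chan);
}

static inline void example_channel_abort(struct dma_chan *chan)
{
        /* drop everything queued on the channel, e.g. after a timeout */
        dmaengine_terminate_all(chan);
}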
static inline enum dma_status dmaengine_tx_status(struct dma_chan *chan,
        dma_cookie_t cookie, struct dma_tx_state *state)
{
        return chan->device->device_tx_status(chan, cookie, state);
}
static inline bool dmaengine_check_align(u8 align, size_t off1, size_t off2,
                                         size_t len)
{
        size_t mask;

        if (!align)
                return true;
        mask = (1 << align) - 1;
        if (mask & (off1 | off2 | len))
                return false;
        return true;
}
static inline bool is_dma_copy_aligned(struct dma_device *dev, size_t off1,
                                       size_t off2, size_t len)
{
        return dmaengine_check_align(dev->copy_align, off1, off2, len);
}
static inline bool is_dma_xor_aligned(struct dma_device *dev, size_t off1,
                                      size_t off2, size_t len)
{
        return dmaengine_check_align(dev->xor_align, off1, off2, len);
}
static inline bool is_dma_pq_aligned(struct dma_device *dev, size_t off1,
                                     size_t off2, size_t len)
{
        return dmaengine_check_align(dev->pq_align, off1, off2, len);
}
static inline bool is_dma_fill_aligned(struct dma_device *dev, size_t off1,
                                       size_t off2, size_t len)
{
        return dmaengine_check_align(dev->fill_align, off1, off2, len);
}
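/*
 * Hedged sketch (not part of this header): gate a copy offload on the device's
 * alignment constraint and fall back to a CPU copy otherwise.  The function
 * name and the plain memcpy() fallback are illustrative assumptions (memcpy
 * comes from <linux/string.h>).
 */
static inline void example_copy(struct dma_device *dev, void *dst,
                                const void *src, size_t dst_off,
                                size_t src_off, size_t len)
{
        if (!is_dma_copy_aligned(dev, dst_off, src_off, len)) {
                /* constraints not met: do the copy synchronously on the CPU */
                memcpy((char *)dst + dst_off, (const char *)src + src_off, len);
                return;
        }
        /* otherwise a DMA memcpy descriptor would be prepared and submitted */
}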
static inline void
dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue)
{
        dma->max_pq = maxpq;
        if (has_pq_continue)
                dma->max_pq |= DMA_HAS_PQ_CONTINUE;
}
static inline bool dmaf_p_disabled_continue(enum dma_ctrl_flags flags)
{
        enum dma_ctrl_flags mask = DMA_PREP_CONTINUE | DMA_PREP_PQ_DISABLE_P;

        return (flags & mask) == mask;
}
static inline bool dma_dev_has_pq_continue(struct dma_device *dma)
{
        return (dma->max_pq & DMA_HAS_PQ_CONTINUE) == DMA_HAS_PQ_CONTINUE;
}
static inline unsigned short dma_dev_to_maxpq(struct dma_device *dma)
{
        return dma->max_pq & ~DMA_HAS_PQ_CONTINUE;
}
static inline int dma_maxpq(struct dma_device *dma, enum dma_ctrl_flags flags)
{
        if (dma_dev_has_pq_continue(dma) || !dmaf_continue(flags))
                return dma_dev_to_maxpq(dma);
        else if (dmaf_p_disabled_continue(flags))
                return dma_dev_to_maxpq(dma) - 1;
        else if (dmaf_continue(flags))
                return dma_dev_to_maxpq(dma) - 3;
        BUG();
}
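/*
 * Worked example (illustrative, not part of this header): for a device that
 * advertises max_pq = 8 and does not set DMA_HAS_PQ_CONTINUE, a fresh P+Q
 * operation may use all 8 sources.  When the operation is continued
 * (DMA_PREP_CONTINUE) the old P and Q results have to be fed back in as extra
 * sources: a full P+Q continuation consumes 3 source slots, so dma_maxpq()
 * returns 8 - 3 = 5; a Q-only continuation (DMA_PREP_PQ_DISABLE_P also set)
 * consumes 1 slot, so dma_maxpq() returns 8 - 1 = 7.
 */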
#ifdef CONFIG_DMA_ENGINE
#ifdef CONFIG_NET_DMA
#define net_dmaengine_get()	dmaengine_get()
#define net_dmaengine_put()	dmaengine_put()
#else
static inline void net_dmaengine_get(void)
{
}
static inline void net_dmaengine_put(void)
{
}
#endif
#ifdef CONFIG_ASYNC_TX_DMA
#define async_dmaengine_get()	dmaengine_get()
#define async_dmaengine_put()	dmaengine_put()
#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
#define async_dma_find_channel(type) dma_find_channel(DMA_ASYNC_TX)
#else
#define async_dma_find_channel(type) dma_find_channel(type)
#endif /* CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH */
#else
static inline void async_dmaengine_get(void)
{
}
static inline void async_dmaengine_put(void)
{
}
dma_cookie_t dma_async_memcpy_buf_to_buf(struct dma_chan *chan,
        void *dest, void *src, size_t len);
dma_cookie_t dma_async_memcpy_pg_to_pg(struct dma_chan *chan,
        struct page *dest_pg, unsigned int dest_off, struct page *src_pg,
        unsigned int src_off, size_t len);
#define first_dma_cap(mask) __first_dma_cap(&(mask))
#define next_dma_cap(n, mask) __next_dma_cap((n), &(mask))
#define dma_cap_set(tx, mask) __dma_cap_set((tx), &(mask))
#define dma_cap_clear(tx, mask) __dma_cap_clear((tx), &(mask))
#define dma_cap_zero(mask) __dma_cap_zero(&(mask))
#define dma_has_cap(tx, mask) __dma_has_cap((tx), &(mask))

static inline int
__dma_has_cap(enum dma_transaction_type tx_type, dma_cap_mask_t *srcp)
{
        return test_bit(tx_type, srcp->bits);
}
#define for_each_dma_cap_mask(cap, mask) \
        for ((cap) = first_dma_cap(mask); \
                (cap) < DMA_TX_TYPE_END; \
                (cap) = next_dma_cap((cap), (mask)))
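/*
 * Hedged sketch (not part of this header): walk the capability mask of a
 * channel's device and log each supported transaction type.  The function
 * name, the local variables, and the pr_info() format are illustrative
 * assumptions.
 */
static inline void example_dump_caps(struct dma_chan *chan)
{
        dma_cap_mask_t mask = chan->device->cap_mask;
        enum dma_transaction_type cap;

        for_each_dma_cap_mask(cap, mask)
                pr_info("dma channel supports transaction type %d\n", cap);
}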
static inline void dma_async_issue_pending(struct dma_chan *chan)
{
        chan->device->device_issue_pending(chan);
}

#define dma_async_memcpy_issue_pending(chan) dma_async_issue_pending(chan)
        status = chan->device->device_tx_status(chan, cookie, &state);

#define dma_async_memcpy_complete(chan, cookie, last, used)\
        dma_async_is_tx_complete(chan, cookie, last, used)
        if (last_complete <= last_used) {
                if ((cookie <= last_complete) || (cookie > last_used))
                        return DMA_SUCCESS;
        } else {
                if ((cookie <= last_complete) && (cookie > last_used))
                        return DMA_SUCCESS;
        }
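/*
 * Hedged sketch (not part of this header): busy-poll a cookie until the
 * transfer leaves DMA_IN_PROGRESS.  Real users normally rely on descriptor
 * callbacks or wait queues instead of spinning; this loop only illustrates
 * the status API, and the function name is an assumption.
 */
static inline enum dma_status example_wait_for_cookie(struct dma_chan *chan,
                                                      dma_cookie_t cookie)
{
        enum dma_status status;
        dma_cookie_t last, used;

        do {
                status = dma_async_is_tx_complete(chan, cookie, &last, &used);
        } while (status == DMA_IN_PROGRESS);

        return status;
}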
#ifdef CONFIG_DMA_ENGINE
#define dma_request_channel(mask, x, y) __dma_request_channel(&(mask), x, y)
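/*
 * Hedged sketch (not part of this header): request any channel that can do
 * memory-to-memory copies.  A NULL filter function and NULL filter parameter
 * accept the first capable channel; the function name is an assumption.
 */
static inline struct dma_chan *example_get_memcpy_chan(void)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_MEMCPY, mask);

        return dma_request_channel(mask, NULL, NULL);
}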
int dma_memcpy_to_iovec(struct dma_chan *chan, struct iovec *iov,
        struct dma_pinned_list *pinned_list, unsigned char *kdata, size_t len);
int dma_memcpy_pg_to_iovec(struct dma_chan *chan, struct iovec *iov,
        struct dma_pinned_list *pinned_list, struct page *page,
        unsigned int offset, size_t len);