enum dma_status (*device_tx_status)(struct dma_chan *chan,
dma_cookie_t cookie,
struct dma_tx_state *txstate);
+#ifdef CONFIG_ARCH_ROCKCHIP
+ int (*dma_getposition)(struct dma_chan *chan,
+ dma_addr_t *src, dma_addr_t *dst);
+#endif
void (*device_issue_pending)(struct dma_chan *chan);
};
size_t period_len, enum dma_transfer_direction dir,
unsigned long flags)
{
+ unsigned int t=0;
return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
- period_len, dir, flags, NULL);
+ period_len, dir, flags, &t);
}
+#ifdef CONFIG_ARCH_ROCKCHIP
+/**
+ * dmaengine_prep_dma_infiniteloop - prepare a cyclic DMA descriptor with an
+ *	explicit loop count, using the Rockchip-extended
+ *	device_prep_dma_cyclic() callback (which takes an extra
+ *	"unsigned int *" loop-count argument).
+ * @chan: DMA channel to prepare the transfer on
+ * @buf_addr: bus address of the cyclic buffer
+ * @buf_len: total length of the buffer
+ * @period_len: length of one period within the buffer
+ * @dir: transfer direction
+ * @flags: descriptor flags
+ * @limit: loop count handed to the driver by address; exact semantics
+ *	(e.g. whether 0 means "loop forever") are driver-defined —
+ *	confirm against the Rockchip DMA driver implementation.
+ *
+ * Returns whatever the driver's device_prep_dma_cyclic() callback returns.
+ */
+static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_infiniteloop(
+		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
+		size_t period_len, enum dma_transfer_direction dir,
+		unsigned long flags, unsigned int limit)
+{
+	/*
+	 * Pass @limit by address directly; the original mutable local copy
+	 * was discarded after the call, so it added nothing.
+	 */
+	return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
+						    period_len, dir, flags,
+						    &limit);
+}
+#endif
static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(
struct dma_chan *chan, struct dma_interleaved_template *xt,