dev_vdbg(chan2dev(&dwc->chan), "scan_descriptors: llp=0x%x\n", llp);
list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
+ /* check first descriptor's addr */
+ if (desc->txd.phys == llp)
+ return;
+
+ /* check first descriptor's llp */
if (desc->lli.llp == llp)
/* This one is currently in progress */
return;
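The two checks guard against completing a descriptor the controller has not finished with: the channel's LLP register holds the physical address of the next LLI to fetch, so it can match either the first descriptor's own address (txd.phys) or the llp field of the descriptor currently being executed. A minimal sketch of how the checks sit in the scan loop, assuming the surrounding dw_dmac.c structure (dwc_descriptor_complete() stands in for whatever completion path the function actually uses):

list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
	/* LLP still points at this LLI: hardware has not executed it yet */
	if (desc->txd.phys == llp)
		return;

	/* LLP holds this descriptor's next pointer: it is in progress */
	if (desc->lli.llp == llp)
		return;

	/* everything before the in-progress descriptor has completed */
	dwc_descriptor_complete(dwc, desc);
}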
if (err)
goto err_dma;
- pm_runtime_set_active(&pdev->dev);
- pm_runtime_enable(&pdev->dev);
+ pm_runtime_put_noidle(&pdev->dev);
pm_runtime_allow(&pdev->dev);
return 0;
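The probe-side change works because the PCI core already initializes runtime PM for every device during enumeration, so the driver only has to drop the usage count it inherits and allow runtime suspend; the remove path below takes the mirror-image reference back with pm_runtime_get_noresume()/pm_runtime_forbid(). Roughly what the core does per device (a sketch of pci_pm_init() in drivers/pci/pci.c; exact contents vary by kernel version):

	/* done by the PCI core, so the driver need not set_active/enable itself */
	pm_runtime_forbid(&dev->dev);
	pm_runtime_set_active(&dev->dev);
	pm_runtime_enable(&dev->dev);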
static void __devexit intel_mid_dma_remove(struct pci_dev *pdev)
{
struct middma_device *device = pci_get_drvdata(pdev);
+
+ pm_runtime_get_noresume(&pdev->dev);
+ pm_runtime_forbid(&pdev->dev);
middma_shutdown(pdev);
pci_dev_put(pdev);
kfree(device);
static int dma_runtime_suspend(struct device *dev)
{
struct pci_dev *pci_dev = to_pci_dev(dev);
- return dma_suspend(pci_dev, PMSG_SUSPEND);
+ struct middma_device *device = pci_get_drvdata(pci_dev);
+
+ device->state = SUSPENDED;
+ return 0;
}
static int dma_runtime_resume(struct device *dev)
{
struct pci_dev *pci_dev = to_pci_dev(dev);
- return dma_resume(pci_dev);
+ struct middma_device *device = pci_get_drvdata(pci_dev);
+
+ device->state = RUNNING;
+ iowrite32(REG_BIT0, device->dma_base + DMA_CFG);
+ return 0;
}
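These runtime callbacks, together with dma_runtime_idle() below, are only reachable if they are wired into the driver's dev_pm_ops, referenced from the PCI driver's .driver.pm field. A minimal sketch of that wiring; the struct name here is illustrative rather than taken from the patch:

static const struct dev_pm_ops intel_mid_dma_pm = {
	.runtime_suspend = dma_runtime_suspend,
	.runtime_resume  = dma_runtime_resume,
	.runtime_idle    = dma_runtime_idle,
};

/* hooked up from the pci_driver definition via .driver.pm = &intel_mid_dma_pm */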
static int dma_runtime_idle(struct device *dev)
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > IOP_ADMA_MAX_BYTE_COUNT));
+ BUG_ON(len > IOP_ADMA_MAX_BYTE_COUNT);
dev_dbg(iop_chan->device->common.dev, "%s len: %u\n",
__func__, len);
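The unlikely() annotations are dropped because BUG_ON() already marks its condition as unlikely, so wrapping it again is redundant; the same cleanup repeats below for the other iop-adma, mv_xor and ppc440spe-adma prep routines. The generic definition (approximately as in include/asm-generic/bug.h):

#define BUG_ON(condition) do { if (unlikely(condition)) BUG(); } while (0)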
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > IOP_ADMA_MAX_BYTE_COUNT));
+ BUG_ON(len > IOP_ADMA_MAX_BYTE_COUNT);
dev_dbg(iop_chan->device->common.dev, "%s len: %u\n",
__func__, len);
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > IOP_ADMA_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > IOP_ADMA_XOR_MAX_BYTE_COUNT);
dev_dbg(iop_chan->device->common.dev,
"%s src_cnt: %d len: %u flags: %lx\n",
if (unlikely(len < MV_XOR_MIN_BYTE_COUNT))
return NULL;
- BUG_ON(unlikely(len > MV_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > MV_XOR_MAX_BYTE_COUNT);
spin_lock_bh(&mv_chan->lock);
slot_cnt = mv_chan_memcpy_slot_count(len);
if (unlikely(len < MV_XOR_MIN_BYTE_COUNT))
return NULL;
- BUG_ON(unlikely(len > MV_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > MV_XOR_MAX_BYTE_COUNT);
spin_lock_bh(&mv_chan->lock);
slot_cnt = mv_chan_memset_slot_count(len);
if (unlikely(len < MV_XOR_MIN_BYTE_COUNT))
return NULL;
- BUG_ON(unlikely(len > MV_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > MV_XOR_MAX_BYTE_COUNT);
dev_dbg(mv_chan->device->common.dev,
"%s src_cnt: %d len: dest %x %u flags: %ld\n",
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > PPC440SPE_ADMA_DMA_MAX_BYTE_COUNT));
+ BUG_ON(len > PPC440SPE_ADMA_DMA_MAX_BYTE_COUNT);
spin_lock_bh(&ppc440spe_chan->lock);
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > PPC440SPE_ADMA_DMA_MAX_BYTE_COUNT));
+ BUG_ON(len > PPC440SPE_ADMA_DMA_MAX_BYTE_COUNT);
spin_lock_bh(&ppc440spe_chan->lock);
dma_dest, dma_src, src_cnt));
if (unlikely(!len))
return NULL;
- BUG_ON(unlikely(len > PPC440SPE_ADMA_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > PPC440SPE_ADMA_XOR_MAX_BYTE_COUNT);
dev_dbg(ppc440spe_chan->device->common.dev,
"ppc440spe adma%d: %s src_cnt: %d len: %u int_en: %d\n",
ADMA_LL_DBG(prep_dma_pq_dbg(ppc440spe_chan->device->id,
dst, src, src_cnt));
BUG_ON(!len);
- BUG_ON(unlikely(len > PPC440SPE_ADMA_XOR_MAX_BYTE_COUNT));
+ BUG_ON(len > PPC440SPE_ADMA_XOR_MAX_BYTE_COUNT);
BUG_ON(!src_cnt);
if (src_cnt == 1 && dst[1] == src[0]) {