+3 -1 drivers/dma/imx-dma.c
+2 -2 drivers/dma/sirf-dma.c
···

--- a/drivers/dma/sirf-dma.c
+++ b/drivers/dma/sirf-dma.c
@@ -109,7 +109,7 @@
 	sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc,
 		node);
 	/* Move the first queued descriptor to active list */
-	list_move_tail(&schan->queued, &schan->active);
+	list_move_tail(&sdesc->node, &schan->active);
 
 	/* Start the DMA transfer */
 	writel_relaxed(sdesc->width, sdma->base + SIRFSOC_DMA_WIDTH_0 +
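The first hunk is worth pausing on. list_move_tail() takes a member node as its first argument, but the old code passed &schan->queued, the head of the queued list itself. That splices the head out of its own ring, orphaning every queued descriptor, and appends an empty head to the active list. The fix passes &sdesc->node, the node embedded in the descriptor that was just looked up. Below is a minimal userspace sketch of the list semantics; the types and helpers are re-creations in the style of <linux/list.h>, not the kernel's code:

#include <stdio.h>

/* Minimal circular doubly linked list in the style of <linux/list.h>;
 * illustration only, not kernel code. */
struct list_head {
	struct list_head *next, *prev;
};

static void INIT_LIST_HEAD(struct list_head *h)
{
	h->next = h;
	h->prev = h;
}

static void list_add_tail(struct list_head *entry, struct list_head *head)
{
	entry->prev = head->prev;
	entry->next = head;
	head->prev->next = entry;
	head->prev = entry;
}

/* list_move_tail(entry, head): unlink 'entry' from the list it is on
 * and append it before 'head'. 'entry' must be a member node; passing
 * a list head here splices the head away from its own members. */
static void list_move_tail(struct list_head *entry, struct list_head *head)
{
	entry->prev->next = entry->next;   /* list_del(entry) */
	entry->next->prev = entry->prev;
	list_add_tail(entry, head);
}

int main(void)
{
	struct list_head queued, active, d1, d2;

	INIT_LIST_HEAD(&queued);
	INIT_LIST_HEAD(&active);
	list_add_tail(&d1, &queued);
	list_add_tail(&d2, &queued);

	/* Correct: move the first member node (what &sdesc->node is
	 * in the driver) onto the active list. */
	list_move_tail(queued.next, &active);

	printf("queued still has d2: %d\n", queued.next == &d2);
	printf("active now has d1:  %d\n", active.next == &d1);

	/* The buggy call was list_move_tail(&queued, &active): it would
	 * unlink the head itself, orphaning d1 and d2. */
	return 0;
}

With the correct call, d1 moves to the active list while d2 stays queued, which is exactly what the driver needs when it starts one transfer at a time.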
@@ -428,7 +428,7 @@
 	unsigned long iflags;
 	int ret;
 
-	if ((xt->dir != DMA_MEM_TO_DEV) || (xt->dir != DMA_DEV_TO_MEM)) {
+	if ((xt->dir != DMA_MEM_TO_DEV) && (xt->dir != DMA_DEV_TO_MEM)) {
 		ret = -EINVAL;
 		goto err_dir;
 	}
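The second hunk fixes a classic always-true condition. Since xt->dir cannot equal two different constants at once, (xt->dir != DMA_MEM_TO_DEV) || (xt->dir != DMA_DEV_TO_MEM) holds for every possible direction, so the old code took the error path and returned -EINVAL on every call, including for the two directions the driver actually supports. By De Morgan's laws, the intended "neither supported direction" test needs &&. A standalone sketch of the truth table; the enum values below are stand-ins rather than the real dma_transfer_direction definitions from dmaengine.h:

#include <stdio.h>

/* Stand-in direction values; the real enum dma_transfer_direction
 * lives in include/linux/dmaengine.h. */
enum dir { DMA_MEM_TO_MEM, DMA_MEM_TO_DEV, DMA_DEV_TO_MEM };

/* Old check: true for every possible value of d. */
static int rejects_buggy(enum dir d)
{
	return (d != DMA_MEM_TO_DEV) || (d != DMA_DEV_TO_MEM);
}

/* Fixed check: true only when d is neither supported direction. */
static int rejects_fixed(enum dir d)
{
	return (d != DMA_MEM_TO_DEV) && (d != DMA_DEV_TO_MEM);
}

int main(void)
{
	enum dir dirs[] = { DMA_MEM_TO_MEM, DMA_MEM_TO_DEV, DMA_DEV_TO_MEM };

	for (int i = 0; i < 3; i++)
		printf("dir=%d  buggy rejects=%d  fixed rejects=%d\n",
		       dirs[i], rejects_buggy(dirs[i]), rejects_fixed(dirs[i]));
	return 0;
}

The buggy predicate rejects all three inputs; the fixed one rejects only the unsupported DMA_MEM_TO_MEM.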