Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

dmaengine: Remove the context argument to the prep_dma_cyclic operation

The argument is always set to NULL and never used. Remove it.

Signed-off-by: Laurent Pinchart <laurent.pinchart+renesas@ideasonboard.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>

Authored by Laurent Pinchart; committed by Vinod Koul.
31c1e5a1 f02323ec

+22 -28
+1 -1
drivers/dma/amba-pl08x.c
··· 1653 1653 static struct dma_async_tx_descriptor *pl08x_prep_dma_cyclic( 1654 1654 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 1655 1655 size_t period_len, enum dma_transfer_direction direction, 1656 - unsigned long flags, void *context) 1656 + unsigned long flags) 1657 1657 { 1658 1658 struct pl08x_dma_chan *plchan = to_pl08x_chan(chan); 1659 1659 struct pl08x_driver_data *pl08x = plchan->host;
+1 -2
drivers/dma/at_hdmac.c
··· 893 893 * @period_len: number of bytes for each period 894 894 * @direction: transfer direction, to or from device 895 895 * @flags: tx descriptor status flags 896 - * @context: transfer context (ignored) 897 896 */ 898 897 static struct dma_async_tx_descriptor * 899 898 atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 900 899 size_t period_len, enum dma_transfer_direction direction, 901 - unsigned long flags, void *context) 900 + unsigned long flags) 902 901 { 903 902 struct at_dma_chan *atchan = to_at_dma_chan(chan); 904 903 struct at_dma_slave *atslave = chan->private;
+1 -1
drivers/dma/bcm2835-dma.c
··· 335 335 static struct dma_async_tx_descriptor *bcm2835_dma_prep_dma_cyclic( 336 336 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 337 337 size_t period_len, enum dma_transfer_direction direction, 338 - unsigned long flags, void *context) 338 + unsigned long flags) 339 339 { 340 340 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); 341 341 enum dma_slave_buswidth dev_width;
+1 -1
drivers/dma/dma-jz4740.c
··· 433 433 static struct dma_async_tx_descriptor *jz4740_dma_prep_dma_cyclic( 434 434 struct dma_chan *c, dma_addr_t buf_addr, size_t buf_len, 435 435 size_t period_len, enum dma_transfer_direction direction, 436 - unsigned long flags, void *context) 436 + unsigned long flags) 437 437 { 438 438 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); 439 439 struct jz4740_dma_desc *desc;
+1 -1
drivers/dma/edma.c
··· 598 598 static struct dma_async_tx_descriptor *edma_prep_dma_cyclic( 599 599 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 600 600 size_t period_len, enum dma_transfer_direction direction, 601 - unsigned long tx_flags, void *context) 601 + unsigned long tx_flags) 602 602 { 603 603 struct edma_chan *echan = to_edma_chan(chan); 604 604 struct device *dev = chan->device->dev;
+1 -3
drivers/dma/ep93xx_dma.c
··· 1092 1092 * @period_len: length of a single period 1093 1093 * @dir: direction of the operation 1094 1094 * @flags: tx descriptor status flags 1095 - * @context: operation context (ignored) 1096 1095 * 1097 1096 * Prepares a descriptor for cyclic DMA operation. This means that once the 1098 1097 * descriptor is submitted, we will be submitting in a @period_len sized ··· 1104 1105 static struct dma_async_tx_descriptor * 1105 1106 ep93xx_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr, 1106 1107 size_t buf_len, size_t period_len, 1107 - enum dma_transfer_direction dir, unsigned long flags, 1108 - void *context) 1108 + enum dma_transfer_direction dir, unsigned long flags) 1109 1109 { 1110 1110 struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan); 1111 1111 struct ep93xx_dma_desc *desc, *first;
+1 -1
drivers/dma/fsl-edma.c
··· 517 517 static struct dma_async_tx_descriptor *fsl_edma_prep_dma_cyclic( 518 518 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, 519 519 size_t period_len, enum dma_transfer_direction direction, 520 - unsigned long flags, void *context) 520 + unsigned long flags) 521 521 { 522 522 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); 523 523 struct fsl_edma_desc *fsl_desc;
+1 -1
drivers/dma/imx-dma.c
··· 866 866 static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic( 867 867 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, 868 868 size_t period_len, enum dma_transfer_direction direction, 869 - unsigned long flags, void *context) 869 + unsigned long flags) 870 870 { 871 871 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); 872 872 struct imxdma_engine *imxdma = imxdmac->imxdma;
+1 -1
drivers/dma/imx-sdma.c
··· 1125 1125 static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic( 1126 1126 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, 1127 1127 size_t period_len, enum dma_transfer_direction direction, 1128 - unsigned long flags, void *context) 1128 + unsigned long flags) 1129 1129 { 1130 1130 struct sdma_channel *sdmac = to_sdma_chan(chan); 1131 1131 struct sdma_engine *sdma = sdmac->sdma;
+1 -1
drivers/dma/mmp_pdma.c
··· 601 601 mmp_pdma_prep_dma_cyclic(struct dma_chan *dchan, 602 602 dma_addr_t buf_addr, size_t len, size_t period_len, 603 603 enum dma_transfer_direction direction, 604 - unsigned long flags, void *context) 604 + unsigned long flags) 605 605 { 606 606 struct mmp_pdma_chan *chan; 607 607 struct mmp_pdma_desc_sw *first = NULL, *prev = NULL, *new;
+1 -1
drivers/dma/mmp_tdma.c
··· 389 389 static struct dma_async_tx_descriptor *mmp_tdma_prep_dma_cyclic( 390 390 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, 391 391 size_t period_len, enum dma_transfer_direction direction, 392 - unsigned long flags, void *context) 392 + unsigned long flags) 393 393 { 394 394 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); 395 395 struct mmp_tdma_desc *desc;
+1 -1
drivers/dma/mxs-dma.c
··· 589 589 static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic( 590 590 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, 591 591 size_t period_len, enum dma_transfer_direction direction, 592 - unsigned long flags, void *context) 592 + unsigned long flags) 593 593 { 594 594 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); 595 595 struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
+1 -2
drivers/dma/omap-dma.c
··· 853 853 854 854 static struct dma_async_tx_descriptor *omap_dma_prep_dma_cyclic( 855 855 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 856 - size_t period_len, enum dma_transfer_direction dir, unsigned long flags, 857 - void *context) 856 + size_t period_len, enum dma_transfer_direction dir, unsigned long flags) 858 857 { 859 858 struct omap_dmadev *od = to_omap_dma_dev(chan->device); 860 859 struct omap_chan *c = to_omap_dma_chan(chan);
+1 -1
drivers/dma/pl330.c
··· 2362 2362 static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic( 2363 2363 struct dma_chan *chan, dma_addr_t dma_addr, size_t len, 2364 2364 size_t period_len, enum dma_transfer_direction direction, 2365 - unsigned long flags, void *context) 2365 + unsigned long flags) 2366 2366 { 2367 2367 struct dma_pl330_desc *desc = NULL, *first = NULL; 2368 2368 struct dma_pl330_chan *pch = to_pchan(chan);
+1 -2
drivers/dma/s3c24xx-dma.c
··· 889 889 890 890 static struct dma_async_tx_descriptor *s3c24xx_dma_prep_dma_cyclic( 891 891 struct dma_chan *chan, dma_addr_t addr, size_t size, size_t period, 892 - enum dma_transfer_direction direction, unsigned long flags, 893 - void *context) 892 + enum dma_transfer_direction direction, unsigned long flags) 894 893 { 895 894 struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(chan); 896 895 struct s3c24xx_dma_engine *s3cdma = s3cchan->host;
+1 -1
drivers/dma/sa11x0-dma.c
··· 612 612 613 613 static struct dma_async_tx_descriptor *sa11x0_dma_prep_dma_cyclic( 614 614 struct dma_chan *chan, dma_addr_t addr, size_t size, size_t period, 615 - enum dma_transfer_direction dir, unsigned long flags, void *context) 615 + enum dma_transfer_direction dir, unsigned long flags) 616 616 { 617 617 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); 618 618 struct sa11x0_dma_desc *txd;
+1 -1
drivers/dma/sh/shdma-base.c
··· 668 668 static struct dma_async_tx_descriptor *shdma_prep_dma_cyclic( 669 669 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 670 670 size_t period_len, enum dma_transfer_direction direction, 671 - unsigned long flags, void *context) 671 + unsigned long flags) 672 672 { 673 673 struct shdma_chan *schan = to_shdma_chan(chan); 674 674 struct shdma_dev *sdev = to_shdma_dev(schan->dma_chan.device);
+1 -1
drivers/dma/sirf-dma.c
··· 580 580 static struct dma_async_tx_descriptor * 581 581 sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr, 582 582 size_t buf_len, size_t period_len, 583 - enum dma_transfer_direction direction, unsigned long flags, void *context) 583 + enum dma_transfer_direction direction, unsigned long flags) 584 584 { 585 585 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); 586 586 struct sirfsoc_dma_desc *sdesc = NULL;
+1 -2
drivers/dma/ste_dma40.c
··· 2531 2531 static struct dma_async_tx_descriptor * 2532 2532 dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr, 2533 2533 size_t buf_len, size_t period_len, 2534 - enum dma_transfer_direction direction, unsigned long flags, 2535 - void *context) 2534 + enum dma_transfer_direction direction, unsigned long flags) 2536 2535 { 2537 2536 unsigned int periods = buf_len / period_len; 2538 2537 struct dma_async_tx_descriptor *txd;
+1 -1
drivers/dma/tegra20-apb-dma.c
··· 1055 1055 static struct dma_async_tx_descriptor *tegra_dma_prep_dma_cyclic( 1056 1056 struct dma_chan *dc, dma_addr_t buf_addr, size_t buf_len, 1057 1057 size_t period_len, enum dma_transfer_direction direction, 1058 - unsigned long flags, void *context) 1058 + unsigned long flags) 1059 1059 { 1060 1060 struct tegra_dma_channel *tdc = to_tegra_dma_chan(dc); 1061 1061 struct tegra_dma_desc *dma_desc = NULL;
+2 -2
include/linux/dmaengine.h
··· 669 669 struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)( 670 670 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, 671 671 size_t period_len, enum dma_transfer_direction direction, 672 - unsigned long flags, void *context); 672 + unsigned long flags); 673 673 struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)( 674 674 struct dma_chan *chan, struct dma_interleaved_template *xt, 675 675 unsigned long flags); ··· 744 744 unsigned long flags) 745 745 { 746 746 return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len, 747 - period_len, dir, flags, NULL); 747 + period_len, dir, flags); 748 748 } 749 749 750 750 static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(