Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

async_tx: make async_tx channel switching opt-in

The majority of drivers in drivers/dma/ will never establish cross
channel operation chains and do not need the extra overhead in struct
dma_async_tx_descriptor. Make channel switching opt-in by default.

Cc: Anatolij Gustschin <agust@denx.de>
Cc: Ira Snyder <iws@ovro.caltech.edu>
Cc: Linus Walleij <linus.walleij@stericsson.com>
Cc: Saeed Bishara <saeed@marvell.com>
Signed-off-by: Dan Williams <dan.j.williams@intel.com>

+11 -8
+5 -2
drivers/dma/Kconfig
--- a/drivers/dma/Kconfig
+++ b/drivers/dma/Kconfig
@@ -46,7 +46,7 @@

 	  If unsure, say N.

-config ASYNC_TX_DISABLE_CHANNEL_SWITCH
+config ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	bool

 config AMBA_PL08X
@@ -62,7 +62,6 @@
 	depends on PCI && X86
 	select DMA_ENGINE
 	select DCA
-	select ASYNC_TX_DISABLE_CHANNEL_SWITCH
 	select ASYNC_TX_DISABLE_PQ_VAL_DMA
 	select ASYNC_TX_DISABLE_XOR_VAL_DMA
 	help
@@ -76,6 +77,7 @@
 	tristate "Intel IOP ADMA support"
 	depends on ARCH_IOP32X || ARCH_IOP33X || ARCH_IOP13XX
 	select DMA_ENGINE
+	select ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	help
 	  Enable support for the Intel(R) IOP Series RAID engines.

@@ -101,6 +102,7 @@
 	tristate "Freescale Elo and Elo Plus DMA support"
 	depends on FSL_SOC
 	select DMA_ENGINE
+	select ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	---help---
 	  Enable support for the Freescale Elo and Elo Plus DMA controllers.
 	  The Elo is the DMA controller on some 82xx and 83xx parts, and the
@@ -118,6 +117,7 @@
 	bool "Marvell XOR engine support"
 	depends on PLAT_ORION
 	select DMA_ENGINE
+	select ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	---help---
 	  Enable support for the Marvell XOR engine.

@@ -176,6 +174,7 @@
 	depends on 440SPe || 440SP
 	select DMA_ENGINE
 	select ARCH_HAS_ASYNC_TX_FIND_CHANNEL
+	select ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	help
 	  Enable support for the AMCC PPC440SPe RAID engines.
+2 -2
drivers/dma/dmaengine.c
--- a/drivers/dma/dmaengine.c
+++ b/drivers/dma/dmaengine.c
@@ -706,7 +706,7 @@
 	BUG_ON(!device->dev);

 	/* note: this only matters in the
-	 * CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH=y case
+	 * CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH=n case
 	 */
 	if (device_has_all_tx_types(device))
 		dma_cap_set(DMA_ASYNC_TX, device->cap_mask);
@@ -980,7 +980,7 @@
 			      struct dma_chan *chan)
 {
 	tx->chan = chan;
-#ifndef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifdef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	spin_lock_init(&tx->lock);
 #endif
 }
+4 -4
include/linux/dmaengine.h
--- a/include/linux/dmaengine.h
+++ b/include/linux/dmaengine.h
@@ -321,14 +321,14 @@
 	dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);
 	dma_async_tx_callback callback;
 	void *callback_param;
-#ifndef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifdef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 	struct dma_async_tx_descriptor *next;
 	struct dma_async_tx_descriptor *parent;
 	spinlock_t lock;
 #endif
 };

-#ifdef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 static inline void txd_lock(struct dma_async_tx_descriptor *txd)
 {
 }
@@ -656,11 +656,11 @@
 #ifdef CONFIG_ASYNC_TX_DMA
 #define async_dmaengine_get()	dmaengine_get()
 #define async_dmaengine_put()	dmaengine_put()
-#ifdef CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH
+#ifndef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
 #define async_dma_find_channel(type) dma_find_channel(DMA_ASYNC_TX)
 #else
 #define async_dma_find_channel(type) dma_find_channel(type)
-#endif /* CONFIG_ASYNC_TX_DISABLE_CHANNEL_SWITCH */
+#endif /* CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH */
 #else
 static inline void async_dmaengine_get(void)
 {