Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

DMA-API: sound: fix dma mask handling in a lot of drivers

This code sequence is unsafe in modules:

static u64 mask = DMA_BIT_MASK(something);
...
if (!dev->dma_mask)
dev->dma_mask = &mask;

as if a module is reloaded, the mask will be pointing at the original
module's mask address, and this can lead to oopses. Moreover, they
all follow this with:

if (!dev->coherent_dma_mask)
dev->coherent_dma_mask = mask;

where 'mask' is the same value as the statically defined mask, and this
bypasses the architecture's check on whether the DMA mask is possible.

Fix these issues by using the new dma_coerce_mask_and_coherent()
function.

Acked-by: Mark Brown <broonie@linaro.org>
Acked-by: Takashi Iwai <tiwai@suse.de>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>

+61 -104
+4 -6
sound/arm/pxa2xx-pcm.c
··· 11 11 */ 12 12 13 13 #include <linux/module.h> 14 + #include <linux/dma-mapping.h> 14 15 #include <linux/dmaengine.h> 15 16 16 17 #include <sound/core.h> ··· 84 83 .mmap = pxa2xx_pcm_mmap, 85 84 }; 86 85 87 - static u64 pxa2xx_pcm_dmamask = 0xffffffff; 88 - 89 86 int pxa2xx_pcm_new(struct snd_card *card, struct pxa2xx_pcm_client *client, 90 87 struct snd_pcm **rpcm) 91 88 { ··· 99 100 pcm->private_data = client; 100 101 pcm->private_free = pxa2xx_pcm_free_dma_buffers; 101 102 102 - if (!card->dev->dma_mask) 103 - card->dev->dma_mask = &pxa2xx_pcm_dmamask; 104 - if (!card->dev->coherent_dma_mask) 105 - card->dev->coherent_dma_mask = 0xffffffff; 103 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 104 + if (ret) 105 + goto out; 106 106 107 107 if (play) { 108 108 int stream = SNDRV_PCM_STREAM_PLAYBACK;
+4 -7
sound/soc/atmel/atmel-pcm.c
··· 68 68 } 69 69 EXPORT_SYMBOL_GPL(atmel_pcm_mmap); 70 70 71 - static u64 atmel_pcm_dmamask = DMA_BIT_MASK(32); 72 - 73 71 int atmel_pcm_new(struct snd_soc_pcm_runtime *rtd) 74 72 { 75 73 struct snd_card *card = rtd->card->snd_card; 76 74 struct snd_pcm *pcm = rtd->pcm; 77 - int ret = 0; 75 + int ret; 78 76 79 - if (!card->dev->dma_mask) 80 - card->dev->dma_mask = &atmel_pcm_dmamask; 81 - if (!card->dev->coherent_dma_mask) 82 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 77 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 78 + if (ret) 79 + return ret; 83 80 84 81 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 85 82 pr_debug("atmel-pcm: allocating PCM playback DMA buffer\n");
+4 -7
sound/soc/blackfin/bf5xx-ac97-pcm.c
··· 415 415 } 416 416 } 417 417 418 - static u64 bf5xx_pcm_dmamask = DMA_BIT_MASK(32); 419 - 420 418 static int bf5xx_pcm_ac97_new(struct snd_soc_pcm_runtime *rtd) 421 419 { 422 420 struct snd_card *card = rtd->card->snd_card; 423 421 struct snd_pcm *pcm = rtd->pcm; 424 - int ret = 0; 422 + int ret; 425 423 426 424 pr_debug("%s enter\n", __func__); 427 - if (!card->dev->dma_mask) 428 - card->dev->dma_mask = &bf5xx_pcm_dmamask; 429 - if (!card->dev->coherent_dma_mask) 430 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 425 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 426 + if (ret) 427 + return ret; 431 428 432 429 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 433 430 ret = bf5xx_pcm_preallocate_dma_buffer(pcm,
+4 -6
sound/soc/blackfin/bf5xx-i2s-pcm.c
··· 323 323 .silence = bf5xx_pcm_silence, 324 324 }; 325 325 326 - static u64 bf5xx_pcm_dmamask = DMA_BIT_MASK(32); 327 - 328 326 static int bf5xx_pcm_i2s_new(struct snd_soc_pcm_runtime *rtd) 329 327 { 330 328 struct snd_card *card = rtd->card->snd_card; 331 329 size_t size = bf5xx_pcm_hardware.buffer_bytes_max; 330 + int ret; 332 331 333 332 pr_debug("%s enter\n", __func__); 334 - if (!card->dev->dma_mask) 335 - card->dev->dma_mask = &bf5xx_pcm_dmamask; 336 - if (!card->dev->coherent_dma_mask) 337 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 333 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 334 + if (ret) 335 + return ret; 338 336 339 337 return snd_pcm_lib_preallocate_pages_for_all(rtd->pcm, 340 338 SNDRV_DMA_TYPE_DEV, card->dev, size, size);
+3 -6
sound/soc/davinci/davinci-pcm.c
··· 844 844 } 845 845 } 846 846 847 - static u64 davinci_pcm_dmamask = DMA_BIT_MASK(32); 848 - 849 847 static int davinci_pcm_new(struct snd_soc_pcm_runtime *rtd) 850 848 { 851 849 struct snd_card *card = rtd->card->snd_card; 852 850 struct snd_pcm *pcm = rtd->pcm; 853 851 int ret; 854 852 855 - if (!card->dev->dma_mask) 856 - card->dev->dma_mask = &davinci_pcm_dmamask; 857 - if (!card->dev->coherent_dma_mask) 858 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 853 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 854 + if (ret) 855 + return ret; 859 856 860 857 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 861 858 ret = davinci_pcm_preallocate_dma_buffer(pcm,
+3 -6
sound/soc/fsl/fsl_dma.c
··· 298 298 { 299 299 struct snd_card *card = rtd->card->snd_card; 300 300 struct snd_pcm *pcm = rtd->pcm; 301 - static u64 fsl_dma_dmamask = DMA_BIT_MASK(36); 302 301 int ret; 303 302 304 - if (!card->dev->dma_mask) 305 - card->dev->dma_mask = &fsl_dma_dmamask; 306 - 307 - if (!card->dev->coherent_dma_mask) 308 - card->dev->coherent_dma_mask = fsl_dma_dmamask; 303 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(36)); 304 + if (ret) 305 + return ret; 309 306 310 307 /* Some codecs have separate DAIs for playback and capture, so we 311 308 * should allocate a DMA buffer only for the streams that are valid.
+5 -7
sound/soc/fsl/imx-pcm-fiq.c
··· 272 272 return 0; 273 273 } 274 274 275 - static u64 imx_pcm_dmamask = DMA_BIT_MASK(32); 276 - 277 275 static int imx_pcm_new(struct snd_soc_pcm_runtime *rtd) 278 276 { 279 277 struct snd_card *card = rtd->card->snd_card; 280 278 struct snd_pcm *pcm = rtd->pcm; 281 - int ret = 0; 279 + int ret; 282 280 283 - if (!card->dev->dma_mask) 284 - card->dev->dma_mask = &imx_pcm_dmamask; 285 - if (!card->dev->coherent_dma_mask) 286 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 281 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 282 + if (ret) 283 + return ret; 284 + 287 285 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 288 286 ret = imx_pcm_preallocate_dma_buffer(pcm, 289 287 SNDRV_PCM_STREAM_PLAYBACK);
+4 -6
sound/soc/fsl/mpc5200_dma.c
··· 299 299 .hw_params = psc_dma_hw_params, 300 300 }; 301 301 302 - static u64 psc_dma_dmamask = DMA_BIT_MASK(32); 303 302 static int psc_dma_new(struct snd_soc_pcm_runtime *rtd) 304 303 { 305 304 struct snd_card *card = rtd->card->snd_card; ··· 306 307 struct snd_pcm *pcm = rtd->pcm; 307 308 struct psc_dma *psc_dma = snd_soc_dai_get_drvdata(rtd->cpu_dai); 308 309 size_t size = psc_dma_hardware.buffer_bytes_max; 309 - int rc = 0; 310 + int rc; 310 311 311 312 dev_dbg(rtd->platform->dev, "psc_dma_new(card=%p, dai=%p, pcm=%p)\n", 312 313 card, dai, pcm); 313 314 314 - if (!card->dev->dma_mask) 315 - card->dev->dma_mask = &psc_dma_dmamask; 316 - if (!card->dev->coherent_dma_mask) 317 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 315 + rc = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 316 + if (rc) 317 + return rc; 318 318 319 319 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 320 320 rc = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, pcm->card->dev,
+4 -8
sound/soc/jz4740/jz4740-pcm.c
··· 297 297 } 298 298 } 299 299 300 - static u64 jz4740_pcm_dmamask = DMA_BIT_MASK(32); 301 - 302 300 static int jz4740_pcm_new(struct snd_soc_pcm_runtime *rtd) 303 301 { 304 302 struct snd_card *card = rtd->card->snd_card; 305 303 struct snd_pcm *pcm = rtd->pcm; 306 - int ret = 0; 304 + int ret; 307 305 308 - if (!card->dev->dma_mask) 309 - card->dev->dma_mask = &jz4740_pcm_dmamask; 310 - 311 - if (!card->dev->coherent_dma_mask) 312 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 306 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 307 + if (ret) 308 + return ret; 313 309 314 310 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 315 311 ret = jz4740_pcm_preallocate_dma_buffer(pcm,
+3 -6
sound/soc/kirkwood/kirkwood-dma.c
··· 59 59 .fifo_size = 0, 60 60 }; 61 61 62 - static u64 kirkwood_dma_dmamask = DMA_BIT_MASK(32); 63 - 64 62 static irqreturn_t kirkwood_dma_irq(int irq, void *dev_id) 65 63 { 66 64 struct kirkwood_dma_data *priv = dev_id; ··· 290 292 struct snd_pcm *pcm = rtd->pcm; 291 293 int ret; 292 294 293 - if (!card->dev->dma_mask) 294 - card->dev->dma_mask = &kirkwood_dma_dmamask; 295 - if (!card->dev->coherent_dma_mask) 296 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 295 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 296 + if (ret) 297 + return ret; 297 298 298 299 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 299 300 ret = kirkwood_dma_preallocate_dma_buffer(pcm,
+4 -5
sound/soc/nuc900/nuc900-pcm.c
··· 314 314 snd_pcm_lib_preallocate_free_for_all(pcm); 315 315 } 316 316 317 - static u64 nuc900_pcm_dmamask = DMA_BIT_MASK(32); 318 317 static int nuc900_dma_new(struct snd_soc_pcm_runtime *rtd) 319 318 { 320 319 struct snd_card *card = rtd->card->snd_card; 321 320 struct snd_pcm *pcm = rtd->pcm; 321 + int ret; 322 322 323 - if (!card->dev->dma_mask) 324 - card->dev->dma_mask = &nuc900_pcm_dmamask; 325 - if (!card->dev->coherent_dma_mask) 326 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 323 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 324 + if (ret) 325 + return ret; 327 326 328 327 snd_pcm_lib_preallocate_pages_for_all(pcm, SNDRV_DMA_TYPE_DEV, 329 328 card->dev, 4 * 1024, (4 * 1024) - 1);
+4 -7
sound/soc/omap/omap-pcm.c
··· 156 156 .mmap = omap_pcm_mmap, 157 157 }; 158 158 159 - static u64 omap_pcm_dmamask = DMA_BIT_MASK(64); 160 - 161 159 static int omap_pcm_preallocate_dma_buffer(struct snd_pcm *pcm, 162 160 int stream) 163 161 { ··· 200 202 { 201 203 struct snd_card *card = rtd->card->snd_card; 202 204 struct snd_pcm *pcm = rtd->pcm; 203 - int ret = 0; 205 + int ret; 204 206 205 - if (!card->dev->dma_mask) 206 - card->dev->dma_mask = &omap_pcm_dmamask; 207 - if (!card->dev->coherent_dma_mask) 208 - card->dev->coherent_dma_mask = DMA_BIT_MASK(64); 207 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(64)); 208 + if (ret) 209 + return ret; 209 210 210 211 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 211 212 ret = omap_pcm_preallocate_dma_buffer(pcm,
+4 -7
sound/soc/pxa/pxa2xx-pcm.c
··· 87 87 .mmap = pxa2xx_pcm_mmap, 88 88 }; 89 89 90 - static u64 pxa2xx_pcm_dmamask = DMA_BIT_MASK(32); 91 - 92 90 static int pxa2xx_soc_pcm_new(struct snd_soc_pcm_runtime *rtd) 93 91 { 94 92 struct snd_card *card = rtd->card->snd_card; 95 93 struct snd_pcm *pcm = rtd->pcm; 96 - int ret = 0; 94 + int ret; 97 95 98 - if (!card->dev->dma_mask) 99 - card->dev->dma_mask = &pxa2xx_pcm_dmamask; 100 - if (!card->dev->coherent_dma_mask) 101 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 96 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 97 + if (ret) 98 + return ret; 102 99 103 100 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 104 101 ret = pxa2xx_pcm_preallocate_dma_buffer(pcm,
+3 -6
sound/soc/s6000/s6000-pcm.c
··· 444 444 snd_pcm_lib_preallocate_free_for_all(pcm); 445 445 } 446 446 447 - static u64 s6000_pcm_dmamask = DMA_BIT_MASK(32); 448 - 449 447 static int s6000_pcm_new(struct snd_soc_pcm_runtime *runtime) 450 448 { 451 449 struct snd_card *card = runtime->card->snd_card; ··· 454 456 params = snd_soc_dai_get_dma_data(runtime->cpu_dai, 455 457 pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream); 456 458 457 - if (!card->dev->dma_mask) 458 - card->dev->dma_mask = &s6000_pcm_dmamask; 459 - if (!card->dev->coherent_dma_mask) 460 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 459 + res = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 460 + if (res) 461 + return res; 461 462 462 463 if (params->dma_in) { 463 464 s6dmac_disable_chan(DMA_MASK_DMAC(params->dma_in),
+4 -7
sound/soc/samsung/dma.c
··· 406 406 } 407 407 } 408 408 409 - static u64 dma_mask = DMA_BIT_MASK(32); 410 - 411 409 static int dma_new(struct snd_soc_pcm_runtime *rtd) 412 410 { 413 411 struct snd_card *card = rtd->card->snd_card; 414 412 struct snd_pcm *pcm = rtd->pcm; 415 - int ret = 0; 413 + int ret; 416 414 417 415 pr_debug("Entered %s\n", __func__); 418 416 419 - if (!card->dev->dma_mask) 420 - card->dev->dma_mask = &dma_mask; 421 - if (!card->dev->coherent_dma_mask) 422 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 417 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 418 + if (ret) 419 + return ret; 423 420 424 421 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 425 422 ret = preallocate_dma_buffer(pcm,
+4 -7
sound/soc/samsung/idma.c
··· 383 383 return 0; 384 384 } 385 385 386 - static u64 idma_mask = DMA_BIT_MASK(32); 387 - 388 386 static int idma_new(struct snd_soc_pcm_runtime *rtd) 389 387 { 390 388 struct snd_card *card = rtd->card->snd_card; 391 389 struct snd_pcm *pcm = rtd->pcm; 392 - int ret = 0; 390 + int ret; 393 391 394 - if (!card->dev->dma_mask) 395 - card->dev->dma_mask = &idma_mask; 396 - if (!card->dev->coherent_dma_mask) 397 - card->dev->coherent_dma_mask = DMA_BIT_MASK(32); 392 + ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32)); 393 + if (ret) 394 + return ret; 398 395 399 396 if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) { 400 397 ret = preallocate_idma_buffer(pcm,