dma-coherent: remove the DMA_MEMORY_MAP and DMA_MEMORY_IO flags

DMA_MEMORY_IO was never used in the tree, so remove it. That means there is
no need for the DMA_MEMORY_MAP flag either now, so remove it as well and
change dma_declare_coherent_memory to return a normal errno value.

Signed-off-by: Christoph Hellwig <hch@lst.de>
Reviewed-by: Marek Szyprowski <m.szyprowski@samsung.com>

+52 -111
+1 -20
Documentation/DMA-API.txt
··· 592 592 593 593 flags can be ORed together and are: 594 594 595 - - DMA_MEMORY_MAP - request that the memory returned from 596 - dma_alloc_coherent() be directly writable. 597 - 598 - - DMA_MEMORY_IO - request that the memory returned from 599 - dma_alloc_coherent() be addressable using read()/write()/memcpy_toio() etc. 600 - 601 - One or both of these flags must be present. 602 - 603 595 - DMA_MEMORY_EXCLUSIVE - only allocate memory from the declared regions. 604 596 Do not allow dma_alloc_coherent() to fall back to system memory when 605 597 it's out of memory in the declared region. 606 598 607 - The return value will be either DMA_MEMORY_MAP or DMA_MEMORY_IO and 608 - must correspond to a passed in flag (i.e. no returning DMA_MEMORY_IO 609 - if only DMA_MEMORY_MAP were passed in) for success or zero for 610 - failure. 611 - 612 - Note, for DMA_MEMORY_IO returns, all subsequent memory returned by 613 - dma_alloc_coherent() may no longer be accessed directly, but instead 614 - must be accessed using the correct bus functions. If your driver 615 - isn't prepared to handle this contingency, it should not specify 616 - DMA_MEMORY_IO in the input flags. 617 - 618 - As a simplification for the platforms, only **one** such region of 599 + As a simplification for the platforms, only *one* such region of 619 600 memory may be declared per device. 620 601 621 602 For reasons of efficiency, most platforms choose to track the declared
+17 -27
arch/arm/mach-imx/mach-imx27_visstrim_m10.c
··· 245 245 static void __init visstrim_analog_camera_init(void) 246 246 { 247 247 struct platform_device *pdev; 248 - int dma; 249 248 250 249 gpio_set_value(TVP5150_PWDN, 1); 251 250 ndelay(1); ··· 257 258 if (IS_ERR(pdev)) 258 259 return; 259 260 260 - dma = dma_declare_coherent_memory(&pdev->dev, 261 - mx2_camera_base, mx2_camera_base, 262 - MX2_CAMERA_BUF_SIZE, 263 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE); 264 - if (!(dma & DMA_MEMORY_MAP)) 265 - return; 261 + dma_declare_coherent_memory(&pdev->dev, mx2_camera_base, 262 + mx2_camera_base, MX2_CAMERA_BUF_SIZE, 263 + DMA_MEMORY_EXCLUSIVE); 266 264 } 267 265 268 266 static void __init visstrim_reserve(void) ··· 440 444 static void __init visstrim_coda_init(void) 441 445 { 442 446 struct platform_device *pdev; 443 - int dma; 444 447 445 448 pdev = imx27_add_coda(); 446 - dma = dma_declare_coherent_memory(&pdev->dev, 447 - mx2_camera_base + MX2_CAMERA_BUF_SIZE, 448 - mx2_camera_base + MX2_CAMERA_BUF_SIZE, 449 - MX2_CAMERA_BUF_SIZE, 450 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE); 451 - if (!(dma & DMA_MEMORY_MAP)) 452 - return; 449 + dma_declare_coherent_memory(&pdev->dev, 450 + mx2_camera_base + MX2_CAMERA_BUF_SIZE, 451 + mx2_camera_base + MX2_CAMERA_BUF_SIZE, 452 + MX2_CAMERA_BUF_SIZE, 453 + DMA_MEMORY_EXCLUSIVE); 453 454 } 454 455 455 456 /* DMA deinterlace */ ··· 459 466 { 460 467 int ret = -ENOMEM; 461 468 struct platform_device *pdev = &visstrim_deinterlace; 462 - int dma; 463 469 464 470 ret = platform_device_register(pdev); 465 471 466 - dma = dma_declare_coherent_memory(&pdev->dev, 467 - mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE, 468 - mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE, 469 - MX2_CAMERA_BUF_SIZE, 470 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE); 471 - if (!(dma & DMA_MEMORY_MAP)) 472 - return; 472 + dma_declare_coherent_memory(&pdev->dev, 473 + mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE, 474 + mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE, 475 + MX2_CAMERA_BUF_SIZE, 476 + DMA_MEMORY_EXCLUSIVE); 473 477 } 474 
478 475 479 /* Emma-PrP for format conversion */ 476 480 static void __init visstrim_emmaprp_init(void) 477 481 { 478 482 struct platform_device *pdev; 479 - int dma; 483 + int ret; 480 484 481 485 pdev = imx27_add_mx2_emmaprp(); 482 486 if (IS_ERR(pdev)) ··· 483 493 * Use the same memory area as the analog camera since both 484 494 * devices are, by nature, exclusive. 485 495 */ 486 - dma = dma_declare_coherent_memory(&pdev->dev, 496 + ret = dma_declare_coherent_memory(&pdev->dev, 487 497 mx2_camera_base, mx2_camera_base, 488 498 MX2_CAMERA_BUF_SIZE, 489 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE); 490 - if (!(dma & DMA_MEMORY_MAP)) 499 + DMA_MEMORY_EXCLUSIVE); 500 + if (ret) 491 501 pr_err("Failed to declare memory for emmaprp\n"); 492 502 } 493 503
+6 -6
arch/arm/mach-imx/mach-mx31moboard.c
··· 475 475 476 476 static int __init mx31moboard_init_cam(void) 477 477 { 478 - int dma, ret = -ENOMEM; 478 + int ret; 479 479 struct platform_device *pdev; 480 480 481 481 imx31_add_ipu_core(); ··· 484 484 if (IS_ERR(pdev)) 485 485 return PTR_ERR(pdev); 486 486 487 - dma = dma_declare_coherent_memory(&pdev->dev, 488 - mx3_camera_base, mx3_camera_base, 489 - MX3_CAMERA_BUF_SIZE, 490 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE); 491 - if (!(dma & DMA_MEMORY_MAP)) 487 + ret = dma_declare_coherent_memory(&pdev->dev, 488 + mx3_camera_base, mx3_camera_base, 489 + MX3_CAMERA_BUF_SIZE, 490 + DMA_MEMORY_EXCLUSIVE); 491 + if (ret) 492 492 goto err; 493 493 494 494 ret = platform_device_add(pdev);
+1 -2
arch/sh/drivers/pci/fixups-dreamcast.c
··· 63 63 res.end = GAPSPCI_DMA_BASE + GAPSPCI_DMA_SIZE - 1; 64 64 res.flags = IORESOURCE_MEM; 65 65 pcibios_resource_to_bus(dev->bus, &region, &res); 66 - BUG_ON(!dma_declare_coherent_memory(&dev->dev, 66 + BUG_ON(dma_declare_coherent_memory(&dev->dev, 67 67 res.start, 68 68 region.start, 69 69 resource_size(&res), 70 - DMA_MEMORY_MAP | 71 70 DMA_MEMORY_EXCLUSIVE)); 72 71 break; 73 72 default:
+14 -32
drivers/base/dma-coherent.c
··· 46 46 int pages = size >> PAGE_SHIFT; 47 47 int bitmap_size = BITS_TO_LONGS(pages) * sizeof(long); 48 48 49 - if ((flags & (DMA_MEMORY_MAP | DMA_MEMORY_IO)) == 0) 50 - goto out; 51 49 if (!size) 52 50 goto out; 53 51 54 - if (flags & DMA_MEMORY_MAP) 55 - mem_base = memremap(phys_addr, size, MEMREMAP_WC); 56 - else 57 - mem_base = ioremap(phys_addr, size); 52 + mem_base = memremap(phys_addr, size, MEMREMAP_WC); 58 53 if (!mem_base) 59 54 goto out; 60 55 ··· 72 77 73 78 out: 74 79 kfree(dma_mem); 75 - if (mem_base) { 76 - if (flags & DMA_MEMORY_MAP) 77 - memunmap(mem_base); 78 - else 79 - iounmap(mem_base); 80 - } 80 + if (mem_base) 81 + memunmap(mem_base); 81 82 return false; 82 83 } 83 84 ··· 82 91 if (!mem) 83 92 return; 84 93 85 - if (mem->flags & DMA_MEMORY_MAP) 86 - memunmap(mem->virt_base); 87 - else 88 - iounmap(mem->virt_base); 94 + memunmap(mem->virt_base); 89 95 kfree(mem->bitmap); 90 96 kfree(mem); 91 97 } ··· 104 116 dma_addr_t device_addr, size_t size, int flags) 105 117 { 106 118 struct dma_coherent_mem *mem; 119 + int ret; 107 120 108 - if (!dma_init_coherent_memory(phys_addr, device_addr, size, flags, 109 - &mem)) 110 - return 0; 121 + ret = dma_init_coherent_memory(phys_addr, device_addr, size, flags, &mem); 122 + if (ret) 123 + return ret; 111 124 112 - if (dma_assign_coherent_memory(dev, mem) == 0) 113 - return flags & DMA_MEMORY_MAP ? 
DMA_MEMORY_MAP : DMA_MEMORY_IO; 114 - 115 - dma_release_coherent_memory(mem); 116 - return 0; 125 + ret = dma_assign_coherent_memory(dev, mem); 126 + if (ret) 127 + dma_release_coherent_memory(mem); 128 + return ret; 117 129 } 118 130 EXPORT_SYMBOL(dma_declare_coherent_memory); 119 131 ··· 174 186 */ 175 187 *dma_handle = mem->device_base + (pageno << PAGE_SHIFT); 176 188 ret = mem->virt_base + (pageno << PAGE_SHIFT); 177 - dma_memory_map = (mem->flags & DMA_MEMORY_MAP); 178 189 spin_unlock_irqrestore(&mem->spinlock, flags); 179 - if (dma_memory_map) 180 - memset(ret, 0, size); 181 - else 182 - memset_io(ret, 0, size); 183 - 190 + memset(ret, 0, size); 184 191 return ret; 185 - 186 192 err: 187 193 spin_unlock_irqrestore(&mem->spinlock, flags); 188 194 return NULL; ··· 342 360 343 361 if (!mem && 344 362 !dma_init_coherent_memory(rmem->base, rmem->base, rmem->size, 345 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE, 363 + DMA_MEMORY_EXCLUSIVE, 346 364 &mem)) { 347 365 pr_err("Reserved memory: failed to init DMA memory pool at %pa, size %ld MiB\n", 348 366 &rmem->base, (unsigned long)rmem->size / SZ_1M);
+2 -5
drivers/base/dma-mapping.c
··· 176 176 177 177 rc = dma_declare_coherent_memory(dev, phys_addr, device_addr, size, 178 178 flags); 179 - if (rc) { 179 + if (!rc) 180 180 devres_add(dev, res); 181 - rc = 0; 182 - } else { 181 + else 183 182 devres_free(res); 184 - rc = -ENOMEM; 185 - } 186 183 187 184 return rc; 188 185 }
+2 -3
drivers/media/platform/soc_camera/sh_mobile_ceu_camera.c
··· 1708 1708 err = dma_declare_coherent_memory(&pdev->dev, res->start, 1709 1709 res->start, 1710 1710 resource_size(res), 1711 - DMA_MEMORY_MAP | 1712 1711 DMA_MEMORY_EXCLUSIVE); 1713 - if (!err) { 1712 + if (err) { 1714 1713 dev_err(&pdev->dev, "Unable to declare CEU memory.\n"); 1715 - return -ENXIO; 1714 + return err; 1716 1715 } 1717 1716 1718 1717 pcdev->video_limit = resource_size(res);
+1 -2
drivers/scsi/NCR_Q720.c
··· 217 217 } 218 218 219 219 if (dma_declare_coherent_memory(dev, base_addr, base_addr, 220 - mem_size, DMA_MEMORY_MAP) 221 - != DMA_MEMORY_MAP) { 220 + mem_size, 0)) { 222 221 printk(KERN_ERR "NCR_Q720: DMA declare memory failed\n"); 223 222 goto out_release_region; 224 223 }
+3 -4
drivers/usb/host/ohci-sm501.c
··· 123 123 * regular memory. The HCD_LOCAL_MEM flag does just that. 124 124 */ 125 125 126 - if (!dma_declare_coherent_memory(dev, mem->start, 126 + retval = dma_declare_coherent_memory(dev, mem->start, 127 127 mem->start - mem->parent->start, 128 128 resource_size(mem), 129 - DMA_MEMORY_MAP | 130 - DMA_MEMORY_EXCLUSIVE)) { 129 + DMA_MEMORY_EXCLUSIVE); 130 + if (retval) { 131 131 dev_err(dev, "cannot declare coherent memory\n"); 132 - retval = -ENXIO; 133 132 goto err1; 134 133 } 135 134
+3 -6
drivers/usb/host/ohci-tmio.c
··· 227 227 goto err_ioremap_regs; 228 228 } 229 229 230 - if (!dma_declare_coherent_memory(&dev->dev, sram->start, 231 - sram->start, 232 - resource_size(sram), 233 - DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE)) { 234 - ret = -EBUSY; 230 + ret = dma_declare_coherent_memory(&dev->dev, sram->start, sram->start, 231 + resource_size(sram), DMA_MEMORY_EXCLUSIVE); 232 + if (ret) 235 233 goto err_dma_declare; 236 - } 237 234 238 235 if (cell->enable) { 239 236 ret = cell->enable(dev);
+2 -4
include/linux/dma-mapping.h
··· 694 694 #endif 695 695 696 696 /* flags for the coherent memory api */ 697 - #define DMA_MEMORY_MAP 0x01 698 - #define DMA_MEMORY_IO 0x02 699 - #define DMA_MEMORY_EXCLUSIVE 0x04 697 + #define DMA_MEMORY_EXCLUSIVE 0x01 700 698 701 699 #ifdef CONFIG_HAVE_GENERIC_DMA_COHERENT 702 700 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr, ··· 707 709 dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr, 708 710 dma_addr_t device_addr, size_t size, int flags) 709 711 { 710 - return 0; 712 + return -ENOSYS; 711 713 } 712 714 713 715 static inline void