Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Tags: kernel, os, linux

MIPS: Convert DMA to use dma-mapping-common.h

Use asm-generic/dma-mapping-common.h to handle all DMA mapping operations
and establish a default get_dma_ops() that forwards all operations to the
existing code.

Augment dev_archdata to carry a pointer to the struct dma_map_ops, allowing
DMA operations to be overridden on a per device basis. Currently this is
never filled in, so the default dma_map_ops are used. A follow-on patch
sets this for Octeon PCI devices.

Also initialize the dma_debug system as it is now used if it is configured.

Includes fixes by Kevin Cernekee <cernekee@gmail.com>.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Patchwork: http://patchwork.linux-mips.org/patch/1637/
Patchwork: http://patchwork.linux-mips.org/patch/1678/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>

Authored by David Daney and committed by Ralf Baechle.
Commit: 48e1fd5a (parent: 43e4f7ae)

+138 -155
+2 -0
arch/mips/Kconfig
··· 14 14 select HAVE_KRETPROBES 15 15 select RTC_LIB if !MACH_LOONGSON 16 16 select GENERIC_ATOMIC64 if !64BIT 17 + select HAVE_DMA_ATTRS 18 + select HAVE_DMA_API_DEBUG 17 19 18 20 menu "Machine selection" 19 21
+14 -1
arch/mips/include/asm/device.h
··· 3 3 * 4 4 * This file is released under the GPLv2 5 5 */ 6 - #include <asm-generic/device.h> 6 + #ifndef _ASM_MIPS_DEVICE_H 7 + #define _ASM_MIPS_DEVICE_H 8 + 9 + struct dma_map_ops; 10 + 11 + struct dev_archdata { 12 + /* DMA operations on that device */ 13 + struct dma_map_ops *dma_ops; 14 + }; 15 + 16 + struct pdev_archdata { 17 + }; 18 + 19 + #endif /* _ASM_MIPS_DEVICE_H*/
+60 -40
arch/mips/include/asm/dma-mapping.h
··· 5 5 #include <asm/cache.h> 6 6 #include <asm-generic/dma-coherent.h> 7 7 8 - void *dma_alloc_noncoherent(struct device *dev, size_t size, 9 - dma_addr_t *dma_handle, gfp_t flag); 8 + #include <dma-coherence.h> 10 9 11 - void dma_free_noncoherent(struct device *dev, size_t size, 12 - void *vaddr, dma_addr_t dma_handle); 10 + extern struct dma_map_ops *mips_dma_map_ops; 13 11 14 - void *dma_alloc_coherent(struct device *dev, size_t size, 15 - dma_addr_t *dma_handle, gfp_t flag); 16 - 17 - void dma_free_coherent(struct device *dev, size_t size, 18 - void *vaddr, dma_addr_t dma_handle); 19 - 20 - extern dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size, 21 - enum dma_data_direction direction); 22 - extern void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, 23 - size_t size, enum dma_data_direction direction); 24 - extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 25 - enum dma_data_direction direction); 26 - extern dma_addr_t dma_map_page(struct device *dev, struct page *page, 27 - unsigned long offset, size_t size, enum dma_data_direction direction); 28 - 29 - static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address, 30 - size_t size, enum dma_data_direction direction) 12 + static inline struct dma_map_ops *get_dma_ops(struct device *dev) 31 13 { 32 - dma_unmap_single(dev, dma_address, size, direction); 14 + if (dev && dev->archdata.dma_ops) 15 + return dev->archdata.dma_ops; 16 + else 17 + return mips_dma_map_ops; 33 18 } 34 19 35 - extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg, 36 - int nhwentries, enum dma_data_direction direction); 37 - extern void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, 38 - size_t size, enum dma_data_direction direction); 39 - extern void dma_sync_single_for_device(struct device *dev, 40 - dma_addr_t dma_handle, size_t size, enum dma_data_direction direction); 41 - extern void dma_sync_single_range_for_cpu(struct 
device *dev, 42 - dma_addr_t dma_handle, unsigned long offset, size_t size, 43 - enum dma_data_direction direction); 44 - extern void dma_sync_single_range_for_device(struct device *dev, 45 - dma_addr_t dma_handle, unsigned long offset, size_t size, 46 - enum dma_data_direction direction); 47 - extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, 48 - int nelems, enum dma_data_direction direction); 49 - extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, 50 - int nelems, enum dma_data_direction direction); 51 - extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr); 52 - extern int dma_supported(struct device *dev, u64 mask); 20 + static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size) 21 + { 22 + if (!dev->dma_mask) 23 + return 0; 24 + 25 + return addr + size <= *dev->dma_mask; 26 + } 27 + 28 + static inline void dma_mark_clean(void *addr, size_t size) {} 29 + 30 + #include <asm-generic/dma-mapping-common.h> 31 + 32 + static inline int dma_supported(struct device *dev, u64 mask) 33 + { 34 + struct dma_map_ops *ops = get_dma_ops(dev); 35 + return ops->dma_supported(dev, mask); 36 + } 37 + 38 + static inline int dma_mapping_error(struct device *dev, u64 mask) 39 + { 40 + struct dma_map_ops *ops = get_dma_ops(dev); 41 + return ops->mapping_error(dev, mask); 42 + } 53 43 54 44 static inline int 55 45 dma_set_mask(struct device *dev, u64 mask) ··· 54 64 55 65 extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size, 56 66 enum dma_data_direction direction); 67 + 68 + static inline void *dma_alloc_coherent(struct device *dev, size_t size, 69 + dma_addr_t *dma_handle, gfp_t gfp) 70 + { 71 + void *ret; 72 + struct dma_map_ops *ops = get_dma_ops(dev); 73 + 74 + ret = ops->alloc_coherent(dev, size, dma_handle, gfp); 75 + 76 + debug_dma_alloc_coherent(dev, size, *dma_handle, ret); 77 + 78 + return ret; 79 + } 80 + 81 + static inline void dma_free_coherent(struct device 
*dev, size_t size, 82 + void *vaddr, dma_addr_t dma_handle) 83 + { 84 + struct dma_map_ops *ops = get_dma_ops(dev); 85 + 86 + ops->free_coherent(dev, size, vaddr, dma_handle); 87 + 88 + debug_dma_free_coherent(dev, size, vaddr, dma_handle); 89 + } 90 + 91 + 92 + void *dma_alloc_noncoherent(struct device *dev, size_t size, 93 + dma_addr_t *dma_handle, gfp_t flag); 94 + 95 + void dma_free_noncoherent(struct device *dev, size_t size, 96 + void *vaddr, dma_addr_t dma_handle); 57 97 58 98 #endif /* _ASM_DMA_MAPPING_H */
+1 -1
arch/mips/include/asm/mach-cavium-octeon/dma-coherence.h
··· 27 27 static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, 28 28 struct page *page) 29 29 { 30 - return octeon_map_dma_mem(dev, page_address(page), PAGE_SIZE); 30 + BUG(); 31 31 } 32 32 33 33 static inline unsigned long plat_dma_addr_to_phys(struct device *dev,
+2 -1
arch/mips/include/asm/mach-ip27/dma-coherence.h
··· 26 26 return pa; 27 27 } 28 28 29 - static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, struct page *page) 29 + static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, 30 + struct page *page) 30 31 { 31 32 dma_addr_t pa = dev_to_baddr(dev, page_to_phys(page)); 32 33
+2 -1
arch/mips/include/asm/mach-ip32/dma-coherence.h
··· 37 37 return pa; 38 38 } 39 39 40 - static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, struct page *page) 40 + static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, 41 + struct page *page) 41 42 { 42 43 dma_addr_t pa; 43 44
+2 -1
arch/mips/include/asm/mach-jazz/dma-coherence.h
··· 17 17 return vdma_alloc(virt_to_phys(addr), size); 18 18 } 19 19 20 - static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, struct page *page) 20 + static inline dma_addr_t plat_map_dma_mem_page(struct device *dev, 21 + struct page *page) 21 22 { 22 23 return vdma_alloc(page_to_phys(page), PAGE_SIZE); 23 24 }
+55 -110
arch/mips/mm/dma-default.c
··· 95 95 96 96 return ret; 97 97 } 98 - 99 98 EXPORT_SYMBOL(dma_alloc_noncoherent); 100 99 101 - void *dma_alloc_coherent(struct device *dev, size_t size, 100 + static void *mips_dma_alloc_coherent(struct device *dev, size_t size, 102 101 dma_addr_t * dma_handle, gfp_t gfp) 103 102 { 104 103 void *ret; ··· 122 123 return ret; 123 124 } 124 125 125 - EXPORT_SYMBOL(dma_alloc_coherent); 126 126 127 127 void dma_free_noncoherent(struct device *dev, size_t size, void *vaddr, 128 128 dma_addr_t dma_handle) ··· 129 131 plat_unmap_dma_mem(dev, dma_handle, size, DMA_BIDIRECTIONAL); 130 132 free_pages((unsigned long) vaddr, get_order(size)); 131 133 } 132 - 133 134 EXPORT_SYMBOL(dma_free_noncoherent); 134 135 135 - void dma_free_coherent(struct device *dev, size_t size, void *vaddr, 136 + static void mips_dma_free_coherent(struct device *dev, size_t size, void *vaddr, 136 137 dma_addr_t dma_handle) 137 138 { 138 139 unsigned long addr = (unsigned long) vaddr; ··· 147 150 148 151 free_pages(addr, get_order(size)); 149 152 } 150 - 151 - EXPORT_SYMBOL(dma_free_coherent); 152 153 153 154 static inline void __dma_sync(unsigned long addr, size_t size, 154 155 enum dma_data_direction direction) ··· 169 174 } 170 175 } 171 176 172 - dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size, 173 - enum dma_data_direction direction) 174 - { 175 - unsigned long addr = (unsigned long) ptr; 176 - 177 - if (!plat_device_is_coherent(dev)) 178 - __dma_sync(addr, size, direction); 179 - 180 - return plat_map_dma_mem(dev, ptr, size); 181 - } 182 - 183 - EXPORT_SYMBOL(dma_map_single); 184 - 185 - void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size, 186 - enum dma_data_direction direction) 177 + static void mips_dma_unmap_page(struct device *dev, dma_addr_t dma_addr, 178 + size_t size, enum dma_data_direction direction, struct dma_attrs *attrs) 187 179 { 188 180 if (cpu_is_noncoherent_r10000(dev)) 189 181 __dma_sync(dma_addr_to_virt(dev, dma_addr), size, ··· 
179 197 plat_unmap_dma_mem(dev, dma_addr, size, direction); 180 198 } 181 199 182 - EXPORT_SYMBOL(dma_unmap_single); 183 - 184 - int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 185 - enum dma_data_direction direction) 200 + static int mips_dma_map_sg(struct device *dev, struct scatterlist *sg, 201 + int nents, enum dma_data_direction direction, struct dma_attrs *attrs) 186 202 { 187 203 int i; 188 - 189 - BUG_ON(direction == DMA_NONE); 190 204 191 205 for (i = 0; i < nents; i++, sg++) { 192 206 unsigned long addr; ··· 197 219 return nents; 198 220 } 199 221 200 - EXPORT_SYMBOL(dma_map_sg); 201 - 202 - dma_addr_t dma_map_page(struct device *dev, struct page *page, 203 - unsigned long offset, size_t size, enum dma_data_direction direction) 222 + static dma_addr_t mips_dma_map_page(struct device *dev, struct page *page, 223 + unsigned long offset, size_t size, enum dma_data_direction direction, 224 + struct dma_attrs *attrs) 204 225 { 205 - BUG_ON(direction == DMA_NONE); 226 + unsigned long addr; 206 227 207 - if (!plat_device_is_coherent(dev)) { 208 - unsigned long addr; 228 + addr = (unsigned long) page_address(page) + offset; 209 229 210 - addr = (unsigned long) page_address(page) + offset; 230 + if (!plat_device_is_coherent(dev)) 211 231 __dma_sync(addr, size, direction); 212 - } 213 232 214 - return plat_map_dma_mem_page(dev, page) + offset; 233 + return plat_map_dma_mem(dev, (void *)addr, size); 215 234 } 216 235 217 - EXPORT_SYMBOL(dma_map_page); 218 - 219 - void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, 220 - enum dma_data_direction direction) 236 + static void mips_dma_unmap_sg(struct device *dev, struct scatterlist *sg, 237 + int nhwentries, enum dma_data_direction direction, 238 + struct dma_attrs *attrs) 221 239 { 222 240 unsigned long addr; 223 241 int i; 224 - 225 - BUG_ON(direction == DMA_NONE); 226 242 227 243 for (i = 0; i < nhwentries; i++, sg++) { 228 244 if (!plat_device_is_coherent(dev) && ··· 
229 257 } 230 258 } 231 259 232 - EXPORT_SYMBOL(dma_unmap_sg); 233 - 234 - void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, 235 - size_t size, enum dma_data_direction direction) 260 + static void mips_dma_sync_single_for_cpu(struct device *dev, 261 + dma_addr_t dma_handle, size_t size, enum dma_data_direction direction) 236 262 { 237 - BUG_ON(direction == DMA_NONE); 238 - 239 263 if (cpu_is_noncoherent_r10000(dev)) { 240 264 unsigned long addr; 241 265 ··· 240 272 } 241 273 } 242 274 243 - EXPORT_SYMBOL(dma_sync_single_for_cpu); 244 - 245 - void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle, 246 - size_t size, enum dma_data_direction direction) 275 + static void mips_dma_sync_single_for_device(struct device *dev, 276 + dma_addr_t dma_handle, size_t size, enum dma_data_direction direction) 247 277 { 248 - BUG_ON(direction == DMA_NONE); 249 - 250 278 plat_extra_sync_for_device(dev); 251 279 if (!plat_device_is_coherent(dev)) { 252 280 unsigned long addr; ··· 252 288 } 253 289 } 254 290 255 - EXPORT_SYMBOL(dma_sync_single_for_device); 256 - 257 - void dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle, 258 - unsigned long offset, size_t size, enum dma_data_direction direction) 259 - { 260 - BUG_ON(direction == DMA_NONE); 261 - 262 - if (cpu_is_noncoherent_r10000(dev)) { 263 - unsigned long addr; 264 - 265 - addr = dma_addr_to_virt(dev, dma_handle); 266 - __dma_sync(addr + offset, size, direction); 267 - } 268 - } 269 - 270 - EXPORT_SYMBOL(dma_sync_single_range_for_cpu); 271 - 272 - void dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle, 273 - unsigned long offset, size_t size, enum dma_data_direction direction) 274 - { 275 - BUG_ON(direction == DMA_NONE); 276 - 277 - plat_extra_sync_for_device(dev); 278 - if (!plat_device_is_coherent(dev)) { 279 - unsigned long addr; 280 - 281 - addr = dma_addr_to_virt(dev, dma_handle); 282 - __dma_sync(addr + offset, size, direction); 283 - } 
284 - } 285 - 286 - EXPORT_SYMBOL(dma_sync_single_range_for_device); 287 - 288 - void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, 289 - enum dma_data_direction direction) 291 + static void mips_dma_sync_sg_for_cpu(struct device *dev, 292 + struct scatterlist *sg, int nelems, enum dma_data_direction direction) 290 293 { 291 294 int i; 292 - 293 - BUG_ON(direction == DMA_NONE); 294 295 295 296 /* Make sure that gcc doesn't leave the empty loop body. */ 296 297 for (i = 0; i < nelems; i++, sg++) { ··· 265 336 } 266 337 } 267 338 268 - EXPORT_SYMBOL(dma_sync_sg_for_cpu); 269 - 270 - void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, 271 - enum dma_data_direction direction) 339 + static void mips_dma_sync_sg_for_device(struct device *dev, 340 + struct scatterlist *sg, int nelems, enum dma_data_direction direction) 272 341 { 273 342 int i; 274 - 275 - BUG_ON(direction == DMA_NONE); 276 343 277 344 /* Make sure that gcc doesn't leave the empty loop body. 
*/ 278 345 for (i = 0; i < nelems; i++, sg++) { ··· 278 353 } 279 354 } 280 355 281 - EXPORT_SYMBOL(dma_sync_sg_for_device); 282 - 283 - int dma_mapping_error(struct device *dev, dma_addr_t dma_addr) 356 + int mips_dma_mapping_error(struct device *dev, dma_addr_t dma_addr) 284 357 { 285 358 return plat_dma_mapping_error(dev, dma_addr); 286 359 } 287 360 288 - EXPORT_SYMBOL(dma_mapping_error); 289 - 290 - int dma_supported(struct device *dev, u64 mask) 361 + int mips_dma_supported(struct device *dev, u64 mask) 291 362 { 292 363 return plat_dma_supported(dev, mask); 293 364 } 294 365 295 - EXPORT_SYMBOL(dma_supported); 296 - 297 - void dma_cache_sync(struct device *dev, void *vaddr, size_t size, 298 - enum dma_data_direction direction) 366 + void mips_dma_cache_sync(struct device *dev, void *vaddr, size_t size, 367 + enum dma_data_direction direction) 299 368 { 300 369 BUG_ON(direction == DMA_NONE); 301 370 ··· 298 379 __dma_sync((unsigned long)vaddr, size, direction); 299 380 } 300 381 301 - EXPORT_SYMBOL(dma_cache_sync); 382 + static struct dma_map_ops mips_default_dma_map_ops = { 383 + .alloc_coherent = mips_dma_alloc_coherent, 384 + .free_coherent = mips_dma_free_coherent, 385 + .map_page = mips_dma_map_page, 386 + .unmap_page = mips_dma_unmap_page, 387 + .map_sg = mips_dma_map_sg, 388 + .unmap_sg = mips_dma_unmap_sg, 389 + .sync_single_for_cpu = mips_dma_sync_single_for_cpu, 390 + .sync_single_for_device = mips_dma_sync_single_for_device, 391 + .sync_sg_for_cpu = mips_dma_sync_sg_for_cpu, 392 + .sync_sg_for_device = mips_dma_sync_sg_for_device, 393 + .mapping_error = mips_dma_mapping_error, 394 + .dma_supported = mips_dma_supported 395 + }; 396 + 397 + struct dma_map_ops *mips_dma_map_ops = &mips_default_dma_map_ops; 398 + EXPORT_SYMBOL(mips_dma_map_ops); 399 + 400 + #define PREALLOC_DMA_DEBUG_ENTRIES (1 << 16) 401 + 402 + static int __init mips_dma_init(void) 403 + { 404 + dma_debug_init(PREALLOC_DMA_DEBUG_ENTRIES); 405 + 406 + return 0; 407 + } 408 + 
fs_initcall(mips_dma_init);