Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

at v2.6.15 713 lines 19 kB view raw
1/* 2 * arch/ppc/kernel/ppc4xx_dma.c 3 * 4 * IBM PPC4xx DMA engine core library 5 * 6 * Copyright 2000-2004 MontaVista Software Inc. 7 * 8 * Cleaned up and converted to new DCR access 9 * Matt Porter <mporter@kernel.crashing.org> 10 * 11 * Original code by Armin Kuster <akuster@mvista.com> 12 * and Pete Popov <ppopov@mvista.com> 13 * 14 * This program is free software; you can redistribute it and/or modify it 15 * under the terms of the GNU General Public License as published by the 16 * Free Software Foundation; either version 2 of the License, or (at your 17 * option) any later version. 18 * 19 * You should have received a copy of the GNU General Public License along 20 * with this program; if not, write to the Free Software Foundation, Inc., 21 * 675 Mass Ave, Cambridge, MA 02139, USA. 22 */ 23 24#include <linux/config.h> 25#include <linux/kernel.h> 26#include <linux/mm.h> 27#include <linux/miscdevice.h> 28#include <linux/init.h> 29#include <linux/module.h> 30 31#include <asm/system.h> 32#include <asm/io.h> 33#include <asm/dma.h> 34#include <asm/ppc4xx_dma.h> 35 36ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS]; 37 38int 39ppc4xx_get_dma_status(void) 40{ 41 return (mfdcr(DCRN_DMASR)); 42} 43 44void 45ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr) 46{ 47 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) { 48 printk("set_src_addr: bad channel: %d\n", dmanr); 49 return; 50 } 51 52#ifdef PPC4xx_DMA_64BIT 53 mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32)); 54#else 55 mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr); 56#endif 57} 58 59void 60ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr) 61{ 62 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) { 63 printk("set_dst_addr: bad channel: %d\n", dmanr); 64 return; 65 } 66 67#ifdef PPC4xx_DMA_64BIT 68 mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32)); 69#else 70 mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr); 71#endif 72} 73 74void 75ppc4xx_enable_dma(unsigned int dmanr) 76{ 77 unsigned int control; 78 ppc_dma_ch_t *p_dma_ch = 
&dma_channels[dmanr]; 79 unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR, 80 DMA_CS1 | DMA_TS1 | DMA_CH1_ERR, 81 DMA_CS2 | DMA_TS2 | DMA_CH2_ERR, 82 DMA_CS3 | DMA_TS3 | DMA_CH3_ERR}; 83 84 if (p_dma_ch->in_use) { 85 printk("enable_dma: channel %d in use\n", dmanr); 86 return; 87 } 88 89 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) { 90 printk("enable_dma: bad channel: %d\n", dmanr); 91 return; 92 } 93 94 if (p_dma_ch->mode == DMA_MODE_READ) { 95 /* peripheral to memory */ 96 ppc4xx_set_src_addr(dmanr, 0); 97 ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr); 98 } else if (p_dma_ch->mode == DMA_MODE_WRITE) { 99 /* memory to peripheral */ 100 ppc4xx_set_src_addr(dmanr, p_dma_ch->addr); 101 ppc4xx_set_dst_addr(dmanr, 0); 102 } 103 104 /* for other xfer modes, the addresses are already set */ 105 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8)); 106 107 control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */ 108 if (p_dma_ch->mode == DMA_MODE_MM) { 109 /* software initiated memory to memory */ 110 control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE; 111 } 112 113 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control); 114 115 /* 116 * Clear the CS, TS, RI bits for the channel from DMASR. This 117 * has been observed to happen correctly only after the mode and 118 * ETD/DCE bits in DMACRx are set above. Must do this before 119 * enabling the channel. 120 */ 121 122 mtdcr(DCRN_DMASR, status_bits[dmanr]); 123 124 /* 125 * For device-paced transfers, Terminal Count Enable apparently 126 * must be on, and this must be turned on after the mode, etc. 127 * bits are cleared above (at least on Redwood-6). 128 */ 129 130 if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) || 131 (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC)) 132 control |= DMA_TCE_ENABLE; 133 134 /* 135 * Now enable the channel. 
136 */ 137 138 control |= (p_dma_ch->mode | DMA_CE_ENABLE); 139 140 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control); 141 142 p_dma_ch->in_use = 1; 143} 144 145void 146ppc4xx_disable_dma(unsigned int dmanr) 147{ 148 unsigned int control; 149 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr]; 150 151 if (!p_dma_ch->in_use) { 152 printk("disable_dma: channel %d not in use\n", dmanr); 153 return; 154 } 155 156 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) { 157 printk("disable_dma: bad channel: %d\n", dmanr); 158 return; 159 } 160 161 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8)); 162 control &= ~DMA_CE_ENABLE; 163 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control); 164 165 p_dma_ch->in_use = 0; 166} 167 168/* 169 * Sets the dma mode for single DMA transfers only. 170 * For scatter/gather transfers, the mode is passed to the 171 * alloc_dma_handle() function as one of the parameters. 172 * 173 * The mode is simply saved and used later. This allows 174 * the driver to call set_dma_mode() and set_dma_addr() in 175 * any order. 176 * 177 * Valid mode values are: 178 * 179 * DMA_MODE_READ peripheral to memory 180 * DMA_MODE_WRITE memory to peripheral 181 * DMA_MODE_MM memory to memory 182 * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src 183 * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst 184 */ 185int 186ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode) 187{ 188 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr]; 189 190 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) { 191 printk("set_dma_mode: bad channel 0x%x\n", dmanr); 192 return DMA_STATUS_BAD_CHANNEL; 193 } 194 195 p_dma_ch->mode = mode; 196 197 return DMA_STATUS_GOOD; 198} 199 200/* 201 * Sets the DMA Count register. Note that 'count' is in bytes. 202 * However, the DMA Count register counts the number of "transfers", 203 * where each transfer is equal to the bus width. Thus, count 204 * MUST be a multiple of the bus width. 
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	/* NOTE(review): unlike most functions in this file, dmanr is not
	 * range-checked before indexing dma_channels[] or writing the
	 * DCR - confirm that all callers validate the channel number. */
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		/* Debug-only sanity check: 'count' must be a multiple of
		 * the peripheral bus width configured for this channel. */
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: set_dma_count count 0x%x bus width %d\n",
			     count, p_dma_ch->pwidth);
	}
#endif

	/* Convert bytes to hardware "transfers"; the shift amount equals
	 * the peripheral width code (see ppc4xx_init_dma_channel()). */
	count = count >> p_dma_ch->shift;

	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
}

/*
 * Returns the number of bytes left to be transfered.
 * After a DMA transfer, this should return zero.
 * Reading this while a DMA transfer is still in progress will return
 * unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* The count register holds transfers, not bytes; shift converts
	 * back to bytes (inverse of ppc4xx_set_dma_count()). */
	count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));

	return (count << p_dma_ch->shift);
}

/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer. The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* Debug-only check that the address is aligned to the
		 * channel's peripheral bus width.
		 * NOTE(review): "%x" with a phys_addr_t argument is a
		 * format mismatch when phys_addr_t is 64-bit - debug-only
		 * code, but worth confirming. */
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* Debug-only check that both addresses are aligned to the
		 * channel's peripheral bus width (same caveat about "%x"
		 * vs. a 64-bit phys_addr_t as in ppc4xx_set_dma_addr()). */
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1)
			    )
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3)
			    )
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7)
			    )
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			     src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	/* Memory-to-memory: both endpoints are programmed immediately,
	 * not deferred to enable_dma(). */
	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}

/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list. Otherwise, interrupts will not be enabled, if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Mirror the new setting into the per-channel software state. */
	p_dma_ch->int_enable = 1;

	/* Read-modify-write the channel's control DCR. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list. Otherwise, interrupts will not be disabled, if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Mirror the new setting into the per-channel software state. */
	p_dma_ch->int_enable = 0;

	/* Read-modify-write the channel's control DCR. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc. This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code. The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* These are assignable mode values declared elsewhere: on 4xx,
	 * peripheral-to-memory transfers are signalled by the TD bit. */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Not every 4xx variant has a polarity DCR. */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init. Then, over-write the control register with this
	 * new value.
	 * NOTE(review): SET_DMA_CONTROL presumably expands to an
	 * expression built from p_init's fields - confirm against
	 * asm/ppc4xx_dma.h. */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number. It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}

/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Start from the cached per-channel software state... */
	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

	/* ...then refresh hardware-derived fields from the DCRs. */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	/* shift tracks the peripheral width code (see init). */
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is setup by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 *   PRIORITY_LOW
 *   PRIORITY_MID_LOW
 *   PRIORITY_MID_HIGH
 *   PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* NOTE(review): an invalid priority is only warned about - the
	 * value is still written below and the function still returns
	 * DMA_STATUS_GOOD. Confirm whether it should bail out instead. */
	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	/* NOTE(review): the new priority bits are OR-ed in without first
	 * clearing the old priority field, so lowering a priority may not
	 * take effect - verify against the DMACRx layout. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * NOTE(review): the original comment claimed the function "returns 0 on
 * error", but the code actually returns DMA_STATUS_BAD_CHANNEL for a bad
 * channel number.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	/* Extract the PW field from the channel's control register. */
	return (GET_DMA_PW(control));
}

/*
 * Clears the channel status bits (error, channel-status and
 * terminal-count) for channel 'dmanr' by writing them back to DMASR.
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* The channel-0 bits shifted right by the channel number give the
	 * corresponding bits for channel 'dmanr' (presumably PPC MSB-first
	 * bit numbering - confirm against the DMASR layout). */
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}

#ifdef CONFIG_PPC4xx_EDMA
/*
 * Enables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_enable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* Read-modify-write the count/control DCR, setting BTEN. */
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}
/*
 * Disables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_disable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* Read-modify-write the count/control DCR, clearing BTEN. */
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}
/*
 * Sets the burst size (number of peripheral widths) for the channel
 * (BSIZ bits in the control/count register).
 * 'bsize' must be one of:
 *   DMA_CTC_BSIZ_2
 *   DMA_CTC_BSIZ_4
 *   DMA_CTC_BSIZ_8
 *   DMA_CTC_BSIZ_16
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* Clear the old BSIZ field, then insert the (masked) new size. */
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
	ctc |= (bsize & DMA_CTC_BSIZ_MSK);
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */

/* Public API exported to kernel modules. */
EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);