Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

at v2.6.26-rc4 986 lines 25 kB view raw
/*
 * au1550_spi.c - au1550 psc spi controller driver
 * may work also with au1200, au1210, au1250
 * will not work on au1000, au1100 and au1500 (no full spi controller there)
 *
 * Copyright (c) 2006 ATRON electronic GmbH
 * Author: Jan Nikitenko <jan.nikitenko@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi_bitbang.h>
#include <linux/dma-mapping.h>
#include <linux/completion.h>
#include <asm/mach-au1x00/au1000.h>
#include <asm/mach-au1x00/au1xxx_psc.h>
#include <asm/mach-au1x00/au1xxx_dbdma.h>

#include <asm/mach-au1x00/au1550_spi.h>

/* module-wide default: use DMA for transfers (per-device use additionally
 * requires a dma mask on the platform device, see probe) */
static unsigned usedma = 1;
module_param(usedma, uint, 0644);

/*
#define AU1550_SPI_DEBUG_LOOPBACK
*/


#define AU1550_SPI_DBDMA_DESCRIPTORS 1
#define AU1550_SPI_DMA_RXTMP_MINSIZE 2048U

/* per-controller driver state */
struct au1550_spi {
	struct spi_bitbang bitbang;

	volatile psc_spi_t __iomem *regs;	/* PSC register block */
	int irq;
	unsigned freq_max;	/* valid spi clock range, precomputed in probe */
	unsigned freq_min;

	/* current transfer bookkeeping (bytes) */
	unsigned len;
	unsigned tx_count;
	unsigned rx_count;
	const u8 *tx;
	u8 *rx;

	/* per-word-size handlers, selected by au1550_spi_bits_handlers_set() */
	void (*rx_word)(struct au1550_spi *hw);
	void (*tx_word)(struct au1550_spi *hw);
	int (*txrx_bufs)(struct spi_device *spi, struct spi_transfer *t);
	irqreturn_t (*irq_callback)(struct au1550_spi *hw);

	/* signalled from irq context when a transfer finishes (or errors) */
	struct completion master_done;

	unsigned usedma;
	u32 dma_tx_id;
	u32 dma_rx_id;
	u32 dma_tx_ch;
	u32 dma_rx_ch;

	/* fallback rx buffer for dma when the caller supplies no rx_buf */
	u8 *dma_rx_tmpbuf;
	unsigned dma_rx_tmpbuf_size;
	u32 dma_rx_tmpbuf_addr;

	struct spi_master *master;
	struct device *dev;
	struct au1550_spi_info *pdata;
};


/* we use an 8-bit memory device for dma transfers to/from spi fifo */
static dbdev_tab_t au1550_spi_mem_dbdev =
{
	.dev_id			= DBDMA_MEM_CHAN,
	.dev_flags		= DEV_FLAGS_ANYUSE|DEV_FLAGS_SYNC,
	.dev_tsize		= 0,
	.dev_devwidth		= 8,
	.dev_physaddr		= 0x00000000,
	.dev_intlevel		= 0,
	.dev_intpolarity	= 0
};

static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw);


/*
 * compute BRG and DIV bits to setup spi clock based on main input clock rate
 * that was specified in platform data structure
 * according to au1550 datasheet:
 *    psc_tempclk = psc_mainclk / (2 << DIV)
 *    spiclk = psc_tempclk / (2 * (BRG + 1))
 *    BRG valid range is 4..63
 *    DIV valid range is 0..3
 * out-of-range speeds are clamped to the nearest achievable rate rather
 * than rejected here (range checking is done in setupxfer/setup)
 */
static u32 au1550_spi_baudcfg(struct au1550_spi *hw, unsigned speed_hz)
{
	u32 mainclk_hz = hw->pdata->mainclk_hz;
	u32 div, brg;

	for (div = 0; div < 4; div++) {
		brg = mainclk_hz / speed_hz / (4 << div);
		/* now we have BRG+1 in brg, so count with that */
		if (brg < (4 + 1)) {
			brg = (4 + 1);	/* speed_hz too big */
			break;		/* set lowest brg (div is == 0) */
		}
		if (brg <= (63 + 1))
			break;		/* we have valid brg and div */
	}
	if (div == 4) {
		div = 3;		/* speed_hz too small */
		brg = (63 + 1);	/* set highest brg and div */
	}
	brg--;
	return PSC_SPICFG_SET_BAUD(brg) | PSC_SPICFG_SET_DIV(div);
}

/* mask all SPI interrupt sources, then ack any pending events */
static inline void au1550_spi_mask_ack_all(struct au1550_spi *hw)
{
	hw->regs->psc_spimsk =
		  PSC_SPIMSK_MM | PSC_SPIMSK_RR | PSC_SPIMSK_RO
		| PSC_SPIMSK_RU | PSC_SPIMSK_TR | PSC_SPIMSK_TO
		| PSC_SPIMSK_TU | PSC_SPIMSK_SD | PSC_SPIMSK_MD;
	au_sync();

	hw->regs->psc_spievent =
		  PSC_SPIEVNT_MM | PSC_SPIEVNT_RR | PSC_SPIEVNT_RO
		| PSC_SPIEVNT_RU | PSC_SPIEVNT_TR | PSC_SPIEVNT_TO
		| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD | PSC_SPIEVNT_MD;
	au_sync();
}

/*
 * clear both fifos and busy-wait until the controller reports the
 * clear bits as self-cleared (NOTE(review): no timeout here -- assumes
 * the hardware always completes the reset)
 */
static void au1550_spi_reset_fifos(struct au1550_spi *hw)
{
	u32 pcr;

	hw->regs->psc_spipcr = PSC_SPIPCR_RC | PSC_SPIPCR_TC;
	au_sync();
	do {
		pcr = hw->regs->psc_spipcr;
		au_sync();
	} while (pcr != 0);
}

/*
 * dma transfers are used for the most common spi word size of 8-bits
 * we cannot easily change already set up dma channels' width, so if we wanted
 * dma support for more than 8-bit words (up to 24 bits), we would need to
 * setup dma channels from scratch on each spi transfer, based on bits_per_word
 * instead we have pre set up 8 bit dma channels supporting spi 4 to 8 bits
 * transfers, and 9 to 24 bits spi transfers will be done in pio irq based mode
 * callbacks to handle dma or pio are set up in au1550_spi_bits_handlers_set()
 */
static void au1550_spi_chipsel(struct spi_device *spi, int value)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	unsigned cspol = spi->mode & SPI_CS_HIGH ? 1 : 0;
	u32 cfg, stat;

	switch (value) {
	case BITBANG_CS_INACTIVE:
		/* chip select handling is delegated to board code */
		if (hw->pdata->deactivate_cs)
			hw->pdata->deactivate_cs(hw->pdata, spi->chip_select,
					cspol);
		break;

	case BITBANG_CS_ACTIVE:
		au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

		/* the device must be disabled while reconfiguring it */
		cfg = hw->regs->psc_spicfg;
		au_sync();
		hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
		au_sync();

		if (spi->mode & SPI_CPOL)
			cfg |= PSC_SPICFG_BI;
		else
			cfg &= ~PSC_SPICFG_BI;
		if (spi->mode & SPI_CPHA)
			cfg &= ~PSC_SPICFG_CDE;
		else
			cfg |= PSC_SPICFG_CDE;

		if (spi->mode & SPI_LSB_FIRST)
			cfg |= PSC_SPICFG_MLF;
		else
			cfg &= ~PSC_SPICFG_MLF;

		/* dma only supports word sizes up to 8 bits, see above */
		if (hw->usedma && spi->bits_per_word <= 8)
			cfg &= ~PSC_SPICFG_DD_DISABLE;
		else
			cfg |= PSC_SPICFG_DD_DISABLE;
		cfg = PSC_SPICFG_CLR_LEN(cfg);
		cfg |= PSC_SPICFG_SET_LEN(spi->bits_per_word);

		cfg = PSC_SPICFG_CLR_BAUD(cfg);
		cfg &= ~PSC_SPICFG_SET_DIV(3);
		cfg |= au1550_spi_baudcfg(hw, spi->max_speed_hz);

		/* re-enable and wait until the device reports ready */
		hw->regs->psc_spicfg = cfg | PSC_SPICFG_DE_ENABLE;
		au_sync();
		do {
			stat = hw->regs->psc_spistat;
			au_sync();
		} while ((stat & PSC_SPISTAT_DR) == 0);

		if (hw->pdata->activate_cs)
			hw->pdata->activate_cs(hw->pdata, spi->chip_select,
					cspol);
		break;
	}
}

/*
 * per-transfer setup: validate bits_per_word and clock rate, then program
 * word length, dma enable and baud generator; fifos are reset and all
 * events masked/acked so the transfer starts from a clean state
 */
static int au1550_spi_setupxfer(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	unsigned bpw, hz;
	u32 cfg, stat;

	/* transfer-specific values override the device defaults */
	bpw = t ? t->bits_per_word : spi->bits_per_word;
	hz = t ? t->speed_hz : spi->max_speed_hz;

	if (bpw < 4 || bpw > 24) {
		dev_err(&spi->dev, "setupxfer: invalid bits_per_word=%d\n",
			bpw);
		return -EINVAL;
	}
	if (hz > spi->max_speed_hz || hz > hw->freq_max || hz < hw->freq_min) {
		dev_err(&spi->dev, "setupxfer: clock rate=%d out of range\n",
			hz);
		return -EINVAL;
	}

	au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

	cfg = hw->regs->psc_spicfg;
	au_sync();
	hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
	au_sync();

	if (hw->usedma && bpw <= 8)
		cfg &= ~PSC_SPICFG_DD_DISABLE;
	else
		cfg |= PSC_SPICFG_DD_DISABLE;
	cfg = PSC_SPICFG_CLR_LEN(cfg);
	cfg |= PSC_SPICFG_SET_LEN(bpw);

	cfg = PSC_SPICFG_CLR_BAUD(cfg);
	cfg &= ~PSC_SPICFG_SET_DIV(3);
	cfg |= au1550_spi_baudcfg(hw, hz);

	hw->regs->psc_spicfg = cfg;
	au_sync();

	/* if the device stayed enabled, wait for it to be ready again */
	if (cfg & PSC_SPICFG_DE_ENABLE) {
		do {
			stat = hw->regs->psc_spistat;
			au_sync();
		} while ((stat & PSC_SPISTAT_DR) == 0);
	}

	au1550_spi_reset_fifos(hw);
	au1550_spi_mask_ack_all(hw);
	return 0;
}

/* the spi->mode bits understood by this driver: */
#define MODEBITS (SPI_CPOL | SPI_CPHA | SPI_CS_HIGH | SPI_LSB_FIRST)

/*
 * spi core setup callback: validate bits_per_word, mode bits and speed,
 * applying driver defaults (8 bpw, max controller speed) where unset
 */
static int au1550_spi_setup(struct spi_device *spi)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);

	if (spi->bits_per_word == 0)
		spi->bits_per_word = 8;
	if (spi->bits_per_word < 4 || spi->bits_per_word > 24) {
		dev_err(&spi->dev, "setup: invalid bits_per_word=%d\n",
			spi->bits_per_word);
		return -EINVAL;
	}

	if (spi->mode & ~MODEBITS) {
		dev_dbg(&spi->dev, "setup: unsupported mode bits %x\n",
			spi->mode & ~MODEBITS);
		return -EINVAL;
	}

	if (spi->max_speed_hz == 0)
		spi->max_speed_hz = hw->freq_max;
	if (spi->max_speed_hz > hw->freq_max
			|| spi->max_speed_hz < hw->freq_min)
		return -EINVAL;
	/*
	 * NOTE: cannot change speed and other hw settings immediately,
	 *       otherwise sharing of spi bus is not possible,
	 *       so do not call setupxfer(spi, NULL) here
	 */
	return 0;
}

/*
 * for dma spi transfers, we have to setup rx channel, otherwise there is
 * no reliable way how to recognize that spi transfer is done
 * dma complete callbacks are called before real spi transfer is finished
 * and if only tx dma channel is set up (and rx fifo overflow event masked)
 * spi master done event irq is not generated unless rx fifo is empty (emptied)
 * so we need rx tmp buffer to use for rx dma if user does not provide one
 */
static int au1550_spi_dma_rxtmp_alloc(struct au1550_spi *hw, unsigned size)
{
	hw->dma_rx_tmpbuf = kmalloc(size, GFP_KERNEL);
	if (!hw->dma_rx_tmpbuf)
		return -ENOMEM;
	hw->dma_rx_tmpbuf_size = size;
	hw->dma_rx_tmpbuf_addr = dma_map_single(hw->dev, hw->dma_rx_tmpbuf,
			size, DMA_FROM_DEVICE);
	if (dma_mapping_error(hw->dma_rx_tmpbuf_addr)) {
		kfree(hw->dma_rx_tmpbuf);
		hw->dma_rx_tmpbuf = 0;
		hw->dma_rx_tmpbuf_size = 0;
		return -EFAULT;
	}
	return 0;
}

/* unmap and free the temporary rx dma buffer allocated above */
static void au1550_spi_dma_rxtmp_free(struct au1550_spi *hw)
{
	dma_unmap_single(hw->dev, hw->dma_rx_tmpbuf_addr,
			hw->dma_rx_tmpbuf_size, DMA_FROM_DEVICE);
	kfree(hw->dma_rx_tmpbuf);
	hw->dma_rx_tmpbuf = 0;
	hw->dma_rx_tmpbuf_size = 0;
}

/*
 * dma-based transfer: map (or reuse pre-mapped) buffers, queue them on
 * the dbdma rings, kick off the transfer and sleep until the irq
 * callback signals master_done; returns the number of bytes actually
 * transferred (min of rx and tx counts)
 */
static int au1550_spi_dma_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	dma_addr_t dma_tx_addr;
	dma_addr_t dma_rx_addr;
	u32 res;

	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;
	dma_tx_addr = t->tx_dma;
	dma_rx_addr = t->rx_dma;

	/*
	 * check if buffers are already dma mapped, map them otherwise
	 * use rx buffer in place of tx if tx buffer was not provided
	 * use temp rx buffer (preallocated or realloc to fit) for rx dma
	 */
	if (t->rx_buf) {
		if (t->rx_dma == 0) {	/* if DMA_ADDR_INVALID, map it */
			dma_rx_addr = dma_map_single(hw->dev,
					(void *)t->rx_buf,
					t->len, DMA_FROM_DEVICE);
			if (dma_mapping_error(dma_rx_addr))
				dev_err(hw->dev, "rx dma map error\n");
		}
	} else {
		/* grow the temp buffer if this transfer does not fit */
		if (t->len > hw->dma_rx_tmpbuf_size) {
			int ret;

			au1550_spi_dma_rxtmp_free(hw);
			ret = au1550_spi_dma_rxtmp_alloc(hw, max(t->len,
					AU1550_SPI_DMA_RXTMP_MINSIZE));
			if (ret < 0)
				return ret;
		}
		hw->rx = hw->dma_rx_tmpbuf;
		dma_rx_addr = hw->dma_rx_tmpbuf_addr;
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
			t->len, DMA_FROM_DEVICE);
	}
	if (t->tx_buf) {
		if (t->tx_dma == 0) {	/* if DMA_ADDR_INVALID, map it */
			dma_tx_addr = dma_map_single(hw->dev,
					(void *)t->tx_buf,
					t->len, DMA_TO_DEVICE);
			if (dma_mapping_error(dma_tx_addr))
				dev_err(hw->dev, "tx dma map error\n");
		}
	} else {
		/* rx-only transfer: clock out whatever is in the rx buffer */
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
				t->len, DMA_BIDIRECTIONAL);
		hw->tx = hw->rx;
	}

	/* put buffers on the ring */
	res = au1xxx_dbdma_put_dest(hw->dma_rx_ch, hw->rx, t->len);
	if (!res)
		dev_err(hw->dev, "rx dma put dest error\n");

	res = au1xxx_dbdma_put_source(hw->dma_tx_ch, (void *)hw->tx, t->len);
	if (!res)
		dev_err(hw->dev, "tx dma put source error\n");

	au1xxx_dbdma_start(hw->dma_rx_ch);
	au1xxx_dbdma_start(hw->dma_tx_ch);

	/* by default enable nearly all events interrupt */
	hw->regs->psc_spimsk = PSC_SPIMSK_SD;
	au_sync();

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	au_sync();

	wait_for_completion(&hw->master_done);

	au1xxx_dbdma_stop(hw->dma_tx_ch);
	au1xxx_dbdma_stop(hw->dma_rx_ch);

	if (!t->rx_buf) {
		/* using the temporal preallocated and premapped buffer */
		dma_sync_single_for_cpu(hw->dev, dma_rx_addr, t->len,
			DMA_FROM_DEVICE);
	}
	/* unmap buffers if mapped above */
	if (t->rx_buf && t->rx_dma == 0)
		dma_unmap_single(hw->dev, dma_rx_addr, t->len,
			DMA_FROM_DEVICE);
	if (t->tx_buf && t->tx_dma == 0)
		dma_unmap_single(hw->dev, dma_tx_addr, t->len,
			DMA_TO_DEVICE);

	return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
}

/*
 * irq handler for dma mode: on error events the transfer is aborted
 * (dma stopped, residue counted, fifos reset); on master-done the
 * transfer is completed successfully; either way master_done is signalled
 */
static irqreturn_t au1550_spi_dma_irq_callback(struct au1550_spi *hw)
{
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	au_sync();
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD))
			!= 0) {
		/*
		 * due to an spi error we consider transfer as done,
		 * so mask all events until before next transfer start
		 * and stop the possibly running dma immediatelly
		 */
		au1550_spi_mask_ack_all(hw);
		au1xxx_dbdma_stop(hw->dma_rx_ch);
		au1xxx_dbdma_stop(hw->dma_tx_ch);

		/* get number of transfered bytes */
		hw->rx_count = hw->len - au1xxx_get_dma_residue(hw->dma_rx_ch);
		hw->tx_count = hw->len - au1xxx_get_dma_residue(hw->dma_tx_ch);

		au1xxx_dbdma_reset(hw->dma_rx_ch);
		au1xxx_dbdma_reset(hw->dma_tx_ch);
		au1550_spi_reset_fifos(hw);

		dev_err(hw->dev,
			"Unexpected SPI error: event=0x%x stat=0x%x!\n",
			evnt, stat);

		complete(&hw->master_done);
		return IRQ_HANDLED;
	}

	if ((evnt & PSC_SPIEVNT_MD) != 0) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		hw->rx_count = hw->len;
		hw->tx_count = hw->len;
		complete(&hw->master_done);
	}
	return IRQ_HANDLED;
}


/* routines to handle different word sizes in pio mode */
/* read one word from the rx fifo into *hw->rx and advance the counters;
 * rx pointer may be NULL (data discarded, count still advanced) */
#define AU1550_SPI_RX_WORD(size, mask)					\
static void au1550_spi_rx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = hw->regs->psc_spitxrx & (u32)(mask);		\
	au_sync();							\
	if (hw->rx) {							\
		*(u##size *)hw->rx = (u##size)fifoword;			\
		hw->rx += (size) / 8;					\
	}								\
	hw->rx_count += (size) / 8;					\
}

/* write one word from *hw->tx to the tx fifo, flagging the last word
 * of the transfer with PSC_SPITXRX_LC; tx pointer may be NULL (zeros sent) */
#define AU1550_SPI_TX_WORD(size, mask)					\
static void au1550_spi_tx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = 0;						\
	if (hw->tx) {							\
		fifoword = *(u##size *)hw->tx & (u32)(mask);		\
		hw->tx += (size) / 8;					\
	}								\
	hw->tx_count += (size) / 8;					\
	if (hw->tx_count >= hw->len)					\
		fifoword |= PSC_SPITXRX_LC;				\
	hw->regs->psc_spitxrx = fifoword;				\
	au_sync();							\
}

AU1550_SPI_RX_WORD(8,0xff)
AU1550_SPI_RX_WORD(16,0xffff)
AU1550_SPI_RX_WORD(32,0xffffff)
AU1550_SPI_TX_WORD(8,0xff)
AU1550_SPI_TX_WORD(16,0xffff)
AU1550_SPI_TX_WORD(32,0xffffff)

/*
 * pio/irq-based transfer: pre-fill the tx fifo, enable event interrupts,
 * start the transfer and sleep until the irq callback completes it;
 * returns the number of bytes actually transferred
 */
static int au1550_spi_pio_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	u32 stat, mask;
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;
	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	/* by default enable nearly all events after filling tx fifo */
	mask = PSC_SPIMSK_SD;

	/* fill the transmit FIFO */
	while (hw->tx_count < hw->len) {

		hw->tx_word(hw);

		if (hw->tx_count >= hw->len) {
			/* mask tx fifo request interrupt as we are done */
			mask |= PSC_SPIMSK_TR;
		}

		stat = hw->regs->psc_spistat;
		au_sync();
		if (stat & PSC_SPISTAT_TF)
			break;
	}

	/* enable event interrupts */
	hw->regs->psc_spimsk = mask;
	au_sync();

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	au_sync();

	wait_for_completion(&hw->master_done);

	return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
}

/*
 * irq handler for pio mode: on error events the transfer is aborted;
 * otherwise the rx fifo is drained and the tx fifo refilled until
 * neither side can make progress, then completion is checked
 */
static irqreturn_t au1550_spi_pio_irq_callback(struct au1550_spi *hw)
{
	int busy;
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	au_sync();
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD))
			!= 0) {
		dev_err(hw->dev,
			"Unexpected SPI error: event=0x%x stat=0x%x!\n",
			evnt, stat);
		/*
		 * due to an error we consider transfer as done,
		 * so mask all events until before next transfer start
		 */
		au1550_spi_mask_ack_all(hw);
		au1550_spi_reset_fifos(hw);
		complete(&hw->master_done);
		return IRQ_HANDLED;
	}

	/*
	 * while there is something to read from rx fifo
	 * or there is a space to write to tx fifo:
	 */
	do {
		busy = 0;
		stat = hw->regs->psc_spistat;
		au_sync();

		if ((stat & PSC_SPISTAT_RE) == 0 && hw->rx_count < hw->len) {
			hw->rx_word(hw);
			/* ack the receive request event */
			hw->regs->psc_spievent = PSC_SPIEVNT_RR;
			au_sync();
			busy = 1;
		}

		if ((stat & PSC_SPISTAT_TF) == 0 && hw->tx_count < hw->len) {
			hw->tx_word(hw);
			/* ack the transmit request event */
			hw->regs->psc_spievent = PSC_SPIEVNT_TR;
			au_sync();
			busy = 1;
		}
	} while (busy);

	evnt = hw->regs->psc_spievent;
	au_sync();

	if (hw->rx_count >= hw->len || (evnt & PSC_SPIEVNT_MD) != 0) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		complete(&hw->master_done);
	}
	return IRQ_HANDLED;
}

/* bitbang txrx_bufs hook: dispatch to the dma or pio implementation
 * selected by au1550_spi_bits_handlers_set() */
static int au1550_spi_txrx_bufs(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	return hw->txrx_bufs(spi, t);
}

/* top-level irq handler: dispatch to the dma or pio callback */
static irqreturn_t au1550_spi_irq(int irq, void *dev)
{
	struct au1550_spi *hw = dev;
	return hw->irq_callback(hw);
}

/*
 * select the word-size-specific handlers; dma is only used for
 * word sizes up to 8 bits (see the comment above au1550_spi_chipsel)
 */
static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw)
{
	if (bpw <= 8) {
		if (hw->usedma) {
			hw->txrx_bufs = &au1550_spi_dma_txrxb;
			hw->irq_callback = &au1550_spi_dma_irq_callback;
		} else {
			hw->rx_word = &au1550_spi_rx_word_8;
			hw->tx_word = &au1550_spi_tx_word_8;
			hw->txrx_bufs = &au1550_spi_pio_txrxb;
			hw->irq_callback = &au1550_spi_pio_irq_callback;
		}
	} else if (bpw <= 16) {
		hw->rx_word = &au1550_spi_rx_word_16;
		hw->tx_word = &au1550_spi_tx_word_16;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	} else {
		hw->rx_word = &au1550_spi_rx_word_32;
		hw->tx_word = &au1550_spi_tx_word_32;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	}
}

/*
 * switch the PSC into SPI mode and apply an initial safe configuration
 * (8 bit words, fifo thresholds, minimal baud); busy-waits on the
 * hardware ready bits after each state change
 */
static void __init au1550_spi_setup_psc_as_spi(struct au1550_spi *hw)
{
	u32 stat, cfg;

	/* set up the PSC for SPI mode */
	hw->regs->psc_ctrl = PSC_CTRL_DISABLE;
	au_sync();
	hw->regs->psc_sel = PSC_SEL_PS_SPIMODE;
	au_sync();

	hw->regs->psc_spicfg = 0;
	au_sync();

	hw->regs->psc_ctrl = PSC_CTRL_ENABLE;
	au_sync();

	do {
		stat = hw->regs->psc_spistat;
		au_sync();
	} while ((stat & PSC_SPISTAT_SR) == 0);


	cfg = hw->usedma ? 0 : PSC_SPICFG_DD_DISABLE;
	cfg |= PSC_SPICFG_SET_LEN(8);
	cfg |= PSC_SPICFG_RT_FIFO8 | PSC_SPICFG_TT_FIFO8;
	/* use minimal allowed brg and div values as initial setting: */
	cfg |= PSC_SPICFG_SET_BAUD(4) | PSC_SPICFG_SET_DIV(0);

#ifdef AU1550_SPI_DEBUG_LOOPBACK
	cfg |= PSC_SPICFG_LB;
#endif

	hw->regs->psc_spicfg = cfg;
	au_sync();

	au1550_spi_mask_ack_all(hw);

	hw->regs->psc_spicfg |= PSC_SPICFG_DE_ENABLE;
	au_sync();

	do {
		stat = hw->regs->psc_spistat;
		au_sync();
	} while ((stat & PSC_SPISTAT_DR) == 0);
}


/*
 * probe: allocate the spi master, resolve per-bus resources (irq,
 * registers, dma ids), set up dbdma channels when dma is usable,
 * compute the valid clock range and register with the bitbang core;
 * unwinds all acquired resources via the goto chain on failure
 */
static int __init au1550_spi_probe(struct platform_device *pdev)
{
	struct au1550_spi *hw;
	struct spi_master *master;
	int err = 0;

	master = spi_alloc_master(&pdev->dev, sizeof(struct au1550_spi));
	if (master == NULL) {
		dev_err(&pdev->dev, "No memory for spi_master\n");
		err = -ENOMEM;
		goto err_nomem;
	}

	hw = spi_master_get_devdata(master);

	hw->master = spi_master_get(master);
	hw->pdata = pdev->dev.platform_data;
	hw->dev = &pdev->dev;

	if (hw->pdata == NULL) {
		dev_err(&pdev->dev, "No platform data supplied\n");
		err = -ENOENT;
		goto err_no_pdata;
	}

	platform_set_drvdata(pdev, hw);

	init_completion(&hw->master_done);

	hw->bitbang.master = hw->master;
	hw->bitbang.setup_transfer = au1550_spi_setupxfer;
	hw->bitbang.chipselect = au1550_spi_chipsel;
	hw->bitbang.master->setup = au1550_spi_setup;
	hw->bitbang.txrx_bufs = au1550_spi_txrx_bufs;

	/* bus_num selects which PSC block this controller drives */
	switch (hw->pdata->bus_num) {
	case 0:
		hw->irq = AU1550_PSC0_INT;
		hw->regs = (volatile psc_spi_t *)PSC0_BASE_ADDR;
		hw->dma_rx_id = DSCR_CMD0_PSC0_RX;
		hw->dma_tx_id = DSCR_CMD0_PSC0_TX;
		break;
	case 1:
		hw->irq = AU1550_PSC1_INT;
		hw->regs = (volatile psc_spi_t *)PSC1_BASE_ADDR;
		hw->dma_rx_id = DSCR_CMD0_PSC1_RX;
		hw->dma_tx_id = DSCR_CMD0_PSC1_TX;
		break;
	case 2:
		hw->irq = AU1550_PSC2_INT;
		hw->regs = (volatile psc_spi_t *)PSC2_BASE_ADDR;
		hw->dma_rx_id = DSCR_CMD0_PSC2_RX;
		hw->dma_tx_id = DSCR_CMD0_PSC2_TX;
		break;
	case 3:
		hw->irq = AU1550_PSC3_INT;
		hw->regs = (volatile psc_spi_t *)PSC3_BASE_ADDR;
		hw->dma_rx_id = DSCR_CMD0_PSC3_RX;
		hw->dma_tx_id = DSCR_CMD0_PSC3_TX;
		break;
	default:
		dev_err(&pdev->dev, "Wrong bus_num of SPI\n");
		err = -ENOENT;
		goto err_no_pdata;
	}

	if (request_mem_region((unsigned long)hw->regs, sizeof(psc_spi_t),
			pdev->name) == NULL) {
		dev_err(&pdev->dev, "Cannot reserve iomem region\n");
		err = -ENXIO;
		goto err_no_iores;
	}


	/* dma is only used if requested by module param AND the platform
	 * device provides a dma mask */
	if (usedma) {
		if (pdev->dev.dma_mask == NULL)
			dev_warn(&pdev->dev, "no dma mask\n");
		else
			hw->usedma = 1;
	}

	if (hw->usedma) {
		/*
		 * create memory device with 8 bits dev_devwidth
		 * needed for proper byte ordering to spi fifo
		 */
		int memid = au1xxx_ddma_add_device(&au1550_spi_mem_dbdev);
		if (!memid) {
			dev_err(&pdev->dev,
				"Cannot create dma 8 bit mem device\n");
			err = -ENXIO;
			goto err_dma_add_dev;
		}

		hw->dma_tx_ch = au1xxx_dbdma_chan_alloc(memid,
			hw->dma_tx_id, NULL, (void *)hw);
		if (hw->dma_tx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma channel\n");
			err = -ENXIO;
			goto err_no_txdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_tx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_tx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma descriptors\n");
			err = -ENXIO;
			goto err_no_txdma_descr;
		}


		hw->dma_rx_ch = au1xxx_dbdma_chan_alloc(hw->dma_rx_id,
			memid, NULL, (void *)hw);
		if (hw->dma_rx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma channel\n");
			err = -ENXIO;
			goto err_no_rxdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_rx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_rx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma descriptors\n");
			err = -ENXIO;
			goto err_no_rxdma_descr;
		}

		err = au1550_spi_dma_rxtmp_alloc(hw,
			AU1550_SPI_DMA_RXTMP_MINSIZE);
		if (err < 0) {
			dev_err(&pdev->dev,
				"Cannot allocate initial rx dma tmp buffer\n");
			goto err_dma_rxtmp_alloc;
		}
	}

	au1550_spi_bits_handlers_set(hw, 8);

	err = request_irq(hw->irq, au1550_spi_irq, 0, pdev->name, hw);
	if (err) {
		dev_err(&pdev->dev, "Cannot claim IRQ\n");
		goto err_no_irq;
	}

	master->bus_num = hw->pdata->bus_num;
	master->num_chipselect = hw->pdata->num_chipselect;

	/*
	 * precompute valid range for spi freq - from au1550 datasheet:
	 *    psc_tempclk = psc_mainclk / (2 << DIV)
	 *    spiclk = psc_tempclk / (2 * (BRG + 1))
	 *    BRG valid range is 4..63
	 *    DIV valid range is 0..3
	 * round the min and max frequencies to values that would still
	 * produce valid brg and div
	 */
	{
		int min_div = (2 << 0) * (2 * (4 + 1));
		int max_div = (2 << 3) * (2 * (63 + 1));
		hw->freq_max = hw->pdata->mainclk_hz / min_div;
		hw->freq_min = hw->pdata->mainclk_hz / (max_div + 1) + 1;
	}

	au1550_spi_setup_psc_as_spi(hw);

	err = spi_bitbang_start(&hw->bitbang);
	if (err) {
		dev_err(&pdev->dev, "Failed to register SPI master\n");
		goto err_register;
	}

	dev_info(&pdev->dev,
		"spi master registered: bus_num=%d num_chipselect=%d\n",
		master->bus_num, master->num_chipselect);

	return 0;

err_register:
	free_irq(hw->irq, hw);

err_no_irq:
	au1550_spi_dma_rxtmp_free(hw);

err_dma_rxtmp_alloc:
err_no_rxdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);

err_no_rxdma:
err_no_txdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);

err_no_txdma:
err_dma_add_dev:
	release_mem_region((unsigned long)hw->regs, sizeof(psc_spi_t));

err_no_iores:
err_no_pdata:
	spi_master_put(hw->master);

err_nomem:
	return err;
}

/* remove: unwind everything acquired in probe, in reverse order */
static int __exit au1550_spi_remove(struct platform_device *pdev)
{
	struct au1550_spi *hw = platform_get_drvdata(pdev);

	dev_info(&pdev->dev, "spi master remove: bus_num=%d\n",
		hw->master->bus_num);

	spi_bitbang_stop(&hw->bitbang);
	free_irq(hw->irq, hw);
	release_mem_region((unsigned long)hw->regs, sizeof(psc_spi_t));

	if (hw->usedma) {
		au1550_spi_dma_rxtmp_free(hw);
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);
	}

	platform_set_drvdata(pdev, NULL);

	spi_master_put(hw->master);
	return 0;
}

/* work with hotplug and coldplug */
MODULE_ALIAS("platform:au1550-spi");

/* probe is registered separately via platform_driver_probe() below,
 * so only .remove appears here */
static struct platform_driver au1550_spi_drv = {
	.remove = __exit_p(au1550_spi_remove),
	.driver = {
		.name = "au1550-spi",
		.owner = THIS_MODULE,
	},
};

static int __init au1550_spi_init(void)
{
	return platform_driver_probe(&au1550_spi_drv, au1550_spi_probe);
}
module_init(au1550_spi_init);

static void __exit au1550_spi_exit(void)
{
	platform_driver_unregister(&au1550_spi_drv);
}
module_exit(au1550_spi_exit);

MODULE_DESCRIPTION("Au1550 PSC SPI Driver");
MODULE_AUTHOR("Jan Nikitenko <jan.nikitenko@gmail.com>");
MODULE_LICENSE("GPL");