Linux Kernel 3.7.1
spi-au1550.c
1 /*
2  * au1550 psc spi controller driver
3  * may work also with au1200, au1210, au1250
4  * will not work on au1000, au1100 and au1500 (no full spi controller there)
5  *
6  * Copyright (c) 2006 ATRON electronic GmbH
7  * Author: Jan Nikitenko <[email protected]>
8  *
9  * This program is free software; you can redistribute it and/or modify
10  * it under the terms of the GNU General Public License as published by
11  * the Free Software Foundation; either version 2 of the License, or
12  * (at your option) any later version.
13  *
14  * This program is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17  * GNU General Public License for more details.
18  *
19  * You should have received a copy of the GNU General Public License
20  * along with this program; if not, write to the Free Software
21  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22  */
23 
24 #include <linux/init.h>
25 #include <linux/interrupt.h>
26 #include <linux/slab.h>
27 #include <linux/errno.h>
28 #include <linux/module.h>
29 #include <linux/device.h>
30 #include <linux/platform_device.h>
31 #include <linux/resource.h>
32 #include <linux/spi/spi.h>
33 #include <linux/spi/spi_bitbang.h>
34 #include <linux/dma-mapping.h>
35 #include <linux/completion.h>
36 #include <asm/mach-au1x00/au1000.h>
37 #include <asm/mach-au1x00/au1xxx_psc.h>
38 #include <asm/mach-au1x00/au1xxx_dbdma.h>
39 
40 #include <asm/mach-au1x00/au1550_spi.h>
41 
42 static unsigned usedma = 1;
43 module_param(usedma, uint, 0644);
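/*
 * usedma selects DBDMA-assisted transfers (1, the default) or PIO-only
 * operation (0). It may be set on the kernel/module command line; with the
 * 0644 permission mask it also shows up under
 * /sys/module/.../parameters/usedma, but note that it is only consulted at
 * module init and probe time.
 */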
44 
45 /*
46 #define AU1550_SPI_DEBUG_LOOPBACK
47 */
48 
49 
50 #define AU1550_SPI_DBDMA_DESCRIPTORS 1
51 #define AU1550_SPI_DMA_RXTMP_MINSIZE 2048U
52 
53 struct au1550_spi {
54  struct spi_bitbang bitbang;
55 
56  volatile psc_spi_t __iomem *regs;
57  int irq;
58  unsigned freq_max;
59  unsigned freq_min;
60 
61  unsigned len;
62  unsigned tx_count;
63  unsigned rx_count;
64  const u8 *tx;
65  u8 *rx;
66 
67  void (*rx_word)(struct au1550_spi *hw);
68  void (*tx_word)(struct au1550_spi *hw);
69  int (*txrx_bufs)(struct spi_device *spi, struct spi_transfer *t);
70  irqreturn_t (*irq_callback)(struct au1550_spi *hw);
71 
72  struct completion master_done;
73 
74  unsigned usedma;
75  u32 dma_tx_id;
76  u32 dma_rx_id;
77  u32 dma_tx_ch;
78  u32 dma_rx_ch;
79 
80  u8 *dma_rx_tmpbuf;
81  unsigned dma_rx_tmpbuf_size;
82  u32 dma_rx_tmpbuf_addr;
83 
84  struct spi_master *master;
85  struct device *dev;
86  struct au1550_spi_info *pdata;
87  struct resource *ioarea;
88 };
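/*
 * hw->pdata is the board-supplied struct au1550_spi_info platform data; this
 * driver only uses its mainclk_hz, num_chipselect and the optional
 * activate_cs()/deactivate_cs() hooks seen below.
 */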
89 
90 
91 /* we use an 8-bit memory device for dma transfers to/from spi fifo */
92 static dbdev_tab_t au1550_spi_mem_dbdev =
93 {
94  .dev_id = DBDMA_MEM_CHAN,
95  .dev_flags = DEV_FLAGS_ANYUSE|DEV_FLAGS_SYNC,
96  .dev_tsize = 0,
97  .dev_devwidth = 8,
98  .dev_physaddr = 0x00000000,
99  .dev_intlevel = 0,
100  .dev_intpolarity = 0
101 };
102 
103 static int ddma_memid; /* id to above mem dma device */
104 
105 static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw);
106 
107 
108 /*
109  * compute BRG and DIV bits to setup spi clock based on main input clock rate
110  * that was specified in platform data structure
111  * according to au1550 datasheet:
112  * psc_tempclk = psc_mainclk / (2 << DIV)
113  * spiclk = psc_tempclk / (2 * (BRG + 1))
114  * BRG valid range is 4..63
115  * DIV valid range is 0..3
116  */
117 static u32 au1550_spi_baudcfg(struct au1550_spi *hw, unsigned speed_hz)
118 {
119  u32 mainclk_hz = hw->pdata->mainclk_hz;
120  u32 div, brg;
121 
122  for (div = 0; div < 4; div++) {
123  brg = mainclk_hz / speed_hz / (4 << div);
124  /* now we have BRG+1 in brg, so count with that */
125  if (brg < (4 + 1)) {
126  brg = (4 + 1); /* speed_hz too big */
127  break; /* set lowest brg (div is == 0) */
128  }
129  if (brg <= (63 + 1))
130  break; /* we have valid brg and div */
131  }
132  if (div == 4) {
133  div = 3; /* speed_hz too small */
134  brg = (63 + 1); /* set highest brg and div */
135  }
136  brg--;
137  return PSC_SPICFG_SET_BAUD(brg) | PSC_SPICFG_SET_DIV(div);
138 }
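/*
 * Worked example (values assumed purely for illustration): with a 48 MHz
 * psc_mainclk and a requested 1 MHz spiclk, the first loop pass (DIV = 0)
 * gives 48000000 / 1000000 / 4 = 12 = BRG + 1, which lies within 5..64,
 * so BRG = 11 and DIV = 0 are used:
 *   psc_tempclk = 48 MHz / (2 << 0) = 24 MHz
 *   spiclk      = 24 MHz / (2 * (11 + 1)) = 1 MHz
 */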
139 
140 static inline void au1550_spi_mask_ack_all(struct au1550_spi *hw)
141 {
142  hw->regs->psc_spimsk =
143    PSC_SPIMSK_MM | PSC_SPIMSK_RR | PSC_SPIMSK_RO
144    | PSC_SPIMSK_RU | PSC_SPIMSK_TR | PSC_SPIMSK_TO
145    | PSC_SPIMSK_TU | PSC_SPIMSK_SD | PSC_SPIMSK_MD;
146  au_sync();
147 
148  hw->regs->psc_spievent =
149    PSC_SPIEVNT_MM | PSC_SPIEVNT_RR | PSC_SPIEVNT_RO
150    | PSC_SPIEVNT_RU | PSC_SPIEVNT_TR | PSC_SPIEVNT_TO
151    | PSC_SPIEVNT_TU | PSC_SPIEVNT_SD | PSC_SPIEVNT_MD;
152  au_sync();
153 }
154 
155 static void au1550_spi_reset_fifos(struct au1550_spi *hw)
156 {
157  u32 pcr;
158 
159  hw->regs->psc_spipcr = PSC_SPIPCR_RC | PSC_SPIPCR_TC;
160  au_sync();
161  do {
162  pcr = hw->regs->psc_spipcr;
163  au_sync();
164  } while (pcr != 0);
165 }
166 
167 /*
168  * dma transfers are used for the most common spi word size of 8-bits
169  * we cannot easily change already set up dma channels' width, so if we wanted
170  * dma support for more than 8-bit words (up to 24 bits), we would need to
171  * setup dma channels from scratch on each spi transfer, based on bits_per_word
172  * instead we have pre-set-up 8-bit dma channels supporting spi transfers of 4 to 8 bits,
173  * while 9 to 24 bit spi transfers are done in pio irq based mode
174  * callbacks to handle dma or pio are set up in au1550_spi_bits_handlers_set()
175  */
176 static void au1550_spi_chipsel(struct spi_device *spi, int value)
177 {
178  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
179  unsigned cspol = spi->mode & SPI_CS_HIGH ? 1 : 0;
180  u32 cfg, stat;
181 
182  switch (value) {
183  case BITBANG_CS_INACTIVE:
184  if (hw->pdata->deactivate_cs)
185  hw->pdata->deactivate_cs(hw->pdata, spi->chip_select,
186  cspol);
187  break;
188 
189  case BITBANG_CS_ACTIVE:
190  au1550_spi_bits_handlers_set(hw, spi->bits_per_word);
191 
192  cfg = hw->regs->psc_spicfg;
193  au_sync();
194  hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
195  au_sync();
196 
197  if (spi->mode & SPI_CPOL)
198  cfg |= PSC_SPICFG_BI;
199  else
200  cfg &= ~PSC_SPICFG_BI;
201  if (spi->mode & SPI_CPHA)
202  cfg &= ~PSC_SPICFG_CDE;
203  else
204  cfg |= PSC_SPICFG_CDE;
205 
206  if (spi->mode & SPI_LSB_FIRST)
207  cfg |= PSC_SPICFG_MLF;
208  else
209  cfg &= ~PSC_SPICFG_MLF;
210 
211  if (hw->usedma && spi->bits_per_word <= 8)
212  cfg &= ~PSC_SPICFG_DD_DISABLE;
213  else
214  cfg |= PSC_SPICFG_DD_DISABLE;
215  cfg = PSC_SPICFG_CLR_LEN(cfg);
216  cfg |= PSC_SPICFG_SET_LEN(spi->bits_per_word);
217 
218  cfg = PSC_SPICFG_CLR_BAUD(cfg);
219  cfg &= ~PSC_SPICFG_SET_DIV(3);
220  cfg |= au1550_spi_baudcfg(hw, spi->max_speed_hz);
221 
222  hw->regs->psc_spicfg = cfg | PSC_SPICFG_DE_ENABLE;
223  au_sync();
224  do {
225  stat = hw->regs->psc_spistat;
226  au_sync();
227  } while ((stat & PSC_SPISTAT_DR) == 0);
228 
229  if (hw->pdata->activate_cs)
230  hw->pdata->activate_cs(hw->pdata, spi->chip_select,
231  cspol);
232  break;
233  }
234 }
235 
236 static int au1550_spi_setupxfer(struct spi_device *spi, struct spi_transfer *t)
237 {
238  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
239  unsigned bpw, hz;
240  u32 cfg, stat;
241 
242  bpw = spi->bits_per_word;
243  hz = spi->max_speed_hz;
244  if (t) {
245  if (t->bits_per_word)
246  bpw = t->bits_per_word;
247  if (t->speed_hz)
248  hz = t->speed_hz;
249  }
250 
251  if (bpw < 4 || bpw > 24) {
252  dev_err(&spi->dev, "setupxfer: invalid bits_per_word=%d\n",
253  bpw);
254  return -EINVAL;
255  }
256  if (hz > spi->max_speed_hz || hz > hw->freq_max || hz < hw->freq_min) {
257  dev_err(&spi->dev, "setupxfer: clock rate=%d out of range\n",
258  hz);
259  return -EINVAL;
260  }
261 
262  au1550_spi_bits_handlers_set(hw, spi->bits_per_word);
263 
264  cfg = hw->regs->psc_spicfg;
265  au_sync();
266  hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
267  au_sync();
268 
269  if (hw->usedma && bpw <= 8)
270  cfg &= ~PSC_SPICFG_DD_DISABLE;
271  else
272  cfg |= PSC_SPICFG_DD_DISABLE;
273  cfg = PSC_SPICFG_CLR_LEN(cfg);
274  cfg |= PSC_SPICFG_SET_LEN(bpw);
275 
276  cfg = PSC_SPICFG_CLR_BAUD(cfg);
277  cfg &= ~PSC_SPICFG_SET_DIV(3);
278  cfg |= au1550_spi_baudcfg(hw, hz);
279 
280  hw->regs->psc_spicfg = cfg;
281  au_sync();
282 
283  if (cfg & PSC_SPICFG_DE_ENABLE) {
284  do {
285  stat = hw->regs->psc_spistat;
286  au_sync();
287  } while ((stat & PSC_SPISTAT_DR) == 0);
288  }
289 
290  au1550_spi_reset_fifos(hw);
291  au1550_spi_mask_ack_all(hw);
292  return 0;
293 }
294 
295 static int au1550_spi_setup(struct spi_device *spi)
296 {
297  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
298 
299  if (spi->bits_per_word < 4 || spi->bits_per_word > 24) {
300  dev_err(&spi->dev, "setup: invalid bits_per_word=%d\n",
301  spi->bits_per_word);
302  return -EINVAL;
303  }
304 
305  if (spi->max_speed_hz == 0)
306  spi->max_speed_hz = hw->freq_max;
307  if (spi->max_speed_hz > hw->freq_max
308  || spi->max_speed_hz < hw->freq_min)
309  return -EINVAL;
310  /*
311  * NOTE: cannot change speed and other hw settings immediately,
312  * otherwise sharing of spi bus is not possible,
313  * so do not call setupxfer(spi, NULL) here
314  */
315  return 0;
316 }
317 
318 /*
319  * for dma spi transfers, we have to setup rx channel, otherwise there is
320  * no reliable way to recognize that the spi transfer is done
321  * dma complete callbacks are called before real spi transfer is finished
322  * and if only tx dma channel is set up (and rx fifo overflow event masked)
323  * spi master done event irq is not generated unless rx fifo is empty (emptied)
324  * so we need rx tmp buffer to use for rx dma if user does not provide one
325  */
326 static int au1550_spi_dma_rxtmp_alloc(struct au1550_spi *hw, unsigned size)
327 {
328  hw->dma_rx_tmpbuf = kmalloc(size, GFP_KERNEL);
329  if (!hw->dma_rx_tmpbuf)
330  return -ENOMEM;
331  hw->dma_rx_tmpbuf_size = size;
332  hw->dma_rx_tmpbuf_addr = dma_map_single(hw->dev, hw->dma_rx_tmpbuf,
333    size, DMA_FROM_DEVICE);
334  if (dma_mapping_error(hw->dev, hw->dma_rx_tmpbuf_addr)) {
335  kfree(hw->dma_rx_tmpbuf);
336  hw->dma_rx_tmpbuf = 0;
337  hw->dma_rx_tmpbuf_size = 0;
338  return -EFAULT;
339  }
340  return 0;
341 }
342 
343 static void au1550_spi_dma_rxtmp_free(struct au1550_spi *hw)
344 {
345  dma_unmap_single(hw->dev, hw->dma_rx_tmpbuf_addr,
346    hw->dma_rx_tmpbuf_size, DMA_FROM_DEVICE);
347  kfree(hw->dma_rx_tmpbuf);
348  hw->dma_rx_tmpbuf = 0;
349  hw->dma_rx_tmpbuf_size = 0;
350 }
351 
352 static int au1550_spi_dma_txrxb(struct spi_device *spi, struct spi_transfer *t)
353 {
354  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
355  dma_addr_t dma_tx_addr;
356  dma_addr_t dma_rx_addr;
357  u32 res;
358 
359  hw->len = t->len;
360  hw->tx_count = 0;
361  hw->rx_count = 0;
362 
363  hw->tx = t->tx_buf;
364  hw->rx = t->rx_buf;
365  dma_tx_addr = t->tx_dma;
366  dma_rx_addr = t->rx_dma;
367 
368  /*
369  * check if buffers are already dma mapped, map them otherwise:
370  * - first map the TX buffer, so cache data gets written to memory
371  * - then map the RX buffer, so that cache entries (with
372  * soon-to-be-stale data) get removed
373  * use rx buffer in place of tx if tx buffer was not provided
374  * use temp rx buffer (preallocated or realloc to fit) for rx dma
375  */
376  if (t->tx_buf) {
377  if (t->tx_dma == 0) { /* if DMA_ADDR_INVALID, map it */
378  dma_tx_addr = dma_map_single(hw->dev,
379  (void *)t->tx_buf,
380  t->len, DMA_TO_DEVICE);
381  if (dma_mapping_error(hw->dev, dma_tx_addr))
382  dev_err(hw->dev, "tx dma map error\n");
383  }
384  }
385 
386  if (t->rx_buf) {
387  if (t->rx_dma == 0) { /* if DMA_ADDR_INVALID, map it */
388  dma_rx_addr = dma_map_single(hw->dev,
389  (void *)t->rx_buf,
390  t->len, DMA_FROM_DEVICE);
391  if (dma_mapping_error(hw->dev, dma_rx_addr))
392  dev_err(hw->dev, "rx dma map error\n");
393  }
394  } else {
395  if (t->len > hw->dma_rx_tmpbuf_size) {
396  int ret;
397 
398  au1550_spi_dma_rxtmp_free(hw);
399  ret = au1550_spi_dma_rxtmp_alloc(hw, max(t->len,
400    AU1550_SPI_DMA_RXTMP_MINSIZE));
401  if (ret < 0)
402  return ret;
403  }
404  hw->rx = hw->dma_rx_tmpbuf;
405  dma_rx_addr = hw->dma_rx_tmpbuf_addr;
406  dma_sync_single_for_device(hw->dev, dma_rx_addr,
407  t->len, DMA_FROM_DEVICE);
408  }
409 
410  if (!t->tx_buf) {
411  dma_sync_single_for_device(hw->dev, dma_rx_addr,
412  t->len, DMA_BIDIRECTIONAL);
413  hw->tx = hw->rx;
414  }
415 
416  /* put buffers on the ring */
417  res = au1xxx_dbdma_put_dest(hw->dma_rx_ch, virt_to_phys(hw->rx),
418    t->len, DDMA_FLAGS_IE);
419  if (!res)
420  dev_err(hw->dev, "rx dma put dest error\n");
421 
422  res = au1xxx_dbdma_put_source(hw->dma_tx_ch, virt_to_phys(hw->tx),
423    t->len, DDMA_FLAGS_IE);
424  if (!res)
425  dev_err(hw->dev, "tx dma put source error\n");
426 
427  au1xxx_dbdma_start(hw->dma_rx_ch);
428  au1xxx_dbdma_start(hw->dma_tx_ch);
429 
430  /* by default enable nearly all events interrupt */
431  hw->regs->psc_spimsk = PSC_SPIMSK_SD;
432  au_sync();
433 
434  /* start the transfer */
435  hw->regs->psc_spipcr = PSC_SPIPCR_MS;
436  au_sync();
437 
438  wait_for_completion(&hw->master_done);
439 
440  au1xxx_dbdma_stop(hw->dma_tx_ch);
441  au1xxx_dbdma_stop(hw->dma_rx_ch);
442 
443  if (!t->rx_buf) {
444  /* using the temporal preallocated and premapped buffer */
445  dma_sync_single_for_cpu(hw->dev, dma_rx_addr, t->len,
446    DMA_FROM_DEVICE);
447  }
448  /* unmap buffers if mapped above */
449  if (t->rx_buf && t->rx_dma == 0 )
450  dma_unmap_single(hw->dev, dma_rx_addr, t->len,
451    DMA_FROM_DEVICE);
452  if (t->tx_buf && t->tx_dma == 0 )
453  dma_unmap_single(hw->dev, dma_tx_addr, t->len,
454  DMA_TO_DEVICE);
455 
456  return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
457 }
458 
459 static irqreturn_t au1550_spi_dma_irq_callback(struct au1550_spi *hw)
460 {
461  u32 stat, evnt;
462 
463  stat = hw->regs->psc_spistat;
464  evnt = hw->regs->psc_spievent;
465  au_sync();
466  if ((stat & PSC_SPISTAT_DI) == 0) {
467  dev_err(hw->dev, "Unexpected IRQ!\n");
468  return IRQ_NONE;
469  }
470 
471  if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
472    | PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
473    | PSC_SPIEVNT_TU | PSC_SPIEVNT_SD))
474    != 0) {
475  /*
476  * due to an spi error we consider transfer as done,
477  * so mask all events until before next transfer start
478  * and stop the possibly running dma immediately
479  */
480  au1550_spi_mask_ack_all(hw);
481  au1xxx_dbdma_stop(hw->dma_rx_ch);
482  au1xxx_dbdma_stop(hw->dma_tx_ch);
483 
484  /* get number of transferred bytes */
485  hw->rx_count = hw->len - au1xxx_get_dma_residue(hw->dma_rx_ch);
486  hw->tx_count = hw->len - au1xxx_get_dma_residue(hw->dma_tx_ch);
487 
488  au1xxx_dbdma_reset(hw->dma_rx_ch);
489  au1xxx_dbdma_reset(hw->dma_tx_ch);
490  au1550_spi_reset_fifos(hw);
491 
492  if (evnt == PSC_SPIEVNT_RO)
493  dev_err(hw->dev,
494  "dma transfer: receive FIFO overflow!\n");
495  else
496  dev_err(hw->dev,
497  "dma transfer: unexpected SPI error "
498  "(event=0x%x stat=0x%x)!\n", evnt, stat);
499 
500  complete(&hw->master_done);
501  return IRQ_HANDLED;
502  }
503 
504  if ((evnt & PSC_SPIEVNT_MD) != 0) {
505  /* transfer completed successfully */
506  au1550_spi_mask_ack_all(hw);
507  hw->rx_count = hw->len;
508  hw->tx_count = hw->len;
509  complete(&hw->master_done);
510  }
511  return IRQ_HANDLED;
512 }
513 
514 
515 /* routines to handle different word sizes in pio mode */
516 #define AU1550_SPI_RX_WORD(size, mask) \
517 static void au1550_spi_rx_word_##size(struct au1550_spi *hw) \
518 { \
519  u32 fifoword = hw->regs->psc_spitxrx & (u32)(mask); \
520  au_sync(); \
521  if (hw->rx) { \
522  *(u##size *)hw->rx = (u##size)fifoword; \
523  hw->rx += (size) / 8; \
524  } \
525  hw->rx_count += (size) / 8; \
526 }
527 
528 #define AU1550_SPI_TX_WORD(size, mask) \
529 static void au1550_spi_tx_word_##size(struct au1550_spi *hw) \
530 { \
531  u32 fifoword = 0; \
532  if (hw->tx) { \
533  fifoword = *(u##size *)hw->tx & (u32)(mask); \
534  hw->tx += (size) / 8; \
535  } \
536  hw->tx_count += (size) / 8; \
537  if (hw->tx_count >= hw->len) \
538  fifoword |= PSC_SPITXRX_LC; \
539  hw->regs->psc_spitxrx = fifoword; \
540  au_sync(); \
541 }
542 
543 AU1550_SPI_RX_WORD(8,0xff)
544 AU1550_SPI_RX_WORD(16,0xffff)
545 AU1550_SPI_RX_WORD(32,0xffffff)
546 AU1550_SPI_TX_WORD(8,0xff)
547 AU1550_SPI_TX_WORD(16,0xffff)
548 AU1550_SPI_TX_WORD(32,0xffffff)
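/*
 * The macro instantiations above expand to au1550_spi_rx_word_8/16/32() and
 * au1550_spi_tx_word_8/16/32(), which au1550_spi_bits_handlers_set() below
 * selects by bits_per_word; the 32-bit variants still mask with 0xffffff
 * because the PSC handles at most 24 bits per word.
 */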
549 
550 static int au1550_spi_pio_txrxb(struct spi_device *spi, struct spi_transfer *t)
551 {
552  u32 stat, mask;
553  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
554 
555  hw->tx = t->tx_buf;
556  hw->rx = t->rx_buf;
557  hw->len = t->len;
558  hw->tx_count = 0;
559  hw->rx_count = 0;
560 
561  /* by default enable nearly all events after filling tx fifo */
562  mask = PSC_SPIMSK_SD;
563 
564  /* fill the transmit FIFO */
565  while (hw->tx_count < hw->len) {
566 
567  hw->tx_word(hw);
568 
569  if (hw->tx_count >= hw->len) {
570  /* mask tx fifo request interrupt as we are done */
571  mask |= PSC_SPIMSK_TR;
572  }
573 
574  stat = hw->regs->psc_spistat;
575  au_sync();
576  if (stat & PSC_SPISTAT_TF)
577  break;
578  }
579 
580  /* enable event interrupts */
581  hw->regs->psc_spimsk = mask;
582  au_sync();
583 
584  /* start the transfer */
585  hw->regs->psc_spipcr = PSC_SPIPCR_MS;
586  au_sync();
587 
588  wait_for_completion(&hw->master_done);
589 
590  return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
591 }
592 
593 static irqreturn_t au1550_spi_pio_irq_callback(struct au1550_spi *hw)
594 {
595  int busy;
596  u32 stat, evnt;
597 
598  stat = hw->regs->psc_spistat;
599  evnt = hw->regs->psc_spievent;
600  au_sync();
601  if ((stat & PSC_SPISTAT_DI) == 0) {
602  dev_err(hw->dev, "Unexpected IRQ!\n");
603  return IRQ_NONE;
604  }
605 
606  if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
607    | PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
608    | PSC_SPIEVNT_SD))
609    != 0) {
610  /*
611  * due to an error we consider transfer as done,
612  * so mask all events until before next transfer start
613  */
614  au1550_spi_mask_ack_all(hw);
615  au1550_spi_reset_fifos(hw);
616  dev_err(hw->dev,
617  "pio transfer: unexpected SPI error "
618  "(event=0x%x stat=0x%x)!\n", evnt, stat);
619  complete(&hw->master_done);
620  return IRQ_HANDLED;
621  }
622 
623  /*
624  * while there is something to read from rx fifo
625  * or there is a space to write to tx fifo:
626  */
627  do {
628  busy = 0;
629  stat = hw->regs->psc_spistat;
630  au_sync();
631 
632  /*
633  * Take care to not let the Rx FIFO overflow.
634  *
635  * We only write a byte if we have read one at least. Initially,
636  * the write fifo is full, so we should read from the read fifo
637  * first.
638  * In case we miss a word from the read fifo, we should get a
639  * RO event and should back out.
640  */
641  if (!(stat & PSC_SPISTAT_RE) && hw->rx_count < hw->len) {
642  hw->rx_word(hw);
643  busy = 1;
644 
645  if (!(stat & PSC_SPISTAT_TF) && hw->tx_count < hw->len)
646  hw->tx_word(hw);
647  }
648  } while (busy);
649 
650  hw->regs->psc_spievent = PSC_SPIEVNT_RR | PSC_SPIEVNT_TR;
651  au_sync();
652 
653  /*
654  * Restart the SPI transmission in case of a transmit underflow.
655  * This seems to work despite the notes in the Au1550 data book
656  * of Figure 8-4 with flowchart for SPI master operation:
657  *
658  * """Note 1: An XFR Error Interrupt occurs, unless masked,
659  * for any of the following events: Tx FIFO Underflow,
660  * Rx FIFO Overflow, or Multiple-master Error
661  * Note 2: In case of a Tx Underflow Error, all zeroes are
662  * transmitted."""
663  *
664  * By simply restarting the spi transfer on Tx Underflow Error,
665  * we assume that the spi transfer was merely paused, rather than zeroes
666  * being transmitted as described in Note 2 of the Au1550 data book.
667  */
668  if (evnt & PSC_SPIEVNT_TU) {
669  hw->regs->psc_spievent = PSC_SPIEVNT_TU | PSC_SPIEVNT_MD;
670  au_sync();
671  hw->regs->psc_spipcr = PSC_SPIPCR_MS;
672  au_sync();
673  }
674 
675  if (hw->rx_count >= hw->len) {
676  /* transfer completed successfully */
677  au1550_spi_mask_ack_all(hw);
678  complete(&hw->master_done);
679  }
680  return IRQ_HANDLED;
681 }
682 
683 static int au1550_spi_txrx_bufs(struct spi_device *spi, struct spi_transfer *t)
684 {
685  struct au1550_spi *hw = spi_master_get_devdata(spi->master);
686  return hw->txrx_bufs(spi, t);
687 }
688 
689 static irqreturn_t au1550_spi_irq(int irq, void *dev)
690 {
691  struct au1550_spi *hw = dev;
692  return hw->irq_callback(hw);
693 }
694 
695 static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw)
696 {
697  if (bpw <= 8) {
698  if (hw->usedma) {
699  hw->txrx_bufs = &au1550_spi_dma_txrxb;
700  hw->irq_callback = &au1550_spi_dma_irq_callback;
701  } else {
702  hw->rx_word = &au1550_spi_rx_word_8;
703  hw->tx_word = &au1550_spi_tx_word_8;
704  hw->txrx_bufs = &au1550_spi_pio_txrxb;
705  hw->irq_callback = &au1550_spi_pio_irq_callback;
706  }
707  } else if (bpw <= 16) {
708  hw->rx_word = &au1550_spi_rx_word_16;
709  hw->tx_word = &au1550_spi_tx_word_16;
710  hw->txrx_bufs = &au1550_spi_pio_txrxb;
711  hw->irq_callback = &au1550_spi_pio_irq_callback;
712  } else {
713  hw->rx_word = &au1550_spi_rx_word_32;
714  hw->tx_word = &au1550_spi_tx_word_32;
715  hw->txrx_bufs = &au1550_spi_pio_txrxb;
716  hw->irq_callback = &au1550_spi_pio_irq_callback;
717  }
718 }
719 
720 static void __init au1550_spi_setup_psc_as_spi(struct au1550_spi *hw)
721 {
722  u32 stat, cfg;
723 
724  /* set up the PSC for SPI mode */
725  hw->regs->psc_ctrl = PSC_CTRL_DISABLE;
726  au_sync();
727  hw->regs->psc_sel = PSC_SEL_PS_SPIMODE;
728  au_sync();
729 
730  hw->regs->psc_spicfg = 0;
731  au_sync();
732 
733  hw->regs->psc_ctrl = PSC_CTRL_ENABLE;
734  au_sync();
735 
736  do {
737  stat = hw->regs->psc_spistat;
738  au_sync();
739  } while ((stat & PSC_SPISTAT_SR) == 0);
740 
741 
742  cfg = hw->usedma ? 0 : PSC_SPICFG_DD_DISABLE;
743  cfg |= PSC_SPICFG_SET_LEN(8);
744  cfg |= PSC_SPICFG_RT_FIFO8 | PSC_SPICFG_TT_FIFO8;
745  /* use minimal allowed brg and div values as initial setting: */
746  cfg |= PSC_SPICFG_SET_BAUD(4) | PSC_SPICFG_SET_DIV(0);
747 
748 #ifdef AU1550_SPI_DEBUG_LOOPBACK
749  cfg |= PSC_SPICFG_LB;
750 #endif
751 
752  hw->regs->psc_spicfg = cfg;
753  au_sync();
754 
755  au1550_spi_mask_ack_all(hw);
756 
757  hw->regs->psc_spicfg |= PSC_SPICFG_DE_ENABLE;
758  au_sync();
759 
760  do {
761  stat = hw->regs->psc_spistat;
762  au_sync();
763  } while ((stat & PSC_SPISTAT_DR) == 0);
764 
765  au1550_spi_reset_fifos(hw);
766 }
767 
768 
769 static int __init au1550_spi_probe(struct platform_device *pdev)
770 {
771  struct au1550_spi *hw;
772  struct spi_master *master;
773  struct resource *r;
774  int err = 0;
775 
776  master = spi_alloc_master(&pdev->dev, sizeof(struct au1550_spi));
777  if (master == NULL) {
778  dev_err(&pdev->dev, "No memory for spi_master\n");
779  err = -ENOMEM;
780  goto err_nomem;
781  }
782 
783  /* the spi->mode bits understood by this driver: */
784  master->mode_bits = SPI_MODE_3 | SPI_LSB_FIRST | SPI_CS_HIGH;
785 
786  hw = spi_master_get_devdata(master);
787 
788  hw->master = spi_master_get(master);
789  hw->pdata = pdev->dev.platform_data;
790  hw->dev = &pdev->dev;
791 
792  if (hw->pdata == NULL) {
793  dev_err(&pdev->dev, "No platform data supplied\n");
794  err = -ENOENT;
795  goto err_no_pdata;
796  }
797 
798  r = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
799  if (!r) {
800  dev_err(&pdev->dev, "no IRQ\n");
801  err = -ENODEV;
802  goto err_no_iores;
803  }
804  hw->irq = r->start;
805 
806  hw->usedma = 0;
807  r = platform_get_resource(pdev, IORESOURCE_DMA, 0);
808  if (r) {
809  hw->dma_tx_id = r->start;
810  r = platform_get_resource(pdev, IORESOURCE_DMA, 1);
811  if (r) {
812  hw->dma_rx_id = r->start;
813  if (usedma && ddma_memid) {
814  if (pdev->dev.dma_mask == NULL)
815  dev_warn(&pdev->dev, "no dma mask\n");
816  else
817  hw->usedma = 1;
818  }
819  }
820  }
821 
822  r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
823  if (!r) {
824  dev_err(&pdev->dev, "no mmio resource\n");
825  err = -ENODEV;
826  goto err_no_iores;
827  }
828 
829  hw->ioarea = request_mem_region(r->start, sizeof(psc_spi_t),
830  pdev->name);
831  if (!hw->ioarea) {
832  dev_err(&pdev->dev, "Cannot reserve iomem region\n");
833  err = -ENXIO;
834  goto err_no_iores;
835  }
836 
837  hw->regs = (psc_spi_t __iomem *)ioremap(r->start, sizeof(psc_spi_t));
838  if (!hw->regs) {
839  dev_err(&pdev->dev, "cannot ioremap\n");
840  err = -ENXIO;
841  goto err_ioremap;
842  }
843 
844  platform_set_drvdata(pdev, hw);
845 
846  init_completion(&hw->master_done);
847 
848  hw->bitbang.master = hw->master;
849  hw->bitbang.setup_transfer = au1550_spi_setupxfer;
850  hw->bitbang.chipselect = au1550_spi_chipsel;
851  hw->bitbang.master->setup = au1550_spi_setup;
852  hw->bitbang.txrx_bufs = au1550_spi_txrx_bufs;
853 
854  if (hw->usedma) {
855  hw->dma_tx_ch = au1xxx_dbdma_chan_alloc(ddma_memid,
856  hw->dma_tx_id, NULL, (void *)hw);
857  if (hw->dma_tx_ch == 0) {
858  dev_err(&pdev->dev,
859  "Cannot allocate tx dma channel\n");
860  err = -ENXIO;
861  goto err_no_txdma;
862  }
863 
864  if (au1xxx_dbdma_ring_alloc(hw->dma_tx_ch,
865    AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
866  dev_err(&pdev->dev,
867  "Cannot allocate tx dma descriptors\n");
868  err = -ENXIO;
869  goto err_no_txdma_descr;
870  }
871 
872 
873  hw->dma_rx_ch = au1xxx_dbdma_chan_alloc(hw->dma_rx_id,
874    ddma_memid, NULL, (void *)hw);
875  if (hw->dma_rx_ch == 0) {
876  dev_err(&pdev->dev,
877  "Cannot allocate rx dma channel\n");
878  err = -ENXIO;
879  goto err_no_rxdma;
880  }
881 
882  if (au1xxx_dbdma_ring_alloc(hw->dma_rx_ch,
883    AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
884  dev_err(&pdev->dev,
885  "Cannot allocate rx dma descriptors\n");
886  err = -ENXIO;
887  goto err_no_rxdma_descr;
888  }
889 
890  err = au1550_spi_dma_rxtmp_alloc(hw,
891    AU1550_SPI_DMA_RXTMP_MINSIZE);
892  if (err < 0) {
893  dev_err(&pdev->dev,
894  "Cannot allocate initial rx dma tmp buffer\n");
895  goto err_dma_rxtmp_alloc;
896  }
897  }
898 
899  au1550_spi_bits_handlers_set(hw, 8);
900 
901  err = request_irq(hw->irq, au1550_spi_irq, 0, pdev->name, hw);
902  if (err) {
903  dev_err(&pdev->dev, "Cannot claim IRQ\n");
904  goto err_no_irq;
905  }
906 
907  master->bus_num = pdev->id;
908  master->num_chipselect = hw->pdata->num_chipselect;
909 
910  /*
911  * precompute valid range for spi freq - from au1550 datasheet:
912  * psc_tempclk = psc_mainclk / (2 << DIV)
913  * spiclk = psc_tempclk / (2 * (BRG + 1))
914  * BRG valid range is 4..63
915  * DIV valid range is 0..3
916  * round the min and max frequencies to values that would still
917  * produce valid brg and div
918  */
919  {
920  int min_div = (2 << 0) * (2 * (4 + 1));
921  int max_div = (2 << 3) * (2 * (63 + 1));
922  hw->freq_max = hw->pdata->mainclk_hz / min_div;
923  hw->freq_min = hw->pdata->mainclk_hz / (max_div + 1) + 1;
924  }
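/*
 * Worked example (48 MHz mainclk assumed for illustration only): min_div = 20
 * and max_div = 2048, so freq_max = 48000000 / 20 = 2400000 Hz and
 * freq_min = 48000000 / 2049 + 1 = 23427 Hz.
 */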
925 
926  au1550_spi_setup_psc_as_spi(hw);
927 
928  err = spi_bitbang_start(&hw->bitbang);
929  if (err) {
930  dev_err(&pdev->dev, "Failed to register SPI master\n");
931  goto err_register;
932  }
933 
934  dev_info(&pdev->dev,
935  "spi master registered: bus_num=%d num_chipselect=%d\n",
936  master->bus_num, master->num_chipselect);
937 
938  return 0;
939 
940 err_register:
941  free_irq(hw->irq, hw);
942 
943 err_no_irq:
944  au1550_spi_dma_rxtmp_free(hw);
945 
946 err_dma_rxtmp_alloc:
947 err_no_rxdma_descr:
948  if (hw->usedma)
949  au1xxx_dbdma_chan_free(hw->dma_rx_ch);
950 
951 err_no_rxdma:
952 err_no_txdma_descr:
953  if (hw->usedma)
954  au1xxx_dbdma_chan_free(hw->dma_tx_ch);
955 
956 err_no_txdma:
957  iounmap((void __iomem *)hw->regs);
958 
959 err_ioremap:
960  release_resource(hw->ioarea);
961  kfree(hw->ioarea);
962 
963 err_no_iores:
964 err_no_pdata:
965  spi_master_put(hw->master);
966 
967 err_nomem:
968  return err;
969 }
970 
971 static int __exit au1550_spi_remove(struct platform_device *pdev)
972 {
973  struct au1550_spi *hw = platform_get_drvdata(pdev);
974 
975  dev_info(&pdev->dev, "spi master remove: bus_num=%d\n",
976  hw->master->bus_num);
977 
978  spi_bitbang_stop(&hw->bitbang);
979  free_irq(hw->irq, hw);
980  iounmap((void __iomem *)hw->regs);
981  release_resource(hw->ioarea);
982  kfree(hw->ioarea);
983 
984  if (hw->usedma) {
985  au1550_spi_dma_rxtmp_free(hw);
986  au1xxx_dbdma_chan_free(hw->dma_rx_ch);
987  au1xxx_dbdma_chan_free(hw->dma_tx_ch);
988  }
989 
990  platform_set_drvdata(pdev, NULL);
991 
992  spi_master_put(hw->master);
993  return 0;
994 }
995 
996 /* work with hotplug and coldplug */
997 MODULE_ALIAS("platform:au1550-spi");
998 
999 static struct platform_driver au1550_spi_drv = {
1000  .remove = __exit_p(au1550_spi_remove),
1001  .driver = {
1002  .name = "au1550-spi",
1003  .owner = THIS_MODULE,
1004  },
1005 };
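/*
 * Note: there is deliberately no .probe member here; au1550_spi_probe() is
 * __init and is handed to platform_driver_probe() in au1550_spi_init(), so
 * the probe code may be discarded after boot and devices cannot be bound to
 * the driver later.
 */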
1006 
1007 static int __init au1550_spi_init(void)
1008 {
1009  /*
1010  * create memory device with 8 bits dev_devwidth
1011  * needed for proper byte ordering to spi fifo
1012  */
1013  if (usedma) {
1014  ddma_memid = au1xxx_ddma_add_device(&au1550_spi_mem_dbdev);
1015  if (!ddma_memid)
1016  printk(KERN_ERR "au1550-spi: cannot add memory"
1017    " dbdma device\n");
1018  }
1019  return platform_driver_probe(&au1550_spi_drv, au1550_spi_probe);
1020 }
1021 module_init(au1550_spi_init);
1022 
1023 static void __exit au1550_spi_exit(void)
1024 {
1025  if (usedma && ddma_memid)
1026  au1xxx_ddma_del_device(ddma_memid);
1027  platform_driver_unregister(&au1550_spi_drv);
1028 }
1029 module_exit(au1550_spi_exit);
1030 
1031 MODULE_DESCRIPTION("Au1550 PSC SPI Driver");
1032 MODULE_AUTHOR("Jan Nikitenko <[email protected]>");
1033 MODULE_LICENSE("GPL");