/*
 * Copyright (C) 2009 Samsung Electronics Ltd.
 *	Jaswinder Singh <jassi.brar@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
20 #include <linux/init.h>
21 #include <linux/module.h>
22 #include <linux/workqueue.h>
23 #include <linux/interrupt.h>
24 #include <linux/delay.h>
25 #include <linux/clk.h>
26 #include <linux/dma-mapping.h>
27 #include <linux/dmaengine.h>
28 #include <linux/platform_device.h>
29 #include <linux/pm_runtime.h>
30 #include <linux/spi/spi.h>
31 #include <linux/gpio.h>
33 #include <linux/of_gpio.h>
35 #include <linux/platform_data/spi-s3c64xx.h>
41 #define MAX_SPI_PORTS 3
42 #define S3C64XX_SPI_QUIRK_POLL (1 << 0)
44 /* Registers and bit-fields */
46 #define S3C64XX_SPI_CH_CFG 0x00
47 #define S3C64XX_SPI_CLK_CFG 0x04
48 #define S3C64XX_SPI_MODE_CFG 0x08
49 #define S3C64XX_SPI_SLAVE_SEL 0x0C
50 #define S3C64XX_SPI_INT_EN 0x10
51 #define S3C64XX_SPI_STATUS 0x14
52 #define S3C64XX_SPI_TX_DATA 0x18
53 #define S3C64XX_SPI_RX_DATA 0x1C
54 #define S3C64XX_SPI_PACKET_CNT 0x20
55 #define S3C64XX_SPI_PENDING_CLR 0x24
56 #define S3C64XX_SPI_SWAP_CFG 0x28
57 #define S3C64XX_SPI_FB_CLK 0x2C
59 #define S3C64XX_SPI_CH_HS_EN (1<<6) /* High Speed Enable */
60 #define S3C64XX_SPI_CH_SW_RST (1<<5)
61 #define S3C64XX_SPI_CH_SLAVE (1<<4)
62 #define S3C64XX_SPI_CPOL_L (1<<3)
63 #define S3C64XX_SPI_CPHA_B (1<<2)
64 #define S3C64XX_SPI_CH_RXCH_ON (1<<1)
65 #define S3C64XX_SPI_CH_TXCH_ON (1<<0)
67 #define S3C64XX_SPI_CLKSEL_SRCMSK (3<<9)
68 #define S3C64XX_SPI_CLKSEL_SRCSHFT 9
69 #define S3C64XX_SPI_ENCLK_ENABLE (1<<8)
70 #define S3C64XX_SPI_PSR_MASK 0xff
72 #define S3C64XX_SPI_MODE_CH_TSZ_BYTE (0<<29)
73 #define S3C64XX_SPI_MODE_CH_TSZ_HALFWORD (1<<29)
74 #define S3C64XX_SPI_MODE_CH_TSZ_WORD (2<<29)
75 #define S3C64XX_SPI_MODE_CH_TSZ_MASK (3<<29)
76 #define S3C64XX_SPI_MODE_BUS_TSZ_BYTE (0<<17)
77 #define S3C64XX_SPI_MODE_BUS_TSZ_HALFWORD (1<<17)
78 #define S3C64XX_SPI_MODE_BUS_TSZ_WORD (2<<17)
79 #define S3C64XX_SPI_MODE_BUS_TSZ_MASK (3<<17)
80 #define S3C64XX_SPI_MODE_RXDMA_ON (1<<2)
81 #define S3C64XX_SPI_MODE_TXDMA_ON (1<<1)
82 #define S3C64XX_SPI_MODE_4BURST (1<<0)
84 #define S3C64XX_SPI_SLAVE_AUTO (1<<1)
85 #define S3C64XX_SPI_SLAVE_SIG_INACT (1<<0)
87 #define S3C64XX_SPI_INT_TRAILING_EN (1<<6)
88 #define S3C64XX_SPI_INT_RX_OVERRUN_EN (1<<5)
89 #define S3C64XX_SPI_INT_RX_UNDERRUN_EN (1<<4)
90 #define S3C64XX_SPI_INT_TX_OVERRUN_EN (1<<3)
91 #define S3C64XX_SPI_INT_TX_UNDERRUN_EN (1<<2)
92 #define S3C64XX_SPI_INT_RX_FIFORDY_EN (1<<1)
93 #define S3C64XX_SPI_INT_TX_FIFORDY_EN (1<<0)
95 #define S3C64XX_SPI_ST_RX_OVERRUN_ERR (1<<5)
96 #define S3C64XX_SPI_ST_RX_UNDERRUN_ERR (1<<4)
97 #define S3C64XX_SPI_ST_TX_OVERRUN_ERR (1<<3)
98 #define S3C64XX_SPI_ST_TX_UNDERRUN_ERR (1<<2)
99 #define S3C64XX_SPI_ST_RX_FIFORDY (1<<1)
100 #define S3C64XX_SPI_ST_TX_FIFORDY (1<<0)
102 #define S3C64XX_SPI_PACKET_CNT_EN (1<<16)
104 #define S3C64XX_SPI_PND_TX_UNDERRUN_CLR (1<<4)
105 #define S3C64XX_SPI_PND_TX_OVERRUN_CLR (1<<3)
106 #define S3C64XX_SPI_PND_RX_UNDERRUN_CLR (1<<2)
107 #define S3C64XX_SPI_PND_RX_OVERRUN_CLR (1<<1)
108 #define S3C64XX_SPI_PND_TRAILING_CLR (1<<0)
110 #define S3C64XX_SPI_SWAP_RX_HALF_WORD (1<<7)
111 #define S3C64XX_SPI_SWAP_RX_BYTE (1<<6)
112 #define S3C64XX_SPI_SWAP_RX_BIT (1<<5)
113 #define S3C64XX_SPI_SWAP_RX_EN (1<<4)
114 #define S3C64XX_SPI_SWAP_TX_HALF_WORD (1<<3)
115 #define S3C64XX_SPI_SWAP_TX_BYTE (1<<2)
116 #define S3C64XX_SPI_SWAP_TX_BIT (1<<1)
117 #define S3C64XX_SPI_SWAP_TX_EN (1<<0)
119 #define S3C64XX_SPI_FBCLK_MSK (3<<0)
121 #define FIFO_LVL_MASK(i) ((i)->port_conf->fifo_lvl_mask[i->port_id])
122 #define S3C64XX_SPI_ST_TX_DONE(v, i) (((v) & \
123 (1 << (i)->port_conf->tx_st_done)) ? 1 : 0)
124 #define TX_FIFO_LVL(v, i) (((v) >> 6) & FIFO_LVL_MASK(i))
125 #define RX_FIFO_LVL(v, i) (((v) >> (i)->port_conf->rx_lvl_offset) & \
128 #define S3C64XX_SPI_MAX_TRAILCNT 0x3ff
129 #define S3C64XX_SPI_TRAILCNT_OFF 19
131 #define S3C64XX_SPI_TRAILCNT S3C64XX_SPI_MAX_TRAILCNT
133 #define msecs_to_loops(t) (loops_per_jiffy / 1000 * HZ * t)
134 #define is_polling(x) (x->port_conf->quirks & S3C64XX_SPI_QUIRK_POLL)
136 #define RXBUSY (1<<2)
137 #define TXBUSY (1<<3)
139 struct s3c64xx_spi_dma_data
{
141 enum dma_transfer_direction direction
;
146 * struct s3c64xx_spi_info - SPI Controller hardware info
147 * @fifo_lvl_mask: Bit-mask for {TX|RX}_FIFO_LVL bits in SPI_STATUS register.
148 * @rx_lvl_offset: Bit offset of RX_FIFO_LVL bits in SPI_STATUS regiter.
149 * @tx_st_done: Bit offset of TX_DONE bit in SPI_STATUS regiter.
150 * @high_speed: True, if the controller supports HIGH_SPEED_EN bit.
151 * @clk_from_cmu: True, if the controller does not include a clock mux and
154 * The Samsung s3c64xx SPI controller are used on various Samsung SoC's but
155 * differ in some aspects such as the size of the fifo and spi bus clock
156 * setup. Such differences are specified to the driver using this structure
157 * which is provided as driver data to the driver.
159 struct s3c64xx_spi_port_config
{
160 int fifo_lvl_mask
[MAX_SPI_PORTS
];
169 * struct s3c64xx_spi_driver_data - Runtime info holder for SPI driver.
170 * @clk: Pointer to the spi clock.
171 * @src_clk: Pointer to the clock used to generate SPI signals.
172 * @master: Pointer to the SPI Protocol master.
173 * @cntrlr_info: Platform specific data for the controller this driver manages.
174 * @tgl_spi: Pointer to the last CS left untoggled by the cs_change hint.
175 * @lock: Controller specific lock.
176 * @state: Set of FLAGS to indicate status.
177 * @rx_dmach: Controller's DMA channel for Rx.
178 * @tx_dmach: Controller's DMA channel for Tx.
179 * @sfr_start: BUS address of SPI controller regs.
180 * @regs: Pointer to ioremap'ed controller registers.
182 * @xfer_completion: To indicate completion of xfer task.
183 * @cur_mode: Stores the active configuration of the controller.
184 * @cur_bpw: Stores the active bits per word settings.
185 * @cur_speed: Stores the active xfer clock speed.
187 struct s3c64xx_spi_driver_data
{
191 struct platform_device
*pdev
;
192 struct spi_master
*master
;
193 struct s3c64xx_spi_info
*cntrlr_info
;
194 struct spi_device
*tgl_spi
;
196 unsigned long sfr_start
;
197 struct completion xfer_completion
;
199 unsigned cur_mode
, cur_bpw
;
201 struct s3c64xx_spi_dma_data rx_dma
;
202 struct s3c64xx_spi_dma_data tx_dma
;
203 #ifdef CONFIG_S3C_DMA
204 struct samsung_dma_ops
*ops
;
206 struct s3c64xx_spi_port_config
*port_conf
;
207 unsigned int port_id
;
211 static void flush_fifo(struct s3c64xx_spi_driver_data
*sdd
)
213 void __iomem
*regs
= sdd
->regs
;
217 writel(0, regs
+ S3C64XX_SPI_PACKET_CNT
);
219 val
= readl(regs
+ S3C64XX_SPI_CH_CFG
);
220 val
&= ~(S3C64XX_SPI_CH_RXCH_ON
| S3C64XX_SPI_CH_TXCH_ON
);
221 writel(val
, regs
+ S3C64XX_SPI_CH_CFG
);
223 val
= readl(regs
+ S3C64XX_SPI_CH_CFG
);
224 val
|= S3C64XX_SPI_CH_SW_RST
;
225 val
&= ~S3C64XX_SPI_CH_HS_EN
;
226 writel(val
, regs
+ S3C64XX_SPI_CH_CFG
);
229 loops
= msecs_to_loops(1);
231 val
= readl(regs
+ S3C64XX_SPI_STATUS
);
232 } while (TX_FIFO_LVL(val
, sdd
) && loops
--);
235 dev_warn(&sdd
->pdev
->dev
, "Timed out flushing TX FIFO\n");
238 loops
= msecs_to_loops(1);
240 val
= readl(regs
+ S3C64XX_SPI_STATUS
);
241 if (RX_FIFO_LVL(val
, sdd
))
242 readl(regs
+ S3C64XX_SPI_RX_DATA
);
248 dev_warn(&sdd
->pdev
->dev
, "Timed out flushing RX FIFO\n");
250 val
= readl(regs
+ S3C64XX_SPI_CH_CFG
);
251 val
&= ~S3C64XX_SPI_CH_SW_RST
;
252 writel(val
, regs
+ S3C64XX_SPI_CH_CFG
);
254 val
= readl(regs
+ S3C64XX_SPI_MODE_CFG
);
255 val
&= ~(S3C64XX_SPI_MODE_TXDMA_ON
| S3C64XX_SPI_MODE_RXDMA_ON
);
256 writel(val
, regs
+ S3C64XX_SPI_MODE_CFG
);
259 static void s3c64xx_spi_dmacb(void *data
)
261 struct s3c64xx_spi_driver_data
*sdd
;
262 struct s3c64xx_spi_dma_data
*dma
= data
;
265 if (dma
->direction
== DMA_DEV_TO_MEM
)
266 sdd
= container_of(data
,
267 struct s3c64xx_spi_driver_data
, rx_dma
);
269 sdd
= container_of(data
,
270 struct s3c64xx_spi_driver_data
, tx_dma
);
272 spin_lock_irqsave(&sdd
->lock
, flags
);
274 if (dma
->direction
== DMA_DEV_TO_MEM
) {
275 sdd
->state
&= ~RXBUSY
;
276 if (!(sdd
->state
& TXBUSY
))
277 complete(&sdd
->xfer_completion
);
279 sdd
->state
&= ~TXBUSY
;
280 if (!(sdd
->state
& RXBUSY
))
281 complete(&sdd
->xfer_completion
);
284 spin_unlock_irqrestore(&sdd
->lock
, flags
);
287 #ifdef CONFIG_S3C_DMA
288 /* FIXME: remove this section once arch/arm/mach-s3c64xx uses dmaengine */
290 static struct s3c2410_dma_client s3c64xx_spi_dma_client
= {
291 .name
= "samsung-spi-dma",
294 static void prepare_dma(struct s3c64xx_spi_dma_data
*dma
,
295 unsigned len
, dma_addr_t buf
)
297 struct s3c64xx_spi_driver_data
*sdd
;
298 struct samsung_dma_prep info
;
299 struct samsung_dma_config config
;
301 if (dma
->direction
== DMA_DEV_TO_MEM
) {
302 sdd
= container_of((void *)dma
,
303 struct s3c64xx_spi_driver_data
, rx_dma
);
304 config
.direction
= sdd
->rx_dma
.direction
;
305 config
.fifo
= sdd
->sfr_start
+ S3C64XX_SPI_RX_DATA
;
306 config
.width
= sdd
->cur_bpw
/ 8;
307 sdd
->ops
->config((enum dma_ch
)sdd
->rx_dma
.ch
, &config
);
309 sdd
= container_of((void *)dma
,
310 struct s3c64xx_spi_driver_data
, tx_dma
);
311 config
.direction
= sdd
->tx_dma
.direction
;
312 config
.fifo
= sdd
->sfr_start
+ S3C64XX_SPI_TX_DATA
;
313 config
.width
= sdd
->cur_bpw
/ 8;
314 sdd
->ops
->config((enum dma_ch
)sdd
->tx_dma
.ch
, &config
);
317 info
.cap
= DMA_SLAVE
;
319 info
.fp
= s3c64xx_spi_dmacb
;
321 info
.direction
= dma
->direction
;
324 sdd
->ops
->prepare((enum dma_ch
)dma
->ch
, &info
);
325 sdd
->ops
->trigger((enum dma_ch
)dma
->ch
);
328 static int acquire_dma(struct s3c64xx_spi_driver_data
*sdd
)
330 struct samsung_dma_req req
;
331 struct device
*dev
= &sdd
->pdev
->dev
;
333 sdd
->ops
= samsung_dma_get_ops();
336 req
.client
= &s3c64xx_spi_dma_client
;
338 sdd
->rx_dma
.ch
= (struct dma_chan
*)(unsigned long)sdd
->ops
->request(
339 sdd
->rx_dma
.dmach
, &req
, dev
, "rx");
340 sdd
->tx_dma
.ch
= (struct dma_chan
*)(unsigned long)sdd
->ops
->request(
341 sdd
->tx_dma
.dmach
, &req
, dev
, "tx");
/*
 * s3c64xx_spi_prepare_transfer - spi_master hook (legacy DMA build).
 * @spi: the spi_master (not a spi_device, despite the parameter name)
 *
 * Retries channel acquisition until it succeeds, unless the port is
 * polling-only. Returns 0.
 */
static int s3c64xx_spi_prepare_transfer(struct spi_master *spi)
{
	struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi);

	/*
	 * If DMA resource was not available during
	 * probe, no need to continue with dma requests
	 * else Acquire DMA channels
	 */
	while (!is_polling(sdd) && !acquire_dma(sdd))
		usleep_range(10000, 11000);

	return 0;
}
361 static int s3c64xx_spi_unprepare_transfer(struct spi_master
*spi
)
363 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(spi
);
365 /* Free DMA channels */
366 if (!is_polling(sdd
)) {
367 sdd
->ops
->release((enum dma_ch
)sdd
->rx_dma
.ch
,
368 &s3c64xx_spi_dma_client
);
369 sdd
->ops
->release((enum dma_ch
)sdd
->tx_dma
.ch
,
370 &s3c64xx_spi_dma_client
);
376 static void s3c64xx_spi_dma_stop(struct s3c64xx_spi_driver_data
*sdd
,
377 struct s3c64xx_spi_dma_data
*dma
)
379 sdd
->ops
->stop((enum dma_ch
)dma
->ch
);
383 static void prepare_dma(struct s3c64xx_spi_dma_data
*dma
,
384 unsigned len
, dma_addr_t buf
)
386 struct s3c64xx_spi_driver_data
*sdd
;
387 struct dma_slave_config config
;
388 struct dma_async_tx_descriptor
*desc
;
390 memset(&config
, 0, sizeof(config
));
392 if (dma
->direction
== DMA_DEV_TO_MEM
) {
393 sdd
= container_of((void *)dma
,
394 struct s3c64xx_spi_driver_data
, rx_dma
);
395 config
.direction
= dma
->direction
;
396 config
.src_addr
= sdd
->sfr_start
+ S3C64XX_SPI_RX_DATA
;
397 config
.src_addr_width
= sdd
->cur_bpw
/ 8;
398 config
.src_maxburst
= 1;
399 dmaengine_slave_config(dma
->ch
, &config
);
401 sdd
= container_of((void *)dma
,
402 struct s3c64xx_spi_driver_data
, tx_dma
);
403 config
.direction
= dma
->direction
;
404 config
.dst_addr
= sdd
->sfr_start
+ S3C64XX_SPI_TX_DATA
;
405 config
.dst_addr_width
= sdd
->cur_bpw
/ 8;
406 config
.dst_maxburst
= 1;
407 dmaengine_slave_config(dma
->ch
, &config
);
410 desc
= dmaengine_prep_slave_single(dma
->ch
, buf
, len
,
411 dma
->direction
, DMA_PREP_INTERRUPT
);
413 desc
->callback
= s3c64xx_spi_dmacb
;
414 desc
->callback_param
= dma
;
416 dmaengine_submit(desc
);
417 dma_async_issue_pending(dma
->ch
);
420 static int s3c64xx_spi_prepare_transfer(struct spi_master
*spi
)
422 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(spi
);
423 dma_filter_fn filter
= sdd
->cntrlr_info
->filter
;
424 struct device
*dev
= &sdd
->pdev
->dev
;
428 if (!is_polling(sdd
)) {
430 dma_cap_set(DMA_SLAVE
, mask
);
432 /* Acquire DMA channels */
433 sdd
->rx_dma
.ch
= dma_request_slave_channel_compat(mask
, filter
,
434 (void *)sdd
->rx_dma
.dmach
, dev
, "rx");
435 if (!sdd
->rx_dma
.ch
) {
436 dev_err(dev
, "Failed to get RX DMA channel\n");
441 sdd
->tx_dma
.ch
= dma_request_slave_channel_compat(mask
, filter
,
442 (void *)sdd
->tx_dma
.dmach
, dev
, "tx");
443 if (!sdd
->tx_dma
.ch
) {
444 dev_err(dev
, "Failed to get TX DMA channel\n");
450 ret
= pm_runtime_get_sync(&sdd
->pdev
->dev
);
452 dev_err(dev
, "Failed to enable device: %d\n", ret
);
459 dma_release_channel(sdd
->tx_dma
.ch
);
461 dma_release_channel(sdd
->rx_dma
.ch
);
466 static int s3c64xx_spi_unprepare_transfer(struct spi_master
*spi
)
468 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(spi
);
470 /* Free DMA channels */
471 if (!is_polling(sdd
)) {
472 dma_release_channel(sdd
->rx_dma
.ch
);
473 dma_release_channel(sdd
->tx_dma
.ch
);
476 pm_runtime_put(&sdd
->pdev
->dev
);
480 static void s3c64xx_spi_dma_stop(struct s3c64xx_spi_driver_data
*sdd
,
481 struct s3c64xx_spi_dma_data
*dma
)
483 dmaengine_terminate_all(dma
->ch
);
487 static void enable_datapath(struct s3c64xx_spi_driver_data
*sdd
,
488 struct spi_device
*spi
,
489 struct spi_transfer
*xfer
, int dma_mode
)
491 void __iomem
*regs
= sdd
->regs
;
494 modecfg
= readl(regs
+ S3C64XX_SPI_MODE_CFG
);
495 modecfg
&= ~(S3C64XX_SPI_MODE_TXDMA_ON
| S3C64XX_SPI_MODE_RXDMA_ON
);
497 chcfg
= readl(regs
+ S3C64XX_SPI_CH_CFG
);
498 chcfg
&= ~S3C64XX_SPI_CH_TXCH_ON
;
501 chcfg
&= ~S3C64XX_SPI_CH_RXCH_ON
;
503 /* Always shift in data in FIFO, even if xfer is Tx only,
504 * this helps setting PCKT_CNT value for generating clocks
507 chcfg
|= S3C64XX_SPI_CH_RXCH_ON
;
508 writel(((xfer
->len
* 8 / sdd
->cur_bpw
) & 0xffff)
509 | S3C64XX_SPI_PACKET_CNT_EN
,
510 regs
+ S3C64XX_SPI_PACKET_CNT
);
513 if (xfer
->tx_buf
!= NULL
) {
514 sdd
->state
|= TXBUSY
;
515 chcfg
|= S3C64XX_SPI_CH_TXCH_ON
;
517 modecfg
|= S3C64XX_SPI_MODE_TXDMA_ON
;
518 prepare_dma(&sdd
->tx_dma
, xfer
->len
, xfer
->tx_dma
);
520 switch (sdd
->cur_bpw
) {
522 iowrite32_rep(regs
+ S3C64XX_SPI_TX_DATA
,
523 xfer
->tx_buf
, xfer
->len
/ 4);
526 iowrite16_rep(regs
+ S3C64XX_SPI_TX_DATA
,
527 xfer
->tx_buf
, xfer
->len
/ 2);
530 iowrite8_rep(regs
+ S3C64XX_SPI_TX_DATA
,
531 xfer
->tx_buf
, xfer
->len
);
537 if (xfer
->rx_buf
!= NULL
) {
538 sdd
->state
|= RXBUSY
;
540 if (sdd
->port_conf
->high_speed
&& sdd
->cur_speed
>= 30000000UL
541 && !(sdd
->cur_mode
& SPI_CPHA
))
542 chcfg
|= S3C64XX_SPI_CH_HS_EN
;
545 modecfg
|= S3C64XX_SPI_MODE_RXDMA_ON
;
546 chcfg
|= S3C64XX_SPI_CH_RXCH_ON
;
547 writel(((xfer
->len
* 8 / sdd
->cur_bpw
) & 0xffff)
548 | S3C64XX_SPI_PACKET_CNT_EN
,
549 regs
+ S3C64XX_SPI_PACKET_CNT
);
550 prepare_dma(&sdd
->rx_dma
, xfer
->len
, xfer
->rx_dma
);
554 writel(modecfg
, regs
+ S3C64XX_SPI_MODE_CFG
);
555 writel(chcfg
, regs
+ S3C64XX_SPI_CH_CFG
);
558 static inline void enable_cs(struct s3c64xx_spi_driver_data
*sdd
,
559 struct spi_device
*spi
)
561 if (sdd
->tgl_spi
!= NULL
) { /* If last device toggled after mssg */
562 if (sdd
->tgl_spi
!= spi
) { /* if last mssg on diff device */
563 /* Deselect the last toggled device */
564 if (spi
->cs_gpio
>= 0)
565 gpio_set_value(spi
->cs_gpio
,
566 spi
->mode
& SPI_CS_HIGH
? 0 : 1);
571 if (spi
->cs_gpio
>= 0)
572 gpio_set_value(spi
->cs_gpio
, spi
->mode
& SPI_CS_HIGH
? 1 : 0);
575 static u32
s3c64xx_spi_wait_for_timeout(struct s3c64xx_spi_driver_data
*sdd
,
578 void __iomem
*regs
= sdd
->regs
;
579 unsigned long val
= 1;
582 /* max fifo depth available */
583 u32 max_fifo
= (FIFO_LVL_MASK(sdd
) >> 1) + 1;
586 val
= msecs_to_loops(timeout_ms
);
589 status
= readl(regs
+ S3C64XX_SPI_STATUS
);
590 } while (RX_FIFO_LVL(status
, sdd
) < max_fifo
&& --val
);
592 /* return the actual received data length */
593 return RX_FIFO_LVL(status
, sdd
);
596 static int wait_for_xfer(struct s3c64xx_spi_driver_data
*sdd
,
597 struct spi_transfer
*xfer
, int dma_mode
)
599 void __iomem
*regs
= sdd
->regs
;
603 /* millisecs to xfer 'len' bytes @ 'cur_speed' */
604 ms
= xfer
->len
* 8 * 1000 / sdd
->cur_speed
;
605 ms
+= 10; /* some tolerance */
608 val
= msecs_to_jiffies(ms
) + 10;
609 val
= wait_for_completion_timeout(&sdd
->xfer_completion
, val
);
612 val
= msecs_to_loops(ms
);
614 status
= readl(regs
+ S3C64XX_SPI_STATUS
);
615 } while (RX_FIFO_LVL(status
, sdd
) < xfer
->len
&& --val
);
622 * If the previous xfer was completed within timeout, then
623 * proceed further else return -EIO.
624 * DmaTx returns after simply writing data in the FIFO,
625 * w/o waiting for real transmission on the bus to finish.
626 * DmaRx returns only after Dma read data from FIFO which
627 * needs bus transmission to finish, so we don't worry if
628 * Xfer involved Rx(with or without Tx).
630 if (val
&& !xfer
->rx_buf
) {
631 val
= msecs_to_loops(10);
632 status
= readl(regs
+ S3C64XX_SPI_STATUS
);
633 while ((TX_FIFO_LVL(status
, sdd
)
634 || !S3C64XX_SPI_ST_TX_DONE(status
, sdd
))
637 status
= readl(regs
+ S3C64XX_SPI_STATUS
);
642 /* If timed out while checking rx/tx status return error */
650 /* If it was only Tx */
652 sdd
->state
&= ~TXBUSY
;
657 * If the receive length is bigger than the controller fifo
658 * size, calculate the loops and read the fifo as many times.
659 * loops = length / max fifo size (calculated by using the
661 * For any size less than the fifo size the below code is
662 * executed atleast once.
664 loops
= xfer
->len
/ ((FIFO_LVL_MASK(sdd
) >> 1) + 1);
667 /* wait for data to be received in the fifo */
668 cpy_len
= s3c64xx_spi_wait_for_timeout(sdd
,
671 switch (sdd
->cur_bpw
) {
673 ioread32_rep(regs
+ S3C64XX_SPI_RX_DATA
,
677 ioread16_rep(regs
+ S3C64XX_SPI_RX_DATA
,
681 ioread8_rep(regs
+ S3C64XX_SPI_RX_DATA
,
688 sdd
->state
&= ~RXBUSY
;
694 static inline void disable_cs(struct s3c64xx_spi_driver_data
*sdd
,
695 struct spi_device
*spi
)
697 if (sdd
->tgl_spi
== spi
)
700 if (spi
->cs_gpio
>= 0)
701 gpio_set_value(spi
->cs_gpio
, spi
->mode
& SPI_CS_HIGH
? 0 : 1);
704 static void s3c64xx_spi_config(struct s3c64xx_spi_driver_data
*sdd
)
706 void __iomem
*regs
= sdd
->regs
;
710 if (sdd
->port_conf
->clk_from_cmu
) {
711 clk_disable_unprepare(sdd
->src_clk
);
713 val
= readl(regs
+ S3C64XX_SPI_CLK_CFG
);
714 val
&= ~S3C64XX_SPI_ENCLK_ENABLE
;
715 writel(val
, regs
+ S3C64XX_SPI_CLK_CFG
);
718 /* Set Polarity and Phase */
719 val
= readl(regs
+ S3C64XX_SPI_CH_CFG
);
720 val
&= ~(S3C64XX_SPI_CH_SLAVE
|
724 if (sdd
->cur_mode
& SPI_CPOL
)
725 val
|= S3C64XX_SPI_CPOL_L
;
727 if (sdd
->cur_mode
& SPI_CPHA
)
728 val
|= S3C64XX_SPI_CPHA_B
;
730 writel(val
, regs
+ S3C64XX_SPI_CH_CFG
);
732 /* Set Channel & DMA Mode */
733 val
= readl(regs
+ S3C64XX_SPI_MODE_CFG
);
734 val
&= ~(S3C64XX_SPI_MODE_BUS_TSZ_MASK
735 | S3C64XX_SPI_MODE_CH_TSZ_MASK
);
737 switch (sdd
->cur_bpw
) {
739 val
|= S3C64XX_SPI_MODE_BUS_TSZ_WORD
;
740 val
|= S3C64XX_SPI_MODE_CH_TSZ_WORD
;
743 val
|= S3C64XX_SPI_MODE_BUS_TSZ_HALFWORD
;
744 val
|= S3C64XX_SPI_MODE_CH_TSZ_HALFWORD
;
747 val
|= S3C64XX_SPI_MODE_BUS_TSZ_BYTE
;
748 val
|= S3C64XX_SPI_MODE_CH_TSZ_BYTE
;
752 writel(val
, regs
+ S3C64XX_SPI_MODE_CFG
);
754 if (sdd
->port_conf
->clk_from_cmu
) {
755 /* Configure Clock */
756 /* There is half-multiplier before the SPI */
757 clk_set_rate(sdd
->src_clk
, sdd
->cur_speed
* 2);
759 clk_prepare_enable(sdd
->src_clk
);
761 /* Configure Clock */
762 val
= readl(regs
+ S3C64XX_SPI_CLK_CFG
);
763 val
&= ~S3C64XX_SPI_PSR_MASK
;
764 val
|= ((clk_get_rate(sdd
->src_clk
) / sdd
->cur_speed
/ 2 - 1)
765 & S3C64XX_SPI_PSR_MASK
);
766 writel(val
, regs
+ S3C64XX_SPI_CLK_CFG
);
769 val
= readl(regs
+ S3C64XX_SPI_CLK_CFG
);
770 val
|= S3C64XX_SPI_ENCLK_ENABLE
;
771 writel(val
, regs
+ S3C64XX_SPI_CLK_CFG
);
775 #define XFER_DMAADDR_INVALID DMA_BIT_MASK(32)
777 static int s3c64xx_spi_map_mssg(struct s3c64xx_spi_driver_data
*sdd
,
778 struct spi_message
*msg
)
780 struct device
*dev
= &sdd
->pdev
->dev
;
781 struct spi_transfer
*xfer
;
783 if (is_polling(sdd
) || msg
->is_dma_mapped
)
786 /* First mark all xfer unmapped */
787 list_for_each_entry(xfer
, &msg
->transfers
, transfer_list
) {
788 xfer
->rx_dma
= XFER_DMAADDR_INVALID
;
789 xfer
->tx_dma
= XFER_DMAADDR_INVALID
;
792 /* Map until end or first fail */
793 list_for_each_entry(xfer
, &msg
->transfers
, transfer_list
) {
795 if (xfer
->len
<= ((FIFO_LVL_MASK(sdd
) >> 1) + 1))
798 if (xfer
->tx_buf
!= NULL
) {
799 xfer
->tx_dma
= dma_map_single(dev
,
800 (void *)xfer
->tx_buf
, xfer
->len
,
802 if (dma_mapping_error(dev
, xfer
->tx_dma
)) {
803 dev_err(dev
, "dma_map_single Tx failed\n");
804 xfer
->tx_dma
= XFER_DMAADDR_INVALID
;
809 if (xfer
->rx_buf
!= NULL
) {
810 xfer
->rx_dma
= dma_map_single(dev
, xfer
->rx_buf
,
811 xfer
->len
, DMA_FROM_DEVICE
);
812 if (dma_mapping_error(dev
, xfer
->rx_dma
)) {
813 dev_err(dev
, "dma_map_single Rx failed\n");
814 dma_unmap_single(dev
, xfer
->tx_dma
,
815 xfer
->len
, DMA_TO_DEVICE
);
816 xfer
->tx_dma
= XFER_DMAADDR_INVALID
;
817 xfer
->rx_dma
= XFER_DMAADDR_INVALID
;
826 static void s3c64xx_spi_unmap_mssg(struct s3c64xx_spi_driver_data
*sdd
,
827 struct spi_message
*msg
)
829 struct device
*dev
= &sdd
->pdev
->dev
;
830 struct spi_transfer
*xfer
;
832 if (is_polling(sdd
) || msg
->is_dma_mapped
)
835 list_for_each_entry(xfer
, &msg
->transfers
, transfer_list
) {
837 if (xfer
->len
<= ((FIFO_LVL_MASK(sdd
) >> 1) + 1))
840 if (xfer
->rx_buf
!= NULL
841 && xfer
->rx_dma
!= XFER_DMAADDR_INVALID
)
842 dma_unmap_single(dev
, xfer
->rx_dma
,
843 xfer
->len
, DMA_FROM_DEVICE
);
845 if (xfer
->tx_buf
!= NULL
846 && xfer
->tx_dma
!= XFER_DMAADDR_INVALID
)
847 dma_unmap_single(dev
, xfer
->tx_dma
,
848 xfer
->len
, DMA_TO_DEVICE
);
852 static int s3c64xx_spi_prepare_message(struct spi_master
*master
,
853 struct spi_message
*msg
)
855 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
856 struct spi_device
*spi
= msg
->spi
;
857 struct s3c64xx_spi_csinfo
*cs
= spi
->controller_data
;
859 /* If Master's(controller) state differs from that needed by Slave */
860 if (sdd
->cur_speed
!= spi
->max_speed_hz
861 || sdd
->cur_mode
!= spi
->mode
862 || sdd
->cur_bpw
!= spi
->bits_per_word
) {
863 sdd
->cur_bpw
= spi
->bits_per_word
;
864 sdd
->cur_speed
= spi
->max_speed_hz
;
865 sdd
->cur_mode
= spi
->mode
;
866 s3c64xx_spi_config(sdd
);
869 /* Map all the transfers if needed */
870 if (s3c64xx_spi_map_mssg(sdd
, msg
)) {
872 "Xfer: Unable to map message buffers!\n");
876 /* Configure feedback delay */
877 writel(cs
->fb_delay
& 0x3, sdd
->regs
+ S3C64XX_SPI_FB_CLK
);
882 static int s3c64xx_spi_transfer_one(struct spi_master
*master
,
883 struct spi_device
*spi
,
884 struct spi_transfer
*xfer
)
886 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
893 reinit_completion(&sdd
->xfer_completion
);
895 /* Only BPW and Speed may change across transfers */
896 bpw
= xfer
->bits_per_word
;
897 speed
= xfer
->speed_hz
? : spi
->max_speed_hz
;
899 if (xfer
->len
% (bpw
/ 8)) {
901 "Xfer length(%u) not a multiple of word size(%u)\n",
906 if (bpw
!= sdd
->cur_bpw
|| speed
!= sdd
->cur_speed
) {
908 sdd
->cur_speed
= speed
;
909 s3c64xx_spi_config(sdd
);
912 /* Polling method for xfers not bigger than FIFO capacity */
914 if (!is_polling(sdd
) &&
915 (sdd
->rx_dma
.ch
&& sdd
->tx_dma
.ch
&&
916 (xfer
->len
> ((FIFO_LVL_MASK(sdd
) >> 1) + 1))))
919 spin_lock_irqsave(&sdd
->lock
, flags
);
921 /* Pending only which is to be done */
922 sdd
->state
&= ~RXBUSY
;
923 sdd
->state
&= ~TXBUSY
;
925 enable_datapath(sdd
, spi
, xfer
, use_dma
);
927 /* Start the signals */
928 writel(0, sdd
->regs
+ S3C64XX_SPI_SLAVE_SEL
);
930 /* Start the signals */
931 writel(0, sdd
->regs
+ S3C64XX_SPI_SLAVE_SEL
);
933 spin_unlock_irqrestore(&sdd
->lock
, flags
);
935 status
= wait_for_xfer(sdd
, xfer
, use_dma
);
938 dev_err(&spi
->dev
, "I/O Error: rx-%d tx-%d res:rx-%c tx-%c len-%d\n",
939 xfer
->rx_buf
? 1 : 0, xfer
->tx_buf
? 1 : 0,
940 (sdd
->state
& RXBUSY
) ? 'f' : 'p',
941 (sdd
->state
& TXBUSY
) ? 'f' : 'p',
945 if (xfer
->tx_buf
!= NULL
946 && (sdd
->state
& TXBUSY
))
947 s3c64xx_spi_dma_stop(sdd
, &sdd
->tx_dma
);
948 if (xfer
->rx_buf
!= NULL
949 && (sdd
->state
& RXBUSY
))
950 s3c64xx_spi_dma_stop(sdd
, &sdd
->rx_dma
);
/* spi_master unprepare_message hook: undo the DMA mappings. */
static int s3c64xx_spi_unprepare_message(struct spi_master *master,
					 struct spi_message *msg)
{
	struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master);

	s3c64xx_spi_unmap_mssg(sdd, msg);

	return 0;
}
969 static struct s3c64xx_spi_csinfo
*s3c64xx_get_slave_ctrldata(
970 struct spi_device
*spi
)
972 struct s3c64xx_spi_csinfo
*cs
;
973 struct device_node
*slave_np
, *data_np
= NULL
;
974 struct s3c64xx_spi_driver_data
*sdd
;
977 sdd
= spi_master_get_devdata(spi
->master
);
978 slave_np
= spi
->dev
.of_node
;
980 dev_err(&spi
->dev
, "device node not found\n");
981 return ERR_PTR(-EINVAL
);
984 data_np
= of_get_child_by_name(slave_np
, "controller-data");
986 dev_err(&spi
->dev
, "child node 'controller-data' not found\n");
987 return ERR_PTR(-EINVAL
);
990 cs
= kzalloc(sizeof(*cs
), GFP_KERNEL
);
992 dev_err(&spi
->dev
, "could not allocate memory for controller data\n");
993 of_node_put(data_np
);
994 return ERR_PTR(-ENOMEM
);
997 /* The CS line is asserted/deasserted by the gpio pin */
999 cs
->line
= of_get_named_gpio(data_np
, "cs-gpio", 0);
1001 if (!gpio_is_valid(cs
->line
)) {
1002 dev_err(&spi
->dev
, "chip select gpio is not specified or invalid\n");
1004 of_node_put(data_np
);
1005 return ERR_PTR(-EINVAL
);
1008 of_property_read_u32(data_np
, "samsung,spi-feedback-delay", &fb_delay
);
1009 cs
->fb_delay
= fb_delay
;
1010 of_node_put(data_np
);
1015 * Here we only check the validity of requested configuration
1016 * and save the configuration in a local data-structure.
1017 * The controller is actually configured only just before we
1018 * get a message to transfer.
1020 static int s3c64xx_spi_setup(struct spi_device
*spi
)
1022 struct s3c64xx_spi_csinfo
*cs
= spi
->controller_data
;
1023 struct s3c64xx_spi_driver_data
*sdd
;
1024 struct s3c64xx_spi_info
*sci
;
1027 sdd
= spi_master_get_devdata(spi
->master
);
1028 if (!cs
&& spi
->dev
.of_node
) {
1029 cs
= s3c64xx_get_slave_ctrldata(spi
);
1030 spi
->controller_data
= cs
;
1033 if (IS_ERR_OR_NULL(cs
)) {
1034 dev_err(&spi
->dev
, "No CS for SPI(%d)\n", spi
->chip_select
);
1038 if (!spi_get_ctldata(spi
)) {
1039 /* Request gpio only if cs line is asserted by gpio pins */
1041 err
= gpio_request_one(cs
->line
, GPIOF_OUT_INIT_HIGH
,
1042 dev_name(&spi
->dev
));
1045 "Failed to get /CS gpio [%d]: %d\n",
1050 spi
->cs_gpio
= cs
->line
;
1053 spi_set_ctldata(spi
, cs
);
1056 sci
= sdd
->cntrlr_info
;
1058 pm_runtime_get_sync(&sdd
->pdev
->dev
);
1060 /* Check if we can provide the requested rate */
1061 if (!sdd
->port_conf
->clk_from_cmu
) {
1065 speed
= clk_get_rate(sdd
->src_clk
) / 2 / (0 + 1);
1067 if (spi
->max_speed_hz
> speed
)
1068 spi
->max_speed_hz
= speed
;
1070 psr
= clk_get_rate(sdd
->src_clk
) / 2 / spi
->max_speed_hz
- 1;
1071 psr
&= S3C64XX_SPI_PSR_MASK
;
1072 if (psr
== S3C64XX_SPI_PSR_MASK
)
1075 speed
= clk_get_rate(sdd
->src_clk
) / 2 / (psr
+ 1);
1076 if (spi
->max_speed_hz
< speed
) {
1077 if (psr
+1 < S3C64XX_SPI_PSR_MASK
) {
1085 speed
= clk_get_rate(sdd
->src_clk
) / 2 / (psr
+ 1);
1086 if (spi
->max_speed_hz
>= speed
) {
1087 spi
->max_speed_hz
= speed
;
1089 dev_err(&spi
->dev
, "Can't set %dHz transfer speed\n",
1096 pm_runtime_put(&sdd
->pdev
->dev
);
1097 writel(S3C64XX_SPI_SLAVE_SIG_INACT
, sdd
->regs
+ S3C64XX_SPI_SLAVE_SEL
);
1098 disable_cs(sdd
, spi
);
1102 pm_runtime_put(&sdd
->pdev
->dev
);
1103 /* setup() returns with device de-selected */
1104 writel(S3C64XX_SPI_SLAVE_SIG_INACT
, sdd
->regs
+ S3C64XX_SPI_SLAVE_SEL
);
1105 disable_cs(sdd
, spi
);
1107 gpio_free(cs
->line
);
1108 spi_set_ctldata(spi
, NULL
);
1111 if (spi
->dev
.of_node
)
1117 static void s3c64xx_spi_cleanup(struct spi_device
*spi
)
1119 struct s3c64xx_spi_csinfo
*cs
= spi_get_ctldata(spi
);
1120 struct s3c64xx_spi_driver_data
*sdd
;
1122 sdd
= spi_master_get_devdata(spi
->master
);
1124 gpio_free(spi
->cs_gpio
);
1125 if (spi
->dev
.of_node
)
1128 spi_set_ctldata(spi
, NULL
);
1131 static irqreturn_t
s3c64xx_spi_irq(int irq
, void *data
)
1133 struct s3c64xx_spi_driver_data
*sdd
= data
;
1134 struct spi_master
*spi
= sdd
->master
;
1135 unsigned int val
, clr
= 0;
1137 val
= readl(sdd
->regs
+ S3C64XX_SPI_STATUS
);
1139 if (val
& S3C64XX_SPI_ST_RX_OVERRUN_ERR
) {
1140 clr
= S3C64XX_SPI_PND_RX_OVERRUN_CLR
;
1141 dev_err(&spi
->dev
, "RX overrun\n");
1143 if (val
& S3C64XX_SPI_ST_RX_UNDERRUN_ERR
) {
1144 clr
|= S3C64XX_SPI_PND_RX_UNDERRUN_CLR
;
1145 dev_err(&spi
->dev
, "RX underrun\n");
1147 if (val
& S3C64XX_SPI_ST_TX_OVERRUN_ERR
) {
1148 clr
|= S3C64XX_SPI_PND_TX_OVERRUN_CLR
;
1149 dev_err(&spi
->dev
, "TX overrun\n");
1151 if (val
& S3C64XX_SPI_ST_TX_UNDERRUN_ERR
) {
1152 clr
|= S3C64XX_SPI_PND_TX_UNDERRUN_CLR
;
1153 dev_err(&spi
->dev
, "TX underrun\n");
1156 /* Clear the pending irq by setting and then clearing it */
1157 writel(clr
, sdd
->regs
+ S3C64XX_SPI_PENDING_CLR
);
1158 writel(0, sdd
->regs
+ S3C64XX_SPI_PENDING_CLR
);
1163 static void s3c64xx_spi_hwinit(struct s3c64xx_spi_driver_data
*sdd
, int channel
)
1165 struct s3c64xx_spi_info
*sci
= sdd
->cntrlr_info
;
1166 void __iomem
*regs
= sdd
->regs
;
1171 writel(S3C64XX_SPI_SLAVE_SIG_INACT
, sdd
->regs
+ S3C64XX_SPI_SLAVE_SEL
);
1173 /* Disable Interrupts - we use Polling if not DMA mode */
1174 writel(0, regs
+ S3C64XX_SPI_INT_EN
);
1176 if (!sdd
->port_conf
->clk_from_cmu
)
1177 writel(sci
->src_clk_nr
<< S3C64XX_SPI_CLKSEL_SRCSHFT
,
1178 regs
+ S3C64XX_SPI_CLK_CFG
);
1179 writel(0, regs
+ S3C64XX_SPI_MODE_CFG
);
1180 writel(0, regs
+ S3C64XX_SPI_PACKET_CNT
);
1182 /* Clear any irq pending bits, should set and clear the bits */
1183 val
= S3C64XX_SPI_PND_RX_OVERRUN_CLR
|
1184 S3C64XX_SPI_PND_RX_UNDERRUN_CLR
|
1185 S3C64XX_SPI_PND_TX_OVERRUN_CLR
|
1186 S3C64XX_SPI_PND_TX_UNDERRUN_CLR
;
1187 writel(val
, regs
+ S3C64XX_SPI_PENDING_CLR
);
1188 writel(0, regs
+ S3C64XX_SPI_PENDING_CLR
);
1190 writel(0, regs
+ S3C64XX_SPI_SWAP_CFG
);
1192 val
= readl(regs
+ S3C64XX_SPI_MODE_CFG
);
1193 val
&= ~S3C64XX_SPI_MODE_4BURST
;
1194 val
&= ~(S3C64XX_SPI_MAX_TRAILCNT
<< S3C64XX_SPI_TRAILCNT_OFF
);
1195 val
|= (S3C64XX_SPI_TRAILCNT
<< S3C64XX_SPI_TRAILCNT_OFF
);
1196 writel(val
, regs
+ S3C64XX_SPI_MODE_CFG
);
1202 static struct s3c64xx_spi_info
*s3c64xx_spi_parse_dt(struct device
*dev
)
1204 struct s3c64xx_spi_info
*sci
;
1207 sci
= devm_kzalloc(dev
, sizeof(*sci
), GFP_KERNEL
);
1209 dev_err(dev
, "memory allocation for spi_info failed\n");
1210 return ERR_PTR(-ENOMEM
);
1213 if (of_property_read_u32(dev
->of_node
, "samsung,spi-src-clk", &temp
)) {
1214 dev_warn(dev
, "spi bus clock parent not specified, using clock at index 0 as parent\n");
1215 sci
->src_clk_nr
= 0;
1217 sci
->src_clk_nr
= temp
;
1220 if (of_property_read_u32(dev
->of_node
, "num-cs", &temp
)) {
1221 dev_warn(dev
, "number of chip select lines not specified, assuming 1 chip select line\n");
/* Non-DT build: controller info comes straight from board platform data. */
static struct s3c64xx_spi_info *s3c64xx_spi_parse_dt(struct device *dev)
{
	return dev_get_platdata(dev);
}
1236 static const struct of_device_id s3c64xx_spi_dt_match
[];
1238 static inline struct s3c64xx_spi_port_config
*s3c64xx_spi_get_port_config(
1239 struct platform_device
*pdev
)
1242 if (pdev
->dev
.of_node
) {
1243 const struct of_device_id
*match
;
1244 match
= of_match_node(s3c64xx_spi_dt_match
, pdev
->dev
.of_node
);
1245 return (struct s3c64xx_spi_port_config
*)match
->data
;
1248 return (struct s3c64xx_spi_port_config
*)
1249 platform_get_device_id(pdev
)->driver_data
;
1252 static int s3c64xx_spi_probe(struct platform_device
*pdev
)
1254 struct resource
*mem_res
;
1255 struct resource
*res
;
1256 struct s3c64xx_spi_driver_data
*sdd
;
1257 struct s3c64xx_spi_info
*sci
= dev_get_platdata(&pdev
->dev
);
1258 struct spi_master
*master
;
1262 if (!sci
&& pdev
->dev
.of_node
) {
1263 sci
= s3c64xx_spi_parse_dt(&pdev
->dev
);
1265 return PTR_ERR(sci
);
1269 dev_err(&pdev
->dev
, "platform_data missing!\n");
1273 mem_res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
1274 if (mem_res
== NULL
) {
1275 dev_err(&pdev
->dev
, "Unable to get SPI MEM resource\n");
1279 irq
= platform_get_irq(pdev
, 0);
1281 dev_warn(&pdev
->dev
, "Failed to get IRQ: %d\n", irq
);
1285 master
= spi_alloc_master(&pdev
->dev
,
1286 sizeof(struct s3c64xx_spi_driver_data
));
1287 if (master
== NULL
) {
1288 dev_err(&pdev
->dev
, "Unable to allocate SPI Master\n");
1292 platform_set_drvdata(pdev
, master
);
1294 sdd
= spi_master_get_devdata(master
);
1295 sdd
->port_conf
= s3c64xx_spi_get_port_config(pdev
);
1296 sdd
->master
= master
;
1297 sdd
->cntrlr_info
= sci
;
1299 sdd
->sfr_start
= mem_res
->start
;
1300 sdd
->cs_gpio
= true;
1301 if (pdev
->dev
.of_node
) {
1302 if (!of_find_property(pdev
->dev
.of_node
, "cs-gpio", NULL
))
1303 sdd
->cs_gpio
= false;
1305 ret
= of_alias_get_id(pdev
->dev
.of_node
, "spi");
1307 dev_err(&pdev
->dev
, "failed to get alias id, errno %d\n",
1313 sdd
->port_id
= pdev
->id
;
1318 if (!sdd
->pdev
->dev
.of_node
) {
1319 res
= platform_get_resource(pdev
, IORESOURCE_DMA
, 0);
1321 dev_warn(&pdev
->dev
, "Unable to get SPI tx dma resource. Switching to poll mode\n");
1322 sdd
->port_conf
->quirks
= S3C64XX_SPI_QUIRK_POLL
;
1324 sdd
->tx_dma
.dmach
= res
->start
;
1326 res
= platform_get_resource(pdev
, IORESOURCE_DMA
, 1);
1328 dev_warn(&pdev
->dev
, "Unable to get SPI rx dma resource. Switching to poll mode\n");
1329 sdd
->port_conf
->quirks
= S3C64XX_SPI_QUIRK_POLL
;
1331 sdd
->rx_dma
.dmach
= res
->start
;
1334 sdd
->tx_dma
.direction
= DMA_MEM_TO_DEV
;
1335 sdd
->rx_dma
.direction
= DMA_DEV_TO_MEM
;
1337 master
->dev
.of_node
= pdev
->dev
.of_node
;
1338 master
->bus_num
= sdd
->port_id
;
1339 master
->setup
= s3c64xx_spi_setup
;
1340 master
->cleanup
= s3c64xx_spi_cleanup
;
1341 master
->prepare_transfer_hardware
= s3c64xx_spi_prepare_transfer
;
1342 master
->prepare_message
= s3c64xx_spi_prepare_message
;
1343 master
->transfer_one
= s3c64xx_spi_transfer_one
;
1344 master
->unprepare_message
= s3c64xx_spi_unprepare_message
;
1345 master
->unprepare_transfer_hardware
= s3c64xx_spi_unprepare_transfer
;
1346 master
->num_chipselect
= sci
->num_cs
;
1347 master
->dma_alignment
= 8;
1348 master
->bits_per_word_mask
= SPI_BPW_MASK(32) | SPI_BPW_MASK(16) |
1350 /* the spi->mode bits understood by this driver: */
1351 master
->mode_bits
= SPI_CPOL
| SPI_CPHA
| SPI_CS_HIGH
;
1352 master
->auto_runtime_pm
= true;
1354 sdd
->regs
= devm_ioremap_resource(&pdev
->dev
, mem_res
);
1355 if (IS_ERR(sdd
->regs
)) {
1356 ret
= PTR_ERR(sdd
->regs
);
1360 if (sci
->cfg_gpio
&& sci
->cfg_gpio()) {
1361 dev_err(&pdev
->dev
, "Unable to config gpio\n");
1367 sdd
->clk
= devm_clk_get(&pdev
->dev
, "spi");
1368 if (IS_ERR(sdd
->clk
)) {
1369 dev_err(&pdev
->dev
, "Unable to acquire clock 'spi'\n");
1370 ret
= PTR_ERR(sdd
->clk
);
1374 if (clk_prepare_enable(sdd
->clk
)) {
1375 dev_err(&pdev
->dev
, "Couldn't enable clock 'spi'\n");
1380 sprintf(clk_name
, "spi_busclk%d", sci
->src_clk_nr
);
1381 sdd
->src_clk
= devm_clk_get(&pdev
->dev
, clk_name
);
1382 if (IS_ERR(sdd
->src_clk
)) {
1384 "Unable to acquire clock '%s'\n", clk_name
);
1385 ret
= PTR_ERR(sdd
->src_clk
);
1389 if (clk_prepare_enable(sdd
->src_clk
)) {
1390 dev_err(&pdev
->dev
, "Couldn't enable clock '%s'\n", clk_name
);
1395 /* Setup Deufult Mode */
1396 s3c64xx_spi_hwinit(sdd
, sdd
->port_id
);
1398 spin_lock_init(&sdd
->lock
);
1399 init_completion(&sdd
->xfer_completion
);
1401 ret
= devm_request_irq(&pdev
->dev
, irq
, s3c64xx_spi_irq
, 0,
1402 "spi-s3c64xx", sdd
);
1404 dev_err(&pdev
->dev
, "Failed to request IRQ %d: %d\n",
1409 writel(S3C64XX_SPI_INT_RX_OVERRUN_EN
| S3C64XX_SPI_INT_RX_UNDERRUN_EN
|
1410 S3C64XX_SPI_INT_TX_OVERRUN_EN
| S3C64XX_SPI_INT_TX_UNDERRUN_EN
,
1411 sdd
->regs
+ S3C64XX_SPI_INT_EN
);
1413 pm_runtime_set_active(&pdev
->dev
);
1414 pm_runtime_enable(&pdev
->dev
);
1416 ret
= devm_spi_register_master(&pdev
->dev
, master
);
1418 dev_err(&pdev
->dev
, "cannot register SPI master: %d\n", ret
);
1422 dev_dbg(&pdev
->dev
, "Samsung SoC SPI Driver loaded for Bus SPI-%d with %d Slaves attached\n",
1423 sdd
->port_id
, master
->num_chipselect
);
1424 dev_dbg(&pdev
->dev
, "\tIOmem=[%pR]\tDMA=[Rx-%d, Tx-%d]\n",
1426 sdd
->rx_dma
.dmach
, sdd
->tx_dma
.dmach
);
1431 clk_disable_unprepare(sdd
->src_clk
);
1433 clk_disable_unprepare(sdd
->clk
);
1435 spi_master_put(master
);
1440 static int s3c64xx_spi_remove(struct platform_device
*pdev
)
1442 struct spi_master
*master
= spi_master_get(platform_get_drvdata(pdev
));
1443 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
1445 pm_runtime_disable(&pdev
->dev
);
1447 writel(0, sdd
->regs
+ S3C64XX_SPI_INT_EN
);
1449 clk_disable_unprepare(sdd
->src_clk
);
1451 clk_disable_unprepare(sdd
->clk
);
1456 #ifdef CONFIG_PM_SLEEP
1457 static int s3c64xx_spi_suspend(struct device
*dev
)
1459 struct spi_master
*master
= dev_get_drvdata(dev
);
1460 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
1462 int ret
= spi_master_suspend(master
);
1466 if (!pm_runtime_suspended(dev
)) {
1467 clk_disable_unprepare(sdd
->clk
);
1468 clk_disable_unprepare(sdd
->src_clk
);
1471 sdd
->cur_speed
= 0; /* Output Clock is stopped */
1476 static int s3c64xx_spi_resume(struct device
*dev
)
1478 struct spi_master
*master
= dev_get_drvdata(dev
);
1479 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
1480 struct s3c64xx_spi_info
*sci
= sdd
->cntrlr_info
;
1485 if (!pm_runtime_suspended(dev
)) {
1486 clk_prepare_enable(sdd
->src_clk
);
1487 clk_prepare_enable(sdd
->clk
);
1490 s3c64xx_spi_hwinit(sdd
, sdd
->port_id
);
1492 return spi_master_resume(master
);
1494 #endif /* CONFIG_PM_SLEEP */
1496 #ifdef CONFIG_PM_RUNTIME
1497 static int s3c64xx_spi_runtime_suspend(struct device
*dev
)
1499 struct spi_master
*master
= dev_get_drvdata(dev
);
1500 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
1502 clk_disable_unprepare(sdd
->clk
);
1503 clk_disable_unprepare(sdd
->src_clk
);
1508 static int s3c64xx_spi_runtime_resume(struct device
*dev
)
1510 struct spi_master
*master
= dev_get_drvdata(dev
);
1511 struct s3c64xx_spi_driver_data
*sdd
= spi_master_get_devdata(master
);
1514 ret
= clk_prepare_enable(sdd
->src_clk
);
1518 ret
= clk_prepare_enable(sdd
->clk
);
1520 clk_disable_unprepare(sdd
->src_clk
);
1526 #endif /* CONFIG_PM_RUNTIME */
1528 static const struct dev_pm_ops s3c64xx_spi_pm
= {
1529 SET_SYSTEM_SLEEP_PM_OPS(s3c64xx_spi_suspend
, s3c64xx_spi_resume
)
1530 SET_RUNTIME_PM_OPS(s3c64xx_spi_runtime_suspend
,
1531 s3c64xx_spi_runtime_resume
, NULL
)
1534 static struct s3c64xx_spi_port_config s3c2443_spi_port_config
= {
1535 .fifo_lvl_mask
= { 0x7f },
1536 .rx_lvl_offset
= 13,
1541 static struct s3c64xx_spi_port_config s3c6410_spi_port_config
= {
1542 .fifo_lvl_mask
= { 0x7f, 0x7F },
1543 .rx_lvl_offset
= 13,
1547 static struct s3c64xx_spi_port_config s5p64x0_spi_port_config
= {
1548 .fifo_lvl_mask
= { 0x1ff, 0x7F },
1549 .rx_lvl_offset
= 15,
1553 static struct s3c64xx_spi_port_config s5pc100_spi_port_config
= {
1554 .fifo_lvl_mask
= { 0x7f, 0x7F },
1555 .rx_lvl_offset
= 13,
1560 static struct s3c64xx_spi_port_config s5pv210_spi_port_config
= {
1561 .fifo_lvl_mask
= { 0x1ff, 0x7F },
1562 .rx_lvl_offset
= 15,
1567 static struct s3c64xx_spi_port_config exynos4_spi_port_config
= {
1568 .fifo_lvl_mask
= { 0x1ff, 0x7F, 0x7F },
1569 .rx_lvl_offset
= 15,
1572 .clk_from_cmu
= true,
1575 static struct s3c64xx_spi_port_config exynos5440_spi_port_config
= {
1576 .fifo_lvl_mask
= { 0x1ff },
1577 .rx_lvl_offset
= 15,
1580 .clk_from_cmu
= true,
1581 .quirks
= S3C64XX_SPI_QUIRK_POLL
,
1584 static struct platform_device_id s3c64xx_spi_driver_ids
[] = {
1586 .name
= "s3c2443-spi",
1587 .driver_data
= (kernel_ulong_t
)&s3c2443_spi_port_config
,
1589 .name
= "s3c6410-spi",
1590 .driver_data
= (kernel_ulong_t
)&s3c6410_spi_port_config
,
1592 .name
= "s5p64x0-spi",
1593 .driver_data
= (kernel_ulong_t
)&s5p64x0_spi_port_config
,
1595 .name
= "s5pc100-spi",
1596 .driver_data
= (kernel_ulong_t
)&s5pc100_spi_port_config
,
1598 .name
= "s5pv210-spi",
1599 .driver_data
= (kernel_ulong_t
)&s5pv210_spi_port_config
,
1601 .name
= "exynos4210-spi",
1602 .driver_data
= (kernel_ulong_t
)&exynos4_spi_port_config
,
1607 static const struct of_device_id s3c64xx_spi_dt_match
[] = {
1608 { .compatible
= "samsung,s3c2443-spi",
1609 .data
= (void *)&s3c2443_spi_port_config
,
1611 { .compatible
= "samsung,s3c6410-spi",
1612 .data
= (void *)&s3c6410_spi_port_config
,
1614 { .compatible
= "samsung,s5pc100-spi",
1615 .data
= (void *)&s5pc100_spi_port_config
,
1617 { .compatible
= "samsung,s5pv210-spi",
1618 .data
= (void *)&s5pv210_spi_port_config
,
1620 { .compatible
= "samsung,exynos4210-spi",
1621 .data
= (void *)&exynos4_spi_port_config
,
1623 { .compatible
= "samsung,exynos5440-spi",
1624 .data
= (void *)&exynos5440_spi_port_config
,
1628 MODULE_DEVICE_TABLE(of
, s3c64xx_spi_dt_match
);
1630 static struct platform_driver s3c64xx_spi_driver
= {
1632 .name
= "s3c64xx-spi",
1633 .owner
= THIS_MODULE
,
1634 .pm
= &s3c64xx_spi_pm
,
1635 .of_match_table
= of_match_ptr(s3c64xx_spi_dt_match
),
1637 .probe
= s3c64xx_spi_probe
,
1638 .remove
= s3c64xx_spi_remove
,
1639 .id_table
= s3c64xx_spi_driver_ids
,
1641 MODULE_ALIAS("platform:s3c64xx-spi");
1643 module_platform_driver(s3c64xx_spi_driver
);
1645 MODULE_AUTHOR("Jaswinder Singh <jassi.brar@samsung.com>");
1646 MODULE_DESCRIPTION("S3C64XX SPI Controller Driver");
1647 MODULE_LICENSE("GPL");