/*
 * Copyright (c) 2015 MediaTek Inc.
 * Author: Leilk Liu <leilk.liu@mediatek.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
15 #include <linux/clk.h>
16 #include <linux/device.h>
17 #include <linux/err.h>
18 #include <linux/interrupt.h>
20 #include <linux/ioport.h>
21 #include <linux/module.h>
23 #include <linux/of_gpio.h>
24 #include <linux/platform_device.h>
25 #include <linux/platform_data/spi-mt65xx.h>
26 #include <linux/pm_runtime.h>
27 #include <linux/spi/spi.h>
29 #define SPI_CFG0_REG 0x0000
30 #define SPI_CFG1_REG 0x0004
31 #define SPI_TX_SRC_REG 0x0008
32 #define SPI_RX_DST_REG 0x000c
33 #define SPI_TX_DATA_REG 0x0010
34 #define SPI_RX_DATA_REG 0x0014
35 #define SPI_CMD_REG 0x0018
36 #define SPI_STATUS0_REG 0x001c
37 #define SPI_PAD_SEL_REG 0x0024
38 #define SPI_CFG2_REG 0x0028
40 #define SPI_CFG0_SCK_HIGH_OFFSET 0
41 #define SPI_CFG0_SCK_LOW_OFFSET 8
42 #define SPI_CFG0_CS_HOLD_OFFSET 16
43 #define SPI_CFG0_CS_SETUP_OFFSET 24
44 #define SPI_ADJUST_CFG0_SCK_LOW_OFFSET 16
45 #define SPI_ADJUST_CFG0_CS_HOLD_OFFSET 0
46 #define SPI_ADJUST_CFG0_CS_SETUP_OFFSET 16
48 #define SPI_CFG1_CS_IDLE_OFFSET 0
49 #define SPI_CFG1_PACKET_LOOP_OFFSET 8
50 #define SPI_CFG1_PACKET_LENGTH_OFFSET 16
51 #define SPI_CFG1_GET_TICK_DLY_OFFSET 30
53 #define SPI_CFG1_CS_IDLE_MASK 0xff
54 #define SPI_CFG1_PACKET_LOOP_MASK 0xff00
55 #define SPI_CFG1_PACKET_LENGTH_MASK 0x3ff0000
57 #define SPI_CMD_ACT BIT(0)
58 #define SPI_CMD_RESUME BIT(1)
59 #define SPI_CMD_RST BIT(2)
60 #define SPI_CMD_PAUSE_EN BIT(4)
61 #define SPI_CMD_DEASSERT BIT(5)
62 #define SPI_CMD_SAMPLE_SEL BIT(6)
63 #define SPI_CMD_CS_POL BIT(7)
64 #define SPI_CMD_CPHA BIT(8)
65 #define SPI_CMD_CPOL BIT(9)
66 #define SPI_CMD_RX_DMA BIT(10)
67 #define SPI_CMD_TX_DMA BIT(11)
68 #define SPI_CMD_TXMSBF BIT(12)
69 #define SPI_CMD_RXMSBF BIT(13)
70 #define SPI_CMD_RX_ENDIAN BIT(14)
71 #define SPI_CMD_TX_ENDIAN BIT(15)
72 #define SPI_CMD_FINISH_IE BIT(16)
73 #define SPI_CMD_PAUSE_IE BIT(17)
75 #define MT8173_SPI_MAX_PAD_SEL 3
77 #define MTK_SPI_PAUSE_INT_STATUS 0x2
79 #define MTK_SPI_IDLE 0
80 #define MTK_SPI_PAUSED 1
82 #define MTK_SPI_MAX_FIFO_SIZE 32U
83 #define MTK_SPI_PACKET_SIZE 1024
85 struct mtk_spi_compatible
{
87 /* Must explicitly send dummy Tx bytes to do Rx only transfer */
89 /* some IC design adjust cfg register to enhance time accuracy */
98 struct clk
*parent_clk
, *sel_clk
, *spi_clk
;
99 struct spi_transfer
*cur_transfer
;
102 struct scatterlist
*tx_sgl
, *rx_sgl
;
103 u32 tx_sgl_len
, rx_sgl_len
;
104 const struct mtk_spi_compatible
*dev_comp
;
107 static const struct mtk_spi_compatible mtk_common_compat
;
109 static const struct mtk_spi_compatible mt2712_compat
= {
113 static const struct mtk_spi_compatible mt7622_compat
= {
115 .enhance_timing
= true,
118 static const struct mtk_spi_compatible mt8173_compat
= {
119 .need_pad_sel
= true,
123 static const struct mtk_spi_compatible mt8183_compat
= {
124 .need_pad_sel
= true,
126 .enhance_timing
= true,
130 * A piece of default chip info unless the platform
133 static const struct mtk_chip_config mtk_default_chip_info
= {
140 static const struct of_device_id mtk_spi_of_match
[] = {
141 { .compatible
= "mediatek,mt2701-spi",
142 .data
= (void *)&mtk_common_compat
,
144 { .compatible
= "mediatek,mt2712-spi",
145 .data
= (void *)&mt2712_compat
,
147 { .compatible
= "mediatek,mt6589-spi",
148 .data
= (void *)&mtk_common_compat
,
150 { .compatible
= "mediatek,mt7622-spi",
151 .data
= (void *)&mt7622_compat
,
153 { .compatible
= "mediatek,mt8135-spi",
154 .data
= (void *)&mtk_common_compat
,
156 { .compatible
= "mediatek,mt8173-spi",
157 .data
= (void *)&mt8173_compat
,
159 { .compatible
= "mediatek,mt8183-spi",
160 .data
= (void *)&mt8183_compat
,
164 MODULE_DEVICE_TABLE(of
, mtk_spi_of_match
);
166 static void mtk_spi_reset(struct mtk_spi
*mdata
)
170 /* set the software reset bit in SPI_CMD_REG. */
171 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
172 reg_val
|= SPI_CMD_RST
;
173 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
175 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
176 reg_val
&= ~SPI_CMD_RST
;
177 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
180 static int mtk_spi_prepare_message(struct spi_master
*master
,
181 struct spi_message
*msg
)
185 struct spi_device
*spi
= msg
->spi
;
186 struct mtk_chip_config
*chip_config
= spi
->controller_data
;
187 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
189 cpha
= spi
->mode
& SPI_CPHA
? 1 : 0;
190 cpol
= spi
->mode
& SPI_CPOL
? 1 : 0;
192 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
194 reg_val
|= SPI_CMD_CPHA
;
196 reg_val
&= ~SPI_CMD_CPHA
;
198 reg_val
|= SPI_CMD_CPOL
;
200 reg_val
&= ~SPI_CMD_CPOL
;
202 /* set the mlsbx and mlsbtx */
203 if (chip_config
->tx_mlsb
)
204 reg_val
|= SPI_CMD_TXMSBF
;
206 reg_val
&= ~SPI_CMD_TXMSBF
;
207 if (chip_config
->rx_mlsb
)
208 reg_val
|= SPI_CMD_RXMSBF
;
210 reg_val
&= ~SPI_CMD_RXMSBF
;
212 /* set the tx/rx endian */
213 #ifdef __LITTLE_ENDIAN
214 reg_val
&= ~SPI_CMD_TX_ENDIAN
;
215 reg_val
&= ~SPI_CMD_RX_ENDIAN
;
217 reg_val
|= SPI_CMD_TX_ENDIAN
;
218 reg_val
|= SPI_CMD_RX_ENDIAN
;
221 if (mdata
->dev_comp
->enhance_timing
) {
222 if (chip_config
->cs_pol
)
223 reg_val
|= SPI_CMD_CS_POL
;
225 reg_val
&= ~SPI_CMD_CS_POL
;
226 if (chip_config
->sample_sel
)
227 reg_val
|= SPI_CMD_SAMPLE_SEL
;
229 reg_val
&= ~SPI_CMD_SAMPLE_SEL
;
232 /* set finish and pause interrupt always enable */
233 reg_val
|= SPI_CMD_FINISH_IE
| SPI_CMD_PAUSE_IE
;
235 /* disable dma mode */
236 reg_val
&= ~(SPI_CMD_TX_DMA
| SPI_CMD_RX_DMA
);
238 /* disable deassert mode */
239 reg_val
&= ~SPI_CMD_DEASSERT
;
241 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
244 if (mdata
->dev_comp
->need_pad_sel
)
245 writel(mdata
->pad_sel
[spi
->chip_select
],
246 mdata
->base
+ SPI_PAD_SEL_REG
);
251 static void mtk_spi_set_cs(struct spi_device
*spi
, bool enable
)
254 struct mtk_spi
*mdata
= spi_master_get_devdata(spi
->master
);
256 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
258 reg_val
|= SPI_CMD_PAUSE_EN
;
259 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
261 reg_val
&= ~SPI_CMD_PAUSE_EN
;
262 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
263 mdata
->state
= MTK_SPI_IDLE
;
264 mtk_spi_reset(mdata
);
268 static void mtk_spi_prepare_transfer(struct spi_master
*master
,
269 struct spi_transfer
*xfer
)
271 u32 spi_clk_hz
, div
, sck_time
, cs_time
, reg_val
= 0;
272 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
274 spi_clk_hz
= clk_get_rate(mdata
->spi_clk
);
275 if (xfer
->speed_hz
< spi_clk_hz
/ 2)
276 div
= DIV_ROUND_UP(spi_clk_hz
, xfer
->speed_hz
);
280 sck_time
= (div
+ 1) / 2;
281 cs_time
= sck_time
* 2;
283 if (mdata
->dev_comp
->enhance_timing
) {
284 reg_val
|= (((sck_time
- 1) & 0xffff)
285 << SPI_CFG0_SCK_HIGH_OFFSET
);
286 reg_val
|= (((sck_time
- 1) & 0xffff)
287 << SPI_ADJUST_CFG0_SCK_LOW_OFFSET
);
288 writel(reg_val
, mdata
->base
+ SPI_CFG2_REG
);
289 reg_val
|= (((cs_time
- 1) & 0xffff)
290 << SPI_ADJUST_CFG0_CS_HOLD_OFFSET
);
291 reg_val
|= (((cs_time
- 1) & 0xffff)
292 << SPI_ADJUST_CFG0_CS_SETUP_OFFSET
);
293 writel(reg_val
, mdata
->base
+ SPI_CFG0_REG
);
295 reg_val
|= (((sck_time
- 1) & 0xff)
296 << SPI_CFG0_SCK_HIGH_OFFSET
);
297 reg_val
|= (((sck_time
- 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET
);
298 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET
);
299 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG0_CS_SETUP_OFFSET
);
300 writel(reg_val
, mdata
->base
+ SPI_CFG0_REG
);
303 reg_val
= readl(mdata
->base
+ SPI_CFG1_REG
);
304 reg_val
&= ~SPI_CFG1_CS_IDLE_MASK
;
305 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET
);
306 writel(reg_val
, mdata
->base
+ SPI_CFG1_REG
);
309 static void mtk_spi_setup_packet(struct spi_master
*master
)
311 u32 packet_size
, packet_loop
, reg_val
;
312 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
314 packet_size
= min_t(u32
, mdata
->xfer_len
, MTK_SPI_PACKET_SIZE
);
315 packet_loop
= mdata
->xfer_len
/ packet_size
;
317 reg_val
= readl(mdata
->base
+ SPI_CFG1_REG
);
318 reg_val
&= ~(SPI_CFG1_PACKET_LENGTH_MASK
| SPI_CFG1_PACKET_LOOP_MASK
);
319 reg_val
|= (packet_size
- 1) << SPI_CFG1_PACKET_LENGTH_OFFSET
;
320 reg_val
|= (packet_loop
- 1) << SPI_CFG1_PACKET_LOOP_OFFSET
;
321 writel(reg_val
, mdata
->base
+ SPI_CFG1_REG
);
324 static void mtk_spi_enable_transfer(struct spi_master
*master
)
327 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
329 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
330 if (mdata
->state
== MTK_SPI_IDLE
)
333 cmd
|= SPI_CMD_RESUME
;
334 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
337 static int mtk_spi_get_mult_delta(u32 xfer_len
)
341 if (xfer_len
> MTK_SPI_PACKET_SIZE
)
342 mult_delta
= xfer_len
% MTK_SPI_PACKET_SIZE
;
349 static void mtk_spi_update_mdata_len(struct spi_master
*master
)
352 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
354 if (mdata
->tx_sgl_len
&& mdata
->rx_sgl_len
) {
355 if (mdata
->tx_sgl_len
> mdata
->rx_sgl_len
) {
356 mult_delta
= mtk_spi_get_mult_delta(mdata
->rx_sgl_len
);
357 mdata
->xfer_len
= mdata
->rx_sgl_len
- mult_delta
;
358 mdata
->rx_sgl_len
= mult_delta
;
359 mdata
->tx_sgl_len
-= mdata
->xfer_len
;
361 mult_delta
= mtk_spi_get_mult_delta(mdata
->tx_sgl_len
);
362 mdata
->xfer_len
= mdata
->tx_sgl_len
- mult_delta
;
363 mdata
->tx_sgl_len
= mult_delta
;
364 mdata
->rx_sgl_len
-= mdata
->xfer_len
;
366 } else if (mdata
->tx_sgl_len
) {
367 mult_delta
= mtk_spi_get_mult_delta(mdata
->tx_sgl_len
);
368 mdata
->xfer_len
= mdata
->tx_sgl_len
- mult_delta
;
369 mdata
->tx_sgl_len
= mult_delta
;
370 } else if (mdata
->rx_sgl_len
) {
371 mult_delta
= mtk_spi_get_mult_delta(mdata
->rx_sgl_len
);
372 mdata
->xfer_len
= mdata
->rx_sgl_len
- mult_delta
;
373 mdata
->rx_sgl_len
= mult_delta
;
377 static void mtk_spi_setup_dma_addr(struct spi_master
*master
,
378 struct spi_transfer
*xfer
)
380 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
383 writel(xfer
->tx_dma
, mdata
->base
+ SPI_TX_SRC_REG
);
385 writel(xfer
->rx_dma
, mdata
->base
+ SPI_RX_DST_REG
);
388 static int mtk_spi_fifo_transfer(struct spi_master
*master
,
389 struct spi_device
*spi
,
390 struct spi_transfer
*xfer
)
394 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
396 mdata
->cur_transfer
= xfer
;
397 mdata
->xfer_len
= min(MTK_SPI_MAX_FIFO_SIZE
, xfer
->len
);
398 mdata
->num_xfered
= 0;
399 mtk_spi_prepare_transfer(master
, xfer
);
400 mtk_spi_setup_packet(master
);
403 iowrite32_rep(mdata
->base
+ SPI_TX_DATA_REG
, xfer
->tx_buf
, cnt
);
405 remainder
= xfer
->len
% 4;
408 memcpy(®_val
, xfer
->tx_buf
+ (cnt
* 4), remainder
);
409 writel(reg_val
, mdata
->base
+ SPI_TX_DATA_REG
);
412 mtk_spi_enable_transfer(master
);
417 static int mtk_spi_dma_transfer(struct spi_master
*master
,
418 struct spi_device
*spi
,
419 struct spi_transfer
*xfer
)
422 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
424 mdata
->tx_sgl
= NULL
;
425 mdata
->rx_sgl
= NULL
;
426 mdata
->tx_sgl_len
= 0;
427 mdata
->rx_sgl_len
= 0;
428 mdata
->cur_transfer
= xfer
;
429 mdata
->num_xfered
= 0;
431 mtk_spi_prepare_transfer(master
, xfer
);
433 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
435 cmd
|= SPI_CMD_TX_DMA
;
437 cmd
|= SPI_CMD_RX_DMA
;
438 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
441 mdata
->tx_sgl
= xfer
->tx_sg
.sgl
;
443 mdata
->rx_sgl
= xfer
->rx_sg
.sgl
;
446 xfer
->tx_dma
= sg_dma_address(mdata
->tx_sgl
);
447 mdata
->tx_sgl_len
= sg_dma_len(mdata
->tx_sgl
);
450 xfer
->rx_dma
= sg_dma_address(mdata
->rx_sgl
);
451 mdata
->rx_sgl_len
= sg_dma_len(mdata
->rx_sgl
);
454 mtk_spi_update_mdata_len(master
);
455 mtk_spi_setup_packet(master
);
456 mtk_spi_setup_dma_addr(master
, xfer
);
457 mtk_spi_enable_transfer(master
);
462 static int mtk_spi_transfer_one(struct spi_master
*master
,
463 struct spi_device
*spi
,
464 struct spi_transfer
*xfer
)
466 if (master
->can_dma(master
, spi
, xfer
))
467 return mtk_spi_dma_transfer(master
, spi
, xfer
);
469 return mtk_spi_fifo_transfer(master
, spi
, xfer
);
472 static bool mtk_spi_can_dma(struct spi_master
*master
,
473 struct spi_device
*spi
,
474 struct spi_transfer
*xfer
)
476 /* Buffers for DMA transactions must be 4-byte aligned */
477 return (xfer
->len
> MTK_SPI_MAX_FIFO_SIZE
&&
478 (unsigned long)xfer
->tx_buf
% 4 == 0 &&
479 (unsigned long)xfer
->rx_buf
% 4 == 0);
482 static int mtk_spi_setup(struct spi_device
*spi
)
484 struct mtk_spi
*mdata
= spi_master_get_devdata(spi
->master
);
486 if (!spi
->controller_data
)
487 spi
->controller_data
= (void *)&mtk_default_chip_info
;
489 if (mdata
->dev_comp
->need_pad_sel
&& gpio_is_valid(spi
->cs_gpio
))
490 gpio_direction_output(spi
->cs_gpio
, !(spi
->mode
& SPI_CS_HIGH
));
495 static irqreturn_t
mtk_spi_interrupt(int irq
, void *dev_id
)
497 u32 cmd
, reg_val
, cnt
, remainder
, len
;
498 struct spi_master
*master
= dev_id
;
499 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
500 struct spi_transfer
*trans
= mdata
->cur_transfer
;
502 reg_val
= readl(mdata
->base
+ SPI_STATUS0_REG
);
503 if (reg_val
& MTK_SPI_PAUSE_INT_STATUS
)
504 mdata
->state
= MTK_SPI_PAUSED
;
506 mdata
->state
= MTK_SPI_IDLE
;
508 if (!master
->can_dma(master
, master
->cur_msg
->spi
, trans
)) {
510 cnt
= mdata
->xfer_len
/ 4;
511 ioread32_rep(mdata
->base
+ SPI_RX_DATA_REG
,
512 trans
->rx_buf
+ mdata
->num_xfered
, cnt
);
513 remainder
= mdata
->xfer_len
% 4;
515 reg_val
= readl(mdata
->base
+ SPI_RX_DATA_REG
);
516 memcpy(trans
->rx_buf
+
524 mdata
->num_xfered
+= mdata
->xfer_len
;
525 if (mdata
->num_xfered
== trans
->len
) {
526 spi_finalize_current_transfer(master
);
530 len
= trans
->len
- mdata
->num_xfered
;
531 mdata
->xfer_len
= min(MTK_SPI_MAX_FIFO_SIZE
, len
);
532 mtk_spi_setup_packet(master
);
534 cnt
= mdata
->xfer_len
/ 4;
535 iowrite32_rep(mdata
->base
+ SPI_TX_DATA_REG
,
536 trans
->tx_buf
+ mdata
->num_xfered
, cnt
);
538 remainder
= mdata
->xfer_len
% 4;
542 trans
->tx_buf
+ (cnt
* 4) + mdata
->num_xfered
,
544 writel(reg_val
, mdata
->base
+ SPI_TX_DATA_REG
);
547 mtk_spi_enable_transfer(master
);
553 trans
->tx_dma
+= mdata
->xfer_len
;
555 trans
->rx_dma
+= mdata
->xfer_len
;
557 if (mdata
->tx_sgl
&& (mdata
->tx_sgl_len
== 0)) {
558 mdata
->tx_sgl
= sg_next(mdata
->tx_sgl
);
560 trans
->tx_dma
= sg_dma_address(mdata
->tx_sgl
);
561 mdata
->tx_sgl_len
= sg_dma_len(mdata
->tx_sgl
);
564 if (mdata
->rx_sgl
&& (mdata
->rx_sgl_len
== 0)) {
565 mdata
->rx_sgl
= sg_next(mdata
->rx_sgl
);
567 trans
->rx_dma
= sg_dma_address(mdata
->rx_sgl
);
568 mdata
->rx_sgl_len
= sg_dma_len(mdata
->rx_sgl
);
572 if (!mdata
->tx_sgl
&& !mdata
->rx_sgl
) {
573 /* spi disable dma */
574 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
575 cmd
&= ~SPI_CMD_TX_DMA
;
576 cmd
&= ~SPI_CMD_RX_DMA
;
577 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
579 spi_finalize_current_transfer(master
);
583 mtk_spi_update_mdata_len(master
);
584 mtk_spi_setup_packet(master
);
585 mtk_spi_setup_dma_addr(master
, trans
);
586 mtk_spi_enable_transfer(master
);
591 static int mtk_spi_probe(struct platform_device
*pdev
)
593 struct spi_master
*master
;
594 struct mtk_spi
*mdata
;
595 const struct of_device_id
*of_id
;
596 struct resource
*res
;
599 master
= spi_alloc_master(&pdev
->dev
, sizeof(*mdata
));
601 dev_err(&pdev
->dev
, "failed to alloc spi master\n");
605 master
->auto_runtime_pm
= true;
606 master
->dev
.of_node
= pdev
->dev
.of_node
;
607 master
->mode_bits
= SPI_CPOL
| SPI_CPHA
;
609 master
->set_cs
= mtk_spi_set_cs
;
610 master
->prepare_message
= mtk_spi_prepare_message
;
611 master
->transfer_one
= mtk_spi_transfer_one
;
612 master
->can_dma
= mtk_spi_can_dma
;
613 master
->setup
= mtk_spi_setup
;
615 of_id
= of_match_node(mtk_spi_of_match
, pdev
->dev
.of_node
);
617 dev_err(&pdev
->dev
, "failed to probe of_node\n");
622 mdata
= spi_master_get_devdata(master
);
623 mdata
->dev_comp
= of_id
->data
;
624 if (mdata
->dev_comp
->must_tx
)
625 master
->flags
= SPI_MASTER_MUST_TX
;
627 if (mdata
->dev_comp
->need_pad_sel
) {
628 mdata
->pad_num
= of_property_count_u32_elems(
630 "mediatek,pad-select");
631 if (mdata
->pad_num
< 0) {
633 "No 'mediatek,pad-select' property\n");
638 mdata
->pad_sel
= devm_kmalloc_array(&pdev
->dev
, mdata
->pad_num
,
639 sizeof(u32
), GFP_KERNEL
);
640 if (!mdata
->pad_sel
) {
645 for (i
= 0; i
< mdata
->pad_num
; i
++) {
646 of_property_read_u32_index(pdev
->dev
.of_node
,
647 "mediatek,pad-select",
648 i
, &mdata
->pad_sel
[i
]);
649 if (mdata
->pad_sel
[i
] > MT8173_SPI_MAX_PAD_SEL
) {
650 dev_err(&pdev
->dev
, "wrong pad-sel[%d]: %u\n",
651 i
, mdata
->pad_sel
[i
]);
658 platform_set_drvdata(pdev
, master
);
660 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
663 dev_err(&pdev
->dev
, "failed to determine base address\n");
667 mdata
->base
= devm_ioremap_resource(&pdev
->dev
, res
);
668 if (IS_ERR(mdata
->base
)) {
669 ret
= PTR_ERR(mdata
->base
);
673 irq
= platform_get_irq(pdev
, 0);
675 dev_err(&pdev
->dev
, "failed to get irq (%d)\n", irq
);
680 if (!pdev
->dev
.dma_mask
)
681 pdev
->dev
.dma_mask
= &pdev
->dev
.coherent_dma_mask
;
683 ret
= devm_request_irq(&pdev
->dev
, irq
, mtk_spi_interrupt
,
684 IRQF_TRIGGER_NONE
, dev_name(&pdev
->dev
), master
);
686 dev_err(&pdev
->dev
, "failed to register irq (%d)\n", ret
);
690 mdata
->parent_clk
= devm_clk_get(&pdev
->dev
, "parent-clk");
691 if (IS_ERR(mdata
->parent_clk
)) {
692 ret
= PTR_ERR(mdata
->parent_clk
);
693 dev_err(&pdev
->dev
, "failed to get parent-clk: %d\n", ret
);
697 mdata
->sel_clk
= devm_clk_get(&pdev
->dev
, "sel-clk");
698 if (IS_ERR(mdata
->sel_clk
)) {
699 ret
= PTR_ERR(mdata
->sel_clk
);
700 dev_err(&pdev
->dev
, "failed to get sel-clk: %d\n", ret
);
704 mdata
->spi_clk
= devm_clk_get(&pdev
->dev
, "spi-clk");
705 if (IS_ERR(mdata
->spi_clk
)) {
706 ret
= PTR_ERR(mdata
->spi_clk
);
707 dev_err(&pdev
->dev
, "failed to get spi-clk: %d\n", ret
);
711 ret
= clk_prepare_enable(mdata
->spi_clk
);
713 dev_err(&pdev
->dev
, "failed to enable spi_clk (%d)\n", ret
);
717 ret
= clk_set_parent(mdata
->sel_clk
, mdata
->parent_clk
);
719 dev_err(&pdev
->dev
, "failed to clk_set_parent (%d)\n", ret
);
720 clk_disable_unprepare(mdata
->spi_clk
);
724 clk_disable_unprepare(mdata
->spi_clk
);
726 pm_runtime_enable(&pdev
->dev
);
728 ret
= devm_spi_register_master(&pdev
->dev
, master
);
730 dev_err(&pdev
->dev
, "failed to register master (%d)\n", ret
);
731 goto err_disable_runtime_pm
;
734 if (mdata
->dev_comp
->need_pad_sel
) {
735 if (mdata
->pad_num
!= master
->num_chipselect
) {
737 "pad_num does not match num_chipselect(%d != %d)\n",
738 mdata
->pad_num
, master
->num_chipselect
);
740 goto err_disable_runtime_pm
;
743 if (!master
->cs_gpios
&& master
->num_chipselect
> 1) {
745 "cs_gpios not specified and num_chipselect > 1\n");
747 goto err_disable_runtime_pm
;
750 if (master
->cs_gpios
) {
751 for (i
= 0; i
< master
->num_chipselect
; i
++) {
752 ret
= devm_gpio_request(&pdev
->dev
,
754 dev_name(&pdev
->dev
));
757 "can't get CS GPIO %i\n", i
);
758 goto err_disable_runtime_pm
;
766 err_disable_runtime_pm
:
767 pm_runtime_disable(&pdev
->dev
);
769 spi_master_put(master
);
774 static int mtk_spi_remove(struct platform_device
*pdev
)
776 struct spi_master
*master
= platform_get_drvdata(pdev
);
777 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
779 pm_runtime_disable(&pdev
->dev
);
781 mtk_spi_reset(mdata
);
786 #ifdef CONFIG_PM_SLEEP
787 static int mtk_spi_suspend(struct device
*dev
)
790 struct spi_master
*master
= dev_get_drvdata(dev
);
791 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
793 ret
= spi_master_suspend(master
);
797 if (!pm_runtime_suspended(dev
))
798 clk_disable_unprepare(mdata
->spi_clk
);
803 static int mtk_spi_resume(struct device
*dev
)
806 struct spi_master
*master
= dev_get_drvdata(dev
);
807 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
809 if (!pm_runtime_suspended(dev
)) {
810 ret
= clk_prepare_enable(mdata
->spi_clk
);
812 dev_err(dev
, "failed to enable spi_clk (%d)\n", ret
);
817 ret
= spi_master_resume(master
);
819 clk_disable_unprepare(mdata
->spi_clk
);
823 #endif /* CONFIG_PM_SLEEP */
826 static int mtk_spi_runtime_suspend(struct device
*dev
)
828 struct spi_master
*master
= dev_get_drvdata(dev
);
829 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
831 clk_disable_unprepare(mdata
->spi_clk
);
836 static int mtk_spi_runtime_resume(struct device
*dev
)
838 struct spi_master
*master
= dev_get_drvdata(dev
);
839 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
842 ret
= clk_prepare_enable(mdata
->spi_clk
);
844 dev_err(dev
, "failed to enable spi_clk (%d)\n", ret
);
850 #endif /* CONFIG_PM */
852 static const struct dev_pm_ops mtk_spi_pm
= {
853 SET_SYSTEM_SLEEP_PM_OPS(mtk_spi_suspend
, mtk_spi_resume
)
854 SET_RUNTIME_PM_OPS(mtk_spi_runtime_suspend
,
855 mtk_spi_runtime_resume
, NULL
)
858 static struct platform_driver mtk_spi_driver
= {
862 .of_match_table
= mtk_spi_of_match
,
864 .probe
= mtk_spi_probe
,
865 .remove
= mtk_spi_remove
,
868 module_platform_driver(mtk_spi_driver
);
870 MODULE_DESCRIPTION("MTK SPI Controller driver");
871 MODULE_AUTHOR("Leilk Liu <leilk.liu@mediatek.com>");
872 MODULE_LICENSE("GPL v2");
873 MODULE_ALIAS("platform:mtk-spi");