// SPDX-License-Identifier: GPL-2.0-only
/*
 * Microchip PIC32 SPI controller driver.
 *
 * Purna Chandra Mandal <purna.mandal@microchip.com>
 * Copyright (c) 2016, Microchip Technology Inc.
 */
10 #include <linux/clkdev.h>
11 #include <linux/delay.h>
12 #include <linux/dmaengine.h>
13 #include <linux/dma-mapping.h>
14 #include <linux/highmem.h>
15 #include <linux/module.h>
17 #include <linux/interrupt.h>
19 #include <linux/of_irq.h>
20 #include <linux/of_gpio.h>
21 #include <linux/of_address.h>
22 #include <linux/platform_device.h>
23 #include <linux/spi/spi.h>
25 /* SPI controller registers */
26 struct pic32_spi_regs
{
45 /* Bit fields of SPI Control Register */
46 #define CTRL_RX_INT_SHIFT 0 /* Rx interrupt generation */
47 #define RX_FIFO_EMPTY 0
48 #define RX_FIFO_NOT_EMPTY 1 /* not empty */
49 #define RX_FIFO_HALF_FULL 2 /* full by half or more */
50 #define RX_FIFO_FULL 3 /* completely full */
52 #define CTRL_TX_INT_SHIFT 2 /* TX interrupt generation */
53 #define TX_FIFO_ALL_EMPTY 0 /* completely empty */
54 #define TX_FIFO_EMPTY 1 /* empty */
55 #define TX_FIFO_HALF_EMPTY 2 /* empty by half or more */
56 #define TX_FIFO_NOT_FULL 3 /* atleast one empty */
58 #define CTRL_MSTEN BIT(5) /* enable master mode */
59 #define CTRL_CKP BIT(6) /* active low */
60 #define CTRL_CKE BIT(8) /* Tx on falling edge */
61 #define CTRL_SMP BIT(9) /* Rx at middle or end of tx */
62 #define CTRL_BPW_MASK 0x03 /* bits per word/sample */
63 #define CTRL_BPW_SHIFT 10
65 #define PIC32_BPW_16 1
66 #define PIC32_BPW_32 2
67 #define CTRL_SIDL BIT(13) /* sleep when idle */
68 #define CTRL_ON BIT(15) /* enable macro */
69 #define CTRL_ENHBUF BIT(16) /* enable enhanced buffering */
70 #define CTRL_MCLKSEL BIT(23) /* select clock source */
71 #define CTRL_MSSEN BIT(28) /* macro driven /SS */
72 #define CTRL_FRMEN BIT(31) /* enable framing mode */
74 /* Bit fields of SPI Status Register */
75 #define STAT_RF_EMPTY BIT(5) /* RX Fifo empty */
76 #define STAT_RX_OV BIT(6) /* err, s/w needs to clear */
77 #define STAT_TX_UR BIT(8) /* UR in Framed SPI modes */
78 #define STAT_FRM_ERR BIT(12) /* Multiple Frame Sync pulse */
79 #define STAT_TF_LVL_MASK 0x1F
80 #define STAT_TF_LVL_SHIFT 16
81 #define STAT_RF_LVL_MASK 0x1F
82 #define STAT_RF_LVL_SHIFT 24
84 /* Bit fields of SPI Baud Register */
85 #define BAUD_MASK 0x1ff
87 /* Bit fields of SPI Control2 Register */
88 #define CTRL2_TX_UR_EN BIT(10) /* Enable int on Tx under-run */
89 #define CTRL2_RX_OV_EN BIT(11) /* Enable int on Rx over-run */
90 #define CTRL2_FRM_ERR_EN BIT(12) /* Enable frame err int */
92 /* Minimum DMA transfer size */
93 #define PIC32_DMA_LEN_MIN 64
97 struct pic32_spi_regs __iomem
*regs
;
101 u32 fifo_n_byte
; /* FIFO depth in bytes */
103 struct spi_master
*master
;
104 /* Current controller setting */
105 u32 speed_hz
; /* spi-clk rate */
108 u32 fifo_n_elm
; /* FIFO depth in words */
109 #define PIC32F_DMA_PREP 0 /* DMA chnls configured */
111 /* Current transfer state */
112 struct completion xfer_done
;
113 /* PIO transfer specific */
119 void (*rx_fifo
)(struct pic32_spi
*);
120 void (*tx_fifo
)(struct pic32_spi
*);
123 static inline void pic32_spi_enable(struct pic32_spi
*pic32s
)
125 writel(CTRL_ON
| CTRL_SIDL
, &pic32s
->regs
->ctrl_set
);
128 static inline void pic32_spi_disable(struct pic32_spi
*pic32s
)
130 writel(CTRL_ON
| CTRL_SIDL
, &pic32s
->regs
->ctrl_clr
);
132 /* avoid SPI registers read/write at immediate next CPU clock */
136 static void pic32_spi_set_clk_rate(struct pic32_spi
*pic32s
, u32 spi_ck
)
140 /* div = (clk_in / 2 * spi_ck) - 1 */
141 div
= DIV_ROUND_CLOSEST(clk_get_rate(pic32s
->clk
), 2 * spi_ck
) - 1;
143 writel(div
& BAUD_MASK
, &pic32s
->regs
->baud
);
146 static inline u32
pic32_rx_fifo_level(struct pic32_spi
*pic32s
)
148 u32 sr
= readl(&pic32s
->regs
->status
);
150 return (sr
>> STAT_RF_LVL_SHIFT
) & STAT_RF_LVL_MASK
;
153 static inline u32
pic32_tx_fifo_level(struct pic32_spi
*pic32s
)
155 u32 sr
= readl(&pic32s
->regs
->status
);
157 return (sr
>> STAT_TF_LVL_SHIFT
) & STAT_TF_LVL_MASK
;
160 /* Return the max entries we can fill into tx fifo */
161 static u32
pic32_tx_max(struct pic32_spi
*pic32s
, int n_bytes
)
163 u32 tx_left
, tx_room
, rxtx_gap
;
165 tx_left
= (pic32s
->tx_end
- pic32s
->tx
) / n_bytes
;
166 tx_room
= pic32s
->fifo_n_elm
- pic32_tx_fifo_level(pic32s
);
169 * Another concern is about the tx/rx mismatch, we
170 * though to use (pic32s->fifo_n_byte - rxfl - txfl) as
171 * one maximum value for tx, but it doesn't cover the
172 * data which is out of tx/rx fifo and inside the
173 * shift registers. So a ctrl from sw point of
176 rxtx_gap
= ((pic32s
->rx_end
- pic32s
->rx
) -
177 (pic32s
->tx_end
- pic32s
->tx
)) / n_bytes
;
178 return min3(tx_left
, tx_room
, (u32
)(pic32s
->fifo_n_elm
- rxtx_gap
));
181 /* Return the max entries we should read out of rx fifo */
182 static u32
pic32_rx_max(struct pic32_spi
*pic32s
, int n_bytes
)
184 u32 rx_left
= (pic32s
->rx_end
- pic32s
->rx
) / n_bytes
;
186 return min_t(u32
, rx_left
, pic32_rx_fifo_level(pic32s
));
189 #define BUILD_SPI_FIFO_RW(__name, __type, __bwl) \
190 static void pic32_spi_rx_##__name(struct pic32_spi *pic32s) \
193 u32 mx = pic32_rx_max(pic32s, sizeof(__type)); \
195 v = read##__bwl(&pic32s->regs->buf); \
196 if (pic32s->rx_end - pic32s->len) \
197 *(__type *)(pic32s->rx) = v; \
198 pic32s->rx += sizeof(__type); \
202 static void pic32_spi_tx_##__name(struct pic32_spi *pic32s) \
205 u32 mx = pic32_tx_max(pic32s, sizeof(__type)); \
206 for (; mx ; mx--) { \
208 if (pic32s->tx_end - pic32s->len) \
209 v = *(__type *)(pic32s->tx); \
210 write##__bwl(v, &pic32s->regs->buf); \
211 pic32s->tx += sizeof(__type); \
215 BUILD_SPI_FIFO_RW(byte
, u8
, b
);
216 BUILD_SPI_FIFO_RW(word
, u16
, w
);
217 BUILD_SPI_FIFO_RW(dword
, u32
, l
);
219 static void pic32_err_stop(struct pic32_spi
*pic32s
, const char *msg
)
221 /* disable all interrupts */
222 disable_irq_nosync(pic32s
->fault_irq
);
223 disable_irq_nosync(pic32s
->rx_irq
);
224 disable_irq_nosync(pic32s
->tx_irq
);
226 /* Show err message and abort xfer with err */
227 dev_err(&pic32s
->master
->dev
, "%s\n", msg
);
228 if (pic32s
->master
->cur_msg
)
229 pic32s
->master
->cur_msg
->status
= -EIO
;
230 complete(&pic32s
->xfer_done
);
233 static irqreturn_t
pic32_spi_fault_irq(int irq
, void *dev_id
)
235 struct pic32_spi
*pic32s
= dev_id
;
238 status
= readl(&pic32s
->regs
->status
);
241 if (status
& (STAT_RX_OV
| STAT_TX_UR
)) {
242 writel(STAT_RX_OV
, &pic32s
->regs
->status_clr
);
243 writel(STAT_TX_UR
, &pic32s
->regs
->status_clr
);
244 pic32_err_stop(pic32s
, "err_irq: fifo ov/ur-run\n");
248 if (status
& STAT_FRM_ERR
) {
249 pic32_err_stop(pic32s
, "err_irq: frame error");
253 if (!pic32s
->master
->cur_msg
) {
254 pic32_err_stop(pic32s
, "err_irq: no mesg");
261 static irqreturn_t
pic32_spi_rx_irq(int irq
, void *dev_id
)
263 struct pic32_spi
*pic32s
= dev_id
;
265 pic32s
->rx_fifo(pic32s
);
268 if (pic32s
->rx_end
== pic32s
->rx
) {
269 /* disable all interrupts */
270 disable_irq_nosync(pic32s
->fault_irq
);
271 disable_irq_nosync(pic32s
->rx_irq
);
273 /* complete current xfer */
274 complete(&pic32s
->xfer_done
);
280 static irqreturn_t
pic32_spi_tx_irq(int irq
, void *dev_id
)
282 struct pic32_spi
*pic32s
= dev_id
;
284 pic32s
->tx_fifo(pic32s
);
286 /* tx complete? disable tx interrupt */
287 if (pic32s
->tx_end
== pic32s
->tx
)
288 disable_irq_nosync(pic32s
->tx_irq
);
293 static void pic32_spi_dma_rx_notify(void *data
)
295 struct pic32_spi
*pic32s
= data
;
297 complete(&pic32s
->xfer_done
);
300 static int pic32_spi_dma_transfer(struct pic32_spi
*pic32s
,
301 struct spi_transfer
*xfer
)
303 struct spi_master
*master
= pic32s
->master
;
304 struct dma_async_tx_descriptor
*desc_rx
;
305 struct dma_async_tx_descriptor
*desc_tx
;
309 if (!master
->dma_rx
|| !master
->dma_tx
)
312 desc_rx
= dmaengine_prep_slave_sg(master
->dma_rx
,
316 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
322 desc_tx
= dmaengine_prep_slave_sg(master
->dma_tx
,
326 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
332 /* Put callback on the RX transfer, that should finish last */
333 desc_rx
->callback
= pic32_spi_dma_rx_notify
;
334 desc_rx
->callback_param
= pic32s
;
336 cookie
= dmaengine_submit(desc_rx
);
337 ret
= dma_submit_error(cookie
);
341 cookie
= dmaengine_submit(desc_tx
);
342 ret
= dma_submit_error(cookie
);
346 dma_async_issue_pending(master
->dma_rx
);
347 dma_async_issue_pending(master
->dma_tx
);
352 dmaengine_terminate_all(master
->dma_rx
);
357 static int pic32_spi_dma_config(struct pic32_spi
*pic32s
, u32 dma_width
)
359 int buf_offset
= offsetof(struct pic32_spi_regs
, buf
);
360 struct spi_master
*master
= pic32s
->master
;
361 struct dma_slave_config cfg
;
364 memset(&cfg
, 0, sizeof(cfg
));
365 cfg
.device_fc
= true;
366 cfg
.src_addr
= pic32s
->dma_base
+ buf_offset
;
367 cfg
.dst_addr
= pic32s
->dma_base
+ buf_offset
;
368 cfg
.src_maxburst
= pic32s
->fifo_n_elm
/ 2; /* fill one-half */
369 cfg
.dst_maxburst
= pic32s
->fifo_n_elm
/ 2; /* drain one-half */
370 cfg
.src_addr_width
= dma_width
;
371 cfg
.dst_addr_width
= dma_width
;
373 cfg
.slave_id
= pic32s
->tx_irq
;
374 cfg
.direction
= DMA_MEM_TO_DEV
;
375 ret
= dmaengine_slave_config(master
->dma_tx
, &cfg
);
377 dev_err(&master
->dev
, "tx channel setup failed\n");
381 cfg
.slave_id
= pic32s
->rx_irq
;
382 cfg
.direction
= DMA_DEV_TO_MEM
;
383 ret
= dmaengine_slave_config(master
->dma_rx
, &cfg
);
385 dev_err(&master
->dev
, "rx channel setup failed\n");
390 static int pic32_spi_set_word_size(struct pic32_spi
*pic32s
, u8 bits_per_word
)
392 enum dma_slave_buswidth dmawidth
;
395 switch (bits_per_word
) {
397 pic32s
->rx_fifo
= pic32_spi_rx_byte
;
398 pic32s
->tx_fifo
= pic32_spi_tx_byte
;
399 buswidth
= PIC32_BPW_8
;
400 dmawidth
= DMA_SLAVE_BUSWIDTH_1_BYTE
;
403 pic32s
->rx_fifo
= pic32_spi_rx_word
;
404 pic32s
->tx_fifo
= pic32_spi_tx_word
;
405 buswidth
= PIC32_BPW_16
;
406 dmawidth
= DMA_SLAVE_BUSWIDTH_2_BYTES
;
409 pic32s
->rx_fifo
= pic32_spi_rx_dword
;
410 pic32s
->tx_fifo
= pic32_spi_tx_dword
;
411 buswidth
= PIC32_BPW_32
;
412 dmawidth
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
419 /* calculate maximum number of words fifos can hold */
420 pic32s
->fifo_n_elm
= DIV_ROUND_UP(pic32s
->fifo_n_byte
,
423 v
= readl(&pic32s
->regs
->ctrl
);
424 v
&= ~(CTRL_BPW_MASK
<< CTRL_BPW_SHIFT
);
425 v
|= buswidth
<< CTRL_BPW_SHIFT
;
426 writel(v
, &pic32s
->regs
->ctrl
);
428 /* re-configure dma width, if required */
429 if (test_bit(PIC32F_DMA_PREP
, &pic32s
->flags
))
430 pic32_spi_dma_config(pic32s
, dmawidth
);
/* spi core callback: power up the controller before a message burst. */
static int pic32_spi_prepare_hardware(struct spi_master *master)
{
	struct pic32_spi *pic32s = spi_master_get_devdata(master);

	pic32_spi_enable(pic32s);

	return 0;
}
444 static int pic32_spi_prepare_message(struct spi_master
*master
,
445 struct spi_message
*msg
)
447 struct pic32_spi
*pic32s
= spi_master_get_devdata(master
);
448 struct spi_device
*spi
= msg
->spi
;
451 /* set device specific bits_per_word */
452 if (pic32s
->bits_per_word
!= spi
->bits_per_word
) {
453 pic32_spi_set_word_size(pic32s
, spi
->bits_per_word
);
454 pic32s
->bits_per_word
= spi
->bits_per_word
;
457 /* device specific speed change */
458 if (pic32s
->speed_hz
!= spi
->max_speed_hz
) {
459 pic32_spi_set_clk_rate(pic32s
, spi
->max_speed_hz
);
460 pic32s
->speed_hz
= spi
->max_speed_hz
;
463 /* device specific mode change */
464 if (pic32s
->mode
!= spi
->mode
) {
465 val
= readl(&pic32s
->regs
->ctrl
);
467 if (spi
->mode
& SPI_CPOL
)
471 /* tx on rising edge */
472 if (spi
->mode
& SPI_CPHA
)
477 /* rx at end of tx */
479 writel(val
, &pic32s
->regs
->ctrl
);
480 pic32s
->mode
= spi
->mode
;
486 static bool pic32_spi_can_dma(struct spi_master
*master
,
487 struct spi_device
*spi
,
488 struct spi_transfer
*xfer
)
490 struct pic32_spi
*pic32s
= spi_master_get_devdata(master
);
492 /* skip using DMA on small size transfer to avoid overhead.*/
493 return (xfer
->len
>= PIC32_DMA_LEN_MIN
) &&
494 test_bit(PIC32F_DMA_PREP
, &pic32s
->flags
);
497 static int pic32_spi_one_transfer(struct spi_master
*master
,
498 struct spi_device
*spi
,
499 struct spi_transfer
*transfer
)
501 struct pic32_spi
*pic32s
;
502 bool dma_issued
= false;
503 unsigned long timeout
;
506 pic32s
= spi_master_get_devdata(master
);
508 /* handle transfer specific word size change */
509 if (transfer
->bits_per_word
&&
510 (transfer
->bits_per_word
!= pic32s
->bits_per_word
)) {
511 ret
= pic32_spi_set_word_size(pic32s
, transfer
->bits_per_word
);
514 pic32s
->bits_per_word
= transfer
->bits_per_word
;
517 /* handle transfer specific speed change */
518 if (transfer
->speed_hz
&& (transfer
->speed_hz
!= pic32s
->speed_hz
)) {
519 pic32_spi_set_clk_rate(pic32s
, transfer
->speed_hz
);
520 pic32s
->speed_hz
= transfer
->speed_hz
;
523 reinit_completion(&pic32s
->xfer_done
);
525 /* transact by DMA mode */
526 if (transfer
->rx_sg
.nents
&& transfer
->tx_sg
.nents
) {
527 ret
= pic32_spi_dma_transfer(pic32s
, transfer
);
529 dev_err(&spi
->dev
, "dma submit error\n");
536 /* set current transfer information */
537 pic32s
->tx
= (const void *)transfer
->tx_buf
;
538 pic32s
->rx
= (const void *)transfer
->rx_buf
;
539 pic32s
->tx_end
= pic32s
->tx
+ transfer
->len
;
540 pic32s
->rx_end
= pic32s
->rx
+ transfer
->len
;
541 pic32s
->len
= transfer
->len
;
543 /* transact by interrupt driven PIO */
544 enable_irq(pic32s
->fault_irq
);
545 enable_irq(pic32s
->rx_irq
);
546 enable_irq(pic32s
->tx_irq
);
549 /* wait for completion */
550 timeout
= wait_for_completion_timeout(&pic32s
->xfer_done
, 2 * HZ
);
552 dev_err(&spi
->dev
, "wait error/timedout\n");
554 dmaengine_terminate_all(master
->dma_rx
);
555 dmaengine_terminate_all(master
->dma_tx
);
/* spi core callback: nothing to tear down per message. */
static int pic32_spi_unprepare_message(struct spi_master *master,
				       struct spi_message *msg)
{
	/* nothing to do */
	return 0;
}
/* spi core callback: power the controller down after a message burst. */
static int pic32_spi_unprepare_hardware(struct spi_master *master)
{
	struct pic32_spi *pic32s = spi_master_get_devdata(master);

	pic32_spi_disable(pic32s);

	return 0;
}
581 /* This may be called multiple times by same spi dev */
582 static int pic32_spi_setup(struct spi_device
*spi
)
584 if (!spi
->max_speed_hz
) {
585 dev_err(&spi
->dev
, "No max speed HZ parameter\n");
589 /* PIC32 spi controller can drive /CS during transfer depending
590 * on tx fifo fill-level. /CS will stay asserted as long as TX
591 * fifo is non-empty, else will be deasserted indicating
592 * completion of the ongoing transfer. This might result into
593 * unreliable/erroneous SPI transactions.
594 * To avoid that we will always handle /CS by toggling GPIO.
596 if (!gpio_is_valid(spi
->cs_gpio
))
599 gpio_direction_output(spi
->cs_gpio
, !(spi
->mode
& SPI_CS_HIGH
));
604 static void pic32_spi_cleanup(struct spi_device
*spi
)
606 /* de-activate cs-gpio */
607 gpio_direction_output(spi
->cs_gpio
, !(spi
->mode
& SPI_CS_HIGH
));
610 static int pic32_spi_dma_prep(struct pic32_spi
*pic32s
, struct device
*dev
)
612 struct spi_master
*master
= pic32s
->master
;
615 master
->dma_rx
= dma_request_chan(dev
, "spi-rx");
616 if (IS_ERR(master
->dma_rx
)) {
617 if (PTR_ERR(master
->dma_rx
) == -EPROBE_DEFER
)
620 dev_warn(dev
, "RX channel not found.\n");
622 master
->dma_rx
= NULL
;
626 master
->dma_tx
= dma_request_chan(dev
, "spi-tx");
627 if (IS_ERR(master
->dma_tx
)) {
628 if (PTR_ERR(master
->dma_tx
) == -EPROBE_DEFER
)
631 dev_warn(dev
, "TX channel not found.\n");
633 master
->dma_tx
= NULL
;
637 if (pic32_spi_dma_config(pic32s
, DMA_SLAVE_BUSWIDTH_1_BYTE
))
640 /* DMA chnls allocated and prepared */
641 set_bit(PIC32F_DMA_PREP
, &pic32s
->flags
);
646 if (master
->dma_rx
) {
647 dma_release_channel(master
->dma_rx
);
648 master
->dma_rx
= NULL
;
651 if (master
->dma_tx
) {
652 dma_release_channel(master
->dma_tx
);
653 master
->dma_tx
= NULL
;
659 static void pic32_spi_dma_unprep(struct pic32_spi
*pic32s
)
661 if (!test_bit(PIC32F_DMA_PREP
, &pic32s
->flags
))
664 clear_bit(PIC32F_DMA_PREP
, &pic32s
->flags
);
665 if (pic32s
->master
->dma_rx
)
666 dma_release_channel(pic32s
->master
->dma_rx
);
668 if (pic32s
->master
->dma_tx
)
669 dma_release_channel(pic32s
->master
->dma_tx
);
672 static void pic32_spi_hw_init(struct pic32_spi
*pic32s
)
676 /* disable hardware */
677 pic32_spi_disable(pic32s
);
679 ctrl
= readl(&pic32s
->regs
->ctrl
);
680 /* enable enhanced fifo of 128bit deep */
682 pic32s
->fifo_n_byte
= 16;
684 /* disable framing mode */
687 /* enable master mode while disabled */
690 /* set tx fifo threshold interrupt */
691 ctrl
&= ~(0x3 << CTRL_TX_INT_SHIFT
);
692 ctrl
|= (TX_FIFO_HALF_EMPTY
<< CTRL_TX_INT_SHIFT
);
694 /* set rx fifo threshold interrupt */
695 ctrl
&= ~(0x3 << CTRL_RX_INT_SHIFT
);
696 ctrl
|= (RX_FIFO_NOT_EMPTY
<< CTRL_RX_INT_SHIFT
);
698 /* select clk source */
699 ctrl
&= ~CTRL_MCLKSEL
;
701 /* set manual /CS mode */
704 writel(ctrl
, &pic32s
->regs
->ctrl
);
706 /* enable error reporting */
707 ctrl
= CTRL2_TX_UR_EN
| CTRL2_RX_OV_EN
| CTRL2_FRM_ERR_EN
;
708 writel(ctrl
, &pic32s
->regs
->ctrl2_set
);
711 static int pic32_spi_hw_probe(struct platform_device
*pdev
,
712 struct pic32_spi
*pic32s
)
714 struct resource
*mem
;
717 mem
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
718 pic32s
->regs
= devm_ioremap_resource(&pdev
->dev
, mem
);
719 if (IS_ERR(pic32s
->regs
))
720 return PTR_ERR(pic32s
->regs
);
722 pic32s
->dma_base
= mem
->start
;
724 /* get irq resources: err-irq, rx-irq, tx-irq */
725 pic32s
->fault_irq
= platform_get_irq_byname(pdev
, "fault");
726 if (pic32s
->fault_irq
< 0)
727 return pic32s
->fault_irq
;
729 pic32s
->rx_irq
= platform_get_irq_byname(pdev
, "rx");
730 if (pic32s
->rx_irq
< 0)
731 return pic32s
->rx_irq
;
733 pic32s
->tx_irq
= platform_get_irq_byname(pdev
, "tx");
734 if (pic32s
->tx_irq
< 0)
735 return pic32s
->tx_irq
;
738 pic32s
->clk
= devm_clk_get(&pdev
->dev
, "mck0");
739 if (IS_ERR(pic32s
->clk
)) {
740 dev_err(&pdev
->dev
, "clk not found\n");
741 ret
= PTR_ERR(pic32s
->clk
);
745 ret
= clk_prepare_enable(pic32s
->clk
);
749 pic32_spi_hw_init(pic32s
);
754 dev_err(&pdev
->dev
, "%s failed, err %d\n", __func__
, ret
);
758 static int pic32_spi_probe(struct platform_device
*pdev
)
760 struct spi_master
*master
;
761 struct pic32_spi
*pic32s
;
764 master
= spi_alloc_master(&pdev
->dev
, sizeof(*pic32s
));
768 pic32s
= spi_master_get_devdata(master
);
769 pic32s
->master
= master
;
771 ret
= pic32_spi_hw_probe(pdev
, pic32s
);
775 master
->dev
.of_node
= pdev
->dev
.of_node
;
776 master
->mode_bits
= SPI_MODE_3
| SPI_MODE_0
| SPI_CS_HIGH
;
777 master
->num_chipselect
= 1; /* single chip-select */
778 master
->max_speed_hz
= clk_get_rate(pic32s
->clk
);
779 master
->setup
= pic32_spi_setup
;
780 master
->cleanup
= pic32_spi_cleanup
;
781 master
->flags
= SPI_MASTER_MUST_TX
| SPI_MASTER_MUST_RX
;
782 master
->bits_per_word_mask
= SPI_BPW_MASK(8) | SPI_BPW_MASK(16) |
784 master
->transfer_one
= pic32_spi_one_transfer
;
785 master
->prepare_message
= pic32_spi_prepare_message
;
786 master
->unprepare_message
= pic32_spi_unprepare_message
;
787 master
->prepare_transfer_hardware
= pic32_spi_prepare_hardware
;
788 master
->unprepare_transfer_hardware
= pic32_spi_unprepare_hardware
;
790 /* optional DMA support */
791 ret
= pic32_spi_dma_prep(pic32s
, &pdev
->dev
);
795 if (test_bit(PIC32F_DMA_PREP
, &pic32s
->flags
))
796 master
->can_dma
= pic32_spi_can_dma
;
798 init_completion(&pic32s
->xfer_done
);
801 /* install irq handlers (with irq-disabled) */
802 irq_set_status_flags(pic32s
->fault_irq
, IRQ_NOAUTOEN
);
803 ret
= devm_request_irq(&pdev
->dev
, pic32s
->fault_irq
,
804 pic32_spi_fault_irq
, IRQF_NO_THREAD
,
805 dev_name(&pdev
->dev
), pic32s
);
807 dev_err(&pdev
->dev
, "request fault-irq %d\n", pic32s
->rx_irq
);
811 /* receive interrupt handler */
812 irq_set_status_flags(pic32s
->rx_irq
, IRQ_NOAUTOEN
);
813 ret
= devm_request_irq(&pdev
->dev
, pic32s
->rx_irq
,
814 pic32_spi_rx_irq
, IRQF_NO_THREAD
,
815 dev_name(&pdev
->dev
), pic32s
);
817 dev_err(&pdev
->dev
, "request rx-irq %d\n", pic32s
->rx_irq
);
821 /* transmit interrupt handler */
822 irq_set_status_flags(pic32s
->tx_irq
, IRQ_NOAUTOEN
);
823 ret
= devm_request_irq(&pdev
->dev
, pic32s
->tx_irq
,
824 pic32_spi_tx_irq
, IRQF_NO_THREAD
,
825 dev_name(&pdev
->dev
), pic32s
);
827 dev_err(&pdev
->dev
, "request tx-irq %d\n", pic32s
->tx_irq
);
831 /* register master */
832 ret
= devm_spi_register_master(&pdev
->dev
, master
);
834 dev_err(&master
->dev
, "failed registering spi master\n");
838 platform_set_drvdata(pdev
, pic32s
);
843 pic32_spi_dma_unprep(pic32s
);
844 clk_disable_unprepare(pic32s
->clk
);
846 spi_master_put(master
);
850 static int pic32_spi_remove(struct platform_device
*pdev
)
852 struct pic32_spi
*pic32s
;
854 pic32s
= platform_get_drvdata(pdev
);
855 pic32_spi_disable(pic32s
);
856 clk_disable_unprepare(pic32s
->clk
);
857 pic32_spi_dma_unprep(pic32s
);
862 static const struct of_device_id pic32_spi_of_match
[] = {
863 {.compatible
= "microchip,pic32mzda-spi",},
866 MODULE_DEVICE_TABLE(of
, pic32_spi_of_match
);
868 static struct platform_driver pic32_spi_driver
= {
871 .of_match_table
= of_match_ptr(pic32_spi_of_match
),
873 .probe
= pic32_spi_probe
,
874 .remove
= pic32_spi_remove
,
877 module_platform_driver(pic32_spi_driver
);
879 MODULE_AUTHOR("Purna Chandra Mandal <purna.mandal@microchip.com>");
880 MODULE_DESCRIPTION("Microchip SPI driver for PIC32 SPI controller.");
881 MODULE_LICENSE("GPL v2");