1 /*
2 * SPI bus driver for CSR SiRFprimaII
3 *
4 * Copyright (c) 2011 Cambridge Silicon Radio Limited, a CSR plc group company.
5 *
6 * Licensed under GPLv2 or later.
7 */
8
9 #include <linux/module.h>
10 #include <linux/kernel.h>
11 #include <linux/slab.h>
12 #include <linux/clk.h>
13 #include <linux/completion.h>
14 #include <linux/interrupt.h>
15 #include <linux/io.h>
16 #include <linux/of.h>
17 #include <linux/bitops.h>
18 #include <linux/err.h>
19 #include <linux/platform_device.h>
20 #include <linux/of_gpio.h>
21 #include <linux/spi/spi.h>
22 #include <linux/spi/spi_bitbang.h>
23 #include <linux/dmaengine.h>
24 #include <linux/dma-direction.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/reset.h>
27
28 #define DRIVER_NAME "sirfsoc_spi"
29 /* SPI CTRL register defines */
30 #define SIRFSOC_SPI_SLV_MODE BIT(16)
31 #define SIRFSOC_SPI_CMD_MODE BIT(17)
32 #define SIRFSOC_SPI_CS_IO_OUT BIT(18)
33 #define SIRFSOC_SPI_CS_IO_MODE BIT(19)
34 #define SIRFSOC_SPI_CLK_IDLE_STAT BIT(20)
35 #define SIRFSOC_SPI_CS_IDLE_STAT BIT(21)
36 #define SIRFSOC_SPI_TRAN_MSB BIT(22)
37 #define SIRFSOC_SPI_DRV_POS_EDGE BIT(23)
38 #define SIRFSOC_SPI_CS_HOLD_TIME BIT(24)
39 #define SIRFSOC_SPI_CLK_SAMPLE_MODE BIT(25)
40 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_8 (0 << 26)
41 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_12 (1 << 26)
42 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_16 (2 << 26)
43 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_32 (3 << 26)
44 #define SIRFSOC_SPI_CMD_BYTE_NUM(x) ((x & 3) << 28)
45 #define SIRFSOC_SPI_ENA_AUTO_CLR BIT(30)
46 #define SIRFSOC_SPI_MUL_DAT_MODE BIT(31)
47
48 /* Interrupt Enable */
49 #define SIRFSOC_SPI_RX_DONE_INT_EN BIT(0)
50 #define SIRFSOC_SPI_TX_DONE_INT_EN BIT(1)
51 #define SIRFSOC_SPI_RX_OFLOW_INT_EN BIT(2)
52 #define SIRFSOC_SPI_TX_UFLOW_INT_EN BIT(3)
53 #define SIRFSOC_SPI_RX_IO_DMA_INT_EN BIT(4)
54 #define SIRFSOC_SPI_TX_IO_DMA_INT_EN BIT(5)
55 #define SIRFSOC_SPI_RXFIFO_FULL_INT_EN BIT(6)
56 #define SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN BIT(7)
57 #define SIRFSOC_SPI_RXFIFO_THD_INT_EN BIT(8)
58 #define SIRFSOC_SPI_TXFIFO_THD_INT_EN BIT(9)
59 #define SIRFSOC_SPI_FRM_END_INT_EN BIT(10)
60
61 /* Interrupt status */
62 #define SIRFSOC_SPI_RX_DONE BIT(0)
63 #define SIRFSOC_SPI_TX_DONE BIT(1)
64 #define SIRFSOC_SPI_RX_OFLOW BIT(2)
65 #define SIRFSOC_SPI_TX_UFLOW BIT(3)
66 #define SIRFSOC_SPI_RX_IO_DMA BIT(4)
67 #define SIRFSOC_SPI_RX_FIFO_FULL BIT(6)
68 #define SIRFSOC_SPI_TXFIFO_EMPTY BIT(7)
69 #define SIRFSOC_SPI_RXFIFO_THD_REACH BIT(8)
70 #define SIRFSOC_SPI_TXFIFO_THD_REACH BIT(9)
71 #define SIRFSOC_SPI_FRM_END BIT(10)
72
73 /* TX RX enable */
74 #define SIRFSOC_SPI_RX_EN BIT(0)
75 #define SIRFSOC_SPI_TX_EN BIT(1)
76 #define SIRFSOC_SPI_CMD_TX_EN BIT(2)
77
78 #define SIRFSOC_SPI_IO_MODE_SEL BIT(0)
79 #define SIRFSOC_SPI_RX_DMA_FLUSH BIT(2)
80
81 /* FIFO OPs */
82 #define SIRFSOC_SPI_FIFO_RESET BIT(0)
83 #define SIRFSOC_SPI_FIFO_START BIT(1)
84
85 /* FIFO CTRL */
86 #define SIRFSOC_SPI_FIFO_WIDTH_BYTE (0 << 0)
87 #define SIRFSOC_SPI_FIFO_WIDTH_WORD (1 << 0)
88 #define SIRFSOC_SPI_FIFO_WIDTH_DWORD (2 << 0)
89 /* USP related */
90 #define SIRFSOC_USP_SYNC_MODE BIT(0)
91 #define SIRFSOC_USP_SLV_MODE BIT(1)
92 #define SIRFSOC_USP_LSB BIT(4)
93 #define SIRFSOC_USP_EN BIT(5)
94 #define SIRFSOC_USP_RXD_FALLING_EDGE BIT(6)
95 #define SIRFSOC_USP_TXD_FALLING_EDGE BIT(7)
96 #define SIRFSOC_USP_CS_HIGH_VALID BIT(9)
97 #define SIRFSOC_USP_SCLK_IDLE_STAT BIT(11)
98 #define SIRFSOC_USP_TFS_IO_MODE BIT(14)
99 #define SIRFSOC_USP_TFS_IO_INPUT BIT(19)
100
101 #define SIRFSOC_USP_RXD_DELAY_LEN_MASK 0xFF
102 #define SIRFSOC_USP_TXD_DELAY_LEN_MASK 0xFF
103 #define SIRFSOC_USP_RXD_DELAY_OFFSET 0
104 #define SIRFSOC_USP_TXD_DELAY_OFFSET 8
105 #define SIRFSOC_USP_RXD_DELAY_LEN 1
106 #define SIRFSOC_USP_TXD_DELAY_LEN 1
107 #define SIRFSOC_USP_CLK_DIVISOR_OFFSET 21
108 #define SIRFSOC_USP_CLK_DIVISOR_MASK 0x3FF
109 #define SIRFSOC_USP_CLK_10_11_MASK 0x3
110 #define SIRFSOC_USP_CLK_10_11_OFFSET 30
111 #define SIRFSOC_USP_CLK_12_15_MASK 0xF
112 #define SIRFSOC_USP_CLK_12_15_OFFSET 24
113
114 #define SIRFSOC_USP_TX_DATA_OFFSET 0
115 #define SIRFSOC_USP_TX_SYNC_OFFSET 8
116 #define SIRFSOC_USP_TX_FRAME_OFFSET 16
117 #define SIRFSOC_USP_TX_SHIFTER_OFFSET 24
118
119 #define SIRFSOC_USP_TX_DATA_MASK 0xFF
120 #define SIRFSOC_USP_TX_SYNC_MASK 0xFF
121 #define SIRFSOC_USP_TX_FRAME_MASK 0xFF
122 #define SIRFSOC_USP_TX_SHIFTER_MASK 0x1F
123
124 #define SIRFSOC_USP_RX_DATA_OFFSET 0
125 #define SIRFSOC_USP_RX_FRAME_OFFSET 8
126 #define SIRFSOC_USP_RX_SHIFTER_OFFSET 16
127
128 #define SIRFSOC_USP_RX_DATA_MASK 0xFF
129 #define SIRFSOC_USP_RX_FRAME_MASK 0xFF
130 #define SIRFSOC_USP_RX_SHIFTER_MASK 0x1F
131 #define SIRFSOC_USP_CS_HIGH_VALUE BIT(1)
132
133 #define SIRFSOC_SPI_FIFO_SC_OFFSET 0
134 #define SIRFSOC_SPI_FIFO_LC_OFFSET 10
135 #define SIRFSOC_SPI_FIFO_HC_OFFSET 20
136
137 #define SIRFSOC_SPI_FIFO_FULL_MASK(s) (1 << ((s)->fifo_full_offset))
138 #define SIRFSOC_SPI_FIFO_EMPTY_MASK(s) (1 << ((s)->fifo_full_offset + 1))
139 #define SIRFSOC_SPI_FIFO_THD_MASK(s) ((s)->fifo_size - 1)
140 #define SIRFSOC_SPI_FIFO_THD_OFFSET 2
141 #define SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(s, val) \
142 ((val) & (s)->fifo_level_chk_mask)
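/*
 * Illustration derived from the compatible data and probe code below (not in
 * the original source): the prima2 SPI has fifo_size = 256, so
 * fifo_level_chk_mask becomes 256 / 4 - 1 = 63 and the level-check values
 * written to the txfifo/rxfifo_level_chk registers are masked to 6 bits.
 */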
143
144 enum sirf_spi_type {
145 SIRF_REAL_SPI,
146 SIRF_USP_SPI_P2,
147 SIRF_USP_SPI_A7,
148 };
149
150 /*
151  * Due to a limitation of the DMA controller, we use DMA only when the rx/tx
152  * buffers and the transfer length are all 4-byte aligned (and len < 2 * PAGE_SIZE).
153 */
154
155 #define ALIGNED(x) (!((u32)x & 0x3))
156 #define IS_DMA_VALID(x) (x && ALIGNED(x->tx_buf) && ALIGNED(x->rx_buf) && \
157 ALIGNED(x->len) && (x->len < 2 * PAGE_SIZE))
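/*
 * Worked example (not in the original source): a 6-byte transfer fails the
 * length check above (6 & 0x3 != 0) and therefore takes the PIO path, even
 * if both the tx and rx buffers are word aligned.
 */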
158
159 #define SIRFSOC_MAX_CMD_BYTES 4
160 #define SIRFSOC_SPI_DEFAULT_FRQ 1000000
161
162 struct sirf_spi_register {
163 	/* SPI and USP-SPI common */
164 u32 tx_rx_en;
165 u32 int_en;
166 u32 int_st;
167 u32 tx_dma_io_ctrl;
168 u32 tx_dma_io_len;
169 u32 txfifo_ctrl;
170 u32 txfifo_level_chk;
171 u32 txfifo_op;
172 u32 txfifo_st;
173 u32 txfifo_data;
174 u32 rx_dma_io_ctrl;
175 u32 rx_dma_io_len;
176 u32 rxfifo_ctrl;
177 u32 rxfifo_level_chk;
178 u32 rxfifo_op;
179 u32 rxfifo_st;
180 u32 rxfifo_data;
181 	/* SPI self */
182 u32 spi_ctrl;
183 u32 spi_cmd;
184 u32 spi_dummy_delay_ctrl;
185 	/* USP-SPI self */
186 u32 usp_mode1;
187 u32 usp_mode2;
188 u32 usp_tx_frame_ctrl;
189 u32 usp_rx_frame_ctrl;
190 u32 usp_pin_io_data;
191 u32 usp_risc_dsp_mode;
192 u32 usp_async_param_reg;
193 u32 usp_irda_x_mode_div;
194 u32 usp_sm_cfg;
195 u32 usp_int_en_clr;
196 };
197
198 static const struct sirf_spi_register real_spi_register = {
199 .tx_rx_en = 0x8,
200 .int_en = 0xc,
201 .int_st = 0x10,
202 .tx_dma_io_ctrl = 0x100,
203 .tx_dma_io_len = 0x104,
204 .txfifo_ctrl = 0x108,
205 .txfifo_level_chk = 0x10c,
206 .txfifo_op = 0x110,
207 .txfifo_st = 0x114,
208 .txfifo_data = 0x118,
209 .rx_dma_io_ctrl = 0x120,
210 .rx_dma_io_len = 0x124,
211 .rxfifo_ctrl = 0x128,
212 .rxfifo_level_chk = 0x12c,
213 .rxfifo_op = 0x130,
214 .rxfifo_st = 0x134,
215 .rxfifo_data = 0x138,
216 .spi_ctrl = 0x0,
217 .spi_cmd = 0x4,
218 .spi_dummy_delay_ctrl = 0x144,
219 };
220
221 static const struct sirf_spi_register usp_spi_register = {
222 .tx_rx_en = 0x10,
223 .int_en = 0x14,
224 .int_st = 0x18,
225 .tx_dma_io_ctrl = 0x100,
226 .tx_dma_io_len = 0x104,
227 .txfifo_ctrl = 0x108,
228 .txfifo_level_chk = 0x10c,
229 .txfifo_op = 0x110,
230 .txfifo_st = 0x114,
231 .txfifo_data = 0x118,
232 .rx_dma_io_ctrl = 0x120,
233 .rx_dma_io_len = 0x124,
234 .rxfifo_ctrl = 0x128,
235 .rxfifo_level_chk = 0x12c,
236 .rxfifo_op = 0x130,
237 .rxfifo_st = 0x134,
238 .rxfifo_data = 0x138,
239 .usp_mode1 = 0x0,
240 .usp_mode2 = 0x4,
241 .usp_tx_frame_ctrl = 0x8,
242 .usp_rx_frame_ctrl = 0xc,
243 .usp_pin_io_data = 0x1c,
244 .usp_risc_dsp_mode = 0x20,
245 .usp_async_param_reg = 0x24,
246 .usp_irda_x_mode_div = 0x28,
247 .usp_sm_cfg = 0x2c,
248 .usp_int_en_clr = 0x140,
249 };
250
251 struct sirf_spi_comp_data {
252 const struct sirf_spi_register *regs;
253 enum sirf_spi_type type;
254 unsigned int dat_max_frm_len;
255 unsigned int fifo_size;
256 };
257
258 static const struct sirf_spi_comp_data sirf_real_spi = {
259 .regs = &real_spi_register,
260 .type = SIRF_REAL_SPI,
261 .dat_max_frm_len = 64 * 1024,
262 .fifo_size = 256,
263 };
264
265 static const struct sirf_spi_comp_data sirf_usp_spi_p2 = {
266 .regs = &usp_spi_register,
267 .type = SIRF_USP_SPI_P2,
268 .dat_max_frm_len = 1024 * 1024,
269 .fifo_size = 128,
270 };
271
272 static const struct sirf_spi_comp_data sirf_usp_spi_a7 = {
273 .regs = &usp_spi_register,
274 .type = SIRF_USP_SPI_A7,
275 .dat_max_frm_len = 1024 * 1024,
276 .fifo_size = 512,
277 };
278
279 struct sirfsoc_spi {
280 struct spi_bitbang bitbang;
281 struct completion rx_done;
282 struct completion tx_done;
283
284 void __iomem *base;
285 u32 ctrl_freq; /* SPI controller clock speed */
286 struct clk *clk;
287
288 /* rx & tx bufs from the spi_transfer */
289 const void *tx;
290 void *rx;
291
292 /* place received word into rx buffer */
293 void (*rx_word) (struct sirfsoc_spi *);
294 /* get word from tx buffer for sending */
295 void (*tx_word) (struct sirfsoc_spi *);
296
297 	/* number of words left to be transmitted/received */
298 unsigned int left_tx_word;
299 unsigned int left_rx_word;
300
301 /* rx & tx DMA channels */
302 struct dma_chan *rx_chan;
303 struct dma_chan *tx_chan;
304 dma_addr_t src_start;
305 dma_addr_t dst_start;
306 int word_width; /* in bytes */
307
308 /*
309 	 * if the tx length is no more than 4 bytes and there is no rx buffer,
310 	 * use command mode
311 */
312 bool tx_by_cmd;
313 bool hw_cs;
314 enum sirf_spi_type type;
315 const struct sirf_spi_register *regs;
316 unsigned int fifo_size;
317 	/* fifo empty offset is (fifo full offset + 1) */
318 unsigned int fifo_full_offset;
319 /* fifo_level_chk_mask is (fifo_size/4 - 1) */
320 unsigned int fifo_level_chk_mask;
321 unsigned int dat_max_frm_len;
322 };
323
324 static void spi_sirfsoc_rx_word_u8(struct sirfsoc_spi *sspi)
325 {
326 u32 data;
327 u8 *rx = sspi->rx;
328
329 data = readl(sspi->base + sspi->regs->rxfifo_data);
330
331 if (rx) {
332 *rx++ = (u8) data;
333 sspi->rx = rx;
334 }
335
336 sspi->left_rx_word--;
337 }
338
339 static void spi_sirfsoc_tx_word_u8(struct sirfsoc_spi *sspi)
340 {
341 u32 data = 0;
342 const u8 *tx = sspi->tx;
343
344 if (tx) {
345 data = *tx++;
346 sspi->tx = tx;
347 }
348 writel(data, sspi->base + sspi->regs->txfifo_data);
349 sspi->left_tx_word--;
350 }
351
352 static void spi_sirfsoc_rx_word_u16(struct sirfsoc_spi *sspi)
353 {
354 u32 data;
355 u16 *rx = sspi->rx;
356
357 data = readl(sspi->base + sspi->regs->rxfifo_data);
358
359 if (rx) {
360 *rx++ = (u16) data;
361 sspi->rx = rx;
362 }
363
364 sspi->left_rx_word--;
365 }
366
367 static void spi_sirfsoc_tx_word_u16(struct sirfsoc_spi *sspi)
368 {
369 u32 data = 0;
370 const u16 *tx = sspi->tx;
371
372 if (tx) {
373 data = *tx++;
374 sspi->tx = tx;
375 }
376
377 writel(data, sspi->base + sspi->regs->txfifo_data);
378 sspi->left_tx_word--;
379 }
380
381 static void spi_sirfsoc_rx_word_u32(struct sirfsoc_spi *sspi)
382 {
383 u32 data;
384 u32 *rx = sspi->rx;
385
386 data = readl(sspi->base + sspi->regs->rxfifo_data);
387
388 if (rx) {
389 *rx++ = (u32) data;
390 sspi->rx = rx;
391 }
392
393 sspi->left_rx_word--;
394
395 }
396
397 static void spi_sirfsoc_tx_word_u32(struct sirfsoc_spi *sspi)
398 {
399 u32 data = 0;
400 const u32 *tx = sspi->tx;
401
402 if (tx) {
403 data = *tx++;
404 sspi->tx = tx;
405 }
406
407 writel(data, sspi->base + sspi->regs->txfifo_data);
408 sspi->left_tx_word--;
409 }
410
411 static irqreturn_t spi_sirfsoc_irq(int irq, void *dev_id)
412 {
413 struct sirfsoc_spi *sspi = dev_id;
414 u32 spi_stat;
415
416 spi_stat = readl(sspi->base + sspi->regs->int_st);
417 if (sspi->tx_by_cmd && sspi->type == SIRF_REAL_SPI
418 && (spi_stat & SIRFSOC_SPI_FRM_END)) {
419 complete(&sspi->tx_done);
420 writel(0x0, sspi->base + sspi->regs->int_en);
421 writel(readl(sspi->base + sspi->regs->int_st),
422 sspi->base + sspi->regs->int_st);
423 return IRQ_HANDLED;
424 }
425 /* Error Conditions */
426 if (spi_stat & SIRFSOC_SPI_RX_OFLOW ||
427 spi_stat & SIRFSOC_SPI_TX_UFLOW) {
428 complete(&sspi->tx_done);
429 complete(&sspi->rx_done);
430 switch (sspi->type) {
431 case SIRF_REAL_SPI:
432 case SIRF_USP_SPI_P2:
433 writel(0x0, sspi->base + sspi->regs->int_en);
434 break;
435 case SIRF_USP_SPI_A7:
436 writel(~0UL, sspi->base + sspi->regs->usp_int_en_clr);
437 break;
438 }
439 writel(readl(sspi->base + sspi->regs->int_st),
440 sspi->base + sspi->regs->int_st);
441 return IRQ_HANDLED;
442 }
443 if (spi_stat & SIRFSOC_SPI_TXFIFO_EMPTY)
444 complete(&sspi->tx_done);
445 while (!(readl(sspi->base + sspi->regs->int_st) &
446 SIRFSOC_SPI_RX_IO_DMA))
447 cpu_relax();
448 complete(&sspi->rx_done);
449 switch (sspi->type) {
450 case SIRF_REAL_SPI:
451 case SIRF_USP_SPI_P2:
452 writel(0x0, sspi->base + sspi->regs->int_en);
453 break;
454 case SIRF_USP_SPI_A7:
455 writel(~0UL, sspi->base + sspi->regs->usp_int_en_clr);
456 break;
457 }
458 writel(readl(sspi->base + sspi->regs->int_st),
459 sspi->base + sspi->regs->int_st);
460
461 return IRQ_HANDLED;
462 }
463
464 static void spi_sirfsoc_dma_fini_callback(void *data)
465 {
466 struct completion *dma_complete = data;
467
468 complete(dma_complete);
469 }
470
471 static void spi_sirfsoc_cmd_transfer(struct spi_device *spi,
472 struct spi_transfer *t)
473 {
474 struct sirfsoc_spi *sspi;
475 int timeout = t->len * 10;
476 u32 cmd;
477
478 sspi = spi_master_get_devdata(spi->master);
479 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + sspi->regs->txfifo_op);
480 writel(SIRFSOC_SPI_FIFO_START, sspi->base + sspi->regs->txfifo_op);
481 memcpy(&cmd, sspi->tx, t->len);
482 if (sspi->word_width == 1 && !(spi->mode & SPI_LSB_FIRST))
483 cmd = cpu_to_be32(cmd) >>
484 ((SIRFSOC_MAX_CMD_BYTES - t->len) * 8);
485 if (sspi->word_width == 2 && t->len == 4 &&
486 (!(spi->mode & SPI_LSB_FIRST)))
487 cmd = ((cmd & 0xffff) << 16) | (cmd >> 16);
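	/*
	 * Example of the 8-bit, MSB-first packing above (assumed little-endian
	 * CPU, not in the original source): a two-byte command { 0xAA, 0xBB }
	 * is copied into the low bytes of cmd, then byte-swapped and shifted so
	 * that the value written below is 0x0000AABB, keeping 0xAA as the more
	 * significant command byte.
	 */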
488 writel(cmd, sspi->base + sspi->regs->spi_cmd);
489 writel(SIRFSOC_SPI_FRM_END_INT_EN,
490 sspi->base + sspi->regs->int_en);
491 writel(SIRFSOC_SPI_CMD_TX_EN,
492 sspi->base + sspi->regs->tx_rx_en);
493 if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
494 dev_err(&spi->dev, "cmd transfer timeout\n");
495 return;
496 }
497 sspi->left_rx_word -= t->len;
498 }
499
500 static void spi_sirfsoc_dma_transfer(struct spi_device *spi,
501 struct spi_transfer *t)
502 {
503 struct sirfsoc_spi *sspi;
504 struct dma_async_tx_descriptor *rx_desc, *tx_desc;
505 int timeout = t->len * 10;
506
507 sspi = spi_master_get_devdata(spi->master);
508 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + sspi->regs->rxfifo_op);
509 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + sspi->regs->txfifo_op);
510 switch (sspi->type) {
511 case SIRF_REAL_SPI:
512 writel(SIRFSOC_SPI_FIFO_START,
513 sspi->base + sspi->regs->rxfifo_op);
514 writel(SIRFSOC_SPI_FIFO_START,
515 sspi->base + sspi->regs->txfifo_op);
516 writel(0, sspi->base + sspi->regs->int_en);
517 break;
518 case SIRF_USP_SPI_P2:
519 writel(0x0, sspi->base + sspi->regs->rxfifo_op);
520 writel(0x0, sspi->base + sspi->regs->txfifo_op);
521 writel(0, sspi->base + sspi->regs->int_en);
522 break;
523 case SIRF_USP_SPI_A7:
524 writel(0x0, sspi->base + sspi->regs->rxfifo_op);
525 writel(0x0, sspi->base + sspi->regs->txfifo_op);
526 writel(~0UL, sspi->base + sspi->regs->usp_int_en_clr);
527 break;
528 }
529 writel(readl(sspi->base + sspi->regs->int_st),
530 sspi->base + sspi->regs->int_st);
531 if (sspi->left_tx_word < sspi->dat_max_frm_len) {
532 switch (sspi->type) {
533 case SIRF_REAL_SPI:
534 writel(readl(sspi->base + sspi->regs->spi_ctrl) |
535 SIRFSOC_SPI_ENA_AUTO_CLR |
536 SIRFSOC_SPI_MUL_DAT_MODE,
537 sspi->base + sspi->regs->spi_ctrl);
538 writel(sspi->left_tx_word - 1,
539 sspi->base + sspi->regs->tx_dma_io_len);
540 writel(sspi->left_tx_word - 1,
541 sspi->base + sspi->regs->rx_dma_io_len);
542 break;
543 case SIRF_USP_SPI_P2:
544 case SIRF_USP_SPI_A7:
545 		/* USP simulating SPI: tx/rx_dma_io_len is given in bytes */
546 writel(sspi->left_tx_word * sspi->word_width,
547 sspi->base + sspi->regs->tx_dma_io_len);
548 writel(sspi->left_tx_word * sspi->word_width,
549 sspi->base + sspi->regs->rx_dma_io_len);
550 break;
551 }
552 } else {
553 if (sspi->type == SIRF_REAL_SPI)
554 writel(readl(sspi->base + sspi->regs->spi_ctrl),
555 sspi->base + sspi->regs->spi_ctrl);
556 writel(0, sspi->base + sspi->regs->tx_dma_io_len);
557 writel(0, sspi->base + sspi->regs->rx_dma_io_len);
558 }
559 sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len,
560 (t->tx_buf != t->rx_buf) ?
561 DMA_FROM_DEVICE : DMA_BIDIRECTIONAL);
562 rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
563 sspi->dst_start, t->len, DMA_DEV_TO_MEM,
564 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
565 rx_desc->callback = spi_sirfsoc_dma_fini_callback;
566 rx_desc->callback_param = &sspi->rx_done;
567
568 sspi->src_start = dma_map_single(&spi->dev, (void *)sspi->tx, t->len,
569 (t->tx_buf != t->rx_buf) ?
570 DMA_TO_DEVICE : DMA_BIDIRECTIONAL);
571 tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
572 sspi->src_start, t->len, DMA_MEM_TO_DEV,
573 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
574 tx_desc->callback = spi_sirfsoc_dma_fini_callback;
575 tx_desc->callback_param = &sspi->tx_done;
576
577 dmaengine_submit(tx_desc);
578 dmaengine_submit(rx_desc);
579 dma_async_issue_pending(sspi->tx_chan);
580 dma_async_issue_pending(sspi->rx_chan);
581 writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
582 sspi->base + sspi->regs->tx_rx_en);
583 if (sspi->type == SIRF_USP_SPI_P2 ||
584 sspi->type == SIRF_USP_SPI_A7) {
585 writel(SIRFSOC_SPI_FIFO_START,
586 sspi->base + sspi->regs->rxfifo_op);
587 writel(SIRFSOC_SPI_FIFO_START,
588 sspi->base + sspi->regs->txfifo_op);
589 }
590 if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
591 dev_err(&spi->dev, "transfer timeout\n");
592 dmaengine_terminate_all(sspi->rx_chan);
593 } else
594 sspi->left_rx_word = 0;
595 /*
596 	 * we only wait for the tx-done event when transferring by DMA. For
597 	 * PIO, rx data is obtained by writing tx data, so once rx is done,
598 	 * tx has already completed.
599 */
600 if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
601 dev_err(&spi->dev, "transfer timeout\n");
602 if (sspi->type == SIRF_USP_SPI_P2 ||
603 sspi->type == SIRF_USP_SPI_A7)
604 writel(0, sspi->base + sspi->regs->tx_rx_en);
605 dmaengine_terminate_all(sspi->tx_chan);
606 }
607 dma_unmap_single(&spi->dev, sspi->src_start, t->len, DMA_TO_DEVICE);
608 dma_unmap_single(&spi->dev, sspi->dst_start, t->len, DMA_FROM_DEVICE);
609 /* TX, RX FIFO stop */
610 writel(0, sspi->base + sspi->regs->rxfifo_op);
611 writel(0, sspi->base + sspi->regs->txfifo_op);
612 if (sspi->left_tx_word >= sspi->dat_max_frm_len)
613 writel(0, sspi->base + sspi->regs->tx_rx_en);
614 if (sspi->type == SIRF_USP_SPI_P2 ||
615 sspi->type == SIRF_USP_SPI_A7)
616 writel(0, sspi->base + sspi->regs->tx_rx_en);
617 }
618
619 static void spi_sirfsoc_pio_transfer(struct spi_device *spi,
620 struct spi_transfer *t)
621 {
622 struct sirfsoc_spi *sspi;
623 int timeout = t->len * 10;
624 unsigned int data_units;
625
626 sspi = spi_master_get_devdata(spi->master);
627 do {
628 writel(SIRFSOC_SPI_FIFO_RESET,
629 sspi->base + sspi->regs->rxfifo_op);
630 writel(SIRFSOC_SPI_FIFO_RESET,
631 sspi->base + sspi->regs->txfifo_op);
632 switch (sspi->type) {
633 case SIRF_USP_SPI_P2:
634 writel(0x0, sspi->base + sspi->regs->rxfifo_op);
635 writel(0x0, sspi->base + sspi->regs->txfifo_op);
636 writel(0, sspi->base + sspi->regs->int_en);
637 writel(readl(sspi->base + sspi->regs->int_st),
638 sspi->base + sspi->regs->int_st);
639 writel(min((sspi->left_tx_word * sspi->word_width),
640 sspi->fifo_size),
641 sspi->base + sspi->regs->tx_dma_io_len);
642 writel(min((sspi->left_rx_word * sspi->word_width),
643 sspi->fifo_size),
644 sspi->base + sspi->regs->rx_dma_io_len);
645 break;
646 case SIRF_USP_SPI_A7:
647 writel(0x0, sspi->base + sspi->regs->rxfifo_op);
648 writel(0x0, sspi->base + sspi->regs->txfifo_op);
649 writel(~0UL, sspi->base + sspi->regs->usp_int_en_clr);
650 writel(readl(sspi->base + sspi->regs->int_st),
651 sspi->base + sspi->regs->int_st);
652 writel(min((sspi->left_tx_word * sspi->word_width),
653 sspi->fifo_size),
654 sspi->base + sspi->regs->tx_dma_io_len);
655 writel(min((sspi->left_rx_word * sspi->word_width),
656 sspi->fifo_size),
657 sspi->base + sspi->regs->rx_dma_io_len);
658 break;
659 case SIRF_REAL_SPI:
660 writel(SIRFSOC_SPI_FIFO_START,
661 sspi->base + sspi->regs->rxfifo_op);
662 writel(SIRFSOC_SPI_FIFO_START,
663 sspi->base + sspi->regs->txfifo_op);
664 writel(0, sspi->base + sspi->regs->int_en);
665 writel(readl(sspi->base + sspi->regs->int_st),
666 sspi->base + sspi->regs->int_st);
667 writel(readl(sspi->base + sspi->regs->spi_ctrl) |
668 SIRFSOC_SPI_MUL_DAT_MODE |
669 SIRFSOC_SPI_ENA_AUTO_CLR,
670 sspi->base + sspi->regs->spi_ctrl);
671 data_units = sspi->fifo_size / sspi->word_width;
672 writel(min(sspi->left_tx_word, data_units) - 1,
673 sspi->base + sspi->regs->tx_dma_io_len);
674 writel(min(sspi->left_rx_word, data_units) - 1,
675 sspi->base + sspi->regs->rx_dma_io_len);
676 break;
677 }
678 while (!((readl(sspi->base + sspi->regs->txfifo_st)
679 & SIRFSOC_SPI_FIFO_FULL_MASK(sspi))) &&
680 sspi->left_tx_word)
681 sspi->tx_word(sspi);
682 writel(SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN |
683 SIRFSOC_SPI_TX_UFLOW_INT_EN |
684 SIRFSOC_SPI_RX_OFLOW_INT_EN |
685 SIRFSOC_SPI_RX_IO_DMA_INT_EN,
686 sspi->base + sspi->regs->int_en);
687 writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
688 sspi->base + sspi->regs->tx_rx_en);
689 if (sspi->type == SIRF_USP_SPI_P2 ||
690 sspi->type == SIRF_USP_SPI_A7) {
691 writel(SIRFSOC_SPI_FIFO_START,
692 sspi->base + sspi->regs->rxfifo_op);
693 writel(SIRFSOC_SPI_FIFO_START,
694 sspi->base + sspi->regs->txfifo_op);
695 }
696 if (!wait_for_completion_timeout(&sspi->tx_done, timeout) ||
697 !wait_for_completion_timeout(&sspi->rx_done, timeout)) {
698 dev_err(&spi->dev, "transfer timeout\n");
699 if (sspi->type == SIRF_USP_SPI_P2 ||
700 sspi->type == SIRF_USP_SPI_A7)
701 writel(0, sspi->base + sspi->regs->tx_rx_en);
702 break;
703 }
704 while (!((readl(sspi->base + sspi->regs->rxfifo_st)
705 & SIRFSOC_SPI_FIFO_EMPTY_MASK(sspi))) &&
706 sspi->left_rx_word)
707 sspi->rx_word(sspi);
708 if (sspi->type == SIRF_USP_SPI_P2 ||
709 sspi->type == SIRF_USP_SPI_A7)
710 writel(0, sspi->base + sspi->regs->tx_rx_en);
711 writel(0, sspi->base + sspi->regs->rxfifo_op);
712 writel(0, sspi->base + sspi->regs->txfifo_op);
713 } while (sspi->left_tx_word != 0 || sspi->left_rx_word != 0);
714 }
715
716 static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
717 {
718 struct sirfsoc_spi *sspi;
719
720 sspi = spi_master_get_devdata(spi->master);
721 sspi->tx = t->tx_buf;
722 sspi->rx = t->rx_buf;
723 sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
724 reinit_completion(&sspi->rx_done);
725 reinit_completion(&sspi->tx_done);
726 /*
727 	 * if the data is to be transferred via the command register (tx_by_cmd)
728 	 * and rx_buf is NULL, just write the command data into the command
729 	 * register and wait for its completion.
730 */
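	/*
	 * Dispatch order, as implemented below: command-register mode first,
	 * then DMA when IS_DMA_VALID() holds, otherwise PIO.
	 */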
731 if (sspi->type == SIRF_REAL_SPI && sspi->tx_by_cmd)
732 spi_sirfsoc_cmd_transfer(spi, t);
733 else if (IS_DMA_VALID(t))
734 spi_sirfsoc_dma_transfer(spi, t);
735 else
736 spi_sirfsoc_pio_transfer(spi, t);
737
738 return t->len - sspi->left_rx_word * sspi->word_width;
739 }
740
741 static void spi_sirfsoc_chipselect(struct spi_device *spi, int value)
742 {
743 struct sirfsoc_spi *sspi = spi_master_get_devdata(spi->master);
744
745 if (sspi->hw_cs) {
746 u32 regval;
747
748 switch (sspi->type) {
749 case SIRF_REAL_SPI:
750 regval = readl(sspi->base + sspi->regs->spi_ctrl);
751 switch (value) {
752 case BITBANG_CS_ACTIVE:
753 if (spi->mode & SPI_CS_HIGH)
754 regval |= SIRFSOC_SPI_CS_IO_OUT;
755 else
756 regval &= ~SIRFSOC_SPI_CS_IO_OUT;
757 break;
758 case BITBANG_CS_INACTIVE:
759 if (spi->mode & SPI_CS_HIGH)
760 regval &= ~SIRFSOC_SPI_CS_IO_OUT;
761 else
762 regval |= SIRFSOC_SPI_CS_IO_OUT;
763 break;
764 }
765 writel(regval, sspi->base + sspi->regs->spi_ctrl);
766 break;
767 case SIRF_USP_SPI_P2:
768 case SIRF_USP_SPI_A7:
769 regval = readl(sspi->base +
770 sspi->regs->usp_pin_io_data);
771 switch (value) {
772 case BITBANG_CS_ACTIVE:
773 if (spi->mode & SPI_CS_HIGH)
774 regval |= SIRFSOC_USP_CS_HIGH_VALUE;
775 else
776 regval &= ~(SIRFSOC_USP_CS_HIGH_VALUE);
777 break;
778 case BITBANG_CS_INACTIVE:
779 if (spi->mode & SPI_CS_HIGH)
780 regval &= ~(SIRFSOC_USP_CS_HIGH_VALUE);
781 else
782 regval |= SIRFSOC_USP_CS_HIGH_VALUE;
783 break;
784 }
785 writel(regval,
786 sspi->base + sspi->regs->usp_pin_io_data);
787 break;
788 }
789 } else {
790 switch (value) {
791 case BITBANG_CS_ACTIVE:
792 gpio_direction_output(spi->cs_gpio,
793 spi->mode & SPI_CS_HIGH ? 1 : 0);
794 break;
795 case BITBANG_CS_INACTIVE:
796 gpio_direction_output(spi->cs_gpio,
797 spi->mode & SPI_CS_HIGH ? 0 : 1);
798 break;
799 }
800 }
801 }
802
803 static int spi_sirfsoc_config_mode(struct spi_device *spi)
804 {
805 struct sirfsoc_spi *sspi;
806 u32 regval, usp_mode1;
807
808 sspi = spi_master_get_devdata(spi->master);
809 regval = readl(sspi->base + sspi->regs->spi_ctrl);
810 usp_mode1 = readl(sspi->base + sspi->regs->usp_mode1);
811 if (!(spi->mode & SPI_CS_HIGH)) {
812 regval |= SIRFSOC_SPI_CS_IDLE_STAT;
813 usp_mode1 &= ~SIRFSOC_USP_CS_HIGH_VALID;
814 } else {
815 regval &= ~SIRFSOC_SPI_CS_IDLE_STAT;
816 usp_mode1 |= SIRFSOC_USP_CS_HIGH_VALID;
817 }
818 if (!(spi->mode & SPI_LSB_FIRST)) {
819 regval |= SIRFSOC_SPI_TRAN_MSB;
820 usp_mode1 &= ~SIRFSOC_USP_LSB;
821 } else {
822 regval &= ~SIRFSOC_SPI_TRAN_MSB;
823 usp_mode1 |= SIRFSOC_USP_LSB;
824 }
825 if (spi->mode & SPI_CPOL) {
826 regval |= SIRFSOC_SPI_CLK_IDLE_STAT;
827 usp_mode1 |= SIRFSOC_USP_SCLK_IDLE_STAT;
828 } else {
829 regval &= ~SIRFSOC_SPI_CLK_IDLE_STAT;
830 usp_mode1 &= ~SIRFSOC_USP_SCLK_IDLE_STAT;
831 }
832 /*
833 * Data should be driven at least 1/2 cycle before the fetch edge
834 	 * to make sure that the data is stable at the fetch edge.
835 */
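	/*
	 * Concretely (as implemented below): SPI modes 0 and 3, where
	 * CPOL == CPHA, clear SIRFSOC_SPI_DRV_POS_EDGE so data is driven on the
	 * edge opposite to the sampling edge; modes 1 and 2 set the bit instead.
	 */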
836 if (((spi->mode & SPI_CPOL) && (spi->mode & SPI_CPHA)) ||
837 (!(spi->mode & SPI_CPOL) && !(spi->mode & SPI_CPHA))) {
838 regval &= ~SIRFSOC_SPI_DRV_POS_EDGE;
839 usp_mode1 |= (SIRFSOC_USP_TXD_FALLING_EDGE |
840 SIRFSOC_USP_RXD_FALLING_EDGE);
841 } else {
842 regval |= SIRFSOC_SPI_DRV_POS_EDGE;
843 usp_mode1 &= ~(SIRFSOC_USP_RXD_FALLING_EDGE |
844 SIRFSOC_USP_TXD_FALLING_EDGE);
845 }
846 writel((SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, sspi->fifo_size - 2) <<
847 SIRFSOC_SPI_FIFO_SC_OFFSET) |
848 (SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, sspi->fifo_size / 2) <<
849 SIRFSOC_SPI_FIFO_LC_OFFSET) |
850 (SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, 2) <<
851 SIRFSOC_SPI_FIFO_HC_OFFSET),
852 sspi->base + sspi->regs->txfifo_level_chk);
853 writel((SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, 2) <<
854 SIRFSOC_SPI_FIFO_SC_OFFSET) |
855 (SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, sspi->fifo_size / 2) <<
856 SIRFSOC_SPI_FIFO_LC_OFFSET) |
857 (SIRFSOC_SPI_FIFO_LEVEL_CHK_MASK(sspi, sspi->fifo_size - 2) <<
858 SIRFSOC_SPI_FIFO_HC_OFFSET),
859 sspi->base + sspi->regs->rxfifo_level_chk);
860 /*
861 	 * it should never be set to hardware cs mode because, in hardware cs
862 	 * mode, the cs signal cannot be controlled by the driver.
863 */
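	/*
	 * For the real SPI this selects IO-controlled chip select
	 * (SIRFSOC_SPI_CS_IO_MODE), which spi_sirfsoc_chipselect() then drives
	 * via SIRFSOC_SPI_CS_IO_OUT when hw_cs is set.
	 */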
864 switch (sspi->type) {
865 case SIRF_REAL_SPI:
866 regval |= SIRFSOC_SPI_CS_IO_MODE;
867 writel(regval, sspi->base + sspi->regs->spi_ctrl);
868 break;
869 case SIRF_USP_SPI_P2:
870 case SIRF_USP_SPI_A7:
871 usp_mode1 |= SIRFSOC_USP_SYNC_MODE;
872 usp_mode1 |= SIRFSOC_USP_TFS_IO_MODE;
873 usp_mode1 &= ~SIRFSOC_USP_TFS_IO_INPUT;
874 writel(usp_mode1, sspi->base + sspi->regs->usp_mode1);
875 break;
876 }
877
878 return 0;
879 }
880
881 static int
882 spi_sirfsoc_setup_transfer(struct spi_device *spi, struct spi_transfer *t)
883 {
884 struct sirfsoc_spi *sspi;
885 u8 bits_per_word = 0;
886 int hz = 0;
887 u32 regval, txfifo_ctrl, rxfifo_ctrl, tx_frm_ctl, rx_frm_ctl, usp_mode2;
888
889 sspi = spi_master_get_devdata(spi->master);
890
891 bits_per_word = (t) ? t->bits_per_word : spi->bits_per_word;
892 hz = t && t->speed_hz ? t->speed_hz : spi->max_speed_hz;
893
894 usp_mode2 = regval = (sspi->ctrl_freq / (2 * hz)) - 1;
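	/*
	 * Example with assumed numbers (the controller clock rate is board
	 * specific): for ctrl_freq = 150 MHz and hz = 1 MHz, the divisor
	 * becomes 150000000 / (2 * 1000000) - 1 = 74.
	 */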
895 if (regval > 0xFFFF || regval < 0) {
896 dev_err(&spi->dev, "Speed %d not supported\n", hz);
897 return -EINVAL;
898 }
899 switch (bits_per_word) {
900 case 8:
901 regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_8;
902 sspi->rx_word = spi_sirfsoc_rx_word_u8;
903 sspi->tx_word = spi_sirfsoc_tx_word_u8;
904 break;
905 case 12:
906 case 16:
907 regval |= (bits_per_word == 12) ?
908 SIRFSOC_SPI_TRAN_DAT_FORMAT_12 :
909 SIRFSOC_SPI_TRAN_DAT_FORMAT_16;
910 sspi->rx_word = spi_sirfsoc_rx_word_u16;
911 sspi->tx_word = spi_sirfsoc_tx_word_u16;
912 break;
913 case 32:
914 regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_32;
915 sspi->rx_word = spi_sirfsoc_rx_word_u32;
916 sspi->tx_word = spi_sirfsoc_tx_word_u32;
917 break;
918 default:
919 dev_err(&spi->dev, "bpw %d not supported\n", bits_per_word);
920 return -EINVAL;
921 }
922 sspi->word_width = DIV_ROUND_UP(bits_per_word, 8);
923 txfifo_ctrl = (((sspi->fifo_size / 2) &
924 SIRFSOC_SPI_FIFO_THD_MASK(sspi))
925 << SIRFSOC_SPI_FIFO_THD_OFFSET) |
926 (sspi->word_width >> 1);
927 rxfifo_ctrl = (((sspi->fifo_size / 2) &
928 SIRFSOC_SPI_FIFO_THD_MASK(sspi))
929 << SIRFSOC_SPI_FIFO_THD_OFFSET) |
930 (sspi->word_width >> 1);
931 writel(txfifo_ctrl, sspi->base + sspi->regs->txfifo_ctrl);
932 writel(rxfifo_ctrl, sspi->base + sspi->regs->rxfifo_ctrl);
933 if (sspi->type == SIRF_USP_SPI_P2 ||
934 sspi->type == SIRF_USP_SPI_A7) {
935 tx_frm_ctl = 0;
936 tx_frm_ctl |= ((bits_per_word - 1) & SIRFSOC_USP_TX_DATA_MASK)
937 << SIRFSOC_USP_TX_DATA_OFFSET;
938 tx_frm_ctl |= ((bits_per_word + 1 + SIRFSOC_USP_TXD_DELAY_LEN
939 - 1) & SIRFSOC_USP_TX_SYNC_MASK) <<
940 SIRFSOC_USP_TX_SYNC_OFFSET;
941 tx_frm_ctl |= ((bits_per_word + 1 + SIRFSOC_USP_TXD_DELAY_LEN
942 + 2 - 1) & SIRFSOC_USP_TX_FRAME_MASK) <<
943 SIRFSOC_USP_TX_FRAME_OFFSET;
944 tx_frm_ctl |= ((bits_per_word - 1) &
945 SIRFSOC_USP_TX_SHIFTER_MASK) <<
946 SIRFSOC_USP_TX_SHIFTER_OFFSET;
947 rx_frm_ctl = 0;
948 rx_frm_ctl |= ((bits_per_word - 1) & SIRFSOC_USP_RX_DATA_MASK)
949 << SIRFSOC_USP_RX_DATA_OFFSET;
950 rx_frm_ctl |= ((bits_per_word + 1 + SIRFSOC_USP_RXD_DELAY_LEN
951 + 2 - 1) & SIRFSOC_USP_RX_FRAME_MASK) <<
952 SIRFSOC_USP_RX_FRAME_OFFSET;
953 rx_frm_ctl |= ((bits_per_word - 1)
954 & SIRFSOC_USP_RX_SHIFTER_MASK) <<
955 SIRFSOC_USP_RX_SHIFTER_OFFSET;
956 writel(tx_frm_ctl | (((usp_mode2 >> 10) &
957 SIRFSOC_USP_CLK_10_11_MASK) <<
958 SIRFSOC_USP_CLK_10_11_OFFSET),
959 sspi->base + sspi->regs->usp_tx_frame_ctrl);
960 writel(rx_frm_ctl | (((usp_mode2 >> 12) &
961 SIRFSOC_USP_CLK_12_15_MASK) <<
962 SIRFSOC_USP_CLK_12_15_OFFSET),
963 sspi->base + sspi->regs->usp_rx_frame_ctrl);
964 writel(readl(sspi->base + sspi->regs->usp_mode2) |
965 ((usp_mode2 & SIRFSOC_USP_CLK_DIVISOR_MASK) <<
966 SIRFSOC_USP_CLK_DIVISOR_OFFSET) |
967 (SIRFSOC_USP_RXD_DELAY_LEN <<
968 SIRFSOC_USP_RXD_DELAY_OFFSET) |
969 (SIRFSOC_USP_TXD_DELAY_LEN <<
970 SIRFSOC_USP_TXD_DELAY_OFFSET),
971 sspi->base + sspi->regs->usp_mode2);
972 }
973 if (sspi->type == SIRF_REAL_SPI)
974 writel(regval, sspi->base + sspi->regs->spi_ctrl);
975 spi_sirfsoc_config_mode(spi);
976 if (sspi->type == SIRF_REAL_SPI) {
977 if (t && t->tx_buf && !t->rx_buf &&
978 (t->len <= SIRFSOC_MAX_CMD_BYTES)) {
979 sspi->tx_by_cmd = true;
980 writel(readl(sspi->base + sspi->regs->spi_ctrl) |
981 (SIRFSOC_SPI_CMD_BYTE_NUM((t->len - 1)) |
982 SIRFSOC_SPI_CMD_MODE),
983 sspi->base + sspi->regs->spi_ctrl);
984 } else {
985 sspi->tx_by_cmd = false;
986 writel(readl(sspi->base + sspi->regs->spi_ctrl) &
987 ~SIRFSOC_SPI_CMD_MODE,
988 sspi->base + sspi->regs->spi_ctrl);
989 }
990 }
991 if (IS_DMA_VALID(t)) {
992 /* Enable DMA mode for RX, TX */
993 writel(0, sspi->base + sspi->regs->tx_dma_io_ctrl);
994 writel(SIRFSOC_SPI_RX_DMA_FLUSH,
995 sspi->base + sspi->regs->rx_dma_io_ctrl);
996 } else {
997 /* Enable IO mode for RX, TX */
998 writel(SIRFSOC_SPI_IO_MODE_SEL,
999 sspi->base + sspi->regs->tx_dma_io_ctrl);
1000 writel(SIRFSOC_SPI_IO_MODE_SEL,
1001 sspi->base + sspi->regs->rx_dma_io_ctrl);
1002 }
1003 return 0;
1004 }
1005
1006 static int spi_sirfsoc_setup(struct spi_device *spi)
1007 {
1008 struct sirfsoc_spi *sspi;
1009 int ret = 0;
1010
1011 sspi = spi_master_get_devdata(spi->master);
1012 if (spi->cs_gpio == -ENOENT)
1013 sspi->hw_cs = true;
1014 else {
1015 sspi->hw_cs = false;
1016 if (!spi_get_ctldata(spi)) {
1017 void *cs = kmalloc(sizeof(int), GFP_KERNEL);
1018 if (!cs) {
1019 ret = -ENOMEM;
1020 goto exit;
1021 }
1022 ret = gpio_is_valid(spi->cs_gpio);
1023 if (!ret) {
1024 dev_err(&spi->dev, "no valid gpio\n");
1025 ret = -ENOENT;
1026 goto exit;
1027 }
1028 ret = gpio_request(spi->cs_gpio, DRIVER_NAME);
1029 if (ret) {
1030 dev_err(&spi->dev, "failed to request gpio\n");
1031 goto exit;
1032 }
1033 spi_set_ctldata(spi, cs);
1034 }
1035 }
1036 spi_sirfsoc_config_mode(spi);
1037 spi_sirfsoc_chipselect(spi, BITBANG_CS_INACTIVE);
1038 exit:
1039 return ret;
1040 }
1041
1042 static void spi_sirfsoc_cleanup(struct spi_device *spi)
1043 {
1044 if (spi_get_ctldata(spi)) {
1045 gpio_free(spi->cs_gpio);
1046 kfree(spi_get_ctldata(spi));
1047 }
1048 }
1049
1050 static const struct of_device_id spi_sirfsoc_of_match[] = {
1051 { .compatible = "sirf,prima2-spi", .data = &sirf_real_spi},
1052 { .compatible = "sirf,prima2-usp-spi", .data = &sirf_usp_spi_p2},
1053 { .compatible = "sirf,atlas7-usp-spi", .data = &sirf_usp_spi_a7},
1054 {}
1055 };
1056 MODULE_DEVICE_TABLE(of, spi_sirfsoc_of_match);
1057
1058 static int spi_sirfsoc_probe(struct platform_device *pdev)
1059 {
1060 struct sirfsoc_spi *sspi;
1061 struct spi_master *master;
1062 struct resource *mem_res;
1063 struct sirf_spi_comp_data *spi_comp_data;
1064 int irq;
1065 int ret;
1066 const struct of_device_id *match;
1067
1068 ret = device_reset(&pdev->dev);
1069 if (ret) {
1070 dev_err(&pdev->dev, "SPI reset failed!\n");
1071 return ret;
1072 }
1073
1074 master = spi_alloc_master(&pdev->dev, sizeof(*sspi));
1075 if (!master) {
1076 dev_err(&pdev->dev, "Unable to allocate SPI master\n");
1077 return -ENOMEM;
1078 }
1079 match = of_match_node(spi_sirfsoc_of_match, pdev->dev.of_node);
1080 platform_set_drvdata(pdev, master);
1081 sspi = spi_master_get_devdata(master);
1082 	spi_comp_data = (struct sirf_spi_comp_data *)match->data;
1083 	sspi->regs = spi_comp_data->regs;
1084 	sspi->type = spi_comp_data->type;
1085 	sspi->dat_max_frm_len = spi_comp_data->dat_max_frm_len;
1086 	sspi->fifo_size = spi_comp_data->fifo_size;
1087 	sspi->fifo_full_offset = ilog2(sspi->fifo_size);
1088 	sspi->fifo_level_chk_mask = (sspi->fifo_size / 4) - 1;
1089 mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1090 sspi->base = devm_ioremap_resource(&pdev->dev, mem_res);
1091 if (IS_ERR(sspi->base)) {
1092 ret = PTR_ERR(sspi->base);
1093 goto free_master;
1094 }
1095 irq = platform_get_irq(pdev, 0);
1096 if (irq < 0) {
1097 ret = -ENXIO;
1098 goto free_master;
1099 }
1100 ret = devm_request_irq(&pdev->dev, irq, spi_sirfsoc_irq, 0,
1101 DRIVER_NAME, sspi);
1102 if (ret)
1103 goto free_master;
1104
1105 sspi->bitbang.master = master;
1106 sspi->bitbang.chipselect = spi_sirfsoc_chipselect;
1107 sspi->bitbang.setup_transfer = spi_sirfsoc_setup_transfer;
1108 sspi->bitbang.txrx_bufs = spi_sirfsoc_transfer;
1109 sspi->bitbang.master->setup = spi_sirfsoc_setup;
1110 sspi->bitbang.master->cleanup = spi_sirfsoc_cleanup;
1111 master->bus_num = pdev->id;
1112 master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST | SPI_CS_HIGH;
1113 master->bits_per_word_mask = SPI_BPW_MASK(8) | SPI_BPW_MASK(12) |
1114 SPI_BPW_MASK(16) | SPI_BPW_MASK(32);
1115 master->max_speed_hz = SIRFSOC_SPI_DEFAULT_FRQ;
1116 master->flags = SPI_MASTER_MUST_RX | SPI_MASTER_MUST_TX;
1117 sspi->bitbang.master->dev.of_node = pdev->dev.of_node;
1118
1119 /* request DMA channels */
1120 sspi->rx_chan = dma_request_slave_channel(&pdev->dev, "rx");
1121 if (!sspi->rx_chan) {
1122 dev_err(&pdev->dev, "can not allocate rx dma channel\n");
1123 ret = -ENODEV;
1124 goto free_master;
1125 }
1126 sspi->tx_chan = dma_request_slave_channel(&pdev->dev, "tx");
1127 if (!sspi->tx_chan) {
1128 dev_err(&pdev->dev, "can not allocate tx dma channel\n");
1129 ret = -ENODEV;
1130 goto free_rx_dma;
1131 }
1132
1133 sspi->clk = clk_get(&pdev->dev, NULL);
1134 if (IS_ERR(sspi->clk)) {
1135 ret = PTR_ERR(sspi->clk);
1136 goto free_tx_dma;
1137 }
1138 clk_prepare_enable(sspi->clk);
1139 sspi->ctrl_freq = clk_get_rate(sspi->clk);
1140
1141 init_completion(&sspi->rx_done);
1142 init_completion(&sspi->tx_done);
1143
1144 ret = spi_bitbang_start(&sspi->bitbang);
1145 if (ret)
1146 goto free_clk;
1147 	dev_info(&pdev->dev, "registered, bus number = %d\n", master->bus_num);
1148
1149 return 0;
1150 free_clk:
1151 clk_disable_unprepare(sspi->clk);
1152 clk_put(sspi->clk);
1153 free_tx_dma:
1154 dma_release_channel(sspi->tx_chan);
1155 free_rx_dma:
1156 dma_release_channel(sspi->rx_chan);
1157 free_master:
1158 spi_master_put(master);
1159
1160 return ret;
1161 }
1162
1163 static int spi_sirfsoc_remove(struct platform_device *pdev)
1164 {
1165 struct spi_master *master;
1166 struct sirfsoc_spi *sspi;
1167
1168 master = platform_get_drvdata(pdev);
1169 sspi = spi_master_get_devdata(master);
1170 spi_bitbang_stop(&sspi->bitbang);
1171 clk_disable_unprepare(sspi->clk);
1172 clk_put(sspi->clk);
1173 dma_release_channel(sspi->rx_chan);
1174 dma_release_channel(sspi->tx_chan);
1175 spi_master_put(master);
1176 return 0;
1177 }
1178
1179 #ifdef CONFIG_PM_SLEEP
1180 static int spi_sirfsoc_suspend(struct device *dev)
1181 {
1182 struct spi_master *master = dev_get_drvdata(dev);
1183 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
1184 int ret;
1185
1186 ret = spi_master_suspend(master);
1187 if (ret)
1188 return ret;
1189
1190 clk_disable(sspi->clk);
1191 return 0;
1192 }
1193
1194 static int spi_sirfsoc_resume(struct device *dev)
1195 {
1196 struct spi_master *master = dev_get_drvdata(dev);
1197 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
1198
1199 clk_enable(sspi->clk);
1200 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + sspi->regs->txfifo_op);
1201 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + sspi->regs->rxfifo_op);
1202 writel(SIRFSOC_SPI_FIFO_START, sspi->base + sspi->regs->txfifo_op);
1203 writel(SIRFSOC_SPI_FIFO_START, sspi->base + sspi->regs->rxfifo_op);
1204 return 0;
1205 }
1206 #endif
1207
1208 static SIMPLE_DEV_PM_OPS(spi_sirfsoc_pm_ops, spi_sirfsoc_suspend,
1209 spi_sirfsoc_resume);
1210
1211 static struct platform_driver spi_sirfsoc_driver = {
1212 .driver = {
1213 .name = DRIVER_NAME,
1214 .pm = &spi_sirfsoc_pm_ops,
1215 .of_match_table = spi_sirfsoc_of_match,
1216 },
1217 .probe = spi_sirfsoc_probe,
1218 .remove = spi_sirfsoc_remove,
1219 };
1220 module_platform_driver(spi_sirfsoc_driver);
1221 MODULE_DESCRIPTION("SiRF SoC SPI master driver");
1222 MODULE_AUTHOR("Zhiwu Song <Zhiwu.Song@csr.com>");
1223 MODULE_AUTHOR("Barry Song <Baohua.Song@csr.com>");
1224 MODULE_AUTHOR("Qipan Li <Qipan.Li@csr.com>");
1225 MODULE_LICENSE("GPL v2");