/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dma/sprd-dma.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET		0x1000
#define SPRD_DMA_CHN_REG_LENGTH		0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE	64

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE		0x0
#define SPRD_DMA_GLB_FRAG_WAIT		0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN	0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN	0xc
#define SPRD_DMA_GLB_INT_RAW_STS	0x10
#define SPRD_DMA_GLB_INT_MSK_STS	0x14
#define SPRD_DMA_GLB_REQ_STS		0x18
#define SPRD_DMA_GLB_CHN_EN_STS		0x1c
#define SPRD_DMA_GLB_DEBUG_STS		0x20
#define SPRD_DMA_GLB_ARB_SEL_STS	0x24
#define SPRD_DMA_GLB_2STAGE_GRP1	0x28
#define SPRD_DMA_GLB_2STAGE_GRP2	0x2c
#define SPRD_DMA_GLB_REQ_UID(uid)	(0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET	0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE		0x0
#define SPRD_DMA_CHN_REQ		0x4
#define SPRD_DMA_CHN_CFG		0x8
#define SPRD_DMA_CHN_INTC		0xc
#define SPRD_DMA_CHN_SRC_ADDR		0x10
#define SPRD_DMA_CHN_DES_ADDR		0x14
#define SPRD_DMA_CHN_FRG_LEN		0x18
#define SPRD_DMA_CHN_BLK_LEN		0x1c
#define SPRD_DMA_CHN_TRSC_LEN		0x20
#define SPRD_DMA_CHN_TRSF_STEP		0x24
#define SPRD_DMA_CHN_WARP_PTR		0x28
#define SPRD_DMA_CHN_WARP_TO		0x2c
#define SPRD_DMA_CHN_LLIST_PTR		0x30
#define SPRD_DMA_CHN_FRAG_STEP		0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP	0x38
#define SPRD_DMA_CHN_DES_BLK_STEP	0x3c

/* SPRD_DMA_GLB_2STAGE_GRP register definition */
#define SPRD_DMA_GLB_2STAGE_EN		BIT(24)
#define SPRD_DMA_GLB_CHN_INT_MASK	GENMASK(23, 20)
#define SPRD_DMA_GLB_DEST_INT		BIT(22)
#define SPRD_DMA_GLB_SRC_INT		BIT(20)
#define SPRD_DMA_GLB_LIST_DONE_TRG	BIT(19)
#define SPRD_DMA_GLB_TRANS_DONE_TRG	BIT(18)
#define SPRD_DMA_GLB_BLOCK_DONE_TRG	BIT(17)
#define SPRD_DMA_GLB_FRAG_DONE_TRG	BIT(16)
#define SPRD_DMA_GLB_TRG_OFFSET		16
#define SPRD_DMA_GLB_DEST_CHN_MASK	GENMASK(13, 8)
#define SPRD_DMA_GLB_DEST_CHN_OFFSET	8
#define SPRD_DMA_GLB_SRC_CHN_MASK	GENMASK(5, 0)

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK		GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET		24
#define SPRD_DMA_FRAG_INT_EN		BIT(0)
#define SPRD_DMA_BLK_INT_EN		BIT(1)
#define SPRD_DMA_TRANS_INT_EN		BIT(2)
#define SPRD_DMA_LIST_INT_EN		BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN		BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN			BIT(0)
#define SPRD_DMA_LINKLIST_EN		BIT(4)
#define SPRD_DMA_WAIT_BDONE_OFFSET	24
#define SPRD_DMA_DONOT_WAIT_BDONE	1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN			BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN		BIT(0)
#define SPRD_DMA_PAUSE_STS		BIT(2)
#define SPRD_DMA_PAUSE_CNT		0x2000

/* DMA_CHN_WARP_* register definition */
#define SPRD_DMA_HIGH_ADDR_MASK		GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK		GENMASK(31, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET	4

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_FRAG_INT_STS		BIT(16)
#define SPRD_DMA_BLK_INT_STS		BIT(17)
#define SPRD_DMA_TRSC_INT_STS		BIT(18)
#define SPRD_DMA_LIST_INT_STS		BIT(19)
#define SPRD_DMA_CFGERR_INT_STS		BIT(20)
#define SPRD_DMA_CHN_INT_STS					\
	(SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |		\
	 SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |	\
	 SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET	30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET	28
#define SPRD_DMA_SWT_MODE_OFFSET	26
#define SPRD_DMA_REQ_MODE_OFFSET	24
#define SPRD_DMA_REQ_MODE_MASK		GENMASK(1, 0)
#define SPRD_DMA_FIX_SEL_OFFSET		21
#define SPRD_DMA_FIX_EN_OFFSET		20
#define SPRD_DMA_LLIST_END		BIT(19)
#define SPRD_DMA_FRG_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK		GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET	16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET	0
#define SPRD_DMA_TRSF_STEP_MASK		GENMASK(15, 0)

/* define DMA channel mode & trigger mode mask */
#define SPRD_DMA_CHN_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_TRG_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_INT_TYPE_MASK		GENMASK(7, 0)

/* define the DMA transfer step type */
#define SPRD_DMA_NONE_STEP		0
#define SPRD_DMA_BYTE_STEP		1
#define SPRD_DMA_SHORT_STEP		2
#define SPRD_DMA_WORD_STEP		4
#define SPRD_DMA_DWORD_STEP		8

#define SPRD_DMA_SOFTWARE_UID		0

/* dma data width values */
enum sprd_dma_datawidth {
	SPRD_DMA_DATAWIDTH_1_BYTE,
	SPRD_DMA_DATAWIDTH_2_BYTES,
	SPRD_DMA_DATAWIDTH_4_BYTES,
	SPRD_DMA_DATAWIDTH_8_BYTES,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
	u32 pause;
	u32 req;
	u32 cfg;
	u32 intc;
	u32 src_addr;
	u32 des_addr;
	u32 frg_len;
	u32 blk_len;
	u32 trsc_len;
	u32 trsf_step;
	u32 wrap_ptr;
	u32 wrap_to;
	u32 llist_ptr;
	u32 frg_step;
	u32 src_blk_step;
	u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
	struct virt_dma_desc	vd;
	struct sprd_dma_chn_hw	chn_hw;
	enum dma_transfer_direction dir;
};

/* dma channel description */
struct sprd_dma_chn {
	struct virt_dma_chan	vc;
	void __iomem		*chn_base;
	struct sprd_dma_linklist	linklist;
	struct dma_slave_config	slave_cfg;
	u32			chn_num;
	u32			dev_id;
	enum sprd_dma_chn_mode	chn_mode;
	enum sprd_dma_trg_mode	trg_mode;
	enum sprd_dma_int_type	int_type;
	struct sprd_dma_desc	*cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
	struct dma_device	dma_dev;
	void __iomem		*glb_base;
	struct clk		*clk;
	struct clk		*ashb_clk;
	int			irq;
	u32			total_chns;
	struct sprd_dma_chn	channels[0];
};

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
	.filter_fn = sprd_dma_filter_fn,
};
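
/*
 * sprd_dma_info is handed to of_dma_simple_xlate() at probe time, so the
 * single cell of a client's "dmas" specifier is passed straight to
 * sprd_dma_filter_fn() as the slave/request id.
 */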

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

	return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct sprd_dma_desc, vd);
}

static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(sdev->glb_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, sdev->glb_base + reg);
}

static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(schan->chn_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/*
	 * The ashb_clk is optional and only used by the AGCP DMA controller,
	 * so only enable it when it was actually provided.
	 */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
	clk_disable_unprepare(sdev->clk);

	/* Disable the optional ashb_clk used by the AGCP DMA controller. */
	if (!IS_ERR(sdev->ashb_clk))
		clk_disable_unprepare(sdev->ashb_clk);
}

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(0, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
			    SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
			    SPRD_DMA_REQ_EN);
}

static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

	if (enable) {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

		do {
			pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
			if (pause & SPRD_DMA_PAUSE_STS)
				break;

			cpu_relax();
		} while (--timeout > 0);

		if (!timeout)
			dev_warn(sdev->dma_dev.dev,
				 "pause dma controller timeout\n");
	} else {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, 0);
	}
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
	u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

	if (!(cfg & SPRD_DMA_CHN_EN))
		return;

	sprd_dma_pause_resume(schan, true);
	sprd_dma_disable_chn(schan);
}

static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
		       SPRD_DMA_CHN_INT_STS;

	switch (intc_sts) {
	case SPRD_DMA_CFGERR_INT_STS:
		return SPRD_DMA_CFGERR_INT;

	case SPRD_DMA_LIST_INT_STS:
		return SPRD_DMA_LIST_INT;

	case SPRD_DMA_TRSC_INT_STS:
		return SPRD_DMA_TRANS_INT;

	case SPRD_DMA_BLK_INT_STS:
		return SPRD_DMA_BLK_INT;

	case SPRD_DMA_FRAG_INT_STS:
		return SPRD_DMA_FRAG_INT;

	default:
		dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
		return SPRD_DMA_NO_INT;
	}
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
	u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

	return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}
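
/*
 * The request mode programmed into SPRD_DMA_CHN_FRG_LEN determines how much
 * data one hardware request moves (fragment, block, transaction or
 * link-list). sprd_dma_check_trans_done() compares it against the raised
 * interrupt type to decide whether the whole descriptor has finished.
 */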

static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 val, chn = schan->chn_num + 1;

	switch (schan->chn_mode) {
	case SPRD_DMA_SRC_CHN0:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_SRC_CHN1:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	case SPRD_DMA_DST_CHN0:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_DST_CHN1:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	default:
		dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
			schan->chn_mode);
		return -EINVAL;
	}

	return 0;
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
				    struct sprd_dma_desc *sdesc)
{
	struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

	writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
	writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
	writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
	writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
	writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
	writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
	writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
	writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
	writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
	writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
	writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
	writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
	writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
	writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;

	list_del(&vd->node);
	schan->cur_desc = to_sprd_dma_desc(vd);

	/*
	 * Set 2-stage configuration if the channel starts one 2-stage
	 * transfer.
	 */
	if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
		return;

	/*
	 * Copy the DMA configuration from DMA descriptor to this hardware
	 * channel.
	 */
	sprd_dma_set_chn_config(schan, schan->cur_desc);
	sprd_dma_set_uid(schan);
	sprd_dma_enable_chn(schan);

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID &&
	    schan->chn_mode != SPRD_DMA_DST_CHN0 &&
	    schan->chn_mode != SPRD_DMA_DST_CHN1)
		sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
	sprd_dma_stop_and_disable(schan);
	sprd_dma_unset_uid(schan);
	sprd_dma_clear_int(schan);
	schan->cur_desc = NULL;
}

static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
				      enum sprd_dma_int_type int_type,
				      enum sprd_dma_req_mode req_mode)
{
	if (int_type == SPRD_DMA_NO_INT)
		return false;

	if (int_type >= req_mode + 1)
		return true;

	return false;
}
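
/*
 * All channels share one interrupt line; SPRD_DMA_GLB_INT_MSK_STS holds one
 * pending bit per channel, so the handler below walks the set bits, clears
 * the per-channel interrupt and either fires the cyclic callback (link-list
 * transfers) or completes the current descriptor and starts the next one.
 */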

static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
	struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
	u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
	struct sprd_dma_chn *schan;
	struct sprd_dma_desc *sdesc;
	enum sprd_dma_req_mode req_type;
	enum sprd_dma_int_type int_type;
	bool trans_done = false, cyclic = false;
	u32 i;

	while (irq_status) {
		i = __ffs(irq_status);
		irq_status &= (irq_status - 1);
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);

		sdesc = schan->cur_desc;
		if (!sdesc) {
			spin_unlock(&schan->vc.lock);
			return IRQ_HANDLED;
		}

		int_type = sprd_dma_get_int_type(schan);
		req_type = sprd_dma_get_req_type(schan);
		sprd_dma_clear_int(schan);

		/* cyclic mode schedule callback */
		cyclic = schan->linklist.phy_addr ? true : false;
		if (cyclic == true) {
			vchan_cyclic_callback(&sdesc->vd);
		} else {
			/* Check if the dma request descriptor is done. */
			trans_done = sprd_dma_check_trans_done(sdesc, int_type,
							       req_type);
			if (trans_done == true) {
				vchan_cookie_complete(&sdesc->vd);
				schan->cur_desc = NULL;
				sprd_dma_start(schan);
			}
		}
		spin_unlock(&schan->vc.lock);
	}

	return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
	return pm_runtime_get_sync(chan->device->dev);
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_stop(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	vchan_free_chan_resources(&schan->vc);
	pm_runtime_put(chan->device->dev);
}

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	u32 pos;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&schan->vc.lock, flags);
	vd = vchan_find_desc(&schan->vc, cookie);
	if (vd) {
		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
		struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

		if (hw->trsc_len > 0)
			pos = hw->trsc_len;
		else if (hw->blk_len > 0)
			pos = hw->blk_len;
		else if (hw->frg_len > 0)
			pos = hw->frg_len;
		else
			pos = 0;
	} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
		struct sprd_dma_desc *sdesc = schan->cur_desc;

		if (sdesc->dir == DMA_DEV_TO_MEM)
			pos = sprd_dma_get_dst_addr(schan);
		else
			pos = sprd_dma_get_src_addr(schan);
	} else {
		pos = 0;
	}
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	dma_set_residue(txstate, pos);
	return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}

static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(buswidth) - 1;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return buswidth;

	default:
		return -EINVAL;
	}
}
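
/*
 * Note the encoding difference between the two helpers above: the data width
 * written to FRG_LEN is log2 of the bus width in bytes (ffs(buswidth) - 1),
 * while the transfer step written to TRSF_STEP is the bus width itself,
 * since addresses advance by one data unit per beat.
 */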

static int sprd_dma_fill_desc(struct dma_chan *chan,
			      struct sprd_dma_chn_hw *hw,
			      unsigned int sglen, int sg_index,
			      dma_addr_t src, dma_addr_t dst, u32 len,
			      enum dma_transfer_direction dir,
			      unsigned long flags,
			      struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
	u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
	u32 int_mode = flags & SPRD_DMA_INT_MASK;
	int src_datawidth, dst_datawidth, src_step, dst_step;
	u32 temp, fix_mode = 0, fix_en = 0;

	if (dir == DMA_MEM_TO_DEV) {
		src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
		if (src_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid source step\n");
			return src_step;
		}

		/*
		 * For 2-stage transfer, destination channel step can not be 0,
		 * since destination device is AON IRAM.
		 */
		if (chn_mode == SPRD_DMA_DST_CHN0 ||
		    chn_mode == SPRD_DMA_DST_CHN1)
			dst_step = src_step;
		else
			dst_step = SPRD_DMA_NONE_STEP;
	} else {
		dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
		if (dst_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid destination step\n");
			return dst_step;
		}
		src_step = SPRD_DMA_NONE_STEP;
	}

	src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
	if (src_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
		return src_datawidth;
	}

	dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
	if (dst_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
		return dst_datawidth;
	}

	if (slave_cfg->slave_id)
		schan->dev_id = slave_cfg->slave_id;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;

	/*
	 * wrap_ptr and wrap_to hold the high 4 bits of the source and
	 * destination addresses.
	 */
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;

	/*
	 * If the src step and dst step are both 0, or both are non-zero, fix
	 * mode can not be used. If exactly one of them is 0, fix mode can be
	 * enabled.
	 */
	if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
		fix_en = 0;
	} else {
		fix_en = 1;
		if (src_step)
			fix_mode = 1;
		else
			fix_mode = 0;
	}

	hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;

	temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
	temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
	temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	/* link-list configuration */
	if (schan->linklist.phy_addr) {
		hw->cfg |= SPRD_DMA_LINKLIST_EN;

		/* link-list index */
		temp = sglen ? (sg_index + 1) % sglen : 0;

		/* Next link-list configuration's physical address offset */
		temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;
		/*
		 * Set the link-list pointer point to next link-list
		 * configuration's physical address.
		 */
		hw->llist_ptr = schan->linklist.phy_addr + temp;
	}

	hw->frg_step = 0;
	hw->src_blk_step = 0;
	hw->des_blk_step = 0;
	return 0;
}

static int sprd_dma_fill_linklist_desc(struct dma_chan *chan,
				       unsigned int sglen, int sg_index,
				       dma_addr_t src, dma_addr_t dst, u32 len,
				       enum dma_transfer_direction dir,
				       unsigned long flags,
				       struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_chn_hw *hw;

	if (!schan->linklist.virt_addr)
		return -EINVAL;

	hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr +
					sg_index * sizeof(*hw));

	return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len,
				  dir, flags, slave_cfg);
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			 size_t len, unsigned long flags)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_desc *sdesc;
	struct sprd_dma_chn_hw *hw;
	enum sprd_dma_datawidth datawidth;
	u32 step, temp;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	hw = &sdesc->chn_hw;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
	hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		       SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		      SPRD_DMA_HIGH_ADDR_MASK;

	if (IS_ALIGNED(len, 8)) {
		datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
		step = SPRD_DMA_DWORD_STEP;
	} else if (IS_ALIGNED(len, 4)) {
		datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
		step = SPRD_DMA_WORD_STEP;
	} else if (IS_ALIGNED(len, 2)) {
		datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
		step = SPRD_DMA_SHORT_STEP;
	} else {
		datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
		step = SPRD_DMA_BYTE_STEP;
	}

	temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= len & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		       unsigned int sglen, enum dma_transfer_direction dir,
		       unsigned long flags, void *context)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;
	dma_addr_t src = 0, dst = 0;
	dma_addr_t start_src = 0, start_dst = 0;
	struct sprd_dma_desc *sdesc;
	struct scatterlist *sg;
	u32 len = 0;
	int ret, i;

	if (!is_slave_direction(dir))
		return NULL;

	if (context) {
		struct sprd_dma_linklist *ll_cfg =
			(struct sprd_dma_linklist *)context;

		schan->linklist.phy_addr = ll_cfg->phy_addr;
		schan->linklist.virt_addr = ll_cfg->virt_addr;
	} else {
		schan->linklist.phy_addr = 0;
		schan->linklist.virt_addr = 0;
	}

	/*
	 * Set channel mode, interrupt mode and trigger mode for 2-stage
	 * transfer.
	 */
	schan->chn_mode =
		(flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
	schan->trg_mode =
		(flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
	schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	sdesc->dir = dir;

	for_each_sg(sgl, sg, sglen, i) {
		len = sg_dma_len(sg);

		if (dir == DMA_MEM_TO_DEV) {
			src = sg_dma_address(sg);
			dst = slave_cfg->dst_addr;
		} else {
			src = slave_cfg->src_addr;
			dst = sg_dma_address(sg);
		}

		if (!i) {
			start_src = src;
			start_dst = dst;
		}

		/*
		 * The link-list mode needs at least 2 link-list
		 * configurations. If there is only one sg, it doesn't
		 * need to fill the link-list configuration.
		 */
		if (sglen < 2)
			break;

		ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len,
						  dir, flags, slave_cfg);
		if (ret) {
			kfree(sdesc);
			return NULL;
		}
	}

	ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src,
				 start_dst, len, dir, flags, slave_cfg);
	if (ret) {
		kfree(sdesc);
		return NULL;
	}

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static int sprd_dma_slave_config(struct dma_chan *chan,
				 struct dma_slave_config *config)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;

	memcpy(slave_cfg, config, sizeof(*config));
	return 0;
}

static int sprd_dma_pause(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, true);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, false);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_stop(schan);

	vchan_get_all_descriptors(&schan->vc, &head);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	vchan_dma_desc_free_list(&schan->vc, &head);
	return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

	kfree(sdesc);
}

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	u32 slave_id = *(u32 *)param;

	schan->dev_id = slave_id;
	return true;
}
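
/*
 * Illustrative sketch (not literal framework code) of how a channel request
 * reaches this filter through of_dma_simple_xlate() registered in probe:
 *
 *	u32 slave_id = dma_spec->args[0];
 *	chan = dma_request_channel(info->dma_cap, sprd_dma_filter_fn,
 *				   &slave_id);
 *
 * The id ends up in schan->dev_id and is later programmed as the channel's
 * request uid by sprd_dma_set_uid().
 */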

static int sprd_dma_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct sprd_dma_dev *sdev;
	struct sprd_dma_chn *dma_chn;
	struct resource *res;
	u32 chn_count;
	int ret, i;

	ret = device_property_read_u32(&pdev->dev, "#dma-channels", &chn_count);
	if (ret) {
		dev_err(&pdev->dev, "get dma channels count failed\n");
		return ret;
	}

	sdev = devm_kzalloc(&pdev->dev,
			    struct_size(sdev, channels, chn_count),
			    GFP_KERNEL);
	if (!sdev)
		return -ENOMEM;

	sdev->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(sdev->clk)) {
		dev_err(&pdev->dev, "get enable clock failed\n");
		return PTR_ERR(sdev->clk);
	}

	/* ashb clock is optional for AGCP DMA */
	sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
	if (IS_ERR(sdev->ashb_clk))
		dev_warn(&pdev->dev, "no optional ashb eb clock\n");

	/*
	 * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
	 * AGCP DMA controller may skip requesting the irq, so that DMA
	 * interrupts cannot resume the system and power is saved. Thus the
	 * DMA interrupts property is optional.
	 */
	sdev->irq = platform_get_irq(pdev, 0);
	if (sdev->irq > 0) {
		ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
				       0, "sprd_dma", (void *)sdev);
		if (ret < 0) {
			dev_err(&pdev->dev, "request dma irq failed\n");
			return ret;
		}
	} else {
		dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	sdev->glb_base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(sdev->glb_base))
		return PTR_ERR(sdev->glb_base);

	dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
	sdev->total_chns = chn_count;
	sdev->dma_dev.chancnt = chn_count;
	INIT_LIST_HEAD(&sdev->dma_dev.channels);
	INIT_LIST_HEAD(&sdev->dma_dev.global_node);
	sdev->dma_dev.dev = &pdev->dev;
	sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
	sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
	sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
	sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
	sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
	sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
	sdev->dma_dev.device_config = sprd_dma_slave_config;
	sdev->dma_dev.device_pause = sprd_dma_pause;
	sdev->dma_dev.device_resume = sprd_dma_resume;
	sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

	for (i = 0; i < chn_count; i++) {
		dma_chn = &sdev->channels[i];
		dma_chn->chn_num = i;
		dma_chn->cur_desc = NULL;
		/* get each channel's registers base address. */
		dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
				    SPRD_DMA_CHN_REG_LENGTH * i;

		dma_chn->vc.desc_free = sprd_dma_free_desc;
		vchan_init(&dma_chn->vc, &sdev->dma_dev);
	}

	platform_set_drvdata(pdev, sdev);
	ret = sprd_dma_enable(sdev);
	if (ret)
		return ret;

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_rpm;

	ret = dma_async_device_register(&sdev->dma_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
		goto err_register;
	}

	sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
	ret = of_dma_controller_register(np, of_dma_simple_xlate,
					 &sprd_dma_info);
	if (ret)
		goto err_of_register;

	pm_runtime_put(&pdev->dev);
	return 0;

err_of_register:
	dma_async_device_unregister(&sdev->dma_dev);
err_register:
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
err_rpm:
	sprd_dma_disable(sdev);
	return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
	struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
	struct sprd_dma_chn *c, *cn;
	int ret;

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		return ret;

	/* explicitly free the irq */
	if (sdev->irq > 0)
		devm_free_irq(&pdev->dev, sdev->irq, sdev);

	list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&sdev->dma_dev);
	sprd_dma_disable(sdev);

	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	return 0;
}

static const struct of_device_id sprd_dma_match[] = {
	{ .compatible = "sprd,sc9860-dma", },
	{},
};

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

	sprd_dma_disable(sdev);
	return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = sprd_dma_enable(sdev);
	if (ret)
		dev_err(sdev->dma_dev.dev, "enable dma failed\n");

	return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
			   sprd_dma_runtime_resume,
			   NULL)
};

static struct platform_driver sprd_dma_driver = {
	.probe = sprd_dma_probe,
	.remove = sprd_dma_remove,
	.driver = {
		.name = "sprd-dma",
		.of_match_table = sprd_dma_match,
		.pm = &sprd_dma_pm_ops,
	},
};

module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_AUTHOR("Eric Long <eric.long@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");