/*
 * Renesas R-Car Audio DMAC support
 *
 * Copyright (C) 2015 Renesas Electronics Corp.
 * Copyright (c) 2015 Kuninori Morimoto <kuninori.morimoto.gx@renesas.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
11 #include <linux/delay.h>
12 #include <linux/of_dma.h>
/*
 * Audio DMAC peri peri register
 */
23 #define PDMACHCR_DE (1 << 0)
27 struct dma_chan
*chan
;
31 unsigned int dma_period
;
42 struct rsnd_mod
*mod_from
;
43 struct rsnd_mod
*mod_to
;
52 struct rsnd_dma_ctrl
{
58 #define rsnd_priv_to_dmac(p) ((struct rsnd_dma_ctrl *)(p)->dma)
59 #define rsnd_mod_to_dma(_mod) container_of((_mod), struct rsnd_dma, mod)
60 #define rsnd_dma_to_dmaen(dma) (&(dma)->dma.en)
61 #define rsnd_dma_to_dmapp(dma) (&(dma)->dma.pp)
64 static struct rsnd_mod_ops mem_ops
= {
68 static struct rsnd_mod mem
= {
74 #define rsnd_dmaen_sync(dmaen, io, i) __rsnd_dmaen_sync(dmaen, io, i, 1)
75 #define rsnd_dmaen_unsync(dmaen, io, i) __rsnd_dmaen_sync(dmaen, io, i, 0)
76 static void __rsnd_dmaen_sync(struct rsnd_dmaen
*dmaen
, struct rsnd_dai_stream
*io
,
79 struct device
*dev
= dmaen
->chan
->device
->dev
;
80 enum dma_data_direction dir
;
81 int is_play
= rsnd_io_is_play(io
);
87 period
= dmaen
->dma_period
;
90 buf
= dmaen
->dma_buf
+ (period
* i
);
92 dir
= is_play
? DMA_TO_DEVICE
: DMA_FROM_DEVICE
;
95 dma_sync_single_for_device(dev
, buf
, period
, dir
);
97 dma_sync_single_for_cpu(dev
, buf
, period
, dir
);
100 static void __rsnd_dmaen_complete(struct rsnd_mod
*mod
,
101 struct rsnd_dai_stream
*io
)
103 struct rsnd_priv
*priv
= rsnd_mod_to_priv(mod
);
104 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
105 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
106 bool elapsed
= false;
110 * Renesas sound Gen1 needs 1 DMAC,
112 * In Gen2 case, it are Audio-DMAC, and Audio-DMAC-peri-peri.
113 * But, Audio-DMAC-peri-peri doesn't have interrupt,
114 * and this driver is assuming that here.
116 spin_lock_irqsave(&priv
->lock
, flags
);
118 if (rsnd_io_is_working(io
)) {
119 rsnd_dmaen_unsync(dmaen
, io
, dmaen
->dma_cnt
);
122 * Next period is already started.
123 * Let's sync Next Next period
127 rsnd_dmaen_sync(dmaen
, io
, dmaen
->dma_cnt
+ 2);
134 spin_unlock_irqrestore(&priv
->lock
, flags
);
137 rsnd_dai_period_elapsed(io
);
140 static void rsnd_dmaen_complete(void *data
)
142 struct rsnd_mod
*mod
= data
;
144 rsnd_mod_interrupt(mod
, __rsnd_dmaen_complete
);
147 static struct dma_chan
*rsnd_dmaen_request_channel(struct rsnd_dai_stream
*io
,
148 struct rsnd_mod
*mod_from
,
149 struct rsnd_mod
*mod_to
)
151 if ((!mod_from
&& !mod_to
) ||
152 (mod_from
&& mod_to
))
156 return rsnd_mod_dma_req(io
, mod_from
);
158 return rsnd_mod_dma_req(io
, mod_to
);
161 static int rsnd_dmaen_stop(struct rsnd_mod
*mod
,
162 struct rsnd_dai_stream
*io
,
163 struct rsnd_priv
*priv
)
165 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
166 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
169 int is_play
= rsnd_io_is_play(io
);
171 dmaengine_terminate_all(dmaen
->chan
);
172 dma_unmap_single(dmaen
->chan
->device
->dev
,
173 dmaen
->dma_buf
, dmaen
->dma_len
,
174 is_play
? DMA_TO_DEVICE
: DMA_FROM_DEVICE
);
180 static int rsnd_dmaen_nolock_stop(struct rsnd_mod
*mod
,
181 struct rsnd_dai_stream
*io
,
182 struct rsnd_priv
*priv
)
184 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
185 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
188 * DMAEngine release uses mutex lock.
189 * Thus, it shouldn't be called under spinlock.
190 * Let's call it under nolock_start
193 dma_release_channel(dmaen
->chan
);
200 static int rsnd_dmaen_nolock_start(struct rsnd_mod
*mod
,
201 struct rsnd_dai_stream
*io
,
202 struct rsnd_priv
*priv
)
204 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
205 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
206 struct device
*dev
= rsnd_priv_to_dev(priv
);
209 dev_err(dev
, "it already has dma channel\n");
214 * DMAEngine request uses mutex lock.
215 * Thus, it shouldn't be called under spinlock.
216 * Let's call it under nolock_start
218 dmaen
->chan
= rsnd_dmaen_request_channel(io
,
221 if (IS_ERR_OR_NULL(dmaen
->chan
)) {
223 dev_err(dev
, "can't get dma channel\n");
230 static int rsnd_dmaen_start(struct rsnd_mod
*mod
,
231 struct rsnd_dai_stream
*io
,
232 struct rsnd_priv
*priv
)
234 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
235 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
236 struct snd_pcm_substream
*substream
= io
->substream
;
237 struct device
*dev
= rsnd_priv_to_dev(priv
);
238 struct dma_async_tx_descriptor
*desc
;
239 struct dma_slave_config cfg
= {};
243 int is_play
= rsnd_io_is_play(io
);
247 cfg
.direction
= is_play
? DMA_MEM_TO_DEV
: DMA_DEV_TO_MEM
;
248 cfg
.src_addr
= dma
->src_addr
;
249 cfg
.dst_addr
= dma
->dst_addr
;
250 cfg
.src_addr_width
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
251 cfg
.dst_addr_width
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
253 dev_dbg(dev
, "%s[%d] %pad -> %pad\n",
254 rsnd_mod_name(mod
), rsnd_mod_id(mod
),
255 &cfg
.src_addr
, &cfg
.dst_addr
);
257 ret
= dmaengine_slave_config(dmaen
->chan
, &cfg
);
261 len
= snd_pcm_lib_buffer_bytes(substream
);
262 period
= snd_pcm_lib_period_bytes(substream
);
263 buf
= dma_map_single(dmaen
->chan
->device
->dev
,
264 substream
->runtime
->dma_area
,
266 is_play
? DMA_TO_DEVICE
: DMA_FROM_DEVICE
);
267 if (dma_mapping_error(dmaen
->chan
->device
->dev
, buf
)) {
268 dev_err(dev
, "dma map failed\n");
272 desc
= dmaengine_prep_dma_cyclic(dmaen
->chan
,
274 is_play
? DMA_MEM_TO_DEV
: DMA_DEV_TO_MEM
,
275 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
278 dev_err(dev
, "dmaengine_prep_slave_sg() fail\n");
282 desc
->callback
= rsnd_dmaen_complete
;
283 desc
->callback_param
= rsnd_mod_get(dma
);
285 dmaen
->dma_buf
= buf
;
286 dmaen
->dma_len
= len
;
287 dmaen
->dma_period
= period
;
291 * synchronize this and next period
293 * __rsnd_dmaen_complete()
295 for (i
= 0; i
< 2; i
++)
296 rsnd_dmaen_sync(dmaen
, io
, i
);
298 dmaen
->cookie
= dmaengine_submit(desc
);
299 if (dmaen
->cookie
< 0) {
300 dev_err(dev
, "dmaengine_submit() fail\n");
304 dma_async_issue_pending(dmaen
->chan
);
309 struct dma_chan
*rsnd_dma_request_channel(struct device_node
*of_node
,
310 struct rsnd_mod
*mod
, char *name
)
312 struct dma_chan
*chan
= NULL
;
313 struct device_node
*np
;
316 for_each_child_of_node(of_node
, np
) {
317 if (i
== rsnd_mod_id(mod
) && (!chan
))
318 chan
= of_dma_request_slave_channel(np
, name
);
322 /* It should call of_node_put(), since, it is rsnd_xxx_of_node() */
323 of_node_put(of_node
);
328 static int rsnd_dmaen_attach(struct rsnd_dai_stream
*io
,
329 struct rsnd_dma
*dma
,
330 struct rsnd_mod
*mod_from
, struct rsnd_mod
*mod_to
)
332 struct rsnd_priv
*priv
= rsnd_io_to_priv(io
);
333 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
334 struct dma_chan
*chan
;
336 /* try to get DMAEngine channel */
337 chan
= rsnd_dmaen_request_channel(io
, mod_from
, mod_to
);
338 if (IS_ERR_OR_NULL(chan
)) {
340 * DMA failed. try to PIO mode
342 * rsnd_ssi_fallback()
343 * rsnd_rdai_continuance_probe()
348 dma_release_channel(chan
);
355 static int rsnd_dmaen_pointer(struct rsnd_mod
*mod
,
356 struct rsnd_dai_stream
*io
,
357 snd_pcm_uframes_t
*pointer
)
359 struct snd_pcm_runtime
*runtime
= rsnd_io_to_runtime(io
);
360 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
361 struct rsnd_dmaen
*dmaen
= rsnd_dma_to_dmaen(dma
);
362 struct dma_tx_state state
;
363 enum dma_status status
;
364 unsigned int pos
= 0;
366 status
= dmaengine_tx_status(dmaen
->chan
, dmaen
->cookie
, &state
);
367 if (status
== DMA_IN_PROGRESS
|| status
== DMA_PAUSED
) {
368 if (state
.residue
> 0 && state
.residue
<= dmaen
->dma_len
)
369 pos
= dmaen
->dma_len
- state
.residue
;
371 *pointer
= bytes_to_frames(runtime
, pos
);
376 static struct rsnd_mod_ops rsnd_dmaen_ops
= {
378 .nolock_start
= rsnd_dmaen_nolock_start
,
379 .nolock_stop
= rsnd_dmaen_nolock_stop
,
380 .start
= rsnd_dmaen_start
,
381 .stop
= rsnd_dmaen_stop
,
382 .pointer
= rsnd_dmaen_pointer
,
386 * Audio DMAC peri peri
388 static const u8 gen2_id_table_ssiu
[] = {
400 static const u8 gen2_id_table_scu
[] = {
401 0x2d, /* SCU_SRCI0 */
402 0x2e, /* SCU_SRCI1 */
403 0x2f, /* SCU_SRCI2 */
404 0x30, /* SCU_SRCI3 */
405 0x31, /* SCU_SRCI4 */
406 0x32, /* SCU_SRCI5 */
407 0x33, /* SCU_SRCI6 */
408 0x34, /* SCU_SRCI7 */
409 0x35, /* SCU_SRCI8 */
410 0x36, /* SCU_SRCI9 */
412 static const u8 gen2_id_table_cmd
[] = {
417 static u32
rsnd_dmapp_get_id(struct rsnd_dai_stream
*io
,
418 struct rsnd_mod
*mod
)
420 struct rsnd_mod
*ssi
= rsnd_io_to_mod_ssi(io
);
421 struct rsnd_mod
*src
= rsnd_io_to_mod_src(io
);
422 struct rsnd_mod
*dvc
= rsnd_io_to_mod_dvc(io
);
423 const u8
*entry
= NULL
;
424 int id
= rsnd_mod_id(mod
);
428 entry
= gen2_id_table_ssiu
;
429 size
= ARRAY_SIZE(gen2_id_table_ssiu
);
430 } else if (mod
== src
) {
431 entry
= gen2_id_table_scu
;
432 size
= ARRAY_SIZE(gen2_id_table_scu
);
433 } else if (mod
== dvc
) {
434 entry
= gen2_id_table_cmd
;
435 size
= ARRAY_SIZE(gen2_id_table_cmd
);
438 if ((!entry
) || (size
<= id
)) {
439 struct device
*dev
= rsnd_priv_to_dev(rsnd_io_to_priv(io
));
441 dev_err(dev
, "unknown connection (%s[%d])\n",
442 rsnd_mod_name(mod
), rsnd_mod_id(mod
));
444 /* use non-prohibited SRS number as error */
445 return 0x00; /* SSI00 */
451 static u32
rsnd_dmapp_get_chcr(struct rsnd_dai_stream
*io
,
452 struct rsnd_mod
*mod_from
,
453 struct rsnd_mod
*mod_to
)
455 return (rsnd_dmapp_get_id(io
, mod_from
) << 24) +
456 (rsnd_dmapp_get_id(io
, mod_to
) << 16);
459 #define rsnd_dmapp_addr(dmac, dma, reg) \
460 (dmac->base + 0x20 + reg + \
461 (0x10 * rsnd_dma_to_dmapp(dma)->dmapp_id))
462 static void rsnd_dmapp_write(struct rsnd_dma
*dma
, u32 data
, u32 reg
)
464 struct rsnd_mod
*mod
= rsnd_mod_get(dma
);
465 struct rsnd_priv
*priv
= rsnd_mod_to_priv(mod
);
466 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
467 struct device
*dev
= rsnd_priv_to_dev(priv
);
469 dev_dbg(dev
, "w %p : %08x\n", rsnd_dmapp_addr(dmac
, dma
, reg
), data
);
471 iowrite32(data
, rsnd_dmapp_addr(dmac
, dma
, reg
));
474 static u32
rsnd_dmapp_read(struct rsnd_dma
*dma
, u32 reg
)
476 struct rsnd_mod
*mod
= rsnd_mod_get(dma
);
477 struct rsnd_priv
*priv
= rsnd_mod_to_priv(mod
);
478 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
480 return ioread32(rsnd_dmapp_addr(dmac
, dma
, reg
));
483 static void rsnd_dmapp_bset(struct rsnd_dma
*dma
, u32 data
, u32 mask
, u32 reg
)
485 struct rsnd_mod
*mod
= rsnd_mod_get(dma
);
486 struct rsnd_priv
*priv
= rsnd_mod_to_priv(mod
);
487 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
488 void __iomem
*addr
= rsnd_dmapp_addr(dmac
, dma
, reg
);
489 u32 val
= ioread32(addr
);
492 val
|= (data
& mask
);
494 iowrite32(val
, addr
);
497 static int rsnd_dmapp_stop(struct rsnd_mod
*mod
,
498 struct rsnd_dai_stream
*io
,
499 struct rsnd_priv
*priv
)
501 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
504 rsnd_dmapp_bset(dma
, 0, PDMACHCR_DE
, PDMACHCR
);
506 for (i
= 0; i
< 1024; i
++) {
507 if (0 == (rsnd_dmapp_read(dma
, PDMACHCR
) & PDMACHCR_DE
))
515 static int rsnd_dmapp_start(struct rsnd_mod
*mod
,
516 struct rsnd_dai_stream
*io
,
517 struct rsnd_priv
*priv
)
519 struct rsnd_dma
*dma
= rsnd_mod_to_dma(mod
);
520 struct rsnd_dmapp
*dmapp
= rsnd_dma_to_dmapp(dma
);
522 rsnd_dmapp_write(dma
, dma
->src_addr
, PDMASAR
);
523 rsnd_dmapp_write(dma
, dma
->dst_addr
, PDMADAR
);
524 rsnd_dmapp_write(dma
, dmapp
->chcr
, PDMACHCR
);
529 static int rsnd_dmapp_attach(struct rsnd_dai_stream
*io
,
530 struct rsnd_dma
*dma
,
531 struct rsnd_mod
*mod_from
, struct rsnd_mod
*mod_to
)
533 struct rsnd_dmapp
*dmapp
= rsnd_dma_to_dmapp(dma
);
534 struct rsnd_priv
*priv
= rsnd_io_to_priv(io
);
535 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
536 struct device
*dev
= rsnd_priv_to_dev(priv
);
538 dmapp
->dmapp_id
= dmac
->dmapp_num
;
539 dmapp
->chcr
= rsnd_dmapp_get_chcr(io
, mod_from
, mod_to
) | PDMACHCR_DE
;
543 dev_dbg(dev
, "id/src/dst/chcr = %d/%pad/%pad/%08x\n",
544 dmapp
->dmapp_id
, &dma
->src_addr
, &dma
->dst_addr
, dmapp
->chcr
);
549 static struct rsnd_mod_ops rsnd_dmapp_ops
= {
551 .start
= rsnd_dmapp_start
,
552 .stop
= rsnd_dmapp_stop
,
553 .quit
= rsnd_dmapp_stop
,
/*
 *		Common DMAC Interface
 */

/*
 *	DMA read/write register offset
 *
 *	RSND_xxx_I_N	: Audio DMAC input
 *	RSND_xxx_O_N	: Audio DMAC output
 *	RSND_xxx_I_P	: Audio DMAC peri peri input
 *	RSND_xxx_O_P	: Audio DMAC peri peri output
 *
 *	mod	/ DMAC in	/ DMAC out	/ DMAC PP in	/ DMAC pp out
 *	SSI	: 0xec541000	/ 0xec241008	/ 0xec24100c
 *	SSIU	: 0xec541000	/ 0xec100000	/ 0xec100000	/ 0xec400000	/ 0xec400000
 *	SCU	: 0xec500000	/ 0xec000000	/ 0xec004000	/ 0xec300000	/ 0xec304000
 *	CMD	: 0xec500000	/		/ 0xec008000			0xec308000
 */
#define RDMA_SSI_I_N(addr, i)	(addr ##_reg - 0x00300000 + (0x40 * i) + 0x8)
#define RDMA_SSI_O_N(addr, i)	(addr ##_reg - 0x00300000 + (0x40 * i) + 0xc)

#define RDMA_SSIU_I_N(addr, i)	(addr ##_reg - 0x00441000 + (0x1000 * i))
#define RDMA_SSIU_O_N(addr, i)	(addr ##_reg - 0x00441000 + (0x1000 * i))

#define RDMA_SSIU_I_P(addr, i)	(addr ##_reg - 0x00141000 + (0x1000 * i))
#define RDMA_SSIU_O_P(addr, i)	(addr ##_reg - 0x00141000 + (0x1000 * i))

#define RDMA_SRC_I_N(addr, i)	(addr ##_reg - 0x00500000 + (0x400 * i))
#define RDMA_SRC_O_N(addr, i)	(addr ##_reg - 0x004fc000 + (0x400 * i))

#define RDMA_SRC_I_P(addr, i)	(addr ##_reg - 0x00200000 + (0x400 * i))
#define RDMA_SRC_O_P(addr, i)	(addr ##_reg - 0x001fc000 + (0x400 * i))

#define RDMA_CMD_O_N(addr, i)	(addr ##_reg - 0x004f8000 + (0x400 * i))
#define RDMA_CMD_O_P(addr, i)	(addr ##_reg - 0x001f8000 + (0x400 * i))
594 rsnd_gen2_dma_addr(struct rsnd_dai_stream
*io
,
595 struct rsnd_mod
*mod
,
596 int is_play
, int is_from
)
598 struct rsnd_priv
*priv
= rsnd_io_to_priv(io
);
599 struct device
*dev
= rsnd_priv_to_dev(priv
);
600 phys_addr_t ssi_reg
= rsnd_gen_get_phy_addr(priv
, RSND_GEN2_SSI
);
601 phys_addr_t src_reg
= rsnd_gen_get_phy_addr(priv
, RSND_GEN2_SCU
);
602 int is_ssi
= !!(rsnd_io_to_mod_ssi(io
) == mod
);
603 int use_src
= !!rsnd_io_to_mod_src(io
);
604 int use_cmd
= !!rsnd_io_to_mod_dvc(io
) ||
605 !!rsnd_io_to_mod_mix(io
) ||
606 !!rsnd_io_to_mod_ctu(io
);
607 int id
= rsnd_mod_id(mod
);
611 } dma_addrs
[3][2][3] = {
615 { RDMA_SRC_O_N(src
, id
), RDMA_SRC_I_P(src
, id
) },
616 { RDMA_CMD_O_N(src
, id
), RDMA_SRC_I_P(src
, id
) } },
619 { RDMA_SRC_O_P(src
, id
), RDMA_SRC_I_N(src
, id
) },
620 { RDMA_CMD_O_P(src
, id
), RDMA_SRC_I_N(src
, id
) } }
624 {{{ RDMA_SSI_O_N(ssi
, id
), 0 },
625 { RDMA_SSIU_O_P(ssi
, id
), 0 },
626 { RDMA_SSIU_O_P(ssi
, id
), 0 } },
628 {{ 0, RDMA_SSI_I_N(ssi
, id
) },
629 { 0, RDMA_SSIU_I_P(ssi
, id
) },
630 { 0, RDMA_SSIU_I_P(ssi
, id
) } }
634 {{{ RDMA_SSIU_O_N(ssi
, id
), 0 },
635 { RDMA_SSIU_O_P(ssi
, id
), 0 },
636 { RDMA_SSIU_O_P(ssi
, id
), 0 } },
638 {{ 0, RDMA_SSIU_I_N(ssi
, id
) },
639 { 0, RDMA_SSIU_I_P(ssi
, id
) },
640 { 0, RDMA_SSIU_I_P(ssi
, id
) } } },
643 /* it shouldn't happen */
644 if (use_cmd
&& !use_src
)
645 dev_err(dev
, "DVC is selected without SRC\n");
647 /* use SSIU or SSI ? */
648 if (is_ssi
&& rsnd_ssi_use_busif(io
))
652 dma_addrs
[is_ssi
][is_play
][use_src
+ use_cmd
].out_addr
:
653 dma_addrs
[is_ssi
][is_play
][use_src
+ use_cmd
].in_addr
;
656 static dma_addr_t
rsnd_dma_addr(struct rsnd_dai_stream
*io
,
657 struct rsnd_mod
*mod
,
658 int is_play
, int is_from
)
660 struct rsnd_priv
*priv
= rsnd_io_to_priv(io
);
663 * gen1 uses default DMA addr
665 if (rsnd_is_gen1(priv
))
671 return rsnd_gen2_dma_addr(io
, mod
, is_play
, is_from
);
674 #define MOD_MAX (RSND_MOD_MAX + 1) /* +Memory */
675 static void rsnd_dma_of_path(struct rsnd_mod
*this,
676 struct rsnd_dai_stream
*io
,
678 struct rsnd_mod
**mod_from
,
679 struct rsnd_mod
**mod_to
)
681 struct rsnd_mod
*ssi
= rsnd_io_to_mod_ssi(io
);
682 struct rsnd_mod
*src
= rsnd_io_to_mod_src(io
);
683 struct rsnd_mod
*ctu
= rsnd_io_to_mod_ctu(io
);
684 struct rsnd_mod
*mix
= rsnd_io_to_mod_mix(io
);
685 struct rsnd_mod
*dvc
= rsnd_io_to_mod_dvc(io
);
686 struct rsnd_mod
*mod
[MOD_MAX
];
687 struct rsnd_mod
*mod_start
, *mod_end
;
688 struct rsnd_priv
*priv
= rsnd_mod_to_priv(this);
689 struct device
*dev
= rsnd_priv_to_dev(priv
);
696 for (i
= 0; i
< MOD_MAX
; i
++) {
698 nr
+= !!rsnd_io_to_mod(io
, i
);
703 * [S] -*-> SRC -o-> [E]
704 * [S] -*-> SRC -> DVC -o-> [E]
705 * [S] -*-> SRC -> CTU -> MIX -> DVC -o-> [E]
714 * -o-> Audio DMAC peri peri
716 mod_start
= (is_play
) ? NULL
: ssi
;
717 mod_end
= (is_play
) ? ssi
: NULL
;
720 mod
[idx
++] = mod_start
;
721 for (i
= 1; i
< nr
; i
++) {
740 * -------------+-----+-----+
744 if ((this == ssi
) == (is_play
)) {
745 *mod_from
= mod
[idx
- 1];
752 dev_dbg(dev
, "module connection (this is %s[%d])\n",
753 rsnd_mod_name(this), rsnd_mod_id(this));
754 for (i
= 0; i
<= idx
; i
++) {
755 dev_dbg(dev
, " %s[%d]%s\n",
756 rsnd_mod_name(mod
[i
] ? mod
[i
] : &mem
),
757 rsnd_mod_id (mod
[i
] ? mod
[i
] : &mem
),
758 (mod
[i
] == *mod_from
) ? " from" :
759 (mod
[i
] == *mod_to
) ? " to" : "");
763 static int rsnd_dma_alloc(struct rsnd_dai_stream
*io
, struct rsnd_mod
*mod
,
764 struct rsnd_mod
**dma_mod
)
766 struct rsnd_mod
*mod_from
= NULL
;
767 struct rsnd_mod
*mod_to
= NULL
;
768 struct rsnd_priv
*priv
= rsnd_io_to_priv(io
);
769 struct rsnd_dma_ctrl
*dmac
= rsnd_priv_to_dmac(priv
);
770 struct device
*dev
= rsnd_priv_to_dev(priv
);
771 struct rsnd_dma
*dma
;
772 struct rsnd_mod_ops
*ops
;
773 enum rsnd_mod_type type
;
774 int (*attach
)(struct rsnd_dai_stream
*io
, struct rsnd_dma
*dma
,
775 struct rsnd_mod
*mod_from
, struct rsnd_mod
*mod_to
);
776 int is_play
= rsnd_io_is_play(io
);
780 * DMA failed. try to PIO mode
782 * rsnd_ssi_fallback()
783 * rsnd_rdai_continuance_probe()
788 rsnd_dma_of_path(mod
, io
, is_play
, &mod_from
, &mod_to
);
791 if (mod_from
&& mod_to
) {
792 ops
= &rsnd_dmapp_ops
;
793 attach
= rsnd_dmapp_attach
;
794 dma_id
= dmac
->dmapp_num
;
795 type
= RSND_MOD_AUDMAPP
;
797 ops
= &rsnd_dmaen_ops
;
798 attach
= rsnd_dmaen_attach
;
799 dma_id
= dmac
->dmaen_num
;
800 type
= RSND_MOD_AUDMA
;
803 /* for Gen1, overwrite */
804 if (rsnd_is_gen1(priv
)) {
805 ops
= &rsnd_dmaen_ops
;
806 attach
= rsnd_dmaen_attach
;
807 dma_id
= dmac
->dmaen_num
;
808 type
= RSND_MOD_AUDMA
;
811 dma
= devm_kzalloc(dev
, sizeof(*dma
), GFP_KERNEL
);
815 *dma_mod
= rsnd_mod_get(dma
);
817 ret
= rsnd_mod_init(priv
, *dma_mod
, ops
, NULL
,
818 rsnd_mod_get_status
, type
, dma_id
);
822 dev_dbg(dev
, "%s[%d] %s[%d] -> %s[%d]\n",
823 rsnd_mod_name(*dma_mod
), rsnd_mod_id(*dma_mod
),
824 rsnd_mod_name(mod_from
? mod_from
: &mem
),
825 rsnd_mod_id (mod_from
? mod_from
: &mem
),
826 rsnd_mod_name(mod_to
? mod_to
: &mem
),
827 rsnd_mod_id (mod_to
? mod_to
: &mem
));
829 ret
= attach(io
, dma
, mod_from
, mod_to
);
833 dma
->src_addr
= rsnd_dma_addr(io
, mod_from
, is_play
, 1);
834 dma
->dst_addr
= rsnd_dma_addr(io
, mod_to
, is_play
, 0);
835 dma
->mod_from
= mod_from
;
836 dma
->mod_to
= mod_to
;
841 int rsnd_dma_attach(struct rsnd_dai_stream
*io
, struct rsnd_mod
*mod
,
842 struct rsnd_mod
**dma_mod
)
845 int ret
= rsnd_dma_alloc(io
, mod
, dma_mod
);
851 return rsnd_dai_connect(*dma_mod
, io
, (*dma_mod
)->type
);
854 int rsnd_dma_probe(struct rsnd_priv
*priv
)
856 struct platform_device
*pdev
= rsnd_priv_to_pdev(priv
);
857 struct device
*dev
= rsnd_priv_to_dev(priv
);
858 struct rsnd_dma_ctrl
*dmac
;
859 struct resource
*res
;
864 if (rsnd_is_gen1(priv
))
870 res
= platform_get_resource_byname(pdev
, IORESOURCE_MEM
, "audmapp");
871 dmac
= devm_kzalloc(dev
, sizeof(*dmac
), GFP_KERNEL
);
873 dev_err(dev
, "dma allocate failed\n");
874 return 0; /* it will be PIO mode */
878 dmac
->base
= devm_ioremap_resource(dev
, res
);
879 if (IS_ERR(dmac
->base
))
880 return PTR_ERR(dmac
->base
);
884 /* dummy mem mod for debug */
885 return rsnd_mod_init(NULL
, &mem
, &mem_ops
, NULL
, NULL
, 0, 0);