/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
16 #include <linux/module.h>
17 #include <linux/delay.h>
19 #include <linux/sizes.h>
20 #include <linux/pm_runtime.h>
22 #include <sound/soc.h>
23 #include <drm/amd_asic_type.h>
/* Period/buffer geometry: two ping-pong periods per stream. */
#define PLAYBACK_MIN_NUM_PERIODS    2
#define PLAYBACK_MAX_NUM_PERIODS    2
#define PLAYBACK_MAX_PERIOD_SIZE    16384
#define PLAYBACK_MIN_PERIOD_SIZE    1024
#define CAPTURE_MIN_NUM_PERIODS     2
#define CAPTURE_MAX_NUM_PERIODS     2
#define CAPTURE_MAX_PERIOD_SIZE     16384
#define CAPTURE_MIN_PERIOD_SIZE     1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

/* Stoney (ST) has smaller SRAM, hence smaller max period/buffer sizes. */
#define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
#define ST_CAPTURE_MAX_PERIOD_SIZE  ST_PLAYBACK_MAX_PERIOD_SIZE
#define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define ST_MIN_BUFFER ST_MAX_BUFFER
43 static const struct snd_pcm_hardware acp_pcm_hardware_playback
= {
44 .info
= SNDRV_PCM_INFO_INTERLEAVED
|
45 SNDRV_PCM_INFO_BLOCK_TRANSFER
| SNDRV_PCM_INFO_MMAP
|
46 SNDRV_PCM_INFO_MMAP_VALID
| SNDRV_PCM_INFO_BATCH
|
47 SNDRV_PCM_INFO_PAUSE
| SNDRV_PCM_INFO_RESUME
,
48 .formats
= SNDRV_PCM_FMTBIT_S16_LE
|
49 SNDRV_PCM_FMTBIT_S24_LE
| SNDRV_PCM_FMTBIT_S32_LE
,
52 .rates
= SNDRV_PCM_RATE_8000_96000
,
55 .buffer_bytes_max
= PLAYBACK_MAX_NUM_PERIODS
* PLAYBACK_MAX_PERIOD_SIZE
,
56 .period_bytes_min
= PLAYBACK_MIN_PERIOD_SIZE
,
57 .period_bytes_max
= PLAYBACK_MAX_PERIOD_SIZE
,
58 .periods_min
= PLAYBACK_MIN_NUM_PERIODS
,
59 .periods_max
= PLAYBACK_MAX_NUM_PERIODS
,
62 static const struct snd_pcm_hardware acp_pcm_hardware_capture
= {
63 .info
= SNDRV_PCM_INFO_INTERLEAVED
|
64 SNDRV_PCM_INFO_BLOCK_TRANSFER
| SNDRV_PCM_INFO_MMAP
|
65 SNDRV_PCM_INFO_MMAP_VALID
| SNDRV_PCM_INFO_BATCH
|
66 SNDRV_PCM_INFO_PAUSE
| SNDRV_PCM_INFO_RESUME
,
67 .formats
= SNDRV_PCM_FMTBIT_S16_LE
|
68 SNDRV_PCM_FMTBIT_S24_LE
| SNDRV_PCM_FMTBIT_S32_LE
,
71 .rates
= SNDRV_PCM_RATE_8000_48000
,
74 .buffer_bytes_max
= CAPTURE_MAX_NUM_PERIODS
* CAPTURE_MAX_PERIOD_SIZE
,
75 .period_bytes_min
= CAPTURE_MIN_PERIOD_SIZE
,
76 .period_bytes_max
= CAPTURE_MAX_PERIOD_SIZE
,
77 .periods_min
= CAPTURE_MIN_NUM_PERIODS
,
78 .periods_max
= CAPTURE_MAX_NUM_PERIODS
,
81 static const struct snd_pcm_hardware acp_st_pcm_hardware_playback
= {
82 .info
= SNDRV_PCM_INFO_INTERLEAVED
|
83 SNDRV_PCM_INFO_BLOCK_TRANSFER
| SNDRV_PCM_INFO_MMAP
|
84 SNDRV_PCM_INFO_MMAP_VALID
| SNDRV_PCM_INFO_BATCH
|
85 SNDRV_PCM_INFO_PAUSE
| SNDRV_PCM_INFO_RESUME
,
86 .formats
= SNDRV_PCM_FMTBIT_S16_LE
|
87 SNDRV_PCM_FMTBIT_S24_LE
| SNDRV_PCM_FMTBIT_S32_LE
,
90 .rates
= SNDRV_PCM_RATE_8000_96000
,
93 .buffer_bytes_max
= ST_MAX_BUFFER
,
94 .period_bytes_min
= PLAYBACK_MIN_PERIOD_SIZE
,
95 .period_bytes_max
= ST_PLAYBACK_MAX_PERIOD_SIZE
,
96 .periods_min
= PLAYBACK_MIN_NUM_PERIODS
,
97 .periods_max
= PLAYBACK_MAX_NUM_PERIODS
,
100 static const struct snd_pcm_hardware acp_st_pcm_hardware_capture
= {
101 .info
= SNDRV_PCM_INFO_INTERLEAVED
|
102 SNDRV_PCM_INFO_BLOCK_TRANSFER
| SNDRV_PCM_INFO_MMAP
|
103 SNDRV_PCM_INFO_MMAP_VALID
| SNDRV_PCM_INFO_BATCH
|
104 SNDRV_PCM_INFO_PAUSE
| SNDRV_PCM_INFO_RESUME
,
105 .formats
= SNDRV_PCM_FMTBIT_S16_LE
|
106 SNDRV_PCM_FMTBIT_S24_LE
| SNDRV_PCM_FMTBIT_S32_LE
,
109 .rates
= SNDRV_PCM_RATE_8000_48000
,
112 .buffer_bytes_max
= ST_MAX_BUFFER
,
113 .period_bytes_min
= CAPTURE_MIN_PERIOD_SIZE
,
114 .period_bytes_max
= ST_CAPTURE_MAX_PERIOD_SIZE
,
115 .periods_min
= CAPTURE_MIN_NUM_PERIODS
,
116 .periods_max
= CAPTURE_MAX_NUM_PERIODS
,
119 static u32
acp_reg_read(void __iomem
*acp_mmio
, u32 reg
)
121 return readl(acp_mmio
+ (reg
* 4));
124 static void acp_reg_write(u32 val
, void __iomem
*acp_mmio
, u32 reg
)
126 writel(val
, acp_mmio
+ (reg
* 4));
129 /* Configure a given dma channel parameters - enable/disable,
130 * number of descriptors, priority
132 static void config_acp_dma_channel(void __iomem
*acp_mmio
, u8 ch_num
,
133 u16 dscr_strt_idx
, u16 num_dscrs
,
134 enum acp_dma_priority_level priority_level
)
138 /* disable the channel run field */
139 dma_ctrl
= acp_reg_read(acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
140 dma_ctrl
&= ~ACP_DMA_CNTL_0__DMAChRun_MASK
;
141 acp_reg_write(dma_ctrl
, acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
143 /* program a DMA channel with first descriptor to be processed. */
144 acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
146 acp_mmio
, mmACP_DMA_DSCR_STRT_IDX_0
+ ch_num
);
148 /* program a DMA channel with the number of descriptors to be
149 * processed in the transfer
151 acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK
& num_dscrs
,
152 acp_mmio
, mmACP_DMA_DSCR_CNT_0
+ ch_num
);
154 /* set DMA channel priority */
155 acp_reg_write(priority_level
, acp_mmio
, mmACP_DMA_PRIO_0
+ ch_num
);
158 /* Initialize a dma descriptor in SRAM based on descritor information passed */
159 static void config_dma_descriptor_in_sram(void __iomem
*acp_mmio
,
161 acp_dma_dscr_transfer_t
*descr_info
)
165 sram_offset
= (descr_idx
* sizeof(acp_dma_dscr_transfer_t
));
167 /* program the source base address. */
168 acp_reg_write(sram_offset
, acp_mmio
, mmACP_SRBM_Targ_Idx_Addr
);
169 acp_reg_write(descr_info
->src
, acp_mmio
, mmACP_SRBM_Targ_Idx_Data
);
170 /* program the destination base address. */
171 acp_reg_write(sram_offset
+ 4, acp_mmio
, mmACP_SRBM_Targ_Idx_Addr
);
172 acp_reg_write(descr_info
->dest
, acp_mmio
, mmACP_SRBM_Targ_Idx_Data
);
174 /* program the number of bytes to be transferred for this descriptor. */
175 acp_reg_write(sram_offset
+ 8, acp_mmio
, mmACP_SRBM_Targ_Idx_Addr
);
176 acp_reg_write(descr_info
->xfer_val
, acp_mmio
, mmACP_SRBM_Targ_Idx_Data
);
179 /* Initialize the DMA descriptor information for transfer between
180 * system memory <-> ACP SRAM
182 static void set_acp_sysmem_dma_descriptors(void __iomem
*acp_mmio
,
183 u32 size
, int direction
,
184 u32 pte_offset
, u32 asic_type
)
187 u16 dma_dscr_idx
= PLAYBACK_START_DMA_DESCR_CH12
;
188 acp_dma_dscr_transfer_t dmadscr
[NUM_DSCRS_PER_CHANNEL
];
190 for (i
= 0; i
< NUM_DSCRS_PER_CHANNEL
; i
++) {
191 dmadscr
[i
].xfer_val
= 0;
192 if (direction
== SNDRV_PCM_STREAM_PLAYBACK
) {
193 dma_dscr_idx
= PLAYBACK_START_DMA_DESCR_CH12
+ i
;
194 dmadscr
[i
].dest
= ACP_SHARED_RAM_BANK_1_ADDRESS
+
195 (size
/ 2) - (i
* (size
/2));
196 dmadscr
[i
].src
= ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
197 + (pte_offset
* SZ_4K
) + (i
* (size
/2));
200 dmadscr
[i
].xfer_val
|=
201 (ACP_DMA_ATTRIBUTES_DAGB_GARLIC_TO_SHAREDMEM
<< 16) |
205 dmadscr
[i
].xfer_val
|=
206 (ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM
<< 16) |
210 dma_dscr_idx
= CAPTURE_START_DMA_DESCR_CH14
+ i
;
213 dmadscr
[i
].src
= ACP_SHARED_RAM_BANK_3_ADDRESS
+
216 ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
+
217 (pte_offset
* SZ_4K
) + (i
* (size
/2));
218 dmadscr
[i
].xfer_val
|=
220 (ACP_DMA_ATTRIBUTES_SHARED_MEM_TO_DAGB_GARLIC
<< 16) |
224 dmadscr
[i
].src
= ACP_SHARED_RAM_BANK_5_ADDRESS
+
227 ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
+
228 (pte_offset
* SZ_4K
) + (i
* (size
/2));
229 dmadscr
[i
].xfer_val
|=
231 (ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION
<< 16) |
235 config_dma_descriptor_in_sram(acp_mmio
, dma_dscr_idx
,
238 if (direction
== SNDRV_PCM_STREAM_PLAYBACK
)
239 config_acp_dma_channel(acp_mmio
, SYSRAM_TO_ACP_CH_NUM
,
240 PLAYBACK_START_DMA_DESCR_CH12
,
241 NUM_DSCRS_PER_CHANNEL
,
242 ACP_DMA_PRIORITY_LEVEL_NORMAL
);
244 config_acp_dma_channel(acp_mmio
, ACP_TO_SYSRAM_CH_NUM
,
245 CAPTURE_START_DMA_DESCR_CH14
,
246 NUM_DSCRS_PER_CHANNEL
,
247 ACP_DMA_PRIORITY_LEVEL_NORMAL
);
250 /* Initialize the DMA descriptor information for transfer between
253 static void set_acp_to_i2s_dma_descriptors(void __iomem
*acp_mmio
,
254 u32 size
, int direction
,
259 u16 dma_dscr_idx
= PLAYBACK_START_DMA_DESCR_CH13
;
260 acp_dma_dscr_transfer_t dmadscr
[NUM_DSCRS_PER_CHANNEL
];
262 for (i
= 0; i
< NUM_DSCRS_PER_CHANNEL
; i
++) {
263 dmadscr
[i
].xfer_val
= 0;
264 if (direction
== SNDRV_PCM_STREAM_PLAYBACK
) {
265 dma_dscr_idx
= PLAYBACK_START_DMA_DESCR_CH13
+ i
;
266 dmadscr
[i
].src
= ACP_SHARED_RAM_BANK_1_ADDRESS
+
268 /* dmadscr[i].dest is unused by hardware. */
270 dmadscr
[i
].xfer_val
|= BIT(22) | (TO_ACP_I2S_1
<< 16) |
273 dma_dscr_idx
= CAPTURE_START_DMA_DESCR_CH15
+ i
;
274 /* dmadscr[i].src is unused by hardware. */
279 ACP_SHARED_RAM_BANK_3_ADDRESS
+
284 ACP_SHARED_RAM_BANK_5_ADDRESS
+
287 dmadscr
[i
].xfer_val
|= BIT(22) |
288 (FROM_ACP_I2S_1
<< 16) | (size
/ 2);
290 config_dma_descriptor_in_sram(acp_mmio
, dma_dscr_idx
,
293 /* Configure the DMA channel with the above descriptore */
294 if (direction
== SNDRV_PCM_STREAM_PLAYBACK
)
295 config_acp_dma_channel(acp_mmio
, ACP_TO_I2S_DMA_CH_NUM
,
296 PLAYBACK_START_DMA_DESCR_CH13
,
297 NUM_DSCRS_PER_CHANNEL
,
298 ACP_DMA_PRIORITY_LEVEL_NORMAL
);
300 config_acp_dma_channel(acp_mmio
, I2S_TO_ACP_DMA_CH_NUM
,
301 CAPTURE_START_DMA_DESCR_CH15
,
302 NUM_DSCRS_PER_CHANNEL
,
303 ACP_DMA_PRIORITY_LEVEL_NORMAL
);
306 /* Create page table entries in ACP SRAM for the allocated memory */
307 static void acp_pte_config(void __iomem
*acp_mmio
, struct page
*pg
,
308 u16 num_of_pages
, u32 pte_offset
)
316 offset
= ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET
+ (pte_offset
* 8);
317 for (page_idx
= 0; page_idx
< (num_of_pages
); page_idx
++) {
318 /* Load the low address of page int ACP SRAM through SRBM */
319 acp_reg_write((offset
+ (page_idx
* 8)),
320 acp_mmio
, mmACP_SRBM_Targ_Idx_Addr
);
321 addr
= page_to_phys(pg
);
323 low
= lower_32_bits(addr
);
324 high
= upper_32_bits(addr
);
326 acp_reg_write(low
, acp_mmio
, mmACP_SRBM_Targ_Idx_Data
);
328 /* Load the High address of page int ACP SRAM through SRBM */
329 acp_reg_write((offset
+ (page_idx
* 8) + 4),
330 acp_mmio
, mmACP_SRBM_Targ_Idx_Addr
);
332 /* page enable in ACP */
334 acp_reg_write(high
, acp_mmio
, mmACP_SRBM_Targ_Idx_Data
);
336 /* Move to next physically contiguos page */
341 static void config_acp_dma(void __iomem
*acp_mmio
,
342 struct audio_substream_data
*audio_config
,
347 if (audio_config
->direction
== SNDRV_PCM_STREAM_PLAYBACK
)
348 pte_offset
= ACP_PLAYBACK_PTE_OFFSET
;
350 pte_offset
= ACP_CAPTURE_PTE_OFFSET
;
352 acp_pte_config(acp_mmio
, audio_config
->pg
, audio_config
->num_of_pages
,
355 /* Configure System memory <-> ACP SRAM DMA descriptors */
356 set_acp_sysmem_dma_descriptors(acp_mmio
, audio_config
->size
,
357 audio_config
->direction
, pte_offset
, asic_type
);
359 /* Configure ACP SRAM <-> I2S DMA descriptors */
360 set_acp_to_i2s_dma_descriptors(acp_mmio
, audio_config
->size
,
361 audio_config
->direction
, asic_type
);
364 /* Start a given DMA channel transfer */
365 static void acp_dma_start(void __iomem
*acp_mmio
,
366 u16 ch_num
, bool is_circular
)
370 /* read the dma control register and disable the channel run field */
371 dma_ctrl
= acp_reg_read(acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
373 /* Invalidating the DAGB cache */
374 acp_reg_write(1, acp_mmio
, mmACP_DAGB_ATU_CTRL
);
376 /* configure the DMA channel and start the DMA transfer
377 * set dmachrun bit to start the transfer and enable the
378 * interrupt on completion of the dma transfer
380 dma_ctrl
|= ACP_DMA_CNTL_0__DMAChRun_MASK
;
383 case ACP_TO_I2S_DMA_CH_NUM
:
384 case ACP_TO_SYSRAM_CH_NUM
:
385 case I2S_TO_ACP_DMA_CH_NUM
:
386 dma_ctrl
|= ACP_DMA_CNTL_0__DMAChIOCEn_MASK
;
389 dma_ctrl
&= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK
;
393 /* enable for ACP SRAM to/from I2S DMA channel */
394 if (is_circular
== true)
395 dma_ctrl
|= ACP_DMA_CNTL_0__Circular_DMA_En_MASK
;
397 dma_ctrl
&= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK
;
399 acp_reg_write(dma_ctrl
, acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
402 /* Stop a given DMA channel transfer */
403 static int acp_dma_stop(void __iomem
*acp_mmio
, u8 ch_num
)
407 u32 count
= ACP_DMA_RESET_TIME
;
409 dma_ctrl
= acp_reg_read(acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
411 /* clear the dma control register fields before writing zero
414 dma_ctrl
&= ~ACP_DMA_CNTL_0__DMAChRun_MASK
;
415 dma_ctrl
&= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK
;
417 acp_reg_write(dma_ctrl
, acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
418 dma_ch_sts
= acp_reg_read(acp_mmio
, mmACP_DMA_CH_STS
);
420 if (dma_ch_sts
& BIT(ch_num
)) {
421 /* set the reset bit for this channel to stop the dma
424 dma_ctrl
|= ACP_DMA_CNTL_0__DMAChRst_MASK
;
425 acp_reg_write(dma_ctrl
, acp_mmio
, mmACP_DMA_CNTL_0
+ ch_num
);
428 /* check the channel status bit for some time and return the status */
430 dma_ch_sts
= acp_reg_read(acp_mmio
, mmACP_DMA_CH_STS
);
431 if (!(dma_ch_sts
& BIT(ch_num
))) {
432 /* clear the reset flag after successfully stopping
433 * the dma transfer and break from the loop
435 dma_ctrl
&= ~ACP_DMA_CNTL_0__DMAChRst_MASK
;
437 acp_reg_write(dma_ctrl
, acp_mmio
, mmACP_DMA_CNTL_0
442 pr_err("Failed to stop ACP DMA channel : %d\n", ch_num
);
450 static void acp_set_sram_bank_state(void __iomem
*acp_mmio
, u16 bank
,
453 u32 val
, req_reg
, sts_reg
, sts_reg_mask
;
457 req_reg
= mmACP_MEM_SHUT_DOWN_REQ_LO
;
458 sts_reg
= mmACP_MEM_SHUT_DOWN_STS_LO
;
459 sts_reg_mask
= 0xFFFFFFFF;
463 req_reg
= mmACP_MEM_SHUT_DOWN_REQ_HI
;
464 sts_reg
= mmACP_MEM_SHUT_DOWN_STS_HI
;
465 sts_reg_mask
= 0x0000FFFF;
468 val
= acp_reg_read(acp_mmio
, req_reg
);
469 if (val
& (1 << bank
)) {
470 /* bank is in off state */
471 if (power_on
== true)
478 /* bank is in on state */
479 if (power_on
== false)
486 acp_reg_write(val
, acp_mmio
, req_reg
);
488 while (acp_reg_read(acp_mmio
, sts_reg
) != sts_reg_mask
) {
490 pr_err("ACP SRAM bank %d state change failed\n", bank
);
497 /* Initialize and bring ACP hardware to default state. */
498 static int acp_init(void __iomem
*acp_mmio
, u32 asic_type
)
501 u32 val
, count
, sram_pte_offset
;
503 /* Assert Soft reset of ACP */
504 val
= acp_reg_read(acp_mmio
, mmACP_SOFT_RESET
);
506 val
|= ACP_SOFT_RESET__SoftResetAud_MASK
;
507 acp_reg_write(val
, acp_mmio
, mmACP_SOFT_RESET
);
509 count
= ACP_SOFT_RESET_DONE_TIME_OUT_VALUE
;
511 val
= acp_reg_read(acp_mmio
, mmACP_SOFT_RESET
);
512 if (ACP_SOFT_RESET__SoftResetAudDone_MASK
==
513 (val
& ACP_SOFT_RESET__SoftResetAudDone_MASK
))
516 pr_err("Failed to reset ACP\n");
522 /* Enable clock to ACP and wait until the clock is enabled */
523 val
= acp_reg_read(acp_mmio
, mmACP_CONTROL
);
524 val
= val
| ACP_CONTROL__ClkEn_MASK
;
525 acp_reg_write(val
, acp_mmio
, mmACP_CONTROL
);
527 count
= ACP_CLOCK_EN_TIME_OUT_VALUE
;
530 val
= acp_reg_read(acp_mmio
, mmACP_STATUS
);
534 pr_err("Failed to reset ACP\n");
540 /* Deassert the SOFT RESET flags */
541 val
= acp_reg_read(acp_mmio
, mmACP_SOFT_RESET
);
542 val
&= ~ACP_SOFT_RESET__SoftResetAud_MASK
;
543 acp_reg_write(val
, acp_mmio
, mmACP_SOFT_RESET
);
545 /* initiailize Onion control DAGB register */
546 acp_reg_write(ACP_ONION_CNTL_DEFAULT
, acp_mmio
,
547 mmACP_AXI2DAGB_ONION_CNTL
);
549 /* initiailize Garlic control DAGB registers */
550 acp_reg_write(ACP_GARLIC_CNTL_DEFAULT
, acp_mmio
,
551 mmACP_AXI2DAGB_GARLIC_CNTL
);
553 sram_pte_offset
= ACP_DAGB_GRP_SRAM_BASE_ADDRESS
|
554 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK
|
555 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK
|
556 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK
;
557 acp_reg_write(sram_pte_offset
, acp_mmio
, mmACP_DAGB_BASE_ADDR_GRP_1
);
558 acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE
, acp_mmio
,
559 mmACP_DAGB_PAGE_SIZE_GRP_1
);
561 acp_reg_write(ACP_SRAM_BASE_ADDRESS
, acp_mmio
,
562 mmACP_DMA_DESC_BASE_ADDR
);
564 /* Num of descriptiors in SRAM 0x4, means 256 descriptors;(64 * 4) */
565 acp_reg_write(0x4, acp_mmio
, mmACP_DMA_DESC_MAX_NUM_DSCR
);
566 acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK
,
567 acp_mmio
, mmACP_EXTERNAL_INTR_CNTL
);
569 /* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
570 * Now, turn off all of them. This can't be done in 'poweron' of
571 * ACP pm domain, as this requires ACP to be initialized.
572 * For Stoney, Memory gating is disabled,i.e SRAM Banks
573 * won't be turned off. The default state for SRAM banks is ON.
574 * Setting SRAM bank state code skipped for STONEY platform.
576 if (asic_type
!= CHIP_STONEY
) {
577 for (bank
= 1; bank
< 48; bank
++)
578 acp_set_sram_bank_state(acp_mmio
, bank
, false);
581 /* Stoney supports 16bit resolution */
582 if (asic_type
== CHIP_STONEY
) {
583 val
= acp_reg_read(acp_mmio
, mmACP_I2S_16BIT_RESOLUTION_EN
);
585 acp_reg_write(val
, acp_mmio
, mmACP_I2S_16BIT_RESOLUTION_EN
);
590 /* Deinitialize ACP */
591 static int acp_deinit(void __iomem
*acp_mmio
)
596 /* Assert Soft reset of ACP */
597 val
= acp_reg_read(acp_mmio
, mmACP_SOFT_RESET
);
599 val
|= ACP_SOFT_RESET__SoftResetAud_MASK
;
600 acp_reg_write(val
, acp_mmio
, mmACP_SOFT_RESET
);
602 count
= ACP_SOFT_RESET_DONE_TIME_OUT_VALUE
;
604 val
= acp_reg_read(acp_mmio
, mmACP_SOFT_RESET
);
605 if (ACP_SOFT_RESET__SoftResetAudDone_MASK
==
606 (val
& ACP_SOFT_RESET__SoftResetAudDone_MASK
))
609 pr_err("Failed to reset ACP\n");
614 /** Disable ACP clock */
615 val
= acp_reg_read(acp_mmio
, mmACP_CONTROL
);
616 val
&= ~ACP_CONTROL__ClkEn_MASK
;
617 acp_reg_write(val
, acp_mmio
, mmACP_CONTROL
);
619 count
= ACP_CLOCK_EN_TIME_OUT_VALUE
;
622 val
= acp_reg_read(acp_mmio
, mmACP_STATUS
);
623 if (!(val
& (u32
) 0x1))
626 pr_err("Failed to reset ACP\n");
634 /* ACP DMA irq handler routine for playback, capture usecases */
635 static irqreturn_t
dma_irq_handler(int irq
, void *arg
)
638 u32 intr_flag
, ext_intr_status
;
639 struct audio_drv_data
*irq_data
;
640 void __iomem
*acp_mmio
;
641 struct device
*dev
= arg
;
642 bool valid_irq
= false;
644 irq_data
= dev_get_drvdata(dev
);
645 acp_mmio
= irq_data
->acp_mmio
;
647 ext_intr_status
= acp_reg_read(acp_mmio
, mmACP_EXTERNAL_INTR_STAT
);
648 intr_flag
= (((ext_intr_status
&
649 ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK
) >>
650 ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT
));
652 if ((intr_flag
& BIT(ACP_TO_I2S_DMA_CH_NUM
)) != 0) {
654 if (acp_reg_read(acp_mmio
, mmACP_DMA_CUR_DSCR_13
) ==
655 PLAYBACK_START_DMA_DESCR_CH13
)
656 dscr_idx
= PLAYBACK_START_DMA_DESCR_CH12
;
658 dscr_idx
= PLAYBACK_END_DMA_DESCR_CH12
;
659 config_acp_dma_channel(acp_mmio
, SYSRAM_TO_ACP_CH_NUM
, dscr_idx
,
661 acp_dma_start(acp_mmio
, SYSRAM_TO_ACP_CH_NUM
, false);
663 snd_pcm_period_elapsed(irq_data
->play_stream
);
665 acp_reg_write((intr_flag
& BIT(ACP_TO_I2S_DMA_CH_NUM
)) << 16,
666 acp_mmio
, mmACP_EXTERNAL_INTR_STAT
);
669 if ((intr_flag
& BIT(I2S_TO_ACP_DMA_CH_NUM
)) != 0) {
671 if (acp_reg_read(acp_mmio
, mmACP_DMA_CUR_DSCR_15
) ==
672 CAPTURE_START_DMA_DESCR_CH15
)
673 dscr_idx
= CAPTURE_END_DMA_DESCR_CH14
;
675 dscr_idx
= CAPTURE_START_DMA_DESCR_CH14
;
676 config_acp_dma_channel(acp_mmio
, ACP_TO_SYSRAM_CH_NUM
, dscr_idx
,
678 acp_dma_start(acp_mmio
, ACP_TO_SYSRAM_CH_NUM
, false);
680 acp_reg_write((intr_flag
& BIT(I2S_TO_ACP_DMA_CH_NUM
)) << 16,
681 acp_mmio
, mmACP_EXTERNAL_INTR_STAT
);
684 if ((intr_flag
& BIT(ACP_TO_SYSRAM_CH_NUM
)) != 0) {
686 snd_pcm_period_elapsed(irq_data
->capture_stream
);
687 acp_reg_write((intr_flag
& BIT(ACP_TO_SYSRAM_CH_NUM
)) << 16,
688 acp_mmio
, mmACP_EXTERNAL_INTR_STAT
);
697 static int acp_dma_open(struct snd_pcm_substream
*substream
)
701 struct snd_pcm_runtime
*runtime
= substream
->runtime
;
702 struct snd_soc_pcm_runtime
*prtd
= substream
->private_data
;
703 struct audio_drv_data
*intr_data
= dev_get_drvdata(prtd
->platform
->dev
);
705 struct audio_substream_data
*adata
=
706 kzalloc(sizeof(struct audio_substream_data
), GFP_KERNEL
);
710 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
711 switch (intr_data
->asic_type
) {
713 runtime
->hw
= acp_st_pcm_hardware_playback
;
716 runtime
->hw
= acp_pcm_hardware_playback
;
719 switch (intr_data
->asic_type
) {
721 runtime
->hw
= acp_st_pcm_hardware_capture
;
724 runtime
->hw
= acp_pcm_hardware_capture
;
728 ret
= snd_pcm_hw_constraint_integer(runtime
,
729 SNDRV_PCM_HW_PARAM_PERIODS
);
731 dev_err(prtd
->platform
->dev
, "set integer constraint failed\n");
736 adata
->acp_mmio
= intr_data
->acp_mmio
;
737 runtime
->private_data
= adata
;
739 /* Enable ACP irq, when neither playback or capture streams are
740 * active by the time when a new stream is being opened.
741 * This enablement is not required for another stream, if current
742 * stream is not closed
744 if (!intr_data
->play_stream
&& !intr_data
->capture_stream
)
745 acp_reg_write(1, adata
->acp_mmio
, mmACP_EXTERNAL_INTR_ENB
);
747 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
748 intr_data
->play_stream
= substream
;
749 /* For Stoney, Memory gating is disabled,i.e SRAM Banks
750 * won't be turned off. The default state for SRAM banks is ON.
751 * Setting SRAM bank state code skipped for STONEY platform.
753 if (intr_data
->asic_type
!= CHIP_STONEY
) {
754 for (bank
= 1; bank
<= 4; bank
++)
755 acp_set_sram_bank_state(intr_data
->acp_mmio
,
759 intr_data
->capture_stream
= substream
;
760 if (intr_data
->asic_type
!= CHIP_STONEY
) {
761 for (bank
= 5; bank
<= 8; bank
++)
762 acp_set_sram_bank_state(intr_data
->acp_mmio
,
770 static int acp_dma_hw_params(struct snd_pcm_substream
*substream
,
771 struct snd_pcm_hw_params
*params
)
776 struct snd_pcm_runtime
*runtime
;
777 struct audio_substream_data
*rtd
;
778 struct snd_soc_pcm_runtime
*prtd
= substream
->private_data
;
779 struct audio_drv_data
*adata
= dev_get_drvdata(prtd
->platform
->dev
);
781 runtime
= substream
->runtime
;
782 rtd
= runtime
->private_data
;
787 size
= params_buffer_bytes(params
);
788 status
= snd_pcm_lib_malloc_pages(substream
, size
);
792 memset(substream
->runtime
->dma_area
, 0, params_buffer_bytes(params
));
793 pg
= virt_to_page(substream
->dma_buffer
.area
);
796 acp_set_sram_bank_state(rtd
->acp_mmio
, 0, true);
797 /* Save for runtime private data */
799 rtd
->order
= get_order(size
);
801 /* Fill the page table entries in ACP SRAM */
804 rtd
->num_of_pages
= PAGE_ALIGN(size
) >> PAGE_SHIFT
;
805 rtd
->direction
= substream
->stream
;
807 config_acp_dma(rtd
->acp_mmio
, rtd
, adata
->asic_type
);
/* PCM hw_free: release the preallocated DMA buffer pages. */
static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}
820 static snd_pcm_uframes_t
acp_dma_pointer(struct snd_pcm_substream
*substream
)
823 u32 mul
, dma_config
, period_bytes
;
826 struct snd_pcm_runtime
*runtime
= substream
->runtime
;
827 struct audio_substream_data
*rtd
= runtime
->private_data
;
829 period_bytes
= frames_to_bytes(runtime
, runtime
->period_size
);
830 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
831 dscr
= acp_reg_read(rtd
->acp_mmio
, mmACP_DMA_CUR_DSCR_13
);
833 if (dscr
== PLAYBACK_START_DMA_DESCR_CH13
)
837 pos
= (mul
* period_bytes
);
839 dma_config
= acp_reg_read(rtd
->acp_mmio
, mmACP_DMA_CNTL_14
);
840 if (dma_config
!= 0) {
841 dscr
= acp_reg_read(rtd
->acp_mmio
,
842 mmACP_DMA_CUR_DSCR_14
);
843 if (dscr
== CAPTURE_START_DMA_DESCR_CH14
)
847 pos
= (mul
* period_bytes
);
850 if (pos
>= (2 * period_bytes
))
854 return bytes_to_frames(runtime
, pos
);
/* PCM mmap: standard ALSA default mapping of the DMA buffer. */
static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}
863 static int acp_dma_prepare(struct snd_pcm_substream
*substream
)
865 struct snd_pcm_runtime
*runtime
= substream
->runtime
;
866 struct audio_substream_data
*rtd
= runtime
->private_data
;
868 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
869 config_acp_dma_channel(rtd
->acp_mmio
, SYSRAM_TO_ACP_CH_NUM
,
870 PLAYBACK_START_DMA_DESCR_CH12
,
871 NUM_DSCRS_PER_CHANNEL
, 0);
872 config_acp_dma_channel(rtd
->acp_mmio
, ACP_TO_I2S_DMA_CH_NUM
,
873 PLAYBACK_START_DMA_DESCR_CH13
,
874 NUM_DSCRS_PER_CHANNEL
, 0);
875 /* Fill ACP SRAM (2 periods) with zeros from System RAM
876 * which is zero-ed in hw_params
878 acp_dma_start(rtd
->acp_mmio
, SYSRAM_TO_ACP_CH_NUM
, false);
880 /* ACP SRAM (2 periods of buffer size) is intially filled with
881 * zeros. Before rendering starts, 2nd half of SRAM will be
882 * filled with valid audio data DMA'ed from first half of system
883 * RAM and 1st half of SRAM will be filled with Zeros. This is
884 * the initial scenario when redering starts from SRAM. Later
885 * on, 2nd half of system memory will be DMA'ed to 1st half of
886 * SRAM, 1st half of system memory will be DMA'ed to 2nd half of
887 * SRAM in ping-pong way till rendering stops.
889 config_acp_dma_channel(rtd
->acp_mmio
, SYSRAM_TO_ACP_CH_NUM
,
890 PLAYBACK_START_DMA_DESCR_CH12
,
893 config_acp_dma_channel(rtd
->acp_mmio
, ACP_TO_SYSRAM_CH_NUM
,
894 CAPTURE_START_DMA_DESCR_CH14
,
895 NUM_DSCRS_PER_CHANNEL
, 0);
896 config_acp_dma_channel(rtd
->acp_mmio
, I2S_TO_ACP_DMA_CH_NUM
,
897 CAPTURE_START_DMA_DESCR_CH15
,
898 NUM_DSCRS_PER_CHANNEL
, 0);
903 static int acp_dma_trigger(struct snd_pcm_substream
*substream
, int cmd
)
908 struct snd_pcm_runtime
*runtime
= substream
->runtime
;
909 struct snd_soc_pcm_runtime
*prtd
= substream
->private_data
;
910 struct audio_substream_data
*rtd
= runtime
->private_data
;
915 case SNDRV_PCM_TRIGGER_START
:
916 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE
:
917 case SNDRV_PCM_TRIGGER_RESUME
:
918 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
919 acp_dma_start(rtd
->acp_mmio
,
920 SYSRAM_TO_ACP_CH_NUM
, false);
921 while (acp_reg_read(rtd
->acp_mmio
, mmACP_DMA_CH_STS
) &
922 BIT(SYSRAM_TO_ACP_CH_NUM
)) {
924 dev_err(prtd
->platform
->dev
,
925 "acp dma start timeout\n");
931 acp_dma_start(rtd
->acp_mmio
,
932 ACP_TO_I2S_DMA_CH_NUM
, true);
935 acp_dma_start(rtd
->acp_mmio
,
936 I2S_TO_ACP_DMA_CH_NUM
, true);
940 case SNDRV_PCM_TRIGGER_STOP
:
941 case SNDRV_PCM_TRIGGER_PAUSE_PUSH
:
942 case SNDRV_PCM_TRIGGER_SUSPEND
:
943 /* Need to stop only circular DMA channels :
944 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. Non-circular
945 * channels will stopped automatically after its transfer
946 * completes : SYSRAM_TO_ACP_CH_NUM / ACP_TO_SYSRAM_CH_NUM
948 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
)
949 ret
= acp_dma_stop(rtd
->acp_mmio
,
950 ACP_TO_I2S_DMA_CH_NUM
);
952 ret
= acp_dma_stop(rtd
->acp_mmio
,
953 I2S_TO_ACP_DMA_CH_NUM
);
962 static int acp_dma_new(struct snd_soc_pcm_runtime
*rtd
)
965 struct audio_drv_data
*adata
= dev_get_drvdata(rtd
->platform
->dev
);
967 switch (adata
->asic_type
) {
969 ret
= snd_pcm_lib_preallocate_pages_for_all(rtd
->pcm
,
975 ret
= snd_pcm_lib_preallocate_pages_for_all(rtd
->pcm
,
982 dev_err(rtd
->platform
->dev
,
983 "buffer preallocation failer error:%d\n", ret
);
987 static int acp_dma_close(struct snd_pcm_substream
*substream
)
990 struct snd_pcm_runtime
*runtime
= substream
->runtime
;
991 struct audio_substream_data
*rtd
= runtime
->private_data
;
992 struct snd_soc_pcm_runtime
*prtd
= substream
->private_data
;
993 struct audio_drv_data
*adata
= dev_get_drvdata(prtd
->platform
->dev
);
997 if (substream
->stream
== SNDRV_PCM_STREAM_PLAYBACK
) {
998 adata
->play_stream
= NULL
;
999 /* For Stoney, Memory gating is disabled,i.e SRAM Banks
1000 * won't be turned off. The default state for SRAM banks is ON.
1001 * Setting SRAM bank state code skipped for STONEY platform.
1002 * added condition checks for Carrizo platform only
1004 if (adata
->asic_type
!= CHIP_STONEY
) {
1005 for (bank
= 1; bank
<= 4; bank
++)
1006 acp_set_sram_bank_state(adata
->acp_mmio
, bank
,
1010 adata
->capture_stream
= NULL
;
1011 if (adata
->asic_type
!= CHIP_STONEY
) {
1012 for (bank
= 5; bank
<= 8; bank
++)
1013 acp_set_sram_bank_state(adata
->acp_mmio
, bank
,
1018 /* Disable ACP irq, when the current stream is being closed and
1019 * another stream is also not active.
1021 if (!adata
->play_stream
&& !adata
->capture_stream
)
1022 acp_reg_write(0, adata
->acp_mmio
, mmACP_EXTERNAL_INTR_ENB
);
1027 static const struct snd_pcm_ops acp_dma_ops
= {
1028 .open
= acp_dma_open
,
1029 .close
= acp_dma_close
,
1030 .ioctl
= snd_pcm_lib_ioctl
,
1031 .hw_params
= acp_dma_hw_params
,
1032 .hw_free
= acp_dma_hw_free
,
1033 .trigger
= acp_dma_trigger
,
1034 .pointer
= acp_dma_pointer
,
1035 .mmap
= acp_dma_mmap
,
1036 .prepare
= acp_dma_prepare
,
1039 static struct snd_soc_platform_driver acp_asoc_platform
= {
1040 .ops
= &acp_dma_ops
,
1041 .pcm_new
= acp_dma_new
,
1044 static int acp_audio_probe(struct platform_device
*pdev
)
1047 struct audio_drv_data
*audio_drv_data
;
1048 struct resource
*res
;
1049 const u32
*pdata
= pdev
->dev
.platform_data
;
1051 audio_drv_data
= devm_kzalloc(&pdev
->dev
, sizeof(struct audio_drv_data
),
1053 if (audio_drv_data
== NULL
)
1056 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
1057 audio_drv_data
->acp_mmio
= devm_ioremap_resource(&pdev
->dev
, res
);
1059 /* The following members gets populated in device 'open'
1060 * function. Till then interrupts are disabled in 'acp_init'
1061 * and device doesn't generate any interrupts.
1064 audio_drv_data
->play_stream
= NULL
;
1065 audio_drv_data
->capture_stream
= NULL
;
1066 audio_drv_data
->asic_type
= *pdata
;
1068 res
= platform_get_resource(pdev
, IORESOURCE_IRQ
, 0);
1070 dev_err(&pdev
->dev
, "IORESOURCE_IRQ FAILED\n");
1074 status
= devm_request_irq(&pdev
->dev
, res
->start
, dma_irq_handler
,
1075 0, "ACP_IRQ", &pdev
->dev
);
1077 dev_err(&pdev
->dev
, "ACP IRQ request failed\n");
1081 dev_set_drvdata(&pdev
->dev
, audio_drv_data
);
1083 /* Initialize the ACP */
1084 acp_init(audio_drv_data
->acp_mmio
, audio_drv_data
->asic_type
);
1086 status
= snd_soc_register_platform(&pdev
->dev
, &acp_asoc_platform
);
1088 dev_err(&pdev
->dev
, "Fail to register ALSA platform device\n");
1092 pm_runtime_set_autosuspend_delay(&pdev
->dev
, 10000);
1093 pm_runtime_use_autosuspend(&pdev
->dev
);
1094 pm_runtime_enable(&pdev
->dev
);
1099 static int acp_audio_remove(struct platform_device
*pdev
)
1101 struct audio_drv_data
*adata
= dev_get_drvdata(&pdev
->dev
);
1103 acp_deinit(adata
->acp_mmio
);
1104 snd_soc_unregister_platform(&pdev
->dev
);
1105 pm_runtime_disable(&pdev
->dev
);
1110 static int acp_pcm_resume(struct device
*dev
)
1113 struct audio_drv_data
*adata
= dev_get_drvdata(dev
);
1115 acp_init(adata
->acp_mmio
, adata
->asic_type
);
1117 if (adata
->play_stream
&& adata
->play_stream
->runtime
) {
1118 /* For Stoney, Memory gating is disabled,i.e SRAM Banks
1119 * won't be turned off. The default state for SRAM banks is ON.
1120 * Setting SRAM bank state code skipped for STONEY platform.
1122 if (adata
->asic_type
!= CHIP_STONEY
) {
1123 for (bank
= 1; bank
<= 4; bank
++)
1124 acp_set_sram_bank_state(adata
->acp_mmio
, bank
,
1127 config_acp_dma(adata
->acp_mmio
,
1128 adata
->play_stream
->runtime
->private_data
,
1131 if (adata
->capture_stream
&& adata
->capture_stream
->runtime
) {
1132 if (adata
->asic_type
!= CHIP_STONEY
) {
1133 for (bank
= 5; bank
<= 8; bank
++)
1134 acp_set_sram_bank_state(adata
->acp_mmio
, bank
,
1137 config_acp_dma(adata
->acp_mmio
,
1138 adata
->capture_stream
->runtime
->private_data
,
1141 acp_reg_write(1, adata
->acp_mmio
, mmACP_EXTERNAL_INTR_ENB
);
1145 static int acp_pcm_runtime_suspend(struct device
*dev
)
1147 struct audio_drv_data
*adata
= dev_get_drvdata(dev
);
1149 acp_deinit(adata
->acp_mmio
);
1150 acp_reg_write(0, adata
->acp_mmio
, mmACP_EXTERNAL_INTR_ENB
);
1154 static int acp_pcm_runtime_resume(struct device
*dev
)
1156 struct audio_drv_data
*adata
= dev_get_drvdata(dev
);
1158 acp_init(adata
->acp_mmio
, adata
->asic_type
);
1159 acp_reg_write(1, adata
->acp_mmio
, mmACP_EXTERNAL_INTR_ENB
);
1163 static const struct dev_pm_ops acp_pm_ops
= {
1164 .resume
= acp_pcm_resume
,
1165 .runtime_suspend
= acp_pcm_runtime_suspend
,
1166 .runtime_resume
= acp_pcm_runtime_resume
,
1169 static struct platform_driver acp_dma_driver
= {
1170 .probe
= acp_audio_probe
,
1171 .remove
= acp_audio_remove
,
1173 .name
= "acp_audio_dma",
1178 module_platform_driver(acp_dma_driver
);
1180 MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
1181 MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
1182 MODULE_DESCRIPTION("AMD ACP PCM Driver");
1183 MODULE_LICENSE("GPL v2");
1184 MODULE_ALIAS("platform:acp-dma-audio");