/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */

#include <linux/module.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/sizes.h>
#include <linux/pm_runtime.h>

#include <sound/soc.h>
#include <drm/amd_asic_type.h>
#include "acp.h"

#define PLAYBACK_MIN_NUM_PERIODS 2
#define PLAYBACK_MAX_NUM_PERIODS 2
#define PLAYBACK_MAX_PERIOD_SIZE 16384
#define PLAYBACK_MIN_PERIOD_SIZE 1024
#define CAPTURE_MIN_NUM_PERIODS 2
#define CAPTURE_MAX_NUM_PERIODS 2
#define CAPTURE_MAX_PERIOD_SIZE 16384
#define CAPTURE_MIN_PERIOD_SIZE 1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

#define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
#define ST_CAPTURE_MAX_PERIOD_SIZE ST_PLAYBACK_MAX_PERIOD_SIZE
#define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define ST_MIN_BUFFER ST_MAX_BUFFER

static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_st_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_st_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

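/* ACP MMIO registers are indexed by 32-bit word, so a register index is
 * converted to a byte offset by multiplying by 4 before the readl/writel.
 */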
static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
{
	return readl(acp_mmio + (reg * 4));
}

static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
{
	writel(val, acp_mmio + (reg * 4));
}

/* Configure a given DMA channel's parameters: enable/disable,
 * number of descriptors, priority
 */
static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
				   u16 dscr_strt_idx, u16 num_dscrs,
				   enum acp_dma_priority_level priority_level)
{
	u32 dma_ctrl;

	/* disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* program the DMA channel with the first descriptor to be processed. */
	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
			& dscr_strt_idx),
			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);

	/* program the DMA channel with the number of descriptors to be
	 * processed in the transfer
	 */
	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
		      acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);

	/* set the DMA channel priority */
	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
}

/* Initialize a DMA descriptor in SRAM based on the descriptor information passed */
static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
					  u16 descr_idx,
					  acp_dma_dscr_transfer_t *descr_info)
{
	u32 sram_offset;

	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));

	/* program the source base address. */
	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
	/* program the destination base address. */
	acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

	/* program the number of bytes to be transferred for this descriptor. */
	acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
}

/* Initialize the DMA descriptor information for transfer between
 * system memory <-> ACP SRAM
 */
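/* The stream buffer is split into two halves, one descriptor per half
 * (NUM_DSCRS_PER_CHANNEL descriptors, each moving size / 2 bytes), so
 * system memory and ACP SRAM can be ping-ponged half a buffer at a time.
 */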
static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 pte_offset, u32 asic_type)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS +
					  (size / 2) - (i * (size / 2));
			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					 + (pte_offset * SZ_4K) + (i * (size / 2));
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_GARLIC_TO_SHAREDMEM << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM << 16) |
				(size / 2);
			}
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_3_ADDRESS +
						 (i * (size / 2));
				dmadscr[i].dest =
				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
				(pte_offset * SZ_4K) + (i * (size / 2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHARED_MEM_TO_DAGB_GARLIC << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
						 (i * (size / 2));
				dmadscr[i].dest =
				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
				(pte_offset * SZ_4K) + (i * (size / 2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
				(size / 2);
			}
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Initialize the DMA descriptor information for transfer between
 * ACP SRAM <-> I2S
 */
static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 asic_type)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
					 (i * (size / 2));
			/* dmadscr[i].dest is unused by hardware. */
			dmadscr[i].dest = 0;
			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
					       (size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
			/* dmadscr[i].src is unused by hardware. */
			dmadscr[i].src = 0;
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].dest =
					ACP_SHARED_RAM_BANK_3_ADDRESS +
					(i * (size / 2));
				break;
			default:
				dmadscr[i].dest =
					ACP_SHARED_RAM_BANK_5_ADDRESS +
					(i * (size / 2));
			}
			dmadscr[i].xfer_val |= BIT(22) |
					       (FROM_ACP_I2S_1 << 16) | (size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* Configure the DMA channels with the above descriptors */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Create page table entries in ACP SRAM for the allocated memory */
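/* Each page table entry is 8 bytes: the low word holds the lower 32 bits of
 * the page's physical address, and the high word holds the upper bits with
 * bit 31 set as the page-enable flag.
 */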
static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
			   u16 num_of_pages, u32 pte_offset)
{
	u16 page_idx;
	u64 addr;
	u32 low;
	u32 high;
	u32 offset;

	offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
	for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
		/* Load the low address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8)),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
		addr = page_to_phys(pg);

		low = lower_32_bits(addr);
		high = upper_32_bits(addr);

		acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Load the high address of the page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8) + 4),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);

		/* page enable in ACP */
		high |= BIT(31);
		acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Move to the next physically contiguous page */
		pg++;
	}
}

static void config_acp_dma(void __iomem *acp_mmio,
			   struct audio_substream_data *audio_config,
			   u32 asic_type)
{
	u32 pte_offset;

	if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
		pte_offset = ACP_PLAYBACK_PTE_OFFSET;
	else
		pte_offset = ACP_CAPTURE_PTE_OFFSET;

	acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
		       pte_offset);

	/* Configure System memory <-> ACP SRAM DMA descriptors */
	set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, pte_offset, asic_type);

	/* Configure ACP SRAM <-> I2S DMA descriptors */
	set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, asic_type);
}

/* Start a given DMA channel transfer */
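/* Circular mode is used for the ACP SRAM <-> I2S channels, which run
 * continuously; the system memory <-> ACP SRAM channels run one-shot and
 * are re-programmed from the interrupt handler.
 */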
static void acp_dma_start(void __iomem *acp_mmio,
			  u16 ch_num, bool is_circular)
{
	u32 dma_ctrl;

	/* read the dma control register */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* Invalidate the DAGB cache */
	acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);

	/* configure the DMA channel and start the DMA transfer:
	 * set the dmachrun bit to start the transfer and enable the
	 * interrupt on completion of the dma transfer
	 */
	dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;

	switch (ch_num) {
	case ACP_TO_I2S_DMA_CH_NUM:
	case ACP_TO_SYSRAM_CH_NUM:
	case I2S_TO_ACP_DMA_CH_NUM:
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	default:
		dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	}

	/* enable circular DMA for the ACP SRAM to/from I2S channels */
	if (is_circular)
		dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
	else
		dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
}

/* Stop a given DMA channel transfer */
static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
{
	u32 dma_ctrl;
	u32 dma_ch_sts;
	u32 count = ACP_DMA_RESET_TIME;

	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* clear the dma control register fields before writing zero
	 * in reset bit
	 */
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);

	if (dma_ch_sts & BIT(ch_num)) {
		/* set the reset bit for this channel to stop the dma
		 * transfer
		 */
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	}

	/* check the channel status bit for some time and return the status */
	while (true) {
		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
		if (!(dma_ch_sts & BIT(ch_num))) {
			/* clear the reset flag after successfully stopping
			 * the dma transfer and break from the loop
			 */
			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;

			acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
				      + ch_num);
			break;
		}
		if (--count == 0) {
			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

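/* Power an individual ACP SRAM bank on or off by setting its bit in the
 * memory shut-down request register and waiting for the status register
 * to report the transition.
 */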
static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
				    bool power_on)
{
	u32 val, req_reg, sts_reg, sts_reg_mask;
	u32 loops = 1000;

	if (bank < 32) {
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
		sts_reg_mask = 0xFFFFFFFF;
	} else {
		bank -= 32;
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
		sts_reg_mask = 0x0000FFFF;
	}

	val = acp_reg_read(acp_mmio, req_reg);
	if (val & (1 << bank)) {
		/* bank is in the off state */
		if (power_on)
			/* request power on */
			val &= ~(1 << bank);
		else
			/* already off, nothing to request */
			return;
	} else {
		/* bank is in the on state */
		if (!power_on)
			/* request power off */
			val |= 1 << bank;
		else
			/* already on, nothing to request */
			return;
	}
	acp_reg_write(val, acp_mmio, req_reg);

	while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
		if (!loops--) {
			pr_err("ACP SRAM bank %d state change failed\n", bank);
			break;
		}
		cpu_relax();
	}
}

/* Initialize and bring ACP hardware to its default state. */
static int acp_init(void __iomem *acp_mmio, u32 asic_type)
{
	u16 bank;
	u32 val, count, sram_pte_offset;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Enable the clock to ACP and wait until the clock is enabled */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val = val | ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (val & (u32) 0x1)
			break;
		if (--count == 0) {
			pr_err("Failed to enable ACP clock\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Deassert the SOFT RESET flags */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	/* initialize the Onion control DAGB register */
	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_ONION_CNTL);

	/* initialize the Garlic control DAGB register */
	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_GARLIC_CNTL);

	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
	acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
		      mmACP_DAGB_PAGE_SIZE_GRP_1);

	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
		      mmACP_DMA_DESC_BASE_ADDR);

	/* Number of descriptors in SRAM: 0x4 means 256 descriptors (64 * 4) */
	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
		      acp_mmio, mmACP_EXTERNAL_INTR_CNTL);

	/* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
	 * Now, turn off all of them. This can't be done in 'poweron' of the
	 * ACP pm domain, as it requires ACP to be initialized.
	 * For Stoney, memory gating is disabled, i.e. SRAM banks
	 * won't be turned off. The default state for SRAM banks is ON.
	 * The SRAM bank state code is therefore skipped on Stoney.
	 */
	if (asic_type != CHIP_STONEY) {
		for (bank = 1; bank < 48; bank++)
			acp_set_sram_bank_state(acp_mmio, bank, false);
	}

	/* Stoney supports 16-bit resolution */
	if (asic_type == CHIP_STONEY) {
		val = acp_reg_read(acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
		val |= 0x03;
		acp_reg_write(val, acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
	}
	return 0;
}

/* Deinitialize the ACP */
static int acp_deinit(void __iomem *acp_mmio)
{
	u32 val;
	u32 count;

	/* Assert soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	/* Disable the ACP clock */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val &= ~ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (!(val & (u32) 0x1))
			break;
		if (--count == 0) {
			pr_err("Failed to disable ACP clock\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}

/* ACP DMA irq handler routine for playback and capture use cases */
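/* For playback, each ACP-to-I2S period interrupt re-programs and restarts
 * the SYSRAM-to-ACP channel so the SRAM half that was just consumed is
 * refilled from system memory; capture works the same way in reverse.
 */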
static irqreturn_t dma_irq_handler(int irq, void *arg)
{
	u16 dscr_idx;
	u32 intr_flag, ext_intr_status;
	struct audio_drv_data *irq_data;
	void __iomem *acp_mmio;
	struct device *dev = arg;
	bool valid_irq = false;

	irq_data = dev_get_drvdata(dev);
	acp_mmio = irq_data->acp_mmio;

	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	intr_flag = (((ext_intr_status &
		       ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));

	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
		    PLAYBACK_START_DMA_DESCR_CH13)
			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
		else
			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		snd_pcm_period_elapsed(irq_data->play_stream);

		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
		    CAPTURE_START_DMA_DESCR_CH15)
			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
		else
			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);

		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
		valid_irq = true;
		snd_pcm_period_elapsed(irq_data->capture_stream);
		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if (valid_irq)
		return IRQ_HANDLED;
	else
		return IRQ_NONE;
}

static int acp_dma_open(struct snd_pcm_substream *substream)
{
	u16 bank;
	int ret = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);
	struct audio_substream_data *adata =
		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);

	if (!adata)
		return -ENOMEM;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_playback;
			break;
		default:
			runtime->hw = acp_pcm_hardware_playback;
		}
	} else {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_capture;
			break;
		default:
			runtime->hw = acp_pcm_hardware_capture;
		}
	}

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(prtd->platform->dev, "set integer constraint failed\n");
		kfree(adata);
		return ret;
	}

	adata->acp_mmio = intr_data->acp_mmio;
	runtime->private_data = adata;

	/* Enable the ACP IRQ if neither a playback nor a capture stream is
	 * active when this new stream is opened. If another stream is
	 * already open, the IRQ is already enabled and nothing needs to be
	 * done here.
	 */
	if (!intr_data->play_stream && !intr_data->capture_stream)
		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		intr_data->play_stream = substream;
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON.
		 * The SRAM bank state code is therefore skipped on Stoney.
		 */
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	} else {
		intr_data->capture_stream = substream;
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	}

	return 0;
}

static int acp_dma_hw_params(struct snd_pcm_substream *substream,
			     struct snd_pcm_hw_params *params)
{
	int status;
	uint64_t size;
	struct page *pg;
	struct snd_pcm_runtime *runtime;
	struct audio_substream_data *rtd;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	runtime = substream->runtime;
	rtd = runtime->private_data;

	if (WARN_ON(!rtd))
		return -EINVAL;

	size = params_buffer_bytes(params);
	status = snd_pcm_lib_malloc_pages(substream, size);
	if (status < 0)
		return status;

	memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
	pg = virt_to_page(substream->dma_buffer.area);

	if (pg) {
		acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
		/* Save the allocation in the runtime private data and fill
		 * the page table entries in ACP SRAM.
		 */
		rtd->pg = pg;
		rtd->order = get_order(size);
		rtd->size = size;
		rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
		rtd->direction = substream->stream;

		config_acp_dma(rtd->acp_mmio, rtd, adata->asic_type);
		status = 0;
	} else {
		status = -ENOMEM;
	}
	return status;
}

static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}

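/* Derive the current buffer position from which DMA descriptor the hardware
 * is processing; each descriptor covers half the buffer (one period), so the
 * position advances in whole periods.
 */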
static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
{
	u16 dscr;
	u32 mul, dma_config, period_bytes;
	u32 pos = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	period_bytes = frames_to_bytes(runtime, runtime->period_size);
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		dscr = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CUR_DSCR_13);

		if (dscr == PLAYBACK_START_DMA_DESCR_CH13)
			mul = 0;
		else
			mul = 1;
		pos = (mul * period_bytes);
	} else {
		dma_config = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CNTL_14);
		if (dma_config != 0) {
			dscr = acp_reg_read(rtd->acp_mmio,
					    mmACP_DMA_CUR_DSCR_14);
			if (dscr == CAPTURE_START_DMA_DESCR_CH14)
				mul = 1;
			else
				mul = 2;
			pos = (mul * period_bytes);
		}

		if (pos >= (2 * period_bytes))
			pos = 0;
	}
	return bytes_to_frames(runtime, pos);
}

static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}

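/* Program both DMA channel pairs for the stream before it starts. For
 * playback this also pre-fills ACP SRAM from the zeroed system RAM buffer,
 * so rendering starts from silence.
 */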
static int acp_dma_prepare(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL, 0);
		/* Fill ACP SRAM (2 periods) with zeros from system RAM,
		 * which was zeroed in hw_params.
		 */
		acp_dma_start(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		/* ACP SRAM (2 periods of buffer size) is initially filled with
		 * zeros. Before rendering starts, the 2nd half of SRAM will be
		 * filled with valid audio data DMA'ed from the first half of
		 * system RAM while the 1st half of SRAM remains zeroed. This
		 * is the initial state when rendering starts from SRAM. Later
		 * on, the 2nd half of system memory is DMA'ed to the 1st half
		 * of SRAM and the 1st half of system memory to the 2nd half
		 * of SRAM, ping-pong style, until rendering stops.
		 */
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       1, 0);
	} else {
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL, 0);
	}
	return 0;
}

static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret;
	u32 loops = 1000;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			acp_dma_start(rtd->acp_mmio,
				      SYSRAM_TO_ACP_CH_NUM, false);
			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
			       BIT(SYSRAM_TO_ACP_CH_NUM)) {
				if (!loops--) {
					dev_err(prtd->platform->dev,
						"acp dma start timeout\n");
					return -ETIMEDOUT;
				}
				cpu_relax();
			}

			acp_dma_start(rtd->acp_mmio,
				      ACP_TO_I2S_DMA_CH_NUM, true);
		} else {
			acp_dma_start(rtd->acp_mmio,
				      I2S_TO_ACP_DMA_CH_NUM, true);
		}
		ret = 0;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		/* Only the circular DMA channels need to be stopped here:
		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. The
		 * non-circular channels (SYSRAM_TO_ACP_CH_NUM /
		 * ACP_TO_SYSRAM_CH_NUM) stop automatically once their
		 * transfers complete.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			ret = acp_dma_stop(rtd->acp_mmio,
					   ACP_TO_I2S_DMA_CH_NUM);
		else
			ret = acp_dma_stop(rtd->acp_mmio,
					   I2S_TO_ACP_DMA_CH_NUM);
		break;
	default:
		ret = -EINVAL;
	}
	return ret;
}

static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
{
	int ret;
	struct audio_drv_data *adata = dev_get_drvdata(rtd->platform->dev);

	switch (adata->asic_type) {
	case CHIP_STONEY:
		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
							    SNDRV_DMA_TYPE_DEV,
							    NULL, ST_MIN_BUFFER,
							    ST_MAX_BUFFER);
		break;
	default:
		ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
							    SNDRV_DMA_TYPE_DEV,
							    NULL, MIN_BUFFER,
							    MAX_BUFFER);
		break;
	}
	if (ret < 0)
		dev_err(rtd->platform->dev,
			"buffer preallocation failed, error:%d\n", ret);
	return ret;
}

static int acp_dma_close(struct snd_pcm_substream *substream)
{
	u16 bank;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	kfree(rtd);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		adata->play_stream = NULL;
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON.
		 * The bank state is therefore only changed on the other
		 * platforms (e.g. Carrizo), not on Stoney.
		 */
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	} else {
		adata->capture_stream = NULL;
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	}

	/* Disable the ACP IRQ when the current stream is being closed and
	 * no other stream is active.
	 */
	if (!adata->play_stream && !adata->capture_stream)
		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	return 0;
}

static const struct snd_pcm_ops acp_dma_ops = {
	.open = acp_dma_open,
	.close = acp_dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = acp_dma_hw_params,
	.hw_free = acp_dma_hw_free,
	.trigger = acp_dma_trigger,
	.pointer = acp_dma_pointer,
	.mmap = acp_dma_mmap,
	.prepare = acp_dma_prepare,
};

static struct snd_soc_platform_driver acp_asoc_platform = {
	.ops = &acp_dma_ops,
	.pcm_new = acp_dma_new,
};

static int acp_audio_probe(struct platform_device *pdev)
{
	int status;
	struct audio_drv_data *audio_drv_data;
	struct resource *res;
	const u32 *pdata = pdev->dev.platform_data;

	audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
				      GFP_KERNEL);
	if (!audio_drv_data)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(audio_drv_data->acp_mmio))
		return PTR_ERR(audio_drv_data->acp_mmio);

	/* The following members get populated in the device 'open'
	 * callback. Until then interrupts are disabled in 'acp_init'
	 * and the device doesn't generate any interrupts.
	 */
	audio_drv_data->play_stream = NULL;
	audio_drv_data->capture_stream = NULL;
	audio_drv_data->asic_type = *pdata;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
		return -ENODEV;
	}

	status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
				  0, "ACP_IRQ", &pdev->dev);
	if (status) {
		dev_err(&pdev->dev, "ACP IRQ request failed\n");
		return status;
	}

	dev_set_drvdata(&pdev->dev, audio_drv_data);

	/* Initialize the ACP */
	acp_init(audio_drv_data->acp_mmio, audio_drv_data->asic_type);

	status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
	if (status != 0) {
		dev_err(&pdev->dev, "Failed to register ALSA platform device\n");
		return status;
	}

	pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	return status;
}

static int acp_audio_remove(struct platform_device *pdev)
{
	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);

	acp_deinit(adata->acp_mmio);
	snd_soc_unregister_platform(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}

static int acp_pcm_resume(struct device *dev)
{
	u16 bank;
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio, adata->asic_type);

	if (adata->play_stream && adata->play_stream->runtime) {
		/* For Stoney, memory gating is disabled, i.e. SRAM banks
		 * won't be turned off. The default state for SRAM banks is ON.
		 * The SRAM bank state code is therefore skipped on Stoney.
		 */
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							true);
		}
		config_acp_dma(adata->acp_mmio,
			       adata->play_stream->runtime->private_data,
			       adata->asic_type);
	}
	if (adata->capture_stream && adata->capture_stream->runtime) {
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							true);
		}
		config_acp_dma(adata->acp_mmio,
			       adata->capture_stream->runtime->private_data,
			       adata->asic_type);
	}
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_suspend(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_deinit(adata->acp_mmio);
	acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_resume(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio, adata->asic_type);
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static const struct dev_pm_ops acp_pm_ops = {
	.resume = acp_pcm_resume,
	.runtime_suspend = acp_pcm_runtime_suspend,
	.runtime_resume = acp_pcm_runtime_resume,
};

static struct platform_driver acp_dma_driver = {
	.probe = acp_audio_probe,
	.remove = acp_audio_remove,
	.driver = {
		.name = "acp_audio_dma",
		.pm = &acp_pm_ops,
	},
};

module_platform_driver(acp_dma_driver);

MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
MODULE_DESCRIPTION("AMD ACP PCM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:acp-dma-audio");