]> git.proxmox.com Git - mirror_ubuntu-focal-kernel.git/blob - sound/soc/amd/acp-pcm-dma.c
Merge remote-tracking branches 'asoc/topic/tas6424', 'asoc/topic/tfa9879', 'asoc...
[mirror_ubuntu-focal-kernel.git] / sound / soc / amd / acp-pcm-dma.c
1 /*
2 * AMD ALSA SoC PCM Driver for ACP 2.x
3 *
4 * Copyright 2014-2015 Advanced Micro Devices, Inc.
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms and conditions of the GNU General Public License,
8 * version 2, as published by the Free Software Foundation.
9 *
10 * This program is distributed in the hope it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 */
15
16 #include <linux/module.h>
17 #include <linux/delay.h>
18 #include <linux/io.h>
19 #include <linux/sizes.h>
20 #include <linux/pm_runtime.h>
21
22 #include <sound/soc.h>
23 #include <drm/amd_asic_type.h>
24 #include "acp.h"
25
26 #define PLAYBACK_MIN_NUM_PERIODS 2
27 #define PLAYBACK_MAX_NUM_PERIODS 2
28 #define PLAYBACK_MAX_PERIOD_SIZE 16384
29 #define PLAYBACK_MIN_PERIOD_SIZE 1024
30 #define CAPTURE_MIN_NUM_PERIODS 2
31 #define CAPTURE_MAX_NUM_PERIODS 2
32 #define CAPTURE_MAX_PERIOD_SIZE 16384
33 #define CAPTURE_MIN_PERIOD_SIZE 1024
34
35 #define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
36 #define MIN_BUFFER MAX_BUFFER
37
38 #define ST_PLAYBACK_MAX_PERIOD_SIZE 8192
39 #define ST_CAPTURE_MAX_PERIOD_SIZE ST_PLAYBACK_MAX_PERIOD_SIZE
40 #define ST_MAX_BUFFER (ST_PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
41 #define ST_MIN_BUFFER ST_MAX_BUFFER
42
43 #define DRV_NAME "acp_audio_dma"
44
/*
 * PCM hardware capabilities advertised to ALSA for playback on
 * non-Stoney (e.g. Carrizo) ACP hardware. Buffer/period geometry is
 * fixed to exactly two periods (double-buffered DMA descriptors).
 */
static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	/* two periods of 16 KiB each (see PLAYBACK_* defines above) */
	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};
63
/*
 * PCM hardware capabilities advertised to ALSA for capture on
 * non-Stoney ACP hardware. Capture is limited to stereo and 48 kHz,
 * unlike the 8-channel/96 kHz playback path.
 */
static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	/* two periods of 16 KiB each (see CAPTURE_* defines above) */
	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};
82
/*
 * Playback capabilities for the Stoney (ST) variant: same formats and
 * rates as the generic table, but with the smaller 8 KiB max period
 * size (ST_PLAYBACK_MAX_PERIOD_SIZE) to fit Stoney's SRAM layout.
 */
static const struct snd_pcm_hardware acp_st_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};
101
/*
 * Capture capabilities for the Stoney (ST) variant: stereo/48 kHz like
 * the generic capture table, but using the smaller ST buffer and
 * period limits.
 */
static const struct snd_pcm_hardware acp_st_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = ST_MAX_BUFFER,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = ST_CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};
120
121 static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
122 {
123 return readl(acp_mmio + (reg * 4));
124 }
125
126 static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
127 {
128 writel(val, acp_mmio + (reg * 4));
129 }
130
/*
 * Program a DMA channel's static parameters: the first descriptor
 * index, the number of descriptors to process, and the channel
 * priority. The channel's run bit is cleared first so the new
 * configuration takes effect before the channel is (re)started by
 * acp_dma_start().
 */
static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
				   u16 dscr_strt_idx, u16 num_dscrs,
				   enum acp_dma_priority_level priority_level)
{
	u32 dma_ctrl;

	/* disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* program a DMA channel with first descriptor to be processed. */
	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
			& dscr_strt_idx),
			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);

	/* program a DMA channel with the number of descriptors to be
	 * processed in the transfer
	 */
	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
		      acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);

	/* set DMA channel priority */
	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
}
159
/*
 * Initialize one DMA descriptor in ACP SRAM from the descriptor
 * information passed in. Descriptors live at the start of SRAM
 * (mmACP_DMA_DESC_BASE_ADDR is programmed in acp_init()); each is
 * accessed indirectly through the SRBM target index/data register
 * pair: write the SRAM offset to Targ_Idx_Addr, then the value to
 * Targ_Idx_Data.
 */
static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
					  u16 descr_idx,
					  acp_dma_dscr_transfer_t *descr_info)
{
	u32 sram_offset;

	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));

	/* program the source base address. */
	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
	/* program the destination base address. */
	acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

	/* program the number of bytes to be transferred for this descriptor. */
	acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
}
180
/*
 * Initialize the DMA descriptor information for transfers between
 * system memory <-> ACP SRAM, then bind the descriptors to the
 * appropriate channel (ch12 for playback, ch14 for capture).
 *
 * Each direction uses NUM_DSCRS_PER_CHANNEL descriptors, each covering
 * half of the audio buffer (size/2) — i.e. one descriptor per period.
 * System memory is reached through the ACP internal aperture window at
 * the page-table offset programmed by acp_pte_config(); the SRAM bank
 * differs per ASIC (bank 3 on Stoney capture, bank 5 otherwise).
 * BIT(22) in xfer_val appears to be a transfer-enable/IOC flag on the
 * capture descriptors — NOTE(review): exact bit semantics come from
 * ACP hardware docs, not visible here.
 */
static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 pte_offset, u32 asic_type)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			/* sysmem -> SRAM bank 1; one descriptor per half buffer */
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS
					+ (i * (size/2));
			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
				+ (pte_offset * SZ_4K) + (i * (size/2));
			switch (asic_type) {
			case CHIP_STONEY:
				/* Stoney reads sysmem via the Garlic DAGB */
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_GARLIC_TO_SHAREDMEM  << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM  << 16) |
				(size / 2);
			}
		} else {
			/* SRAM -> sysmem; source bank differs per ASIC */
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_3_ADDRESS +
				(i * (size/2));
				dmadscr[i].dest =
				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
				(pte_offset * SZ_4K) + (i * (size/2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHARED_MEM_TO_DAGB_GARLIC << 16) |
				(size / 2);
				break;
			default:
				dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
				(i * (size/2));
				dmadscr[i].dest =
				ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS +
				(pte_offset * SZ_4K) + (i * (size/2));
				dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
				(size / 2);
			}
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* bind both descriptors to the sysmem<->SRAM channel */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}
251
/*
 * Initialize the DMA descriptor information for transfers between
 * ACP SRAM <-> I2S, then bind the descriptors to the appropriate
 * channel (ch13 for playback, ch15 for capture).
 *
 * As with the sysmem descriptors, each of the NUM_DSCRS_PER_CHANNEL
 * descriptors covers half the buffer. For the SRAM<->I2S direction the
 * I2S endpoint is implied by the TO_ACP_I2S_1 / FROM_ACP_I2S_1
 * attribute in xfer_val, so the corresponding src/dest field is unused
 * by the hardware and written as 0.
 */
static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 asic_type)
{

	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
					 (i * (size/2));
			/* dmadscr[i].dest is unused by hardware. */
			dmadscr[i].dest = 0;
			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
						(size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
			/* dmadscr[i].src is unused by hardware. */
			dmadscr[i].src = 0;
			/* capture lands in bank 3 on Stoney, bank 5 otherwise */
			switch (asic_type) {
			case CHIP_STONEY:
				dmadscr[i].dest =
					 ACP_SHARED_RAM_BANK_3_ADDRESS +
					 (i * (size / 2));
				break;
			default:
				dmadscr[i].dest =
					 ACP_SHARED_RAM_BANK_5_ADDRESS +
					 (i * (size / 2));
			}
			dmadscr[i].xfer_val |= BIT(22) |
				(FROM_ACP_I2S_1 << 16) | (size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* Configure the DMA channel with the above descriptors */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}
307
308 /* Create page table entries in ACP SRAM for the allocated memory */
309 static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
310 u16 num_of_pages, u32 pte_offset)
311 {
312 u16 page_idx;
313 u64 addr;
314 u32 low;
315 u32 high;
316 u32 offset;
317
318 offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
319 for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
320 /* Load the low address of page int ACP SRAM through SRBM */
321 acp_reg_write((offset + (page_idx * 8)),
322 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
323 addr = page_to_phys(pg);
324
325 low = lower_32_bits(addr);
326 high = upper_32_bits(addr);
327
328 acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
329
330 /* Load the High address of page int ACP SRAM through SRBM */
331 acp_reg_write((offset + (page_idx * 8) + 4),
332 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
333
334 /* page enable in ACP */
335 high |= BIT(31);
336 acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
337
338 /* Move to next physically contiguos page */
339 pg++;
340 }
341 }
342
343 static void config_acp_dma(void __iomem *acp_mmio,
344 struct audio_substream_data *audio_config,
345 u32 asic_type)
346 {
347 u32 pte_offset;
348
349 if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
350 pte_offset = ACP_PLAYBACK_PTE_OFFSET;
351 else
352 pte_offset = ACP_CAPTURE_PTE_OFFSET;
353
354 acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
355 pte_offset);
356
357 /* Configure System memory <-> ACP SRAM DMA descriptors */
358 set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
359 audio_config->direction, pte_offset, asic_type);
360
361 /* Configure ACP SRAM <-> I2S DMA descriptors */
362 set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
363 audio_config->direction, asic_type);
364 }
365
366 /* Start a given DMA channel transfer */
367 static void acp_dma_start(void __iomem *acp_mmio,
368 u16 ch_num, bool is_circular)
369 {
370 u32 dma_ctrl;
371
372 /* read the dma control register and disable the channel run field */
373 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
374
375 /* Invalidating the DAGB cache */
376 acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);
377
378 /* configure the DMA channel and start the DMA transfer
379 * set dmachrun bit to start the transfer and enable the
380 * interrupt on completion of the dma transfer
381 */
382 dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;
383
384 switch (ch_num) {
385 case ACP_TO_I2S_DMA_CH_NUM:
386 case ACP_TO_SYSRAM_CH_NUM:
387 case I2S_TO_ACP_DMA_CH_NUM:
388 dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
389 break;
390 default:
391 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
392 break;
393 }
394
395 /* enable for ACP SRAM to/from I2S DMA channel */
396 if (is_circular == true)
397 dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
398 else
399 dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
400
401 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
402 }
403
/*
 * Stop a given DMA channel transfer.
 *
 * Clears the run/IOC bits, and if the channel status bit shows the
 * channel is still active, asserts the channel reset bit. Then polls
 * the status register (up to ACP_DMA_RESET_TIME * 100us) until the
 * channel goes idle, clearing the reset bit once it does.
 *
 * Returns 0 on success, -ETIMEDOUT if the channel never stopped.
 */
static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
{
	u32 dma_ctrl;
	u32 dma_ch_sts;
	u32 count = ACP_DMA_RESET_TIME;

	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* clear the dma control register fields before writing zero
	 * in reset bit
	 */
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);

	if (dma_ch_sts & BIT(ch_num)) {
		/* set the reset bit for this channel to stop the dma
		 * transfer
		 */
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	}

	/* check the channel status bit for some time and return the status */
	while (true) {
		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
		if (!(dma_ch_sts & BIT(ch_num))) {
			/* clear the reset flag after successfully stopping
			 * the dma transfer and break from the loop
			 */
			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;

			acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
				      + ch_num);
			break;
		}
		if (--count == 0) {
			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}
451
452 static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
453 bool power_on)
454 {
455 u32 val, req_reg, sts_reg, sts_reg_mask;
456 u32 loops = 1000;
457
458 if (bank < 32) {
459 req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
460 sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
461 sts_reg_mask = 0xFFFFFFFF;
462
463 } else {
464 bank -= 32;
465 req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
466 sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
467 sts_reg_mask = 0x0000FFFF;
468 }
469
470 val = acp_reg_read(acp_mmio, req_reg);
471 if (val & (1 << bank)) {
472 /* bank is in off state */
473 if (power_on == true)
474 /* request to on */
475 val &= ~(1 << bank);
476 else
477 /* request to off */
478 return;
479 } else {
480 /* bank is in on state */
481 if (power_on == false)
482 /* request to off */
483 val |= 1 << bank;
484 else
485 /* request to on */
486 return;
487 }
488 acp_reg_write(val, acp_mmio, req_reg);
489
490 while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
491 if (!loops--) {
492 pr_err("ACP SRAM bank %d state change failed\n", bank);
493 break;
494 }
495 cpu_relax();
496 }
497 }
498
/*
 * Initialize and bring ACP hardware to default state:
 *  1. soft-reset the audio block and wait for completion,
 *  2. enable the ACP clock and wait for it to come up,
 *  3. deassert reset and program the DAGB (Onion/Garlic fabric
 *     bridges), page table group and descriptor base registers,
 *  4. power down unused SRAM banks (skipped on Stoney, where memory
 *     gating is disabled) and, on Stoney, enable 16-bit I2S resolution.
 *
 * Returns 0 on success, -ETIMEDOUT if reset or clock enable times out.
 */
static int acp_init(void __iomem *acp_mmio, u32 asic_type)
{
	u16 bank;
	u32 val, count, sram_pte_offset;

	/* Assert Soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Enable clock to ACP and wait until the clock is enabled */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val = val | ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		/* bit 0 of ACP_STATUS reflects the clock-enabled state */
		if (val & (u32) 0x1)
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Deassert the SOFT RESET flags */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	/* initialize Onion control DAGB register */
	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_ONION_CNTL);

	/* initialize Garlic control DAGB registers */
	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_GARLIC_CNTL);

	/* enable PTE group 1 with snooping, targeting SRAM */
	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
			ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
	acp_reg_write(sram_pte_offset,  acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
		      mmACP_DAGB_PAGE_SIZE_GRP_1);

	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
		      mmACP_DMA_DESC_BASE_ADDR);

	/* Num of descriptors in SRAM 0x4, means 256 descriptors;(64 * 4) */
	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
		      acp_mmio, mmACP_EXTERNAL_INTR_CNTL);

	/* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
	 * Now, turn off all of them. This can't be done in 'poweron' of
	 * ACP pm domain, as this requires ACP to be initialized.
	 * For Stoney, Memory gating is disabled,i.e SRAM Banks
	 * won't be turned off. The default state for SRAM banks is ON.
	 * Setting SRAM bank state code skipped for STONEY platform.
	 */
	if (asic_type != CHIP_STONEY) {
		for (bank = 1; bank < 48; bank++)
			acp_set_sram_bank_state(acp_mmio, bank, false);
	}

	/* Stoney supports 16bit resolution */
	if (asic_type == CHIP_STONEY) {
		val = acp_reg_read(acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
		val |= 0x03;
		acp_reg_write(val, acp_mmio, mmACP_I2S_16BIT_RESOLUTION_EN);
	}
	return 0;
}
591
592 /* Deinitialize ACP */
593 static int acp_deinit(void __iomem *acp_mmio)
594 {
595 u32 val;
596 u32 count;
597
598 /* Assert Soft reset of ACP */
599 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
600
601 val |= ACP_SOFT_RESET__SoftResetAud_MASK;
602 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
603
604 count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
605 while (true) {
606 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
607 if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
608 (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
609 break;
610 if (--count == 0) {
611 pr_err("Failed to reset ACP\n");
612 return -ETIMEDOUT;
613 }
614 udelay(100);
615 }
616 /** Disable ACP clock */
617 val = acp_reg_read(acp_mmio, mmACP_CONTROL);
618 val &= ~ACP_CONTROL__ClkEn_MASK;
619 acp_reg_write(val, acp_mmio, mmACP_CONTROL);
620
621 count = ACP_CLOCK_EN_TIME_OUT_VALUE;
622
623 while (true) {
624 val = acp_reg_read(acp_mmio, mmACP_STATUS);
625 if (!(val & (u32) 0x1))
626 break;
627 if (--count == 0) {
628 pr_err("Failed to reset ACP\n");
629 return -ETIMEDOUT;
630 }
631 udelay(100);
632 }
633 return 0;
634 }
635
/*
 * ACP DMA irq handler routine for playback, capture usecases.
 *
 * On each interrupt-on-complete from the SRAM<->I2S or sysmem<->SRAM
 * channels, this re-arms the corresponding non-circular sysmem channel
 * with the *other* half-buffer descriptor (ping-pong between the START
 * and END descriptors based on which descriptor the circular I2S
 * channel is currently on), restarts it, signals period elapsed to
 * ALSA, and acknowledges the interrupt by writing the flag back
 * (shifted into the ack field) to mmACP_EXTERNAL_INTR_STAT.
 */
static irqreturn_t dma_irq_handler(int irq, void *arg)
{
	u16 dscr_idx;
	u32 intr_flag, ext_intr_status;
	struct audio_drv_data *irq_data;
	void __iomem *acp_mmio;
	struct device *dev = arg;
	bool valid_irq = false;

	irq_data = dev_get_drvdata(dev);
	acp_mmio = irq_data->acp_mmio;

	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	intr_flag = (((ext_intr_status &
		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
		     ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));

	/* playback: I2S channel finished a period — refill via ch12 */
	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
		    PLAYBACK_START_DMA_DESCR_CH13)
			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
		else
			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		snd_pcm_period_elapsed(irq_data->play_stream);

		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	/* capture: I2S->SRAM finished — drain to sysmem via ch14 */
	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
		    CAPTURE_START_DMA_DESCR_CH15)
			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
		else
			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);

		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	/* capture data reached system memory — report the period */
	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
		valid_irq = true;
		snd_pcm_period_elapsed(irq_data->capture_stream);
		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if (valid_irq)
		return IRQ_HANDLED;
	else
		return IRQ_NONE;
}
698
/*
 * PCM .open callback: allocate per-substream state, publish the
 * ASIC-specific hardware capabilities to the runtime, enable the ACP
 * interrupt if this is the first active stream, and power on the SRAM
 * banks used by this direction (banks 1-4 playback, 5-8 capture;
 * skipped on Stoney where memory gating is disabled).
 *
 * Returns 0 on success, -ENOMEM on allocation failure, or the error
 * from snd_pcm_hw_constraint_integer().
 */
static int acp_dma_open(struct snd_pcm_substream *substream)
{
	u16 bank;
	int ret = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);

	struct audio_substream_data *adata =
		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
	if (adata == NULL)
		return -ENOMEM;

	/* select the ASIC-appropriate capability table */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_playback;
			break;
		default:
			runtime->hw = acp_pcm_hardware_playback;
		}
	} else {
		switch (intr_data->asic_type) {
		case CHIP_STONEY:
			runtime->hw = acp_st_pcm_hardware_capture;
			break;
		default:
			runtime->hw = acp_pcm_hardware_capture;
		}
	}

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(prtd->platform->dev, "set integer constraint failed\n");
		kfree(adata);
		return ret;
	}

	adata->acp_mmio = intr_data->acp_mmio;
	runtime->private_data = adata;

	/* Enable ACP irq, when neither playback or capture streams are
	 * active by the time when a new stream is being opened.
	 * This enablement is not required for another stream, if current
	 * stream is not closed
	 */
	if (!intr_data->play_stream && !intr_data->capture_stream)
		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		intr_data->play_stream = substream;
		/* For Stoney, Memory gating is disabled,i.e SRAM Banks
		 * won't be turned off. The default state for SRAM banks is ON.
		 * Setting SRAM bank state code skipped for STONEY platform.
		 */
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	} else {
		intr_data->capture_stream = substream;
		if (intr_data->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(intr_data->acp_mmio,
							bank, true);
		}
	}

	return 0;
}
771
772 static int acp_dma_hw_params(struct snd_pcm_substream *substream,
773 struct snd_pcm_hw_params *params)
774 {
775 int status;
776 uint64_t size;
777 struct page *pg;
778 struct snd_pcm_runtime *runtime;
779 struct audio_substream_data *rtd;
780 struct snd_soc_pcm_runtime *prtd = substream->private_data;
781 struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);
782
783 runtime = substream->runtime;
784 rtd = runtime->private_data;
785
786 if (WARN_ON(!rtd))
787 return -EINVAL;
788
789 size = params_buffer_bytes(params);
790 status = snd_pcm_lib_malloc_pages(substream, size);
791 if (status < 0)
792 return status;
793
794 memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
795 pg = virt_to_page(substream->dma_buffer.area);
796
797 if (pg != NULL) {
798 acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
799 /* Save for runtime private data */
800 rtd->pg = pg;
801 rtd->order = get_order(size);
802
803 /* Fill the page table entries in ACP SRAM */
804 rtd->pg = pg;
805 rtd->size = size;
806 rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
807 rtd->direction = substream->stream;
808
809 config_acp_dma(rtd->acp_mmio, rtd, adata->asic_type);
810 status = 0;
811 } else {
812 status = -ENOMEM;
813 }
814 return status;
815 }
816
/* PCM .hw_free callback: release the buffer pages allocated in hw_params. */
static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}
821
822 static u64 acp_get_byte_count(void __iomem *acp_mmio, int stream)
823 {
824 union acp_dma_count playback_dma_count;
825 union acp_dma_count capture_dma_count;
826 u64 bytescount = 0;
827
828 if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
829 playback_dma_count.bcount.high = acp_reg_read(acp_mmio,
830 mmACP_I2S_TRANSMIT_BYTE_CNT_HIGH);
831 playback_dma_count.bcount.low = acp_reg_read(acp_mmio,
832 mmACP_I2S_TRANSMIT_BYTE_CNT_LOW);
833 bytescount = playback_dma_count.bytescount;
834 } else {
835 capture_dma_count.bcount.high = acp_reg_read(acp_mmio,
836 mmACP_I2S_RECEIVED_BYTE_CNT_HIGH);
837 capture_dma_count.bcount.low = acp_reg_read(acp_mmio,
838 mmACP_I2S_RECEIVED_BYTE_CNT_LOW);
839 bytescount = capture_dma_count.bytescount;
840 }
841 return bytescount;
842 }
843
/*
 * PCM .pointer callback: report the current position within the ring
 * buffer, in frames.
 *
 * The hardware byte counter is free-running, so the count latched at
 * trigger-start (renderbytescount / capturebytescount) is subtracted
 * first; the remainder modulo the buffer size (via do_div, which
 * leaves the quotient in bytescount and returns the remainder) is the
 * position in bytes.
 */
static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
{
	u32 buffersize;
	u32 pos = 0;
	u64 bytescount = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;

	buffersize = frames_to_bytes(runtime, runtime->buffer_size);
	bytescount = acp_get_byte_count(rtd->acp_mmio, substream->stream);

	/* rebase the free-running counter to this stream's start */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		if (bytescount > rtd->renderbytescount)
			bytescount = bytescount - rtd->renderbytescount;
	} else {
		if (bytescount > rtd->capturebytescount)
			bytescount = bytescount - rtd->capturebytescount;
	}
	/* do_div returns the remainder: offset within the ring buffer */
	pos = do_div(bytescount, buffersize);
	return bytes_to_frames(runtime, pos);
}
869
/* PCM .mmap callback: delegate to the ALSA default DMA-buffer mmap. */
static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}
875
876 static int acp_dma_prepare(struct snd_pcm_substream *substream)
877 {
878 struct snd_pcm_runtime *runtime = substream->runtime;
879 struct audio_substream_data *rtd = runtime->private_data;
880
881 if (!rtd)
882 return -EINVAL;
883 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
884 config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
885 PLAYBACK_START_DMA_DESCR_CH12,
886 NUM_DSCRS_PER_CHANNEL, 0);
887 config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
888 PLAYBACK_START_DMA_DESCR_CH13,
889 NUM_DSCRS_PER_CHANNEL, 0);
890 } else {
891 config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
892 CAPTURE_START_DMA_DESCR_CH14,
893 NUM_DSCRS_PER_CHANNEL, 0);
894 config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
895 CAPTURE_START_DMA_DESCR_CH15,
896 NUM_DSCRS_PER_CHANNEL, 0);
897 }
898 return 0;
899 }
900
/*
 * PCM .trigger callback.
 *
 * START/RESUME: latch the hardware byte counter as the stream's zero
 * point (used by acp_dma_pointer()), kick the non-circular sysmem
 * channel for playback and wait (bounded poll) for its first transfer
 * to finish, then start the circular SRAM<->I2S channel.
 *
 * STOP/SUSPEND: stop only the circular channel and clear the latched
 * byte count; the non-circular sysmem channels stop on their own when
 * the current transfer completes.
 *
 * Returns 0 on success, -ETIMEDOUT on DMA start timeout, -EINVAL for
 * unsupported commands.
 */
static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret;
	u32 loops = 4000;
	u64 bytescount = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		bytescount = acp_get_byte_count(rtd->acp_mmio,
						substream->stream);
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			/* remember the counter at stream start */
			if (rtd->renderbytescount == 0)
				rtd->renderbytescount = bytescount;
			acp_dma_start(rtd->acp_mmio,
				      SYSRAM_TO_ACP_CH_NUM, false);
			/* wait for the first sysmem->SRAM fill to finish */
			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
			       BIT(SYSRAM_TO_ACP_CH_NUM)) {
				if (!loops--) {
					dev_err(prtd->platform->dev,
						"acp dma start timeout\n");
					return -ETIMEDOUT;
				}
				cpu_relax();
			}

			acp_dma_start(rtd->acp_mmio,
				      ACP_TO_I2S_DMA_CH_NUM, true);

		} else {
			if (rtd->capturebytescount == 0)
				rtd->capturebytescount = bytescount;
			acp_dma_start(rtd->acp_mmio,
				      I2S_TO_ACP_DMA_CH_NUM, true);
		}
		ret = 0;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		/* Need to stop only circular DMA channels :
		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. Non-circular
		 * channels will stopped automatically after its transfer
		 * completes : SYSRAM_TO_ACP_CH_NUM / ACP_TO_SYSRAM_CH_NUM
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			ret = acp_dma_stop(rtd->acp_mmio,
					   ACP_TO_I2S_DMA_CH_NUM);
			rtd->renderbytescount = 0;
		} else {
			ret = acp_dma_stop(rtd->acp_mmio,
					   I2S_TO_ACP_DMA_CH_NUM);
			rtd->capturebytescount = 0;
		}
		break;
	default:
		ret = -EINVAL;

	}
	return ret;
}
969
970 static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
971 {
972 int ret;
973 struct audio_drv_data *adata = dev_get_drvdata(rtd->platform->dev);
974
975 switch (adata->asic_type) {
976 case CHIP_STONEY:
977 ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
978 SNDRV_DMA_TYPE_DEV,
979 NULL, ST_MIN_BUFFER,
980 ST_MAX_BUFFER);
981 break;
982 default:
983 ret = snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
984 SNDRV_DMA_TYPE_DEV,
985 NULL, MIN_BUFFER,
986 MAX_BUFFER);
987 break;
988 }
989 if (ret < 0)
990 dev_err(rtd->platform->dev,
991 "buffer preallocation failer error:%d\n", ret);
992 return ret;
993 }
994
/*
 * PCM .close callback: free the per-substream data, clear the driver's
 * stream pointer, power down this direction's SRAM banks (banks 1-4
 * playback, 5-8 capture; skipped on Stoney), and disable the ACP
 * interrupt once no stream in either direction remains open.
 */
static int acp_dma_close(struct snd_pcm_substream *substream)
{
	u16 bank;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	kfree(rtd);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		adata->play_stream = NULL;
		/* For Stoney, Memory gating is disabled,i.e SRAM Banks
		 * won't be turned off. The default state for SRAM banks is ON.
		 * Setting SRAM bank state code skipped for STONEY platform.
		 * added condition checks for Carrizo platform only
		 */
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 1; bank <= 4; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	} else {
		adata->capture_stream = NULL;
		if (adata->asic_type != CHIP_STONEY) {
			for (bank = 5; bank <= 8; bank++)
				acp_set_sram_bank_state(adata->acp_mmio, bank,
							false);
		}
	}

	/* Disable ACP irq, when the current stream is being closed and
	 * another stream is also not active.
	 */
	if (!adata->play_stream && !adata->capture_stream)
		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	return 0;
}
1034
/* ALSA PCM operations implemented by this ACP DMA engine driver. */
static const struct snd_pcm_ops acp_dma_ops = {
	.open = acp_dma_open,
	.close = acp_dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = acp_dma_hw_params,
	.hw_free = acp_dma_hw_free,
	.trigger = acp_dma_trigger,
	.pointer = acp_dma_pointer,
	.mmap = acp_dma_mmap,
	.prepare = acp_dma_prepare,
};
1046
/* ASoC platform driver: DMA ops plus per-PCM buffer preallocation. */
static struct snd_soc_platform_driver acp_asoc_platform = {
	.ops = &acp_dma_ops,
	.pcm_new = acp_dma_new,
};
1051
1052 static int acp_audio_probe(struct platform_device *pdev)
1053 {
1054 int status;
1055 struct audio_drv_data *audio_drv_data;
1056 struct resource *res;
1057 const u32 *pdata = pdev->dev.platform_data;
1058
1059 if (!pdata) {
1060 dev_err(&pdev->dev, "Missing platform data\n");
1061 return -ENODEV;
1062 }
1063
1064 audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
1065 GFP_KERNEL);
1066 if (audio_drv_data == NULL)
1067 return -ENOMEM;
1068
1069 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1070 audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
1071 if (IS_ERR(audio_drv_data->acp_mmio))
1072 return PTR_ERR(audio_drv_data->acp_mmio);
1073
1074 /* The following members gets populated in device 'open'
1075 * function. Till then interrupts are disabled in 'acp_init'
1076 * and device doesn't generate any interrupts.
1077 */
1078
1079 audio_drv_data->play_stream = NULL;
1080 audio_drv_data->capture_stream = NULL;
1081 audio_drv_data->asic_type = *pdata;
1082
1083 res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1084 if (!res) {
1085 dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
1086 return -ENODEV;
1087 }
1088
1089 status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
1090 0, "ACP_IRQ", &pdev->dev);
1091 if (status) {
1092 dev_err(&pdev->dev, "ACP IRQ request failed\n");
1093 return status;
1094 }
1095
1096 dev_set_drvdata(&pdev->dev, audio_drv_data);
1097
1098 /* Initialize the ACP */
1099 status = acp_init(audio_drv_data->acp_mmio, audio_drv_data->asic_type);
1100 if (status) {
1101 dev_err(&pdev->dev, "ACP Init failed status:%d\n", status);
1102 return status;
1103 }
1104
1105 status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
1106 if (status != 0) {
1107 dev_err(&pdev->dev, "Fail to register ALSA platform device\n");
1108 return status;
1109 }
1110
1111 pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
1112 pm_runtime_use_autosuspend(&pdev->dev);
1113 pm_runtime_enable(&pdev->dev);
1114
1115 return status;
1116 }
1117
/*
 * Platform-device remove: power down the ACP block, unregister the
 * ASoC platform driver and tear down runtime PM. A deinit failure is
 * logged but does not abort the remove (the device is going away
 * regardless). MMIO mapping and IRQ are devres-managed and released
 * automatically after this returns.
 */
static int acp_audio_remove(struct platform_device *pdev)
{
	int status;
	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);

	status = acp_deinit(adata->acp_mmio);
	if (status)
		dev_err(&pdev->dev, "ACP Deinit failed status:%d\n", status);
	snd_soc_unregister_platform(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}
1131
1132 static int acp_pcm_resume(struct device *dev)
1133 {
1134 u16 bank;
1135 int status;
1136 struct audio_drv_data *adata = dev_get_drvdata(dev);
1137
1138 status = acp_init(adata->acp_mmio, adata->asic_type);
1139 if (status) {
1140 dev_err(dev, "ACP Init failed status:%d\n", status);
1141 return status;
1142 }
1143
1144 if (adata->play_stream && adata->play_stream->runtime) {
1145 /* For Stoney, Memory gating is disabled,i.e SRAM Banks
1146 * won't be turned off. The default state for SRAM banks is ON.
1147 * Setting SRAM bank state code skipped for STONEY platform.
1148 */
1149 if (adata->asic_type != CHIP_STONEY) {
1150 for (bank = 1; bank <= 4; bank++)
1151 acp_set_sram_bank_state(adata->acp_mmio, bank,
1152 true);
1153 }
1154 config_acp_dma(adata->acp_mmio,
1155 adata->play_stream->runtime->private_data,
1156 adata->asic_type);
1157 }
1158 if (adata->capture_stream && adata->capture_stream->runtime) {
1159 if (adata->asic_type != CHIP_STONEY) {
1160 for (bank = 5; bank <= 8; bank++)
1161 acp_set_sram_bank_state(adata->acp_mmio, bank,
1162 true);
1163 }
1164 config_acp_dma(adata->acp_mmio,
1165 adata->capture_stream->runtime->private_data,
1166 adata->asic_type);
1167 }
1168 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1169 return 0;
1170 }
1171
1172 static int acp_pcm_runtime_suspend(struct device *dev)
1173 {
1174 int status;
1175 struct audio_drv_data *adata = dev_get_drvdata(dev);
1176
1177 status = acp_deinit(adata->acp_mmio);
1178 if (status)
1179 dev_err(dev, "ACP Deinit failed status:%d\n", status);
1180 acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1181 return 0;
1182 }
1183
1184 static int acp_pcm_runtime_resume(struct device *dev)
1185 {
1186 int status;
1187 struct audio_drv_data *adata = dev_get_drvdata(dev);
1188
1189 status = acp_init(adata->acp_mmio, adata->asic_type);
1190 if (status) {
1191 dev_err(dev, "ACP Init failed status:%d\n", status);
1192 return status;
1193 }
1194 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1195 return 0;
1196 }
1197
/*
 * PM callbacks: system resume plus runtime suspend/resume.
 * NOTE(review): no system .suspend handler is registered here —
 * presumably runtime suspend covers the power-down path; confirm.
 */
static const struct dev_pm_ops acp_pm_ops = {
	.resume = acp_pcm_resume,
	.runtime_suspend = acp_pcm_runtime_suspend,
	.runtime_resume = acp_pcm_runtime_resume,
};
1203
/* Platform driver glue; matched by name ("acp_audio_dma"). */
static struct platform_driver acp_dma_driver = {
	.probe = acp_audio_probe,
	.remove = acp_audio_remove,
	.driver = {
		.name = DRV_NAME,
		.pm = &acp_pm_ops,
	},
};

module_platform_driver(acp_dma_driver);

MODULE_AUTHOR("Vijendar.Mukunda@amd.com");
MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
MODULE_DESCRIPTION("AMD ACP PCM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:"DRV_NAME);