sound/soc/amd/acp-pcm-dma.c
1 /*
2 * AMD ALSA SoC PCM Driver for ACP 2.x
3 *
4 * Copyright 2014-2015 Advanced Micro Devices, Inc.
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms and conditions of the GNU General Public License,
8 * version 2, as published by the Free Software Foundation.
9 *
10 * This program is distributed in the hope it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 */
15
16 #include <linux/module.h>
17 #include <linux/delay.h>
18 #include <linux/io.h>
19 #include <linux/sizes.h>
20 #include <linux/pm_runtime.h>
21
22 #include <sound/soc.h>
23
24 #include "acp.h"
25
26 #define PLAYBACK_MIN_NUM_PERIODS 2
27 #define PLAYBACK_MAX_NUM_PERIODS 2
28 #define PLAYBACK_MAX_PERIOD_SIZE 16384
29 #define PLAYBACK_MIN_PERIOD_SIZE 1024
30 #define CAPTURE_MIN_NUM_PERIODS 2
31 #define CAPTURE_MAX_NUM_PERIODS 2
32 #define CAPTURE_MAX_PERIOD_SIZE 16384
33 #define CAPTURE_MIN_PERIOD_SIZE 1024
34
35 #define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
36 #define MIN_BUFFER MAX_BUFFER
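/*
 * MIN_BUFFER == MAX_BUFFER, so the full 32 KB DMA buffer (2 periods of
 * 16 KB) is preallocated up front for each stream in acp_dma_new().
 */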
37
38 static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
39 .info = SNDRV_PCM_INFO_INTERLEAVED |
40 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
41 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
42 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
43 .formats = SNDRV_PCM_FMTBIT_S16_LE |
44 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
45 .channels_min = 1,
46 .channels_max = 8,
47 .rates = SNDRV_PCM_RATE_8000_96000,
48 .rate_min = 8000,
49 .rate_max = 96000,
50 .buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
51 .period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
52 .period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
53 .periods_min = PLAYBACK_MIN_NUM_PERIODS,
54 .periods_max = PLAYBACK_MAX_NUM_PERIODS,
55 };
56
57 static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
58 .info = SNDRV_PCM_INFO_INTERLEAVED |
59 SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
60 SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
61 SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
62 .formats = SNDRV_PCM_FMTBIT_S16_LE |
63 SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
64 .channels_min = 1,
65 .channels_max = 2,
66 .rates = SNDRV_PCM_RATE_8000_48000,
67 .rate_min = 8000,
68 .rate_max = 48000,
69 .buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
70 .period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
71 .period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
72 .periods_min = CAPTURE_MIN_NUM_PERIODS,
73 .periods_max = CAPTURE_MAX_NUM_PERIODS,
74 };
75
76 struct audio_drv_data {
77 struct snd_pcm_substream *play_stream;
78 struct snd_pcm_substream *capture_stream;
79 void __iomem *acp_mmio;
80 };
81
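/*
 * Register accessors: the mmACP_* definitions appear to be 32-bit word
 * indices rather than byte offsets, hence the multiplication by 4 when
 * forming the MMIO address below.
 */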
82 static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
83 {
84 return readl(acp_mmio + (reg * 4));
85 }
86
87 static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
88 {
89 writel(val, acp_mmio + (reg * 4));
90 }
91
92 /* Configure the parameters of a given DMA channel - enable/disable,
93 * number of descriptors, priority
94 */
95 static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
96 u16 dscr_strt_idx, u16 num_dscrs,
97 enum acp_dma_priority_level priority_level)
98 {
99 u32 dma_ctrl;
100
101 /* disable the channel run field */
102 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
103 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
104 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
105
106 /* program a DMA channel with the first descriptor to be processed. */
107 acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
108 & dscr_strt_idx),
109 acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);
110
111 /* program a DMA channel with the number of descriptors to be
112 * processed in the transfer
113 */
114 acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
115 acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);
116
117 /* set DMA channel priority */
118 acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
119 }
120
121 /* Initialize a DMA descriptor in SRAM based on the descriptor information passed */
122 static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
123 u16 descr_idx,
124 acp_dma_dscr_transfer_t *descr_info)
125 {
126 u32 sram_offset;
127
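        /*
         * Descriptor memory is written through the indirect SRBM target
         * index/data pair: the SRAM offset goes to mmACP_SRBM_Targ_Idx_Addr
         * and the value to mmACP_SRBM_Targ_Idx_Data.
         */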
128 sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));
129
130 /* program the source base address. */
131 acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
132 acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
133 /* program the destination base address. */
134 acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
135 acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
136
137 /* program the number of bytes to be transferred for this descriptor. */
138 acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
139 acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
140 }
141
142 /* Initialize the DMA descriptor information for transfer between
143 * system memory <-> ACP SRAM
144 */
145 static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
146 u32 size, int direction,
147 u32 pte_offset)
148 {
149 u16 i;
150 u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
151 acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
152
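        /*
         * Each DMA channel uses two descriptors, one per half of the audio
         * buffer, so transfers ping-pong between the buffer halves.
         */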
153 for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
154 dmadscr[i].xfer_val = 0;
155 if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
156 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
157 dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS +
158 (size / 2) - (i * (size/2));
159 dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
160 + (pte_offset * SZ_4K) + (i * (size/2));
161 dmadscr[i].xfer_val |=
162 (ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM << 16) |
163 (size / 2);
164 } else {
165 dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
166 dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
167 (i * (size/2));
168 dmadscr[i].dest = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
169 + (pte_offset * SZ_4K) +
170 (i * (size/2));
171 dmadscr[i].xfer_val |=
172 BIT(22) |
173 (ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
174 (size / 2);
175 }
176 config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
177 &dmadscr[i]);
178 }
179 if (direction == SNDRV_PCM_STREAM_PLAYBACK)
180 config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
181 PLAYBACK_START_DMA_DESCR_CH12,
182 NUM_DSCRS_PER_CHANNEL,
183 ACP_DMA_PRIORITY_LEVEL_NORMAL);
184 else
185 config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
186 CAPTURE_START_DMA_DESCR_CH14,
187 NUM_DSCRS_PER_CHANNEL,
188 ACP_DMA_PRIORITY_LEVEL_NORMAL);
189 }
190
191 /* Initialize the DMA descriptor information for transfer between
192 * ACP SRAM <-> I2S
193 */
194 static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
195 u32 size, int direction)
196 {
197
198 u16 i;
199 u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
200 acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];
201
202 for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
203 dmadscr[i].xfer_val = 0;
204 if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
205 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
206 dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
207 (i * (size/2));
208 /* dmadscr[i].dest is unused by hardware. */
209 dmadscr[i].dest = 0;
210 dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
211 (size / 2);
212 } else {
213 dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
214 /* dmadscr[i].src is unused by hardware. */
215 dmadscr[i].src = 0;
216 dmadscr[i].dest = ACP_SHARED_RAM_BANK_5_ADDRESS +
217 (i * (size / 2));
218 dmadscr[i].xfer_val |= BIT(22) |
219 (FROM_ACP_I2S_1 << 16) | (size / 2);
220 }
221 config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
222 &dmadscr[i]);
223 }
224 /* Configure the DMA channel with the above descriptors */
225 if (direction == SNDRV_PCM_STREAM_PLAYBACK)
226 config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
227 PLAYBACK_START_DMA_DESCR_CH13,
228 NUM_DSCRS_PER_CHANNEL,
229 ACP_DMA_PRIORITY_LEVEL_NORMAL);
230 else
231 config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
232 CAPTURE_START_DMA_DESCR_CH15,
233 NUM_DSCRS_PER_CHANNEL,
234 ACP_DMA_PRIORITY_LEVEL_NORMAL);
235 }
236
237 /* Create page table entries in ACP SRAM for the allocated memory */
238 static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
239 u16 num_of_pages, u32 pte_offset)
240 {
241 u16 page_idx;
242 u64 addr;
243 u32 low;
244 u32 high;
245 u32 offset;
246
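        /*
         * Each page table entry occupies 8 bytes of ACP SRAM: the low
         * 32 bits of the page address first, then the high bits with
         * bit 31 acting as the page-enable flag.
         */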
247 offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
248 for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
249 /* Load the low address of the page into ACP SRAM through SRBM */
250 acp_reg_write((offset + (page_idx * 8)),
251 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
252 addr = page_to_phys(pg);
253
254 low = lower_32_bits(addr);
255 high = upper_32_bits(addr);
256
257 acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
258
259 /* Load the high address of the page into ACP SRAM through SRBM */
260 acp_reg_write((offset + (page_idx * 8) + 4),
261 acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
262
263 /* page enable in ACP */
264 high |= BIT(31);
265 acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
266
267 /* Move to the next physically contiguous page */
268 pg++;
269 }
270 }
271
272 static void config_acp_dma(void __iomem *acp_mmio,
273 struct audio_substream_data *audio_config)
274 {
275 u32 pte_offset;
276
277 if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
278 pte_offset = ACP_PLAYBACK_PTE_OFFSET;
279 else
280 pte_offset = ACP_CAPTURE_PTE_OFFSET;
281
282 acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
283 pte_offset);
284
285 /* Configure System memory <-> ACP SRAM DMA descriptors */
286 set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
287 audio_config->direction, pte_offset);
288
289 /* Configure ACP SRAM <-> I2S DMA descriptors */
290 set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
291 audio_config->direction);
292 }
293
294 /* Start a given DMA channel transfer */
295 static void acp_dma_start(void __iomem *acp_mmio,
296 u16 ch_num, bool is_circular)
297 {
298 u32 dma_ctrl;
299
300 /* read the dma control register of the given channel */
301 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
302
303 /* Invalidating the DAGB cache */
304 acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);
305
306 /* Configure the DMA channel and start the DMA transfer:
307 * set the DMAChRun bit to start the transfer and enable the
308 * interrupt on completion of the DMA transfer.
309 */
310 dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;
311
312 switch (ch_num) {
313 case ACP_TO_I2S_DMA_CH_NUM:
314 case ACP_TO_SYSRAM_CH_NUM:
315 case I2S_TO_ACP_DMA_CH_NUM:
316 dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
317 break;
318 default:
319 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
320 break;
321 }
322
323 /* circular mode is used for the ACP SRAM to/from I2S DMA channels */
324 if (is_circular)
325 dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
326 else
327 dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
328
329 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
330 }
331
332 /* Stop a given DMA channel transfer */
333 static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
334 {
335 u32 dma_ctrl;
336 u32 dma_ch_sts;
337 u32 count = ACP_DMA_RESET_TIME;
338
339 dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
340
341 /* clear the dma control register fields before writing zero
342 * to the reset bit
343 */
344 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
345 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
346
347 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
348 dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
349
350 if (dma_ch_sts & BIT(ch_num)) {
351 /* set the reset bit for this channel to stop the dma
352 * transfer
353 */
354 dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
355 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
356 }
357
358 /* check the channel status bit for some time and return the status */
359 while (true) {
360 dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
361 if (!(dma_ch_sts & BIT(ch_num))) {
362 /* clear the reset flag after successfully stopping
363 * the dma transfer and break from the loop
364 */
365 dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;
366
367 acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
368 + ch_num);
369 break;
370 }
371 if (--count == 0) {
372 pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
373 return -ETIMEDOUT;
374 }
375 udelay(100);
376 }
377 return 0;
378 }
379
380 static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
381 bool power_on)
382 {
383 u32 val, req_reg, sts_reg, sts_reg_mask;
384 u32 loops = 1000;
385
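        /*
         * Banks 0-31 are controlled through the _LO shutdown request/status
         * registers, banks 32-47 through the _HI registers.
         */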
386 if (bank < 32) {
387 req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
388 sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
389 sts_reg_mask = 0xFFFFFFFF;
390
391 } else {
392 bank -= 32;
393 req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
394 sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
395 sts_reg_mask = 0x0000FFFF;
396 }
397
398 val = acp_reg_read(acp_mmio, req_reg);
399 if (val & (1 << bank)) {
400 /* the bank is currently off */
401 if (power_on)
402 /* request to power it on */
403 val &= ~(1 << bank);
404 else
405 /* already off, nothing to do */
406 return;
407 } else {
408 /* the bank is currently on */
409 if (!power_on)
410 /* request to power it off */
411 val |= 1 << bank;
412 else
413 /* already on, nothing to do */
414 return;
415 }
416 acp_reg_write(val, acp_mmio, req_reg);
417
418 while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
419 if (!loops--) {
420 pr_err("ACP SRAM bank %d state change failed\n", bank);
421 break;
422 }
423 cpu_relax();
424 }
425 }
426
427 /* Initialize the ACP hardware and bring it to its default state. */
428 static int acp_init(void __iomem *acp_mmio)
429 {
430 u16 bank;
431 u32 val, count, sram_pte_offset;
432
433 /* Assert Soft reset of ACP */
434 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
435
436 val |= ACP_SOFT_RESET__SoftResetAud_MASK;
437 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
438
439 count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
440 while (true) {
441 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
442 if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
443 (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
444 break;
445 if (--count == 0) {
446 pr_err("Failed to reset ACP\n");
447 return -ETIMEDOUT;
448 }
449 udelay(100);
450 }
451
452 /* Enable clock to ACP and wait until the clock is enabled */
453 val = acp_reg_read(acp_mmio, mmACP_CONTROL);
454 val = val | ACP_CONTROL__ClkEn_MASK;
455 acp_reg_write(val, acp_mmio, mmACP_CONTROL);
456
457 count = ACP_CLOCK_EN_TIME_OUT_VALUE;
458
459 while (true) {
460 val = acp_reg_read(acp_mmio, mmACP_STATUS);
461 if (val & (u32) 0x1)
462 break;
463 if (--count == 0) {
464 pr_err("Failed to reset ACP\n");
465 return -ETIMEDOUT;
466 }
467 udelay(100);
468 }
469
470 /* Deassert the SOFT RESET flags */
471 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
472 val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
473 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
474
475 /* initialize the Onion control DAGB register */
476 acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
477 mmACP_AXI2DAGB_ONION_CNTL);
478
479 /* initialize the Garlic control DAGB register */
480 acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
481 mmACP_AXI2DAGB_GARLIC_CNTL);
482
483 sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
484 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
485 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
486 ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
487 acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
488 acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
489 mmACP_DAGB_PAGE_SIZE_GRP_1);
490
491 acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
492 mmACP_DMA_DESC_BASE_ADDR);
493
494 /* Num of descriptors in SRAM: 0x4 means 256 descriptors (64 * 4) */
495 acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
496 acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
497 acp_mmio, mmACP_EXTERNAL_INTR_CNTL);
498
499 /* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
500 * Now, turn off all of them. This can't be done in 'poweron' of
501 * ACP pm domain, as this requires ACP to be initialized.
502 */
503 for (bank = 1; bank < 48; bank++)
504 acp_set_sram_bank_state(acp_mmio, bank, false);
505
506 return 0;
507 }
508
509 /* Deinitialize the ACP */
510 static int acp_deinit(void __iomem *acp_mmio)
511 {
512 u32 val;
513 u32 count;
514
515 /* Assert Soft reset of ACP */
516 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
517
518 val |= ACP_SOFT_RESET__SoftResetAud_MASK;
519 acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);
520
521 count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
522 while (true) {
523 val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
524 if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
525 (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
526 break;
527 if (--count == 0) {
528 pr_err("Failed to reset ACP\n");
529 return -ETIMEDOUT;
530 }
531 udelay(100);
532 }
533 /* Disable ACP clock */
534 val = acp_reg_read(acp_mmio, mmACP_CONTROL);
535 val &= ~ACP_CONTROL__ClkEn_MASK;
536 acp_reg_write(val, acp_mmio, mmACP_CONTROL);
537
538 count = ACP_CLOCK_EN_TIME_OUT_VALUE;
539
540 while (true) {
541 val = acp_reg_read(acp_mmio, mmACP_STATUS);
542 if (!(val & (u32) 0x1))
543 break;
544 if (--count == 0) {
545 pr_err("Failed to reset ACP\n");
546 return -ETIMEDOUT;
547 }
548 udelay(100);
549 }
550 return 0;
551 }
552
553 /* ACP DMA IRQ handler routine for playback and capture use cases */
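/*
 * On each period completion on the I2S side, the handler re-programs the
 * corresponding system memory <-> ACP SRAM channel with the next single
 * (non-circular) descriptor and restarts it, signals snd_pcm_period_elapsed()
 * at the appropriate point, and acknowledges the handled interrupt bits by
 * writing them back to mmACP_EXTERNAL_INTR_STAT.
 */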
554 static irqreturn_t dma_irq_handler(int irq, void *arg)
555 {
556 u16 dscr_idx;
557 u32 intr_flag, ext_intr_status;
558 struct audio_drv_data *irq_data;
559 void __iomem *acp_mmio;
560 struct device *dev = arg;
561 bool valid_irq = false;
562
563 irq_data = dev_get_drvdata(dev);
564 acp_mmio = irq_data->acp_mmio;
565
566 ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
567 intr_flag = (((ext_intr_status &
568 ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
569 ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));
570
571 if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
572 valid_irq = true;
573 if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
574 PLAYBACK_START_DMA_DESCR_CH13)
575 dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
576 else
577 dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
578 config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
579 1, 0);
580 acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);
581
582 snd_pcm_period_elapsed(irq_data->play_stream);
583
584 acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
585 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
586 }
587
588 if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
589 valid_irq = true;
590 if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
591 CAPTURE_START_DMA_DESCR_CH15)
592 dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
593 else
594 dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
595 config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
596 1, 0);
597 acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);
598
599 acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
600 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
601 }
602
603 if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
604 valid_irq = true;
605 snd_pcm_period_elapsed(irq_data->capture_stream);
606 acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
607 acp_mmio, mmACP_EXTERNAL_INTR_STAT);
608 }
609
610 if (valid_irq)
611 return IRQ_HANDLED;
612 else
613 return IRQ_NONE;
614 }
615
616 static int acp_dma_open(struct snd_pcm_substream *substream)
617 {
618 u16 bank;
619 int ret = 0;
620 struct snd_pcm_runtime *runtime = substream->runtime;
621 struct snd_soc_pcm_runtime *prtd = substream->private_data;
622 struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);
623
624 struct audio_substream_data *adata =
625 kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
626 if (adata == NULL)
627 return -ENOMEM;
628
629 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
630 runtime->hw = acp_pcm_hardware_playback;
631 else
632 runtime->hw = acp_pcm_hardware_capture;
633
634 ret = snd_pcm_hw_constraint_integer(runtime,
635 SNDRV_PCM_HW_PARAM_PERIODS);
636 if (ret < 0) {
637 dev_err(prtd->platform->dev, "set integer constraint failed\n");
638 kfree(adata);
639 return ret;
640 }
641
642 adata->acp_mmio = intr_data->acp_mmio;
643 runtime->private_data = adata;
644
645 /* Enable the ACP IRQ when neither playback nor capture streams are
646 * active at the time a new stream is opened. Enabling it again is
647 * not required for another stream while the current stream remains
648 * open.
649 */
650 if (!intr_data->play_stream && !intr_data->capture_stream)
651 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
652
653 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
654 intr_data->play_stream = substream;
655 for (bank = 1; bank <= 4; bank++)
656 acp_set_sram_bank_state(intr_data->acp_mmio, bank,
657 true);
658 } else {
659 intr_data->capture_stream = substream;
660 for (bank = 5; bank <= 8; bank++)
661 acp_set_sram_bank_state(intr_data->acp_mmio, bank,
662 true);
663 }
664
665 return 0;
666 }
667
668 static int acp_dma_hw_params(struct snd_pcm_substream *substream,
669 struct snd_pcm_hw_params *params)
670 {
671 int status;
672 uint64_t size;
673 struct snd_dma_buffer *dma_buffer;
674 struct page *pg;
675 struct snd_pcm_runtime *runtime;
676 struct audio_substream_data *rtd;
677
678 dma_buffer = &substream->dma_buffer;
679
680 runtime = substream->runtime;
681 rtd = runtime->private_data;
682
683 if (WARN_ON(!rtd))
684 return -EINVAL;
685
686 size = params_buffer_bytes(params);
687 status = snd_pcm_lib_malloc_pages(substream, size);
688 if (status < 0)
689 return status;
690
691 memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
692 pg = virt_to_page(substream->dma_buffer.area);
693
694 if (pg != NULL) {
695 acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
696 /* Save in the runtime private data */
697 rtd->pg = pg;
698 rtd->order = get_order(size);
699
700 /* Fill the page table entries in ACP SRAM */
701 rtd->pg = pg;
702 rtd->size = size;
703 rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
704 rtd->direction = substream->stream;
705
706 config_acp_dma(rtd->acp_mmio, rtd);
707 status = 0;
708 } else {
709 status = -ENOMEM;
710 }
711 return status;
712 }
713
714 static int acp_dma_hw_free(struct snd_pcm_substream *substream)
715 {
716 return snd_pcm_lib_free_pages(substream);
717 }
718
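/*
 * The stream position is derived from the channel's current descriptor
 * register: with two descriptors per channel (one per buffer half), the
 * descriptor currently in flight indicates which period boundary was
 * crossed last, which effectively gives period-granularity resolution.
 */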
719 static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
720 {
721 u16 dscr;
722 u32 mul, dma_config, period_bytes;
723 u32 pos = 0;
724
725 struct snd_pcm_runtime *runtime = substream->runtime;
726 struct audio_substream_data *rtd = runtime->private_data;
727
728 period_bytes = frames_to_bytes(runtime, runtime->period_size);
729 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
730 dscr = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CUR_DSCR_13);
731
732 if (dscr == PLAYBACK_START_DMA_DESCR_CH13)
733 mul = 0;
734 else
735 mul = 1;
736 pos = (mul * period_bytes);
737 } else {
738 dma_config = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CNTL_14);
739 if (dma_config != 0) {
740 dscr = acp_reg_read(rtd->acp_mmio,
741 mmACP_DMA_CUR_DSCR_14);
742 if (dscr == CAPTURE_START_DMA_DESCR_CH14)
743 mul = 1;
744 else
745 mul = 2;
746 pos = (mul * period_bytes);
747 }
748
749 if (pos >= (2 * period_bytes))
750 pos = 0;
751
752 }
753 return bytes_to_frames(runtime, pos);
754 }
755
756 static int acp_dma_mmap(struct snd_pcm_substream *substream,
757 struct vm_area_struct *vma)
758 {
759 return snd_pcm_lib_default_mmap(substream, vma);
760 }
761
762 static int acp_dma_prepare(struct snd_pcm_substream *substream)
763 {
764 struct snd_pcm_runtime *runtime = substream->runtime;
765 struct audio_substream_data *rtd = runtime->private_data;
766
767 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
768 config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
769 PLAYBACK_START_DMA_DESCR_CH12,
770 NUM_DSCRS_PER_CHANNEL, 0);
771 config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
772 PLAYBACK_START_DMA_DESCR_CH13,
773 NUM_DSCRS_PER_CHANNEL, 0);
774 /* Fill ACP SRAM (2 periods) with zeros from System RAM
775 * which is zeroed in hw_params
776 */
777 acp_dma_start(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);
778
779 /* ACP SRAM (2 periods of buffer size) is initially filled with
780 * zeros. Before rendering starts, the 2nd half of SRAM will be
781 * filled with valid audio data DMA'ed from the first half of system
782 * RAM and the 1st half of SRAM will be filled with zeros. This is
783 * the initial scenario when rendering starts from SRAM. Later
784 * on, the 2nd half of system memory will be DMA'ed to the 1st half of
785 * SRAM and the 1st half of system memory will be DMA'ed to the 2nd half
786 * of SRAM in a ping-pong fashion until rendering stops.
787 */
788 config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
789 PLAYBACK_START_DMA_DESCR_CH12,
790 1, 0);
791 } else {
792 config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
793 CAPTURE_START_DMA_DESCR_CH14,
794 NUM_DSCRS_PER_CHANNEL, 0);
795 config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
796 CAPTURE_START_DMA_DESCR_CH15,
797 NUM_DSCRS_PER_CHANNEL, 0);
798 }
799 return 0;
800 }
801
802 static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
803 {
804 int ret;
805 u32 loops = 1000;
806
807 struct snd_pcm_runtime *runtime = substream->runtime;
808 struct snd_soc_pcm_runtime *prtd = substream->private_data;
809 struct audio_substream_data *rtd = runtime->private_data;
810
811 if (!rtd)
812 return -EINVAL;
813 switch (cmd) {
814 case SNDRV_PCM_TRIGGER_START:
815 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
816 case SNDRV_PCM_TRIGGER_RESUME:
817 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
818 acp_dma_start(rtd->acp_mmio,
819 SYSRAM_TO_ACP_CH_NUM, false);
820 while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
821 BIT(SYSRAM_TO_ACP_CH_NUM)) {
822 if (!loops--) {
823 dev_err(prtd->platform->dev,
824 "acp dma start timeout\n");
825 return -ETIMEDOUT;
826 }
827 cpu_relax();
828 }
829
830 acp_dma_start(rtd->acp_mmio,
831 ACP_TO_I2S_DMA_CH_NUM, true);
832
833 } else {
834 acp_dma_start(rtd->acp_mmio,
835 I2S_TO_ACP_DMA_CH_NUM, true);
836 }
837 ret = 0;
838 break;
839 case SNDRV_PCM_TRIGGER_STOP:
840 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
841 case SNDRV_PCM_TRIGGER_SUSPEND:
842 /* Only the circular DMA channels need to be stopped:
843 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. The non-circular
844 * channels (SYSRAM_TO_ACP_CH_NUM / ACP_TO_SYSRAM_CH_NUM) stop
845 * automatically once their transfers complete.
846 */
847 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
848 ret = acp_dma_stop(rtd->acp_mmio,
849 ACP_TO_I2S_DMA_CH_NUM);
850 else
851 ret = acp_dma_stop(rtd->acp_mmio,
852 I2S_TO_ACP_DMA_CH_NUM);
853 break;
854 default:
855 ret = -EINVAL;
856
857 }
858 return ret;
859 }
860
861 static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
862 {
863 return snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
864 SNDRV_DMA_TYPE_DEV,
865 NULL, MIN_BUFFER,
866 MAX_BUFFER);
867 }
868
869 static int acp_dma_close(struct snd_pcm_substream *substream)
870 {
871 u16 bank;
872 struct snd_pcm_runtime *runtime = substream->runtime;
873 struct audio_substream_data *rtd = runtime->private_data;
874 struct snd_soc_pcm_runtime *prtd = substream->private_data;
875 struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);
876
877 kfree(rtd);
878
879 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
880 adata->play_stream = NULL;
881 for (bank = 1; bank <= 4; bank++)
882 acp_set_sram_bank_state(adata->acp_mmio, bank,
883 false);
884 } else {
885 adata->capture_stream = NULL;
886 for (bank = 5; bank <= 8; bank++)
887 acp_set_sram_bank_state(adata->acp_mmio, bank,
888 false);
889 }
890
891 /* Disable the ACP IRQ when the current stream is being closed and
892 * no other stream is active.
893 */
894 if (!adata->play_stream && !adata->capture_stream)
895 acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
896
897 return 0;
898 }
899
900 static const struct snd_pcm_ops acp_dma_ops = {
901 .open = acp_dma_open,
902 .close = acp_dma_close,
903 .ioctl = snd_pcm_lib_ioctl,
904 .hw_params = acp_dma_hw_params,
905 .hw_free = acp_dma_hw_free,
906 .trigger = acp_dma_trigger,
907 .pointer = acp_dma_pointer,
908 .mmap = acp_dma_mmap,
909 .prepare = acp_dma_prepare,
910 };
911
912 static struct snd_soc_platform_driver acp_asoc_platform = {
913 .ops = &acp_dma_ops,
914 .pcm_new = acp_dma_new,
915 };
916
917 static int acp_audio_probe(struct platform_device *pdev)
918 {
919 int status;
920 struct audio_drv_data *audio_drv_data;
921 struct resource *res;
922
923 audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
924 GFP_KERNEL);
925 if (audio_drv_data == NULL)
926 return -ENOMEM;
927
928 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
929 audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
930
931 /* The following members get populated in the device 'open'
932 * function. Until then, interrupts are disabled in 'acp_init'
933 * and the device doesn't generate any interrupts.
934 */
935
936 audio_drv_data->play_stream = NULL;
937 audio_drv_data->capture_stream = NULL;
938
939 res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
940 if (!res) {
941 dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
942 return -ENODEV;
943 }
944
945 status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
946 0, "ACP_IRQ", &pdev->dev);
947 if (status) {
948 dev_err(&pdev->dev, "ACP IRQ request failed\n");
949 return status;
950 }
951
952 dev_set_drvdata(&pdev->dev, audio_drv_data);
953
954 /* Initialize the ACP */
955 acp_init(audio_drv_data->acp_mmio);
956
957 status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
958 if (status != 0) {
959 dev_err(&pdev->dev, "Failed to register ALSA platform device\n");
960 return status;
961 }
962
963 pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
964 pm_runtime_use_autosuspend(&pdev->dev);
965 pm_runtime_enable(&pdev->dev);
966
967 return status;
968 }
969
970 static int acp_audio_remove(struct platform_device *pdev)
971 {
972 struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);
973
974 acp_deinit(adata->acp_mmio);
975 snd_soc_unregister_platform(&pdev->dev);
976 pm_runtime_disable(&pdev->dev);
977
978 return 0;
979 }
980
981 static int acp_pcm_resume(struct device *dev)
982 {
983 u16 bank;
984 struct audio_drv_data *adata = dev_get_drvdata(dev);
985
986 acp_init(adata->acp_mmio);
987
988 if (adata->play_stream && adata->play_stream->runtime) {
989 for (bank = 1; bank <= 4; bank++)
990 acp_set_sram_bank_state(adata->acp_mmio, bank,
991 true);
992 config_acp_dma(adata->acp_mmio,
993 adata->play_stream->runtime->private_data);
994 }
995 if (adata->capture_stream && adata->capture_stream->runtime) {
996 for (bank = 5; bank <= 8; bank++)
997 acp_set_sram_bank_state(adata->acp_mmio, bank,
998 true);
999 config_acp_dma(adata->acp_mmio,
1000 adata->capture_stream->runtime->private_data);
1001 }
1002 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1003 return 0;
1004 }
1005
1006 static int acp_pcm_runtime_suspend(struct device *dev)
1007 {
1008 struct audio_drv_data *adata = dev_get_drvdata(dev);
1009
1010 acp_deinit(adata->acp_mmio);
1011 acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1012 return 0;
1013 }
1014
1015 static int acp_pcm_runtime_resume(struct device *dev)
1016 {
1017 struct audio_drv_data *adata = dev_get_drvdata(dev);
1018
1019 acp_init(adata->acp_mmio);
1020 acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
1021 return 0;
1022 }
1023
1024 static const struct dev_pm_ops acp_pm_ops = {
1025 .resume = acp_pcm_resume,
1026 .runtime_suspend = acp_pcm_runtime_suspend,
1027 .runtime_resume = acp_pcm_runtime_resume,
1028 };
1029
1030 static struct platform_driver acp_dma_driver = {
1031 .probe = acp_audio_probe,
1032 .remove = acp_audio_remove,
1033 .driver = {
1034 .name = "acp_audio_dma",
1035 .pm = &acp_pm_ops,
1036 },
1037 };
1038
1039 module_platform_driver(acp_dma_driver);
1040
1041 MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
1042 MODULE_DESCRIPTION("AMD ACP PCM Driver");
1043 MODULE_LICENSE("GPL v2");
1044 MODULE_ALIAS("platform:acp-dma-audio");