2 * QEMU model of the ZynqMP generic DMA
4 * Copyright (c) 2014 Xilinx Inc.
5 * Copyright (c) 2018 FEIMTECH AB
7 * Written by Edgar E. Iglesias <edgar.iglesias@xilinx.com>,
8 * Francisco Iglesias <francisco.iglesias@feimtech.se>
10 * Permission is hereby granted, free of charge, to any person obtaining a copy
11 * of this software and associated documentation files (the "Software"), to deal
12 * in the Software without restriction, including without limitation the rights
13 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 * copies of the Software, and to permit persons to whom the Software is
15 * furnished to do so, subject to the following conditions:
17 * The above copyright notice and this permission notice shall be included in
18 * all copies or substantial portions of the Software.
20 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
23 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#include "qemu/osdep.h"
#include "hw/dma/xlnx-zdma.h"
#include "hw/irq.h"
#include "migration/vmstate.h"
#include "qemu/bitops.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "qapi/error.h"
38 #ifndef XLNX_ZDMA_ERR_DEBUG
39 #define XLNX_ZDMA_ERR_DEBUG 0
42 REG32(ZDMA_ERR_CTRL
, 0x0)
43 FIELD(ZDMA_ERR_CTRL
, APB_ERR_RES
, 0, 1)
44 REG32(ZDMA_CH_ISR
, 0x100)
45 FIELD(ZDMA_CH_ISR
, DMA_PAUSE
, 11, 1)
46 FIELD(ZDMA_CH_ISR
, DMA_DONE
, 10, 1)
47 FIELD(ZDMA_CH_ISR
, AXI_WR_DATA
, 9, 1)
48 FIELD(ZDMA_CH_ISR
, AXI_RD_DATA
, 8, 1)
49 FIELD(ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, 7, 1)
50 FIELD(ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, 6, 1)
51 FIELD(ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, 5, 1)
52 FIELD(ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, 4, 1)
53 FIELD(ZDMA_CH_ISR
, BYTE_CNT_OVRFL
, 3, 1)
54 FIELD(ZDMA_CH_ISR
, DST_DSCR_DONE
, 2, 1)
55 FIELD(ZDMA_CH_ISR
, SRC_DSCR_DONE
, 1, 1)
56 FIELD(ZDMA_CH_ISR
, INV_APB
, 0, 1)
57 REG32(ZDMA_CH_IMR
, 0x104)
58 FIELD(ZDMA_CH_IMR
, DMA_PAUSE
, 11, 1)
59 FIELD(ZDMA_CH_IMR
, DMA_DONE
, 10, 1)
60 FIELD(ZDMA_CH_IMR
, AXI_WR_DATA
, 9, 1)
61 FIELD(ZDMA_CH_IMR
, AXI_RD_DATA
, 8, 1)
62 FIELD(ZDMA_CH_IMR
, AXI_RD_DST_DSCR
, 7, 1)
63 FIELD(ZDMA_CH_IMR
, AXI_RD_SRC_DSCR
, 6, 1)
64 FIELD(ZDMA_CH_IMR
, IRQ_DST_ACCT_ERR
, 5, 1)
65 FIELD(ZDMA_CH_IMR
, IRQ_SRC_ACCT_ERR
, 4, 1)
66 FIELD(ZDMA_CH_IMR
, BYTE_CNT_OVRFL
, 3, 1)
67 FIELD(ZDMA_CH_IMR
, DST_DSCR_DONE
, 2, 1)
68 FIELD(ZDMA_CH_IMR
, SRC_DSCR_DONE
, 1, 1)
69 FIELD(ZDMA_CH_IMR
, INV_APB
, 0, 1)
70 REG32(ZDMA_CH_IEN
, 0x108)
71 FIELD(ZDMA_CH_IEN
, DMA_PAUSE
, 11, 1)
72 FIELD(ZDMA_CH_IEN
, DMA_DONE
, 10, 1)
73 FIELD(ZDMA_CH_IEN
, AXI_WR_DATA
, 9, 1)
74 FIELD(ZDMA_CH_IEN
, AXI_RD_DATA
, 8, 1)
75 FIELD(ZDMA_CH_IEN
, AXI_RD_DST_DSCR
, 7, 1)
76 FIELD(ZDMA_CH_IEN
, AXI_RD_SRC_DSCR
, 6, 1)
77 FIELD(ZDMA_CH_IEN
, IRQ_DST_ACCT_ERR
, 5, 1)
78 FIELD(ZDMA_CH_IEN
, IRQ_SRC_ACCT_ERR
, 4, 1)
79 FIELD(ZDMA_CH_IEN
, BYTE_CNT_OVRFL
, 3, 1)
80 FIELD(ZDMA_CH_IEN
, DST_DSCR_DONE
, 2, 1)
81 FIELD(ZDMA_CH_IEN
, SRC_DSCR_DONE
, 1, 1)
82 FIELD(ZDMA_CH_IEN
, INV_APB
, 0, 1)
83 REG32(ZDMA_CH_IDS
, 0x10c)
84 FIELD(ZDMA_CH_IDS
, DMA_PAUSE
, 11, 1)
85 FIELD(ZDMA_CH_IDS
, DMA_DONE
, 10, 1)
86 FIELD(ZDMA_CH_IDS
, AXI_WR_DATA
, 9, 1)
87 FIELD(ZDMA_CH_IDS
, AXI_RD_DATA
, 8, 1)
88 FIELD(ZDMA_CH_IDS
, AXI_RD_DST_DSCR
, 7, 1)
89 FIELD(ZDMA_CH_IDS
, AXI_RD_SRC_DSCR
, 6, 1)
90 FIELD(ZDMA_CH_IDS
, IRQ_DST_ACCT_ERR
, 5, 1)
91 FIELD(ZDMA_CH_IDS
, IRQ_SRC_ACCT_ERR
, 4, 1)
92 FIELD(ZDMA_CH_IDS
, BYTE_CNT_OVRFL
, 3, 1)
93 FIELD(ZDMA_CH_IDS
, DST_DSCR_DONE
, 2, 1)
94 FIELD(ZDMA_CH_IDS
, SRC_DSCR_DONE
, 1, 1)
95 FIELD(ZDMA_CH_IDS
, INV_APB
, 0, 1)
96 REG32(ZDMA_CH_CTRL0
, 0x110)
97 FIELD(ZDMA_CH_CTRL0
, OVR_FETCH
, 7, 1)
98 FIELD(ZDMA_CH_CTRL0
, POINT_TYPE
, 6, 1)
99 FIELD(ZDMA_CH_CTRL0
, MODE
, 4, 2)
100 FIELD(ZDMA_CH_CTRL0
, RATE_CTRL
, 3, 1)
101 FIELD(ZDMA_CH_CTRL0
, CONT_ADDR
, 2, 1)
102 FIELD(ZDMA_CH_CTRL0
, CONT
, 1, 1)
103 REG32(ZDMA_CH_CTRL1
, 0x114)
104 FIELD(ZDMA_CH_CTRL1
, DST_ISSUE
, 5, 5)
105 FIELD(ZDMA_CH_CTRL1
, SRC_ISSUE
, 0, 5)
106 REG32(ZDMA_CH_FCI
, 0x118)
107 FIELD(ZDMA_CH_FCI
, PROG_CELL_CNT
, 2, 2)
108 FIELD(ZDMA_CH_FCI
, SIDE
, 1, 1)
109 FIELD(ZDMA_CH_FCI
, EN
, 0, 1)
110 REG32(ZDMA_CH_STATUS
, 0x11c)
111 FIELD(ZDMA_CH_STATUS
, STATE
, 0, 2)
112 REG32(ZDMA_CH_DATA_ATTR
, 0x120)
113 FIELD(ZDMA_CH_DATA_ATTR
, ARBURST
, 26, 2)
114 FIELD(ZDMA_CH_DATA_ATTR
, ARCACHE
, 22, 4)
115 FIELD(ZDMA_CH_DATA_ATTR
, ARQOS
, 18, 4)
116 FIELD(ZDMA_CH_DATA_ATTR
, ARLEN
, 14, 4)
117 FIELD(ZDMA_CH_DATA_ATTR
, AWBURST
, 12, 2)
118 FIELD(ZDMA_CH_DATA_ATTR
, AWCACHE
, 8, 4)
119 FIELD(ZDMA_CH_DATA_ATTR
, AWQOS
, 4, 4)
120 FIELD(ZDMA_CH_DATA_ATTR
, AWLEN
, 0, 4)
121 REG32(ZDMA_CH_DSCR_ATTR
, 0x124)
122 FIELD(ZDMA_CH_DSCR_ATTR
, AXCOHRNT
, 8, 1)
123 FIELD(ZDMA_CH_DSCR_ATTR
, AXCACHE
, 4, 4)
124 FIELD(ZDMA_CH_DSCR_ATTR
, AXQOS
, 0, 4)
125 REG32(ZDMA_CH_SRC_DSCR_WORD0
, 0x128)
126 REG32(ZDMA_CH_SRC_DSCR_WORD1
, 0x12c)
127 FIELD(ZDMA_CH_SRC_DSCR_WORD1
, MSB
, 0, 17)
128 REG32(ZDMA_CH_SRC_DSCR_WORD2
, 0x130)
129 FIELD(ZDMA_CH_SRC_DSCR_WORD2
, SIZE
, 0, 30)
130 REG32(ZDMA_CH_SRC_DSCR_WORD3
, 0x134)
131 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, CMD
, 3, 2)
132 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, INTR
, 2, 1)
133 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, TYPE
, 1, 1)
134 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, COHRNT
, 0, 1)
135 REG32(ZDMA_CH_DST_DSCR_WORD0
, 0x138)
136 REG32(ZDMA_CH_DST_DSCR_WORD1
, 0x13c)
137 FIELD(ZDMA_CH_DST_DSCR_WORD1
, MSB
, 0, 17)
138 REG32(ZDMA_CH_DST_DSCR_WORD2
, 0x140)
139 FIELD(ZDMA_CH_DST_DSCR_WORD2
, SIZE
, 0, 30)
140 REG32(ZDMA_CH_DST_DSCR_WORD3
, 0x144)
141 FIELD(ZDMA_CH_DST_DSCR_WORD3
, INTR
, 2, 1)
142 FIELD(ZDMA_CH_DST_DSCR_WORD3
, TYPE
, 1, 1)
143 FIELD(ZDMA_CH_DST_DSCR_WORD3
, COHRNT
, 0, 1)
144 REG32(ZDMA_CH_WR_ONLY_WORD0
, 0x148)
145 REG32(ZDMA_CH_WR_ONLY_WORD1
, 0x14c)
146 REG32(ZDMA_CH_WR_ONLY_WORD2
, 0x150)
147 REG32(ZDMA_CH_WR_ONLY_WORD3
, 0x154)
148 REG32(ZDMA_CH_SRC_START_LSB
, 0x158)
149 REG32(ZDMA_CH_SRC_START_MSB
, 0x15c)
150 FIELD(ZDMA_CH_SRC_START_MSB
, ADDR
, 0, 17)
151 REG32(ZDMA_CH_DST_START_LSB
, 0x160)
152 REG32(ZDMA_CH_DST_START_MSB
, 0x164)
153 FIELD(ZDMA_CH_DST_START_MSB
, ADDR
, 0, 17)
154 REG32(ZDMA_CH_RATE_CTRL
, 0x18c)
155 FIELD(ZDMA_CH_RATE_CTRL
, CNT
, 0, 12)
156 REG32(ZDMA_CH_SRC_CUR_PYLD_LSB
, 0x168)
157 REG32(ZDMA_CH_SRC_CUR_PYLD_MSB
, 0x16c)
158 FIELD(ZDMA_CH_SRC_CUR_PYLD_MSB
, ADDR
, 0, 17)
159 REG32(ZDMA_CH_DST_CUR_PYLD_LSB
, 0x170)
160 REG32(ZDMA_CH_DST_CUR_PYLD_MSB
, 0x174)
161 FIELD(ZDMA_CH_DST_CUR_PYLD_MSB
, ADDR
, 0, 17)
162 REG32(ZDMA_CH_SRC_CUR_DSCR_LSB
, 0x178)
163 REG32(ZDMA_CH_SRC_CUR_DSCR_MSB
, 0x17c)
164 FIELD(ZDMA_CH_SRC_CUR_DSCR_MSB
, ADDR
, 0, 17)
165 REG32(ZDMA_CH_DST_CUR_DSCR_LSB
, 0x180)
166 REG32(ZDMA_CH_DST_CUR_DSCR_MSB
, 0x184)
167 FIELD(ZDMA_CH_DST_CUR_DSCR_MSB
, ADDR
, 0, 17)
168 REG32(ZDMA_CH_TOTAL_BYTE
, 0x188)
169 REG32(ZDMA_CH_RATE_CNTL
, 0x18c)
170 FIELD(ZDMA_CH_RATE_CNTL
, CNT
, 0, 12)
171 REG32(ZDMA_CH_IRQ_SRC_ACCT
, 0x190)
172 FIELD(ZDMA_CH_IRQ_SRC_ACCT
, CNT
, 0, 8)
173 REG32(ZDMA_CH_IRQ_DST_ACCT
, 0x194)
174 FIELD(ZDMA_CH_IRQ_DST_ACCT
, CNT
, 0, 8)
175 REG32(ZDMA_CH_DBG0
, 0x198)
176 FIELD(ZDMA_CH_DBG0
, CMN_BUF_FREE
, 0, 9)
177 REG32(ZDMA_CH_DBG1
, 0x19c)
178 FIELD(ZDMA_CH_DBG1
, CMN_BUF_OCC
, 0, 9)
179 REG32(ZDMA_CH_CTRL2
, 0x200)
180 FIELD(ZDMA_CH_CTRL2
, EN
, 0, 1)
208 static void zdma_ch_imr_update_irq(XlnxZDMA
*s
)
212 pending
= s
->regs
[R_ZDMA_CH_ISR
] & ~s
->regs
[R_ZDMA_CH_IMR
];
214 qemu_set_irq(s
->irq_zdma_ch_imr
, pending
);
217 static void zdma_ch_isr_postw(RegisterInfo
*reg
, uint64_t val64
)
219 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
220 zdma_ch_imr_update_irq(s
);
223 static uint64_t zdma_ch_ien_prew(RegisterInfo
*reg
, uint64_t val64
)
225 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
226 uint32_t val
= val64
;
228 s
->regs
[R_ZDMA_CH_IMR
] &= ~val
;
229 zdma_ch_imr_update_irq(s
);
233 static uint64_t zdma_ch_ids_prew(RegisterInfo
*reg
, uint64_t val64
)
235 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
236 uint32_t val
= val64
;
238 s
->regs
[R_ZDMA_CH_IMR
] |= val
;
239 zdma_ch_imr_update_irq(s
);
243 static void zdma_set_state(XlnxZDMA
*s
, XlnxZDMAState state
)
246 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, state
);
248 /* Signal error if we have an error condition. */
250 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, 3);
254 static void zdma_src_done(XlnxZDMA
*s
)
257 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
);
259 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
, cnt
);
260 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, SRC_DSCR_DONE
, true);
262 /* Did we overflow? */
263 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
)) {
264 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, true);
266 zdma_ch_imr_update_irq(s
);
269 static void zdma_dst_done(XlnxZDMA
*s
)
272 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
);
274 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
, cnt
);
275 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DST_DSCR_DONE
, true);
277 /* Did we overflow? */
278 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
)) {
279 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, true);
281 zdma_ch_imr_update_irq(s
);
284 static uint64_t zdma_get_regaddr64(XlnxZDMA
*s
, unsigned int basereg
)
288 addr
= s
->regs
[basereg
+ 1];
290 addr
|= s
->regs
[basereg
];
295 static void zdma_put_regaddr64(XlnxZDMA
*s
, unsigned int basereg
, uint64_t addr
)
297 s
->regs
[basereg
] = addr
;
298 s
->regs
[basereg
+ 1] = addr
>> 32;
301 static bool zdma_load_descriptor(XlnxZDMA
*s
, uint64_t addr
, void *buf
)
303 /* ZDMA descriptors must be aligned to their own size. */
304 if (addr
% sizeof(XlnxZDMADescr
)) {
305 qemu_log_mask(LOG_GUEST_ERROR
,
306 "zdma: unaligned descriptor at %" PRIx64
,
308 memset(buf
, 0x0, sizeof(XlnxZDMADescr
));
313 address_space_rw(s
->dma_as
, addr
, s
->attr
,
314 buf
, sizeof(XlnxZDMADescr
), false);
318 static void zdma_load_src_descriptor(XlnxZDMA
*s
)
321 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
323 if (ptype
== PT_REG
) {
324 memcpy(&s
->dsc_src
, &s
->regs
[R_ZDMA_CH_SRC_DSCR_WORD0
],
329 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
331 if (!zdma_load_descriptor(s
, src_addr
, &s
->dsc_src
)) {
332 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, true);
336 static void zdma_load_dst_descriptor(XlnxZDMA
*s
)
339 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
341 if (ptype
== PT_REG
) {
342 memcpy(&s
->dsc_dst
, &s
->regs
[R_ZDMA_CH_DST_DSCR_WORD0
],
347 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
);
349 if (!zdma_load_descriptor(s
, dst_addr
, &s
->dsc_dst
)) {
350 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, true);
354 static uint64_t zdma_update_descr_addr(XlnxZDMA
*s
, bool type
,
355 unsigned int basereg
)
359 if (type
== DTYPE_LINEAR
) {
360 next
= zdma_get_regaddr64(s
, basereg
);
361 next
+= sizeof(s
->dsc_dst
);
362 zdma_put_regaddr64(s
, basereg
, next
);
364 addr
= zdma_get_regaddr64(s
, basereg
);
365 addr
+= sizeof(s
->dsc_dst
);
366 address_space_rw(s
->dma_as
, addr
, s
->attr
, (void *) &next
, 8, false);
367 zdma_put_regaddr64(s
, basereg
, next
);
372 static void zdma_write_dst(XlnxZDMA
*s
, uint8_t *buf
, uint32_t len
)
374 uint32_t dst_size
, dlen
;
375 bool dst_intr
, dst_type
;
376 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
377 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
378 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
381 /* FIXED burst types are only supported in simple dma mode. */
382 if (ptype
!= PT_REG
) {
383 burst_type
= AXI_BURST_INCR
;
387 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
389 dst_type
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
391 if (dst_size
== 0 && ptype
== PT_MEM
) {
393 next
= zdma_update_descr_addr(s
, dst_type
,
394 R_ZDMA_CH_DST_CUR_DSCR_LSB
);
395 zdma_load_descriptor(s
, next
, &s
->dsc_dst
);
396 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
398 dst_type
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
402 /* Match what hardware does by ignoring the dst_size and only using
403 * the src size for Simple register mode. */
404 if (ptype
== PT_REG
&& rw_mode
!= RW_MODE_WO
) {
408 dst_intr
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
411 dlen
= len
> dst_size
? dst_size
: len
;
412 if (burst_type
== AXI_BURST_FIXED
) {
413 if (dlen
> (s
->cfg
.bus_width
/ 8)) {
414 dlen
= s
->cfg
.bus_width
/ 8;
418 address_space_rw(s
->dma_as
, s
->dsc_dst
.addr
, s
->attr
, buf
, dlen
,
420 if (burst_type
== AXI_BURST_INCR
) {
421 s
->dsc_dst
.addr
+= dlen
;
427 if (dst_size
== 0 && dst_intr
) {
431 /* Write back to buffered descriptor. */
432 s
->dsc_dst
.words
[2] = FIELD_DP32(s
->dsc_dst
.words
[2],
433 ZDMA_CH_DST_DSCR_WORD2
,
439 static void zdma_process_descr(XlnxZDMA
*s
)
442 uint32_t src_size
, len
;
443 unsigned int src_cmd
;
444 bool src_intr
, src_type
;
445 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
446 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
447 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
450 src_addr
= s
->dsc_src
.addr
;
451 src_size
= FIELD_EX32(s
->dsc_src
.words
[2], ZDMA_CH_SRC_DSCR_WORD2
, SIZE
);
452 src_cmd
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, CMD
);
453 src_type
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
454 src_intr
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, INTR
);
456 /* FIXED burst types and non-rw modes are only supported in
459 if (ptype
!= PT_REG
) {
460 if (rw_mode
!= RW_MODE_RW
) {
461 qemu_log_mask(LOG_GUEST_ERROR
,
462 "zDMA: rw-mode=%d but not simple DMA mode.\n",
465 if (burst_type
!= AXI_BURST_INCR
) {
466 qemu_log_mask(LOG_GUEST_ERROR
,
467 "zDMA: burst_type=%d but not simple DMA mode.\n",
470 burst_type
= AXI_BURST_INCR
;
471 rw_mode
= RW_MODE_RW
;
474 if (rw_mode
== RW_MODE_WO
) {
475 /* In Simple DMA Write-Only, we need to push DST size bytes
476 * regardless of what SRC size is set to. */
477 src_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
479 memcpy(s
->buf
, &s
->regs
[R_ZDMA_CH_WR_ONLY_WORD0
], s
->cfg
.bus_width
/ 8);
483 len
= src_size
> ARRAY_SIZE(s
->buf
) ? ARRAY_SIZE(s
->buf
) : src_size
;
484 if (burst_type
== AXI_BURST_FIXED
) {
485 if (len
> (s
->cfg
.bus_width
/ 8)) {
486 len
= s
->cfg
.bus_width
/ 8;
490 if (rw_mode
== RW_MODE_WO
) {
491 if (len
> s
->cfg
.bus_width
/ 8) {
492 len
= s
->cfg
.bus_width
/ 8;
495 address_space_rw(s
->dma_as
, src_addr
, s
->attr
, s
->buf
, len
,
497 if (burst_type
== AXI_BURST_INCR
) {
502 if (rw_mode
!= RW_MODE_RO
) {
503 zdma_write_dst(s
, s
->buf
, len
);
506 s
->regs
[R_ZDMA_CH_TOTAL_BYTE
] += len
;
510 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_DONE
, true);
516 /* Load next descriptor. */
517 if (ptype
== PT_REG
|| src_cmd
== CMD_STOP
) {
518 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL2
, EN
, 0);
519 zdma_set_state(s
, DISABLED
);
523 if (src_cmd
== CMD_HALT
) {
524 zdma_set_state(s
, PAUSED
);
525 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_PAUSE
, 1);
526 zdma_ch_imr_update_irq(s
);
530 zdma_update_descr_addr(s
, src_type
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
533 static void zdma_run(XlnxZDMA
*s
)
535 while (s
->state
== ENABLED
&& !s
->error
) {
536 zdma_load_src_descriptor(s
);
539 zdma_set_state(s
, DISABLED
);
541 zdma_process_descr(s
);
545 zdma_ch_imr_update_irq(s
);
548 static void zdma_update_descr_addr_from_start(XlnxZDMA
*s
)
550 uint64_t src_addr
, dst_addr
;
552 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_START_LSB
);
553 zdma_put_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
, src_addr
);
554 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_START_LSB
);
555 zdma_put_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
, dst_addr
);
556 zdma_load_dst_descriptor(s
);
559 static void zdma_ch_ctrlx_postw(RegisterInfo
*reg
, uint64_t val64
)
561 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
563 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL2
, EN
)) {
566 if (s
->state
== PAUSED
&&
567 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
568 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT_ADDR
) == 1) {
569 zdma_update_descr_addr_from_start(s
);
571 bool src_type
= FIELD_EX32(s
->dsc_src
.words
[3],
572 ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
573 zdma_update_descr_addr(s
, src_type
,
574 R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
576 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL0
, CONT
, false);
577 zdma_set_state(s
, ENABLED
);
578 } else if (s
->state
== DISABLED
) {
579 zdma_update_descr_addr_from_start(s
);
580 zdma_set_state(s
, ENABLED
);
583 /* Leave Paused state? */
584 if (s
->state
== PAUSED
&&
585 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
586 zdma_set_state(s
, DISABLED
);
593 static RegisterAccessInfo zdma_regs_info
[] = {
594 { .name
= "ZDMA_ERR_CTRL", .addr
= A_ZDMA_ERR_CTRL
,
596 },{ .name
= "ZDMA_CH_ISR", .addr
= A_ZDMA_CH_ISR
,
599 .post_write
= zdma_ch_isr_postw
,
600 },{ .name
= "ZDMA_CH_IMR", .addr
= A_ZDMA_CH_IMR
,
604 },{ .name
= "ZDMA_CH_IEN", .addr
= A_ZDMA_CH_IEN
,
606 .pre_write
= zdma_ch_ien_prew
,
607 },{ .name
= "ZDMA_CH_IDS", .addr
= A_ZDMA_CH_IDS
,
609 .pre_write
= zdma_ch_ids_prew
,
610 },{ .name
= "ZDMA_CH_CTRL0", .addr
= A_ZDMA_CH_CTRL0
,
613 .post_write
= zdma_ch_ctrlx_postw
,
614 },{ .name
= "ZDMA_CH_CTRL1", .addr
= A_ZDMA_CH_CTRL1
,
617 },{ .name
= "ZDMA_CH_FCI", .addr
= A_ZDMA_CH_FCI
,
619 },{ .name
= "ZDMA_CH_STATUS", .addr
= A_ZDMA_CH_STATUS
,
622 },{ .name
= "ZDMA_CH_DATA_ATTR", .addr
= A_ZDMA_CH_DATA_ATTR
,
625 },{ .name
= "ZDMA_CH_DSCR_ATTR", .addr
= A_ZDMA_CH_DSCR_ATTR
,
627 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD0", .addr
= A_ZDMA_CH_SRC_DSCR_WORD0
,
628 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD1", .addr
= A_ZDMA_CH_SRC_DSCR_WORD1
,
630 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD2", .addr
= A_ZDMA_CH_SRC_DSCR_WORD2
,
632 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD3", .addr
= A_ZDMA_CH_SRC_DSCR_WORD3
,
634 },{ .name
= "ZDMA_CH_DST_DSCR_WORD0", .addr
= A_ZDMA_CH_DST_DSCR_WORD0
,
635 },{ .name
= "ZDMA_CH_DST_DSCR_WORD1", .addr
= A_ZDMA_CH_DST_DSCR_WORD1
,
637 },{ .name
= "ZDMA_CH_DST_DSCR_WORD2", .addr
= A_ZDMA_CH_DST_DSCR_WORD2
,
639 },{ .name
= "ZDMA_CH_DST_DSCR_WORD3", .addr
= A_ZDMA_CH_DST_DSCR_WORD3
,
641 },{ .name
= "ZDMA_CH_WR_ONLY_WORD0", .addr
= A_ZDMA_CH_WR_ONLY_WORD0
,
642 },{ .name
= "ZDMA_CH_WR_ONLY_WORD1", .addr
= A_ZDMA_CH_WR_ONLY_WORD1
,
643 },{ .name
= "ZDMA_CH_WR_ONLY_WORD2", .addr
= A_ZDMA_CH_WR_ONLY_WORD2
,
644 },{ .name
= "ZDMA_CH_WR_ONLY_WORD3", .addr
= A_ZDMA_CH_WR_ONLY_WORD3
,
645 },{ .name
= "ZDMA_CH_SRC_START_LSB", .addr
= A_ZDMA_CH_SRC_START_LSB
,
646 },{ .name
= "ZDMA_CH_SRC_START_MSB", .addr
= A_ZDMA_CH_SRC_START_MSB
,
648 },{ .name
= "ZDMA_CH_DST_START_LSB", .addr
= A_ZDMA_CH_DST_START_LSB
,
649 },{ .name
= "ZDMA_CH_DST_START_MSB", .addr
= A_ZDMA_CH_DST_START_MSB
,
651 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_LSB
,
653 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_MSB
,
656 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_LSB
,
658 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_MSB
,
661 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_LSB
,
663 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_MSB
,
666 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_LSB
,
668 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_MSB
,
671 },{ .name
= "ZDMA_CH_TOTAL_BYTE", .addr
= A_ZDMA_CH_TOTAL_BYTE
,
673 },{ .name
= "ZDMA_CH_RATE_CNTL", .addr
= A_ZDMA_CH_RATE_CNTL
,
675 },{ .name
= "ZDMA_CH_IRQ_SRC_ACCT", .addr
= A_ZDMA_CH_IRQ_SRC_ACCT
,
679 },{ .name
= "ZDMA_CH_IRQ_DST_ACCT", .addr
= A_ZDMA_CH_IRQ_DST_ACCT
,
683 },{ .name
= "ZDMA_CH_DBG0", .addr
= A_ZDMA_CH_DBG0
,
686 },{ .name
= "ZDMA_CH_DBG1", .addr
= A_ZDMA_CH_DBG1
,
689 },{ .name
= "ZDMA_CH_CTRL2", .addr
= A_ZDMA_CH_CTRL2
,
691 .post_write
= zdma_ch_ctrlx_postw
,
695 static void zdma_reset(DeviceState
*dev
)
697 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
700 for (i
= 0; i
< ARRAY_SIZE(s
->regs_info
); ++i
) {
701 register_reset(&s
->regs_info
[i
]);
704 zdma_ch_imr_update_irq(s
);
707 static uint64_t zdma_read(void *opaque
, hwaddr addr
, unsigned size
)
709 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
710 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
713 gchar
*path
= object_get_canonical_path(OBJECT(s
));
714 qemu_log("%s: Decode error: read from %" HWADDR_PRIx
"\n",
718 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
719 zdma_ch_imr_update_irq(s
);
722 return register_read(r
, ~0, NULL
, false);
725 static void zdma_write(void *opaque
, hwaddr addr
, uint64_t value
,
728 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
729 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
732 gchar
*path
= object_get_canonical_path(OBJECT(s
));
733 qemu_log("%s: Decode error: write to %" HWADDR_PRIx
"=%" PRIx64
"\n",
737 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
738 zdma_ch_imr_update_irq(s
);
741 register_write(r
, value
, ~0, NULL
, false);
744 static const MemoryRegionOps zdma_ops
= {
747 .endianness
= DEVICE_LITTLE_ENDIAN
,
749 .min_access_size
= 4,
750 .max_access_size
= 4,
754 static void zdma_realize(DeviceState
*dev
, Error
**errp
)
756 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
759 for (i
= 0; i
< ARRAY_SIZE(zdma_regs_info
); ++i
) {
760 RegisterInfo
*r
= &s
->regs_info
[zdma_regs_info
[i
].addr
/ 4];
762 *r
= (RegisterInfo
) {
763 .data
= (uint8_t *)&s
->regs
[
764 zdma_regs_info
[i
].addr
/ 4],
765 .data_size
= sizeof(uint32_t),
766 .access
= &zdma_regs_info
[i
],
772 s
->dma_as
= g_malloc0(sizeof(AddressSpace
));
773 address_space_init(s
->dma_as
, s
->dma_mr
, NULL
);
775 s
->dma_as
= &address_space_memory
;
777 s
->attr
= MEMTXATTRS_UNSPECIFIED
;
780 static void zdma_init(Object
*obj
)
782 XlnxZDMA
*s
= XLNX_ZDMA(obj
);
783 SysBusDevice
*sbd
= SYS_BUS_DEVICE(obj
);
785 memory_region_init_io(&s
->iomem
, obj
, &zdma_ops
, s
,
786 TYPE_XLNX_ZDMA
, ZDMA_R_MAX
* 4);
787 sysbus_init_mmio(sbd
, &s
->iomem
);
788 sysbus_init_irq(sbd
, &s
->irq_zdma_ch_imr
);
790 object_property_add_link(obj
, "dma", TYPE_MEMORY_REGION
,
791 (Object
**)&s
->dma_mr
,
792 qdev_prop_allow_set_link_before_realize
,
793 OBJ_PROP_LINK_STRONG
,
797 static const VMStateDescription vmstate_zdma
= {
798 .name
= TYPE_XLNX_ZDMA
,
800 .minimum_version_id
= 1,
801 .minimum_version_id_old
= 1,
802 .fields
= (VMStateField
[]) {
803 VMSTATE_UINT32_ARRAY(regs
, XlnxZDMA
, ZDMA_R_MAX
),
804 VMSTATE_UINT32(state
, XlnxZDMA
),
805 VMSTATE_UINT32_ARRAY(dsc_src
.words
, XlnxZDMA
, 4),
806 VMSTATE_UINT32_ARRAY(dsc_dst
.words
, XlnxZDMA
, 4),
807 VMSTATE_END_OF_LIST(),
811 static Property zdma_props
[] = {
812 DEFINE_PROP_UINT32("bus-width", XlnxZDMA
, cfg
.bus_width
, 64),
813 DEFINE_PROP_END_OF_LIST(),
816 static void zdma_class_init(ObjectClass
*klass
, void *data
)
818 DeviceClass
*dc
= DEVICE_CLASS(klass
);
820 dc
->reset
= zdma_reset
;
821 dc
->realize
= zdma_realize
;
822 dc
->props
= zdma_props
;
823 dc
->vmsd
= &vmstate_zdma
;
826 static const TypeInfo zdma_info
= {
827 .name
= TYPE_XLNX_ZDMA
,
828 .parent
= TYPE_SYS_BUS_DEVICE
,
829 .instance_size
= sizeof(XlnxZDMA
),
830 .class_init
= zdma_class_init
,
831 .instance_init
= zdma_init
,
834 static void zdma_register_types(void)
836 type_register_static(&zdma_info
);
839 type_init(zdma_register_types
)