/*
 * Driver for STM32 DMA controller
 *
 * Inspired by dma-jz4740.c and tegra20-apb-dma.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2015
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/reset.h>
#include <linux/sched.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define STM32_DMA_LISR			0x0000 /* DMA Low Int Status Reg */
#define STM32_DMA_HISR			0x0004 /* DMA High Int Status Reg */
#define STM32_DMA_LIFCR			0x0008 /* DMA Low Int Flag Clear Reg */
#define STM32_DMA_HIFCR			0x000c /* DMA High Int Flag Clear Reg */
#define STM32_DMA_TCI			BIT(5) /* Transfer Complete Interrupt */
#define STM32_DMA_TEI			BIT(3) /* Transfer Error Interrupt */
#define STM32_DMA_DMEI			BIT(2) /* Direct Mode Error Interrupt */
#define STM32_DMA_FEI			BIT(0) /* FIFO Error Interrupt */

/* DMA Stream x Configuration Register */
#define STM32_DMA_SCR(x)		(0x0010 + 0x18 * (x)) /* x = 0..7 */
#define STM32_DMA_SCR_REQ(n)		((n & 0x7) << 25)
#define STM32_DMA_SCR_MBURST_MASK	GENMASK(24, 23)
#define STM32_DMA_SCR_MBURST(n)		((n & 0x3) << 23)
#define STM32_DMA_SCR_PBURST_MASK	GENMASK(22, 21)
#define STM32_DMA_SCR_PBURST(n)		((n & 0x3) << 21)
#define STM32_DMA_SCR_PL_MASK		GENMASK(17, 16)
#define STM32_DMA_SCR_PL(n)		((n & 0x3) << 16)
#define STM32_DMA_SCR_MSIZE_MASK	GENMASK(14, 13)
#define STM32_DMA_SCR_MSIZE(n)		((n & 0x3) << 13)
#define STM32_DMA_SCR_PSIZE_MASK	GENMASK(12, 11)
#define STM32_DMA_SCR_PSIZE(n)		((n & 0x3) << 11)
#define STM32_DMA_SCR_PSIZE_GET(n)	((n & STM32_DMA_SCR_PSIZE_MASK) >> 11)
#define STM32_DMA_SCR_DIR_MASK		GENMASK(7, 6)
#define STM32_DMA_SCR_DIR(n)		((n & 0x3) << 6)
#define STM32_DMA_SCR_CT		BIT(19) /* Target in double buffer */
#define STM32_DMA_SCR_DBM		BIT(18) /* Double Buffer Mode */
#define STM32_DMA_SCR_PINCOS		BIT(15) /* Peripheral inc offset size */
#define STM32_DMA_SCR_MINC		BIT(10) /* Memory increment mode */
#define STM32_DMA_SCR_PINC		BIT(9) /* Peripheral increment mode */
#define STM32_DMA_SCR_CIRC		BIT(8) /* Circular mode */
#define STM32_DMA_SCR_PFCTRL		BIT(5) /* Peripheral Flow Controller */
#define STM32_DMA_SCR_TCIE		BIT(4) /* Transfer Complete Int Enable */
#define STM32_DMA_SCR_TEIE		BIT(2) /* Transfer Error Int Enable */
#define STM32_DMA_SCR_DMEIE		BIT(1) /* Direct Mode Err Int Enable */
#define STM32_DMA_SCR_EN		BIT(0) /* Stream Enable */
#define STM32_DMA_SCR_CFG_MASK		(STM32_DMA_SCR_PINC \
					| STM32_DMA_SCR_MINC \
					| STM32_DMA_SCR_PINCOS \
					| STM32_DMA_SCR_PL_MASK)
#define STM32_DMA_SCR_IRQ_MASK		(STM32_DMA_SCR_TCIE \
					| STM32_DMA_SCR_TEIE \
					| STM32_DMA_SCR_DMEIE)

/* DMA Stream x number of data register */
#define STM32_DMA_SNDTR(x)		(0x0014 + 0x18 * (x))

/* DMA stream peripheral address register */
#define STM32_DMA_SPAR(x)		(0x0018 + 0x18 * (x))

/* DMA stream x memory 0 address register */
#define STM32_DMA_SM0AR(x)		(0x001c + 0x18 * (x))

/* DMA stream x memory 1 address register */
#define STM32_DMA_SM1AR(x)		(0x0020 + 0x18 * (x))

/* DMA stream x FIFO control register */
#define STM32_DMA_SFCR(x)		(0x0024 + 0x18 * (x))
#define STM32_DMA_SFCR_FTH_MASK		GENMASK(1, 0)
#define STM32_DMA_SFCR_FTH(n)		(n & STM32_DMA_SFCR_FTH_MASK)
#define STM32_DMA_SFCR_FEIE		BIT(7) /* FIFO error interrupt enable */
#define STM32_DMA_SFCR_DMDIS		BIT(2) /* Direct mode disable */
#define STM32_DMA_SFCR_MASK		(STM32_DMA_SFCR_FEIE \
					| STM32_DMA_SFCR_DMDIS)

/* DMA direction */
#define STM32_DMA_DEV_TO_MEM		0x00
#define STM32_DMA_MEM_TO_DEV		0x01
#define STM32_DMA_MEM_TO_MEM		0x02

/* DMA priority level */
#define STM32_DMA_PRIORITY_LOW		0x00
#define STM32_DMA_PRIORITY_MEDIUM	0x01
#define STM32_DMA_PRIORITY_HIGH		0x02
#define STM32_DMA_PRIORITY_VERY_HIGH	0x03

/* DMA FIFO threshold selection */
#define STM32_DMA_FIFO_THRESHOLD_1QUARTERFULL	0x00
#define STM32_DMA_FIFO_THRESHOLD_HALFFULL	0x01
#define STM32_DMA_FIFO_THRESHOLD_3QUARTERSFULL	0x02
#define STM32_DMA_FIFO_THRESHOLD_FULL		0x03

#define STM32_DMA_MAX_DATA_ITEMS	0xffff
#define STM32_DMA_MAX_CHANNELS		0x08
#define STM32_DMA_MAX_REQUEST_ID	0x08
#define STM32_DMA_MAX_DATA_PARAM	0x03
#define STM32_DMA_MAX_BURST		16

/* DMA Features */
#define STM32_DMA_THRESHOLD_FTR_MASK	GENMASK(1, 0)
#define STM32_DMA_THRESHOLD_FTR_GET(n)	((n) & STM32_DMA_THRESHOLD_FTR_MASK)

enum stm32_dma_width {
	STM32_DMA_BYTE,
	STM32_DMA_HALF_WORD,
	STM32_DMA_WORD,
};

enum stm32_dma_burst_size {
	STM32_DMA_BURST_SINGLE,
	STM32_DMA_BURST_INCR4,
	STM32_DMA_BURST_INCR8,
	STM32_DMA_BURST_INCR16,
};

/**
 * struct stm32_dma_cfg - STM32 DMA custom configuration
 * @channel_id: channel ID
 * @request_line: DMA request
 * @stream_config: 32bit mask specifying the DMA channel configuration
 * @features: 32bit mask specifying the DMA Feature list
 */
struct stm32_dma_cfg {
	u32 channel_id;
	u32 request_line;
	u32 stream_config;
	u32 features;
};

struct stm32_dma_chan_reg {
	u32 dma_lisr;
	u32 dma_hisr;
	u32 dma_lifcr;
	u32 dma_hifcr;
	u32 dma_scr;
	u32 dma_sndtr;
	u32 dma_spar;
	u32 dma_sm0ar;
	u32 dma_sm1ar;
	u32 dma_sfcr;
};

struct stm32_dma_sg_req {
	u32 len;
	struct stm32_dma_chan_reg chan_reg;
};

struct stm32_dma_desc {
	struct virt_dma_desc vdesc;
	bool cyclic;
	u32 num_sgs;
	struct stm32_dma_sg_req sg_req[];
};

struct stm32_dma_chan {
	struct virt_dma_chan vchan;
	bool config_init;
	bool busy;
	u32 id;
	u32 irq;
	struct stm32_dma_desc *desc;
	u32 next_sg;
	struct dma_slave_config dma_sconfig;
	struct stm32_dma_chan_reg chan_reg;
	u32 threshold;
};

struct stm32_dma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	struct reset_control *rst;
	bool mem2mem;
	struct stm32_dma_chan chan[STM32_DMA_MAX_CHANNELS];
};

static struct stm32_dma_device *stm32_dma_get_dev(struct stm32_dma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_dma_device,
			    ddev);
}

static struct stm32_dma_chan *to_stm32_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_dma_chan, vchan.chan);
}

static struct stm32_dma_desc *to_stm32_dma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_dma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_dma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static u32 stm32_dma_read(struct stm32_dma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_dma_write(struct stm32_dma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static struct stm32_dma_desc *stm32_dma_alloc_desc(u32 num_sgs)
{
	return kzalloc(sizeof(struct stm32_dma_desc) +
		       sizeof(struct stm32_dma_sg_req) * num_sgs, GFP_NOWAIT);
}

static int stm32_dma_get_width(struct stm32_dma_chan *chan,
			       enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
		return STM32_DMA_BYTE;
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return STM32_DMA_HALF_WORD;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return STM32_DMA_WORD;
	default:
		dev_err(chan2dev(chan), "Dma bus width not supported\n");
		return -EINVAL;
	}
}

static int stm32_dma_get_burst(struct stm32_dma_chan *chan, u32 maxburst)
{
	switch (maxburst) {
	case 0:
	case 1:
		return STM32_DMA_BURST_SINGLE;
	case 4:
		return STM32_DMA_BURST_INCR4;
	case 8:
		return STM32_DMA_BURST_INCR8;
	case 16:
		return STM32_DMA_BURST_INCR16;
	default:
		dev_err(chan2dev(chan), "Dma burst size not supported\n");
		return -EINVAL;
	}
}

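/*
 * Illustrative mapping (follows directly from the two helpers above, shown
 * only as an example): a client requesting maxburst = 8 with a 4-byte bus
 * width is translated to STM32_DMA_BURST_INCR8 and STM32_DMA_WORD, which
 * stm32_dma_set_xfer_param() later encodes into the SCR MBURST/PBURST and
 * MSIZE/PSIZE fields.
 */
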
static void stm32_dma_set_fifo_config(struct stm32_dma_chan *chan,
				      u32 src_maxburst, u32 dst_maxburst)
{
	chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_MASK;
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_DMEIE;

	if ((!src_maxburst) && (!dst_maxburst)) {
		/* Using direct mode */
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DMEIE;
	} else {
		/* Using FIFO mode */
		chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK;
	}
}

static int stm32_dma_slave_config(struct dma_chan *c,
				  struct dma_slave_config *config)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);

	memcpy(&chan->dma_sconfig, config, sizeof(*config));

	chan->config_init = true;

	return 0;
}

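/*
 * Typical client-side usage (a minimal sketch, not taken from this driver):
 * a peripheral driver fills a struct dma_slave_config and passes it to this
 * callback through the standard dmaengine helper, with fifo_phys_addr below
 * standing in for the peripheral's data register address:
 *
 *	struct dma_slave_config cfg = {
 *		.direction = DMA_MEM_TO_DEV,
 *		.dst_addr = fifo_phys_addr,
 *		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
 *		.dst_maxburst = 4,
 *	};
 *	dmaengine_slave_config(chan, &cfg);
 */
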
static u32 stm32_dma_irq_status(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 flags, dma_isr;

	/*
	 * Read "flags" from DMA_xISR register corresponding to the selected
	 * DMA channel at the correct bit offset inside that register.
	 *
	 * If (ch % 4) is 2 or 3, left shift the mask by 16 bits.
	 * If (ch % 4) is 1 or 3, additionally left shift the mask by 6 bits.
	 */

	if (chan->id & 4)
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_HISR);
	else
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_LISR);

	flags = dma_isr >> (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	return flags;
}

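/*
 * Worked example of the offset computation above (derived from the code, for
 * illustration only): stream 6 has bit 2 of its id set to select HISR;
 * (6 & 2) << 3 = 16 and (6 & 1) * 6 = 0, so its flags are read from bits
 * 21..16 of HISR. Stream 5 also uses HISR, with (5 & 2) << 3 = 0 and
 * (5 & 1) * 6 = 6, i.e. bits 11..6.
 */
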
static void stm32_dma_irq_clear(struct stm32_dma_chan *chan, u32 flags)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_ifcr;

	/*
	 * Write "flags" to the DMA_xIFCR register corresponding to the
	 * selected DMA channel at the correct bit offset inside that register.
	 *
	 * If (ch % 4) is 2 or 3, left shift the mask by 16 bits.
	 * If (ch % 4) is 1 or 3, additionally left shift the mask by 6 bits.
	 */
	dma_ifcr = flags << (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	if (chan->id & 4)
		stm32_dma_write(dmadev, STM32_DMA_HIFCR, dma_ifcr);
	else
		stm32_dma_write(dmadev, STM32_DMA_LIFCR, dma_ifcr);
}

static int stm32_dma_disable_chan(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	unsigned long timeout = jiffies + msecs_to_jiffies(5000);
	u32 dma_scr, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	if (dma_scr & STM32_DMA_SCR_EN) {
		dma_scr &= ~STM32_DMA_SCR_EN;
		stm32_dma_write(dmadev, STM32_DMA_SCR(id), dma_scr);

		do {
			dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
			dma_scr &= STM32_DMA_SCR_EN;
			if (!dma_scr)
				break;

			if (time_after_eq(jiffies, timeout)) {
				dev_err(chan2dev(chan), "%s: timeout!\n",
					__func__);
				return -EBUSY;
			}
			cond_resched();
		} while (1);
	}

	return 0;
}

static void stm32_dma_stop(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr, dma_sfcr, status;
	int ret;

	/* Disable interrupts */
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	dma_scr &= ~STM32_DMA_SCR_IRQ_MASK;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
	dma_sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));
	dma_sfcr &= ~STM32_DMA_SFCR_FEIE;
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), dma_sfcr);

	/* Disable DMA */
	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_dma_irq_clear(chan, status);
	}

	chan->busy = false;
}

static int stm32_dma_terminate_all(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (chan->busy) {
		stm32_dma_stop(chan);
		chan->desc = NULL;
	}

	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_dma_synchronize(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static void stm32_dma_dump_reg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	u32 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));
	u32 spar = stm32_dma_read(dmadev, STM32_DMA_SPAR(chan->id));
	u32 sm0ar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(chan->id));
	u32 sm1ar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(chan->id));
	u32 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));

	dev_dbg(chan2dev(chan), "SCR: 0x%08x\n", scr);
	dev_dbg(chan2dev(chan), "NDTR: 0x%08x\n", ndtr);
	dev_dbg(chan2dev(chan), "SPAR: 0x%08x\n", spar);
	dev_dbg(chan2dev(chan), "SM0AR: 0x%08x\n", sm0ar);
	dev_dbg(chan2dev(chan), "SM1AR: 0x%08x\n", sm1ar);
	dev_dbg(chan2dev(chan), "SFCR: 0x%08x\n", sfcr);
}

static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan);

static void stm32_dma_start_transfer(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_dma_sg_req *sg_req;
	struct stm32_dma_chan_reg *reg;
	u32 status;
	int ret;

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return;

	if (!chan->desc) {
		vdesc = vchan_next_desc(&chan->vchan);
		if (!vdesc)
			return;

		chan->desc = to_stm32_dma_desc(vdesc);
		chan->next_sg = 0;
	}

	if (chan->next_sg == chan->desc->num_sgs)
		chan->next_sg = 0;

	sg_req = &chan->desc->sg_req[chan->next_sg];
	reg = &sg_req->chan_reg;

	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);
	stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar);
	stm32_dma_write(dmadev, STM32_DMA_SM0AR(chan->id), reg->dma_sm0ar);
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), reg->dma_sfcr);
	stm32_dma_write(dmadev, STM32_DMA_SM1AR(chan->id), reg->dma_sm1ar);
	stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), reg->dma_sndtr);

	chan->next_sg++;

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status)
		stm32_dma_irq_clear(chan, status);

	if (chan->desc->cyclic)
		stm32_dma_configure_next_sg(chan);

	stm32_dma_dump_reg(chan);

	/* Start DMA */
	reg->dma_scr |= STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %p: started\n", &chan->vchan);
}

static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_sg_req *sg_req;
	u32 dma_scr, dma_sm0ar, dma_sm1ar, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	if (dma_scr & STM32_DMA_SCR_DBM) {
		if (chan->next_sg == chan->desc->num_sgs)
			chan->next_sg = 0;

		sg_req = &chan->desc->sg_req[chan->next_sg];

		if (dma_scr & STM32_DMA_SCR_CT) {
			dma_sm0ar = sg_req->chan_reg.dma_sm0ar;
			stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), dma_sm0ar);
			dev_dbg(chan2dev(chan), "CT=1 <=> SM0AR: 0x%08x\n",
				stm32_dma_read(dmadev, STM32_DMA_SM0AR(id)));
		} else {
			dma_sm1ar = sg_req->chan_reg.dma_sm1ar;
			stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), dma_sm1ar);
			dev_dbg(chan2dev(chan), "CT=0 <=> SM1AR: 0x%08x\n",
				stm32_dma_read(dmadev, STM32_DMA_SM1AR(id)));
		}
	}
}

static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan)
{
	if (chan->desc) {
		if (chan->desc->cyclic) {
			vchan_cyclic_callback(&chan->desc->vdesc);
			chan->next_sg++;
			stm32_dma_configure_next_sg(chan);
		} else {
			chan->busy = false;
			if (chan->next_sg == chan->desc->num_sgs) {
				list_del(&chan->desc->vdesc.node);
				vchan_cookie_complete(&chan->desc->vdesc);
				chan->desc = NULL;
			}
			stm32_dma_start_transfer(chan);
		}
	}
}

static irqreturn_t stm32_dma_chan_irq(int irq, void *devid)
{
	struct stm32_dma_chan *chan = devid;
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 status, scr;

	spin_lock(&chan->vchan.lock);

	status = stm32_dma_irq_status(chan);
	scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));

	if ((status & STM32_DMA_TCI) && (scr & STM32_DMA_SCR_TCIE)) {
		stm32_dma_irq_clear(chan, STM32_DMA_TCI);
		stm32_dma_handle_chan_done(chan);

	} else {
		stm32_dma_irq_clear(chan, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static void stm32_dma_issue_pending(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (vchan_issue_pending(&chan->vchan) && !chan->desc && !chan->busy) {
		dev_dbg(chan2dev(chan), "vchan %p: issued\n", &chan->vchan);
		stm32_dma_start_transfer(chan);
	}
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
				    enum dma_transfer_direction direction,
				    enum dma_slave_buswidth *buswidth)
{
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	int src_bus_width, dst_bus_width;
	int src_burst_size, dst_burst_size;
	u32 src_maxburst, dst_maxburst;
	u32 dma_scr = 0;

	src_addr_width = chan->dma_sconfig.src_addr_width;
	dst_addr_width = chan->dma_sconfig.dst_addr_width;
	src_maxburst = chan->dma_sconfig.src_maxburst;
	dst_maxburst = chan->dma_sconfig.dst_maxburst;

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		dst_burst_size = stm32_dma_get_burst(chan, dst_maxburst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		if (!src_addr_width)
			src_addr_width = dst_addr_width;

		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		src_burst_size = stm32_dma_get_burst(chan, src_maxburst);
		if (src_burst_size < 0)
			return src_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_DEV) |
			STM32_DMA_SCR_PSIZE(dst_bus_width) |
			STM32_DMA_SCR_MSIZE(src_bus_width) |
			STM32_DMA_SCR_PBURST(dst_burst_size) |
			STM32_DMA_SCR_MBURST(src_burst_size);

		chan->chan_reg.dma_spar = chan->dma_sconfig.dst_addr;
		*buswidth = dst_addr_width;
		break;

	case DMA_DEV_TO_MEM:
		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		src_burst_size = stm32_dma_get_burst(chan, src_maxburst);
		if (src_burst_size < 0)
			return src_burst_size;

		if (!dst_addr_width)
			dst_addr_width = src_addr_width;

		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		dst_burst_size = stm32_dma_get_burst(chan, dst_maxburst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_DEV_TO_MEM) |
			STM32_DMA_SCR_PSIZE(src_bus_width) |
			STM32_DMA_SCR_MSIZE(dst_bus_width) |
			STM32_DMA_SCR_PBURST(src_burst_size) |
			STM32_DMA_SCR_MBURST(dst_burst_size);

		chan->chan_reg.dma_spar = chan->dma_sconfig.src_addr;
		*buswidth = chan->dma_sconfig.src_addr_width;
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	stm32_dma_set_fifo_config(chan, src_maxburst, dst_maxburst);

	chan->chan_reg.dma_scr &= ~(STM32_DMA_SCR_DIR_MASK |
			STM32_DMA_SCR_PSIZE_MASK | STM32_DMA_SCR_MSIZE_MASK |
			STM32_DMA_SCR_PBURST_MASK | STM32_DMA_SCR_MBURST_MASK);
	chan->chan_reg.dma_scr |= dma_scr;

	return 0;
}

static void stm32_dma_clear_reg(struct stm32_dma_chan_reg *regs)
{
	memset(regs, 0, sizeof(struct stm32_dma_chan_reg));
}

static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
	struct dma_chan *c, struct scatterlist *sgl,
	u32 sg_len, enum dma_transfer_direction direction,
	unsigned long flags, void *context)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	struct scatterlist *sg;
	enum dma_slave_buswidth buswidth;
	u32 nb_data_items;
	int i, ret;

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (sg_len < 1) {
		dev_err(chan2dev(chan), "Invalid segment length %d\n", sg_len);
		return NULL;
	}

	desc = stm32_dma_alloc_desc(sg_len);
	if (!desc)
		return NULL;

	ret = stm32_dma_set_xfer_param(chan, direction, &buswidth);
	if (ret < 0)
		goto err;

	/* Set peripheral flow controller */
	if (chan->dma_sconfig.device_fc)
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_PFCTRL;
	else
		chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	for_each_sg(sgl, sg, sg_len, i) {
		desc->sg_req[i].len = sg_dma_len(sg);

		nb_data_items = desc->sg_req[i].len / buswidth;
		if (nb_data_items > STM32_DMA_MAX_DATA_ITEMS) {
			dev_err(chan2dev(chan), "nb items not supported\n");
			goto err;
		}

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sm1ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
	}

	desc->num_sgs = sg_len;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

err:
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
	struct dma_chan *c, dma_addr_t buf_addr, size_t buf_len,
	size_t period_len, enum dma_transfer_direction direction,
	unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	enum dma_slave_buswidth buswidth;
	u32 num_periods, nb_data_items;
	int i, ret;

	if (!buf_len || !period_len) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	/*
	 * New requests may be queued as long as the DMA has not been started;
	 * the driver then loops over all of them. Once the DMA is running,
	 * further requests can only be queued after terminating the transfer.
	 */
	if (chan->busy) {
		dev_err(chan2dev(chan), "Request not allowed when dma busy\n");
		return NULL;
	}

	ret = stm32_dma_set_xfer_param(chan, direction, &buswidth);
	if (ret < 0)
		return NULL;

	nb_data_items = period_len / buswidth;
	if (nb_data_items > STM32_DMA_MAX_DATA_ITEMS) {
		dev_err(chan2dev(chan), "number of items not supported\n");
		return NULL;
	}

	/* Enable Circular mode or double buffer mode */
	if (buf_len == period_len)
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC;
	else
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM;

	/* Clear periph ctrl if client set it */
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	num_periods = buf_len / period_len;

	desc = stm32_dma_alloc_desc(num_periods);
	if (!desc)
		return NULL;

	for (i = 0; i < num_periods; i++) {
		desc->sg_req[i].len = period_len;

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sm1ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
		buf_addr += period_len;
	}

	desc->num_sgs = num_periods;
	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

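/*
 * Worked example (hypothetical numbers, derived from the logic above): a
 * cyclic transfer with buf_len = 4096, period_len = 1024 and a 4-byte bus
 * width yields num_periods = 4 periods of nb_data_items = 256 each and is
 * programmed in double buffer mode (DBM) since buf_len != period_len; with
 * buf_len == period_len a single circular (CIRC) period is used instead.
 */
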
static struct dma_async_tx_descriptor *stm32_dma_prep_dma_memcpy(
	struct dma_chan *c, dma_addr_t dest,
	dma_addr_t src, size_t len, unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	u32 num_sgs;
	struct stm32_dma_desc *desc;
	size_t xfer_count, offset;
	int i;

	num_sgs = DIV_ROUND_UP(len, STM32_DMA_MAX_DATA_ITEMS);
	desc = stm32_dma_alloc_desc(num_sgs);
	if (!desc)
		return NULL;

	for (offset = 0, i = 0; offset < len; offset += xfer_count, i++) {
		xfer_count = min_t(size_t, len - offset,
				   STM32_DMA_MAX_DATA_ITEMS);

		desc->sg_req[i].len = xfer_count;

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr =
			STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_MEM) |
			STM32_DMA_SCR_MINC |
			STM32_DMA_SCR_PINC |
			STM32_DMA_SCR_TCIE |
			STM32_DMA_SCR_TEIE;
		desc->sg_req[i].chan_reg.dma_sfcr = STM32_DMA_SFCR_DMDIS |
			STM32_DMA_SFCR_FTH(STM32_DMA_FIFO_THRESHOLD_FULL) |
			STM32_DMA_SFCR_FEIE;
		desc->sg_req[i].chan_reg.dma_spar = src + offset;
		desc->sg_req[i].chan_reg.dma_sm0ar = dest + offset;
		desc->sg_req[i].chan_reg.dma_sndtr = xfer_count;
	}

	desc->num_sgs = num_sgs;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static u32 stm32_dma_get_remaining_bytes(struct stm32_dma_chan *chan)
{
	u32 dma_scr, width, ndtr;
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);

	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	width = STM32_DMA_SCR_PSIZE_GET(dma_scr);
	ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));

	return ndtr << width;
}

static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan,
				     struct stm32_dma_desc *desc,
				     u32 next_sg)
{
	u32 residue = 0;
	int i;

	/*
	 * In cyclic mode, for the last period, residue = remaining bytes from
	 * NDTR
	 */
	if (chan->desc->cyclic && next_sg == 0)
		return stm32_dma_get_remaining_bytes(chan);

	/*
	 * For all other periods in cyclic mode, and in sg mode,
	 * residue = remaining bytes from NDTR + remaining periods/sg to be
	 * transferred
	 */
	for (i = next_sg; i < desc->num_sgs; i++)
		residue += desc->sg_req[i].len;
	residue += stm32_dma_get_remaining_bytes(chan);

	return residue;
}

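/*
 * Worked example (hypothetical numbers, for illustration only): with a
 * 4-period cyclic descriptor of 1024 bytes per period, next_sg == 2, a 4-byte
 * PSIZE and 100 data items left in NDTR, the loop above adds the two
 * remaining periods (2 * 1024) and the NDTR contribution adds 100 << 2 = 400
 * bytes, giving a residue of 2448 bytes.
 */
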
static enum dma_status stm32_dma_tx_status(struct dma_chan *c,
					   dma_cookie_t cookie,
					   struct dma_tx_state *state)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if (status == DMA_COMPLETE || !state)
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_dma_desc_residue(chan, chan->desc,
						 chan->next_sg);
	else if (vdesc)
		residue = stm32_dma_desc_residue(chan,
						 to_stm32_dma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static int stm32_dma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	int ret;

	chan->config_init = false;
	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
		return ret;
	}

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		clk_disable_unprepare(dmadev->clk);

	return ret;
}

static void stm32_dma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_dma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	clk_disable_unprepare(dmadev->clk);

	vchan_free_chan_resources(to_virt_chan(c));
}

static void stm32_dma_desc_free(struct virt_dma_desc *vdesc)
{
	kfree(container_of(vdesc, struct stm32_dma_desc, vdesc));
}

static void stm32_dma_set_config(struct stm32_dma_chan *chan,
				 struct stm32_dma_cfg *cfg)
{
	stm32_dma_clear_reg(&chan->chan_reg);

	chan->chan_reg.dma_scr = cfg->stream_config & STM32_DMA_SCR_CFG_MASK;
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_REQ(cfg->request_line);

	/* Enable Interrupts */
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_TEIE | STM32_DMA_SCR_TCIE;

	chan->threshold = STM32_DMA_THRESHOLD_FTR_GET(cfg->features);
	chan->chan_reg.dma_sfcr = STM32_DMA_SFCR_FTH(chan->threshold);
}

static struct dma_chan *stm32_dma_of_xlate(struct of_phandle_args *dma_spec,
					   struct of_dma *ofdma)
{
	struct stm32_dma_device *dmadev = ofdma->of_dma_data;
	struct device *dev = dmadev->ddev.dev;
	struct stm32_dma_cfg cfg;
	struct stm32_dma_chan *chan;
	struct dma_chan *c;

	if (dma_spec->args_count < 4) {
		dev_err(dev, "Bad number of cells\n");
		return NULL;
	}

	cfg.channel_id = dma_spec->args[0];
	cfg.request_line = dma_spec->args[1];
	cfg.stream_config = dma_spec->args[2];
	cfg.features = dma_spec->args[3];

	if (cfg.channel_id >= STM32_DMA_MAX_CHANNELS ||
	    cfg.request_line >= STM32_DMA_MAX_REQUEST_ID) {
		dev_err(dev, "Bad channel and/or request id\n");
		return NULL;
	}

	chan = &dmadev->chan[cfg.channel_id];

	c = dma_get_slave_channel(&chan->vchan.chan);
	if (!c) {
		dev_err(dev, "No more channels available\n");
		return NULL;
	}

	stm32_dma_set_config(chan, &cfg);

	return c;
}

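/*
 * Device tree consumer sketch (hypothetical node and values, shown only to
 * illustrate the four cells parsed above: channel id, request line, a stream
 * configuration mask and a features mask):
 *
 *	usart1: serial@40011000 {
 *		dmas = <&dma2 2 4 0x10400 0x0>,
 *		       <&dma2 7 4 0x10400 0x0>;
 *		dma-names = "rx", "tx";
 *	};
 */
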
static const struct of_device_id stm32_dma_of_match[] = {
	{ .compatible = "st,stm32-dma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_dma_of_match);

static int stm32_dma_probe(struct platform_device *pdev)
{
	struct stm32_dma_chan *chan;
	struct stm32_dma_device *dmadev;
	struct dma_device *dd;
	const struct of_device_id *match;
	struct resource *res;
	int i, ret;

	match = of_match_device(stm32_dma_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev), GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dd = &dmadev->ddev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk)) {
		dev_err(&pdev->dev, "Error: Missing controller clock\n");
		return PTR_ERR(dmadev->clk);
	}

	dmadev->mem2mem = of_property_read_bool(pdev->dev.of_node,
						"st,mem2mem");

	dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(dmadev->rst)) {
		reset_control_assert(dmadev->rst);
		udelay(2);
		reset_control_deassert(dmadev->rst);
	}

	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_dma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_dma_free_chan_resources;
	dd->device_tx_status = stm32_dma_tx_status;
	dd->device_issue_pending = stm32_dma_issue_pending;
	dd->device_prep_slave_sg = stm32_dma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_dma_prep_dma_cyclic;
	dd->device_config = stm32_dma_slave_config;
	dd->device_terminate_all = stm32_dma_terminate_all;
	dd->device_synchronize = stm32_dma_synchronize;
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_DMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	if (dmadev->mem2mem) {
		dma_cap_set(DMA_MEMCPY, dd->cap_mask);
		dd->device_prep_dma_memcpy = stm32_dma_prep_dma_memcpy;
		dd->directions |= BIT(DMA_MEM_TO_MEM);
	}

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_dma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	ret = dma_async_device_register(dd);
	if (ret)
		return ret;

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		res = platform_get_resource(pdev, IORESOURCE_IRQ, i);
		if (!res) {
			ret = -EINVAL;
			dev_err(&pdev->dev, "No irq resource for chan %d\n", i);
			goto err_unregister;
		}
		chan->irq = res->start;
		ret = devm_request_irq(&pdev->dev, chan->irq,
				       stm32_dma_chan_irq, 0,
				       dev_name(chan2dev(chan)), chan);
		if (ret) {
			dev_err(&pdev->dev,
				"request_irq failed with err %d channel %d\n",
				ret, i);
			goto err_unregister;
		}
	}

	ret = of_dma_controller_register(pdev->dev.of_node,
					 stm32_dma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 DMA OF registration failed %d\n", ret);
		goto err_unregister;
	}

	platform_set_drvdata(pdev, dmadev);

	dev_info(&pdev->dev, "STM32 DMA driver registered\n");

	return 0;

err_unregister:
	dma_async_device_unregister(dd);

	return ret;
}

static struct platform_driver stm32_dma_driver = {
	.driver = {
		.name = "stm32-dma",
		.of_match_table = stm32_dma_of_match,
	},
};

static int __init stm32_dma_init(void)
{
	return platform_driver_probe(&stm32_dma_driver, stm32_dma_probe);
}
subsys_initcall(stm32_dma_init);