]> git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - arch/ppc/syslib/ppc4xx_dma.c
Merge branch 'x86/urgent' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux...
[mirror_ubuntu-artful-kernel.git] / arch / ppc / syslib / ppc4xx_dma.c
CommitLineData
1da177e4 1/*
1da177e4
LT
2 * IBM PPC4xx DMA engine core library
3 *
4 * Copyright 2000-2004 MontaVista Software Inc.
5 *
6 * Cleaned up and converted to new DCR access
7 * Matt Porter <mporter@kernel.crashing.org>
8 *
9 * Original code by Armin Kuster <akuster@mvista.com>
10 * and Pete Popov <ppopov@mvista.com>
11 *
12 * This program is free software; you can redistribute it and/or modify it
13 * under the terms of the GNU General Public License as published by the
14 * Free Software Foundation; either version 2 of the License, or (at your
15 * option) any later version.
16 *
17 * You should have received a copy of the GNU General Public License along
18 * with this program; if not, write to the Free Software Foundation, Inc.,
19 * 675 Mass Ave, Cambridge, MA 02139, USA.
20 */
21
1da177e4
LT
22#include <linux/kernel.h>
23#include <linux/mm.h>
24#include <linux/miscdevice.h>
25#include <linux/init.h>
26#include <linux/module.h>
27
28#include <asm/system.h>
29#include <asm/io.h>
7c3dbbe9 30#include <asm/dma.h>
1da177e4
LT
31#include <asm/ppc4xx_dma.h>
32
33ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
34
35int
36ppc4xx_get_dma_status(void)
37{
38 return (mfdcr(DCRN_DMASR));
39}
40
/*
 * Program the DMA source address register for channel 'dmanr'.
 *
 * NOTE(review): when PPC4xx_DMA_64BIT is defined, only the high 32 bits
 * of 'src_addr' are written (to DMASAHx) and the low word register is
 * never touched here -- confirm against the SoC's DCR map whether the
 * low word (DMASAx) must be written as well in that configuration.
 */
void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	/* Validate the channel number before touching any DCRs. */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

	/* Each channel's address registers are 2 DCRs apart. */
#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}
55
/*
 * Program the DMA destination address register for channel 'dmanr'.
 *
 * NOTE(review): as in ppc4xx_set_src_addr(), the PPC4xx_DMA_64BIT path
 * writes only the high 32 bits (DMADAHx) and never the low word --
 * confirm against the SoC's DCR map.
 */
void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	/* Validate the channel number before touching any DCRs. */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

	/* Each channel's address registers are 2 DCRs apart. */
#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}
70
71void
72ppc4xx_enable_dma(unsigned int dmanr)
73{
74 unsigned int control;
75 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
76 unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
77 DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
78 DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
79 DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};
80
81 if (p_dma_ch->in_use) {
82 printk("enable_dma: channel %d in use\n", dmanr);
83 return;
84 }
85
86 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
87 printk("enable_dma: bad channel: %d\n", dmanr);
88 return;
89 }
90
91 if (p_dma_ch->mode == DMA_MODE_READ) {
92 /* peripheral to memory */
93 ppc4xx_set_src_addr(dmanr, 0);
94 ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
95 } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
96 /* memory to peripheral */
97 ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
98 ppc4xx_set_dst_addr(dmanr, 0);
99 }
100
101 /* for other xfer modes, the addresses are already set */
102 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
103
104 control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
105 if (p_dma_ch->mode == DMA_MODE_MM) {
106 /* software initiated memory to memory */
107 control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
108 }
109
110 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
111
112 /*
113 * Clear the CS, TS, RI bits for the channel from DMASR. This
114 * has been observed to happen correctly only after the mode and
115 * ETD/DCE bits in DMACRx are set above. Must do this before
116 * enabling the channel.
117 */
118
119 mtdcr(DCRN_DMASR, status_bits[dmanr]);
120
121 /*
122 * For device-paced transfers, Terminal Count Enable apparently
123 * must be on, and this must be turned on after the mode, etc.
124 * bits are cleared above (at least on Redwood-6).
125 */
126
127 if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
128 (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
129 control |= DMA_TCE_ENABLE;
130
131 /*
132 * Now enable the channel.
133 */
134
135 control |= (p_dma_ch->mode | DMA_CE_ENABLE);
136
137 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
138
139 p_dma_ch->in_use = 1;
140}
141
142void
143ppc4xx_disable_dma(unsigned int dmanr)
144{
145 unsigned int control;
146 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
147
148 if (!p_dma_ch->in_use) {
149 printk("disable_dma: channel %d not in use\n", dmanr);
150 return;
151 }
152
153 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
154 printk("disable_dma: bad channel: %d\n", dmanr);
155 return;
156 }
157
158 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
159 control &= ~DMA_CE_ENABLE;
160 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
161
162 p_dma_ch->in_use = 0;
163}
164
165/*
166 * Sets the dma mode for single DMA transfers only.
167 * For scatter/gather transfers, the mode is passed to the
168 * alloc_dma_handle() function as one of the parameters.
169 *
170 * The mode is simply saved and used later. This allows
171 * the driver to call set_dma_mode() and set_dma_addr() in
172 * any order.
173 *
174 * Valid mode values are:
175 *
176 * DMA_MODE_READ peripheral to memory
177 * DMA_MODE_WRITE memory to peripheral
178 * DMA_MODE_MM memory to memory
179 * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
180 * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
181 */
182int
183ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
184{
185 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
186
187 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
188 printk("set_dma_mode: bad channel 0x%x\n", dmanr);
189 return DMA_STATUS_BAD_CHANNEL;
190 }
191
192 p_dma_ch->mode = mode;
193
194 return DMA_STATUS_GOOD;
195}
196
197/*
198 * Sets the DMA Count register. Note that 'count' is in bytes.
199 * However, the DMA Count register counts the number of "transfers",
200 * where each transfer is equal to the bus width. Thus, count
201 * MUST be a multiple of the bus width.
202 */
203void
204ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
205{
206 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
207
208#ifdef DEBUG_4xxDMA
209 {
210 int error = 0;
211 switch (p_dma_ch->pwidth) {
212 case PW_8:
213 break;
214 case PW_16:
215 if (count & 0x1)
216 error = 1;
217 break;
218 case PW_32:
219 if (count & 0x3)
220 error = 1;
221 break;
222 case PW_64:
223 if (count & 0x7)
224 error = 1;
225 break;
226 default:
227 printk("set_dma_count: invalid bus width: 0x%x\n",
228 p_dma_ch->pwidth);
229 return;
230 }
231 if (error)
232 printk
233 ("Warning: set_dma_count count 0x%x bus width %d\n",
234 count, p_dma_ch->pwidth);
235 }
236#endif
237
238 count = count >> p_dma_ch->shift;
239
240 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
241}
242
/*
 * Returns the number of bytes left to be transferred.
 * After a DMA transfer, this should return zero.
 * Reading this while a DMA transfer is still in progress will return
 * unpredictable results.
 */
249int
250ppc4xx_get_dma_residue(unsigned int dmanr)
251{
252 unsigned int count;
253 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
254
255 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
256 printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
257 return DMA_STATUS_BAD_CHANNEL;
258 }
259
260 count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));
261
262 return (count << p_dma_ch->shift);
263}
264
265/*
266 * Sets the DMA address for a memory to peripheral or peripheral
267 * to memory transfer. The address is just saved in the channel
268 * structure for now and used later in enable_dma().
269 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* Validate the channel number before using the channel state. */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* Warn if the address is not aligned to the bus width. */
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			/*
			 * NOTE(review): "%x" paired with a phys_addr_t argument
			 * is a format mismatch if phys_addr_t is 64-bit -- debug
			 * path only, but confirm.
			 */
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}
312
313/*
314 * Sets both DMA addresses for a memory to memory transfer.
315 * For memory to peripheral or peripheral to memory transfers
316 * the function set_dma_addr() should be used instead.
317 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	/* Validate the channel number before touching any DCRs. */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* Warn if either address is not aligned to the bus width. */
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1)
			    )
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3)
			    )
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7)
			    )
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			/*
			 * NOTE(review): "%x" with phys_addr_t arguments is a
			 * format mismatch if phys_addr_t is 64-bit -- debug
			 * path only, but confirm.
			 */
			printk
			    ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			     src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	/* Program both hardware address registers immediately. */
	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}
367
/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled, if
 * they were previously disabled.
 */
376int
377ppc4xx_enable_dma_interrupt(unsigned int dmanr)
378{
379 unsigned int control;
380 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
381
382 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
383 printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
384 return DMA_STATUS_BAD_CHANNEL;
385 }
386
387 p_dma_ch->int_enable = 1;
388
389 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
390 control |= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
391 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
392
393 return DMA_STATUS_GOOD;
394}
395
/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled, if
 * they were previously enabled.
 */
404int
405ppc4xx_disable_dma_interrupt(unsigned int dmanr)
406{
407 unsigned int control;
408 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
409
410 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
411 printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
412 return DMA_STATUS_BAD_CHANNEL;
413 }
414
415 p_dma_ch->int_enable = 0;
416
417 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
418 control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
419 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
420
421 return DMA_STATUS_GOOD;
422}
423
424/*
425 * Configures a DMA channel, including the peripheral bus width, if a
426 * peripheral is attached to the channel, the polarity of the DMAReq and
427 * DMAAck signals, etc. This information should really be setup by the boot
428 * code, since most likely the configuration won't change dynamically.
429 * If the kernel has to call this function, it's recommended that it's
430 * called from platform specific init code. The driver should not need to
431 * call this function.
432 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/*
	 * DMA_MODE_READ/DMA_MODE_WRITE are assigned here, so on this
	 * platform they are writable globals rather than constants.
	 */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Some 4xx variants have no polarity DCR at all. */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init. Then, over-write the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	/* Note: the memcpy happens first; shift/control below then
	 * overwrite fields of the freshly copied structure. */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8		0
	 *   PW_16		1
	 *   PW_32		2
	 *   PW_64		3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number. It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
498
499/*
500 * This function returns the channel configuration.
501 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* Start from the software copy of the channel state... */
	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

	/* Some 4xx variants have no polarity DCR at all. */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* ...then refresh the hardware-derived fields from the DCRs. */
	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	/* shift tracks pwidth (see ppc4xx_init_dma_channel). */
	p_dma_ch->shift = GET_DMA_PW(control);

	/* Some fields exist only on one controller generation. */
#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}
542
543/*
544 * Sets the priority for the DMA channel dmanr.
545 * Since this is setup by the hardware init function, this function
546 * can be used to dynamically change the priority of a channel.
547 *
548 * Acceptable priorities:
549 *
550 * PRIORITY_LOW
551 * PRIORITY_MID_LOW
552 * PRIORITY_MID_HIGH
553 * PRIORITY_HIGH
554 *
555 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/*
	 * NOTE(review): an invalid priority only produces a warning here;
	 * execution falls through and the bogus value is still written
	 * below.  Consider returning an error instead -- confirm callers.
	 */
	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	/*
	 * NOTE(review): the new priority is OR-ed in without clearing the
	 * old priority bits first, so lowering a channel's priority may
	 * not take effect -- confirm against the DMACRx bit layout.
	 */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
578
/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * The function returns DMA_STATUS_BAD_CHANNEL on error.
 */
592unsigned int
593ppc4xx_get_peripheral_width(unsigned int dmanr)
594{
595 unsigned int control;
596
597 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
598 printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
599 return DMA_STATUS_BAD_CHANNEL;
600 }
601
602 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
603
604 return (GET_DMA_PW(control));
605}
606
607/*
608 * Clears the channel status bits
609 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/*
	 * Shift the channel-0 status bits right by the channel number;
	 * this assumes each per-channel bit sits 'dmanr' positions below
	 * its channel-0 counterpart -- consistent with the CS/TS/CH_ERR
	 * groupings used in ppc4xx_enable_dma()'s status_bits[] table,
	 * but confirm against the DMASR bit layout.
	 */
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}
620
28cd1d17 621#ifdef CONFIG_PPC4xx_EDMA
1da177e4
LT
622/*
623 * Enables the burst on the channel (BTEN bit in the control/count register)
624 * Note:
625 * For scatter/gather dma, this function MUST be called before the
626 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
627 * sgl list and used as each sgl element is added.
628 */
629int
630ppc4xx_enable_burst(unsigned int dmanr)
631{
632 unsigned int ctc;
633 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
634 printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
635 return DMA_STATUS_BAD_CHANNEL;
636 }
637 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
638 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
639 return DMA_STATUS_GOOD;
640}
641/*
642 * Disables the burst on the channel (BTEN bit in the control/count register)
643 * Note:
644 * For scatter/gather dma, this function MUST be called before the
645 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
646 * sgl list and used as each sgl element is added.
647 */
648int
649ppc4xx_disable_burst(unsigned int dmanr)
650{
651 unsigned int ctc;
652 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
653 printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
654 return DMA_STATUS_BAD_CHANNEL;
655 }
656 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
657 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
658 return DMA_STATUS_GOOD;
659}
660/*
661 * Sets the burst size (number of peripheral widths) for the channel
662 * (BSIZ bits in the control/count register))
663 * must be one of:
664 * DMA_CTC_BSIZ_2
665 * DMA_CTC_BSIZ_4
666 * DMA_CTC_BSIZ_8
667 * DMA_CTC_BSIZ_16
668 * Note:
669 * For scatter/gather dma, this function MUST be called before the
670 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
671 * sgl list and used as each sgl element is added.
672 */
673int
674ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
675{
676 unsigned int ctc;
677 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
678 printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
679 return DMA_STATUS_BAD_CHANNEL;
680 }
681 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
682 ctc |= (bsize & DMA_CTC_BSIZ_MSK);
683 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
684 return DMA_STATUS_GOOD;
685}
686
28cd1d17
MP
687EXPORT_SYMBOL(ppc4xx_enable_burst);
688EXPORT_SYMBOL(ppc4xx_disable_burst);
689EXPORT_SYMBOL(ppc4xx_set_burst_size);
690#endif /* CONFIG_PPC4xx_EDMA */
691
1da177e4
LT
692EXPORT_SYMBOL(ppc4xx_init_dma_channel);
693EXPORT_SYMBOL(ppc4xx_get_channel_config);
694EXPORT_SYMBOL(ppc4xx_set_channel_priority);
695EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
696EXPORT_SYMBOL(dma_channels);
697EXPORT_SYMBOL(ppc4xx_set_src_addr);
698EXPORT_SYMBOL(ppc4xx_set_dst_addr);
699EXPORT_SYMBOL(ppc4xx_set_dma_addr);
700EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
701EXPORT_SYMBOL(ppc4xx_enable_dma);
702EXPORT_SYMBOL(ppc4xx_disable_dma);
703EXPORT_SYMBOL(ppc4xx_set_dma_mode);
704EXPORT_SYMBOL(ppc4xx_set_dma_count);
705EXPORT_SYMBOL(ppc4xx_get_dma_residue);
706EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
707EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
708EXPORT_SYMBOL(ppc4xx_get_dma_status);
709EXPORT_SYMBOL(ppc4xx_clr_dma_status);
28cd1d17 710