/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Author: Vitaly Wool
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>
#include <linux/io.h>

#include <asm/system.h>
#include <mach/hardware.h>
#include <mach/dma.h>
#include <asm/dma-mapping.h>
#include <mach/clock.h>

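/*
 * Per-channel bookkeeping, one slot per hardware channel.  A non-NULL
 * name marks the channel as claimed; irq_handler/data are the client's
 * callback, and ll/ll_dma track any scatter linked list built for it.
 */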
static struct dma_channel {
	char *name;
	void (*irq_handler) (int, int, void *);
	void *data;
	struct pnx4008_dma_ll *ll;
	u32 ll_dma;
	void *target_addr;
	int target_id;
} dma_channels[MAX_DMA_CHANNELS];

static struct ll_pool {
	void *vaddr;
	void *cur;
	dma_addr_t dma_addr;
	int count;
} ll_pool;

static DEFINE_SPINLOCK(ll_lock);

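/*
 * Pop one linked-list entry off the free list kept inside the coherent
 * pool.  Returns the zeroed entry and stores its bus address in *ll_dma,
 * or NULL if the pool is nearly exhausted (a small reserve is kept back).
 */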
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t *ll_dma)
{
	struct pnx4008_dma_ll *ll = NULL;
	unsigned long flags;

	spin_lock_irqsave(&ll_lock, flags);
	if (ll_pool.count > 4) { /* can give one more */
		ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
		*ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
		*(void **)ll_pool.cur = **(void ***)ll_pool.cur;
		memset(ll, 0, sizeof(*ll));
		ll_pool.count--;
	}
	spin_unlock_irqrestore(&ll_lock, flags);

	return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);

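/*
 * Return a single entry to the pool's free list.  Entries that carry an
 * attached buffer (DMA_BUFFER_ALLOCATED) release it through their free()
 * hook first; entries that were not handed out by the pool trigger a BUG.
 */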
void pnx4008_free_ll_entry(struct pnx4008_dma_ll *ll, dma_addr_t ll_dma)
{
	unsigned long flags;

	if (ll) {
		if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
			printk(KERN_ERR "Trying to free entry not allocated by DMA\n");
			BUG();
		}

		if (ll->flags & DMA_BUFFER_ALLOCATED)
			ll->free(ll->alloc_data);

		spin_lock_irqsave(&ll_lock, flags);
		*(long *)ll = *(long *)ll_pool.cur;
		*(long *)ll_pool.cur = (long)ll;
		ll_pool.count++;
		spin_unlock_irqrestore(&ll_lock, flags);
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);

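/*
 * Walk a whole linked list and hand every entry back to the pool.  The
 * next/next_dma fields are read before each entry is freed, since freeing
 * overwrites the start of the entry with free-list bookkeeping.
 */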
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll *ll)
{
	struct pnx4008_dma_ll *ptr;
	u32 dma;

	while (ll) {
		dma = ll->next_dma;
		ptr = ll->next;
		pnx4008_free_ll_entry(ll, ll_dma);

		ll_dma = dma;
		ll = ptr;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);

static int dma_channels_requested = 0;

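/*
 * Reference counting for the controller as a whole: the first requested
 * channel turns the "dma_ck" clock on and enables the DMAC, the last
 * freed channel gates the clock and disables it again.
 */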
static inline void dma_increment_usage(void)
{
	if (!dma_channels_requested++) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 1);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 1);
	}
}

static inline void dma_decrement_usage(void)
{
	if (!--dma_channels_requested) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 0);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 0);
	}
}

static DEFINE_SPINLOCK(dma_lock);

static inline void pnx4008_dma_lock(void)
{
	spin_lock_irq(&dma_lock);
}

static inline void pnx4008_dma_unlock(void)
{
	spin_unlock_irq(&dma_lock);
}

#define VALID_CHANNEL(c) (((c) >= 0) && ((c) < MAX_DMA_CHANNELS))

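/*
 * Claim a DMA channel.  ch == -1 means "any free channel": the table is
 * scanned from the highest-numbered channel down and the first free slot
 * is taken.  Returns the channel number, or a negative errno on failure.
 */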
int pnx4008_request_channel(char *name, int ch,
			    void (*irq_handler) (int, int, void *), void *data)
{
	int i, found = 0;

	/* basic sanity checks */
	if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
		return -EINVAL;

	pnx4008_dma_lock();

	/* try grabbing a DMA channel with the requested priority */
	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		if (!dma_channels[i].name && (ch == -1 || ch == i)) {
			found = 1;
			break;
		}
	}

	if (found) {
		dma_increment_usage();
		dma_channels[i].name = name;
		dma_channels[i].irq_handler = irq_handler;
		dma_channels[i].data = data;
		dma_channels[i].ll = NULL;
		dma_channels[i].ll_dma = 0;
	} else {
		printk(KERN_WARNING "No more available DMA channels for %s\n",
		       name);
		i = -ENODEV;
	}

	pnx4008_dma_unlock();
	return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);

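/*
 * Release a previously requested channel: free any linked list still
 * attached to it, drop the controller usage count and mark the slot free.
 */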
void pnx4008_free_channel(int ch)
{
	if (!dma_channels[ch].name) {
		printk(KERN_CRIT
		       "%s: trying to free channel %d which is already freed\n",
		       __func__, ch);
		return;
	}

	pnx4008_dma_lock();
	pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
	dma_channels[ch].ll = NULL;
	dma_decrement_usage();

	dma_channels[ch].name = NULL;
	pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);

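/*
 * Update the global DMAC_CONFIG register.  Each argument is tri-state:
 * 0 clears the corresponding bit, 1 sets it, any other value (e.g. -1)
 * leaves it untouched.  ahb_m1_be/ahb_m2_be select big-endian mode on the
 * two AHB masters; enable switches the controller on or off.
 */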
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
	unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

	switch (ahb_m1_be) {
	case 0:
		dma_cfg &= ~(1 << 1);
		break;
	case 1:
		dma_cfg |= (1 << 1);
		break;
	default:
		break;
	}

	switch (ahb_m2_be) {
	case 0:
		dma_cfg &= ~(1 << 2);
		break;
	case 1:
		dma_cfg |= (1 << 2);
		break;
	default:
		break;
	}

	switch (enable) {
	case 0:
		dma_cfg &= ~(1 << 0);
		break;
	case 1:
		dma_cfg |= (1 << 0);
		break;
	default:
		break;
	}

	pnx4008_dma_lock();
	__raw_writel(dma_cfg, DMAC_CONFIG);
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);

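/*
 * Pack a decoded channel control description into the raw 32-bit value
 * expected by DMAC_Cx_CONTROL.  Burst sizes must be powers of two between
 * 1 and 256 (excluding 2), and the transfer size is limited to 0x7ff.
 * Returns 0 on success, -EINVAL/-E2BIG on a bad description.
 */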
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl *ch_ctrl,
			     unsigned long *ctrl)
{
	int i = 0, dbsize, sbsize, err = 0;

	if (!ctrl || !ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	*ctrl = 0;

	switch (ch_ctrl->tc_mask) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 31);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->cacheable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 30);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->bufferable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 29);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->priv_mode) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 28);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->di) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 27);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->si) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 26);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->dest_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 25);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->src_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 24);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->dwidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 21);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (1 << 21);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (2 << 21);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->swidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 18);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (1 << 18);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (2 << 18);
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	dbsize = ch_ctrl->dbsize;
	while (!(dbsize & 1)) {
		i++;
		dbsize >>= 1;
	}
	if (ch_ctrl->dbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 15);
	*ctrl |= (i << 15);

	/* restart the exponent count before sizing the source burst */
	i = 0;
	sbsize = ch_ctrl->sbsize;
	while (!(sbsize & 1)) {
		i++;
		sbsize >>= 1;
	}
	if (ch_ctrl->sbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 12);
	*ctrl |= (i << 12);

	if (ch_ctrl->tr_size > 0x7ff) {
		err = -E2BIG;
		goto out;
	}
	*ctrl &= ~0x7ff;
	*ctrl |= ch_ctrl->tr_size & 0x7ff;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);

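/*
 * Inverse of pnx4008_dma_pack_control(): decode a raw DMAC_Cx_CONTROL
 * value back into a pnx4008_dma_ch_ctrl description.
 */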
int pnx4008_dma_parse_control(unsigned long ctrl,
			      struct pnx4008_dma_ch_ctrl *ch_ctrl)
{
	int err = 0;

	if (!ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	ch_ctrl->tr_size = ctrl & 0x7ff;
	ctrl >>= 12;

	ch_ctrl->sbsize = 1 << (ctrl & 7);
	if (ch_ctrl->sbsize > 1)
		ch_ctrl->sbsize <<= 1;
	ctrl >>= 3;

	ch_ctrl->dbsize = 1 << (ctrl & 7);
	if (ch_ctrl->dbsize > 1)
		ch_ctrl->dbsize <<= 1;
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->swidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->swidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->swidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->dwidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->dwidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->dwidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	ch_ctrl->src_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->dest_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->si = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->di = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->priv_mode = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->bufferable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->cacheable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->tc_mask = ctrl & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);

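/*
 * Pack a decoded channel configuration into the raw value written to
 * DMAC_Cx_CONFIG: the halt/active/lock/itc/ie bits, the flow-control
 * mode and the source/destination peripheral IDs.
 */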
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config *ch_cfg,
			    unsigned long *cfg)
{
	int err = 0;

	if (!cfg || !ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	*cfg = 0;

	switch (ch_cfg->halt) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 18);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->active) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 17);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->lock) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 16);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->itc) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 15);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->ie) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 14);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_cfg->flow_cntrl) {
	case FC_MEM2MEM_DMA:
		*cfg &= ~(7 << 11);
		break;
	case FC_MEM2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (1 << 11);
		break;
	case FC_PER2MEM_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (2 << 11);
		break;
	case FC_PER2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (3 << 11);
		break;
	case FC_PER2PER_DPER:
		*cfg &= ~(7 << 11);
		*cfg |= (4 << 11);
		break;
	case FC_MEM2PER_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (5 << 11);
		break;
	case FC_PER2MEM_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (6 << 11);
		break;
	case FC_PER2PER_SPER:
		*cfg |= (7 << 11);
		break;
	default:
		err = -EINVAL;
		goto out;
	}

	*cfg &= ~(0x1f << 6);
	*cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

	*cfg &= ~(0x1f << 1);
	*cfg |= ((ch_cfg->src_per & 0x1f) << 1);

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);

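/*
 * Inverse of pnx4008_dma_pack_config(): decode a raw DMAC_Cx_CONFIG value
 * back into a pnx4008_dma_ch_config description.
 */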
int pnx4008_dma_parse_config(unsigned long cfg,
			     struct pnx4008_dma_ch_config *ch_cfg)
{
	int err = 0;

	if (!ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	cfg >>= 1;

	ch_cfg->src_per = cfg & 0x1f;
	cfg >>= 5;

	ch_cfg->dest_per = cfg & 0x1f;
	cfg >>= 5;

	switch (cfg & 7) {
	case 0:
		ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
		break;
	case 1:
		ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
		break;
	case 2:
		ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
		break;
	case 3:
		ch_cfg->flow_cntrl = FC_PER2PER_DMA;
		break;
	case 4:
		ch_cfg->flow_cntrl = FC_PER2PER_DPER;
		break;
	case 5:
		ch_cfg->flow_cntrl = FC_MEM2PER_PER;
		break;
	case 6:
		ch_cfg->flow_cntrl = FC_PER2MEM_PER;
		break;
	case 7:
		ch_cfg->flow_cntrl = FC_PER2PER_SPER;
	}
	cfg >>= 3;

	ch_cfg->ie = cfg & 1;
	cfg >>= 1;

	ch_cfg->itc = cfg & 1;
	cfg >>= 1;

	ch_cfg->lock = cfg & 1;
	cfg >>= 1;

	ch_cfg->active = cfg & 1;
	cfg >>= 1;

	ch_cfg->halt = cfg & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);

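/*
 * A single DMAC transfer is limited to 0x7ff units.  If config->ch_ctrl
 * describes a longer transfer, split it: the head descriptor keeps the
 * first chunk and additional linked-list entries are allocated for the
 * rest, with source/destination addresses advanced according to the
 * si/di (increment) flags.  Any pre-existing list is chained after the
 * new entries, and the tc_mask bit (bit 31) is kept only on the final
 * entry.
 */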
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config *config,
				  struct pnx4008_dma_ch_ctrl *ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}

	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;

		config->ch_ctrl &= ~0x7ff;
		config->ch_ctrl |= new_len;

		if (!config->is_ll) {
			config->is_ll = 1;
			while (num_entries) {
				if (!ll) {
					config->ll = pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next = pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = config->src_addr +
						src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr = config->dest_addr +
						dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = config->ll;
			unsigned long ll_dma_old = config->ll_dma;

			while (num_entries) {
				if (!ll) {
					config->ll = pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next = pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = config->src_addr +
						src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr = config->dest_addr +
						dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		config->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);

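/*
 * Same splitting as pnx4008_dma_split_head_entry(), but applied to an
 * existing linked-list entry instead of the head descriptor: extra
 * entries are inserted between cur_ll and whatever used to follow it.
 */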
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll *cur_ll,
				struct pnx4008_dma_ch_ctrl *ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}

	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;

		cur_ll->ch_ctrl &= ~0x7ff;
		cur_ll->ch_ctrl |= new_len;

		if (!cur_ll->next) {
			while (num_entries) {
				if (!ll) {
					cur_ll->next = pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next = pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = cur_ll->src_addr +
						src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr = cur_ll->dest_addr +
						dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = cur_ll->next;
			unsigned long ll_dma_old = cur_ll->next_dma;

			while (num_entries) {
				if (!ll) {
					cur_ll->next = pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next = pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr = cur_ll->src_addr +
						src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr = cur_ll->dest_addr +
						dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}

			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		cur_ll->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);

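/*
 * Program a claimed channel's hardware registers (source, destination,
 * linked-list pointer, control and config words) from a prepared
 * pnx4008_dma_config.  pnx4008_channel_get_config() reads them back.
 */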
int pnx4008_config_channel(int ch, struct pnx4008_dma_config *config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	__raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
	__raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

	if (config->is_ll)
		__raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
	else
		__raw_writel(0, DMAC_Cx_LLI(ch));

	__raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
	__raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);

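/*
 * Typical call sequence (illustrative sketch only, not code from this
 * file; "my-dev", my_dma_irq and the elided field values are
 * hypothetical):
 *
 *	static void my_dma_irq(int ch, int cause, void *data)
 *	{
 *		if (cause & DMA_ERR_INT)
 *			printk(KERN_ERR "DMA error on channel %d\n", ch);
 *	}
 *
 *	struct pnx4008_dma_ch_ctrl ctrl = { ... };
 *	struct pnx4008_dma_ch_config ch_cfg = { ... };
 *	struct pnx4008_dma_config cfg = { ... };
 *	int ch;
 *
 *	ch = pnx4008_request_channel("my-dev", -1, my_dma_irq, NULL);
 *	pnx4008_dma_pack_control(&ctrl, &cfg.ch_ctrl);
 *	pnx4008_dma_pack_config(&ch_cfg, &cfg.ch_cfg);
 *	pnx4008_config_channel(ch, &cfg);
 *	pnx4008_dma_ch_enable(ch);
 */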

int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config *config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
		return -EINVAL;

	pnx4008_dma_lock();
	config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

	config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
	config->is_ll = config->ll_dma ? 1 : 0;

	config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
	config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);

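/*
 * Channel enable/disable: bit 0 of DMAC_Cx_CONFIG starts or stops the
 * channel; pnx4008_dma_ch_enabled() reports its current state.
 */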
int pnx4008_dma_ch_enable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg |= 1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);

int pnx4008_dma_ch_disable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg &= ~1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);

int pnx4008_dma_ch_enabled(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);

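/*
 * Single interrupt handler shared by all channels: read the combined,
 * terminal-count and error status registers, dispatch to the registered
 * per-channel handler with a DMA_TC_INT/DMA_ERR_INT cause mask, then
 * acknowledge the status bits that were set.
 */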
static irqreturn_t dma_irq_handler(int irq, void *dev_id)
{
	int i;
	unsigned long dint = __raw_readl(DMAC_INT_STAT);
	unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
	unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
	unsigned long i_bit;

	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		i_bit = 1 << i;
		if (dint & i_bit) {
			struct dma_channel *channel = &dma_channels[i];

			if (channel->name && channel->irq_handler) {
				int cause = 0;

				if (eint & i_bit)
					cause |= DMA_ERR_INT;
				if (tcint & i_bit)
					cause |= DMA_TC_INT;
				channel->irq_handler(i, cause, channel->data);
			} else {
				/*
				 * IRQ for an unregistered DMA channel
				 */
				printk(KERN_WARNING
				       "spurious IRQ for DMA channel %d\n", i);
			}
			if (tcint & i_bit)
				__raw_writel(i_bit, DMAC_INT_TC_CLEAR);
			if (eint & i_bit)
				__raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
		}
	}
	return IRQ_HANDLED;
}

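/*
 * Driver init: hook the shared DMA interrupt, carve a 16 KiB coherent
 * region into the ll_pool free list (each free entry stores a pointer to
 * the next one, and the last wraps back to the start), then enable the
 * controller.
 */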
static int __init pnx4008_dma_init(void)
{
	int ret, i;

	ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
	if (ret) {
		printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
		goto out;
	}

	ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
	ll_pool.cur = ll_pool.vaddr =
	    dma_alloc_coherent(NULL,
			       ll_pool.count * sizeof(struct pnx4008_dma_ll),
			       &ll_pool.dma_addr, GFP_KERNEL);

	if (!ll_pool.vaddr) {
		ret = -ENOMEM;
		free_irq(DMA_INT, NULL);
		goto out;
	}

	for (i = 0; i < ll_pool.count - 1; i++) {
		void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
		*addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
	}
	*(long *)(ll_pool.vaddr +
		  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
	    (long)ll_pool.vaddr;

	__raw_writel(1, DMAC_CONFIG);

out:
	return ret;
}

arch_initcall(pnx4008_dma_init);