/*
 * linux/arch/unicore32/mm/tlb-ucv2.S
 *
 * Code specific to PKUnity SoC and UniCore ISA
 *
 * Copyright (C) 2001-2010 GUAN Xue-tao
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/page.h>
#include <asm/tlbflush.h>
#include "proc-macros.S"

/*
 * __cpu_flush_user_tlb_range(start, end, vma)
 *
 * Invalidate a range of TLB entries in the specified address space.
 *
 * - start - start address (may not be aligned)
 * - end   - end address (exclusive, may not be aligned)
 * - vma   - vma_struct describing address range
 */
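/*
 * Note: judging from the inline helpers in <asm/tlbflush.h>, each
 * "movc p0.c6, r0, #op" below writes the page-aligned address in r0 to
 * coprocessor 0 register c6, which appears to drive TLB maintenance on
 * UCv2: op #3 seems to invalidate the D-TLB entry for that page and
 * op #5 the matching I-TLB entry.  The nop8 after each movc mirrors the
 * eight nops used in those helpers, presumably to let the invalidation
 * take effect before the next memory access.
 */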
ENTRY(__cpu_flush_user_tlb_range)
#ifndef CONFIG_CPU_TLB_SINGLE_ENTRY_DISABLE
	mov	r0, r0 >> #PAGE_SHIFT		@ align address
	mov	r0, r0 << #PAGE_SHIFT
	vma_vm_flags r2, r2			@ get vma->vm_flags
1:
	movc	p0.c6, r0, #3
	nop8

	cand.a	r2, #VM_EXEC			@ Executable area ?
	beq	2f

	movc	p0.c6, r0, #5
	nop8
2:
	add	r0, r0, #PAGE_SZ
	csub.a	r0, r1
	beb	1b
#else
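	/*
	 * Single-entry TLB invalidation is disabled: ops #2 and #4
	 * apparently flush the entire D-TLB and I-TLB respectively, so
	 * one D-TLB flush (plus an I-TLB flush for executable areas)
	 * covers the whole range and no per-page loop is needed.
	 */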
	movc	p0.c6, r0, #2
	nop8

	cand.a	r2, #VM_EXEC			@ Executable area ?
	beq	2f

	movc	p0.c6, r0, #4
	nop8
2:
#endif
	mov	pc, lr

/*
 * __cpu_flush_kern_tlb_range(start, end)
 *
 * Invalidate a range of kernel TLB entries.
 *
 * - start - start address (may not be aligned)
 * - end   - end address (exclusive, may not be aligned)
 */
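/*
 * Same per-page walk as __cpu_flush_user_tlb_range above, but a kernel
 * range may hold both code and data, so the D-TLB and I-TLB entries
 * (ops #3 and #5, as noted above) are invalidated unconditionally for
 * every page.
 */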
ENTRY(__cpu_flush_kern_tlb_range)
#ifndef CONFIG_CPU_TLB_SINGLE_ENTRY_DISABLE
	mov	r0, r0 >> #PAGE_SHIFT		@ align address
	mov	r0, r0 << #PAGE_SHIFT
1:
	movc	p0.c6, r0, #3
	nop8

	movc	p0.c6, r0, #5
	nop8

	add	r0, r0, #PAGE_SZ
	csub.a	r0, r1
	beb	1b
#else
	movc	p0.c6, r0, #2
	nop8

	movc	p0.c6, r0, #4
	nop8
#endif
	mov	pc, lr