Root/
1 | /* |
2 | * linux/arch/arm/mm/tlb-v7.S |
3 | * |
4 | * Copyright (C) 1997-2002 Russell King |
5 | * Modified for ARMv7 by Catalin Marinas |
6 | * |
7 | * This program is free software; you can redistribute it and/or modify |
8 | * it under the terms of the GNU General Public License version 2 as |
9 | * published by the Free Software Foundation. |
10 | * |
11 | * ARM architecture version 7 TLB handling functions. |
12 | * These assume a split I/D TLB. |
13 | */ |
14 | #include <linux/init.h> |
15 | #include <linux/linkage.h> |
16 | #include <asm/assembler.h> |
17 | #include <asm/asm-offsets.h> |
18 | #include <asm/page.h> |
19 | #include <asm/tlbflush.h> |
20 | #include "proc-macros.S" |
21 | |
/*
 *	v7wbi_flush_user_tlb_range(start, end, vma)
 *
 *	Invalidate a range of user-space TLB entries in the address space
 *	described by vma->vm_mm, by MVA+ASID, one page at a time.
 *
 *	- start - start address (may not be aligned)
 *	- end   - end address (exclusive, may not be aligned)
 *	- vma   - vma_struct describing address range
 *
 *	It is assumed that:
 *	- the "Invalidate single entry" instruction will invalidate
 *	  both the I and the D TLBs on Harvard-style TLBs
 */
ENTRY(v7wbi_flush_user_tlb_range)
	vma_vm_mm r3, r2			@ r3 = vma->vm_mm
	mmid	r3, r3				@ r3 = vm_mm->context.id
	dsb					@ complete prior page-table writes
	mov	r0, r0, lsr #PAGE_SHIFT		@ round start down to a page
	mov	r1, r1, lsr #PAGE_SHIFT		@ round end down to a page
	asid	r3, r3				@ keep only the ASID bits
	orr	r0, r3, r0, lsl #PAGE_SHIFT	@ initial MVA = page | ASID
	mov	r1, r1, lsl #PAGE_SHIFT
1:
	ALT_SMP(mcr	p15, 0, r0, c8, c3, 1)	@ TLB invalidate U MVA (shareable)
	ALT_UP(mcr	p15, 0, r0, c8, c7, 1)	@ TLB invalidate U MVA

	add	r0, r0, #PAGE_SZ		@ next page (ASID bits unaffected)
	cmp	r0, r1
	blo	1b
	mov	ip, #0
	ALT_SMP(mcr	p15, 0, ip, c7, c1, 6)	@ flush BTAC/BTB Inner Shareable
	ALT_UP(mcr	p15, 0, ip, c7, c5, 6)	@ flush BTAC/BTB
	dsb					@ ensure invalidation completes
	@ NOTE(review): no isb here, unlike the kern variant below —
	@ presumably the exception return to user space synchronizes; confirm.
	mov	pc, lr
ENDPROC(v7wbi_flush_user_tlb_range)
57 | |
/*
 *	v7wbi_flush_kern_tlb_range(start, end)
 *
 *	Invalidate a range of kernel TLB entries by MVA, one page at a time.
 *	Kernel mappings are global, so no ASID is folded into the MVA here.
 *
 *	- start - start address (may not be aligned)
 *	- end   - end address (exclusive, may not be aligned)
 */
ENTRY(v7wbi_flush_kern_tlb_range)
	dsb					@ complete prior page-table writes
	mov	r0, r0, lsr #PAGE_SHIFT		@ round start down to a page
	mov	r1, r1, lsr #PAGE_SHIFT		@ round end down to a page
	mov	r0, r0, lsl #PAGE_SHIFT
	mov	r1, r1, lsl #PAGE_SHIFT
1:
	ALT_SMP(mcr	p15, 0, r0, c8, c3, 1)	@ TLB invalidate U MVA (shareable)
	ALT_UP(mcr	p15, 0, r0, c8, c7, 1)	@ TLB invalidate U MVA
	add	r0, r0, #PAGE_SZ		@ advance to next page
	cmp	r0, r1
	blo	1b
	mov	r2, #0
	ALT_SMP(mcr	p15, 0, r2, c7, c1, 6)	@ flush BTAC/BTB Inner Shareable
	ALT_UP(mcr	p15, 0, r2, c7, c5, 6)	@ flush BTAC/BTB
	dsb					@ ensure invalidation completes
	isb					@ kernel may use the mapping at once
	mov	pc, lr
ENDPROC(v7wbi_flush_kern_tlb_range)
85 | |
86 | __INIT |
87 | |
88 | .type v7wbi_tlb_fns, #object |
89 | ENTRY(v7wbi_tlb_fns) |
90 | .long v7wbi_flush_user_tlb_range |
91 | .long v7wbi_flush_kern_tlb_range |
92 | ALT_SMP(.long v7wbi_tlb_flags_smp) |
93 | ALT_UP(.long v7wbi_tlb_flags_up) |
94 | .size v7wbi_tlb_fns, . - v7wbi_tlb_fns |
95 |
Branches:
ben-wpan
ben-wpan-stefan
javiroman/ks7010
jz-2.6.34
jz-2.6.34-rc5
jz-2.6.34-rc6
jz-2.6.34-rc7
jz-2.6.35
jz-2.6.36
jz-2.6.37
jz-2.6.38
jz-2.6.39
jz-3.0
jz-3.1
jz-3.11
jz-3.12
jz-3.13
jz-3.15
jz-3.16
jz-3.18-dt
jz-3.2
jz-3.3
jz-3.4
jz-3.5
jz-3.6
jz-3.6-rc2-pwm
jz-3.9
jz-3.9-clk
jz-3.9-rc8
jz47xx
jz47xx-2.6.38
master
Tags:
od-2011-09-04
od-2011-09-18
v2.6.34-rc5
v2.6.34-rc6
v2.6.34-rc7
v3.9