 arch/arm/include/asm/tlbflush.h | 58 ++++++----------------
 arch/arm/mm/tlb-fa.S            |  4 +---
 arch/arm/mm/tlb-v6.S            |  4 +---
 arch/arm/mm/tlb-v7.S            |  6 ------
 4 files changed, 12 insertions(+), 60 deletions(-)
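
Taken together, the hunks below fold the separate TLB_BTB and TLB_V7_IS_BTB flags into a single TLB_BARRIER flag and drop the branch-target-buffer invalidation from the TLB flush paths, leaving only the dsb/isb synchronisation. As a rough orientation aid, here is a minimal, stand-alone C sketch of the pattern the patch converges on; the tlb_flag()/dsb()/isb() names merely mirror the kernel helpers, and the flag set used here is an assumption made only for this sketch, not the kernel's actual configuration machinery.

/*
 * Minimal stand-alone sketch (not the kernel sources): one TLB_BARRIER bit
 * instead of TLB_BTB/TLB_V7_IS_BTB, and no BTB invalidate in the flush tail,
 * only the barriers.
 */
#define TLB_BARRIER	(1 << 28)

/* Assume, for illustration only, a configuration whose flags include TLB_BARRIER. */
#define _TLB_SKETCH_FLAGS	TLB_BARRIER
#define tlb_flag(f)		((_TLB_SKETCH_FLAGS) & (f))

/* ARMv7 barrier instructions; the v6/fa paths use the CP15 equivalents. */
#define dsb()	__asm__ __volatile__("dsb" : : : "memory")
#define isb()	__asm__ __volatile__("isb" : : : "memory")

static inline void sketch_finish_tlb_flush(void)
{
	/* After the patch: no "mcr p15, 0, rX, c7, c5/c1, 6" BTB flush here. */
	if (tlb_flag(TLB_BARRIER)) {
		dsb();
		isb();
	}
}
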
diff --git a/arch/arm/include/asm/tlbflush.h b/arch/arm/include/asm/tlbflush.h
index d2005de383b8..8077145698ff 100644
--- a/arch/arm/include/asm/tlbflush.h
+++ b/arch/arm/include/asm/tlbflush.h
@@ -34,16 +34,12 @@
#define TLB_V6_D_ASID (1 << 17)
#define TLB_V6_I_ASID (1 << 18)
-#define TLB_BTB (1 << 28)
-
/* Unified Inner Shareable TLB operations (ARMv7 MP extensions) */
#define TLB_V7_UIS_PAGE (1 << 19)
#define TLB_V7_UIS_FULL (1 << 20)
#define TLB_V7_UIS_ASID (1 << 21)
-/* Inner Shareable BTB operation (ARMv7 MP extensions) */
-#define TLB_V7_IS_BTB (1 << 22)
-
+#define TLB_BARRIER (1 << 28)
#define TLB_L2CLEAN_FR (1 << 29) /* Feroceon */
#define TLB_DCLEAN (1 << 30)
#define TLB_WB (1 << 31)
@@ -58,7 +54,7 @@
* v4wb - ARMv4 with write buffer without I TLB flush entry instruction
* v4wbi - ARMv4 with write buffer with I TLB flush entry instruction
* fr - Feroceon (v4wbi with non-outer-cacheable page table walks)
- * fa - Faraday (v4 with write buffer with UTLB and branch target buffer (BTB))
+ * fa - Faraday (v4 with write buffer with UTLB)
* v6wbi - ARMv6 with write buffer with I TLB flush entry instruction
* v7wbi - identical to v6wbi
*/
@@ -99,7 +95,7 @@
# define v4_always_flags (-1UL)
#endif
-#define fa_tlb_flags (TLB_WB | TLB_BTB | TLB_DCLEAN | \
+#define fa_tlb_flags (TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
TLB_V4_U_FULL | TLB_V4_U_PAGE)
#ifdef CONFIG_CPU_TLB_FA
@@ -166,7 +162,7 @@
# define v4wb_always_flags (-1UL)
#endif
-#define v6wbi_tlb_flags (TLB_WB | TLB_DCLEAN | TLB_BTB | \
+#define v6wbi_tlb_flags (TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
TLB_V6_I_FULL | TLB_V6_D_FULL | \
TLB_V6_I_PAGE | TLB_V6_D_PAGE | \
TLB_V6_I_ASID | TLB_V6_D_ASID)
@@ -184,9 +180,9 @@
# define v6wbi_always_flags (-1UL)
#endif
-#define v7wbi_tlb_flags_smp (TLB_WB | TLB_DCLEAN | TLB_V7_IS_BTB | \
+#define v7wbi_tlb_flags_smp (TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
TLB_V7_UIS_FULL | TLB_V7_UIS_PAGE | TLB_V7_UIS_ASID)
-#define v7wbi_tlb_flags_up (TLB_WB | TLB_DCLEAN | TLB_BTB | \
+#define v7wbi_tlb_flags_up (TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
TLB_V6_U_FULL | TLB_V6_U_PAGE | TLB_V6_U_ASID)
#ifdef CONFIG_CPU_TLB_V7
@@ -341,15 +337,7 @@ static inline void local_flush_tlb_all(void)
if (tlb_flag(TLB_V7_UIS_FULL))
asm("mcr p15, 0, %0, c8, c3, 0" : : "r" (zero) : "cc");
- if (tlb_flag(TLB_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c5, 6" : : "r" (zero) : "cc");
- dsb();
- isb();
- }
- if (tlb_flag(TLB_V7_IS_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c1, 6" : : "r" (zero) : "cc");
+ if (tlb_flag(TLB_BARRIER)) {
dsb();
isb();
}
@@ -389,17 +377,8 @@ static inline void local_flush_tlb_mm(struct mm_struct *mm)
asm("mcr p15, 0, %0, c8, c3, 2" : : "r" (asid) : "cc");
#endif
- if (tlb_flag(TLB_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c5, 6" : : "r" (zero) : "cc");
- dsb();
- }
- if (tlb_flag(TLB_V7_IS_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c1, 6" : : "r" (zero) : "cc");
+ if (tlb_flag(TLB_BARRIER))
dsb();
- isb();
- }
}
static inline void
@@ -439,17 +418,8 @@ local_flush_tlb_page(struct vm_area_struct *vma, unsigned long uaddr)
asm("mcr p15, 0, %0, c8, c3, 1" : : "r" (uaddr) : "cc");
#endif
- if (tlb_flag(TLB_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c5, 6" : : "r" (zero) : "cc");
- dsb();
- }
- if (tlb_flag(TLB_V7_IS_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c1, 6" : : "r" (zero) : "cc");
+ if (tlb_flag(TLB_BARRIER))
dsb();
- isb();
- }
}
static inline void local_flush_tlb_kernel_page(unsigned long kaddr)
@@ -482,15 +452,7 @@ static inline void local_flush_tlb_kernel_page(unsigned long kaddr)
if (tlb_flag(TLB_V7_UIS_PAGE))
asm("mcr p15, 0, %0, c8, c3, 1" : : "r" (kaddr) : "cc");
- if (tlb_flag(TLB_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c5, 6" : : "r" (zero) : "cc");
- dsb();
- isb();
- }
- if (tlb_flag(TLB_V7_IS_BTB)) {
- /* flush the branch target cache */
- asm("mcr p15, 0, %0, c7, c1, 6" : : "r" (zero) : "cc");
+ if (tlb_flag(TLB_BARRIER)) {
dsb();
isb();
}
diff --git a/arch/arm/mm/tlb-fa.S b/arch/arm/mm/tlb-fa.S
index 9694f1f6f485..d887a31faaae 100644
--- a/arch/arm/mm/tlb-fa.S
+++ b/arch/arm/mm/tlb-fa.S
@@ -46,7 +46,6 @@ ENTRY(fa_flush_user_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mcr p15, 0, r3, c7, c5, 6 @ invalidate BTB
mcr p15, 0, r3, c7, c10, 4 @ data write barrier
mov pc, lr
@@ -60,9 +59,8 @@ ENTRY(fa_flush_kern_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mcr p15, 0, r3, c7, c5, 6 @ invalidate BTB
mcr p15, 0, r3, c7, c10, 4 @ data write barrier
- mcr p15, 0, r3, c7, c5, 4 @ prefetch flush
+ mcr p15, 0, r3, c7, c5, 4 @ prefetch flush (isb)
mov pc, lr
__INITDATA
diff --git a/arch/arm/mm/tlb-v6.S b/arch/arm/mm/tlb-v6.S
index 73d7d89b04c4..ffe06a69a6e5 100644
--- a/arch/arm/mm/tlb-v6.S
+++ b/arch/arm/mm/tlb-v6.S
@@ -54,7 +54,6 @@ ENTRY(v6wbi_flush_user_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mcr p15, 0, ip, c7, c5, 6 @ flush BTAC/BTB
mcr p15, 0, ip, c7, c10, 4 @ data synchronization barrier
mov pc, lr
@@ -83,9 +82,8 @@ ENTRY(v6wbi_flush_kern_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mcr p15, 0, r2, c7, c5, 6 @ flush BTAC/BTB
mcr p15, 0, r2, c7, c10, 4 @ data synchronization barrier
- mcr p15, 0, r2, c7, c5, 4 @ prefetch flush
+ mcr p15, 0, r2, c7, c5, 4 @ prefetch flush (isb)
mov pc, lr
__INIT
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index 53cd5b454673..86bb71664508 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -48,9 +48,6 @@ ENTRY(v7wbi_flush_user_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mov ip, #0
- ALT_SMP(mcr p15, 0, ip, c7, c1, 6) @ flush BTAC/BTB Inner Shareable
- ALT_UP(mcr p15, 0, ip, c7, c5, 6) @ flush BTAC/BTB
dsb
mov pc, lr
ENDPROC(v7wbi_flush_user_tlb_range)
@@ -75,9 +72,6 @@ ENTRY(v7wbi_flush_kern_tlb_range)
add r0, r0, #PAGE_SZ
cmp r0, r1
blo 1b
- mov r2, #0
- ALT_SMP(mcr p15, 0, r2, c7, c1, 6) @ flush BTAC/BTB Inner Shareable
- ALT_UP(mcr p15, 0, r2, c7, c5, 6) @ flush BTAC/BTB
dsb
isb
mov pc, lr