author		Haojian Zhuang <haojian.zhuang@linaro.org>	2014-06-22 16:17:47 +0800
committer	Haojian Zhuang <haojian.zhuang@linaro.org>	2014-06-22 16:22:28 +0800
commit		fac09fe81ef1802cf1f7ef89ad83c8407964ae54 (patch)
tree		59cba79e0bde07e2419628127e2c299655db31a1
parent		ef3cc2def9729f72263c0dd5c5db1345d7fbf0a9 (diff)
debug: test l3hang-with-l3
The kernel hangs when it is entered at stext with L3 enabled. If the debug code placed right after stext in arch/arm/kernel/head.S uses ".align 3", one test LED lights up; if it uses ".align 4" instead, all three test LEDs light up. This suggests that the instruction cache line immediately following stext is corrupted. With L3 disabled there is no hang.

Signed-off-by: Haojian Zhuang <haojian.zhuang@linaro.org>
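The instrumentation in this patch follows the usual progress-LED pattern: set a distinct bit of a memory-mapped LED register at each milestone, so the last LED that comes on brackets where execution stops. A minimal sketch of that pattern, assuming the LED data register at 0xe4002000 and the LED bits within 0x70000000 that the hunks below use (the real register layout is board specific):

	ldr	r8, =0xe4002000		@ assumed LED data register (board specific)
	ldr	r6, [r8]
	orr	r6, r6, #0x10000000	@ LED 1: reached this point
	str	r6, [r8]

	@ ... code under suspicion ...

	ldr	r6, [r8]
	orr	r6, r6, #0x20000000	@ LED 2: made it past the suspect code
	str	r6, [r8]
1:	b	1b			@ park here so the LEDs can be inspected

With .align directives between the milestones, changing the alignment moves which marker lands in the (suspected broken) cache line after stext, which appears to be what the ".align 3" / ".align 4" observations above are probing.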
-rw-r--r--	arch/arm/boot/compressed/head.S	58
-rw-r--r--	arch/arm/kernel/head.S	9
2 files changed, 65 insertions, 2 deletions
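The least self-explanatory hunk is the extra page-table entry written in __setup_mmu below; working through those instructions, the values appear to come out as:

	0xe302b000 >> 20                 = 0xe30        @ 1MB section index
	0x12 | (3 << 10) | (0xe30 << 20) = 0xe3000c12   @ XN|U section entry, AP=11, base 0xe3000000
	r3 + (0xe30 << 2)                = r3 + 0x38c0  @ slot in the page directory at r3

i.e. the hunk flat-maps the 1MB section covering 0xe302b000 so that the accesses to that address in the __armv7_mmu_cache_off hunks further down go through a valid mapping.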
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index 066b03480b63..dfcc711ff1f0 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -11,6 +11,7 @@
#include <linux/linkage.h>
#include <asm/assembler.h>
+#define DEBUG
.arch armv7-a
/*
* Debugging stuff
@@ -671,6 +672,16 @@ __setup_mmu: sub r3, r4, #16384 @ Page directory size
str r1, [r0], #4
add r1, r1, #1048576
str r1, [r0]
+
+ mov r1, #0x12 @ XN|U + section mapping
+ orr r1, r1, #3 << 10 @ AP=11
+ mov r2, #0xe3000000
+ orr r2, r2, #0x020000
+ orr r2, r2, #0xb000 @ r2 = 0xe302b000
+ mov r2, r2, lsr #20 @ 1MB section index
+ orr r1, r1, r2, lsl #20 @ section base address
+ add r0, r3, r2, lsl #2 @ slot in the page directory (r3)
+ str r1, [r0] @ flat-map the section covering 0xe302b000
mov pc, lr
ENDPROC(__setup_mmu)
@@ -721,8 +732,9 @@ __armv7_mmu_cache_on:
#endif
mrc p15, 0, r0, c1, c0, 0 @ read control reg
bic r0, r0, #1 << 28 @ clear SCTLR.TRE
- orr r0, r0, #0x5000 @ I-cache enable, RR cache replacement
- orr r0, r0, #0x003c @ write buffer
+ orr r0, r0, #0x0038
+ #orr r0, r0, #0x5000 @ I-cache enable, RR cache replacement
+ #orr r0, r0, #0x003c @ write buffer
bic r0, r0, #2 @ A (no unaligned access fault)
orr r0, r0, #1 << 22 @ U (v6 unaligned access model)
@ (needed for ARM1176)
@@ -1018,6 +1030,18 @@ __armv4_mmu_cache_off:
mov pc, lr
__armv7_mmu_cache_off:
+ ldr r0, =0xe302b000
+ ldr r10, [r0]
+ cmp r10, #1
+ beq 1f
+ mov r10, #5
+ str r10, [r0, #0x24]
+2:
+ ldr r10, [r0, #0x24]
+ cmp r10, #4
+ bne 2b
+ ldr r10, [r0, #0x20]
+1:
mrc p15, 0, r0, c1, c0
#ifdef CONFIG_MMU
bic r0, r0, #0x000d
@@ -1027,6 +1051,22 @@ __armv7_mmu_cache_off:
mcr p15, 0, r0, c1, c0 @ turn MMU and cache off
mov r12, lr
bl __armv7_mmu_cache_flush
+ mcr p15, 0, r0, c7, c5, 0 @ invalidate I-Cache
+ mcr p15, 0, r0, c7, c5, 4 @ ISB
+ ldr r0, =0xe302b000
+ ldr r10, [r0]
+ cmp r10, #1
+ beq 1f
+ mov r10, #5
+ str r10, [r0, #0x24]
+2:
+ ldr r10, [r0, #0x24]
+ cmp r10, #4
+ bne 2b
+ ldr r10, [r0, #0x20]
+1:
+ mcr p15, 0, r0, c7, c10, 4 @ DSB
+ mcr p15, 0, r0, c7, c5, 4 @ ISB
mov r0, #0
#ifdef CONFIG_MMU
mcr p15, 0, r0, c8, c7, 0 @ invalidate whole TLB
@@ -1279,6 +1319,20 @@ __hyp_reentry_vectors:
#endif /* CONFIG_ARM_VIRT_EXT */
__enter_kernel:
+ ldr r8, =0xe4002000
+ ldr r6, [r8]
+ orr r6, r6, #0x70000000
+ str r6, [r8]
+ ldr r6, [r8]
+/*
+ kphex r6, 8
+ ldr r6, [r8, #4]
+ kphex r6, 8
+ ldr r0, [r4]
+ kphex r0, 8
+ mrc p15, 0, r0, c1, c0
+ kphex r0, 8
+*/
mov r0, #0 @ must be 0
ARM( mov pc, r4 ) @ call kernel
THUMB( bx r4 ) @ entry point is always ARM
diff --git a/arch/arm/kernel/head.S b/arch/arm/kernel/head.S
index 914616e0bdcd..08fb56227193 100644
--- a/arch/arm/kernel/head.S
+++ b/arch/arm/kernel/head.S
@@ -78,6 +78,15 @@
__HEAD
ENTRY(stext)
+ ldr r6, [r8]
+ .align 3
+ bic r6, r6, #0x70000000
+ orr r6, r6, #0x10000000
+ str r6, [r8]
+ ldr r6, [r8]
+ orr r6, r6, #0x20000000
+ str r6, [r8]
+ ldr r6, [r8]
ARM_BE8(setend be ) @ ensure we are in BE8 mode
THUMB( adr r9, BSYM(1f) ) @ Kernel is always entered in ARM.