Diffstat (limited to 'releases/2.6.32.58/arm-7325-1-fix-v7-boot-with-lockdep-enabled.patch')
-rw-r--r--  releases/2.6.32.58/arm-7325-1-fix-v7-boot-with-lockdep-enabled.patch  57
1 file changed, 57 insertions(+), 0 deletions(-)
diff --git a/releases/2.6.32.58/arm-7325-1-fix-v7-boot-with-lockdep-enabled.patch b/releases/2.6.32.58/arm-7325-1-fix-v7-boot-with-lockdep-enabled.patch
new file mode 100644
index 0000000..765010d
--- /dev/null
+++ b/releases/2.6.32.58/arm-7325-1-fix-v7-boot-with-lockdep-enabled.patch
@@ -0,0 +1,57 @@
+From 8e43a905dd574f54c5715d978318290ceafbe275 Mon Sep 17 00:00:00 2001
+From: Rabin Vincent <rabin@rab.in>
+Date: Wed, 15 Feb 2012 16:01:42 +0100
+Subject: ARM: 7325/1: fix v7 boot with lockdep enabled
+
+From: Rabin Vincent <rabin@rab.in>
+
+commit 8e43a905dd574f54c5715d978318290ceafbe275 upstream.
+
+Bootup with lockdep enabled has been broken on v7 since b46c0f74657d
+("ARM: 7321/1: cache-v7: Disable preemption when reading CCSIDR").
+
+This is because v7_setup (which is called very early during boot) calls
+v7_flush_dcache_all, and the save_and_disable_irqs added by that patch
+ends up attempting to call into lockdep C code (trace_hardirqs_off())
+when we are in no position to execute it (no stack, MMU off).
+
+Fix this by using a notrace variant of save_and_disable_irqs. The code
+already uses the notrace variant of restore_irqs.
+
+Reviewed-by: Nicolas Pitre <nico@linaro.org>
+Acked-by: Stephen Boyd <sboyd@codeaurora.org>
+Cc: Catalin Marinas <catalin.marinas@arm.com>
+Signed-off-by: Rabin Vincent <rabin@rab.in>
+Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+
+---
+ arch/arm/include/asm/assembler.h | 5 +++++
+ arch/arm/mm/cache-v7.S | 2 +-
+ 2 files changed, 6 insertions(+), 1 deletion(-)
+
+--- a/arch/arm/include/asm/assembler.h
++++ b/arch/arm/include/asm/assembler.h
+@@ -133,6 +133,11 @@
+ disable_irq
+ .endm
+
++ .macro save_and_disable_irqs_notrace, oldcpsr
++ mrs \oldcpsr, cpsr
++ disable_irq_notrace
++ .endm
++
+ /*
+ * Restore interrupt state previously stored in a register. We don't
+ * guarantee that this will preserve the flags.
+--- a/arch/arm/mm/cache-v7.S
++++ b/arch/arm/mm/cache-v7.S
+@@ -40,7 +40,7 @@ loop1:
+ cmp r1, #2 @ see what cache we have at this level
+ blt skip @ skip if no cache, or just i-cache
+ #ifdef CONFIG_PREEMPT
+- save_and_disable_irqs r9 @ make cssr&csidr read atomic
++ save_and_disable_irqs_notrace r9 @ make cssr&csidr read atomic
+ #endif
+ mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
+ isb @ isb to sych the new cssr&csidr
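Editor's note: for context, below is a minimal sketch of the macros involved, loosely reconstructed from the arch/arm/include/asm/assembler.h of that era for illustration only; it is not part of this patch, and the exact register list and config guards may differ upstream. It shows why the traced disable_irq cannot be used from v7_setup: the notrace variant only masks interrupts with a CPU instruction, while the traced one branches into lockdep's trace_hardirqs_off(), which needs a usable stack and a running MMU.

	@ Illustrative sketch only -- approximate reconstruction, not the upstream source.
	.macro	disable_irq_notrace
	cpsid	i				@ mask IRQs; pure CPU instruction, no C code
	.endm

#ifdef CONFIG_TRACE_IRQFLAGS
	.macro	asm_trace_hardirqs_off
	stmdb	sp!, {r0-r3, ip, lr}		@ needs a usable stack...
	bl	trace_hardirqs_off		@ ...and callable lockdep C code (MMU on)
	ldmia	sp!, {r0-r3, ip, lr}
	.endm
#else
	.macro	asm_trace_hardirqs_off
	.endm
#endif

	.macro	disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off			@ this is the call that breaks early v7 boot
	.endm

With that in mind, the save_and_disable_irqs_notrace macro added by this patch saves the CPSR and uses only disable_irq_notrace, so v7_flush_dcache_all remains safe to call from v7_setup before the stack and MMU are set up, while the regular save_and_disable_irqs keeps its lockdep tracing for ordinary callers.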