arm64: Fix !CONFIG_SMP kernel build
author     Catalin Marinas <catalin.marinas@arm.com>
           Fri, 28 Feb 2014 16:12:25 +0000 (16:12 +0000)
committer  Catalin Marinas <catalin.marinas@arm.com>
           Fri, 28 Feb 2014 16:12:25 +0000 (16:12 +0000)
Commit fb4a96029c8a (arm64: kernel: fix per-cpu offset restore on
resume) uses per_cpu_offset() unconditionally during CPU wakeup;
however, per_cpu_offset() is only defined when CONFIG_SMP is enabled,
so !CONFIG_SMP kernels fail to build.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Reported-by: Dave P Martin <Dave.Martin@arm.com>
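
For context, a minimal sketch of the call pattern that breaks UP builds.
This is an illustration rather than a verbatim copy of the kernel source:
the wrapper restore_percpu_offset() is hypothetical, but set_my_cpu_offset(),
per_cpu_offset() and smp_processor_id() are the real interfaces involved
(the call added by fb4a96029c8a lives in the arm64 suspend/resume code):

#include <linux/percpu.h>	/* set_my_cpu_offset(), per_cpu_offset() */
#include <linux/smp.h>		/* smp_processor_id() */

/*
 * Hypothetical wrapper mirroring what the resume path does: reload the
 * per-cpu offset (held in TPIDR_EL1 on arm64) before anything touches
 * per-cpu data.  On !CONFIG_SMP kernels per_cpu_offset() is not
 * defined, so a call like this is exactly what broke the build.
 */
static void restore_percpu_offset(void)
{
	set_my_cpu_offset(per_cpu_offset(smp_processor_id()));
}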
diff --git a/arch/arm64/include/asm/percpu.h b/arch/arm64/include/asm/percpu.h
index 13fb0b3..453a179 100644
--- a/arch/arm64/include/asm/percpu.h
+++ b/arch/arm64/include/asm/percpu.h
@@ -16,6 +16,8 @@
 #ifndef __ASM_PERCPU_H
 #define __ASM_PERCPU_H
 
+#ifdef CONFIG_SMP
+
 static inline void set_my_cpu_offset(unsigned long off)
 {
        asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
@@ -36,6 +38,12 @@ static inline unsigned long __my_cpu_offset(void)
 }
 #define __my_cpu_offset __my_cpu_offset()
 
+#else  /* !CONFIG_SMP */
+
+#define set_my_cpu_offset(x)   do { } while (0)
+
+#endif /* CONFIG_SMP */
+
 #include <asm-generic/percpu.h>
 
 #endif /* __ASM_PERCPU_H */
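
Worth noting about the fix itself: the !SMP stub is a function-like macro
whose replacement list never uses its argument, not an empty inline
function. Since per_cpu_offset() does not exist at all on UP, the
argument tokens have to be discarded at preprocessing time, before the
compiler ever sees them. A sketch of the expansion (the call site below
is paraphrased, not quoted from the kernel):

#define set_my_cpu_offset(x)   do { } while (0)

/*
 * Because "x" never appears in the replacement list, this whole
 * statement expands to just "do { } while (0);" on UP builds, so the
 * undefined per_cpu_offset() never reaches the compiler.
 */
set_my_cpu_offset(per_cpu_offset(smp_processor_id()));

On UP there is a single copy of each per-cpu variable and
asm-generic/percpu.h accesses it directly, so there is no offset to
program into TPIDR_EL1 and the no-op is semantically correct.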