path: root/arch/x86/include/asm/special_insns.h
author    Andy Lutomirski <luto@amacapital.net>    2014-10-24 15:58:08 -0700
committer Ingo Molnar <mingo@kernel.org>           2015-02-04 12:10:42 +0100
commit    1e02ce4cccdcb9688386e5b8d2c9fa4660b45389 (patch)
tree      7d514286844acea505228590119ac1a886cf6995 /arch/x86/include/asm/special_insns.h
parent    375074cc736ab1d89a708c0a8d7baa4a70d5d476 (diff)
x86: Store a per-cpu shadow copy of CR4
Context switches and TLB flushes can change individual bits of CR4. CR4 reads take several cycles, so store a shadow copy of CR4 in a per-cpu variable.

To avoid wasting a cache line, I added the CR4 shadow to cpu_tlbstate, which is already touched in switch_mm. The heaviest users of the cr4 shadow will be switch_mm and __switch_to_xtra, and __switch_to_xtra is called shortly after switch_mm during context switch, so the cacheline is likely to be hot.

Signed-off-by: Andy Lutomirski <luto@amacapital.net>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Kees Cook <keescook@chromium.org>
Cc: Andrea Arcangeli <aarcange@redhat.com>
Cc: Vince Weaver <vince@deater.net>
Cc: "hillf.zj" <hillf.zj@alibaba-inc.com>
Cc: Valdis Kletnieks <Valdis.Kletnieks@vt.edu>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Arnaldo Carvalho de Melo <acme@kernel.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Link: http://lkml.kernel.org/r/3a54dd3353fffbf84804398e00dfdc5b7c1afd7d.1414190806.git.luto@amacapital.net
Signed-off-by: Ingo Molnar <mingo@kernel.org>
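A minimal sketch of the shadow-copy idea the message describes: keep a cached copy of the register, update it whenever the real register is written, and serve reads from the cache instead of issuing a slow register read. This is an illustrative stand-alone program, not the kernel's actual per-cpu plumbing; the names hw_cr4, hw_write_cr4, cr4_shadow, cr4_read_shadow and cr4_set_bits are assumptions made for the example (in the kernel the shadow lives in the per-cpu cpu_tlbstate and the register is written through the __write_cr4() wrapper renamed by this patch).

	#include <stdio.h>

	/* Simulated hardware register; stands in for the real CR4. */
	static unsigned long hw_cr4;

	/* Shadow copy; in the kernel this sits in cpu_tlbstate so it shares a
	 * cache line with state that switch_mm already touches. */
	static unsigned long cr4_shadow;

	static void hw_write_cr4(unsigned long val)
	{
		hw_cr4 = val;	/* stand-in for the privileged mov to CR4 */
	}

	/* Reads come from the shadow, avoiding a multi-cycle register read. */
	static unsigned long cr4_read_shadow(void)
	{
		return cr4_shadow;
	}

	/* Set bits, touching the real register only when something changes. */
	static void cr4_set_bits(unsigned long mask)
	{
		unsigned long cr4 = cr4_shadow;

		if ((cr4 | mask) != cr4) {
			cr4 |= mask;
			cr4_shadow = cr4;
			hw_write_cr4(cr4);
		}
	}

	int main(void)
	{
		cr4_set_bits(1UL << 7);	/* e.g. a PGE-like feature bit */
		printf("shadow = %#lx, hw = %#lx\n", cr4_read_shadow(), hw_cr4);
		return 0;
	}

The design point mirrored here is that writes keep shadow and hardware in sync, so every read can be a cheap memory load from an already-hot cache line.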
Diffstat (limited to 'arch/x86/include/asm/special_insns.h')
-rw-r--r--  arch/x86/include/asm/special_insns.h  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/x86/include/asm/special_insns.h b/arch/x86/include/asm/special_insns.h
index e820c080a4e99..6a4b00fafb003 100644
--- a/arch/x86/include/asm/special_insns.h
+++ b/arch/x86/include/asm/special_insns.h
@@ -137,17 +137,17 @@ static inline void write_cr3(unsigned long x)
 	native_write_cr3(x);
 }
 
-static inline unsigned long read_cr4(void)
+static inline unsigned long __read_cr4(void)
 {
 	return native_read_cr4();
 }
 
-static inline unsigned long read_cr4_safe(void)
+static inline unsigned long __read_cr4_safe(void)
 {
 	return native_read_cr4_safe();
 }
 
-static inline void write_cr4(unsigned long x)
+static inline void __write_cr4(unsigned long x)
 {
 	native_write_cr4(x);
 }