Diffstat (limited to 'arch/x86/lib')
-rw-r--r--   arch/x86/lib/Makefile        |  1
-rw-r--r--   arch/x86/lib/cpu.c           |  2
-rw-r--r--   arch/x86/lib/error-inject.c  |  1
-rw-r--r--   arch/x86/lib/retpoline.S     | 56
4 files changed, 2 insertions, 58 deletions
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile
index 91e9700cc6dc..25a972c61b0a 100644
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -28,7 +28,6 @@ lib-$(CONFIG_INSTRUCTION_DECODER) += insn.o inat.o insn-eval.o
 lib-$(CONFIG_RANDOMIZE_BASE) += kaslr.o
 lib-$(CONFIG_FUNCTION_ERROR_INJECTION)	+= error-inject.o
 lib-$(CONFIG_RETPOLINE) += retpoline.o
-OBJECT_FILES_NON_STANDARD_retpoline.o :=y
 
 obj-y += msr.o msr-reg.o msr-reg-export.o hweight.o
 
diff --git a/arch/x86/lib/cpu.c b/arch/x86/lib/cpu.c
index d6f848d1211d..2dd1fe13a37b 100644
--- a/arch/x86/lib/cpu.c
+++ b/arch/x86/lib/cpu.c
@@ -18,7 +18,7 @@ unsigned int x86_model(unsigned int sig)
 {
 	unsigned int fam, model;
 
-	 fam = x86_family(sig);
+	fam = x86_family(sig);
 
 	model = (sig >> 4) & 0xf;
 
diff --git a/arch/x86/lib/error-inject.c b/arch/x86/lib/error-inject.c
index 7b881d03d0dd..3cdf06128d13 100644
--- a/arch/x86/lib/error-inject.c
+++ b/arch/x86/lib/error-inject.c
@@ -7,6 +7,7 @@ asmlinkage void just_return_func(void);
 
 asm(
 	".type just_return_func, @function\n"
+	".globl just_return_func\n"
 	"just_return_func:\n"
 	"	ret\n"
 	".size just_return_func, .-just_return_func\n"
diff --git a/arch/x86/lib/retpoline.S b/arch/x86/lib/retpoline.S
index 480edc3a5e03..c909961e678a 100644
--- a/arch/x86/lib/retpoline.S
+++ b/arch/x86/lib/retpoline.S
@@ -7,7 +7,6 @@
 #include <asm/alternative-asm.h>
 #include <asm/export.h>
 #include <asm/nospec-branch.h>
-#include <asm/bitsperlong.h>
 
 .macro THUNK reg
 	.section .text.__x86.indirect_thunk
@@ -47,58 +46,3 @@ GENERATE_THUNK(r13)
 GENERATE_THUNK(r14)
 GENERATE_THUNK(r15)
 #endif
-
-/*
- * Fill the CPU return stack buffer.
- *
- * Each entry in the RSB, if used for a speculative 'ret', contains an
- * infinite 'pause; lfence; jmp' loop to capture speculative execution.
- *
- * This is required in various cases for retpoline and IBRS-based
- * mitigations for the Spectre variant 2 vulnerability. Sometimes to
- * eliminate potentially bogus entries from the RSB, and sometimes
- * purely to ensure that it doesn't get empty, which on some CPUs would
- * allow predictions from other (unwanted!) sources to be used.
- *
- * Google experimented with loop-unrolling and this turned out to be
- * the optimal version - two calls, each with their own speculation
- * trap should their return address end up getting used, in a loop.
- */
-.macro STUFF_RSB nr:req sp:req
-	mov	$(\nr / 2), %_ASM_BX
-	.align 16
-771:
-	call	772f
-773:						/* speculation trap */
-	pause
-	lfence
-	jmp	773b
-	.align 16
-772:
-	call	774f
-775:						/* speculation trap */
-	pause
-	lfence
-	jmp	775b
-	.align 16
-774:
-	dec	%_ASM_BX
-	jnz	771b
-	add	$((BITS_PER_LONG/8) * \nr), \sp
-.endm
-
-#define RSB_FILL_LOOPS		16	/* To avoid underflow */
-
-ENTRY(__fill_rsb)
-	STUFF_RSB RSB_FILL_LOOPS, %_ASM_SP
-	ret
-END(__fill_rsb)
-EXPORT_SYMBOL_GPL(__fill_rsb)
-
-#define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */
-
-ENTRY(__clear_rsb)
-	STUFF_RSB RSB_CLEAR_LOOPS, %_ASM_SP
-	ret
-END(__clear_rsb)
-EXPORT_SYMBOL_GPL(__clear_rsb)
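
For reference, the RSB-stuffing pattern being deleted from retpoline.S can be read as a standalone 64-bit routine once the kernel macros are expanded by hand. The sketch below is illustrative only and is not what the kernel uses after this change: the name clear_rsb_sketch and the push/pop of %rbx are additions, %_ASM_BX/%_ASM_SP become %rbx/%rsp, BITS_PER_LONG/8 becomes 8, and RSB_CLEAR_LOOPS keeps its old value of 32.

	.text
	.globl	clear_rsb_sketch
	.type	clear_rsb_sketch, @function
clear_rsb_sketch:
	push	%rbx			/* the kernel variant left the %rbx clobber to its callers */
	mov	$(32 / 2), %rbx		/* two calls per iteration -> 32 RSB entries total */
	.align	16
771:
	call	772f			/* push a return address the CPU also records in the RSB */
773:					/* speculation trap for the first call */
	pause
	lfence
	jmp	773b
	.align	16
772:
	call	774f			/* push a second return address */
775:					/* speculation trap for the second call */
	pause
	lfence
	jmp	775b
	.align	16
774:
	dec	%rbx
	jnz	771b
	add	$(8 * 32), %rsp		/* drop the 32 pushed return addresses from the stack */
	pop	%rbx
	ret
	.size	clear_rsb_sketch, .-clear_rsb_sketch

As the removed comment explains, each call plants an RSB entry whose target, if ever consumed by a speculative 'ret', lands in the pause/lfence trap loop; the final add simply unwinds the real stack afterwards.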

