From: Denis Mukhin <dmuk...@ford.com>

Remove the HAVE_AS_VMX workaround for older toolchains: the minimum
supported GCC 5.1 / Binutils 2.25 and Clang 11 natively support the
VMX instructions used in the hypervisor code.

Signed-off-by: Denis Mukhin <dmuk...@ford.com>
---
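Reviewer note (below the cut line, so it will not end up in the commit
message): the dropped probe only asked the toolchain to assemble "vmcall".
As a rough, purely illustrative sanity check that a given x86-64 toolchain
still accepts the VMX mnemonics kept by this patch, something along these
lines can be used with cc standing in for gcc or clang (the build itself
keeps going through as-option-add for the remaining probes):

  # feed the VMX mnemonics used below straight to the assembler
  printf 'vmptrld (%%rax); vmclear (%%rax); vmread %%rax, %%rbx; vmwrite %%rbx, %%rax\n' \
      | cc -x assembler -c -o /dev/null -
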
 xen/arch/x86/arch.mk                   |  3 +--
 xen/arch/x86/include/asm/hvm/vmx/vmx.h | 37 --------------------------
 2 files changed, 1 insertion(+), 39 deletions(-)

diff --git a/xen/arch/x86/arch.mk b/xen/arch/x86/arch.mk
index cb47d72991..6e23d3c27c 100644
--- a/xen/arch/x86/arch.mk
+++ b/xen/arch/x86/arch.mk
@@ -10,7 +10,6 @@ CFLAGS += -msoft-float
 
 $(call cc-options-add,CFLAGS,CC,$(EMBEDDED_EXTRA_CFLAGS))
 $(call cc-option-add,CFLAGS,CC,-Wnested-externs)
-$(call as-option-add,CFLAGS,CC,"vmcall",-DHAVE_AS_VMX)
 $(call as-option-add,CFLAGS,CC,"crc32 %eax$(comma)%eax",-DHAVE_AS_SSE4_2)
 $(call as-option-add,CFLAGS,CC,"invept (%rax)$(comma)%rax",-DHAVE_AS_EPT)
 $(call as-option-add,CFLAGS,CC,"rdrand %eax",-DHAVE_AS_RDRAND)
@@ -24,7 +23,7 @@ $(call as-option-add,CFLAGS,CC,"invpcid (%rax)$(comma)%rax",-DHAVE_AS_INVPCID)
 $(call as-option-add,CFLAGS,CC,"movdiri %rax$(comma)(%rax)",-DHAVE_AS_MOVDIR)
 $(call as-option-add,CFLAGS,CC,"enqcmd (%rax)$(comma)%rax",-DHAVE_AS_ENQCMD)
 
-# Check to see whether the assmbler supports the .nop directive.
+# Check to see whether the assembler supports the .nop directive.
 $(call as-option-add,CFLAGS,CC,\
     ".L1: .L2: .nops (.L2 - .L1)$(comma)9",-DHAVE_AS_NOPS_DIRECTIVE)
 
diff --git a/xen/arch/x86/include/asm/hvm/vmx/vmx.h b/xen/arch/x86/include/asm/hvm/vmx/vmx.h
index 843f8591b9..cfa04aa5a4 100644
--- a/xen/arch/x86/include/asm/hvm/vmx/vmx.h
+++ b/xen/arch/x86/include/asm/hvm/vmx/vmx.h
@@ -310,30 +310,18 @@ extern uint8_t posted_intr_vector;
 #define INVVPID_ALL_CONTEXT                     2
 #define INVVPID_SINGLE_CONTEXT_RETAINING_GLOBAL 3
 
-#ifdef HAVE_AS_VMX
 # define GAS_VMX_OP(yes, no) yes
-#else
-# define GAS_VMX_OP(yes, no) no
-#endif
 
 static always_inline void __vmptrld(u64 addr)
 {
     asm volatile (
-#ifdef HAVE_AS_VMX
                    "vmptrld %0\n"
-#else
-                   VMPTRLD_OPCODE MODRM_EAX_06
-#endif
                    /* CF==1 or ZF==1 --> BUG() */
                    UNLIKELY_START(be, vmptrld)
                    _ASM_BUGFRAME_TEXT(0)
                    UNLIKELY_END_SECTION
                    :
-#ifdef HAVE_AS_VMX
                    : "m" (addr),
-#else
-                   : "a" (&addr),
-#endif
                      _ASM_BUGFRAME_INFO(BUGFRAME_bug, __LINE__, __FILE__, 0)
                    : "memory");
 }
@@ -341,21 +329,13 @@ static always_inline void __vmptrld(u64 addr)
 static always_inline void __vmpclear(u64 addr)
 {
     asm volatile (
-#ifdef HAVE_AS_VMX
                    "vmclear %0\n"
-#else
-                   VMCLEAR_OPCODE MODRM_EAX_06
-#endif
                    /* CF==1 or ZF==1 --> BUG() */
                    UNLIKELY_START(be, vmclear)
                    _ASM_BUGFRAME_TEXT(0)
                    UNLIKELY_END_SECTION
                    :
-#ifdef HAVE_AS_VMX
                    : "m" (addr),
-#else
-                   : "a" (&addr),
-#endif
                      _ASM_BUGFRAME_INFO(BUGFRAME_bug, __LINE__, __FILE__, 0)
                    : "memory");
 }
@@ -363,22 +343,13 @@ static always_inline void __vmpclear(u64 addr)
 static always_inline void __vmread(unsigned long field, unsigned long *value)
 {
     asm volatile (
-#ifdef HAVE_AS_VMX
                    "vmread %1, %0\n\t"
-#else
-                   VMREAD_OPCODE MODRM_EAX_ECX
-#endif
                    /* CF==1 or ZF==1 --> BUG() */
                    UNLIKELY_START(be, vmread)
                    _ASM_BUGFRAME_TEXT(0)
                    UNLIKELY_END_SECTION
-#ifdef HAVE_AS_VMX
                    : "=rm" (*value)
                    : "r" (field),
-#else
-                   : "=c" (*value)
-                   : "a" (field),
-#endif
                      _ASM_BUGFRAME_INFO(BUGFRAME_bug, __LINE__, __FILE__, 0)
         );
 }
@@ -386,21 +357,13 @@ static always_inline void __vmread(unsigned long field, unsigned long *value)
 static always_inline void __vmwrite(unsigned long field, unsigned long value)
 {
     asm volatile (
-#ifdef HAVE_AS_VMX
                    "vmwrite %1, %0\n"
-#else
-                   VMWRITE_OPCODE MODRM_EAX_ECX
-#endif
                    /* CF==1 or ZF==1 --> BUG() */
                    UNLIKELY_START(be, vmwrite)
                    _ASM_BUGFRAME_TEXT(0)
                    UNLIKELY_END_SECTION
                    :
-#ifdef HAVE_AS_VMX
                    : "r" (field) , "rm" (value),
-#else
-                   : "a" (field) , "c" (value),
-#endif
                      _ASM_BUGFRAME_INFO(BUGFRAME_bug, __LINE__, __FILE__, 0)
         );
 }
-- 
2.34.1
