Inline assembly is not supported for MSVC x64. Convert code to use __rdtsc intrinsic.
Signed-off-by: Tyler Retzlaff <roret...@linux.microsoft.com>
Acked-by: Konstantin Ananyev <konstantin.v.anan...@yandex.ru>
Acked-by: Morten Brørup <m...@smartsharesystems.com>
---
 lib/eal/x86/include/rte_cycles.h | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/lib/eal/x86/include/rte_cycles.h b/lib/eal/x86/include/rte_cycles.h
index a461a4d..ca0fb10 100644
--- a/lib/eal/x86/include/rte_cycles.h
+++ b/lib/eal/x86/include/rte_cycles.h
@@ -6,6 +6,12 @@
 #ifndef _RTE_CYCLES_X86_64_H_
 #define _RTE_CYCLES_X86_64_H_
 
+#ifdef RTE_TOOLCHAIN_MSVC
+#include <intrin.h>
+#else
+#include <x86intrin.h>
+#endif
+
 #ifdef __cplusplus
 extern "C" {
 #endif
@@ -23,6 +29,7 @@
 static inline uint64_t
 rte_rdtsc(void)
 {
+#ifdef RTE_LIBRTE_EAL_VMWARE_TSC_MAP_SUPPORT
 	union {
 		uint64_t tsc_64;
 		RTE_STD_C11
@@ -32,7 +39,6 @@
 		};
 	} tsc;
 
-#ifdef RTE_LIBRTE_EAL_VMWARE_TSC_MAP_SUPPORT
 	if (unlikely(rte_cycles_vmware_tsc_map)) {
 		/* ecx = 0x10000 corresponds to the physical TSC for VMware */
 		asm volatile("rdpmc" :
@@ -42,11 +48,7 @@
 		return tsc.tsc_64;
 	}
 #endif
-
-	asm volatile("rdtsc" :
-		     "=a" (tsc.lo_32),
-		     "=d" (tsc.hi_32));
-	return tsc.tsc_64;
+	return __rdtsc();
 }
 
 static inline uint64_t
-- 
1.8.3.1
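
For readers unfamiliar with the intrinsic, here is a minimal standalone sketch (not part of the patch) of the same idea: a toolchain-conditional include followed by __rdtsc() reads, which works on both MSVC and GCC/Clang without inline assembly. The _MSC_VER check, file layout and main() harness are illustrative assumptions, not DPDK code.

/* Illustrative sketch only: read the TSC via the __rdtsc() intrinsic,
 * which is provided by <intrin.h> on MSVC and <x86intrin.h> on GCC/Clang.
 */
#include <stdint.h>
#include <stdio.h>

#ifdef _MSC_VER
#include <intrin.h>     /* MSVC: __rdtsc() is declared here */
#else
#include <x86intrin.h>  /* GCC/Clang: __rdtsc() is declared here */
#endif

int main(void)
{
	uint64_t start = __rdtsc();
	/* ... code being timed would go here ... */
	uint64_t end = __rdtsc();

	printf("elapsed TSC ticks: %llu\n",
	       (unsigned long long)(end - start));
	return 0;
}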