Since 512-bit vectors will be supported on both P-cores and E-cores, 256-bit embedded rounding is no longer useful: the rounding feature is now available directly on E-cores through the 512-bit forms, so there is no need to keep 256-bit rounding as a workaround. This patch removes the 256-bit rounding intrinsics from the AVX10.2 satcvt intrinsics.
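For reference, a minimal migration sketch in C. It assumes the remaining non-rounding 256-bit intrinsic is named _mm256_ipcvts_ph_epi8 (inferred from the "Use non-round intrins" ChangeLog entry, not spelled out in this diff) and uses the 512-bit rounding form that the adjusted avx10_2-512-*-2.c tests exercise via the INTRINSIC macro; callers that only passed _MM_FROUND_CUR_DIRECTION can simply drop the rounding argument, while callers that genuinely need embedded rounding can widen to 512 bit:

```c
#include <immintrin.h>

/* After this patch: the non-rounding 256-bit form replaces
   _mm256_ipcvts_roundph_epi8 (a, _MM_FROUND_CUR_DIRECTION).
   The name below is assumed from the ChangeLog; check the
   installed avx10_2satcvtintrin.h for the exact spelling.  */
__m256i
cvt_ph_to_ibs_256 (__m256h a)
{
  return _mm256_ipcvts_ph_epi8 (a);
}

/* Embedded rounding remains available at 512 bit, as exercised by
   the adjusted avx10_2-512-vcvtph2ibs-2.c test (rounding immediate 8,
   i.e. round-to-nearest with suppressed exceptions).  */
__m512i
cvt_ph_to_ibs_512_rn (__m512h a)
{
  return _mm512_ipcvts_roundph_epi8 (a, _MM_FROUND_TO_NEAREST_INT
					| _MM_FROUND_NO_EXC);
}
```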
gcc/ChangeLog: * config/i386/avx10_2satcvtintrin.h: Remove rounding intrins. Use non-round intrins. * config/i386/i386-builtin.def (BDESC): Ditto. gcc/testsuite/ChangeLog: * gcc.target/i386/avx10_2-512-vcvtph2ibs-2.c: Adjust condition for rounding test. * gcc.target/i386/avx10_2-512-vcvtph2iubs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvtps2ibs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvtps2iubs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttpd2dqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttpd2qqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttpd2udqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttpd2uqqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttph2ibs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttph2iubs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2dqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2ibs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2iubs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2qqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2udqs-2.c: Ditto. * gcc.target/i386/avx10_2-512-vcvttps2uqqs-2.c: Ditto. * gcc.target/i386/avx-1.c: Remove rounding tests. * gcc.target/i386/avx10_2-satcvt-1.c: Ditto. * gcc.target/i386/sse-13.c: Ditto. * gcc.target/i386/sse-14.c: Ditto. * gcc.target/i386/sse-22.c: Ditto. * gcc.target/i386/sse-23.c: Ditto. --- gcc/config/i386/avx10_2satcvtintrin.h | 1103 +---------------- gcc/config/i386/i386-builtin.def | 16 - gcc/testsuite/gcc.target/i386/avx-1.c | 16 - .../i386/avx10_2-512-vcvtph2ibs-2.c | 2 +- .../i386/avx10_2-512-vcvtph2iubs-2.c | 2 +- .../i386/avx10_2-512-vcvtps2ibs-2.c | 2 +- .../i386/avx10_2-512-vcvtps2iubs-2.c | 2 +- .../i386/avx10_2-512-vcvttpd2dqs-2.c | 2 +- .../i386/avx10_2-512-vcvttpd2qqs-2.c | 2 +- .../i386/avx10_2-512-vcvttpd2udqs-2.c | 2 +- .../i386/avx10_2-512-vcvttpd2uqqs-2.c | 2 +- .../i386/avx10_2-512-vcvttph2ibs-2.c | 2 +- .../i386/avx10_2-512-vcvttph2iubs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2dqs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2ibs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2iubs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2qqs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2udqs-2.c | 2 +- .../i386/avx10_2-512-vcvttps2uqqs-2.c | 2 +- .../gcc.target/i386/avx10_2-satcvt-1.c | 104 +- gcc/testsuite/gcc.target/i386/sse-13.c | 16 - gcc/testsuite/gcc.target/i386/sse-14.c | 48 - gcc/testsuite/gcc.target/i386/sse-22.c | 48 - gcc/testsuite/gcc.target/i386/sse-23.c | 16 - 24 files changed, 25 insertions(+), 1374 deletions(-) diff --git a/gcc/config/i386/avx10_2satcvtintrin.h b/gcc/config/i386/avx10_2satcvtintrin.h index c662c4b7630..e68a066ae5e 100644 --- a/gcc/config/i386/avx10_2satcvtintrin.h +++ b/gcc/config/i386/avx10_2satcvtintrin.h @@ -1267,566 +1267,6 @@ _mm_cvtts_ss_epu32 (__m128 __A) } #ifdef __OPTIMIZE__ -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvts_roundph_epi8 (__m256h __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_undefined_si256 (), - (__mmask16) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvts_roundph_epi8 (__m256i __W, __mmask16 __U, __m256h __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) __W, - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvts_roundph_epi8 (__mmask16 __U, __m256h __A, const int __R) -{ - return - (__m256i) 
__builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_setzero_si256 (), - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvts_roundph_epu8 (__m256h __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_undefined_si256 (), - (__mmask16) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvts_roundph_epu8 (__m256i __W, __mmask16 __U, __m256h __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) __W, - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvts_roundph_epu8 (__mmask16 __U, __m256h __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_setzero_si256 (), - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvts_roundps_epi8 (__m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvts_roundps_epi8 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvts_roundps_epi8 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvts_roundps_epu8 (__m256 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvts_roundps_epu8 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvts_roundps_epu8 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvtts_roundph_epi8 (__m256h __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_undefined_si256 (), - (__mmask16) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvtts_roundph_epi8 (__m256i __W, __mmask16 __U, __m256h __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) __W, - (__mmask16) __U, - __R); -} - -extern __inline __m256i 
-__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvtts_roundph_epi8 (__mmask16 __U, __m256h __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_setzero_si256 (), - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvtts_roundph_epu8 (__m256h __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_undefined_si256 (), - (__mmask16) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvtts_roundph_epu8 (__m256i __W, __mmask16 __U, __m256h __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) __W, - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvtts_roundph_epu8 (__mmask16 __U, __m256h __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) __A, - (__v16hi) - _mm256_setzero_si256 (), - (__mmask16) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvtts_roundps_epi8 (__m256 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvtts_roundps_epi8 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvtts_roundps_epi8 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_ipcvtts_roundps_epu8 (__m256 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_ipcvtts_roundps_epu8 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_ipcvtts_roundps_epu8 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundpd_epi32 (__m256d __A, const int __R) -{ - return - (__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) __A, - (__v4si) - _mm_undefined_si128 (), - (__mmask8) -1, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundpd_epi32 (__m128i __W, __mmask8 __U, 
__m256d __A, - const int __R) -{ - return (__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) __A, - (__v4si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundpd_epi32 (__mmask8 __U, __m256d __A, const int __R) -{ - return - (__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) __A, - (__v4si) - _mm_setzero_si128 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundpd_epi64 (__m256d __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) __A, - (__v4di) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundpd_epi64 (__m256i __W, __mmask8 __U, __m256d __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) __A, - (__v4di) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundpd_epi64 (__mmask8 __U, __m256d __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) __A, - (__v4di) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundpd_epu32 (__m256d __A, const int __R) -{ - return - (__m128i) __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) __A, - (__v4si) - _mm_undefined_si128 (), - (__mmask8) -1, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundpd_epu32 (__m128i __W, __mmask8 __U, __m256d __A, - const int __R) -{ - return (__m128i) __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) __A, - (__v4si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m128i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundpd_epu32 (__mmask8 __U, __m256d __A, const int __R) -{ - return - (__m128i) __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) __A, - (__v4si) - _mm_setzero_si128 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundpd_epu64 (__m256d __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) __A, - (__v4di) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundpd_epu64 (__m256i __W, __mmask8 __U, __m256d __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) __A, - (__v4di) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundpd_epu64 (__mmask8 __U, __m256d __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) __A, - (__v4di) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundps_epi32 (__m256 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - 
-extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundps_epi32 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundps_epi32 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundps_epi64 (__m128 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) __A, - (__v4di) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundps_epi64 (__m256i __W, __mmask8 __U, __m128 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) __A, - (__v4di) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundps_epi64 (__mmask8 __U, __m128 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) __A, - (__v4di) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundps_epu32 (__m256 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundps_epu32 (__m256i __W, __mmask8 __U, __m256 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) __A, - (__v8si) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundps_epu32 (__mmask8 __U, __m256 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) __A, - (__v8si) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_cvtts_roundps_epu64 (__m128 __A, const int __R) -{ - return (__m256i) - __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) __A, - (__v4di) - _mm256_undefined_si256 (), - (__mmask8) -1, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_mask_cvtts_roundps_epu64 (__m256i __W, __mmask8 __U, __m128 __A, - const int __R) -{ - return (__m256i) __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) __A, - (__v4di) __W, - (__mmask8) __U, - __R); -} - -extern __inline __m256i -__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) -_mm256_maskz_cvtts_roundps_epu64 (__mmask8 __U, __m128 __A, const int __R) -{ - return - (__m256i) __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) __A, - (__v4di) - _mm256_setzero_si256 (), - (__mmask8) __U, - __R); -} - extern __inline int __attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) _mm_cvtts_roundsd_epi32 (__m128d __A, const int __R) @@ 
-1859,546 +1299,6 @@ _mm_cvtts_roundss_epu32 (__m128 __A, const int __R) __R); } #else - -#define _mm256_ipcvts_roundph_epi8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_undefined_si256 ()), \ - (__mmask16) (-1), \ - (R))) - -#define _mm256_mask_ipcvts_roundph_epi8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) (W), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_maskz_ipcvts_roundph_epi8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvtph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_setzero_si256 ()), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_ipcvts_roundph_epu8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_undefined_si256 ()), \ - (__mmask16) (-1), \ - (R))) - -#define _mm256_mask_ipcvts_roundph_epu8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) (W), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_maskz_ipcvts_roundph_epu8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvtph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_setzero_si256 ()), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_ipcvts_roundps_epi8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_ipcvts_roundps_epi8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_ipcvts_roundps_epi8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvtps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_ipcvts_roundps_epu8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_ipcvts_roundps_epu8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_ipcvts_roundps_epu8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvtps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_ipcvtts_roundph_epi8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_undefined_si256 ()), \ - (__mmask16) (-1), \ - (R))) - -#define _mm256_mask_ipcvtts_roundph_epi8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) (W), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_maskz_ipcvtts_roundph_epi8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttph2ibs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_setzero_si256 ()), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_ipcvtts_roundph_epu8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_undefined_si256 ()), \ - (__mmask16) (-1), \ - (R))) - -#define _mm256_mask_ipcvtts_roundph_epu8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) (W), \ - (__mmask16) (U), \ - (R))) - -#define _mm256_maskz_ipcvtts_roundph_epu8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttph2iubs256_mask_round ((__v16hf) (A), \ - (__v16hi) \ - (_mm256_setzero_si256 ()), \ - (__mmask16) (U), \ - (R))) - -#define 
_mm256_ipcvtts_roundps_epi8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_ipcvtts_roundps_epi8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_ipcvtts_roundps_epi8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2ibs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_ipcvtts_roundps_epu8(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_ipcvtts_roundps_epu8(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_ipcvtts_roundps_epu8(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2iubs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epi32(A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_undefined_si128 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epi32(W, U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epi32(U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_setzero_si128 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epi64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epi64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epi64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epu32(A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_undefined_si128 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epu32(W, U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epu32(U, A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) (_mm_setzero_si128 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epu64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epu64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epu64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epi32(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - 
(_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epi32(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epi32(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epi64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epi64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epi64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epu32(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epu32(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epu32(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epu64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epu64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epu64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm_cvtts_roundsd_epi32(A, R) \ - ((int) __builtin_ia32_cvttsd2sis32_round ((__v2df) (A), \ - (R))) - -#define _mm_cvtts_roundsd_epu32(A, R) \ - ((unsigned int) __builtin_ia32_cvttsd2usis32_round ((__v2df) (A), \ - (R))) - -#define _mm_cvtts_roundss_epi32(A, R) \ - ((int) __builtin_ia32_cvttss2sis32_round ((__v4sf) (A), \ - (R))) - -#define _mm_cvtts_roundss_epu32(A, R) \ - ((unsigned int) __builtin_ia32_cvttss2usis32_round ((__v4sf) (A), \ - (R))) -#define _mm256_cvtts_roundpd_epi32(A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_undefined_si128 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epi32(W, U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epi32(U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2dqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_setzero_si128 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epi64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epi64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define 
_mm256_maskz_cvtts_roundpd_epi64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2qqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epu32(A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) \ - (_mm_undefined_si128 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epu32(W, U, A, R) \ - ((__m128i) __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epu32(U, A, R) \ - ((__m128i) \ - __builtin_ia32_cvttpd2udqs256_mask_round ((__v4df) (A), \ - (__v4si) (_mm_setzero_si128 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundpd_epu64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundpd_epu64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundpd_epu64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttpd2uqqs256_mask_round ((__v4df) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epi32(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epi32(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epi32(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2dqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epi64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epi64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epi64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2qqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epu32(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epu32(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epu32(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2udqs256_mask_round ((__v8sf) (A), \ - (__v8si) \ - (_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_cvtts_roundps_epu64(A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - (_mm256_undefined_si256 ()), \ - (__mmask8) (-1), \ - (R))) - -#define _mm256_mask_cvtts_roundps_epu64(W, U, A, R) \ - ((__m256i) __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) (W), \ - (__mmask8) (U), \ - (R))) - -#define _mm256_maskz_cvtts_roundps_epu64(U, A, R) \ - ((__m256i) \ - __builtin_ia32_cvttps2uqqs256_mask_round ((__v4sf) (A), \ - (__v4di) \ - 
(_mm256_setzero_si256 ()), \ - (__mmask8) (U), \ - (R))) - #define _mm_cvtts_roundsd_epi32(A, R) \ ((int) __builtin_ia32_cvttsd2sis32_round ((__v2df) (A), \ (R))) @@ -2449,6 +1349,7 @@ _mm_cvtts_ss_epu64 (__m128 __A) return (unsigned long long) __builtin_ia32_cvttss2usis64_round ((__v4sf) __A, _MM_FROUND_CUR_DIRECTION); } + #ifdef __OPTIMIZE__ extern __inline long long __attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) @@ -2474,7 +1375,6 @@ _mm_cvtts_roundss_epi64 (__m128 __A, const int __R) __R); } - extern __inline unsigned long long __attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) _mm_cvtts_roundss_epu64 (__m128 __A, const int __R) @@ -2483,7 +1383,6 @@ _mm_cvtts_roundss_epu64 (__m128 __A, const int __R) __R); } #else - #define _mm_cvtts_roundsd_epi64(A, R) \ ((long long) __builtin_ia32_cvttsd2sis64_round ((__v2df) (A), \ (R))) diff --git a/gcc/config/i386/i386-builtin.def b/gcc/config/i386/i386-builtin.def index 030c72bd758..06855b1e017 100644 --- a/gcc/config/i386/i386-builtin.def +++ b/gcc/config/i386/i386-builtin.def @@ -3819,37 +3819,21 @@ BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_subv4df3_mask_round, "__builtin BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_subv16hf3_mask_round, "__builtin_ia32_subph256_mask_round", IX86_BUILTIN_VSUBPH256_MASK_ROUND, UNKNOWN, (int) V16HF_FTYPE_V16HF_V16HF_V16HF_UHI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_subv8sf3_mask_round, "__builtin_ia32_subps256_mask_round", IX86_BUILTIN_VSUBPS256_MASK_ROUND, UNKNOWN, (int) V8SF_FTYPE_V8SF_V8SF_V8SF_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvt2ps2phx_v32hf_mask_round, "__builtin_ia32_vcvt2ps2phx512_mask_round", IX86_BUILTIN_VCVT2PS2PHX_V32HF_MASK_ROUND, UNKNOWN, (int) V32HF_FTYPE_V16SF_V16SF_V32HF_USI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvtph2ibsv16hf_mask_round, "__builtin_ia32_cvtph2ibs256_mask_round", IX86_BUILTIN_CVTPH2IBS256_MASK_ROUND, UNKNOWN, (int) V16HI_FTYPE_V16HF_V16HI_UHI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvtph2ibsv32hf_mask_round, "__builtin_ia32_cvtph2ibs512_mask_round", IX86_BUILTIN_CVTPH2IBS512_MASK_ROUND, UNKNOWN, (int) V32HI_FTYPE_V32HF_V32HI_USI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvtph2iubsv16hf_mask_round, "__builtin_ia32_cvtph2iubs256_mask_round", IX86_BUILTIN_CVTPH2IUBS256_MASK_ROUND, UNKNOWN, (int) V16HI_FTYPE_V16HF_V16HI_UHI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvtph2iubsv32hf_mask_round, "__builtin_ia32_cvtph2iubs512_mask_round", IX86_BUILTIN_CVTPH2IUBS512_MASK_ROUND, UNKNOWN, (int) V32HI_FTYPE_V32HF_V32HI_USI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvtps2ibsv8sf_mask_round, "__builtin_ia32_cvtps2ibs256_mask_round", IX86_BUILTIN_CVTPS2IBS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvtps2ibsv16sf_mask_round, "__builtin_ia32_cvtps2ibs512_mask_round", IX86_BUILTIN_CVTPS2IBS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_UHI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvtps2iubsv8sf_mask_round, "__builtin_ia32_cvtps2iubs256_mask_round", IX86_BUILTIN_CVTPS2IUBS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvtps2iubsv16sf_mask_round, "__builtin_ia32_cvtps2iubs512_mask_round", IX86_BUILTIN_CVTPS2IUBS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_UHI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, 
CODE_FOR_avx10_2_cvttph2ibsv16hf_mask_round, "__builtin_ia32_cvttph2ibs256_mask_round", IX86_BUILTIN_CVTTPH2IBS256_MASK_ROUND, UNKNOWN, (int) V16HI_FTYPE_V16HF_V16HI_UHI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvttph2ibsv32hf_mask_round, "__builtin_ia32_cvttph2ibs512_mask_round", IX86_BUILTIN_CVTTPH2IBS512_MASK_ROUND, UNKNOWN, (int) V32HI_FTYPE_V32HF_V32HI_USI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvttph2iubsv16hf_mask_round, "__builtin_ia32_cvttph2iubs256_mask_round", IX86_BUILTIN_CVTTPH2IUBS256_MASK_ROUND, UNKNOWN, (int) V16HI_FTYPE_V16HF_V16HI_UHI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvttph2iubsv32hf_mask_round, "__builtin_ia32_cvttph2iubs512_mask_round", IX86_BUILTIN_CVTTPH2IUBS512_MASK_ROUND, UNKNOWN, (int) V32HI_FTYPE_V32HF_V32HI_USI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvttps2ibsv8sf_mask_round, "__builtin_ia32_cvttps2ibs256_mask_round", IX86_BUILTIN_CVTTPS2IBS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvttps2ibsv16sf_mask_round, "__builtin_ia32_cvttps2ibs512_mask_round", IX86_BUILTIN_CVTTPS2IBS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_UHI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_cvttps2iubsv8sf_mask_round, "__builtin_ia32_cvttps2iubs256_mask_round", IX86_BUILTIN_CVTTPS2IUBS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_cvttps2iubsv16sf_mask_round, "__builtin_ia32_cvttps2iubs512_mask_round", IX86_BUILTIN_CVTTPS2IUBS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_UHI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttpd2dqsv4df_mask_round, "__builtin_ia32_cvttpd2dqs256_mask_round", IX86_BUILTIN_VCVTTPD2DQS256_MASK_ROUND, UNKNOWN, (int) V4SI_FTYPE_V4DF_V4SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttpd2dqsv8df_mask_round, "__builtin_ia32_cvttpd2dqs512_mask_round", IX86_BUILTIN_VCVTTPD2DQS512_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8DF_V8SI_QI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttpd2qqsv4df_mask_round, "__builtin_ia32_cvttpd2qqs256_mask_round", IX86_BUILTIN_VCVTTPD2QQS256_MASK_ROUND, UNKNOWN, (int) V4DI_FTYPE_V4DF_V4DI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttpd2qqsv8df_mask_round, "__builtin_ia32_cvttpd2qqs512_mask_round", IX86_BUILTIN_VCVTTPD2QQS512_MASK_ROUND, UNKNOWN, (int) V8DI_FTYPE_V8DF_V8DI_QI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttpd2udqsv4df_mask_round, "__builtin_ia32_cvttpd2udqs256_mask_round", IX86_BUILTIN_VCVTTPD2UDQS256_MASK_ROUND, UNKNOWN, (int) V4SI_FTYPE_V4DF_V4SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttpd2udqsv8df_mask_round, "__builtin_ia32_cvttpd2udqs512_mask_round", IX86_BUILTIN_VCVTTPD2UDQS512_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8DF_V8SI_QI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttpd2uqqsv4df_mask_round, "__builtin_ia32_cvttpd2uqqs256_mask_round", IX86_BUILTIN_VCVTTPD2UQQS256_MASK_ROUND, UNKNOWN, (int) V4DI_FTYPE_V4DF_V4DI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttpd2uqqsv8df_mask_round, "__builtin_ia32_cvttpd2uqqs512_mask_round", IX86_BUILTIN_VCVTTPD2UQQS512_MASK_ROUND, UNKNOWN, (int) V8DI_FTYPE_V8DF_V8DI_QI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttps2dqsv8sf_mask_round, "__builtin_ia32_cvttps2dqs256_mask_round", 
IX86_BUILTIN_VCVTTPS2DQS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttps2dqsv16sf_mask_round, "__builtin_ia32_cvttps2dqs512_mask_round", IX86_BUILTIN_VCVTTPS2DQS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_HI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttps2qqsv4di_mask_round, "__builtin_ia32_cvttps2qqs256_mask_round", IX86_BUILTIN_VCVTTPS2QQS256_MASK_ROUND, UNKNOWN, (int) V4DI_FTYPE_V4SF_V4DI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttps2qqsv8di_mask_round, "__builtin_ia32_cvttps2qqs512_mask_round", IX86_BUILTIN_VCVTTPS2QQS512_MASK_ROUND, UNKNOWN, (int) V8DI_FTYPE_V8SF_V8DI_QI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttps2udqsv8sf_mask_round, "__builtin_ia32_cvttps2udqs256_mask_round", IX86_BUILTIN_VCVTTPS2UDQS256_MASK_ROUND, UNKNOWN, (int) V8SI_FTYPE_V8SF_V8SI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttps2udqsv16sf_mask_round, "__builtin_ia32_cvttps2udqs512_mask_round", IX86_BUILTIN_VCVTTPS2UDQS512_MASK_ROUND, UNKNOWN, (int) V16SI_FTYPE_V16SF_V16SI_HI_INT) -BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttps2uqqsv4di_mask_round, "__builtin_ia32_cvttps2uqqs256_mask_round", IX86_BUILTIN_VCVTTPS2UQQS256_MASK_ROUND, UNKNOWN, (int) V4DI_FTYPE_V4SF_V4DI_UQI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_512, CODE_FOR_avx10_2_vcvttps2uqqsv8di_mask_round, "__builtin_ia32_cvttps2uqqs512_mask_round", IX86_BUILTIN_VCVTTPS2UQQS512_MASK_ROUND, UNKNOWN, (int) V8DI_FTYPE_V8SF_V8DI_QI_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttsd2sissi_round, "__builtin_ia32_cvttsd2sis32_round", IX86_BUILTIN_VCVTTSD2SIS32_ROUND, UNKNOWN, (int) INT_FTYPE_V2DF_INT) BDESC (0, OPTION_MASK_ISA2_AVX10_2_256, CODE_FOR_avx10_2_vcvttsd2sisdi_round, "__builtin_ia32_cvttsd2sis64_round", IX86_BUILTIN_VCVTTSD2SIS64_ROUND, UNKNOWN, (int) INT64_FTYPE_V2DF_INT) diff --git a/gcc/testsuite/gcc.target/i386/avx-1.c b/gcc/testsuite/gcc.target/i386/avx-1.c index 7df03a0096e..4fc54246d1d 100644 --- a/gcc/testsuite/gcc.target/i386/avx-1.c +++ b/gcc/testsuite/gcc.target/i386/avx-1.c @@ -1051,22 +1051,6 @@ #define __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, 8) /* avx10_2satcvtintrin.h */ -#define __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvttph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, D) 
__builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, 8) #define __builtin_ia32_cvttsd2sis32_round(A, B) __builtin_ia32_cvttsd2sis32_round(A, 8) #define __builtin_ia32_cvttsd2usis32_round(A, B) __builtin_ia32_cvttsd2usis32_round(A, 8) #define __builtin_ia32_cvttss2sis32_round(A, B) __builtin_ia32_cvttss2sis32_round(A, 8) diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2ibs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2ibs-2.c index 0c860b02046..e193b7bcaad 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2ibs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2ibs-2.c @@ -66,7 +66,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_w) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvts_roundph_epi8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvts_roundph_epi8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvts_roundph_epi8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2iubs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2iubs-2.c index 75e4e1141be..d2d53cc135a 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2iubs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtph2iubs-2.c @@ -66,7 +66,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_w) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvts_roundph_epu8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvts_roundph_epu8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvts_roundph_epu8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2ibs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2ibs-2.c index 44a0e2ce58c..592d50bbe05 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2ibs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2ibs-2.c @@ -67,7 +67,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvts_roundps_epi8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvts_roundps_epi8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvts_roundps_epi8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2iubs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2iubs-2.c index 709734e1eeb..e21136aee52 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2iubs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvtps2iubs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvts_roundps_epu8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvts_roundps_epu8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvts_roundps_epu8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2dqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2dqs-2.c index 0b12aed2b27..bccc6438ed7 100644 --- 
a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2dqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2dqs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN_HALF, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundpd_epi32) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundpd_epi32) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundpd_epi32) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2qqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2qqs-2.c index fdadda9f93a..ddbb47d68ef 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2qqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2qqs-2.c @@ -64,7 +64,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_q) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundpd_epi64) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundpd_epi64) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundpd_epi64) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2udqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2udqs-2.c index dbf056726ce..8057ee38c80 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2udqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2udqs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN_HALF, i_ud) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundpd_epu32) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundpd_epu32) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundpd_epu32) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2uqqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2uqqs-2.c index d5bb46a7c5a..a3dd54b3ace 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2uqqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttpd2uqqs-2.c @@ -64,7 +64,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_uq) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundpd_epu64) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundpd_epu64) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundpd_epu64) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2ibs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2ibs-2.c index 03f18f7f21f..ebf2eaa8673 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2ibs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2ibs-2.c @@ -66,7 +66,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_w) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvtts_roundph_epi8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvtts_roundph_epi8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvtts_roundph_epi8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2iubs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2iubs-2.c index 8a05dfd951a..c7783bd04bd 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2iubs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttph2iubs-2.c @@ -55,7 +55,7 @@ TEST (void) CALC (s.a, res_ref); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvtts_roundph_epu8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvtts_roundph_epu8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvtts_roundph_epu8) (mask, 
s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2dqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2dqs-2.c index ee268cc95ab..1e12c673543 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2dqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2dqs-2.c @@ -64,7 +64,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundps_epi32) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundps_epi32) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundps_epi32) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2ibs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2ibs-2.c index f63fee4367a..ccdfd27784b 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2ibs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2ibs-2.c @@ -67,7 +67,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvtts_roundps_epi8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvtts_roundps_epi8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvtts_roundps_epi8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2iubs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2iubs-2.c index 1b0f70e0624..f0c05275e34 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2iubs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2iubs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_d) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_ipcvtts_roundps_epu8) (s.x, 8); res2.x = INTRINSIC (_mask_ipcvtts_roundps_epu8) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_ipcvtts_roundps_epu8) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2qqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2qqs-2.c index f8fde12f986..98ee3c96e09 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2qqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2qqs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_q) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundps_epi64) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundps_epi64) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundps_epi64) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2udqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2udqs-2.c index 2d7f631273c..4e02fe7ba6c 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2udqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2udqs-2.c @@ -64,7 +64,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_ud) (res3, res_ref)) abort (); -#if AVX512F_LEN != 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundps_epu32) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundps_epu32) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundps_epu32) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2uqqs-2.c b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2uqqs-2.c index 32e206d7127..6beeb26c22f 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2uqqs-2.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-512-vcvttps2uqqs-2.c @@ -65,7 +65,7 @@ TEST (void) if (UNION_CHECK (AVX512F_LEN, i_uq) (res3, res_ref)) abort (); -#if AVX512F_LEN 
!= 128 +#if AVX512F_LEN == 512 res1.x = INTRINSIC (_cvtts_roundps_epu64) (s.x, 8); res2.x = INTRINSIC (_mask_cvtts_roundps_epu64) (res2.x, mask, s.x, 8); res3.x = INTRINSIC (_maskz_cvtts_roundps_epu64) (mask, s.x, 8); diff --git a/gcc/testsuite/gcc.target/i386/avx10_2-satcvt-1.c b/gcc/testsuite/gcc.target/i386/avx10_2-satcvt-1.c index dd8b874d7c3..47c0cce9ced 100644 --- a/gcc/testsuite/gcc.target/i386/avx10_2-satcvt-1.c +++ b/gcc/testsuite/gcc.target/i386/avx10_2-satcvt-1.c @@ -1,45 +1,29 @@ /* { dg-do compile } */ /* { dg-options "-O2 -march=x86-64-v3 -mavx10.2-256" } */ -/* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\{rn-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtph2ibs\[ \\t\]+\{rz-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\{rn-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtph2iubs\[ \\t\]+\{rz-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2ibs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvttph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times 
"vcvttph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttph2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2iubs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttph2iubs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\{rn-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2ibs\[ \\t\]+\{rz-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\{rn-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvtps2iubs\[ \\t\]+\{rz-sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2ibs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 2 } } */ +/* { dg-final { scan-assembler-times "vcvttps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { 
scan-assembler-times "vcvttps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2iubs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2iubs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2iubs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtbf162ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtbf162ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\[^\{\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvtbf162ibs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}\[^\n\r]*(?:\n|\[ \\t\]+#)" 1 } } */ @@ -91,51 +75,27 @@ /* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2dqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { 
scan-assembler-times "vcvttpd2udqsy\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttpd2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2dqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2qqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2udqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2udqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2udqs\[ \\t\]+\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2udqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2udqs\[ \\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2udqs\[ 
\\t\]+\{sae\}\[^\{\n\]*%ymm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ -/* { dg-final { scan-assembler-times "vcvttps2uqqs\[ \\t\]+\{sae\}\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%ymm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2dqsx\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2dqsx\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}(?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "vcvttpd2dqsx\[ \\t\]+\[^\{\n\]*%xmm\[0-9\]+\[^\n\r]*%xmm\[0-9\]+\{%k\[0-9\]\}\{z\}(?:\n|\[ \\t\]+#)" 1 } } */ @@ -202,58 +162,34 @@ avx10_2_test (void) xi = _mm256_ipcvts_ph_epi8 (xh); xi = _mm256_mask_ipcvts_ph_epi8 (xi, m16, xh); xi = _mm256_maskz_ipcvts_ph_epi8 (m16, xh); - xi = _mm256_ipcvts_roundph_epi8 (xh, 4); - xi = _mm256_mask_ipcvts_roundph_epi8 (xi, m16, xh, 8); - xi = _mm256_maskz_ipcvts_roundph_epi8 (m16, xh, 11); xi = _mm256_ipcvts_ph_epu8 (xh); xi = _mm256_mask_ipcvts_ph_epu8 (xi, m16, xh); xi = _mm256_maskz_ipcvts_ph_epu8 (m16, xh); - xi = _mm256_ipcvts_roundph_epu8 (xh, 4); - xi = _mm256_mask_ipcvts_roundph_epu8 (xi, m16, xh, 8); - xi = _mm256_maskz_ipcvts_roundph_epu8 (m16, xh, 11); xi = _mm256_ipcvtts_ph_epi8 (xh); xi = _mm256_mask_ipcvtts_ph_epi8 (xi, m16, xh); xi = _mm256_maskz_ipcvtts_ph_epi8 (m16, xh); - xi = _mm256_ipcvtts_roundph_epi8 (xh, 4); - xi = _mm256_mask_ipcvtts_roundph_epi8 (xi, m16, xh, 8); - xi = _mm256_maskz_ipcvtts_roundph_epi8 (m16, xh, 8); xi = _mm256_ipcvtts_ph_epu8 (xh); xi = _mm256_mask_ipcvtts_ph_epu8 (xi, m16, xh); xi = _mm256_maskz_ipcvtts_ph_epu8 (m16, xh); - xi = _mm256_ipcvtts_roundph_epu8 (xh, 4); - xi = _mm256_mask_ipcvtts_roundph_epu8 (xi, m16, xh, 8); - xi = _mm256_maskz_ipcvtts_roundph_epu8 (m16, xh, 8); xi = _mm256_ipcvts_ps_epi8 (x); xi = _mm256_mask_ipcvts_ps_epi8 (xi, m8, x); xi = _mm256_maskz_ipcvts_ps_epi8 (m8, x); - xi = _mm256_ipcvts_roundps_epi8 (x, 4); - xi = _mm256_mask_ipcvts_roundps_epi8 (xi, m8, x, 8); - xi = _mm256_maskz_ipcvts_roundps_epi8 (m8, x, 11); xi = _mm256_ipcvts_ps_epu8 (x); xi = _mm256_mask_ipcvts_ps_epu8 (xi, m8, x); xi = _mm256_maskz_ipcvts_ps_epu8 (m8, x); - xi = _mm256_ipcvts_roundps_epu8 (x, 4); - xi = _mm256_mask_ipcvts_roundps_epu8 (xi, m8, x, 8); - xi = _mm256_maskz_ipcvts_roundps_epu8 (m8, x, 11); xi = _mm256_ipcvtts_ps_epi8 (x); xi = _mm256_mask_ipcvtts_ps_epi8 (xi, m8, x); xi = _mm256_maskz_ipcvtts_ps_epi8 (m8, x); - xi = _mm256_ipcvtts_roundps_epi8 (x, 4); - xi = _mm256_mask_ipcvtts_roundps_epi8 (xi, m8, x, 8); - xi = _mm256_maskz_ipcvtts_roundps_epi8 (m8, x, 8); xi = _mm256_ipcvtts_ps_epu8 (x); xi = _mm256_mask_ipcvtts_ps_epu8 (xi, m8, x); xi = _mm256_maskz_ipcvtts_ps_epu8 (m8, x); - xi = _mm256_ipcvtts_roundps_epu8 (x, 4); - xi = 
_mm256_mask_ipcvtts_roundps_epu8 (xi, m8, x, 8); - xi = _mm256_maskz_ipcvtts_roundps_epu8 (m8, x, 8); xi = _mm256_ipcvts_bf16_epi8 (xbh); xi = _mm256_mask_ipcvts_bf16_epi8 (xi, m16, xbh); @@ -322,58 +258,34 @@ avx10_2_test (void) hxi = _mm256_cvtts_pd_epi32 (xd); hxi = _mm256_mask_cvtts_pd_epi32 (hxi, m8, xd); hxi = _mm256_maskz_cvtts_pd_epi32 (m8, xd); - hxi = _mm256_cvtts_roundpd_epi32 (xd, 8); - hxi = _mm256_mask_cvtts_roundpd_epi32 (hxi, m8, xd, 8); - hxi = _mm256_maskz_cvtts_roundpd_epi32 (m8, xd, 8); xi = _mm256_cvtts_pd_epi64 (xd); xi = _mm256_mask_cvtts_pd_epi64 (xi, m8, xd); xi = _mm256_maskz_cvtts_pd_epi64 (m8, xd); - xi = _mm256_cvtts_roundpd_epi64 (xd, 8); - xi = _mm256_mask_cvtts_roundpd_epi64 (xi, m8, xd, 8); - xi = _mm256_maskz_cvtts_roundpd_epi64 (m8, xd, 8); hxi = _mm256_cvtts_pd_epu32 (xd); hxi = _mm256_mask_cvtts_pd_epu32 (hxi, m8, xd); hxi = _mm256_maskz_cvtts_pd_epu32 (m8, xd); - hxi = _mm256_cvtts_roundpd_epu32 (xd, 8); - hxi = _mm256_mask_cvtts_roundpd_epu32 (hxi, m8, xd, 8); - hxi = _mm256_maskz_cvtts_roundpd_epu32 (m8, xd, 8); xi = _mm256_cvtts_pd_epu64 (xd); xi = _mm256_mask_cvtts_pd_epu64 (xi, m8, xd); xi = _mm256_maskz_cvtts_pd_epu64 (m8, xd); - xi = _mm256_cvtts_roundpd_epu64 (xd, 8); - xi = _mm256_mask_cvtts_roundpd_epu64 (xi, m8, xd, 8); - xi = _mm256_maskz_cvtts_roundpd_epu64 (m8, xd, 8); xi = _mm256_cvtts_ps_epi32 (x); xi = _mm256_mask_cvtts_ps_epi32 (xi, m16, x); xi = _mm256_maskz_cvtts_ps_epi32 (m16, x); - xi = _mm256_cvtts_roundps_epi32 (x, 8); - xi = _mm256_mask_cvtts_roundps_epi32 (xi, m16, x, 8); - xi = _mm256_maskz_cvtts_roundps_epi32 (m16, x, 8); xi = _mm256_cvtts_ps_epi64 (hx); xi = _mm256_mask_cvtts_ps_epi64 (xi, m8, hx); xi = _mm256_maskz_cvtts_ps_epi64 (m8, hx); - xi = _mm256_cvtts_roundps_epi64 (hx, 8); - xi = _mm256_mask_cvtts_roundps_epi64 (xi, m8, hx, 8); - xi = _mm256_maskz_cvtts_roundps_epi64 (m8, hx, 8); xi = _mm256_cvtts_ps_epu32 (x); xi = _mm256_mask_cvtts_ps_epu32 (xi, m16, x); xi = _mm256_maskz_cvtts_ps_epu32 (m16, x); - xi = _mm256_cvtts_roundps_epu32 (x, 8); - xi = _mm256_mask_cvtts_roundps_epu32 (xi, m16, x, 8); - xi = _mm256_maskz_cvtts_roundps_epu32 (m16, x, 8); xi = _mm256_cvtts_ps_epu64 (hx); xi = _mm256_mask_cvtts_ps_epu64 (xi, m8, hx); xi = _mm256_maskz_cvtts_ps_epu64 (m8, hx); - xi = _mm256_cvtts_roundps_epu64 (hx, 8); - xi = _mm256_mask_cvtts_roundps_epu64 (xi, m8, hx, 8); - xi = _mm256_maskz_cvtts_roundps_epu64 (m8, hx, 8); hxi = _mm_cvtts_pd_epi32 (hxd); hxi = _mm_mask_cvtts_pd_epi32 (hxi, m8, hxd); diff --git a/gcc/testsuite/gcc.target/i386/sse-13.c b/gcc/testsuite/gcc.target/i386/sse-13.c index 2dd03ac7f1e..e5d8e7b61fd 100644 --- a/gcc/testsuite/gcc.target/i386/sse-13.c +++ b/gcc/testsuite/gcc.target/i386/sse-13.c @@ -1062,22 +1062,6 @@ #define __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, 8) /* avx10_2satcvtintrin.h */ -#define __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2iubs256_mask_round(A, B, C, D) 
__builtin_ia32_cvttph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, 8) #define __builtin_ia32_cvttsd2sis32_round(A, B) __builtin_ia32_cvttsd2sis32_round(A, 8) #define __builtin_ia32_cvttsd2usis32_round(A, B) __builtin_ia32_cvttsd2usis32_round(A, 8) #define __builtin_ia32_cvttss2sis32_round(A, B) __builtin_ia32_cvttss2sis32_round(A, 8) diff --git a/gcc/testsuite/gcc.target/i386/sse-14.c b/gcc/testsuite/gcc.target/i386/sse-14.c index bacd58896d8..07110900025 100644 --- a/gcc/testsuite/gcc.target/i386/sse-14.c +++ b/gcc/testsuite/gcc.target/i386/sse-14.c @@ -1480,54 +1480,6 @@ test_2 (_mm512_maskz_cvtts_roundps_epu64, __m512i, __mmask8, __m256, 8) test_3 (_mm512_mask_cvtts_roundps_epu64, __m512i, __m512i, __mmask8, __m256, 8) /* avx10_2satcvtintrin.h */ -test_1 (_mm256_ipcvts_roundph_epi8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvts_roundph_epu8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvts_roundps_epi8, __m256i, __m256, 8) -test_1 (_mm256_ipcvts_roundps_epu8, __m256i, __m256, 8) -test_1 (_mm256_ipcvtts_roundph_epi8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvtts_roundph_epu8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvtts_roundps_epi8, __m256i, __m256, 8) -test_1 (_mm256_ipcvtts_roundps_epu8, __m256i, __m256, 8) -test_2 (_mm256_maskz_ipcvts_roundph_epi8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvts_roundph_epu8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvts_roundps_epi8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvts_roundps_epu8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvtts_roundph_epi8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvtts_roundph_epu8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvtts_roundps_epi8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvtts_roundps_epu8, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvts_roundph_epi8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvts_roundph_epu8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvts_roundps_epi8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvts_roundps_epu8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvtts_roundph_epi8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvtts_roundph_epu8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvtts_roundps_epi8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 
(_mm256_mask_ipcvtts_roundps_epu8, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundpd_epi32, __m128i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epi32, __m128i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epi32, __m128i, __m128i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epi64, __m256i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epi64, __m256i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epi64, __m256i, __m256i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epu32, __m128i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epu32, __m128i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epu32, __m128i, __m128i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epu64, __m256i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epu64, __m256i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epu64, __m256i, __m256i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundps_epi32, __m256i, __m256, 8) -test_2 (_mm256_maskz_cvtts_roundps_epi32, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_cvtts_roundps_epi32, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundps_epi64, __m256i, __m128, 8) -test_2 (_mm256_maskz_cvtts_roundps_epi64, __m256i, __mmask8, __m128, 8) -test_3 (_mm256_mask_cvtts_roundps_epi64, __m256i, __m256i, __mmask8, __m128, 8) -test_1 (_mm256_cvtts_roundps_epu32, __m256i, __m256, 8) -test_2 (_mm256_maskz_cvtts_roundps_epu32, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_cvtts_roundps_epu32, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundps_epu64, __m256i, __m128, 8) -test_2 (_mm256_maskz_cvtts_roundps_epu64, __m256i, __mmask8, __m128, 8) -test_3 (_mm256_mask_cvtts_roundps_epu64, __m256i, __m256i, __mmask8, __m128, 8) test_1 (_mm_cvtts_roundsd_epi32, int, __m128d, 8) test_1 (_mm_cvtts_roundsd_epu32, unsigned int, __m128d, 8) test_1 (_mm_cvtts_roundss_epi32, int, __m128, 8) diff --git a/gcc/testsuite/gcc.target/i386/sse-22.c b/gcc/testsuite/gcc.target/i386/sse-22.c index eb65328b894..bb725055e9a 100644 --- a/gcc/testsuite/gcc.target/i386/sse-22.c +++ b/gcc/testsuite/gcc.target/i386/sse-22.c @@ -1519,54 +1519,6 @@ test_2 (_mm512_maskz_cvtts_roundps_epu64, __m512i, __mmask8, __m256, 8) test_3 (_mm512_mask_cvtts_roundps_epu64, __m512i, __m512i, __mmask8, __m256, 8) /* avx10_2satcvtintrin.h */ -test_1 (_mm256_ipcvts_roundph_epi8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvts_roundph_epu8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvts_roundps_epi8, __m256i, __m256, 8) -test_1 (_mm256_ipcvts_roundps_epu8, __m256i, __m256, 8) -test_1 (_mm256_ipcvtts_roundph_epi8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvtts_roundph_epu8, __m256i, __m256h, 8) -test_1 (_mm256_ipcvtts_roundps_epi8, __m256i, __m256, 8) -test_1 (_mm256_ipcvtts_roundps_epu8, __m256i, __m256, 8) -test_2 (_mm256_maskz_ipcvts_roundph_epi8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvts_roundph_epu8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvts_roundps_epi8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvts_roundps_epu8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvtts_roundph_epi8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvtts_roundph_epu8, __m256i, __mmask16, __m256h, 8) -test_2 (_mm256_maskz_ipcvtts_roundps_epi8, __m256i, __mmask8, __m256, 8) -test_2 (_mm256_maskz_ipcvtts_roundps_epu8, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvts_roundph_epi8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvts_roundph_epu8, __m256i, 
__m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvts_roundps_epi8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvts_roundps_epu8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvtts_roundph_epi8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvtts_roundph_epu8, __m256i, __m256i, __mmask16, __m256h, 8) -test_3 (_mm256_mask_ipcvtts_roundps_epi8, __m256i, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_ipcvtts_roundps_epu8, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundpd_epi32, __m128i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epi32, __m128i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epi32, __m128i, __m128i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epi64, __m256i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epi64, __m256i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epi64, __m256i, __m256i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epu32, __m128i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epu32, __m128i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epu32, __m128i, __m128i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundpd_epu64, __m256i, __m256d, 8) -test_2 (_mm256_maskz_cvtts_roundpd_epu64, __m256i, __mmask8, __m256d, 8) -test_3 (_mm256_mask_cvtts_roundpd_epu64, __m256i, __m256i, __mmask8, __m256d, 8) -test_1 (_mm256_cvtts_roundps_epi32, __m256i, __m256, 8) -test_2 (_mm256_maskz_cvtts_roundps_epi32, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_cvtts_roundps_epi32, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundps_epi64, __m256i, __m128, 8) -test_2 (_mm256_maskz_cvtts_roundps_epi64, __m256i, __mmask8, __m128, 8) -test_3 (_mm256_mask_cvtts_roundps_epi64, __m256i, __m256i, __mmask8, __m128, 8) -test_1 (_mm256_cvtts_roundps_epu32, __m256i, __m256, 8) -test_2 (_mm256_maskz_cvtts_roundps_epu32, __m256i, __mmask8, __m256, 8) -test_3 (_mm256_mask_cvtts_roundps_epu32, __m256i, __m256i, __mmask8, __m256, 8) -test_1 (_mm256_cvtts_roundps_epu64, __m256i, __m128, 8) -test_2 (_mm256_maskz_cvtts_roundps_epu64, __m256i, __mmask8, __m128, 8) -test_3 (_mm256_mask_cvtts_roundps_epu64, __m256i, __m256i, __mmask8, __m128, 8) test_1 (_mm_cvtts_roundsd_epi32, int, __m128d, 8) test_1 (_mm_cvtts_roundsd_epu32, unsigned int, __m128d, 8) test_1 (_mm_cvtts_roundss_epi32, int, __m128, 8) diff --git a/gcc/testsuite/gcc.target/i386/sse-23.c b/gcc/testsuite/gcc.target/i386/sse-23.c index 292b566c5ce..6d02a16e681 100644 --- a/gcc/testsuite/gcc.target/i386/sse-23.c +++ b/gcc/testsuite/gcc.target/i386/sse-23.c @@ -1033,22 +1033,6 @@ #define __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs512_mask_round(A, B, C, 8) /* avx10_2satcvtintrin.h */ -#define __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvtps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttph2ibs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttph2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvttph2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2ibs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2ibs256_mask_round(A, B, 
C, 8) -#define __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2iubs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttpd2uqqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2dqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2qqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2udqs256_mask_round(A, B, C, 8) -#define __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, D) __builtin_ia32_cvttps2uqqs256_mask_round(A, B, C, 8) #define __builtin_ia32_cvttsd2sis32_round(A, B) __builtin_ia32_cvttsd2sis32_round(A, 8) #define __builtin_ia32_cvttsd2usis32_round(A, B) __builtin_ia32_cvttsd2usis32_round(A, 8) #define __builtin_ia32_cvttss2sis32_round(A, B) __builtin_ia32_cvttss2sis32_round(A, 8) -- 2.31.1
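Illustrative note (not part of the patch): a minimal sketch of how user code moves from a removed 256-bit rounding intrinsic to the non-round form that the adjusted tests above exercise. The wrapper function name below is hypothetical; the intrinsics and the -march=x86-64-v3 -mavx10.2-256 options are the ones used in the testsuite.

  #include <immintrin.h>

  /* Before this patch a caller could write
       __m256i r = _mm256_ipcvts_roundph_epi8 (a, _MM_FROUND_CUR_DIRECTION);
     With the 256-bit rounding variants removed, only the plain
     non-round 256-bit intrinsic remains.  */
  __m256i
  cvt_ph_to_sat_i8 (__m256h a)
  {
    return _mm256_ipcvts_ph_epi8 (a);
  }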