From 918621d3a8b84a6afda723f22e50b19ec305f77f Mon Sep 17 00:00:00 2001
From: Alan Lawrence <alan.lawrence@arm.com>
Date: Tue, 3 Jun 2014 15:06:01 +0000
Subject: [PATCH] [PATCH AArch64 2/2] Correct signedness of builtins, remove
 casts from arm_neon.h

	* gcc/config/aarch64/aarch64-builtins.c
	(aarch64_types_binop_ssu_qualifiers): New static data.
	(TYPES_BINOP_SSU): Define.
	* gcc/config/aarch64/aarch64-simd-builtins.def (suqadd, ushl, urshl,
	urshr_n, ushll_n): Use appropriate unsigned qualifiers.
	* gcc/config/aarch64/arm_neon.h (vrshl_u8, vrshl_u16, vrshl_u32,
	vrshl_u64, vrshlq_u8, vrshlq_u16, vrshlq_u32, vrshlq_u64, vrshld_u64,
	vrshr_n_u8, vrshr_n_u16, vrshr_n_u32, vrshr_n_u64, vrshrq_n_u8,
	vrshrq_n_u16, vrshrq_n_u32, vrshrq_n_u64, vrshrd_n_u64, vshll_n_u8,
	vshll_n_u16, vshll_n_u32, vuqadd_s8, vuqadd_s16, vuqadd_s32,
	vuqadd_s64, vuqaddq_s8, vuqaddq_s16, vuqaddq_s32, vuqaddq_s64,
	vuqaddb_s8, vuqaddh_s16, vuqadds_s32, vuqaddd_s64): Add signedness
	suffix to builtin function name, remove cast.
	(vshl_s8, vshl_s16, vshl_s32, vshl_s64, vshl_u8, vshl_u16, vshl_u32,
	vshl_u64, vshlq_s8, vshlq_s16, vshlq_s32, vshlq_s64, vshlq_u8,
	vshlq_u16, vshlq_u32, vshlq_u64, vshld_s64, vshld_u64): Remove cast.

From-SVN: r211186
---
 gcc/ChangeLog                                |  19 ++++
 gcc/config/aarch64/aarch64-builtins.c        |   4 +
 gcc/config/aarch64/aarch64-simd-builtins.def |  10 +-
 gcc/config/aarch64/arm_neon.h                | 102 +++++++++----------
 4 files changed, 79 insertions(+), 56 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index c196684ade0..f8a3f87e02c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,22 @@
+2014-06-03  Alan Lawrence  <alan.lawrence@arm.com>
+
+	* gcc/config/aarch64/aarch64-builtins.c
+	(aarch64_types_binop_ssu_qualifiers): New static data.
+	(TYPES_BINOP_SSU): Define.
+	* gcc/config/aarch64/aarch64-simd-builtins.def (suqadd, ushl, urshl,
+	urshr_n, ushll_n): Use appropriate unsigned qualifiers.
+	* gcc/config/aarch64/arm_neon.h (vrshl_u8, vrshl_u16, vrshl_u32,
+	vrshl_u64, vrshlq_u8, vrshlq_u16, vrshlq_u32, vrshlq_u64, vrshld_u64,
+	vrshr_n_u8, vrshr_n_u16, vrshr_n_u32, vrshr_n_u64, vrshrq_n_u8,
+	vrshrq_n_u16, vrshrq_n_u32, vrshrq_n_u64, vrshrd_n_u64, vshll_n_u8,
+	vshll_n_u16, vshll_n_u32, vuqadd_s8, vuqadd_s16, vuqadd_s32,
+	vuqadd_s64, vuqaddq_s8, vuqaddq_s16, vuqaddq_s32, vuqaddq_s64,
+	vuqaddb_s8, vuqaddh_s16, vuqadds_s32, vuqaddd_s64): Add signedness
+	suffix to builtin function name, remove cast.
+	(vshl_s8, vshl_s16, vshl_s32, vshl_s64, vshl_u8, vshl_u16, vshl_u32,
+	vshl_u64, vshlq_s8, vshlq_s16, vshlq_s32, vshlq_s64, vshlq_u8,
+	vshlq_u16, vshlq_u32, vshlq_u64, vshld_s64, vshld_u64): Remove cast.
+
 2014-06-03  Alan Lawrence  <alan.lawrence@arm.com>
 
 	* gcc/config/aarch64/aarch64-builtins.c
diff --git a/gcc/config/aarch64/aarch64-builtins.c b/gcc/config/aarch64/aarch64-builtins.c
index eee3f2fd45a..fe4d39283b0 100644
--- a/gcc/config/aarch64/aarch64-builtins.c
+++ b/gcc/config/aarch64/aarch64-builtins.c
@@ -181,6 +181,10 @@ aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
 #define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
 static enum aarch64_type_qualifiers
+aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+  = { qualifier_none, qualifier_none, qualifier_unsigned };
+#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
+static enum aarch64_type_qualifiers
 aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_poly, qualifier_poly, qualifier_poly };
 #define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
diff --git a/gcc/config/aarch64/aarch64-simd-builtins.def b/gcc/config/aarch64/aarch64-simd-builtins.def
index b357be4d890..faa0858e3be 100644
--- a/gcc/config/aarch64/aarch64-simd-builtins.def
+++ b/gcc/config/aarch64/aarch64-simd-builtins.def
@@ -86,7 +86,7 @@
   BUILTIN_VSDQ_I (BINOP, sqsub, 0)
   BUILTIN_VSDQ_I (BINOPU, uqsub, 0)
   /* Implemented by aarch64_<sur>qadd<mode>.  */
-  BUILTIN_VSDQ_I (BINOP, suqadd, 0)
+  BUILTIN_VSDQ_I (BINOP_SSU, suqadd, 0)
   BUILTIN_VSDQ_I (BINOP_UUS, usqadd, 0)
 
   /* Implemented by aarch64_get_dreg<VSTRUCT:mode><VDC:mode>.  */
@@ -201,9 +201,9 @@
   BUILTIN_VSDQ_I_DI (BINOP, ashl, 3)
   /* Implemented by aarch64_<sur>shl<mode>.  */
   BUILTIN_VSDQ_I_DI (BINOP, sshl, 0)
-  BUILTIN_VSDQ_I_DI (BINOP, ushl, 0)
+  BUILTIN_VSDQ_I_DI (BINOP_UUS, ushl, 0)
   BUILTIN_VSDQ_I_DI (BINOP, srshl, 0)
-  BUILTIN_VSDQ_I_DI (BINOP, urshl, 0)
+  BUILTIN_VSDQ_I_DI (BINOP_UUS, urshl, 0)
 
   BUILTIN_VDQ_I (SHIFTIMM, ashr, 3)
   VAR1 (SHIFTIMM, ashr_simd, 0, di)
@@ -211,7 +211,7 @@
   VAR1 (USHIFTIMM, lshr_simd, 0, di)
   /* Implemented by aarch64_<sur>shr_n<mode>.  */
   BUILTIN_VSDQ_I_DI (SHIFTIMM, srshr_n, 0)
-  BUILTIN_VSDQ_I_DI (SHIFTIMM, urshr_n, 0)
+  BUILTIN_VSDQ_I_DI (USHIFTIMM, urshr_n, 0)
   /* Implemented by aarch64_<sur>sra_n<mode>.  */
   BUILTIN_VSDQ_I_DI (SHIFTACC, ssra_n, 0)
   BUILTIN_VSDQ_I_DI (USHIFTACC, usra_n, 0)
@@ -219,7 +219,7 @@
   BUILTIN_VSDQ_I_DI (USHIFTACC, ursra_n, 0)
   /* Implemented by aarch64_<sur>shll_n<mode>.  */
   BUILTIN_VDW (SHIFTIMM, sshll_n, 0)
-  BUILTIN_VDW (SHIFTIMM, ushll_n, 0)
+  BUILTIN_VDW (USHIFTIMM, ushll_n, 0)
   /* Implemented by aarch64_<sur>shll2_n<mode>.  */
   BUILTIN_VQW (SHIFTIMM, sshll2_n, 0)
   BUILTIN_VQW (SHIFTIMM, ushll2_n, 0)
diff --git a/gcc/config/aarch64/arm_neon.h b/gcc/config/aarch64/arm_neon.h
index 18de229eab0..e1c27f76a60 100644
--- a/gcc/config/aarch64/arm_neon.h
+++ b/gcc/config/aarch64/arm_neon.h
@@ -21445,25 +21445,25 @@ vrshl_s64 (int64x1_t __a, int64x1_t __b)
 __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
 vrshl_u8 (uint8x8_t __a, int8x8_t __b)
 {
-  return (uint8x8_t) __builtin_aarch64_urshlv8qi ((int8x8_t) __a, __b);
+  return __builtin_aarch64_urshlv8qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__))
 vrshl_u16 (uint16x4_t __a, int16x4_t __b)
 {
-  return (uint16x4_t) __builtin_aarch64_urshlv4hi ((int16x4_t) __a, __b);
+  return __builtin_aarch64_urshlv4hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__))
 vrshl_u32 (uint32x2_t __a, int32x2_t __b)
 {
-  return (uint32x2_t) __builtin_aarch64_urshlv2si ((int32x2_t) __a, __b);
+  return __builtin_aarch64_urshlv2si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vrshl_u64 (uint64x1_t __a, int64x1_t __b)
 {
-  return (uint64x1_t) __builtin_aarch64_urshldi ((int64x1_t) __a, __b);
+  return __builtin_aarch64_urshldi_uus (__a, __b);
 }
 
 __extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
@@ -21493,25 +21493,25 @@ vrshlq_s64 (int64x2_t __a, int64x2_t __b)
 __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
 vrshlq_u8 (uint8x16_t __a, int8x16_t __b)
 {
-  return (uint8x16_t) __builtin_aarch64_urshlv16qi ((int8x16_t) __a, __b);
+  return __builtin_aarch64_urshlv16qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__))
 vrshlq_u16 (uint16x8_t __a, int16x8_t __b)
 {
-  return (uint16x8_t) __builtin_aarch64_urshlv8hi ((int16x8_t) __a, __b);
+  return __builtin_aarch64_urshlv8hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
 vrshlq_u32 (uint32x4_t __a, int32x4_t __b)
 {
-  return (uint32x4_t) __builtin_aarch64_urshlv4si ((int32x4_t) __a, __b);
+  return __builtin_aarch64_urshlv4si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__))
 vrshlq_u64 (uint64x2_t __a, int64x2_t __b)
 {
-  return (uint64x2_t) __builtin_aarch64_urshlv2di ((int64x2_t) __a, __b);
+  return __builtin_aarch64_urshlv2di_uus (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
@@ -21523,7 +21523,7 @@ vrshld_s64 (int64x1_t __a, int64x1_t __b)
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vrshld_u64 (uint64x1_t __a, uint64x1_t __b)
 {
-  return (uint64x1_t) __builtin_aarch64_urshldi (__a, __b);
+  return __builtin_aarch64_urshldi_uus (__a, __b);
 }
 
 /* vrshr */
@@ -21555,25 +21555,25 @@ vrshr_n_s64 (int64x1_t __a, const int __b)
 __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
 vrshr_n_u8 (uint8x8_t __a, const int __b)
 {
-  return (uint8x8_t) __builtin_aarch64_urshr_nv8qi ((int8x8_t) __a, __b);
+  return __builtin_aarch64_urshr_nv8qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__))
 vrshr_n_u16 (uint16x4_t __a, const int __b)
 {
-  return (uint16x4_t) __builtin_aarch64_urshr_nv4hi ((int16x4_t) __a, __b);
+  return __builtin_aarch64_urshr_nv4hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__))
 vrshr_n_u32 (uint32x2_t __a, const int __b)
 {
-  return (uint32x2_t) __builtin_aarch64_urshr_nv2si ((int32x2_t) __a, __b);
+  return __builtin_aarch64_urshr_nv2si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vrshr_n_u64 (uint64x1_t __a, const int __b)
 {
-  return (uint64x1_t) __builtin_aarch64_urshr_ndi ((int64x1_t) __a, __b);
+  return __builtin_aarch64_urshr_ndi_uus (__a, __b);
 }
 
 __extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
@@ -21603,25 +21603,25 @@ vrshrq_n_s64 (int64x2_t __a, const int __b)
 __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
 vrshrq_n_u8 (uint8x16_t __a, const int __b)
 {
-  return (uint8x16_t) __builtin_aarch64_urshr_nv16qi ((int8x16_t) __a, __b);
+  return __builtin_aarch64_urshr_nv16qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__))
 vrshrq_n_u16 (uint16x8_t __a, const int __b)
 {
-  return (uint16x8_t) __builtin_aarch64_urshr_nv8hi ((int16x8_t) __a, __b);
+  return __builtin_aarch64_urshr_nv8hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
 vrshrq_n_u32 (uint32x4_t __a, const int __b)
 {
-  return (uint32x4_t) __builtin_aarch64_urshr_nv4si ((int32x4_t) __a, __b);
+  return __builtin_aarch64_urshr_nv4si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__))
 vrshrq_n_u64 (uint64x2_t __a, const int __b)
 {
-  return (uint64x2_t) __builtin_aarch64_urshr_nv2di ((int64x2_t) __a, __b);
+  return __builtin_aarch64_urshr_nv2di_uus (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
@@ -21633,7 +21633,7 @@ vrshrd_n_s64 (int64x1_t __a, const int __b)
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vrshrd_n_u64 (uint64x1_t __a, const int __b)
 {
-  return (uint64x1_t) __builtin_aarch64_urshr_ndi (__a, __b);
+  return __builtin_aarch64_urshr_ndi_uus (__a, __b);
 }
 
 /* vrsra */
@@ -21936,109 +21936,109 @@ vshld_n_u64 (uint64x1_t __a, const int __b)
 __extension__ static __inline int8x8_t __attribute__ ((__always_inline__))
 vshl_s8 (int8x8_t __a, int8x8_t __b)
 {
-  return (int8x8_t) __builtin_aarch64_sshlv8qi (__a, __b);
+  return __builtin_aarch64_sshlv8qi (__a, __b);
 }
 
 __extension__ static __inline int16x4_t __attribute__ ((__always_inline__))
 vshl_s16 (int16x4_t __a, int16x4_t __b)
 {
-  return (int16x4_t) __builtin_aarch64_sshlv4hi (__a, __b);
+  return __builtin_aarch64_sshlv4hi (__a, __b);
 }
 
 __extension__ static __inline int32x2_t __attribute__ ((__always_inline__))
 vshl_s32 (int32x2_t __a, int32x2_t __b)
 {
-  return (int32x2_t) __builtin_aarch64_sshlv2si (__a, __b);
+  return __builtin_aarch64_sshlv2si (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
 vshl_s64 (int64x1_t __a, int64x1_t __b)
 {
-  return (int64x1_t) __builtin_aarch64_sshldi (__a, __b);
+  return __builtin_aarch64_sshldi (__a, __b);
 }
 
 __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
 vshl_u8 (uint8x8_t __a, int8x8_t __b)
 {
-  return (uint8x8_t) __builtin_aarch64_ushlv8qi ((int8x8_t) __a, __b);
+  return __builtin_aarch64_ushlv8qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__))
 vshl_u16 (uint16x4_t __a, int16x4_t __b)
 {
-  return (uint16x4_t) __builtin_aarch64_ushlv4hi ((int16x4_t) __a, __b);
+  return __builtin_aarch64_ushlv4hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__))
 vshl_u32 (uint32x2_t __a, int32x2_t __b)
 {
-  return (uint32x2_t) __builtin_aarch64_ushlv2si ((int32x2_t) __a, __b);
+  return __builtin_aarch64_ushlv2si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vshl_u64 (uint64x1_t __a, int64x1_t __b)
 {
-  return (uint64x1_t) __builtin_aarch64_ushldi ((int64x1_t) __a, __b);
+  return __builtin_aarch64_ushldi_uus (__a, __b);
 }
 
 __extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
 vshlq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return (int8x16_t) __builtin_aarch64_sshlv16qi (__a, __b);
+  return __builtin_aarch64_sshlv16qi (__a, __b);
 }
 
 __extension__ static __inline int16x8_t __attribute__ ((__always_inline__))
 vshlq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return (int16x8_t) __builtin_aarch64_sshlv8hi (__a, __b);
+  return __builtin_aarch64_sshlv8hi (__a, __b);
 }
 
 __extension__ static __inline int32x4_t __attribute__ ((__always_inline__))
 vshlq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return (int32x4_t) __builtin_aarch64_sshlv4si (__a, __b);
+  return __builtin_aarch64_sshlv4si (__a, __b);
 }
 
 __extension__ static __inline int64x2_t __attribute__ ((__always_inline__))
 vshlq_s64 (int64x2_t __a, int64x2_t __b)
 {
-  return (int64x2_t) __builtin_aarch64_sshlv2di (__a, __b);
+  return __builtin_aarch64_sshlv2di (__a, __b);
 }
 
 __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
 vshlq_u8 (uint8x16_t __a, int8x16_t __b)
 {
-  return (uint8x16_t) __builtin_aarch64_ushlv16qi ((int8x16_t) __a, __b);
+  return __builtin_aarch64_ushlv16qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__))
 vshlq_u16 (uint16x8_t __a, int16x8_t __b)
 {
-  return (uint16x8_t) __builtin_aarch64_ushlv8hi ((int16x8_t) __a, __b);
+  return __builtin_aarch64_ushlv8hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
 vshlq_u32 (uint32x4_t __a, int32x4_t __b)
 {
-  return (uint32x4_t) __builtin_aarch64_ushlv4si ((int32x4_t) __a, __b);
+  return __builtin_aarch64_ushlv4si_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__))
 vshlq_u64 (uint64x2_t __a, int64x2_t __b)
 {
-  return (uint64x2_t) __builtin_aarch64_ushlv2di ((int64x2_t) __a, __b);
+  return __builtin_aarch64_ushlv2di_uus (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
 vshld_s64 (int64x1_t __a, int64x1_t __b)
 {
-  return (int64x1_t) __builtin_aarch64_sshldi (__a, __b);
+  return __builtin_aarch64_sshldi (__a, __b);
 }
 
 __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
 vshld_u64 (uint64x1_t __a, uint64x1_t __b)
 {
-  return (uint64x1_t) __builtin_aarch64_ushldi (__a, __b);
+  return __builtin_aarch64_ushldi_uus (__a, __b);
 }
 
 __extension__ static __inline int16x8_t __attribute__ ((__always_inline__))
@@ -22098,19 +22098,19 @@ vshll_n_s32 (int32x2_t __a, const int __b)
 __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__))
 vshll_n_u8 (uint8x8_t __a, const int __b)
 {
-  return (uint16x8_t) __builtin_aarch64_ushll_nv8qi ((int8x8_t) __a, __b);
+  return __builtin_aarch64_ushll_nv8qi_uus (__a, __b);
 }
 
 __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
 vshll_n_u16 (uint16x4_t __a, const int __b)
 {
-  return (uint32x4_t) __builtin_aarch64_ushll_nv4hi ((int16x4_t) __a, __b);
+  return __builtin_aarch64_ushll_nv4hi_uus (__a, __b);
 }
 
 __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__))
 vshll_n_u32 (uint32x2_t __a, const int __b)
 {
-  return (uint64x2_t) __builtin_aarch64_ushll_nv2si ((int32x2_t) __a, __b);
+  return __builtin_aarch64_ushll_nv2si_uus (__a, __b);
 }
 
 /* vshr */
@@ -24409,73 +24409,73 @@ vtstd_u64 (uint64x1_t __a, uint64x1_t __b)
 __extension__ static __inline int8x8_t __attribute__ ((__always_inline__))
 vuqadd_s8 (int8x8_t __a, uint8x8_t __b)
 {
-  return (int8x8_t) __builtin_aarch64_suqaddv8qi (__a, (int8x8_t) __b);
+  return __builtin_aarch64_suqaddv8qi_ssu (__a, __b);
 }
 
 __extension__ static __inline int16x4_t __attribute__ ((__always_inline__))
 vuqadd_s16 (int16x4_t __a, uint16x4_t __b)
 {
-  return (int16x4_t) __builtin_aarch64_suqaddv4hi (__a, (int16x4_t) __b);
+  return __builtin_aarch64_suqaddv4hi_ssu (__a, __b);
 }
 
 __extension__ static __inline int32x2_t __attribute__ ((__always_inline__))
 vuqadd_s32 (int32x2_t __a, uint32x2_t __b)
 {
-  return (int32x2_t) __builtin_aarch64_suqaddv2si (__a, (int32x2_t) __b);
+  return __builtin_aarch64_suqaddv2si_ssu (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
 vuqadd_s64 (int64x1_t __a, uint64x1_t __b)
 {
-  return (int64x1_t) __builtin_aarch64_suqadddi (__a, (int64x1_t) __b);
+  return __builtin_aarch64_suqadddi_ssu (__a, __b);
 }
 
 __extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
 vuqaddq_s8 (int8x16_t __a, uint8x16_t __b)
 {
-  return (int8x16_t) __builtin_aarch64_suqaddv16qi (__a, (int8x16_t) __b);
+  return __builtin_aarch64_suqaddv16qi_ssu (__a, __b);
 }
 
 __extension__ static __inline int16x8_t __attribute__ ((__always_inline__))
 vuqaddq_s16 (int16x8_t __a, uint16x8_t __b)
 {
-  return (int16x8_t) __builtin_aarch64_suqaddv8hi (__a, (int16x8_t) __b);
+  return __builtin_aarch64_suqaddv8hi_ssu (__a, __b);
 }
 
 __extension__ static __inline int32x4_t __attribute__ ((__always_inline__))
 vuqaddq_s32 (int32x4_t __a, uint32x4_t __b)
 {
-  return (int32x4_t) __builtin_aarch64_suqaddv4si (__a, (int32x4_t) __b);
+  return __builtin_aarch64_suqaddv4si_ssu (__a, __b);
 }
 
 __extension__ static __inline int64x2_t __attribute__ ((__always_inline__))
 vuqaddq_s64 (int64x2_t __a, uint64x2_t __b)
 {
-  return (int64x2_t) __builtin_aarch64_suqaddv2di (__a, (int64x2_t) __b);
+  return __builtin_aarch64_suqaddv2di_ssu (__a, __b);
 }
 
 __extension__ static __inline int8x1_t __attribute__ ((__always_inline__))
 vuqaddb_s8 (int8x1_t __a, uint8x1_t __b)
 {
-  return (int8x1_t) __builtin_aarch64_suqaddqi (__a, (int8x1_t) __b);
+  return __builtin_aarch64_suqaddqi_ssu (__a, __b);
 }
 
 __extension__ static __inline int16x1_t __attribute__ ((__always_inline__))
 vuqaddh_s16 (int16x1_t __a, uint16x1_t __b)
 {
-  return (int16x1_t) __builtin_aarch64_suqaddhi (__a, (int16x1_t) __b);
+  return __builtin_aarch64_suqaddhi_ssu (__a, __b);
 }
 
 __extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
 vuqadds_s32 (int32x1_t __a, uint32x1_t __b)
 {
-  return (int32x1_t) __builtin_aarch64_suqaddsi (__a, (int32x1_t) __b);
+  return __builtin_aarch64_suqaddsi_ssu (__a, __b);
 }
 
 __extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
 vuqaddd_s64 (int64x1_t __a, uint64x1_t __b)
 {
-  return (int64x1_t) __builtin_aarch64_suqadddi (__a, (int64x1_t) __b);
+  return __builtin_aarch64_suqadddi_ssu (__a, __b);
 }
 
 #define __DEFINTERLEAVE(op, rettype, intype, funcsuffix, Q) \
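
For illustration (not part of the committed patch): once the builtins carry the correct signedness, the intrinsic prototypes in arm_neon.h type-check against user code directly, with no value-punning casts in the header. A minimal usage sketch of three of the affected intrinsics, assuming an AArch64 target and the patched arm_neon.h; the wrapper function names here are hypothetical:

#include <arm_neon.h>

/* vshl_u8: unsigned data shifted by signed per-lane counts,
   (uint8x8_t, int8x8_t) -> uint8x8_t.  */
uint8x8_t
shift_lanes (uint8x8_t data, int8x8_t counts)
{
  return vshl_u8 (data, counts);
}

/* vuqadd_s32: the mixed-signedness SUQADD, a signed accumulator
   saturating-adds an unsigned operand, matching TYPES_BINOP_SSU
   (signed result, signed first operand, unsigned second operand).  */
int32x2_t
saturating_accumulate (int32x2_t acc, uint32x2_t delta)
{
  return vuqadd_s32 (acc, delta);
}

/* vrshr_n_u32: unsigned rounding shift right by an immediate; operand
   and result stay unsigned, matching the USHIFTIMM qualifier.  */
uint32x2_t
round_shift_by_4 (uint32x2_t v)
{
  return vrshr_n_u32 (v, 4);
}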