[3/9] arm: MVE: Remove _s and _u suffixes from vcmp* builtins.

Message ID 1619791790-628-3-git-send-email-christophe.lyon@linaro.org
State New
Series
  • [1/9] arm: MVE: Convert vcmp[eq|ne]* in arm_mve.h to use only 's' builtin version

Commit Message

Christophe Lyon via Gcc-patches April 30, 2021, 2:09 p.m.
This patch brings more unification to the vector comparison builtins
by removing the now-useless 's' (signed) suffix, since we no longer
need separate unsigned versions.
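
To make the rename concrete, here is the resulting scheme for one pair of
intrinsics, copied from the arm_mve.h hunks below: only the builtin names
change, while the casts introduced by the earlier patch in this series stay
as they are.

  __extension__ extern __inline mve_pred16_t
  __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
  __arm_vcmpeqq_s8 (int8x16_t __a, int8x16_t __b)
  {
    return __builtin_mve_vcmpeqq_v16qi (__a, __b);
  }

  __extension__ extern __inline mve_pred16_t
  __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
  __arm_vcmpeqq_u8 (uint8x16_t __a, uint8x16_t __b)
  {
    /* Was __builtin_mve_vcmpeqq_sv16qi before this patch; equality does
       not depend on signedness, so the operands are just reinterpreted.  */
    return __builtin_mve_vcmpeqq_v16qi ((int8x16_t)__a, (int8x16_t)__b);
  }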

2021-03-01  Christophe Lyon  <christophe.lyon@linaro.org>

	gcc/
	* config/arm/arm_mve.h (__arm_vcmp*): Remove 's' suffix.
	* config/arm/arm_mve_builtins.def (vcmp*): Remove 's' suffix.
	* config/arm/mve.md (mve_vcmp*): Remove 's' suffix in pattern
	names.
---
 gcc/config/arm/arm_mve.h            | 120 ++++++++++++++++++------------------
 gcc/config/arm/arm_mve_builtins.def |  32 +++++-----
 gcc/config/arm/mve.md               |  64 +++++++++----------
 3 files changed, 108 insertions(+), 108 deletions(-)

-- 
2.7.4

Comments

Kyrylo Tkachov via Gcc-patches May 10, 2021, 11:58 a.m. | #1
> -----Original Message-----
> From: Gcc-patches <gcc-patches-bounces@gcc.gnu.org> On Behalf Of
> Christophe Lyon via Gcc-patches
> Sent: 30 April 2021 15:10
> To: gcc-patches@gcc.gnu.org
> Subject: [PATCH 3/9] arm: MVE: Remove _s and _u suffixes from vcmp*
> builtins.
>
> This patch brings more unification in the vector comparison builtins,
> by removing the useless 's' (signed) suffix since we no longer need
> unsigned versions.

Ok.
Thanks,
Kyrill

Patch

diff --git a/gcc/config/arm/arm_mve.h b/gcc/config/arm/arm_mve.h
index e4dfe91..5d78269 100644
--- a/gcc/config/arm/arm_mve.h
+++ b/gcc/config/arm/arm_mve.h
@@ -3674,42 +3674,42 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpneq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpneq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpneq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpneq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpneq_sv4si (__a, __b);
+  return __builtin_mve_vcmpneq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_u8 (uint8x16_t __a, uint8x16_t __b)
 {
-  return __builtin_mve_vcmpneq_sv16qi ((int8x16_t)__a, (int8x16_t)__b);
+  return __builtin_mve_vcmpneq_v16qi ((int8x16_t)__a, (int8x16_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_u16 (uint16x8_t __a, uint16x8_t __b)
 {
-  return __builtin_mve_vcmpneq_sv8hi ((int16x8_t)__a, (int16x8_t)__b);
+  return __builtin_mve_vcmpneq_v8hi ((int16x8_t)__a, (int16x8_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_u32 (uint32x4_t __a, uint32x4_t __b)
 {
-  return __builtin_mve_vcmpneq_sv4si ((int32x4_t)__a, (int32x4_t)__b);
+  return __builtin_mve_vcmpneq_v4si ((int32x4_t)__a, (int32x4_t)__b);
 }
 
 __extension__ extern __inline int8x16_t
@@ -3932,49 +3932,49 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_u8 (uint8x16_t __a, uint8_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv16qi ((int8x16_t)__a, (int8_t)__b);
+  return __builtin_mve_vcmpneq_n_v16qi ((int8x16_t)__a, (int8_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_u8 (uint8x16_t __a, uint8x16_t __b)
 {
-  return __builtin_mve_vcmphiq_uv16qi (__a, __b);
+  return __builtin_mve_vcmphiq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_n_u8 (uint8x16_t __a, uint8_t __b)
 {
-  return __builtin_mve_vcmphiq_n_uv16qi (__a, __b);
+  return __builtin_mve_vcmphiq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_u8 (uint8x16_t __a, uint8x16_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv16qi ((int8x16_t)__a, (int8x16_t)__b);
+  return __builtin_mve_vcmpeqq_v16qi ((int8x16_t)__a, (int8x16_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_u8 (uint8x16_t __a, uint8_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv16qi ((int8x16_t)__a, (int8_t)__b);
+  return __builtin_mve_vcmpeqq_n_v16qi ((int8x16_t)__a, (int8_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_u8 (uint8x16_t __a, uint8x16_t __b)
 {
-  return __builtin_mve_vcmpcsq_uv16qi (__a, __b);
+  return __builtin_mve_vcmpcsq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_n_u8 (uint8x16_t __a, uint8_t __b)
 {
-  return __builtin_mve_vcmpcsq_n_uv16qi (__a, __b);
+  return __builtin_mve_vcmpcsq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline uint8x16_t
@@ -4144,77 +4144,77 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpneq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpltq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpltq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpltq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpltq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpleq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpleq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpleq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpleq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpgtq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpgtq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpgtq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpgtq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpgeq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpgeq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpgeq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpgeq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_s8 (int8x16_t __a, int8x16_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv16qi (__a, __b);
+  return __builtin_mve_vcmpeqq_v16qi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_s8 (int8x16_t __a, int8_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv16qi (__a, __b);
+  return __builtin_mve_vcmpeqq_n_v16qi (__a, __b);
 }
 
 __extension__ extern __inline uint8x16_t
@@ -4774,49 +4774,49 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_u16 (uint16x8_t __a, uint16_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv8hi ((int16x8_t)__a, (int16_t)__b);
+  return __builtin_mve_vcmpneq_n_v8hi ((int16x8_t)__a, (int16_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_u16 (uint16x8_t __a, uint16x8_t __b)
 {
-  return __builtin_mve_vcmphiq_uv8hi (__a, __b);
+  return __builtin_mve_vcmphiq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_n_u16 (uint16x8_t __a, uint16_t __b)
 {
-  return __builtin_mve_vcmphiq_n_uv8hi (__a, __b);
+  return __builtin_mve_vcmphiq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_u16 (uint16x8_t __a, uint16x8_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv8hi ((int16x8_t)__a, (int16x8_t)__b);
+  return __builtin_mve_vcmpeqq_v8hi ((int16x8_t)__a, (int16x8_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_u16 (uint16x8_t __a, uint16_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv8hi ((int16x8_t)__a, (int16_t)__b);
+  return __builtin_mve_vcmpeqq_n_v8hi ((int16x8_t)__a, (int16_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_u16 (uint16x8_t __a, uint16x8_t __b)
 {
-  return __builtin_mve_vcmpcsq_uv8hi (__a, __b);
+  return __builtin_mve_vcmpcsq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_n_u16 (uint16x8_t __a, uint16_t __b)
 {
-  return __builtin_mve_vcmpcsq_n_uv8hi (__a, __b);
+  return __builtin_mve_vcmpcsq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline uint16x8_t
@@ -4986,77 +4986,77 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpneq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpltq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpltq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpltq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpltq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpleq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpleq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpleq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpleq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpgtq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpgtq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpgtq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpgtq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpgeq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpgeq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpgeq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpgeq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_s16 (int16x8_t __a, int16x8_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv8hi (__a, __b);
+  return __builtin_mve_vcmpeqq_v8hi (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_s16 (int16x8_t __a, int16_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv8hi (__a, __b);
+  return __builtin_mve_vcmpeqq_n_v8hi (__a, __b);
 }
 
 __extension__ extern __inline uint16x8_t
@@ -5616,49 +5616,49 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_u32 (uint32x4_t __a, uint32_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv4si ((int32x4_t)__a, (int32_t)__b);
+  return __builtin_mve_vcmpneq_n_v4si ((int32x4_t)__a, (int32_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_u32 (uint32x4_t __a, uint32x4_t __b)
 {
-  return __builtin_mve_vcmphiq_uv4si (__a, __b);
+  return __builtin_mve_vcmphiq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmphiq_n_u32 (uint32x4_t __a, uint32_t __b)
 {
-  return __builtin_mve_vcmphiq_n_uv4si (__a, __b);
+  return __builtin_mve_vcmphiq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_u32 (uint32x4_t __a, uint32x4_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv4si ((int32x4_t)__a, (int32x4_t)__b);
+  return __builtin_mve_vcmpeqq_v4si ((int32x4_t)__a, (int32x4_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_u32 (uint32x4_t __a, uint32_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv4si ((int32x4_t)__a, (int32_t)__b);
+  return __builtin_mve_vcmpeqq_n_v4si ((int32x4_t)__a, (int32_t)__b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_u32 (uint32x4_t __a, uint32x4_t __b)
 {
-  return __builtin_mve_vcmpcsq_uv4si (__a, __b);
+  return __builtin_mve_vcmpcsq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpcsq_n_u32 (uint32x4_t __a, uint32_t __b)
 {
-  return __builtin_mve_vcmpcsq_n_uv4si (__a, __b);
+  return __builtin_mve_vcmpcsq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline uint32x4_t
@@ -5828,77 +5828,77 @@  __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpneq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpneq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpneq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpltq_sv4si (__a, __b);
+  return __builtin_mve_vcmpltq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpltq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpltq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpltq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpleq_sv4si (__a, __b);
+  return __builtin_mve_vcmpleq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpleq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpleq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpleq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpgtq_sv4si (__a, __b);
+  return __builtin_mve_vcmpgtq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgtq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpgtq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpgtq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpgeq_sv4si (__a, __b);
+  return __builtin_mve_vcmpgeq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpgeq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpgeq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpgeq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_s32 (int32x4_t __a, int32x4_t __b)
 {
-  return __builtin_mve_vcmpeqq_sv4si (__a, __b);
+  return __builtin_mve_vcmpeqq_v4si (__a, __b);
 }
 
 __extension__ extern __inline mve_pred16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 __arm_vcmpeqq_n_s32 (int32x4_t __a, int32_t __b)
 {
-  return __builtin_mve_vcmpeqq_n_sv4si (__a, __b);
+  return __builtin_mve_vcmpeqq_n_v4si (__a, __b);
 }
 
 __extension__ extern __inline uint32x4_t
diff --git a/gcc/config/arm/arm_mve_builtins.def b/gcc/config/arm/arm_mve_builtins.def
index ee34fd1..e9b5b28 100644
--- a/gcc/config/arm/arm_mve_builtins.def
+++ b/gcc/config/arm/arm_mve_builtins.def
@@ -89,7 +89,7 @@  VAR3 (BINOP_UNONE_UNONE_IMM, vshrq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_NONE_NONE_IMM, vshrq_n_s, v16qi, v8hi, v4si)
 VAR1 (BINOP_NONE_NONE_UNONE, vaddlvq_p_s, v4si)
 VAR1 (BINOP_UNONE_UNONE_UNONE, vaddlvq_p_u, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_s, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_, v16qi, v8hi, v4si)
 VAR3 (BINOP_NONE_NONE_NONE, vshlq_s, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_NONE, vshlq_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vsubq_u, v16qi, v8hi, v4si)
@@ -117,10 +117,10 @@  VAR3 (BINOP_UNONE_UNONE_UNONE, vhsubq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vhaddq_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vhaddq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, veorq_u, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_u, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_n_u, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_u, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_n_u, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_n_, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vbicq_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vandq_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_UNONE, vaddvq_p_u, v16qi, v8hi, v4si)
@@ -142,17 +142,17 @@  VAR3 (BINOP_UNONE_UNONE_NONE, vbrsrq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_IMM, vshlq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_IMM, vrshrq_n_u, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_UNONE_IMM, vqshlq_n_u, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_n_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_n_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_n_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_n_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_n_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_s, v16qi, v8hi, v4si)
-VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_n_s, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_n_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_, v16qi, v8hi, v4si)
+VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_n_, v16qi, v8hi, v4si)
 VAR3 (BINOP_UNONE_NONE_IMM, vqshluq_n_s, v16qi, v8hi, v4si)
 VAR3 (BINOP_NONE_NONE_UNONE, vaddvq_p_s, v16qi, v8hi, v4si)
 VAR3 (BINOP_NONE_NONE_NONE, vsubq_s, v16qi, v8hi, v4si)
diff --git a/gcc/config/arm/mve.md b/gcc/config/arm/mve.md
index b04c22b..e9f095d 100644
--- a/gcc/config/arm/mve.md
+++ b/gcc/config/arm/mve.md
@@ -836,9 +836,9 @@  (define_insn "mve_vaddlvq_p_<supf>v4si"
    (set_attr "length""8")])
 
 ;;
-;; [vcmpneq_s])
+;; [vcmpneq_])
 ;;
-(define_insn "mve_vcmpneq_s<mode>"
+(define_insn "mve_vcmpneq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1005,9 +1005,9 @@  (define_expand "cadd<rot><mode>3"
 )
 
 ;;
-;; [vcmpcsq_n_u])
+;; [vcmpcsq_n_])
 ;;
-(define_insn "mve_vcmpcsq_n_u<mode>"
+(define_insn "mve_vcmpcsq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1020,9 +1020,9 @@  (define_insn "mve_vcmpcsq_n_u<mode>"
 ])
 
 ;;
-;; [vcmpcsq_u])
+;; [vcmpcsq_])
 ;;
-(define_insn "mve_vcmpcsq_u<mode>"
+(define_insn "mve_vcmpcsq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1035,9 +1035,9 @@  (define_insn "mve_vcmpcsq_u<mode>"
 ])
 
 ;;
-;; [vcmpeqq_n_s])
+;; [vcmpeqq_n_])
 ;;
-(define_insn "mve_vcmpeqq_n_s<mode>"
+(define_insn "mve_vcmpeqq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1050,9 +1050,9 @@  (define_insn "mve_vcmpeqq_n_s<mode>"
 ])
 
 ;;
-;; [vcmpeqq_s])
+;; [vcmpeqq_])
 ;;
-(define_insn "mve_vcmpeqq_s<mode>"
+(define_insn "mve_vcmpeqq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1065,9 +1065,9 @@  (define_insn "mve_vcmpeqq_s<mode>"
 ])
 
 ;;
-;; [vcmpgeq_n_s])
+;; [vcmpgeq_n_])
 ;;
-(define_insn "mve_vcmpgeq_n_s<mode>"
+(define_insn "mve_vcmpgeq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1080,9 +1080,9 @@  (define_insn "mve_vcmpgeq_n_s<mode>"
 ])
 
 ;;
-;; [vcmpgeq_s])
+;; [vcmpgeq_])
 ;;
-(define_insn "mve_vcmpgeq_s<mode>"
+(define_insn "mve_vcmpgeq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1095,9 +1095,9 @@  (define_insn "mve_vcmpgeq_s<mode>"
 ])
 
 ;;
-;; [vcmpgtq_n_s])
+;; [vcmpgtq_n_])
 ;;
-(define_insn "mve_vcmpgtq_n_s<mode>"
+(define_insn "mve_vcmpgtq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1110,9 +1110,9 @@  (define_insn "mve_vcmpgtq_n_s<mode>"
 ])
 
 ;;
-;; [vcmpgtq_s])
+;; [vcmpgtq_])
 ;;
-(define_insn "mve_vcmpgtq_s<mode>"
+(define_insn "mve_vcmpgtq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1125,9 +1125,9 @@  (define_insn "mve_vcmpgtq_s<mode>"
 ])
 
 ;;
-;; [vcmphiq_n_u])
+;; [vcmphiq_n_])
 ;;
-(define_insn "mve_vcmphiq_n_u<mode>"
+(define_insn "mve_vcmphiq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1140,9 +1140,9 @@  (define_insn "mve_vcmphiq_n_u<mode>"
 ])
 
 ;;
-;; [vcmphiq_u])
+;; [vcmphiq_])
 ;;
-(define_insn "mve_vcmphiq_u<mode>"
+(define_insn "mve_vcmphiq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1155,9 +1155,9 @@  (define_insn "mve_vcmphiq_u<mode>"
 ])
 
 ;;
-;; [vcmpleq_n_s])
+;; [vcmpleq_n_])
 ;;
-(define_insn "mve_vcmpleq_n_s<mode>"
+(define_insn "mve_vcmpleq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1170,9 +1170,9 @@  (define_insn "mve_vcmpleq_n_s<mode>"
 ])
 
 ;;
-;; [vcmpleq_s])
+;; [vcmpleq_])
 ;;
-(define_insn "mve_vcmpleq_s<mode>"
+(define_insn "mve_vcmpleq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1185,9 +1185,9 @@  (define_insn "mve_vcmpleq_s<mode>"
 ])
 
 ;;
-;; [vcmpltq_n_s])
+;; [vcmpltq_n_])
 ;;
-(define_insn "mve_vcmpltq_n_s<mode>"
+(define_insn "mve_vcmpltq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1200,9 +1200,9 @@  (define_insn "mve_vcmpltq_n_s<mode>"
 ])
 
 ;;
-;; [vcmpltq_s])
+;; [vcmpltq_])
 ;;
-(define_insn "mve_vcmpltq_s<mode>"
+(define_insn "mve_vcmpltq_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")
@@ -1215,9 +1215,9 @@  (define_insn "mve_vcmpltq_s<mode>"
 ])
 
 ;;
-;; [vcmpneq_n_s])
+;; [vcmpneq_n_])
 ;;
-(define_insn "mve_vcmpneq_n_s<mode>"
+(define_insn "mve_vcmpneq_n_<mode>"
   [
    (set (match_operand:HI 0 "vpr_register_operand" "=Up")
 	(unspec:HI [(match_operand:MVE_2 1 "s_register_operand" "w")