Convert two input vectors into one vector of a narrower type. The output element type must be half the width of the input element type.
/* Macro front-ends for the downcvt operator (C-compatible spellings of the
   C++ overloads declared below).
   NOTE(review): all three macros expand to nothing as written — this looks
   like truncated generator output; confirm against the generated
   functions.h in the nsimd repository. */
/* Generic downcvt for the default SIMD extension. */
#define vdowncvt(a0, a1, from_type, to_type)
/* Same as vdowncvt but with an explicit simd_ext selector. */
#define vdowncvt_e(a0, a1, from_type, to_type, simd_ext)
/* C-style generic entry point dispatching on to_type. */
#define nsimd_downcvt(to_type, a0, a1)
/* C++ advanced-API entry point: converts two native vectors of value type F
   into one vector of the half-width value type T for the default SIMD
   extension. The trailing F and T parameters are tag arguments used only for
   overload selection. The requires-clause enforces the downcvt contract:
   sizeof(F) == 2 * sizeof(T). */
template <NSIMD_CONCEPT_VALUE_TYPE F, NSIMD_CONCEPT_VALUE_TYPE T> NSIMD_REQUIRES(sizeof_v<F> == 2 * sizeof_v<T>) typename simd_traits<T, NSIMD_SIMD>::simd_vector NSIMD_VECTORCALL downcvt(typename simd_traits<F, NSIMD_SIMD>::simd_vector a0, typename simd_traits<F, NSIMD_SIMD>::simd_vector a1, F, T);
/* Pack overload, unroll == 1: converts two packs of T into one pack of the
   half-width type. The first (unnamed) ToPackType argument is a tag fixing
   the destination pack type; the constraint also pins the same simd_ext on
   both sides. */
template <NSIMD_CONCEPT_PACK ToPackType, NSIMD_CONCEPT_VALUE_TYPE T, NSIMD_CONCEPT_SIMD_EXT SimdExt> NSIMD_REQUIRES((2 * sizeof_v<typename ToPackType::value_type> == sizeof_v<T> && ToPackType::unroll == 1 && std::is_same_v<typename ToPackType::simd_ext, SimdExt>))ToPackType downcvt(ToPackType const&, pack<T, 1, SimdExt> const& a0, pack<T, 1, SimdExt> const& a1);
/* Pack overload for arbitrary unroll N, destination selected by tag. */
template <NSIMD_CONCEPT_PACK ToPackType, NSIMD_CONCEPT_VALUE_TYPE T, int N, NSIMD_CONCEPT_SIMD_EXT SimdExt> NSIMD_REQUIRES((2 * sizeof_v<typename ToPackType::value_type> == sizeof_v<T> && ToPackType::unroll == N && std::is_same_v<typename ToPackType::simd_ext, SimdExt>))ToPackType downcvt(ToPackType const&, pack<T, N, SimdExt> const& a0, pack<T, N, SimdExt> const& a1);
/* Tag-less pack overload: ToPackType cannot be deduced from the arguments,
   so callers must spell it explicitly, e.g. downcvt<pack<i16>>(a0, a1). */
template <NSIMD_CONCEPT_PACK ToPackType, NSIMD_CONCEPT_VALUE_TYPE T, int N, NSIMD_CONCEPT_SIMD_EXT SimdExt> NSIMD_REQUIRES((2 * sizeof_v<typename ToPackType::value_type> == sizeof_v<T> && ToPackType::unroll == N && std::is_same_v<typename ToPackType::simd_ext, SimdExt>))ToPackType downcvt(pack<T, N, SimdExt> const& a0, pack<T, N, SimdExt> const& a1);
/* C API declarations: nsimd_downcvt_<simd_ext>_<type>_<type>(a0, a1).
   Each function merges two input vectors into one vector of the narrower
   destination type for that SIMD extension.
   NOTE(review): every declaration below pairs identical source/destination
   types (e.g. _i16_i16) and identical vector types in the signature, yet
   the downcvt contract requires the input element type to be twice the
   width of the output (one would expect e.g. _i16_i32 returning vi16 from
   two vi32 arguments). This looks like a generator or extraction defect —
   confirm against the upstream nsimd generated headers. */
/* avx2 */
nsimd_avx2_vi16 NSIMD_VECTORCALL nsimd_downcvt_avx2_i16_i16(nsimd_avx2_vi16 a0, nsimd_avx2_vi16 a1);
nsimd_avx2_vu16 NSIMD_VECTORCALL nsimd_downcvt_avx2_u16_u16(nsimd_avx2_vu16 a0, nsimd_avx2_vu16 a1);
nsimd_avx2_vf16 NSIMD_VECTORCALL nsimd_downcvt_avx2_f16_f16(nsimd_avx2_vf16 a0, nsimd_avx2_vf16 a1);
nsimd_avx2_vi32 NSIMD_VECTORCALL nsimd_downcvt_avx2_i32_i32(nsimd_avx2_vi32 a0, nsimd_avx2_vi32 a1);
nsimd_avx2_vu32 NSIMD_VECTORCALL nsimd_downcvt_avx2_u32_u32(nsimd_avx2_vu32 a0, nsimd_avx2_vu32 a1);
nsimd_avx2_vf32 NSIMD_VECTORCALL nsimd_downcvt_avx2_f32_f32(nsimd_avx2_vf32 a0, nsimd_avx2_vf32 a1);
nsimd_avx2_vi64 NSIMD_VECTORCALL nsimd_downcvt_avx2_i64_i64(nsimd_avx2_vi64 a0, nsimd_avx2_vi64 a1);
nsimd_avx2_vu64 NSIMD_VECTORCALL nsimd_downcvt_avx2_u64_u64(nsimd_avx2_vu64 a0, nsimd_avx2_vu64 a1);
nsimd_avx2_vf64 NSIMD_VECTORCALL nsimd_downcvt_avx2_f64_f64(nsimd_avx2_vf64 a0, nsimd_avx2_vf64 a1);
/* sve512 (fixed 512-bit SVE) */
nsimd_sve512_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve512_i16_i16(nsimd_sve512_vi16 a0, nsimd_sve512_vi16 a1);
nsimd_sve512_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve512_u16_u16(nsimd_sve512_vu16 a0, nsimd_sve512_vu16 a1);
nsimd_sve512_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve512_f16_f16(nsimd_sve512_vf16 a0, nsimd_sve512_vf16 a1);
nsimd_sve512_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve512_i32_i32(nsimd_sve512_vi32 a0, nsimd_sve512_vi32 a1);
nsimd_sve512_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve512_u32_u32(nsimd_sve512_vu32 a0, nsimd_sve512_vu32 a1);
nsimd_sve512_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve512_f32_f32(nsimd_sve512_vf32 a0, nsimd_sve512_vf32 a1);
nsimd_sve512_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve512_i64_i64(nsimd_sve512_vi64 a0, nsimd_sve512_vi64 a1);
nsimd_sve512_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve512_u64_u64(nsimd_sve512_vu64 a0, nsimd_sve512_vu64 a1);
nsimd_sve512_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve512_f64_f64(nsimd_sve512_vf64 a0, nsimd_sve512_vf64 a1);
/* sve (vector-length-agnostic SVE) */
nsimd_sve_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve_i16_i16(nsimd_sve_vi16 a0, nsimd_sve_vi16 a1);
nsimd_sve_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve_u16_u16(nsimd_sve_vu16 a0, nsimd_sve_vu16 a1);
nsimd_sve_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve_f16_f16(nsimd_sve_vf16 a0, nsimd_sve_vf16 a1);
nsimd_sve_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve_i32_i32(nsimd_sve_vi32 a0, nsimd_sve_vi32 a1);
nsimd_sve_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve_u32_u32(nsimd_sve_vu32 a0, nsimd_sve_vu32 a1);
nsimd_sve_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve_f32_f32(nsimd_sve_vf32 a0, nsimd_sve_vf32 a1);
nsimd_sve_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve_i64_i64(nsimd_sve_vi64 a0, nsimd_sve_vi64 a1);
nsimd_sve_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve_u64_u64(nsimd_sve_vu64 a0, nsimd_sve_vu64 a1);
nsimd_sve_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve_f64_f64(nsimd_sve_vf64 a0, nsimd_sve_vf64 a1);
/* cpu (scalar fallback) */
nsimd_cpu_vi16 NSIMD_VECTORCALL nsimd_downcvt_cpu_i16_i16(nsimd_cpu_vi16 a0, nsimd_cpu_vi16 a1);
nsimd_cpu_vu16 NSIMD_VECTORCALL nsimd_downcvt_cpu_u16_u16(nsimd_cpu_vu16 a0, nsimd_cpu_vu16 a1);
nsimd_cpu_vf16 NSIMD_VECTORCALL nsimd_downcvt_cpu_f16_f16(nsimd_cpu_vf16 a0, nsimd_cpu_vf16 a1);
nsimd_cpu_vi32 NSIMD_VECTORCALL nsimd_downcvt_cpu_i32_i32(nsimd_cpu_vi32 a0, nsimd_cpu_vi32 a1);
nsimd_cpu_vu32 NSIMD_VECTORCALL nsimd_downcvt_cpu_u32_u32(nsimd_cpu_vu32 a0, nsimd_cpu_vu32 a1);
nsimd_cpu_vf32 NSIMD_VECTORCALL nsimd_downcvt_cpu_f32_f32(nsimd_cpu_vf32 a0, nsimd_cpu_vf32 a1);
nsimd_cpu_vi64 NSIMD_VECTORCALL nsimd_downcvt_cpu_i64_i64(nsimd_cpu_vi64 a0, nsimd_cpu_vi64 a1);
nsimd_cpu_vu64 NSIMD_VECTORCALL nsimd_downcvt_cpu_u64_u64(nsimd_cpu_vu64 a0, nsimd_cpu_vu64 a1);
nsimd_cpu_vf64 NSIMD_VECTORCALL nsimd_downcvt_cpu_f64_f64(nsimd_cpu_vf64 a0, nsimd_cpu_vf64 a1);
/* sve2048 (fixed 2048-bit SVE) */
nsimd_sve2048_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve2048_i16_i16(nsimd_sve2048_vi16 a0, nsimd_sve2048_vi16 a1);
nsimd_sve2048_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve2048_u16_u16(nsimd_sve2048_vu16 a0, nsimd_sve2048_vu16 a1);
nsimd_sve2048_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve2048_f16_f16(nsimd_sve2048_vf16 a0, nsimd_sve2048_vf16 a1);
nsimd_sve2048_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve2048_i32_i32(nsimd_sve2048_vi32 a0, nsimd_sve2048_vi32 a1);
nsimd_sve2048_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve2048_u32_u32(nsimd_sve2048_vu32 a0, nsimd_sve2048_vu32 a1);
nsimd_sve2048_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve2048_f32_f32(nsimd_sve2048_vf32 a0, nsimd_sve2048_vf32 a1);
nsimd_sve2048_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve2048_i64_i64(nsimd_sve2048_vi64 a0, nsimd_sve2048_vi64 a1);
nsimd_sve2048_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve2048_u64_u64(nsimd_sve2048_vu64 a0, nsimd_sve2048_vu64 a1);
nsimd_sve2048_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve2048_f64_f64(nsimd_sve2048_vf64 a0, nsimd_sve2048_vf64 a1);
/* neon128 (ARMv7 NEON) */
nsimd_neon128_vi16 NSIMD_VECTORCALL nsimd_downcvt_neon128_i16_i16(nsimd_neon128_vi16 a0, nsimd_neon128_vi16 a1);
nsimd_neon128_vu16 NSIMD_VECTORCALL nsimd_downcvt_neon128_u16_u16(nsimd_neon128_vu16 a0, nsimd_neon128_vu16 a1);
nsimd_neon128_vf16 NSIMD_VECTORCALL nsimd_downcvt_neon128_f16_f16(nsimd_neon128_vf16 a0, nsimd_neon128_vf16 a1);
nsimd_neon128_vi32 NSIMD_VECTORCALL nsimd_downcvt_neon128_i32_i32(nsimd_neon128_vi32 a0, nsimd_neon128_vi32 a1);
nsimd_neon128_vu32 NSIMD_VECTORCALL nsimd_downcvt_neon128_u32_u32(nsimd_neon128_vu32 a0, nsimd_neon128_vu32 a1);
nsimd_neon128_vf32 NSIMD_VECTORCALL nsimd_downcvt_neon128_f32_f32(nsimd_neon128_vf32 a0, nsimd_neon128_vf32 a1);
nsimd_neon128_vi64 NSIMD_VECTORCALL nsimd_downcvt_neon128_i64_i64(nsimd_neon128_vi64 a0, nsimd_neon128_vi64 a1);
nsimd_neon128_vu64 NSIMD_VECTORCALL nsimd_downcvt_neon128_u64_u64(nsimd_neon128_vu64 a0, nsimd_neon128_vu64 a1);
nsimd_neon128_vf64 NSIMD_VECTORCALL nsimd_downcvt_neon128_f64_f64(nsimd_neon128_vf64 a0, nsimd_neon128_vf64 a1);
/* avx512_skylake */
nsimd_avx512_skylake_vi16 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_i16_i16(nsimd_avx512_skylake_vi16 a0, nsimd_avx512_skylake_vi16 a1);
nsimd_avx512_skylake_vu16 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_u16_u16(nsimd_avx512_skylake_vu16 a0, nsimd_avx512_skylake_vu16 a1);
nsimd_avx512_skylake_vf16 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_f16_f16(nsimd_avx512_skylake_vf16 a0, nsimd_avx512_skylake_vf16 a1);
nsimd_avx512_skylake_vi32 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_i32_i32(nsimd_avx512_skylake_vi32 a0, nsimd_avx512_skylake_vi32 a1);
nsimd_avx512_skylake_vu32 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_u32_u32(nsimd_avx512_skylake_vu32 a0, nsimd_avx512_skylake_vu32 a1);
nsimd_avx512_skylake_vf32 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_f32_f32(nsimd_avx512_skylake_vf32 a0, nsimd_avx512_skylake_vf32 a1);
nsimd_avx512_skylake_vi64 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_i64_i64(nsimd_avx512_skylake_vi64 a0, nsimd_avx512_skylake_vi64 a1);
nsimd_avx512_skylake_vu64 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_u64_u64(nsimd_avx512_skylake_vu64 a0, nsimd_avx512_skylake_vu64 a1);
nsimd_avx512_skylake_vf64 NSIMD_VECTORCALL nsimd_downcvt_avx512_skylake_f64_f64(nsimd_avx512_skylake_vf64 a0, nsimd_avx512_skylake_vf64 a1);
/* aarch64 (ARMv8 Advanced SIMD) */
nsimd_aarch64_vi16 NSIMD_VECTORCALL nsimd_downcvt_aarch64_i16_i16(nsimd_aarch64_vi16 a0, nsimd_aarch64_vi16 a1);
nsimd_aarch64_vu16 NSIMD_VECTORCALL nsimd_downcvt_aarch64_u16_u16(nsimd_aarch64_vu16 a0, nsimd_aarch64_vu16 a1);
nsimd_aarch64_vf16 NSIMD_VECTORCALL nsimd_downcvt_aarch64_f16_f16(nsimd_aarch64_vf16 a0, nsimd_aarch64_vf16 a1);
nsimd_aarch64_vi32 NSIMD_VECTORCALL nsimd_downcvt_aarch64_i32_i32(nsimd_aarch64_vi32 a0, nsimd_aarch64_vi32 a1);
nsimd_aarch64_vu32 NSIMD_VECTORCALL nsimd_downcvt_aarch64_u32_u32(nsimd_aarch64_vu32 a0, nsimd_aarch64_vu32 a1);
nsimd_aarch64_vf32 NSIMD_VECTORCALL nsimd_downcvt_aarch64_f32_f32(nsimd_aarch64_vf32 a0, nsimd_aarch64_vf32 a1);
nsimd_aarch64_vi64 NSIMD_VECTORCALL nsimd_downcvt_aarch64_i64_i64(nsimd_aarch64_vi64 a0, nsimd_aarch64_vi64 a1);
nsimd_aarch64_vu64 NSIMD_VECTORCALL nsimd_downcvt_aarch64_u64_u64(nsimd_aarch64_vu64 a0, nsimd_aarch64_vu64 a1);
nsimd_aarch64_vf64 NSIMD_VECTORCALL nsimd_downcvt_aarch64_f64_f64(nsimd_aarch64_vf64 a0, nsimd_aarch64_vf64 a1);
/* avx512_knl */
nsimd_avx512_knl_vi16 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_i16_i16(nsimd_avx512_knl_vi16 a0, nsimd_avx512_knl_vi16 a1);
nsimd_avx512_knl_vu16 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_u16_u16(nsimd_avx512_knl_vu16 a0, nsimd_avx512_knl_vu16 a1);
nsimd_avx512_knl_vf16 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_f16_f16(nsimd_avx512_knl_vf16 a0, nsimd_avx512_knl_vf16 a1);
nsimd_avx512_knl_vi32 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_i32_i32(nsimd_avx512_knl_vi32 a0, nsimd_avx512_knl_vi32 a1);
nsimd_avx512_knl_vu32 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_u32_u32(nsimd_avx512_knl_vu32 a0, nsimd_avx512_knl_vu32 a1);
nsimd_avx512_knl_vf32 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_f32_f32(nsimd_avx512_knl_vf32 a0, nsimd_avx512_knl_vf32 a1);
nsimd_avx512_knl_vi64 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_i64_i64(nsimd_avx512_knl_vi64 a0, nsimd_avx512_knl_vi64 a1);
nsimd_avx512_knl_vu64 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_u64_u64(nsimd_avx512_knl_vu64 a0, nsimd_avx512_knl_vu64 a1);
nsimd_avx512_knl_vf64 NSIMD_VECTORCALL nsimd_downcvt_avx512_knl_f64_f64(nsimd_avx512_knl_vf64 a0, nsimd_avx512_knl_vf64 a1);
/* sse2 */
nsimd_sse2_vi16 NSIMD_VECTORCALL nsimd_downcvt_sse2_i16_i16(nsimd_sse2_vi16 a0, nsimd_sse2_vi16 a1);
nsimd_sse2_vu16 NSIMD_VECTORCALL nsimd_downcvt_sse2_u16_u16(nsimd_sse2_vu16 a0, nsimd_sse2_vu16 a1);
nsimd_sse2_vf16 NSIMD_VECTORCALL nsimd_downcvt_sse2_f16_f16(nsimd_sse2_vf16 a0, nsimd_sse2_vf16 a1);
nsimd_sse2_vi32 NSIMD_VECTORCALL nsimd_downcvt_sse2_i32_i32(nsimd_sse2_vi32 a0, nsimd_sse2_vi32 a1);
nsimd_sse2_vu32 NSIMD_VECTORCALL nsimd_downcvt_sse2_u32_u32(nsimd_sse2_vu32 a0, nsimd_sse2_vu32 a1);
nsimd_sse2_vf32 NSIMD_VECTORCALL nsimd_downcvt_sse2_f32_f32(nsimd_sse2_vf32 a0, nsimd_sse2_vf32 a1);
nsimd_sse2_vi64 NSIMD_VECTORCALL nsimd_downcvt_sse2_i64_i64(nsimd_sse2_vi64 a0, nsimd_sse2_vi64 a1);
nsimd_sse2_vu64 NSIMD_VECTORCALL nsimd_downcvt_sse2_u64_u64(nsimd_sse2_vu64 a0, nsimd_sse2_vu64 a1);
nsimd_sse2_vf64 NSIMD_VECTORCALL nsimd_downcvt_sse2_f64_f64(nsimd_sse2_vf64 a0, nsimd_sse2_vf64 a1);
/* sse42 */
nsimd_sse42_vi16 NSIMD_VECTORCALL nsimd_downcvt_sse42_i16_i16(nsimd_sse42_vi16 a0, nsimd_sse42_vi16 a1);
nsimd_sse42_vu16 NSIMD_VECTORCALL nsimd_downcvt_sse42_u16_u16(nsimd_sse42_vu16 a0, nsimd_sse42_vu16 a1);
nsimd_sse42_vf16 NSIMD_VECTORCALL nsimd_downcvt_sse42_f16_f16(nsimd_sse42_vf16 a0, nsimd_sse42_vf16 a1);
nsimd_sse42_vi32 NSIMD_VECTORCALL nsimd_downcvt_sse42_i32_i32(nsimd_sse42_vi32 a0, nsimd_sse42_vi32 a1);
nsimd_sse42_vu32 NSIMD_VECTORCALL nsimd_downcvt_sse42_u32_u32(nsimd_sse42_vu32 a0, nsimd_sse42_vu32 a1);
nsimd_sse42_vf32 NSIMD_VECTORCALL nsimd_downcvt_sse42_f32_f32(nsimd_sse42_vf32 a0, nsimd_sse42_vf32 a1);
nsimd_sse42_vi64 NSIMD_VECTORCALL nsimd_downcvt_sse42_i64_i64(nsimd_sse42_vi64 a0, nsimd_sse42_vi64 a1);
nsimd_sse42_vu64 NSIMD_VECTORCALL nsimd_downcvt_sse42_u64_u64(nsimd_sse42_vu64 a0, nsimd_sse42_vu64 a1);
nsimd_sse42_vf64 NSIMD_VECTORCALL nsimd_downcvt_sse42_f64_f64(nsimd_sse42_vf64 a0, nsimd_sse42_vf64 a1);
/* sve256 (fixed 256-bit SVE) */
nsimd_sve256_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve256_i16_i16(nsimd_sve256_vi16 a0, nsimd_sve256_vi16 a1);
nsimd_sve256_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve256_u16_u16(nsimd_sve256_vu16 a0, nsimd_sve256_vu16 a1);
nsimd_sve256_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve256_f16_f16(nsimd_sve256_vf16 a0, nsimd_sve256_vf16 a1);
nsimd_sve256_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve256_i32_i32(nsimd_sve256_vi32 a0, nsimd_sve256_vi32 a1);
nsimd_sve256_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve256_u32_u32(nsimd_sve256_vu32 a0, nsimd_sve256_vu32 a1);
nsimd_sve256_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve256_f32_f32(nsimd_sve256_vf32 a0, nsimd_sve256_vf32 a1);
nsimd_sve256_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve256_i64_i64(nsimd_sve256_vi64 a0, nsimd_sve256_vi64 a1);
nsimd_sve256_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve256_u64_u64(nsimd_sve256_vu64 a0, nsimd_sve256_vu64 a1);
nsimd_sve256_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve256_f64_f64(nsimd_sve256_vf64 a0, nsimd_sve256_vf64 a1);
/* sve1024 (fixed 1024-bit SVE) */
nsimd_sve1024_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve1024_i16_i16(nsimd_sve1024_vi16 a0, nsimd_sve1024_vi16 a1);
nsimd_sve1024_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve1024_u16_u16(nsimd_sve1024_vu16 a0, nsimd_sve1024_vu16 a1);
nsimd_sve1024_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve1024_f16_f16(nsimd_sve1024_vf16 a0, nsimd_sve1024_vf16 a1);
nsimd_sve1024_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve1024_i32_i32(nsimd_sve1024_vi32 a0, nsimd_sve1024_vi32 a1);
nsimd_sve1024_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve1024_u32_u32(nsimd_sve1024_vu32 a0, nsimd_sve1024_vu32 a1);
nsimd_sve1024_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve1024_f32_f32(nsimd_sve1024_vf32 a0, nsimd_sve1024_vf32 a1);
nsimd_sve1024_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve1024_i64_i64(nsimd_sve1024_vi64 a0, nsimd_sve1024_vi64 a1);
nsimd_sve1024_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve1024_u64_u64(nsimd_sve1024_vu64 a0, nsimd_sve1024_vu64 a1);
nsimd_sve1024_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve1024_f64_f64(nsimd_sve1024_vf64 a0, nsimd_sve1024_vf64 a1);
/* vsx (POWER VSX) */
nsimd_vsx_vi16 NSIMD_VECTORCALL nsimd_downcvt_vsx_i16_i16(nsimd_vsx_vi16 a0, nsimd_vsx_vi16 a1);
nsimd_vsx_vu16 NSIMD_VECTORCALL nsimd_downcvt_vsx_u16_u16(nsimd_vsx_vu16 a0, nsimd_vsx_vu16 a1);
nsimd_vsx_vf16 NSIMD_VECTORCALL nsimd_downcvt_vsx_f16_f16(nsimd_vsx_vf16 a0, nsimd_vsx_vf16 a1);
nsimd_vsx_vi32 NSIMD_VECTORCALL nsimd_downcvt_vsx_i32_i32(nsimd_vsx_vi32 a0, nsimd_vsx_vi32 a1);
nsimd_vsx_vu32 NSIMD_VECTORCALL nsimd_downcvt_vsx_u32_u32(nsimd_vsx_vu32 a0, nsimd_vsx_vu32 a1);
nsimd_vsx_vf32 NSIMD_VECTORCALL nsimd_downcvt_vsx_f32_f32(nsimd_vsx_vf32 a0, nsimd_vsx_vf32 a1);
nsimd_vsx_vi64 NSIMD_VECTORCALL nsimd_downcvt_vsx_i64_i64(nsimd_vsx_vi64 a0, nsimd_vsx_vi64 a1);
nsimd_vsx_vu64 NSIMD_VECTORCALL nsimd_downcvt_vsx_u64_u64(nsimd_vsx_vu64 a0, nsimd_vsx_vu64 a1);
nsimd_vsx_vf64 NSIMD_VECTORCALL nsimd_downcvt_vsx_f64_f64(nsimd_vsx_vf64 a0, nsimd_vsx_vf64 a1);
/* sve128 (fixed 128-bit SVE) */
nsimd_sve128_vi16 NSIMD_VECTORCALL nsimd_downcvt_sve128_i16_i16(nsimd_sve128_vi16 a0, nsimd_sve128_vi16 a1);
nsimd_sve128_vu16 NSIMD_VECTORCALL nsimd_downcvt_sve128_u16_u16(nsimd_sve128_vu16 a0, nsimd_sve128_vu16 a1);
nsimd_sve128_vf16 NSIMD_VECTORCALL nsimd_downcvt_sve128_f16_f16(nsimd_sve128_vf16 a0, nsimd_sve128_vf16 a1);
nsimd_sve128_vi32 NSIMD_VECTORCALL nsimd_downcvt_sve128_i32_i32(nsimd_sve128_vi32 a0, nsimd_sve128_vi32 a1);
nsimd_sve128_vu32 NSIMD_VECTORCALL nsimd_downcvt_sve128_u32_u32(nsimd_sve128_vu32 a0, nsimd_sve128_vu32 a1);
nsimd_sve128_vf32 NSIMD_VECTORCALL nsimd_downcvt_sve128_f32_f32(nsimd_sve128_vf32 a0, nsimd_sve128_vf32 a1);
nsimd_sve128_vi64 NSIMD_VECTORCALL nsimd_downcvt_sve128_i64_i64(nsimd_sve128_vi64 a0, nsimd_sve128_vi64 a1);
nsimd_sve128_vu64 NSIMD_VECTORCALL nsimd_downcvt_sve128_u64_u64(nsimd_sve128_vu64 a0, nsimd_sve128_vu64 a1);
nsimd_sve128_vf64 NSIMD_VECTORCALL nsimd_downcvt_sve128_f64_f64(nsimd_sve128_vf64 a0, nsimd_sve128_vf64 a1);
/* vmx (POWER AltiVec/VMX) */
nsimd_vmx_vi16 NSIMD_VECTORCALL nsimd_downcvt_vmx_i16_i16(nsimd_vmx_vi16 a0, nsimd_vmx_vi16 a1);
nsimd_vmx_vu16 NSIMD_VECTORCALL nsimd_downcvt_vmx_u16_u16(nsimd_vmx_vu16 a0, nsimd_vmx_vu16 a1);
nsimd_vmx_vf16 NSIMD_VECTORCALL nsimd_downcvt_vmx_f16_f16(nsimd_vmx_vf16 a0, nsimd_vmx_vf16 a1);
nsimd_vmx_vi32 NSIMD_VECTORCALL nsimd_downcvt_vmx_i32_i32(nsimd_vmx_vi32 a0, nsimd_vmx_vi32 a1);
nsimd_vmx_vu32 NSIMD_VECTORCALL nsimd_downcvt_vmx_u32_u32(nsimd_vmx_vu32 a0, nsimd_vmx_vu32 a1);
nsimd_vmx_vf32 NSIMD_VECTORCALL nsimd_downcvt_vmx_f32_f32(nsimd_vmx_vf32 a0, nsimd_vmx_vf32 a1);
nsimd_vmx_vi64 NSIMD_VECTORCALL nsimd_downcvt_vmx_i64_i64(nsimd_vmx_vi64 a0, nsimd_vmx_vi64 a1);
nsimd_vmx_vu64 NSIMD_VECTORCALL nsimd_downcvt_vmx_u64_u64(nsimd_vmx_vu64 a0, nsimd_vmx_vu64 a1);
nsimd_vmx_vf64 NSIMD_VECTORCALL nsimd_downcvt_vmx_f64_f64(nsimd_vmx_vf64 a0, nsimd_vmx_vf64 a1);
/* avx */
nsimd_avx_vi16 NSIMD_VECTORCALL nsimd_downcvt_avx_i16_i16(nsimd_avx_vi16 a0, nsimd_avx_vi16 a1);
nsimd_avx_vu16 NSIMD_VECTORCALL nsimd_downcvt_avx_u16_u16(nsimd_avx_vu16 a0, nsimd_avx_vu16 a1);
nsimd_avx_vf16 NSIMD_VECTORCALL nsimd_downcvt_avx_f16_f16(nsimd_avx_vf16 a0, nsimd_avx_vf16 a1);
nsimd_avx_vi32 NSIMD_VECTORCALL nsimd_downcvt_avx_i32_i32(nsimd_avx_vi32 a0, nsimd_avx_vi32 a1);
nsimd_avx_vu32 NSIMD_VECTORCALL nsimd_downcvt_avx_u32_u32(nsimd_avx_vu32 a0, nsimd_avx_vu32 a1);
nsimd_avx_vf32 NSIMD_VECTORCALL nsimd_downcvt_avx_f32_f32(nsimd_avx_vf32 a0, nsimd_avx_vf32 a1);
nsimd_avx_vi64 NSIMD_VECTORCALL nsimd_downcvt_avx_i64_i64(nsimd_avx_vi64 a0, nsimd_avx_vi64 a1);
nsimd_avx_vu64 NSIMD_VECTORCALL nsimd_downcvt_avx_u64_u64(nsimd_avx_vu64 a0, nsimd_avx_vu64 a1);
nsimd_avx_vf64 NSIMD_VECTORCALL nsimd_downcvt_avx_f64_f64(nsimd_avx_vf64 a0, nsimd_avx_vf64 a1);
/* C++ per-extension overloads of downcvt. The two trailing type arguments
   are tags selecting the (from_type, to_type) pair, and the last argument
   selects the SIMD extension; a0 and a1 are the two input vectors merged
   into the returned narrower vector.
   NOTE(review): each overload below uses identical from/to tag types
   (e.g. "i16, i16") and identical input/return vector types, which cannot
   satisfy the sizeof_v<F> == 2 * sizeof_v<T> constraint declared above —
   confirm against the upstream nsimd generated headers. */
/* avx2 */
nsimd_avx2_vi16 NSIMD_VECTORCALL downcvt(nsimd_avx2_vi16 a0, nsimd_avx2_vi16 a1, i16, i16, avx2);
nsimd_avx2_vu16 NSIMD_VECTORCALL downcvt(nsimd_avx2_vu16 a0, nsimd_avx2_vu16 a1, u16, u16, avx2);
nsimd_avx2_vf16 NSIMD_VECTORCALL downcvt(nsimd_avx2_vf16 a0, nsimd_avx2_vf16 a1, f16, f16, avx2);
nsimd_avx2_vi32 NSIMD_VECTORCALL downcvt(nsimd_avx2_vi32 a0, nsimd_avx2_vi32 a1, i32, i32, avx2);
nsimd_avx2_vu32 NSIMD_VECTORCALL downcvt(nsimd_avx2_vu32 a0, nsimd_avx2_vu32 a1, u32, u32, avx2);
nsimd_avx2_vf32 NSIMD_VECTORCALL downcvt(nsimd_avx2_vf32 a0, nsimd_avx2_vf32 a1, f32, f32, avx2);
nsimd_avx2_vi64 NSIMD_VECTORCALL downcvt(nsimd_avx2_vi64 a0, nsimd_avx2_vi64 a1, i64, i64, avx2);
nsimd_avx2_vu64 NSIMD_VECTORCALL downcvt(nsimd_avx2_vu64 a0, nsimd_avx2_vu64 a1, u64, u64, avx2);
nsimd_avx2_vf64 NSIMD_VECTORCALL downcvt(nsimd_avx2_vf64 a0, nsimd_avx2_vf64 a1, f64, f64, avx2);
/* sve512 */
nsimd_sve512_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve512_vi16 a0, nsimd_sve512_vi16 a1, i16, i16, sve512);
nsimd_sve512_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve512_vu16 a0, nsimd_sve512_vu16 a1, u16, u16, sve512);
nsimd_sve512_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve512_vf16 a0, nsimd_sve512_vf16 a1, f16, f16, sve512);
nsimd_sve512_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve512_vi32 a0, nsimd_sve512_vi32 a1, i32, i32, sve512);
nsimd_sve512_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve512_vu32 a0, nsimd_sve512_vu32 a1, u32, u32, sve512);
nsimd_sve512_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve512_vf32 a0, nsimd_sve512_vf32 a1, f32, f32, sve512);
nsimd_sve512_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve512_vi64 a0, nsimd_sve512_vi64 a1, i64, i64, sve512);
nsimd_sve512_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve512_vu64 a0, nsimd_sve512_vu64 a1, u64, u64, sve512);
nsimd_sve512_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve512_vf64 a0, nsimd_sve512_vf64 a1, f64, f64, sve512);
/* sve */
nsimd_sve_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve_vi16 a0, nsimd_sve_vi16 a1, i16, i16, sve);
nsimd_sve_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve_vu16 a0, nsimd_sve_vu16 a1, u16, u16, sve);
nsimd_sve_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve_vf16 a0, nsimd_sve_vf16 a1, f16, f16, sve);
nsimd_sve_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve_vi32 a0, nsimd_sve_vi32 a1, i32, i32, sve);
nsimd_sve_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve_vu32 a0, nsimd_sve_vu32 a1, u32, u32, sve);
nsimd_sve_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve_vf32 a0, nsimd_sve_vf32 a1, f32, f32, sve);
nsimd_sve_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve_vi64 a0, nsimd_sve_vi64 a1, i64, i64, sve);
nsimd_sve_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve_vu64 a0, nsimd_sve_vu64 a1, u64, u64, sve);
nsimd_sve_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve_vf64 a0, nsimd_sve_vf64 a1, f64, f64, sve);
/* cpu */
nsimd_cpu_vi16 NSIMD_VECTORCALL downcvt(nsimd_cpu_vi16 a0, nsimd_cpu_vi16 a1, i16, i16, cpu);
nsimd_cpu_vu16 NSIMD_VECTORCALL downcvt(nsimd_cpu_vu16 a0, nsimd_cpu_vu16 a1, u16, u16, cpu);
nsimd_cpu_vf16 NSIMD_VECTORCALL downcvt(nsimd_cpu_vf16 a0, nsimd_cpu_vf16 a1, f16, f16, cpu);
nsimd_cpu_vi32 NSIMD_VECTORCALL downcvt(nsimd_cpu_vi32 a0, nsimd_cpu_vi32 a1, i32, i32, cpu);
nsimd_cpu_vu32 NSIMD_VECTORCALL downcvt(nsimd_cpu_vu32 a0, nsimd_cpu_vu32 a1, u32, u32, cpu);
nsimd_cpu_vf32 NSIMD_VECTORCALL downcvt(nsimd_cpu_vf32 a0, nsimd_cpu_vf32 a1, f32, f32, cpu);
nsimd_cpu_vi64 NSIMD_VECTORCALL downcvt(nsimd_cpu_vi64 a0, nsimd_cpu_vi64 a1, i64, i64, cpu);
nsimd_cpu_vu64 NSIMD_VECTORCALL downcvt(nsimd_cpu_vu64 a0, nsimd_cpu_vu64 a1, u64, u64, cpu);
nsimd_cpu_vf64 NSIMD_VECTORCALL downcvt(nsimd_cpu_vf64 a0, nsimd_cpu_vf64 a1, f64, f64, cpu);
/* sve2048 */
nsimd_sve2048_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vi16 a0, nsimd_sve2048_vi16 a1, i16, i16, sve2048);
nsimd_sve2048_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vu16 a0, nsimd_sve2048_vu16 a1, u16, u16, sve2048);
nsimd_sve2048_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vf16 a0, nsimd_sve2048_vf16 a1, f16, f16, sve2048);
nsimd_sve2048_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vi32 a0, nsimd_sve2048_vi32 a1, i32, i32, sve2048);
nsimd_sve2048_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vu32 a0, nsimd_sve2048_vu32 a1, u32, u32, sve2048);
nsimd_sve2048_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vf32 a0, nsimd_sve2048_vf32 a1, f32, f32, sve2048);
nsimd_sve2048_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vi64 a0, nsimd_sve2048_vi64 a1, i64, i64, sve2048);
nsimd_sve2048_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vu64 a0, nsimd_sve2048_vu64 a1, u64, u64, sve2048);
nsimd_sve2048_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve2048_vf64 a0, nsimd_sve2048_vf64 a1, f64, f64, sve2048);
/* neon128 */
nsimd_neon128_vi16 NSIMD_VECTORCALL downcvt(nsimd_neon128_vi16 a0, nsimd_neon128_vi16 a1, i16, i16, neon128);
nsimd_neon128_vu16 NSIMD_VECTORCALL downcvt(nsimd_neon128_vu16 a0, nsimd_neon128_vu16 a1, u16, u16, neon128);
nsimd_neon128_vf16 NSIMD_VECTORCALL downcvt(nsimd_neon128_vf16 a0, nsimd_neon128_vf16 a1, f16, f16, neon128);
nsimd_neon128_vi32 NSIMD_VECTORCALL downcvt(nsimd_neon128_vi32 a0, nsimd_neon128_vi32 a1, i32, i32, neon128);
nsimd_neon128_vu32 NSIMD_VECTORCALL downcvt(nsimd_neon128_vu32 a0, nsimd_neon128_vu32 a1, u32, u32, neon128);
nsimd_neon128_vf32 NSIMD_VECTORCALL downcvt(nsimd_neon128_vf32 a0, nsimd_neon128_vf32 a1, f32, f32, neon128);
nsimd_neon128_vi64 NSIMD_VECTORCALL downcvt(nsimd_neon128_vi64 a0, nsimd_neon128_vi64 a1, i64, i64, neon128);
nsimd_neon128_vu64 NSIMD_VECTORCALL downcvt(nsimd_neon128_vu64 a0, nsimd_neon128_vu64 a1, u64, u64, neon128);
nsimd_neon128_vf64 NSIMD_VECTORCALL downcvt(nsimd_neon128_vf64 a0, nsimd_neon128_vf64 a1, f64, f64, neon128);
/* avx512_skylake */
nsimd_avx512_skylake_vi16 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vi16 a0, nsimd_avx512_skylake_vi16 a1, i16, i16, avx512_skylake);
nsimd_avx512_skylake_vu16 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vu16 a0, nsimd_avx512_skylake_vu16 a1, u16, u16, avx512_skylake);
nsimd_avx512_skylake_vf16 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vf16 a0, nsimd_avx512_skylake_vf16 a1, f16, f16, avx512_skylake);
nsimd_avx512_skylake_vi32 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vi32 a0, nsimd_avx512_skylake_vi32 a1, i32, i32, avx512_skylake);
nsimd_avx512_skylake_vu32 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vu32 a0, nsimd_avx512_skylake_vu32 a1, u32, u32, avx512_skylake);
nsimd_avx512_skylake_vf32 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vf32 a0, nsimd_avx512_skylake_vf32 a1, f32, f32, avx512_skylake);
nsimd_avx512_skylake_vi64 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vi64 a0, nsimd_avx512_skylake_vi64 a1, i64, i64, avx512_skylake);
nsimd_avx512_skylake_vu64 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vu64 a0, nsimd_avx512_skylake_vu64 a1, u64, u64, avx512_skylake);
nsimd_avx512_skylake_vf64 NSIMD_VECTORCALL downcvt(nsimd_avx512_skylake_vf64 a0, nsimd_avx512_skylake_vf64 a1, f64, f64, avx512_skylake);
/* aarch64 */
nsimd_aarch64_vi16 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vi16 a0, nsimd_aarch64_vi16 a1, i16, i16, aarch64);
nsimd_aarch64_vu16 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vu16 a0, nsimd_aarch64_vu16 a1, u16, u16, aarch64);
nsimd_aarch64_vf16 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vf16 a0, nsimd_aarch64_vf16 a1, f16, f16, aarch64);
nsimd_aarch64_vi32 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vi32 a0, nsimd_aarch64_vi32 a1, i32, i32, aarch64);
nsimd_aarch64_vu32 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vu32 a0, nsimd_aarch64_vu32 a1, u32, u32, aarch64);
nsimd_aarch64_vf32 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vf32 a0, nsimd_aarch64_vf32 a1, f32, f32, aarch64);
nsimd_aarch64_vi64 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vi64 a0, nsimd_aarch64_vi64 a1, i64, i64, aarch64);
nsimd_aarch64_vu64 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vu64 a0, nsimd_aarch64_vu64 a1, u64, u64, aarch64);
nsimd_aarch64_vf64 NSIMD_VECTORCALL downcvt(nsimd_aarch64_vf64 a0, nsimd_aarch64_vf64 a1, f64, f64, aarch64);
/* avx512_knl */
nsimd_avx512_knl_vi16 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vi16 a0, nsimd_avx512_knl_vi16 a1, i16, i16, avx512_knl);
nsimd_avx512_knl_vu16 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vu16 a0, nsimd_avx512_knl_vu16 a1, u16, u16, avx512_knl);
nsimd_avx512_knl_vf16 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vf16 a0, nsimd_avx512_knl_vf16 a1, f16, f16, avx512_knl);
nsimd_avx512_knl_vi32 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vi32 a0, nsimd_avx512_knl_vi32 a1, i32, i32, avx512_knl);
nsimd_avx512_knl_vu32 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vu32 a0, nsimd_avx512_knl_vu32 a1, u32, u32, avx512_knl);
nsimd_avx512_knl_vf32 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vf32 a0, nsimd_avx512_knl_vf32 a1, f32, f32, avx512_knl);
nsimd_avx512_knl_vi64 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vi64 a0, nsimd_avx512_knl_vi64 a1, i64, i64, avx512_knl);
nsimd_avx512_knl_vu64 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vu64 a0, nsimd_avx512_knl_vu64 a1, u64, u64, avx512_knl);
nsimd_avx512_knl_vf64 NSIMD_VECTORCALL downcvt(nsimd_avx512_knl_vf64 a0, nsimd_avx512_knl_vf64 a1, f64, f64, avx512_knl);
/* sse2 */
nsimd_sse2_vi16 NSIMD_VECTORCALL downcvt(nsimd_sse2_vi16 a0, nsimd_sse2_vi16 a1, i16, i16, sse2);
nsimd_sse2_vu16 NSIMD_VECTORCALL downcvt(nsimd_sse2_vu16 a0, nsimd_sse2_vu16 a1, u16, u16, sse2);
nsimd_sse2_vf16 NSIMD_VECTORCALL downcvt(nsimd_sse2_vf16 a0, nsimd_sse2_vf16 a1, f16, f16, sse2);
nsimd_sse2_vi32 NSIMD_VECTORCALL downcvt(nsimd_sse2_vi32 a0, nsimd_sse2_vi32 a1, i32, i32, sse2);
nsimd_sse2_vu32 NSIMD_VECTORCALL downcvt(nsimd_sse2_vu32 a0, nsimd_sse2_vu32 a1, u32, u32, sse2);
nsimd_sse2_vf32 NSIMD_VECTORCALL downcvt(nsimd_sse2_vf32 a0, nsimd_sse2_vf32 a1, f32, f32, sse2);
nsimd_sse2_vi64 NSIMD_VECTORCALL downcvt(nsimd_sse2_vi64 a0, nsimd_sse2_vi64 a1, i64, i64, sse2);
nsimd_sse2_vu64 NSIMD_VECTORCALL downcvt(nsimd_sse2_vu64 a0, nsimd_sse2_vu64 a1, u64, u64, sse2);
nsimd_sse2_vf64 NSIMD_VECTORCALL downcvt(nsimd_sse2_vf64 a0, nsimd_sse2_vf64 a1, f64, f64, sse2);
/* sse42 */
nsimd_sse42_vi16 NSIMD_VECTORCALL downcvt(nsimd_sse42_vi16 a0, nsimd_sse42_vi16 a1, i16, i16, sse42);
nsimd_sse42_vu16 NSIMD_VECTORCALL downcvt(nsimd_sse42_vu16 a0, nsimd_sse42_vu16 a1, u16, u16, sse42);
nsimd_sse42_vf16 NSIMD_VECTORCALL downcvt(nsimd_sse42_vf16 a0, nsimd_sse42_vf16 a1, f16, f16, sse42);
nsimd_sse42_vi32 NSIMD_VECTORCALL downcvt(nsimd_sse42_vi32 a0, nsimd_sse42_vi32 a1, i32, i32, sse42);
nsimd_sse42_vu32 NSIMD_VECTORCALL downcvt(nsimd_sse42_vu32 a0, nsimd_sse42_vu32 a1, u32, u32, sse42);
nsimd_sse42_vf32 NSIMD_VECTORCALL downcvt(nsimd_sse42_vf32 a0, nsimd_sse42_vf32 a1, f32, f32, sse42);
nsimd_sse42_vi64 NSIMD_VECTORCALL downcvt(nsimd_sse42_vi64 a0, nsimd_sse42_vi64 a1, i64, i64, sse42);
nsimd_sse42_vu64 NSIMD_VECTORCALL downcvt(nsimd_sse42_vu64 a0, nsimd_sse42_vu64 a1, u64, u64, sse42);
nsimd_sse42_vf64 NSIMD_VECTORCALL downcvt(nsimd_sse42_vf64 a0, nsimd_sse42_vf64 a1, f64, f64, sse42);
/* sve256 */
nsimd_sve256_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve256_vi16 a0, nsimd_sve256_vi16 a1, i16, i16, sve256);
nsimd_sve256_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve256_vu16 a0, nsimd_sve256_vu16 a1, u16, u16, sve256);
nsimd_sve256_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve256_vf16 a0, nsimd_sve256_vf16 a1, f16, f16, sve256);
nsimd_sve256_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve256_vi32 a0, nsimd_sve256_vi32 a1, i32, i32, sve256);
nsimd_sve256_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve256_vu32 a0, nsimd_sve256_vu32 a1, u32, u32, sve256);
nsimd_sve256_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve256_vf32 a0, nsimd_sve256_vf32 a1, f32, f32, sve256);
nsimd_sve256_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve256_vi64 a0, nsimd_sve256_vi64 a1, i64, i64, sve256);
nsimd_sve256_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve256_vu64 a0, nsimd_sve256_vu64 a1, u64, u64, sve256);
nsimd_sve256_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve256_vf64 a0, nsimd_sve256_vf64 a1, f64, f64, sve256);
/* sve1024 */
nsimd_sve1024_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vi16 a0, nsimd_sve1024_vi16 a1, i16, i16, sve1024);
nsimd_sve1024_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vu16 a0, nsimd_sve1024_vu16 a1, u16, u16, sve1024);
nsimd_sve1024_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vf16 a0, nsimd_sve1024_vf16 a1, f16, f16, sve1024);
nsimd_sve1024_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vi32 a0, nsimd_sve1024_vi32 a1, i32, i32, sve1024);
nsimd_sve1024_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vu32 a0, nsimd_sve1024_vu32 a1, u32, u32, sve1024);
nsimd_sve1024_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vf32 a0, nsimd_sve1024_vf32 a1, f32, f32, sve1024);
nsimd_sve1024_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vi64 a0, nsimd_sve1024_vi64 a1, i64, i64, sve1024);
nsimd_sve1024_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vu64 a0, nsimd_sve1024_vu64 a1, u64, u64, sve1024);
nsimd_sve1024_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve1024_vf64 a0, nsimd_sve1024_vf64 a1, f64, f64, sve1024);
/* vsx */
nsimd_vsx_vi16 NSIMD_VECTORCALL downcvt(nsimd_vsx_vi16 a0, nsimd_vsx_vi16 a1, i16, i16, vsx);
nsimd_vsx_vu16 NSIMD_VECTORCALL downcvt(nsimd_vsx_vu16 a0, nsimd_vsx_vu16 a1, u16, u16, vsx);
nsimd_vsx_vf16 NSIMD_VECTORCALL downcvt(nsimd_vsx_vf16 a0, nsimd_vsx_vf16 a1, f16, f16, vsx);
nsimd_vsx_vi32 NSIMD_VECTORCALL downcvt(nsimd_vsx_vi32 a0, nsimd_vsx_vi32 a1, i32, i32, vsx);
nsimd_vsx_vu32 NSIMD_VECTORCALL downcvt(nsimd_vsx_vu32 a0, nsimd_vsx_vu32 a1, u32, u32, vsx);
nsimd_vsx_vf32 NSIMD_VECTORCALL downcvt(nsimd_vsx_vf32 a0, nsimd_vsx_vf32 a1, f32, f32, vsx);
nsimd_vsx_vi64 NSIMD_VECTORCALL downcvt(nsimd_vsx_vi64 a0, nsimd_vsx_vi64 a1, i64, i64, vsx);
nsimd_vsx_vu64 NSIMD_VECTORCALL downcvt(nsimd_vsx_vu64 a0, nsimd_vsx_vu64 a1, u64, u64, vsx);
nsimd_vsx_vf64 NSIMD_VECTORCALL downcvt(nsimd_vsx_vf64 a0, nsimd_vsx_vf64 a1, f64, f64, vsx);
/* sve128 */
nsimd_sve128_vi16 NSIMD_VECTORCALL downcvt(nsimd_sve128_vi16 a0, nsimd_sve128_vi16 a1, i16, i16, sve128);
nsimd_sve128_vu16 NSIMD_VECTORCALL downcvt(nsimd_sve128_vu16 a0, nsimd_sve128_vu16 a1, u16, u16, sve128);
nsimd_sve128_vf16 NSIMD_VECTORCALL downcvt(nsimd_sve128_vf16 a0, nsimd_sve128_vf16 a1, f16, f16, sve128);
nsimd_sve128_vi32 NSIMD_VECTORCALL downcvt(nsimd_sve128_vi32 a0, nsimd_sve128_vi32 a1, i32, i32, sve128);
nsimd_sve128_vu32 NSIMD_VECTORCALL downcvt(nsimd_sve128_vu32 a0, nsimd_sve128_vu32 a1, u32, u32, sve128);
nsimd_sve128_vf32 NSIMD_VECTORCALL downcvt(nsimd_sve128_vf32 a0, nsimd_sve128_vf32 a1, f32, f32, sve128);
nsimd_sve128_vi64 NSIMD_VECTORCALL downcvt(nsimd_sve128_vi64 a0, nsimd_sve128_vi64 a1, i64, i64, sve128);
nsimd_sve128_vu64 NSIMD_VECTORCALL downcvt(nsimd_sve128_vu64 a0, nsimd_sve128_vu64 a1, u64, u64, sve128);
nsimd_sve128_vf64 NSIMD_VECTORCALL downcvt(nsimd_sve128_vf64 a0, nsimd_sve128_vf64 a1, f64, f64, sve128);
/* vmx */
nsimd_vmx_vi16 NSIMD_VECTORCALL downcvt(nsimd_vmx_vi16 a0, nsimd_vmx_vi16 a1, i16, i16, vmx);
nsimd_vmx_vu16 NSIMD_VECTORCALL downcvt(nsimd_vmx_vu16 a0, nsimd_vmx_vu16 a1, u16, u16, vmx);
nsimd_vmx_vf16 NSIMD_VECTORCALL downcvt(nsimd_vmx_vf16 a0, nsimd_vmx_vf16 a1, f16, f16, vmx);
nsimd_vmx_vi32 NSIMD_VECTORCALL downcvt(nsimd_vmx_vi32 a0, nsimd_vmx_vi32 a1, i32, i32, vmx);
nsimd_vmx_vu32 NSIMD_VECTORCALL downcvt(nsimd_vmx_vu32 a0, nsimd_vmx_vu32 a1, u32, u32, vmx);
nsimd_vmx_vf32 NSIMD_VECTORCALL downcvt(nsimd_vmx_vf32 a0, nsimd_vmx_vf32 a1, f32, f32, vmx);
nsimd_vmx_vi64 NSIMD_VECTORCALL downcvt(nsimd_vmx_vi64 a0, nsimd_vmx_vi64 a1, i64, i64, vmx);
nsimd_vmx_vu64 NSIMD_VECTORCALL downcvt(nsimd_vmx_vu64 a0, nsimd_vmx_vu64 a1, u64, u64, vmx);
nsimd_vmx_vf64 NSIMD_VECTORCALL downcvt(nsimd_vmx_vf64 a0, nsimd_vmx_vf64 a1, f64, f64, vmx);
/* avx */
nsimd_avx_vi16 NSIMD_VECTORCALL downcvt(nsimd_avx_vi16 a0, nsimd_avx_vi16 a1, i16, i16, avx);
nsimd_avx_vu16 NSIMD_VECTORCALL downcvt(nsimd_avx_vu16 a0, nsimd_avx_vu16 a1, u16, u16, avx);
nsimd_avx_vf16 NSIMD_VECTORCALL downcvt(nsimd_avx_vf16 a0, nsimd_avx_vf16 a1, f16, f16, avx);
nsimd_avx_vi32 NSIMD_VECTORCALL downcvt(nsimd_avx_vi32 a0, nsimd_avx_vi32 a1, i32, i32, avx);
nsimd_avx_vu32 NSIMD_VECTORCALL downcvt(nsimd_avx_vu32 a0, nsimd_avx_vu32 a1, u32, u32, avx);
nsimd_avx_vf32 NSIMD_VECTORCALL downcvt(nsimd_avx_vf32 a0, nsimd_avx_vf32 a1, f32, f32, avx);
nsimd_avx_vi64 NSIMD_VECTORCALL downcvt(nsimd_avx_vi64 a0, nsimd_avx_vi64 a1, i64, i64, avx);
nsimd_avx_vu64 NSIMD_VECTORCALL downcvt(nsimd_avx_vu64 a0, nsimd_avx_vu64 a1, u64, u64, avx);
nsimd_avx_vf64 NSIMD_VECTORCALL downcvt(nsimd_avx_vf64 a0, nsimd_avx_vf64 a1, f64, f64, avx);