Compute the inverse hyperbolic tangent of its argument with a precision of 1.0 ULP. For more information, visit https://sleef.org/purec.xhtml.
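For reference, the quantity computed is the standard inverse hyperbolic tangent, atanh(x) = (1/2) * ln((1 + x) / (1 - x)), which is defined for |x| < 1.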
#define vatanh_u10(a0, type)
#define vatanh_u10_e(a0, type, simd_ext)
#define nsimd_atanh_u10(a0)
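A minimal usage sketch of the generic C base API, assuming the companion vec, vlen, vloadu and vstoreu macros behave as in the rest of the C API and that NSIMD_SIMD selects the target extension at compile time; the buffer-processing loop and names are illustrative only:

```c
#include <nsimd/nsimd.h>

/* Apply atanh_u10 element-wise to a buffer using the generic C base API.
   vlen/vloadu/vatanh_u10/vstoreu dispatch on the f32 type token and on the
   SIMD extension selected when the translation unit is compiled. */
void atanh_buffer(f32 *buf, int n) {
  int i;
  int step = vlen(f32);
  for (i = 0; i + step <= n; i += step) {
    vec(f32) v = vloadu(&buf[i], f32);  /* unaligned load            */
    v = vatanh_u10(v, f32);             /* inputs must satisfy |x|<1 */
    vstoreu(&buf[i], v, f32);           /* unaligned store           */
  }
  /* the tail (n not a multiple of step) would be handled with scalar code */
}
```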
template <NSIMD_CONCEPT_VALUE_TYPE T> typename simd_traits<T, NSIMD_SIMD>::simd_vector NSIMD_VECTORCALL atanh_u10(typename simd_traits<T, NSIMD_SIMD>::simd_vector a0, T);
template <NSIMD_CONCEPT_VALUE_TYPE T, NSIMD_CONCEPT_SIMD_EXT SimdExt> pack<T, 1, SimdExt> atanh_u10(pack<T, 1, SimdExt> const& a0);
template <NSIMD_CONCEPT_VALUE_TYPE T, int N, NSIMD_CONCEPT_SIMD_EXT SimdExt> pack<T, N, SimdExt> atanh_u10(pack<T, N, SimdExt> const& a0);
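A minimal sketch of the pack-based C++ API, assuming nsimd::loadu, nsimd::storeu and nsimd::len with their usual pack signatures; the buffer name and loop bounds are illustrative only:

```c++
#include <nsimd/nsimd-all.hpp>

// Apply atanh_u10 element-wise to a buffer of f32 using nsimd::pack.
void atanh_buffer(f32 *buf, int n) {
  typedef nsimd::pack<f32> pack_t;
  int step = nsimd::len(pack_t());            // number of lanes in a pack
  int i = 0;
  for (; i + step <= n; i += step) {
    pack_t v = nsimd::loadu<pack_t>(&buf[i]); // unaligned load
    v = nsimd::atanh_u10(v);                  // 1.0 ULP atanh, inputs with |x| < 1
    nsimd::storeu(&buf[i], v);                // unaligned store
  }
  // the tail (i < n) would be handled with scalar code
}
```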
nsimd_avx2_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_avx2_f64(nsimd_avx2_vf64 a0);
nsimd_avx2_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_avx2_f32(nsimd_avx2_vf32 a0);
nsimd_avx2_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_avx2_f16(nsimd_avx2_vf16 a0);
nsimd_sve512_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve512_f64(nsimd_sve512_vf64 a0);
nsimd_sve512_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve512_f32(nsimd_sve512_vf32 a0);
nsimd_sve512_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve512_f16(nsimd_sve512_vf16 a0);
nsimd_sve_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve_f64(nsimd_sve_vf64 a0);
nsimd_sve_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve_f32(nsimd_sve_vf32 a0);
nsimd_sve_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve_f16(nsimd_sve_vf16 a0);
nsimd_cpu_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_cpu_f64(nsimd_cpu_vf64 a0);
nsimd_cpu_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_cpu_f32(nsimd_cpu_vf32 a0);
nsimd_cpu_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_cpu_f16(nsimd_cpu_vf16 a0);
nsimd_sve2048_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve2048_f64(nsimd_sve2048_vf64 a0);
nsimd_sve2048_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve2048_f32(nsimd_sve2048_vf32 a0);
nsimd_sve2048_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve2048_f16(nsimd_sve2048_vf16 a0);
nsimd_neon128_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_neon128_f64(nsimd_neon128_vf64 a0);
nsimd_neon128_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_neon128_f32(nsimd_neon128_vf32 a0);
nsimd_neon128_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_neon128_f16(nsimd_neon128_vf16 a0);
nsimd_avx512_skylake_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_skylake_f64(nsimd_avx512_skylake_vf64 a0);
nsimd_avx512_skylake_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_skylake_f32(nsimd_avx512_skylake_vf32 a0);
nsimd_avx512_skylake_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_skylake_f16(nsimd_avx512_skylake_vf16 a0);
nsimd_aarch64_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_aarch64_f64(nsimd_aarch64_vf64 a0);
nsimd_aarch64_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_aarch64_f32(nsimd_aarch64_vf32 a0);
nsimd_aarch64_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_aarch64_f16(nsimd_aarch64_vf16 a0);
nsimd_avx512_knl_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_knl_f64(nsimd_avx512_knl_vf64 a0);
nsimd_avx512_knl_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_knl_f32(nsimd_avx512_knl_vf32 a0);
nsimd_avx512_knl_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_avx512_knl_f16(nsimd_avx512_knl_vf16 a0);
nsimd_sse2_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sse2_f64(nsimd_sse2_vf64 a0);
nsimd_sse2_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sse2_f32(nsimd_sse2_vf32 a0);
nsimd_sse2_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sse2_f16(nsimd_sse2_vf16 a0);
nsimd_sse42_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sse42_f64(nsimd_sse42_vf64 a0);
nsimd_sse42_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sse42_f32(nsimd_sse42_vf32 a0);
nsimd_sse42_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sse42_f16(nsimd_sse42_vf16 a0);
nsimd_sve256_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve256_f64(nsimd_sve256_vf64 a0);
nsimd_sve256_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve256_f32(nsimd_sve256_vf32 a0);
nsimd_sve256_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve256_f16(nsimd_sve256_vf16 a0);
nsimd_sve1024_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve1024_f64(nsimd_sve1024_vf64 a0);
nsimd_sve1024_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve1024_f32(nsimd_sve1024_vf32 a0);
nsimd_sve1024_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve1024_f16(nsimd_sve1024_vf16 a0);
nsimd_vsx_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_vsx_f64(nsimd_vsx_vf64 a0);
nsimd_vsx_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_vsx_f32(nsimd_vsx_vf32 a0);
nsimd_vsx_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_vsx_f16(nsimd_vsx_vf16 a0);
nsimd_sve128_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_sve128_f64(nsimd_sve128_vf64 a0);
nsimd_sve128_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_sve128_f32(nsimd_sve128_vf32 a0);
nsimd_sve128_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_sve128_f16(nsimd_sve128_vf16 a0);
nsimd_vmx_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_vmx_f64(nsimd_vmx_vf64 a0);
nsimd_vmx_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_vmx_f32(nsimd_vmx_vf32 a0);
nsimd_vmx_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_vmx_f16(nsimd_vmx_vf16 a0);
nsimd_avx_vf64 NSIMD_VECTORCALL nsimd_atanh_u10_avx_f64(nsimd_avx_vf64 a0);
nsimd_avx_vf32 NSIMD_VECTORCALL nsimd_atanh_u10_avx_f32(nsimd_avx_vf32 a0);
nsimd_avx_vf16 NSIMD_VECTORCALL nsimd_atanh_u10_avx_f16(nsimd_avx_vf16 a0);
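When a single extension is targeted directly rather than through the generic macros, the corresponding symbols can be called by name. A sketch for AVX2 and f32, assuming the load, store and len operators follow the same nsimd_<op>_avx2_f32 naming pattern and that the library was built with AVX2 support; the buffer name is illustrative only:

```c
#include <nsimd/nsimd.h>

/* AVX2/f32 specialization called directly, bypassing the generic macros. */
void atanh_buffer_avx2(f32 *buf, int n) {
  int i;
  int step = nsimd_len_avx2_f32();
  for (i = 0; i + step <= n; i += step) {
    nsimd_avx2_vf32 v = nsimd_loadu_avx2_f32(&buf[i]);
    v = nsimd_atanh_u10_avx2_f32(v);
    nsimd_storeu_avx2_f32(&buf[i], v);
  }
}
```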
nsimd_avx2_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_avx2_vf64 a0, f64, avx2);
nsimd_avx2_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_avx2_vf32 a0, f32, avx2);
nsimd_avx2_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_avx2_vf16 a0, f16, avx2);
nsimd_sve512_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve512_vf64 a0, f64, sve512);
nsimd_sve512_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve512_vf32 a0, f32, sve512);
nsimd_sve512_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve512_vf16 a0, f16, sve512);
nsimd_sve_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve_vf64 a0, f64, sve);
nsimd_sve_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve_vf32 a0, f32, sve);
nsimd_sve_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve_vf16 a0, f16, sve);
nsimd_cpu_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_cpu_vf64 a0, f64, cpu);
nsimd_cpu_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_cpu_vf32 a0, f32, cpu);
nsimd_cpu_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_cpu_vf16 a0, f16, cpu);
nsimd_sve2048_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve2048_vf64 a0, f64, sve2048);
nsimd_sve2048_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve2048_vf32 a0, f32, sve2048);
nsimd_sve2048_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve2048_vf16 a0, f16, sve2048);
nsimd_neon128_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_neon128_vf64 a0, f64, neon128);
nsimd_neon128_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_neon128_vf32 a0, f32, neon128);
nsimd_neon128_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_neon128_vf16 a0, f16, neon128);
nsimd_avx512_skylake_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_skylake_vf64 a0, f64, avx512_skylake);
nsimd_avx512_skylake_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_skylake_vf32 a0, f32, avx512_skylake);
nsimd_avx512_skylake_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_skylake_vf16 a0, f16, avx512_skylake);
nsimd_aarch64_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_aarch64_vf64 a0, f64, aarch64);
nsimd_aarch64_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_aarch64_vf32 a0, f32, aarch64);
nsimd_aarch64_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_aarch64_vf16 a0, f16, aarch64);
nsimd_avx512_knl_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_knl_vf64 a0, f64, avx512_knl);
nsimd_avx512_knl_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_knl_vf32 a0, f32, avx512_knl);
nsimd_avx512_knl_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_avx512_knl_vf16 a0, f16, avx512_knl);
nsimd_sse2_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sse2_vf64 a0, f64, sse2);
nsimd_sse2_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sse2_vf32 a0, f32, sse2);
nsimd_sse2_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sse2_vf16 a0, f16, sse2);
nsimd_sse42_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sse42_vf64 a0, f64, sse42);
nsimd_sse42_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sse42_vf32 a0, f32, sse42);
nsimd_sse42_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sse42_vf16 a0, f16, sse42);
nsimd_sve256_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve256_vf64 a0, f64, sve256);
nsimd_sve256_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve256_vf32 a0, f32, sve256);
nsimd_sve256_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve256_vf16 a0, f16, sve256);
nsimd_sve1024_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve1024_vf64 a0, f64, sve1024);
nsimd_sve1024_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve1024_vf32 a0, f32, sve1024);
nsimd_sve1024_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve1024_vf16 a0, f16, sve1024);
nsimd_vsx_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_vsx_vf64 a0, f64, vsx);
nsimd_vsx_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_vsx_vf32 a0, f32, vsx);
nsimd_vsx_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_vsx_vf16 a0, f16, vsx);
nsimd_sve128_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_sve128_vf64 a0, f64, sve128);
nsimd_sve128_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_sve128_vf32 a0, f32, sve128);
nsimd_sve128_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_sve128_vf16 a0, f16, sve128);
nsimd_vmx_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_vmx_vf64 a0, f64, vmx);
nsimd_vmx_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_vmx_vf32 a0, f32, vmx);
nsimd_vmx_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_vmx_vf16 a0, f16, vmx);
nsimd_avx_vf64 NSIMD_VECTORCALL atanh_u10(nsimd_avx_vf64 a0, f64, avx);
nsimd_avx_vf32 NSIMD_VECTORCALL atanh_u10(nsimd_avx_vf32 a0, f32, avx);
nsimd_avx_vf16 NSIMD_VECTORCALL atanh_u10(nsimd_avx_vf16 a0, f16, avx);