10#ifndef EIGEN_PACKET_MATH_SVE_H
11#define EIGEN_PACKET_MATH_SVE_H
14#include "../../InternalHeaderCheck.h"
18#ifndef EIGEN_CACHEFRIENDLY_PRODUCT_THRESHOLD
19#define EIGEN_CACHEFRIENDLY_PRODUCT_THRESHOLD 8
22#ifndef EIGEN_HAS_SINGLE_INSTRUCTION_MADD
23#define EIGEN_HAS_SINGLE_INSTRUCTION_MADD
26#define EIGEN_ARCH_DEFAULT_NUMBER_OF_REGISTERS 32
// Number of `Scalar` lanes that fit in an SVE vector of `SVEVectorLength`
// bits. The vector length is fixed at compile time via EIGEN_ARM64_SVE_VL
// (see the arm_sve_vector_bits attribute on the packet typedefs below).
template <typename Scalar, int SVEVectorLength>
struct sve_packet_size_selector {
  enum { size = SVEVectorLength / (sizeof(Scalar) * CHAR_BIT) };
};
34typedef svint32_t PacketXi __attribute__((arm_sve_vector_bits(EIGEN_ARM64_SVE_VL)));
37struct packet_traits<numext::int32_t> : default_packet_traits {
38 typedef PacketXi type;
39 typedef PacketXi half;
43 size = sve_packet_size_selector<numext::int32_t, EIGEN_ARM64_SVE_VL>::size,
61struct unpacket_traits<PacketXi> {
62 typedef numext::int32_t type;
63 typedef PacketXi half;
65 size = sve_packet_size_selector<numext::int32_t, EIGEN_ARM64_SVE_VL>::size,
68 masked_load_available =
false,
69 masked_store_available =
false
74EIGEN_STRONG_INLINE
void prefetch<numext::int32_t>(
const numext::int32_t* addr) {
75 svprfw(svptrue_b32(), addr, SV_PLDL1KEEP);
79EIGEN_STRONG_INLINE PacketXi pset1<PacketXi>(
const numext::int32_t& from) {
80 return svdup_n_s32(from);
84EIGEN_STRONG_INLINE PacketXi plset<PacketXi>(
const numext::int32_t& a) {
85 numext::int32_t c[packet_traits<numext::int32_t>::size];
86 for (
int i = 0; i < packet_traits<numext::int32_t>::size; i++) c[i] = i;
87 return svadd_s32_x(svptrue_b32(), pset1<PacketXi>(a), svld1_s32(svptrue_b32(), c));
91EIGEN_STRONG_INLINE PacketXi padd<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
92 return svadd_s32_x(svptrue_b32(), a, b);
96EIGEN_STRONG_INLINE PacketXi psub<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
97 return svsub_s32_x(svptrue_b32(), a, b);
101EIGEN_STRONG_INLINE PacketXi pnegate(
const PacketXi& a) {
102 return svneg_s32_x(svptrue_b32(), a);
106EIGEN_STRONG_INLINE PacketXi pconj(
const PacketXi& a) {
111EIGEN_STRONG_INLINE PacketXi pmul<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
112 return svmul_s32_x(svptrue_b32(), a, b);
116EIGEN_STRONG_INLINE PacketXi pdiv<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
117 return svdiv_s32_x(svptrue_b32(), a, b);
121EIGEN_STRONG_INLINE PacketXi pmadd(
const PacketXi& a,
const PacketXi& b,
const PacketXi& c) {
122 return svmla_s32_x(svptrue_b32(), c, a, b);
126EIGEN_STRONG_INLINE PacketXi pmin<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
127 return svmin_s32_x(svptrue_b32(), a, b);
131EIGEN_STRONG_INLINE PacketXi pmax<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
132 return svmax_s32_x(svptrue_b32(), a, b);
136EIGEN_STRONG_INLINE PacketXi pcmp_le<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
137 return svdup_n_s32_z(svcmple_s32(svptrue_b32(), a, b), 0xffffffffu);
141EIGEN_STRONG_INLINE PacketXi pcmp_lt<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
142 return svdup_n_s32_z(svcmplt_s32(svptrue_b32(), a, b), 0xffffffffu);
146EIGEN_STRONG_INLINE PacketXi pcmp_eq<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
147 return svdup_n_s32_z(svcmpeq_s32(svptrue_b32(), a, b), 0xffffffffu);
151EIGEN_STRONG_INLINE PacketXi ptrue<PacketXi>(
const PacketXi& ) {
152 return svdup_n_s32_x(svptrue_b32(), 0xffffffffu);
156EIGEN_STRONG_INLINE PacketXi pzero<PacketXi>(
const PacketXi& ) {
157 return svdup_n_s32_x(svptrue_b32(), 0);
161EIGEN_STRONG_INLINE PacketXi pand<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
162 return svand_s32_x(svptrue_b32(), a, b);
166EIGEN_STRONG_INLINE PacketXi por<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
167 return svorr_s32_x(svptrue_b32(), a, b);
171EIGEN_STRONG_INLINE PacketXi pxor<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
172 return sveor_s32_x(svptrue_b32(), a, b);
176EIGEN_STRONG_INLINE PacketXi pandnot<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
177 return svbic_s32_x(svptrue_b32(), a, b);
181EIGEN_STRONG_INLINE PacketXi parithmetic_shift_right(PacketXi a) {
182 return svasrd_n_s32_x(svptrue_b32(), a, N);
186EIGEN_STRONG_INLINE PacketXi plogical_shift_right(PacketXi a) {
187 return svreinterpret_s32_u32(svlsr_n_u32_x(svptrue_b32(), svreinterpret_u32_s32(a), N));
191EIGEN_STRONG_INLINE PacketXi plogical_shift_left(PacketXi a) {
192 return svlsl_n_s32_x(svptrue_b32(), a, N);
196EIGEN_STRONG_INLINE PacketXi pload<PacketXi>(
const numext::int32_t* from) {
197 EIGEN_DEBUG_ALIGNED_LOAD
return svld1_s32(svptrue_b32(), from);
201EIGEN_STRONG_INLINE PacketXi ploadu<PacketXi>(
const numext::int32_t* from) {
202 EIGEN_DEBUG_UNALIGNED_LOAD
return svld1_s32(svptrue_b32(), from);
206EIGEN_STRONG_INLINE PacketXi ploaddup<PacketXi>(
const numext::int32_t* from) {
207 svuint32_t indices = svindex_u32(0, 1);
208 indices = svzip1_u32(indices, indices);
209 return svld1_gather_u32index_s32(svptrue_b32(), from, indices);
213EIGEN_STRONG_INLINE PacketXi ploadquad<PacketXi>(
const numext::int32_t* from) {
214 svuint32_t indices = svindex_u32(0, 1);
215 indices = svzip1_u32(indices, indices);
216 indices = svzip1_u32(indices, indices);
217 return svld1_gather_u32index_s32(svptrue_b32(), from, indices);
221EIGEN_STRONG_INLINE
void pstore<numext::int32_t>(numext::int32_t* to,
const PacketXi& from) {
222 EIGEN_DEBUG_ALIGNED_STORE svst1_s32(svptrue_b32(), to, from);
226EIGEN_STRONG_INLINE
void pstoreu<numext::int32_t>(numext::int32_t* to,
const PacketXi& from) {
227 EIGEN_DEBUG_UNALIGNED_STORE svst1_s32(svptrue_b32(), to, from);
231EIGEN_DEVICE_FUNC
inline PacketXi pgather<numext::int32_t, PacketXi>(
const numext::int32_t* from,
Index stride) {
233 svint32_t indices = svindex_s32(0, stride);
234 return svld1_gather_s32index_s32(svptrue_b32(), from, indices);
238EIGEN_DEVICE_FUNC
inline void pscatter<numext::int32_t, PacketXi>(numext::int32_t* to,
const PacketXi& from,
241 svint32_t indices = svindex_s32(0, stride);
242 svst1_scatter_s32index_s32(svptrue_b32(), to, indices, from);
246EIGEN_STRONG_INLINE numext::int32_t pfirst<PacketXi>(
const PacketXi& a) {
248 return svlasta_s32(svpfalse_b(), a);
252EIGEN_STRONG_INLINE PacketXi preverse(
const PacketXi& a) {
257EIGEN_STRONG_INLINE PacketXi pabs(
const PacketXi& a) {
258 return svabs_s32_x(svptrue_b32(), a);
262EIGEN_STRONG_INLINE numext::int32_t predux<PacketXi>(
const PacketXi& a) {
263 return static_cast<numext::int32_t
>(svaddv_s32(svptrue_b32(), a));
267EIGEN_STRONG_INLINE numext::int32_t predux_mul<PacketXi>(
const PacketXi& a) {
268 EIGEN_STATIC_ASSERT((EIGEN_ARM64_SVE_VL % 128 == 0), EIGEN_INTERNAL_ERROR_PLEASE_FILE_A_BUG_REPORT);
271 svint32_t prod = svmul_s32_x(svptrue_b32(), a, svrev_s32(a));
275 if (EIGEN_ARM64_SVE_VL >= 2048) {
276 half_prod = svtbl_s32(prod, svindex_u32(32, 1));
277 prod = svmul_s32_x(svptrue_b32(), prod, half_prod);
279 if (EIGEN_ARM64_SVE_VL >= 1024) {
280 half_prod = svtbl_s32(prod, svindex_u32(16, 1));
281 prod = svmul_s32_x(svptrue_b32(), prod, half_prod);
283 if (EIGEN_ARM64_SVE_VL >= 512) {
284 half_prod = svtbl_s32(prod, svindex_u32(8, 1));
285 prod = svmul_s32_x(svptrue_b32(), prod, half_prod);
287 if (EIGEN_ARM64_SVE_VL >= 256) {
288 half_prod = svtbl_s32(prod, svindex_u32(4, 1));
289 prod = svmul_s32_x(svptrue_b32(), prod, half_prod);
292 half_prod = svtbl_s32(prod, svindex_u32(2, 1));
293 prod = svmul_s32_x(svptrue_b32(), prod, half_prod);
296 return pfirst<PacketXi>(prod);
300EIGEN_STRONG_INLINE numext::int32_t predux_min<PacketXi>(
const PacketXi& a) {
301 return svminv_s32(svptrue_b32(), a);
305EIGEN_STRONG_INLINE numext::int32_t predux_max<PacketXi>(
const PacketXi& a) {
306 return svmaxv_s32(svptrue_b32(), a);
310EIGEN_DEVICE_FUNC
inline void ptranspose(PacketBlock<PacketXi, N>& kernel) {
311 int buffer[packet_traits<numext::int32_t>::size * N] = {0};
314 PacketXi stride_index = svindex_s32(0, N);
316 for (i = 0; i < N; i++) {
317 svst1_scatter_s32index_s32(svptrue_b32(), buffer + i, stride_index, kernel.packet[i]);
319 for (i = 0; i < N; i++) {
320 kernel.packet[i] = svld1_s32(svptrue_b32(), buffer + i * packet_traits<numext::int32_t>::size);
326typedef svfloat32_t PacketXf __attribute__((arm_sve_vector_bits(EIGEN_ARM64_SVE_VL)));
329struct packet_traits<float> : default_packet_traits {
330 typedef PacketXf type;
331 typedef PacketXf half;
336 size = sve_packet_size_selector<float, EIGEN_ARM64_SVE_VL>::size,
354 HasSin = EIGEN_FAST_MATH,
355 HasCos = EIGEN_FAST_MATH,
360 HasTanh = EIGEN_FAST_MATH,
361 HasErf = EIGEN_FAST_MATH,
362 HasErfc = EIGEN_FAST_MATH
367struct unpacket_traits<PacketXf> {
369 typedef PacketXf half;
370 typedef PacketXi integer_packet;
373 size = sve_packet_size_selector<float, EIGEN_ARM64_SVE_VL>::size,
376 masked_load_available =
false,
377 masked_store_available =
false
382EIGEN_STRONG_INLINE PacketXf pset1<PacketXf>(
const float& from) {
383 return svdup_n_f32(from);
387EIGEN_STRONG_INLINE PacketXf pset1frombits<PacketXf>(numext::uint32_t from) {
388 return svreinterpret_f32_u32(svdup_n_u32_x(svptrue_b32(), from));
392EIGEN_STRONG_INLINE PacketXf plset<PacketXf>(
const float& a) {
393 float c[packet_traits<float>::size];
394 for (
int i = 0; i < packet_traits<float>::size; i++) c[i] = i;
395 return svadd_f32_x(svptrue_b32(), pset1<PacketXf>(a), svld1_f32(svptrue_b32(), c));
399EIGEN_STRONG_INLINE PacketXf padd<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
400 return svadd_f32_x(svptrue_b32(), a, b);
404EIGEN_STRONG_INLINE PacketXf psub<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
405 return svsub_f32_x(svptrue_b32(), a, b);
409EIGEN_STRONG_INLINE PacketXf pnegate(
const PacketXf& a) {
410 return svneg_f32_x(svptrue_b32(), a);
414EIGEN_STRONG_INLINE PacketXf pconj(
const PacketXf& a) {
419EIGEN_STRONG_INLINE PacketXf pmul<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
420 return svmul_f32_x(svptrue_b32(), a, b);
424EIGEN_STRONG_INLINE PacketXf pdiv<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
425 return svdiv_f32_x(svptrue_b32(), a, b);
429EIGEN_STRONG_INLINE PacketXf pmadd(
const PacketXf& a,
const PacketXf& b,
const PacketXf& c) {
430 return svmla_f32_x(svptrue_b32(), c, a, b);
434EIGEN_STRONG_INLINE PacketXf pmin<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
435 return svmin_f32_x(svptrue_b32(), a, b);
439EIGEN_STRONG_INLINE PacketXf pmin<PropagateNaN, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
440 return pmin<PacketXf>(a, b);
444EIGEN_STRONG_INLINE PacketXf pmin<PropagateNumbers, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
445 return svminnm_f32_x(svptrue_b32(), a, b);
449EIGEN_STRONG_INLINE PacketXf pmax<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
450 return svmax_f32_x(svptrue_b32(), a, b);
454EIGEN_STRONG_INLINE PacketXf pmax<PropagateNaN, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
455 return pmax<PacketXf>(a, b);
459EIGEN_STRONG_INLINE PacketXf pmax<PropagateNumbers, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
460 return svmaxnm_f32_x(svptrue_b32(), a, b);
466EIGEN_STRONG_INLINE PacketXf pcmp_le<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
467 return svreinterpret_f32_u32(svdup_n_u32_z(svcmple_f32(svptrue_b32(), a, b), 0xffffffffu));
471EIGEN_STRONG_INLINE PacketXf pcmp_lt<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
472 return svreinterpret_f32_u32(svdup_n_u32_z(svcmplt_f32(svptrue_b32(), a, b), 0xffffffffu));
476EIGEN_STRONG_INLINE PacketXf pcmp_eq<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
477 return svreinterpret_f32_u32(svdup_n_u32_z(svcmpeq_f32(svptrue_b32(), a, b), 0xffffffffu));
484EIGEN_STRONG_INLINE PacketXf pcmp_lt_or_nan<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
485 return svreinterpret_f32_u32(svdup_n_u32_z(svnot_b_z(svptrue_b32(), svcmpge_f32(svptrue_b32(), a, b)), 0xffffffffu));
489EIGEN_STRONG_INLINE PacketXf pfloor<PacketXf>(
const PacketXf& a) {
490 return svrintm_f32_x(svptrue_b32(), a);
494EIGEN_STRONG_INLINE PacketXf ptrue<PacketXf>(
const PacketXf& ) {
495 return svreinterpret_f32_u32(svdup_n_u32_x(svptrue_b32(), 0xffffffffu));
500EIGEN_STRONG_INLINE PacketXf pand<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
501 return svreinterpret_f32_u32(svand_u32_x(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
505EIGEN_STRONG_INLINE PacketXf por<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
506 return svreinterpret_f32_u32(svorr_u32_x(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
510EIGEN_STRONG_INLINE PacketXf pxor<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
511 return svreinterpret_f32_u32(sveor_u32_x(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
515EIGEN_STRONG_INLINE PacketXf pandnot<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
516 return svreinterpret_f32_u32(svbic_u32_x(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
520EIGEN_STRONG_INLINE PacketXf pload<PacketXf>(
const float* from) {
521 EIGEN_DEBUG_ALIGNED_LOAD
return svld1_f32(svptrue_b32(), from);
525EIGEN_STRONG_INLINE PacketXf ploadu<PacketXf>(
const float* from) {
526 EIGEN_DEBUG_UNALIGNED_LOAD
return svld1_f32(svptrue_b32(), from);
530EIGEN_STRONG_INLINE PacketXf ploaddup<PacketXf>(
const float* from) {
531 svuint32_t indices = svindex_u32(0, 1);
532 indices = svzip1_u32(indices, indices);
533 return svld1_gather_u32index_f32(svptrue_b32(), from, indices);
537EIGEN_STRONG_INLINE PacketXf ploadquad<PacketXf>(
const float* from) {
538 svuint32_t indices = svindex_u32(0, 1);
539 indices = svzip1_u32(indices, indices);
540 indices = svzip1_u32(indices, indices);
541 return svld1_gather_u32index_f32(svptrue_b32(), from, indices);
545EIGEN_STRONG_INLINE
void pstore<float>(
float* to,
const PacketXf& from) {
546 EIGEN_DEBUG_ALIGNED_STORE svst1_f32(svptrue_b32(), to, from);
550EIGEN_STRONG_INLINE
void pstoreu<float>(
float* to,
const PacketXf& from) {
551 EIGEN_DEBUG_UNALIGNED_STORE svst1_f32(svptrue_b32(), to, from);
555EIGEN_DEVICE_FUNC
inline PacketXf pgather<float, PacketXf>(
const float* from,
Index stride) {
557 svint32_t indices = svindex_s32(0, stride);
558 return svld1_gather_s32index_f32(svptrue_b32(), from, indices);
562EIGEN_DEVICE_FUNC
inline void pscatter<float, PacketXf>(
float* to,
const PacketXf& from,
Index stride) {
564 svint32_t indices = svindex_s32(0, stride);
565 svst1_scatter_s32index_f32(svptrue_b32(), to, indices, from);
569EIGEN_STRONG_INLINE
float pfirst<PacketXf>(
const PacketXf& a) {
571 return svlasta_f32(svpfalse_b(), a);
575EIGEN_STRONG_INLINE PacketXf preverse(
const PacketXf& a) {
580EIGEN_STRONG_INLINE PacketXf pabs(
const PacketXf& a) {
581 return svabs_f32_x(svptrue_b32(), a);
587EIGEN_STRONG_INLINE PacketXf pfrexp<PacketXf>(
const PacketXf& a, PacketXf& exponent) {
588 return pfrexp_generic(a, exponent);
592EIGEN_STRONG_INLINE
float predux<PacketXf>(
const PacketXf& a) {
593 return svaddv_f32(svptrue_b32(), a);
600EIGEN_STRONG_INLINE
float predux_mul<PacketXf>(
const PacketXf& a) {
601 EIGEN_STATIC_ASSERT((EIGEN_ARM64_SVE_VL % 128 == 0), EIGEN_INTERNAL_ERROR_PLEASE_FILE_A_BUG_REPORT);
603 svfloat32_t prod = svmul_f32_x(svptrue_b32(), a, svrev_f32(a));
604 svfloat32_t half_prod;
607 if (EIGEN_ARM64_SVE_VL >= 2048) {
608 half_prod = svtbl_f32(prod, svindex_u32(32, 1));
609 prod = svmul_f32_x(svptrue_b32(), prod, half_prod);
611 if (EIGEN_ARM64_SVE_VL >= 1024) {
612 half_prod = svtbl_f32(prod, svindex_u32(16, 1));
613 prod = svmul_f32_x(svptrue_b32(), prod, half_prod);
615 if (EIGEN_ARM64_SVE_VL >= 512) {
616 half_prod = svtbl_f32(prod, svindex_u32(8, 1));
617 prod = svmul_f32_x(svptrue_b32(), prod, half_prod);
619 if (EIGEN_ARM64_SVE_VL >= 256) {
620 half_prod = svtbl_f32(prod, svindex_u32(4, 1));
621 prod = svmul_f32_x(svptrue_b32(), prod, half_prod);
624 half_prod = svtbl_f32(prod, svindex_u32(2, 1));
625 prod = svmul_f32_x(svptrue_b32(), prod, half_prod);
628 return pfirst<PacketXf>(prod);
632EIGEN_STRONG_INLINE
float predux_min<PacketXf>(
const PacketXf& a) {
633 return svminv_f32(svptrue_b32(), a);
637EIGEN_STRONG_INLINE
float predux_max<PacketXf>(
const PacketXf& a) {
638 return svmaxv_f32(svptrue_b32(), a);
642EIGEN_DEVICE_FUNC
inline void ptranspose(PacketBlock<PacketXf, N>& kernel) {
643 float buffer[packet_traits<float>::size * N] = {0};
646 PacketXi stride_index = svindex_s32(0, N);
648 for (i = 0; i < N; i++) {
649 svst1_scatter_s32index_f32(svptrue_b32(), buffer + i, stride_index, kernel.packet[i]);
652 for (i = 0; i < N; i++) {
653 kernel.packet[i] = svld1_f32(svptrue_b32(), buffer + i * packet_traits<float>::size);
658EIGEN_STRONG_INLINE PacketXf pldexp<PacketXf>(
const PacketXf& a,
const PacketXf& exponent) {
659 return pldexp_generic(a, exponent);
663EIGEN_STRONG_INLINE PacketXf psqrt<PacketXf>(
const PacketXf& a) {
664 return svsqrt_f32_x(svptrue_b32(), a);
// NOTE(extraction): doxygen cross-reference residue, kept as a comment —
//   Aligned64: defined in Eigen Constants.h (line 239)
//   Eigen: namespace containing all symbols from the Eigen library
//   Index: EIGEN_DEFAULT_DENSE_INDEX_TYPE, the Index type as used for the
//          API, defined in Meta.h (line 82)
// The original file's closing braces for namespace Eigen/internal and the
// trailing #endif for EIGEN_PACKET_MATH_SVE_H were lost in extraction.