#ifndef EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H
#define EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H

namespace Eigen {
namespace internal {
template<typename ReverseDimensions, typename XprType>
struct traits<TensorReverseOp<ReverseDimensions, XprType> > : public traits<XprType>
{
  typedef typename XprType::Scalar Scalar;
  typedef traits<XprType> XprTraits;
  typedef typename XprTraits::StorageKind StorageKind;
  typedef typename XprTraits::Index Index;
  typedef typename XprType::Nested Nested;
  typedef typename remove_reference<Nested>::type _Nested;
  static const int NumDimensions = XprTraits::NumDimensions;
  static const int Layout = XprTraits::Layout;
};
template<typename ReverseDimensions, typename XprType>
struct eval<TensorReverseOp<ReverseDimensions, XprType>, Eigen::Dense>
{
  typedef const TensorReverseOp<ReverseDimensions, XprType>& type;
};
template<typename ReverseDimensions, typename XprType>
struct nested<TensorReverseOp<ReverseDimensions, XprType>, 1,
              typename eval<TensorReverseOp<ReverseDimensions, XprType> >::type>
{
  typedef TensorReverseOp<ReverseDimensions, XprType> type;
};

}  // end namespace internal
/** \class TensorReverse
  * \brief Tensor reverse elements class.
  */
template <typename ReverseDimensions, typename XprType>
class TensorReverseOp : public TensorBase<TensorReverseOp<ReverseDimensions, XprType>, WriteAccessors>
{
  public:
    typedef typename Eigen::internal::traits<TensorReverseOp>::Scalar Scalar;
    typedef typename XprType::CoeffReturnType CoeffReturnType;
    typedef typename Eigen::internal::nested<TensorReverseOp>::type Nested;
    typedef typename Eigen::internal::traits<TensorReverseOp>::StorageKind StorageKind;
    typedef typename Eigen::internal::traits<TensorReverseOp>::Index Index;

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorReverseOp(
        const XprType& expr, const ReverseDimensions& reverse_dims)
        : m_xpr(expr), m_reverse_dims(reverse_dims) { }

    EIGEN_DEVICE_FUNC
    const ReverseDimensions& reverse() const { return m_reverse_dims; }

    EIGEN_DEVICE_FUNC
    const typename internal::remove_all<typename XprType::Nested>::type&
    expression() const { return m_xpr; }

    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE TensorReverseOp& operator = (const TensorReverseOp& other)
    {
      typedef TensorAssignOp<TensorReverseOp, const TensorReverseOp> Assign;
      Assign assign(*this, other);
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }

    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE TensorReverseOp& operator = (const OtherDerived& other)
    {
      typedef TensorAssignOp<TensorReverseOp, const OtherDerived> Assign;
      Assign assign(*this, other);
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }

  protected:
    typename XprType::Nested m_xpr;
    const ReverseDimensions m_reverse_dims;
};
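// Illustrative usage (a sketch, not part of the original header). A TensorReverseOp
// is normally obtained through TensorBase::reverse(), passing one boolean per
// dimension that says whether that dimension should be flipped; the tensor sizes
// and variable names below are invented for the example and assume
// <unsupported/Eigen/CXX11/Tensor> has been included.
//
//   Eigen::Tensor<float, 3> input(2, 3, 7);
//   input.setRandom();
//   Eigen::array<bool, 3> rev{{true, false, true}};       // flip dimensions 0 and 2
//   Eigen::Tensor<float, 3> output = input.reverse(rev);
//   // output(i, j, k) == input(1 - i, j, 6 - k)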
// Eval as rvalue
template<typename ReverseDimensions, typename ArgType, typename Device>
struct TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>, Device>
{
  typedef TensorReverseOp<ReverseDimensions, ArgType> XprType;
  typedef typename XprType::Index Index;
  static const int NumDims = internal::array_size<ReverseDimensions>::value;
  typedef DSizes<Index, NumDims> Dimensions;
  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
  static const int PacketSize = internal::unpacket_traits<PacketReturnType>::size;

  enum {
    IsAligned = false,
    PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
    Layout = TensorEvaluator<ArgType, Device>::Layout,
    CoordAccess = false,
    RawAccess = false
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op,
                                                        const Device& device)
      : m_impl(op.expression(), device), m_reverse(op.reverse())
  {
    // Reversing a scalar isn't supported yet: it would be a no-op anyway.
    EIGEN_STATIC_ASSERT((NumDims > 0), YOU_MADE_A_PROGRAMMING_MISTAKE);

    // Compute the strides of the output in the chosen storage order.
    m_dimensions = m_impl.dimensions();
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      m_strides[0] = 1;
      for (int i = 1; i < NumDims; ++i) {
        m_strides[i] = m_strides[i-1] * m_dimensions[i-1];
      }
    } else {
      m_strides[NumDims-1] = 1;
      for (int i = NumDims - 2; i >= 0; --i) {
        m_strides[i] = m_strides[i+1] * m_dimensions[i+1];
      }
    }
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  const Dimensions& dimensions() const { return m_dimensions; }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(Scalar*) {
    m_impl.evalSubExprsIfNeeded(NULL);
    return true;
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
    m_impl.cleanup();
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index reverseIndex(Index index) const {
    eigen_assert(index < dimensions().TotalSize());
    Index inputIndex = 0;
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      for (int i = NumDims - 1; i > 0; --i) {
        Index idx = index / m_strides[i];
        index -= idx * m_strides[i];
        if (m_reverse[i]) {
          idx = m_dimensions[i] - idx - 1;
        }
        inputIndex += idx * m_strides[i];
      }
      if (m_reverse[0]) {
        inputIndex += (m_dimensions[0] - index - 1);
      } else {
        inputIndex += index;
      }
    } else {
      for (int i = 0; i < NumDims - 1; ++i) {
        Index idx = index / m_strides[i];
        index -= idx * m_strides[i];
        if (m_reverse[i]) {
          idx = m_dimensions[i] - idx - 1;
        }
        inputIndex += idx * m_strides[i];
      }
      if (m_reverse[NumDims-1]) {
        inputIndex += (m_dimensions[NumDims-1] - index - 1);
      } else {
        inputIndex += index;
      }
    }
    return inputIndex;
  }
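  // Worked example (illustrative numbers, not from the original source): for a
  // column-major 2x3 tensor with m_reverse = {true, false}, the strides are
  // m_strides = {1, 2}. Output linear index 4, i.e. coordinates (0, 2), is mapped
  // as follows: the loop peels off the outer coordinate, idx = 4 / 2 = 2, leaving
  // index = 0 and inputIndex = 2 * 2 = 4 (dimension 1 is not reversed); dimension
  // 0 is reversed, so inputIndex += 2 - 0 - 1 = 1, giving 5, i.e. coordinates
  // (1, 2) of the input, as expected when only dimension 0 is flipped.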
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index index) const {
    return m_impl.coeff(reverseIndex(index));
  }

  template<int LoadMode>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  PacketReturnType packet(Index index) const
  {
    EIGEN_STATIC_ASSERT((PacketSize > 1), YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+PacketSize-1 < dimensions().TotalSize());

    // Gather the packet one coefficient at a time, since reversed elements are
    // generally not contiguous in the input.
    EIGEN_ALIGN_MAX typename internal::remove_const<CoeffReturnType>::type values[PacketSize];
    for (int i = 0; i < PacketSize; ++i) {
      values[i] = coeff(index+i);
    }
    PacketReturnType rslt = internal::pload<PacketReturnType>(values);
    return rslt;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorOpCost costPerCoeff(bool vectorized) const {
    double compute_cost = NumDims * (2 * TensorOpCost::AddCost<Index>() +
                                     2 * TensorOpCost::MulCost<Index>() +
                                     TensorOpCost::DivCost<Index>());
    for (int i = 0; i < NumDims; ++i) {
      if (m_reverse[i]) {
        compute_cost += 2 * TensorOpCost::AddCost<Index>();
      }
    }
    return m_impl.costPerCoeff(vectorized) +
           TensorOpCost(0, 0, compute_cost, false /* vectorized */, PacketSize);
  }

  EIGEN_DEVICE_FUNC Scalar* data() const { return NULL; }
 protected:
  Dimensions m_dimensions;
  array<Index, NumDims> m_strides;
  TensorEvaluator<ArgType, Device> m_impl;
  ReverseDimensions m_reverse;
};
// Eval as lvalue
template <typename ReverseDimensions, typename ArgType, typename Device>
struct TensorEvaluator<TensorReverseOp<ReverseDimensions, ArgType>, Device>
    : public TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>,
                             Device> {
  typedef TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>,
                          Device> Base;
  typedef TensorReverseOp<ReverseDimensions, ArgType> XprType;
  typedef typename XprType::Index Index;
  static const int NumDims = internal::array_size<ReverseDimensions>::value;
  typedef DSizes<Index, NumDims> Dimensions;

  enum {
    IsAligned = false,
    PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
    Layout = TensorEvaluator<ArgType, Device>::Layout,
    CoordAccess = false,
    RawAccess = false
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op,
                                                        const Device& device)
      : Base(op, device) {}

  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
  static const int PacketSize = internal::unpacket_traits<PacketReturnType>::size;
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  const Dimensions& dimensions() const { return this->m_dimensions; }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(Index index) {
    return this->m_impl.coeffRef(this->reverseIndex(index));
  }
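  // Illustrative usage of the writable path (a sketch, not part of the original
  // header): because TensorReverseOp derives from TensorBase<..., WriteAccessors>,
  // a reversed view can appear on the left-hand side of an assignment. The names
  // and sizes below are invented for the example.
  //
  //   Eigen::Tensor<float, 2> src(4, 5), dst(4, 5);
  //   src.setRandom();
  //   Eigen::array<bool, 2> rev{{false, true}};
  //   dst.reverse(rev) = src;   // afterwards dst(i, j) == src(i, 4 - j)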
  template <int StoreMode> EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  void writePacket(Index index, const PacketReturnType& x) {
    EIGEN_STATIC_ASSERT((PacketSize > 1), YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+PacketSize-1 < dimensions().TotalSize());

    // Scatter the packet one coefficient at a time, since reversed elements are
    // generally not contiguous in the output.
    EIGEN_ALIGN_MAX CoeffReturnType values[PacketSize];
    internal::pstore<CoeffReturnType, PacketReturnType>(values, x);
    for (int i = 0; i < PacketSize; ++i) {
      this->coeffRef(index+i) = values[i];
    }
  }
};

}  // end namespace Eigen

#endif // EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H