#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

#include "./InternalHeaderCheck.h"

namespace Eigen {

namespace internal {
template <typename Scalar_, int Options_, typename StorageIndex_>
struct traits<SparseVector<Scalar_, Options_, StorageIndex_> > {
  typedef Scalar_ Scalar;
  typedef StorageIndex_ StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (Options_ & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = Options_ | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
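
// Illustrative note (not part of the original header): Options_ fixes the orientation
// at compile time, so IsColVector drives all of the size logic below. A minimal sketch:
//
//   Eigen::SparseVector<double> cv(10);                   // 10x1 column vector
//   Eigen::SparseVector<double, Eigen::RowMajor> rv(10);  // 1x10 row vector
//   // cv.rows() == 10 && cv.cols() == 1; rv.rows() == 1 && rv.cols() == 10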

// Sparse-Vector-Assignment kinds:
//  - SVA_Inner:  the source is a genuine vector expression; copy its single inner vector
//  - SVA_Outer:  the source has inner size 1, so each outer slot holds at most one coefficient
//  - SVA_RuntimeSwitch: the source's shape is only known at run time
enum { SVA_RuntimeSwitch, SVA_Inner, SVA_Outer };
template <typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                               : Src::InnerSizeAtCompileTime == 1 ? SVA_Outer
                                                                  : SVA_Inner>
struct sparse_vector_assign_selector;

}  // namespace internal
/** \ingroup SparseCore_Module
 * \class SparseVector
 * \brief a sparse vector class
 *
 * \tparam Scalar_ the scalar type, i.e. the type of the coefficients
 */
template <typename Scalar_, int Options_, typename StorageIndex_>
class SparseVector : public SparseCompressedBase<SparseVector<Scalar_, Options_, StorageIndex_> > {
  typedef SparseCompressedBase<SparseVector> Base;
  using Base::convert_index;

 public:
  EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
  EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
  EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

  typedef internal::CompressedStorage<Scalar, StorageIndex> Storage;
  enum { IsColVector = internal::traits<SparseVector>::IsColVector };

  enum { Options = Options_ };
  EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
  EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
  EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
  EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
  EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return m_data.valuePtr(); }
  EIGEN_STRONG_INLINE Scalar* valuePtr() { return m_data.valuePtr(); }

  EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
  EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }
  inline const StorageIndex* outerIndexPtr() const { return 0; }
  inline const StorageIndex* innerNonZeroPtr() const { return 0; }
  constexpr Storage& data() { return m_data; }
  constexpr const Storage& data() const { return m_data; }
  inline Scalar coeff(Index row, Index col) const {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
    return coeff(IsColVector ? row : col);
  }
  inline Scalar coeff(Index i) const {
    eigen_assert(i >= 0 && i < m_size);
    return m_data.at(StorageIndex(i));
  }
  inline Scalar& coeffRef(Index row, Index col) {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
    return coeffRef(IsColVector ? row : col);
  }

  // Binary search into the compressed buffer; if the coefficient does not exist yet,
  // a sorted insertion into the sequential storage is performed.
  inline Scalar& coeffRef(Index i) {
    eigen_assert(i >= 0 && i < m_size);
    return m_data.atWithInsertion(StorageIndex(i));
  }
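
  // Illustrative sketch (not from the original docs): coeffRef() creates the
  // coefficient on demand, while coeff() is a read-only lookup:
  //
  //   Eigen::SparseVector<double> v(100);
  //   v.coeffRef(42) += 1.0;   // inserts an explicit zero first if index 42 is absent
  //   double x = v.coeff(42);  // pure lookup, never inserts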
  typedef typename Base::InnerIterator InnerIterator;
  typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

  inline void setZero() { m_data.clear(); }

  /** \returns the number of non zero coefficients */
  inline Index nonZeros() const { return m_data.size(); }
  inline void startVec(Index outer) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
  }

  inline Scalar& insertBackByOuterInner(Index outer, Index inner) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
    return insertBack(inner);
  }
  inline Scalar& insertBack(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }

  Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
    return insertBackUnordered(inner);
  }
  inline Scalar& insertBackUnordered(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }
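
  // Illustrative fill pattern (a sketch, not from the original docs): insertBack()
  // appends in O(1) but indices must be passed in strictly increasing order:
  //
  //   Eigen::SparseVector<double> v(1000);
  //   v.reserve(3);
  //   v.insertBack(10) = 1.0;
  //   v.insertBack(50) = 2.0;
  //   v.insertBack(900) = 3.0;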
  inline Scalar& insert(Index row, Index col) {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));

    Index inner = IsColVector ? row : col;
    Index outer = IsColVector ? col : row;
    EIGEN_ONLY_USED_FOR_DEBUG(outer);
    eigen_assert(outer == 0);
    return insert(inner);
  }
  Scalar& insert(Index i) {
    eigen_assert(i >= 0 && i < m_size);

    Index startId = 0;
    Index p = Index(m_data.size()) - 1;
    m_data.resize(p + 2, 1);

    // Shift all entries with a larger inner index one slot to the right.
    while ((p >= startId) && (m_data.index(p) > i)) {
      m_data.index(p + 1) = m_data.index(p);
      m_data.value(p + 1) = m_data.value(p);
      --p;
    }
    m_data.index(p + 1) = convert_index(i);
    m_data.value(p + 1) = Scalar(0);
    return m_data.value(p + 1);
  }
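
  // Note (illustrative, not from the original docs): insert() keeps the buffer sorted
  // by shifting every stored entry with a larger index, so each out-of-order insertion
  // costs O(nonZeros()). A sketch:
  //
  //   Eigen::SparseVector<double> v(100);
  //   v.insert(7) = 3.0;  // index 7 must not be stored yet
  //   v.insert(3) = 1.0;  // shifts the entry for index 7 one slot to the right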
  inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

  inline void finalize() {}
  // Removes all coefficients that are much smaller than |reference| under the
  // tolerance epsilon, and returns the new number of non-zeros.
  Index prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision()) {
    return prune([&](const Scalar& val) { return !internal::isMuchSmallerThan(val, reference, epsilon); });
  }
  /** Prunes the entries of the vector based on a predicate */
  template <class F>
  Index prune(F&& keep_predicate) {
    Index k = 0;
    Index n = m_data.size();
    for (Index i = 0; i < n; ++i) {
      if (keep_predicate(m_data.value(i))) {
        m_data.value(k) = std::move(m_data.value(i));
        m_data.index(k) = m_data.index(i);
        ++k;
      }
    }
    m_data.resize(k);
    return k;
  }
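
  // Illustrative sketch (not from the original docs): both prune() overloads compact
  // the storage in place and return the new number of non-zeros:
  //
  //   Eigen::SparseVector<double> v(100);
  //   v.insert(1) = 1e-12;
  //   v.insert(2) = 2.0;
  //   v.prune(1.0);                                      // drops entries much smaller than 1.0
  //   v.prune([](const double& x) { return x > 0.0; });  // keeps only positive entries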
  void resize(Index rows, Index cols) {
    eigen_assert((IsColVector ? cols : rows) == 1 && "Outer dimension must equal 1");
    resize(IsColVector ? rows : cols);
  }

  /** Resizes the sparse vector to \a newSize and discards all stored coefficients. */
  void resize(Index newSize) {
    m_size = newSize;
    m_data.clear();
  }

  /** Resizes the sparse vector to \a newSize while keeping the entries whose index is smaller than \a newSize. */
  void conservativeResize(Index newSize) {
    if (newSize < m_size) {
      Index i = 0;
      while (i < m_data.size() && m_data.index(i) < newSize) ++i;
      m_data.resize(i);
    }
    m_size = newSize;
  }

  void resizeNonZeros(Index size) { m_data.resize(size); }
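
  // Illustrative contrast (not from the original docs): resize() discards all stored
  // coefficients, conservativeResize() keeps those whose index still fits:
  //
  //   Eigen::SparseVector<double> v(10);
  //   v.insert(2) = 1.0;
  //   v.insert(8) = 2.0;
  //   v.conservativeResize(5);  // keeps (index 2 -> 1.0), drops (index 8 -> 2.0)
  //   v.resize(5);              // same dimension, but all entries cleared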
  inline SparseVector() : m_size(0) { resize(0); }

  explicit inline SparseVector(Index size) : m_size(0) { resize(size); }

  inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows, cols); }

  template <typename OtherDerived>
  inline SparseVector(const SparseMatrixBase<OtherDerived>& other) : m_size(0) {
#ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
#endif
    *this = other.derived();
  }

  inline SparseVector(const SparseVector& other) : Base(other), m_size(0) { *this = other.derived(); }
  inline void swap(SparseVector& other) {
    std::swap(m_size, other.m_size);
    m_data.swap(other.m_data);
  }

  template <int OtherOptions>
  inline void swap(SparseMatrix<Scalar, OtherOptions, StorageIndex>& other) {
    eigen_assert(other.outerSize() == 1);
    std::swap(m_size, other.m_innerSize);
    m_data.swap(other.m_data);
  }
  template <int OtherOptions>
  friend EIGEN_DEVICE_FUNC void swap(SparseVector& a, SparseMatrix<Scalar, OtherOptions, StorageIndex>& b) {
    a.swap(b);
  }

  template <int OtherOptions>
  friend EIGEN_DEVICE_FUNC void swap(SparseMatrix<Scalar, OtherOptions, StorageIndex>& a, SparseVector& b) {
    b.swap(a);
  }
  inline SparseVector& operator=(const SparseVector& other) {
    if (other.isRValue()) {
      swap(other.const_cast_derived());
    } else {
      resize(other.size());
      m_data = other.m_data;
    }
    return *this;
  }
  template <typename OtherDerived>
  inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other) {
    SparseVector tmp(other.size());
    internal::sparse_vector_assign_selector<SparseVector, OtherDerived>::run(tmp, other.derived());
    this->swap(tmp);
    return *this;
  }
  inline SparseVector(SparseVector&& other) : SparseVector() { this->swap(other); }

  template <typename OtherDerived>
  inline SparseVector(SparseCompressedBase<OtherDerived>&& other) : SparseVector() {
    *this = other.derived().markAsRValue();
  }

  inline SparseVector& operator=(SparseVector&& other) {
    this->swap(other);
    return *this;
  }

  template <typename OtherDerived>
  inline SparseVector& operator=(SparseCompressedBase<OtherDerived>&& other) {
    *this = other.derived().markAsRValue();
    return *this;
  }
#ifndef EIGEN_PARSED_BY_DOXYGEN
  template <typename Lhs, typename Rhs>
  inline SparseVector& operator=(const SparseSparseProduct<Lhs, Rhs>& product) {
    return Base::operator=(product);
  }
#endif
  friend std::ostream& operator<<(std::ostream& s, const SparseVector& m) {
    for (Index i = 0; i < m.nonZeros(); ++i) s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
    s << std::endl;
    return s;
  }
  /** Destructor */
  inline ~SparseVector() {}

  /** Overloaded for performance */
  Scalar sum() const;

  EIGEN_DEPRECATED_WITH_REASON("Use .setZero() and .reserve() instead.") void startFill(Index reserve) {
    setZero();
    m_data.reserve(reserve);
  }
  EIGEN_DEPRECATED_WITH_REASON("Use .insertBack() instead.") Scalar& fill(Index r, Index c) {
    eigen_assert(r == 0 || c == 0);
    return fill(IsColVector ? r : c);
  }
  EIGEN_DEPRECATED_WITH_REASON("Use .insertBack() instead.") Scalar& fill(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }
  EIGEN_DEPRECATED_WITH_REASON("Use .insert() instead.") Scalar& fillrand(Index r, Index c) {
    eigen_assert(r == 0 || c == 0);
    return fillrand(IsColVector ? r : c);
  }

  EIGEN_DEPRECATED_WITH_REASON("Use .insert() instead.") Scalar& fillrand(Index i) { return insert(i); }
  EIGEN_DEPRECATED_WITH_REASON("Use .finalize() instead.") void endFill() {}
  EIGEN_DEPRECATED_WITH_REASON("Use .data() instead.") Storage& _data() { return m_data; }
  EIGEN_DEPRECATED_WITH_REASON("Use .data() instead.") const Storage& _data() const { return m_data; }
#ifdef EIGEN_SPARSEVECTOR_PLUGIN
#include EIGEN_SPARSEVECTOR_PLUGIN
#endif
 protected:
  EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned, THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE)
  EIGEN_STATIC_ASSERT((Options_ & (ColMajor | RowMajor)) == Options, INVALID_MATRIX_TEMPLATE_PARAMETERS)

  Storage m_data;
  Index m_size;
};
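
// End-to-end sketch (illustrative, not from the original header): building a sparse
// vector and walking its stored entries with the inherited InnerIterator:
//
//   Eigen::SparseVector<double> v(1000);
//   v.coeffRef(3) = 1.5;
//   v.coeffRef(998) = -2.0;
//   for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";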
namespace internal {

template <typename Scalar_, int Options_, typename Index_>
struct evaluator<SparseVector<Scalar_, Options_, Index_> > : evaluator_base<SparseVector<Scalar_, Options_, Index_> > {
  typedef SparseVector<Scalar_, Options_, Index_> SparseVectorType;
  typedef evaluator_base<SparseVectorType> Base;
  typedef typename SparseVectorType::InnerIterator InnerIterator;
  typedef typename SparseVectorType::ReverseInnerIterator ReverseInnerIterator;

  enum { CoeffReadCost = NumTraits<Scalar_>::ReadCost, Flags = SparseVectorType::Flags };
  evaluator() : Base() {}

  explicit evaluator(const SparseVectorType& mat) : m_matrix(&mat) { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }

  inline Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  operator SparseVectorType&() { return m_matrix->const_cast_derived(); }
  operator const SparseVectorType&() const { return *m_matrix; }

  const SparseVectorType* m_matrix;
};
template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize() == src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for (typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it) dst.insert(it.index()) = it.value();
  }
};
template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize() == src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for (Index i = 0; i < src.size(); ++i) {
      typename SrcEvaluatorType::InnerIterator it(srcEval, i);
      if (it) dst.insert(i) = it.value();
    }
  }
};
template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if (src.outerSize() == 1)
      sparse_vector_assign_selector<Dest, Src, SVA_Inner>::run(dst, src);
    else
      sparse_vector_assign_selector<Dest, Src, SVA_Outer>::run(dst, src);
  }
};

}  // namespace internal
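
// Illustrative note (not from the original header): a dynamically-sized expression
// such as a SparseMatrix reaches the SVA_RuntimeSwitch case above, e.g.:
//
//   Eigen::SparseMatrix<double> m(100, 1);  // a single column
//   m.insert(7, 0) = 1.0;
//   Eigen::SparseVector<double> v = m;      // outerSize() == 1, so SVA_Inner is used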
template <typename Scalar, int Options, typename StorageIndex>
class Serializer<SparseVector<Scalar, Options, StorageIndex>, void> {
 public:
  typedef SparseVector<Scalar, Options, StorageIndex> SparseMat;

  // Serialization header: the vector's logical size followed by its non-zero count.
  struct Header {
    typename SparseMat::Index size;
    Index num_non_zeros;
  };
  EIGEN_DEVICE_FUNC size_t size(const SparseMat& value) const {
    return sizeof(Header) + (sizeof(Scalar) + sizeof(StorageIndex)) * value.nonZeros();
  }
  EIGEN_DEVICE_FUNC uint8_t* serialize(uint8_t* dest, uint8_t* end, const SparseMat& value) {
    if (EIGEN_PREDICT_FALSE(dest == nullptr)) return nullptr;
    if (EIGEN_PREDICT_FALSE(dest + size(value) > end)) return nullptr;

    const size_t header_bytes = sizeof(Header);
    Header header = {value.innerSize(), value.nonZeros()};
    EIGEN_USING_STD(memcpy)
    memcpy(dest, &header, header_bytes);
    dest += header_bytes;

    // Inner indices.
    std::size_t data_bytes = sizeof(StorageIndex) * header.num_non_zeros;
    memcpy(dest, value.innerIndexPtr(), data_bytes);
    dest += data_bytes;

    // Values.
    data_bytes = sizeof(Scalar) * header.num_non_zeros;
    memcpy(dest, value.valuePtr(), data_bytes);
    dest += data_bytes;

    return dest;
  }
  EIGEN_DEVICE_FUNC const uint8_t* deserialize(const uint8_t* src, const uint8_t* end, SparseMat& value) const {
    if (EIGEN_PREDICT_FALSE(src == nullptr)) return nullptr;
    if (EIGEN_PREDICT_FALSE(src + sizeof(Header) > end)) return nullptr;

    const size_t header_bytes = sizeof(Header);
    Header header;
    EIGEN_USING_STD(memcpy)
    memcpy(&header, src, header_bytes);
    src += header_bytes;

    value.resize(header.size);
    value.resizeNonZeros(header.num_non_zeros);

    // Inner indices.
    std::size_t data_bytes = sizeof(StorageIndex) * header.num_non_zeros;
    if (EIGEN_PREDICT_FALSE(src + data_bytes > end)) return nullptr;
    memcpy(value.innerIndexPtr(), src, data_bytes);
    src += data_bytes;

    // Values.
    data_bytes = sizeof(Scalar) * header.num_non_zeros;
    if (EIGEN_PREDICT_FALSE(src + data_bytes > end)) return nullptr;
    memcpy(value.valuePtr(), src, data_bytes);
    src += data_bytes;

    return src;
  }
};
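
// Illustrative round trip (a sketch, not from the original header), assuming the
// Eigen::serialize_size()/serialize()/deserialize() helpers from Serializer.h:
//
//   Eigen::SparseVector<float> v(32);
//   v.insert(5) = 1.0f;
//   std::vector<uint8_t> buf(Eigen::serialize_size(v));
//   Eigen::serialize(buf.data(), buf.data() + buf.size(), v);
//   Eigen::SparseVector<float> w;
//   Eigen::deserialize(buf.data(), buf.data() + buf.size(), w);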
}  // end namespace Eigen

#endif  // EIGEN_SPARSEVECTOR_H