SparseVector.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

// IWYU pragma: private
#include "./InternalHeaderCheck.h"

namespace Eigen {

/** \class SparseVector
 * \brief a sparse vector class
 */

namespace internal {
template <typename Scalar_, int Options_, typename StorageIndex_>
struct traits<SparseVector<Scalar_, Options_, StorageIndex_> > {
  typedef Scalar_ Scalar;
  typedef StorageIndex_ StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (Options_ & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = Options_ | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
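
// The Options_ template parameter selects the orientation: the default
// ColMajor yields a column vector (Dynamic x 1), RowMajor a row vector
// (1 x Dynamic). A minimal sketch:
//   Eigen::SparseVector<double> col_vec(n);                   // n x 1
//   Eigen::SparseVector<double, Eigen::RowMajor> row_vec(n);  // 1 x n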

// Sparse-Vector-Assignment kinds:
enum { SVA_RuntimeSwitch, SVA_Inner, SVA_Outer };

template <typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime)  ? SVA_RuntimeSwitch
                               : Src::InnerSizeAtCompileTime == 1 ? SVA_Outer
                                                                  : SVA_Inner>
struct sparse_vector_assign_selector;

} // namespace internal

template <typename Scalar_, int Options_, typename StorageIndex_>
class SparseVector : public SparseCompressedBase<SparseVector<Scalar_, Options_, StorageIndex_> > {
  typedef SparseCompressedBase<SparseVector> Base;
  using Base::convert_index;

 public:
  EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
  EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
  EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

  typedef internal::CompressedStorage<Scalar, StorageIndex> Storage;
  enum { IsColVector = internal::traits<SparseVector>::IsColVector };

  enum { Options = Options_ };

  EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
  EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
  EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
  EIGEN_STRONG_INLINE Index outerSize() const { return 1; }

  EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return m_data.valuePtr(); }
  EIGEN_STRONG_INLINE Scalar* valuePtr() { return m_data.valuePtr(); }

  EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
  EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }

  inline const StorageIndex* outerIndexPtr() const { return 0; }
  inline StorageIndex* outerIndexPtr() { return 0; }
  inline const StorageIndex* innerNonZeroPtr() const { return 0; }
  inline StorageIndex* innerNonZeroPtr() { return 0; }

  constexpr Storage& data() { return m_data; }
  constexpr const Storage& data() const { return m_data; }

  inline Scalar coeff(Index row, Index col) const {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
    return coeff(IsColVector ? row : col);
  }
  inline Scalar coeff(Index i) const {
    eigen_assert(i >= 0 && i < m_size);
    return m_data.at(StorageIndex(i));
  }

  inline Scalar& coeffRef(Index row, Index col) {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
    return coeffRef(IsColVector ? row : col);
  }

  inline Scalar& coeffRef(Index i) {
    eigen_assert(i >= 0 && i < m_size);

    return m_data.atWithInsertion(StorageIndex(i));
  }
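
  // coeffRef(i) performs a sorted lookup and, if the coefficient is absent,
  // inserts an explicit zero before returning a reference, so random writes
  // are valid but each miss pays for an insertion. A minimal sketch:
  //   Eigen::SparseVector<double> v(10);
  //   v.coeffRef(3) += 1.0;  // creates the entry at index 3 on first access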

 public:
  typedef typename Base::InnerIterator InnerIterator;
  typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

  inline void setZero() { m_data.clear(); }

  inline Index nonZeros() const { return m_data.size(); }

  inline void startVec(Index outer) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
  }

  inline Scalar& insertBackByOuterInner(Index outer, Index inner) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
    return insertBack(inner);
  }
  inline Scalar& insertBack(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }

  Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);
    return insertBackUnordered(inner);
  }
  inline Scalar& insertBackUnordered(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }
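
  // When entries arrive with strictly increasing indices, reserve() followed
  // by insertBack() is the cheapest way to fill the vector. A minimal sketch:
  //   Eigen::SparseVector<double> v(10);
  //   v.reserve(3);
  //   v.insertBack(0) = 1.0;
  //   v.insertBack(4) = 2.0;
  //   v.insertBack(7) = 3.0;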

  inline Scalar& insert(Index row, Index col) {
    eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));

    Index inner = IsColVector ? row : col;
    Index outer = IsColVector ? col : row;
    EIGEN_ONLY_USED_FOR_DEBUG(outer);
    eigen_assert(outer == 0);
    return insert(inner);
  }
  Scalar& insert(Index i) {
    eigen_assert(i >= 0 && i < m_size);

    Index startId = 0;
    Index p = Index(m_data.size()) - 1;
    // TODO smart realloc
    m_data.resize(p + 2, 1);

    while ((p >= startId) && (m_data.index(p) > i)) {
      m_data.index(p + 1) = m_data.index(p);
      m_data.value(p + 1) = m_data.value(p);
      --p;
    }
    m_data.index(p + 1) = convert_index(i);
    m_data.value(p + 1) = Scalar(0);
    return m_data.value(p + 1);
  }
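
  // insert(i) shifts every stored entry whose index is larger than i one slot
  // to the right, so out-of-order insertion costs O(nonZeros()) per call;
  // reserving capacity up front avoids repeated reallocation. A sketch:
  //   Eigen::SparseVector<double> v(100);
  //   v.reserve(2);
  //   v.insert(42) = 1.0;  // any order is accepted
  //   v.insert(7) = 2.0;   // but this pays a shift past entry 42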

  inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

  inline void finalize() {}

  Index prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision()) {
    return prune([&](const Scalar& val) { return !internal::isMuchSmallerThan(val, reference, epsilon); });
  }

  /** Prunes the entries of the vector based on a predicate
   * \returns the number of remaining non-zero coefficients */
  template <class F>
  Index prune(F&& keep_predicate) {
    Index k = 0;
    Index n = m_data.size();
    for (Index i = 0; i < n; ++i) {
      if (keep_predicate(m_data.value(i))) {
        m_data.value(k) = std::move(m_data.value(i));
        m_data.index(k) = m_data.index(i);
        ++k;
      }
    }
    m_data.resize(k);
    return k;
  }
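
  // A minimal sketch of the predicate overload: entries for which the functor
  // returns true are kept, the rest are dropped.
  //   v.prune([](const double& x) { return std::abs(x) > 1e-12; });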

  void resize(Index rows, Index cols) {
    eigen_assert((IsColVector ? cols : rows) == 1 && "Outer dimension must equal 1");
    resize(IsColVector ? rows : cols);
  }

  void resize(Index newSize) {
    m_size = newSize;
    m_data.clear();
  }

  void conservativeResize(Index newSize) {
    if (newSize < m_size) {
      Index i = 0;
      while (i < m_data.size() && m_data.index(i) < newSize) ++i;
      m_data.resize(i);
    }
    m_size = newSize;
  }
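
  // Unlike resize(), which discards all stored entries, conservativeResize()
  // keeps the entries whose index still fits. A minimal sketch:
  //   v.conservativeResize(50);  // drops entries with index >= 50, keeps the rest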

  void resizeNonZeros(Index size) { m_data.resize(size); }

  inline SparseVector() : m_size(0) { resize(0); }

  explicit inline SparseVector(Index size) : m_size(0) { resize(size); }

  inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows, cols); }

  template <typename OtherDerived>
  inline SparseVector(const SparseMatrixBase<OtherDerived>& other) : m_size(0) {
#ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
#endif
    *this = other.derived();
  }

  inline SparseVector(const SparseVector& other) : Base(other), m_size(0) { *this = other.derived(); }

  inline void swap(SparseVector& other) {
    std::swap(m_size, other.m_size);
    m_data.swap(other.m_data);
  }
  friend EIGEN_DEVICE_FUNC void swap(SparseVector& a, SparseVector& b) { a.swap(b); }

  template <int OtherOptions>
  inline void swap(SparseMatrix<Scalar, OtherOptions, StorageIndex>& other) {
    eigen_assert(other.outerSize() == 1);
    std::swap(m_size, other.m_innerSize);
    m_data.swap(other.m_data);
  }
  template <int OtherOptions>
  friend EIGEN_DEVICE_FUNC void swap(SparseVector& a, SparseMatrix<Scalar, OtherOptions, StorageIndex>& b) {
    a.swap(b);
  }
  template <int OtherOptions>
  friend EIGEN_DEVICE_FUNC void swap(SparseMatrix<Scalar, OtherOptions, StorageIndex>& a, SparseVector& b) {
    b.swap(a);
  }

  inline SparseVector& operator=(const SparseVector& other) {
    if (other.isRValue()) {
      swap(other.const_cast_derived());
    } else {
      resize(other.size());
      m_data = other.m_data;
    }
    return *this;
  }

  template <typename OtherDerived>
  inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other) {
    SparseVector tmp(other.size());
    internal::sparse_vector_assign_selector<SparseVector, OtherDerived>::run(tmp, other.derived());
    this->swap(tmp);
    return *this;
  }

  inline SparseVector(SparseVector&& other) : SparseVector() { this->swap(other); }

  template <typename OtherDerived>
  inline SparseVector(SparseCompressedBase<OtherDerived>&& other) : SparseVector() {
    *this = other.derived().markAsRValue();
  }

  inline SparseVector& operator=(SparseVector&& other) {
    this->swap(other);
    return *this;
  }

  template <typename OtherDerived>
  inline SparseVector& operator=(SparseCompressedBase<OtherDerived>&& other) {
    *this = other.derived().markAsRValue();
    return *this;
  }

#ifndef EIGEN_PARSED_BY_DOXYGEN
  template <typename Lhs, typename Rhs>
  inline SparseVector& operator=(const SparseSparseProduct<Lhs, Rhs>& product) {
    return Base::operator=(product);
  }
#endif

#ifndef EIGEN_NO_IO
  friend std::ostream& operator<<(std::ostream& s, const SparseVector& m) {
    for (Index i = 0; i < m.nonZeros(); ++i) s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
    s << std::endl;
    return s;
  }
#endif

  inline ~SparseVector() {}

  Scalar sum() const;

 public:
  EIGEN_DEPRECATED_WITH_REASON("Use .setZero() and .reserve() instead.") void startFill(Index reserve) {
    setZero();
    m_data.reserve(reserve);
  }

  EIGEN_DEPRECATED_WITH_REASON("Use .insertBack() instead.") Scalar& fill(Index r, Index c) {
    eigen_assert(r == 0 || c == 0);
    return fill(IsColVector ? r : c);
  }

  EIGEN_DEPRECATED_WITH_REASON("Use .insertBack() instead.") Scalar& fill(Index i) {
    m_data.append(Scalar(0), i);
    return m_data.value(m_data.size() - 1);
  }

  EIGEN_DEPRECATED_WITH_REASON("Use .insert() instead.") Scalar& fillrand(Index r, Index c) {
    eigen_assert(r == 0 || c == 0);
    return fillrand(IsColVector ? r : c);
  }

  EIGEN_DEPRECATED_WITH_REASON("Use .insert() instead.") Scalar& fillrand(Index i) { return insert(i); }

  EIGEN_DEPRECATED_WITH_REASON("Use .finalize() instead.") void endFill() {}

  // These two functions were here in the 3.1 release, so let's keep them in case some code relies on them.
  EIGEN_DEPRECATED_WITH_REASON("Use .data() instead.") Storage& _data() { return m_data; }
  EIGEN_DEPRECATED_WITH_REASON("Use .data() instead.") const Storage& _data() const { return m_data; }

#ifdef EIGEN_SPARSEVECTOR_PLUGIN
#include EIGEN_SPARSEVECTOR_PLUGIN
#endif

 protected:
  EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned, THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE)
  EIGEN_STATIC_ASSERT((Options_ & (ColMajor | RowMajor)) == Options, INVALID_MATRIX_TEMPLATE_PARAMETERS)

  Storage m_data;
  Index m_size;
};
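
// A minimal end-to-end sketch of the public API above:
//   Eigen::SparseVector<double> v(8);  // 8 x 1 column vector, no stored entries
//   v.coeffRef(2) = 1.0;               // random write, inserts the entry
//   v.insert(5) = 3.0;                 // explicit insertion
//   double x = v.coeff(5);             // reads 3.0 (absent entries read as 0)
//   Eigen::Index nnz = v.nonZeros();   // number of stored entries, here 2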

namespace internal {

template <typename Scalar_, int Options_, typename Index_>
struct evaluator<SparseVector<Scalar_, Options_, Index_> > : evaluator_base<SparseVector<Scalar_, Options_, Index_> > {
  typedef SparseVector<Scalar_, Options_, Index_> SparseVectorType;
  typedef evaluator_base<SparseVectorType> Base;
  typedef typename SparseVectorType::InnerIterator InnerIterator;
  typedef typename SparseVectorType::ReverseInnerIterator ReverseInnerIterator;

  enum { CoeffReadCost = NumTraits<Scalar_>::ReadCost, Flags = SparseVectorType::Flags };

  evaluator() : Base() {}

  explicit evaluator(const SparseVectorType& mat) : m_matrix(&mat) { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }

  inline Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  operator SparseVectorType&() { return m_matrix->const_cast_derived(); }
  operator const SparseVectorType&() const { return *m_matrix; }

  const SparseVectorType* m_matrix;
};

template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize() == src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for (typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it) dst.insert(it.index()) = it.value();
  }
};

template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize() == src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for (Index i = 0; i < src.size(); ++i) {
      typename SrcEvaluatorType::InnerIterator it(srcEval, i);
      if (it) dst.insert(i) = it.value();
    }
  }
};

template <typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest, Src, SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if (src.outerSize() == 1)
      sparse_vector_assign_selector<Dest, Src, SVA_Inner>::run(dst, src);
    else
      sparse_vector_assign_selector<Dest, Src, SVA_Outer>::run(dst, src);
  }
};
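
// The specializations above cover both layouts a vector expression can have:
// SVA_Inner walks one outer slice holding the whole vector (e.g. a column of
// a column-major matrix), SVA_Outer visits many outer slices of inner size
// one (e.g. a row of a column-major matrix), and SVA_RuntimeSwitch decides at
// runtime when the compile-time shape is ambiguous. A minimal sketch:
//   Eigen::SparseMatrix<double> mat(4, 4);  // column-major by default
//   Eigen::SparseVector<double> c = mat.col(0);                   // SVA_Inner
//   Eigen::SparseVector<double, Eigen::RowMajor> r = mat.row(0);  // SVA_Outer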

} // namespace internal

// Specialization for SparseVector.
// Serializes [size, numNonZeros, innerIndices, values].
template <typename Scalar, int Options, typename StorageIndex>
class Serializer<SparseVector<Scalar, Options, StorageIndex>, void> {
 public:
  typedef SparseVector<Scalar, Options, StorageIndex> SparseMat;

  struct Header {
    typename SparseMat::Index size;
    Index num_non_zeros;
  };

  EIGEN_DEVICE_FUNC size_t size(const SparseMat& value) const {
    return sizeof(Header) + (sizeof(Scalar) + sizeof(StorageIndex)) * value.nonZeros();
  }

  EIGEN_DEVICE_FUNC uint8_t* serialize(uint8_t* dest, uint8_t* end, const SparseMat& value) {
    if (EIGEN_PREDICT_FALSE(dest == nullptr)) return nullptr;
    if (EIGEN_PREDICT_FALSE(dest + size(value) > end)) return nullptr;

    const size_t header_bytes = sizeof(Header);
    Header header = {value.innerSize(), value.nonZeros()};
    EIGEN_USING_STD(memcpy)
    memcpy(dest, &header, header_bytes);
    dest += header_bytes;

    // Inner indices.
    std::size_t data_bytes = sizeof(StorageIndex) * header.num_non_zeros;
    memcpy(dest, value.innerIndexPtr(), data_bytes);
    dest += data_bytes;

    // Values.
    data_bytes = sizeof(Scalar) * header.num_non_zeros;
    memcpy(dest, value.valuePtr(), data_bytes);
    dest += data_bytes;

    return dest;
  }

  EIGEN_DEVICE_FUNC const uint8_t* deserialize(const uint8_t* src, const uint8_t* end, SparseMat& value) const {
    if (EIGEN_PREDICT_FALSE(src == nullptr)) return nullptr;
    if (EIGEN_PREDICT_FALSE(src + sizeof(Header) > end)) return nullptr;

    const size_t header_bytes = sizeof(Header);
    Header header;
    EIGEN_USING_STD(memcpy)
    memcpy(&header, src, header_bytes);
    src += header_bytes;

    value.setZero();
    value.resize(header.size);
    value.resizeNonZeros(header.num_non_zeros);

    // Inner indices.
    std::size_t data_bytes = sizeof(StorageIndex) * header.num_non_zeros;
    if (EIGEN_PREDICT_FALSE(src + data_bytes > end)) return nullptr;
    memcpy(value.innerIndexPtr(), src, data_bytes);
    src += data_bytes;

    // Values.
    data_bytes = sizeof(Scalar) * header.num_non_zeros;
    if (EIGEN_PREDICT_FALSE(src + data_bytes > end)) return nullptr;
    memcpy(value.valuePtr(), src, data_bytes);
    src += data_bytes;
    return src;
  }
};
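
// A minimal round-trip sketch with this specialization:
//   Eigen::SparseVector<float> v(100);
//   v.insert(3) = 1.f;
//   Eigen::Serializer<Eigen::SparseVector<float>> s;
//   std::vector<uint8_t> buffer(s.size(v));
//   uint8_t* end = s.serialize(buffer.data(), buffer.data() + buffer.size(), v);
//   Eigen::SparseVector<float> w;
//   s.deserialize(buffer.data(), end, w);  // w now equals v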

} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H