blob: 7e15c814b6f16c30238017c1d44908451a1f4391 [file] [log] [blame]
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
9
10#ifndef EIGEN_SPARSEVECTOR_H
11#define EIGEN_SPARSEVECTOR_H
12
13namespace Eigen {
14
/** \ingroup SparseCore_Module
  * \class SparseVector
  *
  * \brief a sparse vector class
  *
  * \tparam _Scalar the scalar type, i.e. the type of the coefficients
  *
  * See http://www.netlib.org/linalg/html_templates/node91.html for details on the storage scheme.
  *
  * This class can be extended with the help of the plugin mechanism described on the page
  * \ref TopicCustomizingEigen by defining the preprocessor symbol \c EIGEN_SPARSEVECTOR_PLUGIN.
  */
27
28namespace internal {
29template<typename _Scalar, int _Options, typename _Index>
30struct traits<SparseVector<_Scalar, _Options, _Index> >
31{
32 typedef _Scalar Scalar;
33 typedef _Index Index;
34 typedef Sparse StorageKind;
35 typedef MatrixXpr XprKind;
36 enum {
37 IsColVector = (_Options & RowMajorBit) ? 0 : 1,
38
39 RowsAtCompileTime = IsColVector ? Dynamic : 1,
40 ColsAtCompileTime = IsColVector ? 1 : Dynamic,
41 MaxRowsAtCompileTime = RowsAtCompileTime,
42 MaxColsAtCompileTime = ColsAtCompileTime,
43 Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
44 CoeffReadCost = NumTraits<Scalar>::ReadCost,
45 SupportedAccessPatterns = InnerRandomAccessPattern
46 };
47};
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -070048
49// Sparse-Vector-Assignment kinds:
50enum {
51 SVA_RuntimeSwitch,
52 SVA_Inner,
53 SVA_Outer
54};
55
56template< typename Dest, typename Src,
57 int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
58 : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
59 : SVA_Inner>
60struct sparse_vector_assign_selector;
61
Narayan Kamathc981c482012-11-02 10:59:05 +000062}
63
64template<typename _Scalar, int _Options, typename _Index>
65class SparseVector
66 : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
67{
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -070068 typedef SparseMatrixBase<SparseVector> SparseBase;
69
Narayan Kamathc981c482012-11-02 10:59:05 +000070 public:
71 EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
72 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
73 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -070074
75 typedef internal::CompressedStorage<Scalar,Index> Storage;
Narayan Kamathc981c482012-11-02 10:59:05 +000076 enum { IsColVector = internal::traits<SparseVector>::IsColVector };
77
78 enum {
79 Options = _Options
80 };
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -070081
Narayan Kamathc981c482012-11-02 10:59:05 +000082 EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
83 EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
84 EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
85 EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
86
87 EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
88 EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
89
90 EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
91 EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -070092
93 /** \internal */
94 inline Storage& data() { return m_data; }
95 /** \internal */
96 inline const Storage& data() const { return m_data; }
Narayan Kamathc981c482012-11-02 10:59:05 +000097
98 inline Scalar coeff(Index row, Index col) const
99 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700100 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
Narayan Kamathc981c482012-11-02 10:59:05 +0000101 return coeff(IsColVector ? row : col);
102 }
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700103 inline Scalar coeff(Index i) const
104 {
105 eigen_assert(i>=0 && i<m_size);
106 return m_data.at(i);
107 }
Narayan Kamathc981c482012-11-02 10:59:05 +0000108
109 inline Scalar& coeffRef(Index row, Index col)
110 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700111 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
Narayan Kamathc981c482012-11-02 10:59:05 +0000112 return coeff(IsColVector ? row : col);
113 }
114
115 /** \returns a reference to the coefficient value at given index \a i
116 * This operation involes a log(rho*size) binary search. If the coefficient does not
117 * exist yet, then a sorted insertion into a sequential buffer is performed.
118 *
119 * This insertion might be very costly if the number of nonzeros above \a i is large.
120 */
121 inline Scalar& coeffRef(Index i)
122 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700123 eigen_assert(i>=0 && i<m_size);
Narayan Kamathc981c482012-11-02 10:59:05 +0000124 return m_data.atWithInsertion(i);
125 }
126
127 public:
128
129 class InnerIterator;
130 class ReverseInnerIterator;
131
132 inline void setZero() { m_data.clear(); }
133
134 /** \returns the number of non zero coefficients */
135 inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
136
137 inline void startVec(Index outer)
138 {
139 EIGEN_UNUSED_VARIABLE(outer);
140 eigen_assert(outer==0);
141 }
142
143 inline Scalar& insertBackByOuterInner(Index outer, Index inner)
144 {
145 EIGEN_UNUSED_VARIABLE(outer);
146 eigen_assert(outer==0);
147 return insertBack(inner);
148 }
149 inline Scalar& insertBack(Index i)
150 {
151 m_data.append(0, i);
152 return m_data.value(m_data.size()-1);
153 }
154
155 inline Scalar& insert(Index row, Index col)
156 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700157 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
158
Narayan Kamathc981c482012-11-02 10:59:05 +0000159 Index inner = IsColVector ? row : col;
160 Index outer = IsColVector ? col : row;
161 eigen_assert(outer==0);
162 return insert(inner);
163 }
164 Scalar& insert(Index i)
165 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700166 eigen_assert(i>=0 && i<m_size);
167
Narayan Kamathc981c482012-11-02 10:59:05 +0000168 Index startId = 0;
169 Index p = Index(m_data.size()) - 1;
170 // TODO smart realloc
171 m_data.resize(p+2,1);
172
173 while ( (p >= startId) && (m_data.index(p) > i) )
174 {
175 m_data.index(p+1) = m_data.index(p);
176 m_data.value(p+1) = m_data.value(p);
177 --p;
178 }
179 m_data.index(p+1) = i;
180 m_data.value(p+1) = 0;
181 return m_data.value(p+1);
182 }
183
184 /**
185 */
186 inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }
187
188
189 inline void finalize() {}
190
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700191 void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
Narayan Kamathc981c482012-11-02 10:59:05 +0000192 {
193 m_data.prune(reference,epsilon);
194 }
195
196 void resize(Index rows, Index cols)
197 {
198 eigen_assert(rows==1 || cols==1);
199 resize(IsColVector ? rows : cols);
200 }
201
202 void resize(Index newSize)
203 {
204 m_size = newSize;
205 m_data.clear();
206 }
207
208 void resizeNonZeros(Index size) { m_data.resize(size); }
209
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700210 inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }
Narayan Kamathc981c482012-11-02 10:59:05 +0000211
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700212 inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }
Narayan Kamathc981c482012-11-02 10:59:05 +0000213
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700214 inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
Narayan Kamathc981c482012-11-02 10:59:05 +0000215
216 template<typename OtherDerived>
217 inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
218 : m_size(0)
219 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700220 check_template_parameters();
Narayan Kamathc981c482012-11-02 10:59:05 +0000221 *this = other.derived();
222 }
223
224 inline SparseVector(const SparseVector& other)
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700225 : SparseBase(other), m_size(0)
Narayan Kamathc981c482012-11-02 10:59:05 +0000226 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700227 check_template_parameters();
Narayan Kamathc981c482012-11-02 10:59:05 +0000228 *this = other.derived();
229 }
230
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700231 /** Swaps the values of \c *this and \a other.
232 * Overloaded for performance: this version performs a \em shallow swap by swaping pointers and attributes only.
233 * \sa SparseMatrixBase::swap()
234 */
Narayan Kamathc981c482012-11-02 10:59:05 +0000235 inline void swap(SparseVector& other)
236 {
237 std::swap(m_size, other.m_size);
238 m_data.swap(other.m_data);
239 }
240
241 inline SparseVector& operator=(const SparseVector& other)
242 {
243 if (other.isRValue())
244 {
245 swap(other.const_cast_derived());
246 }
247 else
248 {
249 resize(other.size());
250 m_data = other.m_data;
251 }
252 return *this;
253 }
254
255 template<typename OtherDerived>
256 inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
257 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700258 SparseVector tmp(other.size());
259 internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
260 this->swap(tmp);
261 return *this;
Narayan Kamathc981c482012-11-02 10:59:05 +0000262 }
263
264 #ifndef EIGEN_PARSED_BY_DOXYGEN
265 template<typename Lhs, typename Rhs>
266 inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
267 {
268 return Base::operator=(product);
269 }
270 #endif
271
272 friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
273 {
274 for (Index i=0; i<m.nonZeros(); ++i)
275 s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
276 s << std::endl;
277 return s;
278 }
279
280 /** Destructor */
281 inline ~SparseVector() {}
282
283 /** Overloaded for performance */
284 Scalar sum() const;
285
286 public:
287
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700288 /** \internal \deprecated use setZero() and reserve() */
Narayan Kamathc981c482012-11-02 10:59:05 +0000289 EIGEN_DEPRECATED void startFill(Index reserve)
290 {
291 setZero();
292 m_data.reserve(reserve);
293 }
294
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700295 /** \internal \deprecated use insertBack(Index,Index) */
Narayan Kamathc981c482012-11-02 10:59:05 +0000296 EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
297 {
298 eigen_assert(r==0 || c==0);
299 return fill(IsColVector ? r : c);
300 }
301
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700302 /** \internal \deprecated use insertBack(Index) */
Narayan Kamathc981c482012-11-02 10:59:05 +0000303 EIGEN_DEPRECATED Scalar& fill(Index i)
304 {
305 m_data.append(0, i);
306 return m_data.value(m_data.size()-1);
307 }
308
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700309 /** \internal \deprecated use insert(Index,Index) */
Narayan Kamathc981c482012-11-02 10:59:05 +0000310 EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
311 {
312 eigen_assert(r==0 || c==0);
313 return fillrand(IsColVector ? r : c);
314 }
315
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700316 /** \internal \deprecated use insert(Index) */
Narayan Kamathc981c482012-11-02 10:59:05 +0000317 EIGEN_DEPRECATED Scalar& fillrand(Index i)
318 {
319 return insert(i);
320 }
321
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700322 /** \internal \deprecated use finalize() */
Narayan Kamathc981c482012-11-02 10:59:05 +0000323 EIGEN_DEPRECATED void endFill() {}
324
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700325 // These two functions were here in the 3.1 release, so let's keep them in case some code rely on them.
326 /** \internal \deprecated use data() */
327 EIGEN_DEPRECATED Storage& _data() { return m_data; }
328 /** \internal \deprecated use data() */
329 EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
330
Narayan Kamathc981c482012-11-02 10:59:05 +0000331# ifdef EIGEN_SPARSEVECTOR_PLUGIN
332# include EIGEN_SPARSEVECTOR_PLUGIN
333# endif
334
335protected:
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700336
337 static void check_template_parameters()
Narayan Kamathc981c482012-11-02 10:59:05 +0000338 {
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700339 EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
340 EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
Narayan Kamathc981c482012-11-02 10:59:05 +0000341 }
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700342
343 Storage m_data;
344 Index m_size;
Narayan Kamathc981c482012-11-02 10:59:05 +0000345};
346
347template<typename Scalar, int _Options, typename _Index>
348class SparseVector<Scalar,_Options,_Index>::InnerIterator
349{
350 public:
351 InnerIterator(const SparseVector& vec, Index outer=0)
352 : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
353 {
354 EIGEN_UNUSED_VARIABLE(outer);
355 eigen_assert(outer==0);
356 }
357
358 InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
359 : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
360 {}
361
362 inline InnerIterator& operator++() { m_id++; return *this; }
363
364 inline Scalar value() const { return m_data.value(m_id); }
365 inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }
366
367 inline Index index() const { return m_data.index(m_id); }
368 inline Index row() const { return IsColVector ? index() : 0; }
369 inline Index col() const { return IsColVector ? 0 : index(); }
370
371 inline operator bool() const { return (m_id < m_end); }
372
373 protected:
374 const internal::CompressedStorage<Scalar,Index>& m_data;
375 Index m_id;
376 const Index m_end;
377};
378
379template<typename Scalar, int _Options, typename _Index>
380class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
381{
382 public:
383 ReverseInnerIterator(const SparseVector& vec, Index outer=0)
384 : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
385 {
386 EIGEN_UNUSED_VARIABLE(outer);
387 eigen_assert(outer==0);
388 }
389
390 ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
391 : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
392 {}
393
394 inline ReverseInnerIterator& operator--() { m_id--; return *this; }
395
396 inline Scalar value() const { return m_data.value(m_id-1); }
397 inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
398
399 inline Index index() const { return m_data.index(m_id-1); }
400 inline Index row() const { return IsColVector ? index() : 0; }
401 inline Index col() const { return IsColVector ? 0 : index(); }
402
403 inline operator bool() const { return (m_id > m_start); }
404
405 protected:
406 const internal::CompressedStorage<Scalar,Index>& m_data;
407 Index m_id;
408 const Index m_start;
409};
410
Carlos Hernandez7faaa9f2014-08-05 17:53:32 -0700411namespace internal {
412
413template< typename Dest, typename Src>
414struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
415 static void run(Dest& dst, const Src& src) {
416 eigen_internal_assert(src.innerSize()==src.size());
417 for(typename Src::InnerIterator it(src, 0); it; ++it)
418 dst.insert(it.index()) = it.value();
419 }
420};
421
422template< typename Dest, typename Src>
423struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
424 static void run(Dest& dst, const Src& src) {
425 eigen_internal_assert(src.outerSize()==src.size());
426 for(typename Dest::Index i=0; i<src.size(); ++i)
427 {
428 typename Src::InnerIterator it(src, i);
429 if(it)
430 dst.insert(i) = it.value();
431 }
432 }
433};
434
435template< typename Dest, typename Src>
436struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
437 static void run(Dest& dst, const Src& src) {
438 if(src.outerSize()==1) sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
439 else sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
440 }
441};
442
443}
444
Narayan Kamathc981c482012-11-02 10:59:05 +0000445} // end namespace Eigen
446
447#endif // EIGEN_SPARSEVECTOR_H