#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H
namespace Eigen {

namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseVector<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
// Sparse-Vector-Assignment kinds:
enum {
  SVA_RuntimeSwitch,
  SVA_Inner,
  SVA_Outer
};

template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;

} // end namespace internal
/** \class SparseVector
  *
  * \brief a sparse vector class
  *
  * \tparam _Scalar the scalar type, i.e. the type of the coefficients
  */
template<typename _Scalar, int _Options, typename _Index>
class SparseVector
  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
    typedef SparseMatrixBase<SparseVector> SparseBase;

  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef internal::CompressedStorage<Scalar,Index> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };

    enum {
      Options = _Options
    };
    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }

    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
    /** \internal */
    inline Storage& data() { return m_data; }
    /** \internal */
    inline const Storage& data() const { return m_data; }
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }
    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(i);
    }
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    /** \returns a reference to the coefficient at index \a i, inserting it if it is not stored yet */
    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.atWithInsertion(i);
    }
    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }

    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
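    // Illustrative sketch, not part of the original sources: the sequential
    // filling API above assumes indices are appended in increasing order.
    // A hypothetical usage (names and sizes chosen for illustration only):
    //
    //   Eigen::SparseVector<double> v(10);
    //   v.reserve(2);
    //   v.insertBack(2) = 1.0;
    //   v.insertBack(7) = 3.0;   // indices must keep increasing
    //   v.finalize();            // no-op here, kept for symmetry with SparseMatrix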
    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      EIGEN_ONLY_USED_FOR_DEBUG(outer);
      eigen_assert(outer==0);
      return insert(inner);
    }

    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      // TODO smart realloc
      m_data.resize(p+2,1);

      // shift the coefficients stored after index i by one position
      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = i;
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }
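    // Illustrative sketch, not part of the original sources: insert() accepts
    // indices in any order, at the cost of shifting every stored coefficient
    // located after the insertion point (see the loop above). For example:
    //
    //   Eigen::SparseVector<double> v(10);   // hypothetical vector, for illustration
    //   v.reserve(3);
    //   v.insert(7) = 3.0;
    //   v.insert(2) = 1.0;       // inserted before index 7: the tail is shifted
    //   v.coeffRef(7) += 0.5;    // already stored: found by search, no shift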
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    inline void finalize() {}
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }
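    // Illustrative sketch, not part of the original sources: prune() forwards to
    // CompressedStorage::prune(), which drops stored coefficients that are much
    // smaller than `reference` with respect to `epsilon`. For instance:
    //
    //   Eigen::SparseVector<double> v(4);
    //   v.insert(0) = 1e-20;
    //   v.insert(3) = 2.0;
    //   v.prune(1.0);            // removes the negligible 1e-20 entry, keeps 2.0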
    void resize(Index rows, Index cols)
    {
      eigen_assert(rows==1 || cols==1);
      resize(IsColVector ? rows : cols);
    }

    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }

    void resizeNonZeros(Index size) { m_data.resize(size); }
    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }

    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    inline SparseVector(const SparseVector& other)
      : SparseBase(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Shallow swap of the sizes and storages of \c *this and \a other. */
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
        swap(other.const_cast_derived());
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }
    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }
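    // Illustrative sketch, not part of the original sources: thanks to the
    // expression-based operator= above (and the matching constructor), a
    // SparseVector can be built from any compatible sparse vector expression,
    // e.g. a single column of a SparseMatrix:
    //
    //   Eigen::SparseMatrix<double> A(10,10);  // assumed to be filled elsewhere
    //   Eigen::SparseVector<double> c = A.col(2);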
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif
    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }

    /** Destructor */
    inline ~SparseVector() {}

    /** Overloaded for performance (implemented in SparseRedux.h) */
    Scalar sum() const;
    /** \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i) { return insert(i); }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}

    /** \deprecated use data() */
    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    /** \deprecated use data() */
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

  protected:

    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;
    Index m_size;
};
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline Scalar value() const { return m_data.value(m_id); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }

    inline Index index() const { return m_data.index(m_id); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_end;
};
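// Illustrative sketch, not part of the original sources: InnerIterator walks the
// stored nonzeros of a SparseVector in increasing index order, e.g.:
//
//   Eigen::SparseVector<double> v(10);
//   v.insert(3) = 1.5;
//   v.insert(8) = -2.0;
//   for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";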
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {}

    inline ReverseInnerIterator& operator--() { m_id--; return *this; }

    inline Scalar value() const { return m_data.value(m_id-1); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }

    inline Index index() const { return m_data.index(m_id-1); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_start;
};
namespace internal {

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    for(typename Src::InnerIterator it(src, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    for(typename Dest::Index i=0; i<src.size(); ++i)
    {
      typename Src::InnerIterator it(src, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1) sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                   sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H