#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

namespace Eigen {

namespace internal {
/** \internal
  * Hybrid sparse/dense vector class designed for intensive read-write operations.
  */
template<typename _Scalar, typename _Index>
class AmbiVector
{
  public:
    typedef _Scalar Scalar;
    typedef _Index Index;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }
    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies a sub-vector to work on */
    void setBounds(Index start, Index end) { m_start = start; m_end = end; }

    void setZero();
    void restart();
    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;
    ~AmbiVector() { delete[] m_buffer; }

    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = size;
    }

    Index size() const { return m_size; }

  protected:
    void reallocate(Index size)
    {
      // For small vectors, allocate a bit more than needed so that the same
      // buffer can hold a dense vector even while in sparse mode.
      delete[] m_buffer;
      if (size<1000)
      {
        Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1)/sizeof(Scalar);
        m_allocatedElements = (allocSize*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[allocSize];
        m_allocatedSize = allocSize; // record the dense capacity so resize() can skip redundant reallocations
      }
      else
      {
        m_allocatedElements = (size*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[size];
        m_allocatedSize = size;
      }
      m_size = size;
      m_start = 0;
      m_end = m_size;
    }
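    // Added commentary (not in the original source), illustrating the sizing
    // arithmetic in reallocate() above: with Scalar=double (8 bytes) and
    // Index=int, a ListEl {next,index,value} occupies 16 bytes, so size=100
    // gives allocSize = (100*16 + 7)/8 = 200 Scalars. The same 1600-byte buffer
    // can thus hold either 100 ListEl entries (sparse mode) or 200 doubles
    // (dense mode), which is why small vectors get the larger allocation.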
    void reallocateSparse()
    {
      Index copyElements = m_allocatedElements;
      m_allocatedElements = (std::min)(Index(m_allocatedElements*1.5),m_size);
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      allocSize = (allocSize + sizeof(Scalar) - 1)/sizeof(Scalar);
      Scalar* newBuffer = new Scalar[allocSize];
      memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer;
      m_buffer = newBuffer;
    }
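    // Added note: the linked-list storage grows geometrically (by 1.5x) but is
    // capped at m_size, since a vector can never hold more nonzeros than its
    // logical size; existing ListEl entries are copied verbatim into the new buffer.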
  protected:
    // element type of the linked list
    struct ListEl
    {
      Index next;
      Index index;
      Scalar value;
    };

    // used to store data in both modes
    Scalar* m_buffer;
    Scalar m_zero;
    Index m_size;
    Index m_start;
    Index m_end;
    Index m_allocatedSize;
    Index m_allocatedElements;
    Index m_mode;

    // linked list mode
    Index m_llStart;
    Index m_llCurrent;
    Index m_llSize;
};
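// Hedged usage sketch (added commentary, not part of the original file). It
// illustrates the intended write-then-iterate pattern; the IsDense/IsSparse
// constants are the Eigen mode flags already used above.
//
// \code
// Eigen::internal::AmbiVector<double,int> vec(1000); // logical size 1000
// vec.init(0.01);           // estimated density <= 0.1 selects sparse (linked list) mode
// vec.setBounds(0, 1000);
// vec.setZero();
// vec.restart();
// vec.coeffRef(3)  += 1.0;  // write with increasing indices,
// vec.coeffRef(42) += 2.0;  // or call restart() before going back
// for (Eigen::internal::AmbiVector<double,int>::Iterator it(vec); it; ++it)
//   std::cout << it.index() << " -> " << it.value() << "\n";
// \endcode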
/** \returns the number of nonzeros in the current sub-vector */
template<typename _Scalar,typename _Index>
_Index AmbiVector<_Scalar,_Index>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(double estimatedDensity)
{
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}
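// Added note: the 0.1 threshold is a heuristic; above roughly 10% expected
// nonzeros, the dense buffer is assumed to be faster and no larger than the
// linked-list representation.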
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(int mode)
{
  m_mode = mode;
  // dense mode needs no further setup: the dense buffer is reused as-is
  if (m_mode==IsSparse)
  {
    m_llSize = 0;
    m_llStart = -1;
  }
}
/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one; it rewinds the
  * linked-list cursor to the head and is extremely cheap. */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::restart()
{
  m_llCurrent = m_llStart;
}
/** Sets all coefficients of the current sub-vector to zero */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}
template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeffRef(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the first element of the list
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = i;
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // the new element becomes the head of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = i;
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      Index nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }
      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists and we found it
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
        // insert a new coefficient right after m_llCurrent
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = i;
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
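// Added note: coeffRef() resumes its search from m_llCurrent, so writing
// coefficients with nondecreasing indices costs amortized O(1) per access;
// jumping back to a smaller index requires restart(), which rewinds the
// cursor to the head of the list.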
template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeff(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
      return m_zero;
    else
    {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;
      // elid is -1 when every stored index is smaller than i
      if (elid >= 0 && llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}
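// Added note: when a coefficient is absent in sparse mode, coeff() returns a
// reference to the shared m_zero member; callers must treat it as read-only,
// which is why writable access goes through coeffRef() instead.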
/** \brief Iterator over the nonzero coefficients of an AmbiVector */
template<typename _Scalar,typename _Index>
class AmbiVector<_Scalar,_Index>::Iterator
{
  public:
    typedef _Scalar Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** \param vec the vector on which we iterate
      * \param epsilon all coefficients with a magnitude smaller than \a epsilon are skipped */
    Iterator(const AmbiVector& vec, const RealScalar& epsilon = 0)
      : m_vector(vec)
    {
      using std::abs;
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // unused in dense mode, set to avoid a warning
        m_cachedValue = 0;
        m_cachedIndex = m_vector.m_start-1;
        ++(*this); // advance to the first nonzero coefficient
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0;
          m_cachedIndex = -1; // empty list: the iterator is already done
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }
    Index index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }
    Iterator& operator++()
    {
      using std::abs;
      if (m_isDense)
      {
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && abs(m_vector.m_buffer[m_cachedIndex])<m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex = -1; // past the end of the sub-vector
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<m_epsilon);
        if (m_currentEl<0)
          m_cachedIndex = -1; // reached the end of the linked list
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }
  protected:
    const AmbiVector& m_vector; // the target vector
    Index m_currentEl;          // the current element in sparse/linked-list mode
    RealScalar m_epsilon;       // epsilon used to prune zero coefficients
    Index m_cachedIndex;        // current coordinate
    Scalar m_cachedValue;       // current value
    bool m_isDense;             // whether the vector is in dense mode
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_AMBIVECTOR_H