#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {
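/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */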
template<typename _Scalar, typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:
    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}
    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }
    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }
    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      return *this;
    }
    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }
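    /** Ensures there is room for at least \a size additional entries,
      * reallocating only when the current capacity is insufficient. */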
    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }
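    /** Shrinks the allocated buffers to exactly fit the current size. */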
    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }
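    /** Resizes to \a size entries; when a reallocation is needed, the new
      * capacity is enlarged by an extra \a reserveSizeFactor fraction of \a size. */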
    void resize(size_t size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*double(size)));
      m_size = size;
    }
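    /** Appends the pair (\a i, \a v) at the end; indices are expected to be
      * appended in increasing order so that the storage stays sorted. */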
    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }
    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }
    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }
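    /** Builds a CompressedStorage wrapping the given index and value buffers of
      * length \a size. The destructor releases them with delete[], so they must
      * have been allocated with new[] and must not be freed by the caller. */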
    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }
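    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */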
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }
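    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */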
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      // binary search for the first index that is not smaller than key
      while (end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
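    /** \returns the stored value at index \a key.
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */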
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: first check whether key is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
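    /** Like at(), but the search is performed in the range [start,end) */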
    inline Scalar atInRange(size_t start, size_t end, Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: first check whether key is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
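    /** \returns a reference to the value at index \a key.
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are kept sorted. */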
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        // shift the tail one slot to the right to make room at position id
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
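    /** Removes all entries whose value is much smaller than \a reference,
      * as measured by internal::isMuchSmallerThan with precision \a epsilon. */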
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }
  protected:

    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index*  newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // copy the retained entries into the new buffers
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      // release the old buffers
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }
  protected:
    Scalar* m_values;
    Index*  m_indices;
    size_t  m_size;
    size_t  m_allocatedSize;

};
} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H