typedef _Scalar Scalar;
typedef _StorageIndex StorageIndex;
// Default, sized, and copy constructors: each starts from the empty state.
CompressedStorage() : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) {}
explicit CompressedStorage(Index size) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) { resize(size); }
CompressedStorage(const CompressedStorage& other) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) { *this = other; }
// operator=: deep copy of both arrays (after resizing to other.size())
internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
std::swap(m_allocatedSize, other.m_allocatedSize);  // swap(): all four members are exchanged the same way
void reserve(Index size)
{ if (m_size + size > m_allocatedSize) reallocate(m_size + size); }  // ensure capacity for `size` more entries

// squeeze(): release unused capacity
if (m_allocatedSize>m_size)
  reallocate(m_size);

// resize(): grow the allocation when it is too small...
if (m_allocatedSize<size)
  // ...computing the enlarged capacity; if it overflows the index type:
  internal::throw_std_bad_alloc();
void append(const Scalar& v, Index i)
{
  Index id = m_size;
  resize(m_size+1, 1);
  m_values[id] = v;
  m_indices[id] = internal::convert_index<StorageIndex>(i);
}
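// Usage sketch: building up storage with reserve()+append(). Hedged:
// CompressedStorage lives in namespace Eigen::internal and is not public API;
// the calls below only rely on the declarations shown in this listing.
#include <Eigen/SparseCore>
void append_demo()
{
  Eigen::internal::CompressedStorage<double,int> data;
  data.reserve(3);        // capacity for three more entries; size() stays 0
  data.append(1.0, 0);    // push (value, inner index); callers keep indices increasing
  data.append(2.5, 4);
  // data.size()==2, data.value(1)==2.5, data.index(1)==4
}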
inline Index size() const { return m_size; }
inline Index allocatedSize() const { return m_allocatedSize; }
inline void clear() { m_size = 0; }
const Scalar* valuePtr() const { return m_values; }
Scalar* valuePtr() { return m_values; }
const StorageIndex* indexPtr() const { return m_indices; }
StorageIndex* indexPtr() { return m_indices; }
inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }

inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
// searchLowerIndex(): binary-search step narrowing [start,end)
if (m_indices[mid]<key)
  start = mid+1;
else
  end = mid;
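// The loop above has the same contract as std::lower_bound over the sorted
// index array. A hedged standalone equivalent ('indices' and 'n' are
// illustrative stand-ins for m_indices and m_size):
#include <algorithm>
inline long lower_index(const int* indices, long n, int key)
{
  // position of the first index >= key; equals n when key is past the end
  return static_cast<long>(std::lower_bound(indices, indices + n, key) - indices);
}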
inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
{
  if (m_size==0)
    return defaultValue;
  // fast path: the last coefficient is queried very often by callers
  else if (key==m_indices[m_size-1])
    return m_values[m_size-1];
  const Index id = searchLowerIndex(0,m_size-1,key);
  return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
}
// Like at(), but the search is performed in the range [start,end)
inline Scalar atInRange(Index start, Index end, Index key, const Scalar& defaultValue = Scalar(0)) const
{
  if (start>=end)
    return defaultValue;
  else if (end>start && key==m_indices[end-1])   // same fast path as at()
    return m_values[end-1];
  const Index id = searchLowerIndex(start,end-1,key);
  return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
}
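// Lookup sketch (hedged illustration; same internal-API caveat as above).
// at() never inserts, so it is safe on const storage:
#include <Eigen/SparseCore>
void lookup_demo()
{
  Eigen::internal::CompressedStorage<double,int> s;
  s.append(1.0, 1); s.append(4.0, 4); s.append(7.0, 7);   // sorted indices
  double a = s.at(4);                      // 4.0: found by binary search
  double b = s.at(5, -1.0);                // -1.0: absent, default returned
  double c = s.atInRange(0, s.size(), 7);  // 7.0: same lookup restricted to [0,size)
  (void)a; (void)b; (void)c;
}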
// atWithInsertion(key): return a reference to the value at `key`, inserting
// defaultValue at its sorted position when the key is not present yet.
Index id = searchLowerIndex(0,m_size,key);
if (id>=m_size || m_indices[id]!=key)
{
  if (m_allocatedSize<m_size+1)
  {
    m_allocatedSize = 2*(m_size+1);     // geometric growth
    internal::scoped_array<Scalar> newValues(m_allocatedSize);
    internal::scoped_array<StorageIndex> newIndices(m_allocatedSize);

    // copy the chunk before the insertion point
    internal::smart_copy(m_values,  m_values +id, newValues.ptr());
    internal::smart_copy(m_indices, m_indices+id, newIndices.ptr());

    // copy the tail, shifted one slot to the right
    if (m_size>id)
    {
      internal::smart_copy(m_values +id, m_values +m_size, newValues.ptr() +id+1);
      internal::smart_copy(m_indices+id, m_indices+m_size, newIndices.ptr()+id+1);
    }
    std::swap(m_values, newValues.ptr());
    std::swap(m_indices, newIndices.ptr());
  }
  else if (m_size>id)
  {
    // enough capacity: shift the tail in place (overlapping ranges, hence memmove)
    internal::smart_memmove(m_values +id, m_values +m_size, m_values +id+1);
    internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
  }
  m_size++;
  m_indices[id] = internal::convert_index<StorageIndex>(key);
  m_values[id] = defaultValue;
}
return m_values[id];
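// atWithInsertion() is what write access builds on (e.g. SparseVector's
// coeffRef() forwards to it): a miss costs an O(size) shift plus a possible
// reallocation with capacity doubling via 2*(m_size+1). Hedged sketch:
#include <Eigen/SparseCore>
void insert_demo()
{
  Eigen::internal::CompressedStorage<double,int> s;
  s.atWithInsertion(3) = 1.5;   // miss: index 3 inserted with default 0, then assigned
  s.atWithInsertion(3) += 2.0;  // hit: found by binary search, no insertion
}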
void moveChunk(Index from, Index to, Index chunkSize)
{
  eigen_internal_assert(to+chunkSize <= m_size);
  if (to>from && from+chunkSize>to)
  {
    // source and destination overlap: memmove handles that safely
    internal::smart_memmove(m_values+from,  m_values+from+chunkSize,  m_values+to);
    internal::smart_memmove(m_indices+from, m_indices+from+chunkSize, m_indices+to);
  }
  else
  {
    // disjoint ranges: a plain copy suffices (and is cheaper)
    internal::smart_copy(m_values+from,  m_values+from+chunkSize,  m_values+to);
    internal::smart_copy(m_indices+from, m_indices+from+chunkSize, m_indices+to);
  }
}
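// Why the overlap test matters: with to>from and intersecting ranges, a plain
// forward copy would read slots it has already overwritten; memmove (or
// copying backwards) is required. Minimal standalone illustration:
#include <cstring>
void shift_right_one(double* buf, long n)   // illustrative helper; buf holds n+1 slots
{
  std::memmove(buf + 1, buf, n * sizeof(double));   // overlapping ranges: safe
  // std::memcpy(buf + 1, buf, n * sizeof(double)); // would be undefined behavior
}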
void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
{
  // compact in place, keeping only the values that are not negligible
  Index k = 0, n = size();
  for (Index i=0; i<n; ++i)
    if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
    { value(k) = value(i); index(k) = index(i); ++k; }
  resize(k,0);
}
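// prune() compacts in place and preserves entry order; hedged sketch:
#include <Eigen/SparseCore>
void prune_demo()
{
  Eigen::internal::CompressedStorage<double,int> s;
  s.append(1e-20, 0); s.append(2.0, 3); s.append(3.0, 5);
  s.prune(1.0);   // drops entries negligible relative to the reference 1.0
  // s.size()==2; the surviving pairs (2.0,3) and (3.0,5) keep their order
}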
inline void reallocate(Index size)
{
  #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
    EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
  #endif
  eigen_internal_assert(size!=m_allocatedSize);
  internal::scoped_array<Scalar> newValues(size);
  internal::scoped_array<StorageIndex> newIndices(size);
  Index copySize = (std::min)(size, m_size);
  if (copySize>0)
  {
    internal::smart_copy(m_values, m_values+copySize, newValues.ptr());
    internal::smart_copy(m_indices, m_indices+copySize, newIndices.ptr());
  }
  std::swap(m_values, newValues.ptr());
  std::swap(m_indices, newIndices.ptr());
  m_allocatedSize = size;
}
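// The allocate/copy/swap pattern above is exception-safe: if an allocation
// throws, scoped_array releases anything it acquired and the original buffers
// stay intact. A hedged, simplified distillation (raw new instead of
// scoped_array, so weaker guarantees than the real code):
#include <utility>
template<class T>
void realloc_buffer(T*& buf, long oldSize, long newSize)  // illustrative helper
{
  T* tmp = new T[newSize];                    // may throw; buf is still valid
  long copySize = oldSize < newSize ? oldSize : newSize;
  for (long i = 0; i < copySize; ++i)
    tmp[i] = buf[i];                          // keep the surviving prefix
  std::swap(buf, tmp);                        // publish the new buffer
  delete[] tmp;                               // free the old one
}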
Scalar* m_values;
StorageIndex* m_indices;
Index m_size;
Index m_allocatedSize;
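// Big picture: these two parallel arrays (values + inner indices) form the
// backing store of Eigen's compressed sparse containers. Through the public
// API the same machinery is exercised like this (standard Eigen calls):
#include <Eigen/SparseCore>
void public_api_demo()
{
  Eigen::SparseVector<double> v(10);
  v.insert(3) = 1.5;        // lands in the underlying CompressedStorage
  v.coeffRef(3) += 1.0;     // sorted lookup; forwards to atWithInsertion()
  double x = v.coeff(7);    // read-only lookup, analogous to at(); x == 0.0
  (void)x;
}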