10 #ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
11 #define EIGEN_DYNAMIC_SPARSEMATRIX_H
36 template<
typename _Scalar,
int _Options,
typename _StorageIndex>
37 struct traits<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
39 typedef _Scalar Scalar;
40 typedef _StorageIndex StorageIndex;
41 typedef Sparse StorageKind;
42 typedef MatrixXpr XprKind;
48 Flags = _Options | NestByRefBit |
LvalueBit,
49 CoeffReadCost = NumTraits<Scalar>::ReadCost,
50 SupportedAccessPatterns = OuterRandomAccessPattern
// NOTE(review): fragment of the DynamicSparseMatrix class opening. The
// `class DynamicSparseMatrix` declaration line itself (original line 56) and
// the public-interface typedef block (original lines 58-77) were lost in
// extraction; only the template header, the base-class clause, a few
// using-declarations, and the storage member survive. The embedded numbers
// (55, 57, 60, ...) are the original file's line numbers fused into the text.
55 template<
typename _Scalar,
int _Options,
typename _StorageIndex>
57 :
public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
60 using Base::convert_index;
67 using Base::IsRowMajor;
68 using Base::operator=;
// Storage: one sorted compressed (index,value) vector per outer dimension.
78 std::vector<internal::CompressedStorage<Scalar,StorageIndex> > m_data;
82 inline Index rows()
const {
return IsRowMajor ? outerSize() : m_innerSize; }
83 inline Index cols()
const {
return IsRowMajor ? m_innerSize : outerSize(); }
84 inline Index innerSize()
const {
return m_innerSize; }
85 inline Index outerSize()
const {
return convert_index(m_data.size()); }
86 inline Index innerNonZeros(
Index j)
const {
return m_data[j].size(); }
88 std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() {
return m_data; }
89 const std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data()
const {
return m_data; }
96 const Index outer = IsRowMajor ? row : col;
97 const Index inner = IsRowMajor ? col : row;
98 return m_data[outer].at(inner);
107 const Index outer = IsRowMajor ? row : col;
108 const Index inner = IsRowMajor ? col : row;
109 return m_data[outer].atWithInsertion(inner);
113 class ReverseInnerIterator;
// NOTE(review): fragment of setZero() (original lines 115-121). The
// enclosing signature and the loop body — presumably m_data[j].clear() on
// every outer vector — were lost in extraction; only the loop header
// survives. TODO: confirm against upstream Eigen before restoring.
117 for (
Index j=0; j<outerSize(); ++j)
125 for (
Index j=0; j<outerSize(); ++j)
126 res += m_data[j].
size();
132 void reserve(
Index reserveSize = 1000)
136 Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),
Index(4));
137 for (
Index j=0; j<outerSize(); ++j)
139 m_data[j].reserve(reserveSizePerVector);
160 eigen_assert(outer<
Index(m_data.size()) && inner<m_innerSize &&
"out of range");
161 eigen_assert(((m_data[outer].
size()==0) || (m_data[outer].index(m_data[outer].
size()-1)<inner))
162 &&
"wrong sorted insertion");
163 m_data[outer].append(0, inner);
164 return m_data[outer].value(m_data[outer].
size()-1);
169 const Index outer = IsRowMajor ? row : col;
170 const Index inner = IsRowMajor ? col : row;
173 Index id =
static_cast<Index>(m_data[outer].size()) - 1;
174 m_data[outer].resize(
id+2,1);
176 while ( (
id >= startId) && (m_data[outer].index(
id) > inner) )
178 m_data[outer].index(
id+1) = m_data[outer].index(
id);
179 m_data[outer].value(
id+1) = m_data[outer].value(
id);
182 m_data[outer].index(
id+1) = inner;
183 m_data[outer].value(
id+1) = 0;
184 return m_data[outer].value(
id+1);
193 for (
Index j=0; j<outerSize(); ++j)
194 m_data[j].
prune(reference,epsilon);
201 const Index outerSize = IsRowMajor ? rows : cols;
202 m_innerSize = convert_index(IsRowMajor ? cols : rows);
204 if (
Index(m_data.size()) != outerSize)
206 m_data.resize(outerSize);
210 void resizeAndKeepData(
Index rows,
Index cols)
212 const Index outerSize = IsRowMajor ? rows : cols;
213 const Index innerSize = IsRowMajor ? cols : rows;
214 if (m_innerSize>innerSize)
221 if (m_data.size() != outerSize)
223 m_data.resize(outerSize);
229 : m_innerSize(0), m_data(0)
231 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
232 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
234 eigen_assert(innerSize()==0 && outerSize()==0);
// NOTE(review): fragment of the deprecated DynamicSparseMatrix(Index rows,
// Index cols) constructor (original lines 238-245, signature confirmed by
// the member index). Only the plugin hook survives; the initializer list
// and the resize call were lost in extraction.
241 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
242 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
248 template<
typename OtherDerived>
252 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
253 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
255 Base::operator=(other.
derived());
// NOTE(review): fragment of the copy constructor (original lines 258-265).
// The signature — presumably DynamicSparseMatrix(const DynamicSparseMatrix&
// other) — and the copying statement of the body were lost in extraction;
// only the initializer list and the plugin hook survive.
259 : Base(), m_innerSize(0)
261 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
262 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
270 std::swap(m_innerSize, other.m_innerSize);
272 m_data.swap(other.m_data);
277 if (other.isRValue())
279 swap(other.const_cast_derived());
283 resize(other.rows(), other.cols());
284 m_data = other.m_data;
299 reserve(reserveSize);
// NOTE(review): fragment of the deprecated fill(Index row, Index col)
// (original lines 311-316, signature confirmed by the member index). The
// signature and the trailing sorted-insertion call were lost in extraction;
// only the row/col -> outer/inner mapping survives.
313 const Index outer = IsRowMajor ? row : col;
314 const Index inner = IsRowMajor ? col : row;
325 return insert(row,col);
332 # ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
333 # include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
337 template<
typename Scalar,
int _Options,
typename _StorageIndex>
338 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::InnerIterator :
public SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
340 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator Base;
343 : Base(mat.m_data[outer]), m_outer(outer)
346 inline Index row()
const {
return IsRowMajor ? m_outer : Base::index(); }
347 inline Index col()
const {
return IsRowMajor ? Base::index() : m_outer; }
348 inline Index outer()
const {
return m_outer; }
354 template<
typename Scalar,
int _Options,
typename _StorageIndex>
355 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::ReverseInnerIterator :
public SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
357 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator Base;
360 : Base(mat.m_data[outer]), m_outer(outer)
363 inline Index row()
const {
return IsRowMajor ? m_outer : Base::index(); }
364 inline Index col()
const {
return IsRowMajor ? Base::index() : m_outer; }
365 inline Index outer()
const {
return m_outer; }
373 template<
typename _Scalar,
int _Options,
typename _StorageIndex>
374 struct evaluator<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
375 : evaluator_base<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
377 typedef _Scalar Scalar;
378 typedef DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
379 typedef typename SparseMatrixType::InnerIterator InnerIterator;
380 typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;
383 CoeffReadCost = NumTraits<_Scalar>::ReadCost,
384 Flags = SparseMatrixType::Flags
387 evaluator() : m_matrix(0) {}
388 evaluator(
const SparseMatrixType &mat) : m_matrix(&mat) {}
390 operator SparseMatrixType&() {
return m_matrix->const_cast_derived(); }
391 operator const SparseMatrixType&()
const {
return *m_matrix; }
393 Scalar coeff(
Index row,
Index col)
const {
return m_matrix->coeff(row,col); }
395 Index nonZerosEstimate()
const {
return m_matrix->nonZeros(); }
397 const SparseMatrixType *m_matrix;
A sparse matrix class designed for matrix assembly purposes.
Definition: DynamicSparseMatrix.h:58
Scalar & insertBack(Index row, Index col)
Definition: DynamicSparseMatrix.h:152
Scalar & coeffRef(Index row, Index col)
Definition: DynamicSparseMatrix.h:105
void startVec(Index)
Definition: DynamicSparseMatrix.h:145
void resize(Index rows, Index cols)
Definition: DynamicSparseMatrix.h:199
EIGEN_DEPRECATED DynamicSparseMatrix()
Definition: DynamicSparseMatrix.h:228
Index nonZeros() const
Definition: DynamicSparseMatrix.h:122
Scalar coeff(Index row, Index col) const
Definition: DynamicSparseMatrix.h:94
EIGEN_DEPRECATED Scalar & fillrand(Index row, Index col)
Definition: DynamicSparseMatrix.h:323
EIGEN_DEPRECATED Scalar & fill(Index row, Index col)
Definition: DynamicSparseMatrix.h:311
void finalize()
Definition: DynamicSparseMatrix.h:188
EIGEN_DEPRECATED void endFill()
Definition: DynamicSparseMatrix.h:330
EIGEN_DEPRECATED DynamicSparseMatrix(const SparseMatrixBase< OtherDerived > &other)
Definition: DynamicSparseMatrix.h:249
EIGEN_DEPRECATED void startFill(Index reserveSize=1000)
Definition: DynamicSparseMatrix.h:296
Scalar & insertBackByOuterInner(Index outer, Index inner)
Definition: DynamicSparseMatrix.h:158
EIGEN_DEPRECATED DynamicSparseMatrix(Index rows, Index cols)
Definition: DynamicSparseMatrix.h:238
~DynamicSparseMatrix()
Definition: DynamicSparseMatrix.h:290
void prune(Scalar reference, RealScalar epsilon=NumTraits< RealScalar >::dummy_precision())
Definition: DynamicSparseMatrix.h:191
internal::traits< Derived >::StorageIndex StorageIndex
const unsigned int LvalueBit
const unsigned int RowMajorBit
Namespace containing all symbols from the Eigen library.
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index