10 #ifndef EIGEN_SPARSE_BLOCK_H
11 #define EIGEN_SPARSE_BLOCK_H
13 #include "./InternalHeaderCheck.h"
// Generic sparse inner-panel block: BlockImpl specialization used when the
// block covers whole inner vectors (the `true` template argument) of an
// arbitrary sparse expression XprType. This variant is read-only — see the
// static-asserting operator= at the bottom.
// NOTE(review): this text is a line-numbered extraction of the original
// header; intermediate source lines (braces, access specifiers, the general
// constructor's parameter list) are missing between the numbered fragments.
18 template<
typename XprType,
int BlockRows,
int BlockCols>
19 class BlockImpl<XprType,BlockRows,BlockCols,true,Sparse>
20 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,true> >
// Nested-expression type with references/const stripped.
22 typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
23 typedef Block<XprType, BlockRows, BlockCols, true> BlockType;
25 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
// Compile-time count of outer vectors spanned by the block:
// rows for a row-major expression, columns otherwise.
27 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
28 typedef SparseMatrixBase<BlockType> Base;
29 using Base::convert_index;
31 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
// Constructor for a single outer vector: `i` is the row index (row-major)
// or column index (col-major); the size comes from the compile-time enum.
33 inline BlockImpl(XprType& xpr,
Index i)
34 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
// Initializer list of the general (startRow,startCol,blockRows,blockCols)
// constructor — its parameter list is missing from this extract. Only the
// outer coordinate/extent is stored; the inner extent is implied by the
// underlying expression because the block spans full inner vectors.
38 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
41 EIGEN_STRONG_INLINE
Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
42 EIGEN_STRONG_INLINE
Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Counts the block's nonzeros by evaluating the nested expression and
// iterating every outer vector in [m_outerStart, m_outerStart+m_outerSize).
// The accumulator and return statement are missing from this extract.
44 Index nonZeros()
const
46 typedef internal::evaluator<XprType> EvaluatorType;
47 EvaluatorType matEval(m_matrix);
49 Index end = m_outerStart + m_outerSize.value();
50 for(
Index j=m_outerStart; j<
end; ++j)
51 for(
typename EvaluatorType::InnerIterator it(matEval, j); it; ++it)
// Coefficient read: shift the outer coordinate by m_outerStart; the inner
// coordinate maps through unchanged because the block spans full inner
// vectors.
56 inline const Scalar coeff(
Index row,
Index col)
const
58 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
// Linear (vector) coefficient read — presumably only meaningful when the
// block is a single outer vector; TODO confirm against the full header.
61 inline const Scalar coeff(
Index index)
const
63 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
66 inline const XprType& nestedExpression()
const {
return m_matrix; }
67 inline XprType& nestedExpression() {
return m_matrix; }
// Block geometry in the coordinates of the underlying expression.
68 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
69 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
70 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
71 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Members: possibly-nested reference to the expression and the outer extent
// (stored statically when known at compile time). The m_outerStart member
// declaration is missing from this extract.
75 typename internal::ref_selector<XprType>::non_const_type m_matrix;
77 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
// Read-only guard: instantiating this operator= for any T fires a
// compile-time error, so generic sparse blocks cannot be assigned to.
83 BlockImpl& operator=(
const T&)
85 EIGEN_STATIC_ASSERT(
sizeof(T)==0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
// Writable inner-panel block over an actual SparseMatrix. Unlike the generic
// BlockImpl above, this one exposes the matrix's raw compressed-storage
// arrays (value/index/outer pointers offset by m_outerStart) and supports
// assignment from any sparse expression by splicing the new data into the
// underlying matrix's storage in place.
// NOTE(review): line-numbered extraction — braces, access specifiers and
// several statements (e.g. the non-compressed branch of free_size, the `p`
// accumulator of the outer-index loop) are missing between fragments.
97 template<
typename SparseMatrixType,
int BlockRows,
int BlockCols>
98 class sparse_matrix_block_impl
99 :
public SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> >
101 typedef internal::remove_all_t<typename SparseMatrixType::Nested> MatrixTypeNested_;
102 typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
103 typedef SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> > Base;
104 using Base::convert_index;
106 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
107 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
109 typedef typename Base::IndexVector IndexVector;
// Outer extent known at compile time when BlockRows/BlockCols are fixed.
110 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
// Single outer-vector constructor (i-th row or column).
113 inline sparse_matrix_block_impl(SparseMatrixType& xpr,
Index i)
114 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
// General constructor: only the outer start/extent is stored; the inner
// range is the full inner dimension of the matrix.
117 inline sparse_matrix_block_impl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
118 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
// Assignment from an arbitrary sparse expression. Strategy: evaluate `other`
// into a temporary with matching storage order, then splice its arrays into
// the matrix's storage between `start` and `end`, shifting the tail.
121 template<
typename OtherDerived>
122 inline BlockType& operator=(
const SparseMatrixBase<OtherDerived>& other)
124 typedef internal::remove_all_t<typename SparseMatrixType::Nested> NestedMatrixType_;
125 NestedMatrixType_& matrix = m_matrix;
// Ref<> evaluates `other` into a compatible SparseMatrix unless it already
// is one with the right storage order.
130 Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
131 eigen_internal_assert(tmp.outerSize()==m_outerSize.value());
// nnz: nonzeros to write; start/end: the block's current span inside the
// matrix's value/index arrays; tail_size: data after the block that must be
// preserved (shifted if nnz != block_size).
134 Index nnz = tmp.nonZeros();
135 Index start = m_outerStart==0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];
136 Index end = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
138 Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] -
end;
// Capacity check: decides between reusing the existing buffers and the
// reallocation path below. The non-compressed alternative (`:` branch) is
// missing from this extract.
140 Index free_size = m_matrix.isCompressed()
141 ?
Index(matrix.data().allocatedSize()) + block_size
144 Index tmp_start = tmp.outerIndexPtr()[0];
146 bool update_trailing_pointers =
false;
// Reallocation path: build a fresh storage buffer sized for the new block,
// then copy [0,start), the new block data, and the tail into it.
150 typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);
152 internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
153 internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());
155 internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, newdata.valuePtr() + start);
156 internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, newdata.indexPtr() + start);
158 internal::smart_copy(matrix.valuePtr()+
end, matrix.valuePtr()+
end + tail_size, newdata.valuePtr()+start+nnz);
159 internal::smart_copy(matrix.innerIndexPtr()+
end, matrix.innerIndexPtr()+
end + tail_size, newdata.indexPtr()+start+nnz);
161 newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
// Swap the rebuilt buffer into the matrix.
163 matrix.data().swap(newdata);
165 update_trailing_pointers =
true;
// In-place path: enough capacity — move the tail with memmove (regions may
// overlap), then copy the new block data into the gap.
169 if(m_matrix.isCompressed() && nnz!=block_size)
172 matrix.data().resize(start + nnz + tail_size);
174 internal::smart_memmove(matrix.valuePtr()+
end, matrix.valuePtr() +
end+tail_size, matrix.valuePtr() + start+nnz);
175 internal::smart_memmove(matrix.innerIndexPtr()+
end, matrix.innerIndexPtr() +
end+tail_size, matrix.innerIndexPtr() + start+nnz);
177 update_trailing_pointers =
true;
180 internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, matrix.valuePtr() + start);
181 internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, matrix.innerIndexPtr() + start);
// Fix up per-outer-vector bookkeeping: single outer vector when the block
// is a compile-time vector, otherwise one entry per covered outer vector.
185 if(IsVectorAtCompileTime)
187 if(!m_matrix.isCompressed())
188 matrix.innerNonZeroPtr()[m_outerStart] =
StorageIndex(nnz);
189 matrix.outerIndexPtr()[m_outerStart] =
StorageIndex(start);
// General case: walk tmp's outer vectors, recording each one's nonzero
// count and running offset `p` (p's declaration/update is missing here).
194 for(
Index k=0; k<m_outerSize.value(); ++k)
196 StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
197 if(!m_matrix.isCompressed())
198 matrix.innerNonZeroPtr()[m_outerStart+k] = nnz_k;
199 matrix.outerIndexPtr()[m_outerStart+k] = p;
// If the block's size changed, every outer-index entry after the block
// must be shifted by the size delta.
204 if(update_trailing_pointers)
206 StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
207 for(
Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
209 matrix.outerIndexPtr()[k] += offset;
// Copy-assignment delegates to the templated operator= above so block-to-
// block assignment follows the same splice logic.
216 inline BlockType& operator=(
const BlockType& other)
218 return operator=<BlockType>(other);
// Raw compressed-storage accessors. value/inner-index arrays are shared
// with the whole matrix; the outer-index (and innerNonZero) pointers are
// offset by m_outerStart so index 0 is the block's first outer vector.
// (The non-const overloads' signatures are missing from this extract; only
// their bodies remain.)
221 inline const Scalar*
valuePtr()
const
222 {
return m_matrix.valuePtr(); }
224 {
return m_matrix.valuePtr(); }
227 {
return m_matrix.innerIndexPtr(); }
229 {
return m_matrix.innerIndexPtr(); }
232 {
return m_matrix.outerIndexPtr() + m_outerStart; }
234 {
return m_matrix.outerIndexPtr() + m_outerStart; }
237 {
return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
239 {
return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
// Compressed iff the underlying matrix has no innerNonZero array.
241 bool isCompressed()
const {
return m_matrix.innerNonZeroPtr()==0; }
// coeffRef body (signature missing from this extract): writable access,
// outer coordinate shifted by m_outerStart.
245 return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
248 inline const Scalar coeff(
Index row,
Index col)
const
250 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
253 inline const Scalar coeff(
Index index)
const
255 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
// Last stored coefficient of a vector block (compile-time vector only):
// compressed storage ends at outerIndexPtr[m_outerStart+1]-1, otherwise at
// start + innerNonZero count - 1.
258 const Scalar& lastCoeff()
const
260 EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
262 if(m_matrix.isCompressed())
263 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
265 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
268 EIGEN_STRONG_INLINE
Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
269 EIGEN_STRONG_INLINE
Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
270 inline const SparseMatrixType& nestedExpression()
const {
return m_matrix; }
272 inline SparseMatrixType& nestedExpression() {
return m_matrix; }
// Block geometry in the underlying matrix's coordinates.
273 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
274 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
275 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
276 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Members (m_outerStart's declaration is missing from this extract).
280 typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
282 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
// BlockImpl specialization for inner-panel blocks of a (non-const)
// SparseMatrix: a thin wrapper that forwards everything — including the
// writable operator= — to internal::sparse_matrix_block_impl.
288 template<
typename Scalar_,
int Options_,
typename StorageIndex_,
int BlockRows,
int BlockCols>
289 class BlockImpl<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true,Sparse>
290 :
public internal::sparse_matrix_block_impl<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols>
293 typedef StorageIndex_ StorageIndex;
294 typedef SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
295 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
// Single outer-vector constructor; its base-forwarding initializer list is
// missing from this extract.
296 inline BlockImpl(SparseMatrixType& xpr,
Index i)
// General constructor forwards the full block geometry to the base.
300 inline BlockImpl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
301 : Base(xpr, startRow, startCol, blockRows, blockCols)
// Expose the base's assignment operators (sparse-expression and block).
304 using Base::operator=;
// BlockImpl specialization for inner-panel blocks of a *const* SparseMatrix.
// Same forwarding wrapper as the non-const specialization; note Base is
// instantiated with the const-qualified matrix type.
307 template<
typename Scalar_,
int Options_,
typename StorageIndex_,
int BlockRows,
int BlockCols>
308 class BlockImpl<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true,Sparse>
309 :
public internal::sparse_matrix_block_impl<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols>
312 typedef StorageIndex_ StorageIndex;
313 typedef const SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
314 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
// Single outer-vector constructor; base-forwarding initializer list missing
// from this extract.
315 inline BlockImpl(SparseMatrixType& xpr,
Index i)
319 inline BlockImpl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
320 : Base(xpr, startRow, startCol, blockRows, blockCols)
323 using Base::operator=;
// Declared-only constructors taking a generic SparseMatrixBase — presumably
// intentionally left undefined to reject construction from arbitrary sparse
// expressions; TODO confirm against the full header.
325 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr,
Index i);
326 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr);
// Fully generic sparse BlockImpl (any InnerPanel value): stores all four
// block coordinates explicitly rather than just an outer start/extent.
// Read-only: inherits internal::no_assignment_operator and additionally
// static-asserts in the operator= template at the bottom.
// NOTE(review): line-numbered extraction — the class head's opening brace,
// access specifiers, Base/BlockType typedefs and constructor signatures are
// missing between the numbered fragments.
334 template<
typename XprType,
int BlockRows,
int BlockCols,
bool InnerPanel>
335 class BlockImpl<XprType,BlockRows,BlockCols,InnerPanel,
Sparse>
336 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,InnerPanel> >, internal::no_assignment_operator
340 using Base::convert_index;
342 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
345 typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
// Initializer list of the single-index constructor (its signature is
// missing here): `i` selects a whole row when the block is a compile-time
// row vector, a whole column when it is a compile-time column vector.
351 m_startRow( (BlockRows==1) && (BlockCols==XprType::ColsAtCompileTime) ? convert_index(i) : 0),
352 m_startCol( (BlockRows==XprType::RowsAtCompileTime) && (BlockCols==1) ? convert_index(i) : 0),
353 m_blockRows(BlockRows==1 ? 1 : xpr.rows()),
354 m_blockCols(BlockCols==1 ? 1 : xpr.cols())
// General constructor: store all four coordinates verbatim.
360 : m_matrix(xpr), m_startRow(convert_index(startRow)), m_startCol(convert_index(startCol)), m_blockRows(convert_index(blockRows)), m_blockCols(convert_index(blockCols))
363 inline Index rows()
const {
return m_blockRows.value(); }
364 inline Index cols()
const {
return m_blockCols.value(); }
// coeffRef body (signature missing from this extract): shift both
// coordinates by the block origin.
368 return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value());
371 inline const Scalar coeff(
Index row,
Index col)
const
373 return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
// Linear access for compile-time vector blocks: the index maps to the
// column for a row vector (RowsAtCompileTime==1), to the row otherwise.
376 inline Scalar& coeffRef(
Index index)
378 return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
379 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
382 inline const Scalar coeff(
Index index)
const
384 return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
385 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
388 inline const XprType& nestedExpression()
const {
return m_matrix; }
389 inline XprType& nestedExpression() {
return m_matrix; }
// Block geometry accessors used by the iterator-based evaluator below.
390 Index startRow()
const {
return m_startRow.value(); }
391 Index startCol()
const {
return m_startCol.value(); }
392 Index blockRows()
const {
return m_blockRows.value(); }
393 Index blockCols()
const {
return m_blockCols.value(); }
// The evaluator needs direct access to the stored coordinates.
397 friend struct internal::unary_evaluator<Block<XprType,BlockRows,BlockCols,InnerPanel>, internal::IteratorBased, Scalar >;
// Coordinates are stored statically (as 0) when the corresponding dimension
// is fixed to 1 at compile time, avoiding runtime storage.
401 typename internal::ref_selector<XprType>::non_const_type m_matrix;
402 const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
403 const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
404 const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
405 const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;
// Read-only guard, same pattern as the inner-panel BlockImpl: any attempt
// to assign fires a compile-time error.
411 BlockImpl& operator=(
const T&)
413 EIGEN_STATIC_ASSERT(
sizeof(T)==0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
// Iterator-based evaluator for generic sparse blocks. Chooses between two
// iterator strategies: InnerVectorInnerIterator when the block direction
// matches the storage order, OuterVectorInnerIterator when the block is a
// single vector running *across* outer vectors (OuterVector below).
421 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
422 struct unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased >
423 :
public evaluator_base<Block<ArgType,BlockRows,BlockCols,InnerPanel> >
425 class InnerVectorInnerIterator;
426 class OuterVectorInnerIterator;
428 typedef Block<ArgType,BlockRows,BlockCols,InnerPanel> XprType;
429 typedef typename XprType::StorageIndex StorageIndex;
430 typedef typename XprType::Scalar Scalar;
// OuterVector: a compile-time column of a row-major arg, or a compile-time
// row of a col-major arg — i.e. the block crosses outer vectors.
433 IsRowMajor = XprType::IsRowMajor,
434 OuterVector = (BlockCols == 1 && ArgType::IsRowMajor) || (BlockRows == 1 && !ArgType::IsRowMajor),
435 CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
436 Flags = XprType::Flags
// Compile-time selection of the iterator strategy.
439 typedef std::conditional_t<OuterVector,OuterVectorInnerIterator,InnerVectorInnerIterator> InnerIterator;
441 explicit unary_evaluator(
const XprType& op)
442 : m_argImpl(op.nestedExpression()), m_block(op)
// Estimate of the block's nonzeros: exact when the block can report it,
// otherwise scale the argument's estimate by the block/argument size ratio
// (the branch selecting between the two is missing from this extract).
445 inline Index nonZerosEstimate()
const {
446 const Index nnz = m_block.nonZeros();
450 const Index nested_sz = m_block.nestedExpression().size();
451 return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
457 typedef typename evaluator<ArgType>::InnerIterator EvalIterator;
459 evaluator<ArgType> m_argImpl;
460 const XprType &m_block;
// Iterator over one inner vector of the block when the block direction
// matches the argument's storage order. Wraps the argument's own iterator
// (EvalIterator), started at the block's outer offset; the constructor
// skips entries before the block's inner start, and operator bool stops at
// m_end, the block's inner end.
463 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
464 class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::InnerVectorInnerIterator
465 :
public EvalIterator
470 enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
471 const XprType& m_block;
// `outer` is block-local; translate it to the argument's outer index by
// adding the block's outer start before handing it to EvalIterator.
475 EIGEN_STRONG_INLINE InnerVectorInnerIterator(
const unary_evaluator& aEval,
Index outer)
476 : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
477 m_block(aEval.m_block),
478 m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows())
// Advance past entries whose inner index precedes the block's inner start.
480 while( (EvalIterator::operator
bool()) && (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())) )
481 EvalIterator::operator++();
// All coordinates reported in block-local terms (argument coordinate minus
// the block origin).
484 inline StorageIndex index()
const {
return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow()); }
485 inline Index outer()
const {
return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol()); }
486 inline Index row()
const {
return EvalIterator::row() - m_block.startRow(); }
487 inline Index col()
const {
return EvalIterator::col() - m_block.startCol(); }
// Valid while the wrapped iterator is valid AND still inside the block.
489 inline operator bool()
const {
return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
// Iterator for a block that is a single vector running ACROSS the
// argument's outer vectors (e.g. one column of a row-major matrix): it
// walks outer vectors m_outerPos in [start, m_end) and, in each, searches
// for the single entry at the fixed inner coordinate m_innerIndex.
// NOTE(review): the m_outerPos/m_end member declarations and parts of the
// constructor/operator++ bodies are missing from this extract.
492 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
493 class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::OuterVectorInnerIterator
496 enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
497 const unary_evaluator& m_eval;
// The fixed inner coordinate shared by every entry of this block.
499 const Index m_innerIndex;
504 EIGEN_STRONG_INLINE OuterVectorInnerIterator(
const unary_evaluator& aEval,
Index outer)
506 m_outerPos( (XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow()) ),
507 m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
508 m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows()),
509 m_it(m_eval.m_argImpl, m_outerPos)
// A cross-outer block is a single (block-local) outer vector, so only
// outer==0 is meaningful.
511 EIGEN_UNUSED_VARIABLE(outer);
512 eigen_assert(outer==0);
// Scan the first outer vector for the entry at m_innerIndex; the handling
// when it is absent (presumably ++*this to find the next hit) is missing
// from this extract.
514 while(m_it && m_it.index() < m_innerIndex) ++m_it;
515 if((!m_it) || (m_it.index()!=m_innerIndex))
// Block-local position along the vector.
519 inline StorageIndex index()
const {
return convert_index<StorageIndex>(m_outerPos - (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow())); }
520 inline Index outer()
const {
return 0; }
521 inline Index row()
const {
return XprIsRowMajor ? 0 : index(); }
522 inline Index col()
const {
return XprIsRowMajor ? index() : 0; }
524 inline Scalar value()
const {
return m_it.value(); }
525 inline Scalar& valueRef() {
return m_it.valueRef(); }
// Advance: step to the next outer vector, rebuild the wrapped iterator in
// place (destroy_at/construct_at — EvalIterator need not be assignable),
// and stop at the first vector containing an entry at m_innerIndex.
527 inline OuterVectorInnerIterator& operator++()
530 while(++m_outerPos<m_end)
533 internal::destroy_at(&m_it);
534 internal::construct_at(&m_it, m_eval.m_argImpl, m_outerPos);
536 while(m_it && m_it.index() < m_innerIndex) ++m_it;
537 if(m_it && m_it.index()==m_innerIndex)
break;
// Valid while the scan position is inside the block's outer range.
542 inline operator bool()
const {
return m_outerPos < m_end; }
// Evaluator specialization for inner-panel blocks of a SparseMatrix: such a
// block is itself a SparseCompressedBase (raw array access via
// sparse_matrix_block_impl), so the generic compressed-storage evaluator
// applies directly — no block-specific iterator needed.
545 template<
typename Scalar_,
int Options_,
typename StorageIndex_,
int BlockRows,
int BlockCols>
546 struct unary_evaluator<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true>, IteratorBased>
547 : evaluator<SparseCompressedBase<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> > >
549 typedef Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,
true> XprType;
550 typedef evaluator<SparseCompressedBase<XprType> > Base;
551 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
// Same specialization as above for blocks of a *const* SparseMatrix: reuse
// the generic compressed-storage evaluator.
554 template<
typename Scalar_,
int Options_,
typename StorageIndex_,
int BlockRows,
int BlockCols>
555 struct unary_evaluator<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true>, IteratorBased>
556 : evaluator<SparseCompressedBase<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> > >
558 typedef Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,
true> XprType;
559 typedef evaluator<SparseCompressedBase<XprType> > Base;
560 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
BlockImpl(XprType &xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
Definition: SparseBlock.h:359
BlockImpl(XprType &xpr, Index i)
Definition: SparseBlock.h:349
Expression of a fixed-size or dynamic-size block.
Definition: Block.h:107
Index nonZeros() const
Definition: SparseCompressedBase.h:58
const Scalar * valuePtr() const
Definition: SparseCompressedBase.h:73
const StorageIndex * outerIndexPtr() const
Definition: SparseCompressedBase.h:92
bool isCompressed() const
Definition: SparseCompressedBase.h:109
const StorageIndex * innerIndexPtr() const
Definition: SparseCompressedBase.h:82
const StorageIndex * innerNonZeroPtr() const
Definition: SparseCompressedBase.h:102
Base class of any sparse matrices or sparse expressions.
Definition: SparseMatrixBase.h:30
internal::traits< Block< SparseMatrixType, BlockRows, BlockCols, true > >::StorageIndex StorageIndex
Definition: SparseMatrixBase.h:45
Index rows() const
Definition: SparseMatrixBase.h:176
Index cols() const
Definition: SparseMatrixBase.h:178
static const lastp1_t end
Definition: IndexedViewHelper.h:183
Namespace containing all symbols from the Eigen library.
Definition: Core:139
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API.
Definition: Meta.h:59
const int Dynamic
Definition: Constants.h:24
Block< SparseMatrixType, BlockRows, BlockCols, true > & derived()
Definition: EigenBase.h:48
Definition: Constants.h:512