#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

namespace Eigen {

namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    // ...
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  typedef SparseMatrix<_Scalar, _Options, _Index> MatrixType;
  typedef typename ref_selector<MatrixType>::type MatrixTypeNested;
  typedef typename remove_reference<MatrixTypeNested>::type _MatrixTypeNested;

  typedef _Scalar Scalar;
  typedef Dense StorageKind;
  typedef _Index StorageIndex;
  typedef MatrixXpr XprKind;

  enum {
    // ...
    ColsAtCompileTime = 1,
    // ...
    MaxColsAtCompileTime = 1,
    // ...
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
  : public traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  // ...
};

} // end namespace internal
/** \class SparseMatrix
  *
  * A versatile sparse matrix representation using the common compressed
  * row/column storage (CRS/CCS) scheme, plus an optional per-inner-vector
  * free space that enables fast random insertions.
  */
template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseCompressedBase<SparseMatrix<_Scalar, _Options, _Index> >
{
    typedef SparseCompressedBase<SparseMatrix> Base;
    using Base::convert_index;
  public:
    using Base::isCompressed;
    using Base::nonZeros;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
    using Base::operator+=;
    using Base::operator-=;

    typedef Diagonal<SparseMatrix> DiagonalReturnType;
    typedef Diagonal<const SparseMatrix> ConstDiagonalReturnType;
    typedef typename Base::InnerIterator InnerIterator;
    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

    using Base::IsRowMajor;
    typedef internal::CompressedStorage<Scalar,StorageIndex> Storage;
    enum { Options = _Options };

    typedef typename Base::IndexVector IndexVector;
    typedef typename Base::ScalarVector ScalarVector;

  protected:
    Index m_outerSize;
    Index m_innerSize;
    StorageIndex* m_outerIndex;
    StorageIndex* m_innerNonZeros;   // optional: if null, the matrix is in compressed mode
    Storage m_data;

  public:
    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    /** \returns a const pointer to the array of values */
    inline const Scalar* valuePtr() const { return m_data.valuePtr(); }
    /** \returns a non-const pointer to the array of values */
    inline Scalar* valuePtr() { return m_data.valuePtr(); }

    /** \returns a const pointer to the array of inner indices */
    inline const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }

    /** \internal */
    inline Storage& data() { return m_data; }
    /** \internal */
    inline const Storage& data() const { return m_data; }
    /** \returns the value of the matrix at position (\a row, \a col);
      * returns Scalar(0) if the coefficient is not stored. */
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, StorageIndex(inner));
    }
    /** \returns a non-const reference to the value at position (\a row, \a col);
      * if the coefficient does not exist yet, it is inserted via insert(Index,Index). */
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      if(end<=start)
        return insert(row,col);
      const Index p = m_data.searchLowerIndex(start,end-1,StorageIndex(inner));
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }

    /** \returns a reference to a newly inserted non zero coefficient with value zero;
      * the coefficient (\a row, \a col) must not already exist. */
    Scalar& insert(Index row, Index col);
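    // Illustrative usage sketch (not part of the original header): coeff() is a
    // read-only lookup returning 0 for missing entries, while coeffRef() returns
    // a writable reference and falls back to insert() for new coefficients.
    //
    // \code
    // Eigen::SparseMatrix<double> A(4,4);
    // A.insert(1,2) = 3.0;        // creates the (1,2) entry
    // double v = A.coeff(1,2);    // v == 3.0; A.coeff(0,0) returns 0 without inserting
    // A.coeffRef(1,2) += 1.0;     // in-place update of the existing entry
    // \endcode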
    /** Removes all non zeros but keeps the allocated memory */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));
    }

    /** Preallocates \a reserveSize non zeros.
      * Precondition: the matrix must be in compressed mode. */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
      m_data.reserve(reserveSize);
    }
    #ifdef EIGEN_PARSED_BY_DOXYGEN
    /** Preallocates \a reserveSizes[\a j] non zeros for each column (resp. row) \a j. */
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
    #else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif =
    #if (!EIGEN_COMP_MSVC) || (EIGEN_COMP_MSVC>=1500) // MSVC 2005 fails to compile with this typename
        typename
    #endif
        SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    #endif // EIGEN_PARSED_BY_DOXYGEN

  protected:
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        Index totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();

        // temporarily use m_innerNonZeros to hold the new starting points
        StorageIndex* newOuterIndex = m_innerNonZeros;

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        StorageIndex previousOuterIndex = m_outerIndex[m_outerSize];
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          StorageIndex innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(Index i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
      else
      {
        StorageIndex* newOuterIndex = static_cast<StorageIndex*>(std::malloc((m_outerSize+1)*sizeof(StorageIndex)));
        if (!newOuterIndex) internal::throw_std_bad_alloc();

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          StorageIndex alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          StorageIndex toReserve = std::max<StorageIndex>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          Index offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            StorageIndex innerNNZ = m_innerNonZeros[j];
            for(Index i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        std::free(newOuterIndex);
      }
    }

  public:
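    // Illustrative usage sketch (not part of the original header): reserving a
    // per-inner-vector budget up front keeps random insertions cheap instead of
    // triggering repeated buffer moves; any container with operator[] over
    // integers works as SizesType, e.g. VectorXi.
    //
    // \code
    // Eigen::SparseMatrix<double> A(1000,1000);       // column-major by default
    // A.reserve(Eigen::VectorXi::Constant(1000, 6));  // ~6 non-zeros per column
    // A.insert(2,3) = 7.0;                            // cheap while the budget holds
    // A.makeCompressed();                             // squeeze out the slack
    // \endcode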
    //--- low level purely coherent filling ---

    /** \internal Appends the (\a row, \a col) coefficient, assuming strictly
      * increasing inner indices within each outer vector. */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \internal Same as insertBack, but in (outer, inner) coordinates */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(Index(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }

    /** \internal Like insertBackByOuterInner, but the inner indices may appear in any order */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }
    /** \internal Marks the beginning of the outer vector \a outer; must be
      * called for each outer vector, in increasing order. */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==Index(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }

    /** \internal Must be called after the last insertBack of the low level
      * filling protocol; fills the trailing outer indices. */
    inline void finalize()
    {
      if(isCompressed())
      {
        StorageIndex size = internal::convert_index<StorageIndex>(m_data.size());
        Index i = m_outerSize;
        // find the last filled outer vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
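    // Illustrative usage sketch (not part of the original header): the low
    // level startVec / insertBack / finalize protocol builds a compressed
    // matrix in a single ordered pass.
    //
    // \code
    // Eigen::SparseMatrix<double> A(3,3);        // column-major: outer = column
    // A.startVec(0); A.insertBack(0,0) = 1.0;
    // A.startVec(1); A.insertBack(1,1) = 2.0;
    // A.startVec(2); A.insertBack(0,2) = 3.0;
    //                A.insertBack(2,2) = 4.0;    // inner indices strictly increase
    // A.finalize();
    // \endcode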
    /** Fills the matrix from the (row, col, value) triplets in [\a begin, \a end),
      * summing duplicate entries. */
    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    /** Same as above, but duplicate entries are combined with the functor \a dup_func. */
    template<typename InputIterators, typename DupFunctor>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func);

    void sumupDuplicates() { collapseDuplicates(internal::scalar_sum_op<Scalar,Scalar>()); }

    template<typename DupFunctor>
    void collapseDuplicates(DupFunctor dup_func = DupFunctor());
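    // Illustrative usage sketch (not part of the original header): the common
    // way to fill a sparse matrix is to collect triplets, in arbitrary order
    // and possibly with duplicates, then call setFromTriplets().
    //
    // \code
    // std::vector<Eigen::Triplet<double> > triplets;
    // triplets.push_back(Eigen::Triplet<double>(0, 1, 3.0));
    // triplets.push_back(Eigen::Triplet<double>(2, 0, -1.0));
    // triplets.push_back(Eigen::Triplet<double>(0, 1, 2.0));  // duplicate: summed by default
    // Eigen::SparseMatrix<double> A(3,3);
    // A.setFromTriplets(triplets.begin(), triplets.end());    // A.coeff(0,1) == 5.0
    // \endcode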
    /** \internal Same as insert(Index,Index), but in (outer, inner) coordinates */
    Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }
    /** Turns the matrix into the \em compressed format: removes the free space
      * between inner vectors and releases m_innerNonZeros. */
    void makeCompressed()
    {
      if(isCompressed())
        return;

      eigen_internal_assert(m_outerIndex!=0 && m_outerSize>0);

      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        Index offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
    }
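    // Illustrative usage sketch (not part of the original header): random
    // insertions switch the matrix to uncompressed mode; makeCompressed()
    // restores the plain CCS/CRS layout that most solvers expect.
    //
    // \code
    // Eigen::SparseMatrix<double> A(10,10);
    // A.insert(3,4) = 1.0;        // A is now in uncompressed mode
    // A.makeCompressed();         // A.isCompressed() is true again
    // \endcode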
    /** Turns the matrix into the uncompressed mode */
    void uncompress()
    {
      if(m_innerNonZeros != 0)
        return;
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      for (Index i = 0; i < m_outerSize; i++)
      {
        m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }
    }
    /** Suppresses all nonzeros which are \em much \em smaller \em than \a reference under the tolerance \a epsilon */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }

    /** Turns the matrix into compressed format, and suppresses all nonzeros
      * for which the functor \a keep returns false. */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      // TODO optimize the uncompressed mode to avoid moving and allocating the data twice
      makeCompressed();

      StorageIndex k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k);
    }
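    // Illustrative usage sketch (not part of the original header), given an
    // Eigen::SparseMatrix<double> A: the (reference, epsilon) overload drops
    // entries that are negligible relative to the reference, while the functor
    // overload accepts any keep-predicate (a C++11 lambda works since KeepFunc
    // is a template parameter).
    //
    // \code
    // A.prune(1.0, 1e-12);   // drop entries negligible w.r.t. reference 1.0
    // A.prune([](const Eigen::Index& row, const Eigen::Index& col, const double&)
    //         { return row >= col; });   // keep only the lower triangular part
    // \endcode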
    /** Resizes the matrix to a \a rows x \a cols matrix while keeping all
      * the coefficients that still fit. */
    void conservativeResize(Index rows, Index cols)
    {
      // If the matrix's size stays the same, there is nothing to do
      if (this->rows() == rows && this->cols() == cols) return;

      // If one dimension is null, then there is nothing to be preserved
      if(rows==0 || cols==0) return resize(rows,cols);

      Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
      Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
      StorageIndex newInnerSize = convert_index(IsRowMajor ? cols : rows);

      // Deal with the inner non zeros
      if (m_innerNonZeros)
      {
        // Resize m_innerNonZeros
        StorageIndex *newInnerNonZeros = static_cast<StorageIndex*>(std::realloc(m_innerNonZeros, (m_outerSize + outerChange) * sizeof(StorageIndex)));
        if (!newInnerNonZeros) internal::throw_std_bad_alloc();
        m_innerNonZeros = newInnerNonZeros;

        for(Index i=m_outerSize; i<m_outerSize+outerChange; i++)
          m_innerNonZeros[i] = 0;
      }
      else if (innerChange < 0)
      {
        // The inner size decreased: allocate a new m_innerNonZeros
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc((m_outerSize+outerChange+1) * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();
        for(Index i = 0; i < m_outerSize; i++)
          m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }

      // Update m_innerNonZeros in case of a decrease of the inner size
      if (m_innerNonZeros && innerChange < 0)
      {
        for(Index i = 0; i < m_outerSize + (std::min)(outerChange, Index(0)); i++)
        {
          StorageIndex &n = m_innerNonZeros[i];
          StorageIndex start = m_outerIndex[i];
          while (n > 0 && m_data.index(start+n-1) >= newInnerSize) --n;
        }
      }

      m_innerSize = newInnerSize;

      // Re-allocate the outer index structure if it changes
      if (outerChange == 0)
        return;

      StorageIndex *newOuterIndex = static_cast<StorageIndex*>(std::realloc(m_outerIndex, (m_outerSize + outerChange + 1) * sizeof(StorageIndex)));
      if (!newOuterIndex) internal::throw_std_bad_alloc();
      m_outerIndex = newOuterIndex;
      if (outerChange > 0)
      {
        StorageIndex last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
        for(Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)
          m_outerIndex[i] = last;
      }
      m_outerSize += outerChange;
    }
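    // Illustrative usage sketch (not part of the original header):
    // conservativeResize() keeps whatever still fits in the new bounds,
    // whereas resize() discards all stored entries.
    //
    // \code
    // Eigen::SparseMatrix<double> A(4,4);
    // A.insert(1,1) = 5.0;
    // A.conservativeResize(3,3);   // the (1,1) entry survives
    // A.resize(3,3);               // A is now empty
    // \endcode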
    /** Resizes the matrix to a \a rows x \a cols matrix and initializes it to zero. */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        std::free(m_outerIndex);
        m_outerIndex = static_cast<StorageIndex*>(std::malloc((outerSize + 1) * sizeof(StorageIndex)));
        if (!m_outerIndex) internal::throw_std_bad_alloc();

        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
    }

    /** \internal Resizes the non zero coefficients buffer only */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }
    /** \returns a const expression of the diagonal coefficients. */
    const ConstDiagonalReturnType diagonal() const { return ConstDiagonalReturnType(*this); }

    /** \returns a read-write expression of the diagonal coefficients.
      * \warning the diagonal coefficients must already exist to be written. */
    DiagonalReturnType diagonal() { return DiagonalReturnType(*this); }
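    // Illustrative usage sketch (not part of the original header): reading the
    // diagonal into a dense vector; implicit zeros appear as 0 in the result.
    //
    // \code
    // Eigen::SparseMatrix<double> A(3,3);
    // A.coeffRef(1,1) = 2.0;
    // Eigen::VectorXd d = A.diagonal();   // d == (0, 2, 0)
    // \endcode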
    /** Default constructor yielding an empty 0 x 0 matrix */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(0, 0);
    }

    /** Constructs a \a rows x \a cols empty matrix */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
      check_template_parameters();
      const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
      if (needToTranspose)
        *this = other.derived();
      else
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        internal::call_assignment_no_alias(*this, other.derived());
      }
    }

    /** Constructs a sparse matrix from the sparse selfadjoint view \a other */
    template<typename OtherDerived, unsigned int UpLo>
    inline SparseMatrix(const SparseSelfAdjointView<OtherDerived, UpLo>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      Base::operator=(other);
    }

    /** Copy constructor (it performs a deep copy) */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** \brief Copy constructor with in-place evaluation */
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      initAssignment(other);
      other.evalTo(*this);
    }

    /** \brief Copy constructor with in-place evaluation */
    template<typename OtherDerived>
    explicit SparseMatrix(const DiagonalBase<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Swaps the content of two sparse matrices of the same type; this is a fast operation. */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      std::swap(m_innerNonZeros, other.m_innerNonZeros);
      m_data.swap(other.m_data);
    }

    /** Sets *this to the identity matrix; this also turns the matrix into compressed mode. */
    inline void setIdentity()
    {
      eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
      this->m_data.resize(rows());
      Eigen::Map<IndexVector>(this->m_data.indexPtr(), rows()).setLinSpaced(0, StorageIndex(rows()-1));
      Eigen::Map<ScalarVector>(this->m_data.valuePtr(), rows()).setOnes();
      Eigen::Map<IndexVector>(this->m_outerIndex, rows()+1).setLinSpaced(0, StorageIndex(rows()));
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
    }
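    // Illustrative usage sketch (not part of the original header):
    //
    // \code
    // Eigen::SparseMatrix<double> I(5,5);
    // I.setIdentity();    // 5 stored entries, all equal to 1, compressed mode
    // \endcode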
    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else if(this!=&other)
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        initAssignment(other);
        if(other.isCompressed())
        {
          internal::smart_copy(other.m_outerIndex, other.m_outerIndex + m_outerSize + 1, m_outerIndex);
          m_data = other.m_data;
        }
        else
        {
          Base::operator=(other);
        }
      }
      return *this;
    }
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
    #endif // EIGEN_PARSED_BY_DOXYGEN

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other);
    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        if(m.isCompressed())
        {
          for (Index i=0; i<m.nonZeros(); ++i)
            s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        }
        else
        {
          for (Index i=0; i<m.outerSize(); ++i)
          {
            Index p = m.m_outerIndex[i];
            Index pe = m.m_outerIndex[i]+m.m_innerNonZeros[i];
            Index k=p;
            for (; k<pe; ++k)
              s << "(" << m.m_data.value(k) << "," << m.m_data.index(k) << ") ";
            for (; k<m.m_outerIndex[i+1]; ++k)
              s << "(_,_) ";
          }
        }
        s << std::endl;
        s << std::endl;
        s << "Outer pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i)
          s << m.m_outerIndex[i] << " ";
        s << " $" << std::endl;
        if(!m.isCompressed())
        {
          s << "Inner non zeros:\n";
          for (Index i=0; i<m.outerSize(); ++i)
            s << m.m_innerNonZeros[i] << " ";
          s << " $" << std::endl;
        }
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }
    /** Destructor */
    inline ~SparseMatrix()
    {
      std::free(m_outerIndex);
      std::free(m_innerNonZeros);
    }
#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif

  protected:

    /** \internal Resizes *this to other's dimensions and frees the inner-nonzeros buffer */
    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
    }

    /** \internal Inserts a new element assuming the matrix is in compressed mode */
    EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col);
    /** \internal A vector object that is equal to 0 everywhere but \a v at position \a i */
    class SingletonVector
    {
        StorageIndex m_index;
        StorageIndex m_value;
      public:
        typedef StorageIndex value_type;
        SingletonVector(Index i, Index v)
          : m_index(convert_index(i)), m_value(convert_index(v))
        {}

        StorageIndex operator[](Index i) const { return i==m_index ? m_value : 0; }
    };

    /** \internal Inserts a new element assuming the matrix is in uncompressed mode */
    EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col);
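    // Illustrative note (not part of the original header): SingletonVector
    // mimics a read-only sizes vector that is zero everywhere except at one
    // position, so insertUncompressed() can reuse reserveInnerVectors() to grow
    // a single inner vector, as in reserve(SingletonVector(outer, 4)), which
    // budgets 4 entries for that vector only.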
  public:
    /** \internal Appends to a non-full inner vector of an uncompressed matrix */
    EIGEN_STRONG_INLINE Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(!isCompressed());
      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
      m_data.index(p) = convert_index(inner);
      return (m_data.value(p) = 0);
    }
  private:
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }
    struct default_prunning_func {
      default_prunning_func(const Scalar& ref, const RealScalar& eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};
namespace internal {

template<typename InputIterator, typename SparseMatrixType, typename DupFunctor>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, DupFunctor dup_func)
{
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::StorageIndex StorageIndex;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor,StorageIndex> trMat(mat.rows(),mat.cols());

  if(begin!=end)
  {
    // pass 1: count the number of non-zeros per inner vector
    typename SparseMatrixType::IndexVector wi(trMat.outerSize());
    wi.setZero();
    for(InputIterator it(begin); it!=end; ++it)
    {
      eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
      wi(IsRowMajor ? it->col() : it->row())++;
    }

    // pass 2: insert all the elements into trMat
    trMat.reserve(wi);
    for(InputIterator it(begin); it!=end; ++it)
      trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

    // pass 3: merge duplicate entries
    trMat.collapseDuplicates(dup_func);
  }

  // pass 4: transposed copy -> implicit sorting
  mat = trMat;
}

} // end namespace internal
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_Index> >(begin, end, *this, internal::scalar_sum_op<Scalar,Scalar>());
}
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators, typename DupFunctor>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_Index>, DupFunctor>(begin, end, *this, dup_func);
}
/** \internal Merges duplicate entries within each inner vector using \a dup_func,
  * then turns the matrix into compressed form. */
template<typename Scalar, int _Options, typename _Index>
template<typename DupFunctor>
void SparseMatrix<Scalar,_Options,_Index>::collapseDuplicates(DupFunctor dup_func)
{
  eigen_assert(!isCompressed());
  // wi(inner_index) holds the position of the first entry with that inner index
  IndexVector wi(innerSize());
  wi.fill(-1);
  StorageIndex count = 0;
  for(Index j=0; j<outerSize(); ++j)
  {
    StorageIndex start = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // this entry was already seen in the current vector => accumulate it
        m_data.value(wi(i)) = dup_func(m_data.value(wi(i)), m_data.value(k));
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;

  // turn the matrix into compressed form
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
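// Illustrative usage sketch (not part of the original header): the DupFunctor
// overload of setFromTriplets() lets colliding triplets be combined with
// something other than a sum, e.g. keeping the maximum, given a triplet list
// like the one in the earlier sketch.
//
// \code
// A.setFromTriplets(triplets.begin(), triplets.end(),
//                   [](const double& a, const double& b) { return std::max(a,b); });
// \endcode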
template<typename Scalar, int _Options, typename _Index>
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_Index>& SparseMatrix<Scalar,_Options,_Index>::operator=(const SparseMatrixBase<OtherDerived>& other)
{
  EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
    YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)

  #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
  #endif

  const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
  if (needToTranspose)
  {
    #ifdef EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
      EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
    #endif
    // two passes algorithm:
    //  1 - compute the number of coeffs per dest inner vector
    //  2 - do the actual copy/eval
    // Since each coeff of the rhs has to be evaluated twice, let's evaluate it if needed
    typedef typename internal::nested_eval<OtherDerived,2,typename internal::plain_matrix_type<OtherDerived>::type >::type OtherCopy;
    typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
    typedef internal::evaluator<_OtherCopy> OtherCopyEval;
    OtherCopy otherCopy(other.derived());
    OtherCopyEval otherCopyEval(otherCopy);

    SparseMatrix dest(other.rows(),other.cols());
    Eigen::Map<IndexVector>(dest.m_outerIndex,dest.outerSize()).setZero();

    // pass 1: count the non-zeros per destination inner vector
    for (Index j=0; j<otherCopy.outerSize(); ++j)
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
        ++dest.m_outerIndex[it.index()];

    // prefix sum
    StorageIndex count = 0;
    IndexVector positions(dest.outerSize());
    for (Index j=0; j<dest.outerSize(); ++j)
    {
      StorageIndex tmp = dest.m_outerIndex[j];
      dest.m_outerIndex[j] = count;
      positions[j] = count;
      count += tmp;
    }
    dest.m_outerIndex[dest.outerSize()] = count;
    // allocate
    dest.m_data.resize(count);
    // pass 2: transposed copy
    for (StorageIndex j=0; j<otherCopy.outerSize(); ++j)
    {
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
      {
        Index pos = positions[it.index()]++;
        dest.m_data.index(pos) = j;
        dest.m_data.value(pos) = it.value();
      }
    }
    this->swap(dest);
    return *this;
  }
  else
  {
    if(other.isRValue())
    {
      initAssignment(other.derived());
    }
    // there is no special optimization
    return Base::operator=(other.derived());
  }
}
template<typename _Scalar, int _Options, typename _Index>
typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insert(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  if(isCompressed())
  {
    if(nonZeros()==0)
    {
      // reserve space if not already done
      if(m_data.allocatedSize()==0)
        m_data.reserve(2*m_innerSize);

      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();

      memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));

      // pack all inner vectors to the end of the pre-allocated space
      // and allocate the entire free space to the first inner vector
      StorageIndex end = convert_index(m_data.allocatedSize());
      for(Index j=1; j<=m_outerSize; ++j)
        m_outerIndex[j] = end;
    }
    else
    {
      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();
      for(Index j=0; j<m_outerSize; ++j)
        m_innerNonZeros[j] = m_outerIndex[j+1]-m_outerIndex[j];
    }
  }

  // check whether we can do a fast "push back" insertion
  Index data_end = m_data.allocatedSize();

  // first case: we are filling a new inner vector which is packed at the end;
  // we assume that all remaining inner vectors are also empty and packed to the end
  if(m_outerIndex[outer]==data_end)
  {
    eigen_internal_assert(m_innerNonZeros[outer]==0);

    // pack previous empty inner vectors to the end of the used space
    // and allocate the entire free space to the current inner vector
    StorageIndex p = convert_index(m_data.size());
    Index j = outer;
    while(j>=0 && m_innerNonZeros[j]==0)
      m_outerIndex[j--] = p;

    // push back the new element
    ++m_innerNonZeros[outer];
    m_data.append(Scalar(0), inner);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      // m_data has been reallocated: move the remaining empty inner vectors
      // back to the end of the (new) free space
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }
    return m_data.value(p);
  }

  // second case: the next inner vector is packed to the end
  // and the current inner vector ends at the used space
  if(m_outerIndex[outer+1]==data_end && m_outerIndex[outer]+m_innerNonZeros[outer]==m_data.size())
  {
    eigen_internal_assert(outer+1==m_outerSize || m_innerNonZeros[outer+1]==0);

    // add space for the new element
    ++m_innerNonZeros[outer];
    m_data.resize(m_data.size()+1);

    // check for reallocation, as above
    if(data_end != m_data.allocatedSize())
    {
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }

    // insert the element at its sorted position
    Index startId = m_outerIndex[outer];
    Index p = m_outerIndex[outer]+m_innerNonZeros[outer]-1;
    while ( (p > startId) && (m_data.index(p-1) > inner) )
    {
      m_data.index(p) = m_data.index(p-1);
      m_data.value(p) = m_data.value(p-1);
      --p;
    }

    m_data.index(p) = convert_index(inner);
    return (m_data.value(p) = 0);
  }

  if(m_data.size() != m_data.allocatedSize())
  {
    // make sure the matrix is compatible with random un-compressed insertion
    m_data.resize(m_data.allocatedSize());
    this->reserveInnerVectors(Array<StorageIndex,Dynamic,1>::Constant(m_outerSize, 2));
  }

  return insertUncompressed(row,col);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertUncompressed(Index row, Index col)
{
  eigen_assert(!isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const StorageIndex inner = convert_index(IsRowMajor ? col : row);

  Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  StorageIndex innerNNZ = m_innerNonZeros[outer];
  if(innerNNZ>=room)
  {
    // this inner vector is full, we need to reallocate the whole buffer
    reserve(SingletonVector(outer,std::max<StorageIndex>(2,innerNNZ)));
  }

  Index startId = m_outerIndex[outer];
  Index p = startId + m_innerNonZeros[outer];
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }
  eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exists, you must call coeffRef to this end");

  m_innerNonZeros[outer]++;

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertCompressed(Index row, Index col)
{
  eigen_assert(isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index previousOuter = outer;
  if (m_outerIndex[outer+1]==0)
  {
    // we start a new inner vector
    while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
    {
      m_outerIndex[previousOuter] = convert_index(m_data.size());
      --previousOuter;
    }
    m_outerIndex[outer+1] = m_outerIndex[outer];
  }

  // here we have to handle the tricky case where the outerIndex array
  // starts with [ 0 0 0 0 0 1 ...] and we are inserting into, e.g., the 2nd inner vector
  bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                && (size_t(m_outerIndex[outer+1]) == m_data.size());

  size_t startId = m_outerIndex[outer];
  size_t p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];

  double reallocRatio = 1;
  if (m_data.allocatedSize()<=m_data.size())
  {
    // if there is no preallocated memory, let's reserve a minimum of 32 elements
    if (m_data.size()==0)
    {
      m_data.reserve(32);
    }
    else
    {
      // we need to reallocate the data; to limit the number of reallocations
      // we use a resize ratio estimated from the current filling ratio
      // (doubles are used to avoid integer overflows)
      double nnzEstimate = double(m_outerIndex[outer])*double(m_outerSize)/double(outer+1);
      reallocRatio = (nnzEstimate-double(m_data.size()))/double(m_data.size());
      // bound the ratio to avoid both many minor reallocations
      // and over-allocating when the matrix is almost full
      reallocRatio = (std::min)((std::max)(reallocRatio,1.5),8.);
    }
  }
  m_data.resize(m_data.size()+1,reallocRatio);

  if (!isLastVec)
  {
    if (previousOuter==-1)
    {
      // oops, wrong guess: let's correct the outer offsets
      for (Index k=0; k<=(outer+1); ++k)
        m_outerIndex[k] = 0;
      Index k=outer+1;
      while(m_outerIndex[k]==0)
        m_outerIndex[k++] = 1;
      while (k<=m_outerSize && m_outerIndex[k]!=0)
        m_outerIndex[k++]++;
      p = 0;
      --k;
      k = m_outerIndex[k]-1;
      while (k>0)
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
    else
    {
      // we are not inserting into the last inner vector: update the outer indices
      Index j = outer+2;
      while (j<=m_outerSize && m_outerIndex[j]!=0)
        m_outerIndex[j++]++;
      --j;
      // and shift the data of the trailing vectors
      Index k = m_outerIndex[j]-1;
      while (k>=Index(p))
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
  }

  // sorted insertion within the target inner vector
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseMatrix<_Scalar,_Options,_Index> >
  : evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > >
{
  typedef evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > > Base;
  typedef SparseMatrix<_Scalar,_Options,_Index> SparseMatrixType;
  evaluator() : Base() {}
  explicit evaluator(const SparseMatrixType &mat) : Base(mat) {}
};

} // end namespace internal
} // end namespace Eigen

#endif // EIGEN_SPARSEMATRIX_H