#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
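// The diagonal of a sparse matrix is returned as a dense expression, hence the
// Dense StorageKind in the specialization below.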
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  typedef SparseMatrix<_Scalar, _Options, _Index> MatrixType;
  typedef typename nested<MatrixType>::type MatrixTypeNested;
  typedef typename remove_reference<MatrixTypeNested>::type _MatrixTypeNested;

  typedef _Scalar Scalar;
  typedef Dense StorageKind;
  typedef MatrixXpr XprKind;

  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = 1,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = 1,
    CoeffReadCost = _MatrixTypeNested::CoeffReadCost*10
  };
};
template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseMatrixBase<SparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, -=)

    using Base::IsRowMajor;
    typedef internal::CompressedStorage<Scalar,Index> Storage;

  protected:
    Index* m_innerNonZeros;   // optional: null if and only if the matrix is in compressed mode
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    inline const Scalar* valuePtr() const { return &m_data.value(0); }
    inline Scalar* valuePtr() { return &m_data.value(0); }

    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }
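    /** \returns the value at position (\a row, \a col), or 0 if the coefficient
      * is not stored explicitly; a binary search is performed within the target
      * inner vector. */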
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, inner);
    }
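    /** \returns a non-const reference to the value at position (\a row, \a col).
      * If the coefficient is not stored yet, it is inserted via insert(row,col),
      * which can be expensive. */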
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non-finalized matrix");
      const Index p = m_data.searchLowerIndex(start,end-1,inner);
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }
      return insertUncompressed(row,col);

    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(Index));
    }

    inline Index nonZeros() const
    {
      if(m_innerNonZeros)
        return innerNonZeros().sum();
      return static_cast<Index>(m_data.size());
    }

    /** Preallocates room for \a reserveSize nonzeros (compressed mode only). */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non-compressed mode.");
      m_data.reserve(reserveSize);
    }
    #ifdef EIGEN_PARSED_BY_DOXYGEN
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
    #else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif = typename SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::Scalar& enableif =
    #if (!defined(_MSC_VER)) || (_MSC_VER>=1500) // MSVC 2005 fails to compile with 'typename' here
        typename
    #endif
        SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    #endif // EIGEN_PARSED_BY_DOXYGEN
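    /** \internal Reserves \a reserveSizes[\c j] extra nonzeros for each inner
      * vector \c j. If the matrix is compressed it is first turned into
      * uncompressed mode; the existing coefficients are then shifted, last
      * inner vector first, so the whole operation is done in place. */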
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        std::size_t totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = new Index[m_outerSize];

        // temporarily use m_innerNonZeros to hold the new beginning of each inner vector
        Index* newOuterIndex = m_innerNonZeros;

        Index count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        std::ptrdiff_t previousOuterIndex = m_outerIndex[m_outerSize];
        for(std::ptrdiff_t j=m_outerSize-1; j>=0; --j)
        {
          std::ptrdiff_t innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(std::ptrdiff_t i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
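      // The matrix is already in uncompressed mode: compute the new beginning
      // of each inner vector into a scratch array, then shift the stored
      // coefficients in place, last inner vector first.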
      else
      {
        Index* newOuterIndex = new Index[m_outerSize+1];
        Index count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          Index alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          Index toReserve = std::max<std::ptrdiff_t>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(std::ptrdiff_t j=m_outerSize-1; j>=0; --j)
        {
          std::ptrdiff_t offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            std::ptrdiff_t innerNNZ = m_innerNonZeros[j];
            for(std::ptrdiff_t i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        delete[] newOuterIndex;
      }
    }
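    /** \internal Appends a nonzero at the end of the storage; the caller must
      * ensure entries are inserted in increasing (outer, inner) order. */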
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    // Same as insertBackByOuterInner but without the ordered-insertion checks.
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==int(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }
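    /** \internal Must be called once all insertions via startVec()/insertBack()
      * are done; it fills the remaining outer indices so that trailing empty
      * inner vectors get a valid range. */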
    inline void finalize()
    {
      if(isCompressed())
      {
        Index size = static_cast<Index>(m_data.size());
        Index i = m_outerSize;
        // find the last filled inner vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
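    /** Fills \c *this with the list of triplets defined by the iterator range
      * \a begin - \a end; duplicated entries are summed up. A typical usage
      * sketch (the names below are illustrative only):
      * \code
      * std::vector< Eigen::Triplet<double> > tripletList;
      * tripletList.push_back(Eigen::Triplet<double>(i, j, v_ij));
      * SparseMatrix<double> mat(rows, cols);
      * mat.setFromTriplets(tripletList.begin(), tripletList.end());
      * \endcode
      */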
    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    void sumupDuplicates();

    /** \internal */
    EIGEN_DONT_INLINE Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }
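    /** Turns the matrix into the compressed (CCS/CRS) scheme: the free space
      * tracked by m_innerNonZeros is squeezed out in place, then the
      * m_innerNonZeros buffer is released. */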
    void makeCompressed()
    {
      if(isCompressed())
        return;
      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        std::ptrdiff_t offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      delete[] m_innerNonZeros;
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
    }
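    // The scalar overload of prune() forwards to the generic, functor-based
    // overload below, keeping only the entries that are not much smaller than
    // the given reference value: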
      prune(default_prunning_func(reference,epsilon));
    }

    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      makeCompressed();
      Index k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
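    /** Resizes the matrix to \a rows x \a cols and removes all existing
      * entries; the outer index buffer is reallocated only if the outer
      * dimension changed. */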
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        delete[] m_outerIndex;
        m_outerIndex = new Index [outerSize+1];
        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        delete[] m_innerNonZeros;
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** \internal */
    void resizeNonZeros(Index size) { m_data.resize(size); }
    // Default constructor: yields an empty 0x0 matrix.
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    { check_template_parameters(); resize(0, 0); }

    // Constructs a rows x cols empty matrix.
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    { check_template_parameters(); resize(rows, cols); }

    // Constructs a sparse matrix from the sparse expression \a other.
    template<typename OtherDerived>
    SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    { check_template_parameters(); *this = other.derived(); }

    // Copy constructor (performs a deep copy).
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    { check_template_parameters(); *this = other.derived(); }

    // Copy-like constructor for expressions returned by value, with in-place evaluation.
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    { check_template_parameters(); initAssignment(other); other.evalTo(*this); }
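    /** Swaps the content of two sparse matrices in constant time by exchanging
      * the internal buffers and sizes. */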
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      std::swap(m_innerNonZeros, other.m_innerNonZeros);
      m_data.swap(other.m_data);
    }

    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
        swap(other.const_cast_derived());
      else
      {
        initAssignment(other);
        if(other.isCompressed())
        {
          memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
          m_data = other.m_data;
        }
        else
          Base::operator=(other);
      }
      return *this;
    }
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseMatrix& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    { return Base::operator=(product); }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const ReturnByValue<OtherDerived>& other)
    {
      initAssignment(other);
      return Base::operator=(other.derived());
    }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
    #endif // EIGEN_PARSED_BY_DOXYGEN
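    // Generic assignment from another sparse expression. If the storage orders
    // differ, a two-pass algorithm is used: pass 1 counts the nonzeros of each
    // destination inner vector, a prefix sum turns the counts into positions,
    // and pass 2 writes every coefficient to its final place.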
    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
      if (needToTranspose)
      {
        // each coefficient of the rhs has to be visited twice, so evaluate it
        // into a temporary if needed
        typedef typename internal::nested<OtherDerived,2>::type OtherCopy;
        typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
        OtherCopy otherCopy(other.derived());

        SparseMatrix dest(other.rows(),other.cols());

        // pass 1: count the nonzeros of each destination inner vector
        for (Index j=0; j<otherCopy.outerSize(); ++j)
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
            ++dest.m_outerIndex[it.index()];

        // prefix sum
        Index count = 0;
        VectorXi positions(dest.outerSize());
        for (Index j=0; j<dest.outerSize(); ++j)
        {
          Index tmp = dest.m_outerIndex[j];
          dest.m_outerIndex[j] = count;
          positions[j] = count;
          count += tmp;
        }
        dest.m_outerIndex[dest.outerSize()] = count;
        dest.m_data.resize(count);

        // pass 2: copy each coefficient to its final position
        for (Index j=0; j<otherCopy.outerSize(); ++j)
        {
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
          {
            Index pos = positions[it.index()]++;
            dest.m_data.index(pos) = j;
            dest.m_data.value(pos) = it.value();
          }
        }
        this->swap(dest);
        return *this;
      }
      else
      {
        // same storage order: no special optimization needed
        initAssignment(other.derived());
        return Base::operator=(other.derived());
      }
    }
    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      s << "Nonzero entries:\n";
      if(m.isCompressed())
        for (Index i=0; i<m.nonZeros(); ++i)
          s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      else
        for (Index i=0; i<m.outerSize(); ++i)
        {
          int p = m.m_outerIndex[i];
          int pe = m.m_outerIndex[i]+m.m_innerNonZeros[i];
          Index k=p;
          for (; k<pe; ++k)
            s << "(" << m.m_data.value(k) << "," << m.m_data.index(k) << ") ";
          for (; k<m.m_outerIndex[i+1]; ++k)
            s << "(_,_) ";
        }
      s << std::endl;
      s << "Outer pointers:\n";
      for (Index i=0; i<m.outerSize(); ++i)
        s << m.m_outerIndex[i] << " ";
      s << " $" << std::endl;
      if(!m.isCompressed())
      {
        s << "Inner non zeros:\n";
        for (Index i=0; i<m.outerSize(); ++i)
          s << m.m_innerNonZeros[i] << " ";
        s << " $" << std::endl;
      }
      s << std::endl;
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }
    inline ~SparseMatrix()
    {
      delete[] m_outerIndex;
      delete[] m_innerNonZeros;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif
    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        delete[] m_innerNonZeros;
        m_innerNonZeros = 0;
      }
    }
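    /** \internal Insertion while in compressed mode. In the worst case every
      * stored coefficient after the insertion point is shifted; when the
      * buffer is full it grows by a ratio estimated from the current filling
      * ratio and clamped to [1.5, 8] to amortize repeated insertions. */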
    EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col)
    {
      eigen_assert(isCompressed());
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index previousOuter = outer;
      if (m_outerIndex[outer+1]==0)
      {
        // we are starting a new inner vector
        while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
        {
          m_outerIndex[previousOuter] = static_cast<Index>(m_data.size());
          --previousOuter;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }

      // tricky case: the outer index array may start like [0 0 0 0 1 ...]
      // while we are inserting into one of the leading empty vectors
      bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                    && (size_t(m_outerIndex[outer+1]) == m_data.size());

      size_t startId = m_outerIndex[outer];
      size_t p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      float reallocRatio = 1;
      if (m_data.allocatedSize()<=m_data.size())
      {
        if (m_data.size()==0)
          m_data.reserve(32);
        else
        {
          // estimate the final filling ratio to pick a growth factor, and
          // clamp it to avoid both many small and overly large reallocations
          float nnzEstimate = float(m_outerIndex[outer])*float(m_outerSize)/float(outer+1);
          reallocRatio = (nnzEstimate-float(m_data.size()))/float(m_data.size());
          reallocRatio = (std::min)((std::max)(reallocRatio,1.5f),8.f);
        }
      }
      m_data.resize(m_data.size()+1,reallocRatio);

      if(!isLastVec)
      {
        if (previousOuter==-1)
        {
          // oops, wrong guess: correct the outer offsets
          for (Index k=0; k<=(outer+1); ++k)
            m_outerIndex[k] = 0;
          Index k=outer+1;
          while(m_outerIndex[k]==0)
            m_outerIndex[k++] = 1;
          while (k<=m_outerSize && m_outerIndex[k]!=0)
            m_outerIndex[k++]++;
          p = 0;
          --k;
          k = m_outerIndex[k]-1;
          while (k>0)
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
        else
        {
          // update the outer indices of the following vectors and shift their data
          Index j = outer+2;
          while (j<=m_outerSize && m_outerIndex[j]!=0)
            m_outerIndex[j++]++;
          --j;
          Index k = m_outerIndex[j]-1;
          while (k>=Index(p))
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
      }

      // finally, move the nonzeros of the target vector that are greater than 'inner'
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }
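    // Vector-like helper reporting value v at index i and 0 elsewhere; it lets
    // insertUncompressed() grow a single inner vector through the generic
    // reserve(SizesType) interface.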
    class SingletonVector
    {
        Index m_index;
        Index m_value;
      public:
        typedef Index value_type;
        SingletonVector(Index i, Index v)
          : m_index(i), m_value(v)
        {}

        Index operator[](Index i) const { return i==m_index ? m_value : 0; }
    };
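    /** \internal Insertion while in uncompressed mode: only the target inner
      * vector is shifted, and reserve(SingletonVector(...)) is used to enlarge
      * it when its reserved room is exhausted. */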
    EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col)
    {
      eigen_assert(!isCompressed());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      std::ptrdiff_t room = m_outerIndex[outer+1] - m_outerIndex[outer];
      std::ptrdiff_t innerNNZ = m_innerNonZeros[outer];
      if(innerNNZ>=room)
      {
        // this inner vector is full: reallocate the whole buffer
        reserve(SingletonVector(outer,std::max<std::ptrdiff_t>(2,innerNNZ)));
      }

      Index startId = m_outerIndex[outer];
      Index p = startId + m_innerNonZeros[outer];
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }
      eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exists; call coeffRef instead");

      m_innerNonZeros[outer]++;

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }
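    /** \internal Appends at the end of one inner vector in uncompressed mode;
      * asserts that the vector still has reserved room. */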
    inline Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer];
      m_innerNonZeros[outer]++;
      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
    }

    struct default_prunning_func {
      default_prunning_func(Scalar ref, RealScalar eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
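// Iterator over the nonzeros of a single inner vector. In uncompressed mode the
// end of the range is m_outerIndex[outer] + m_innerNonZeros[outer] rather than
// m_outerIndex[outer+1].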
template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
    {
      if(mat.isCompressed())
        m_end = mat.m_outerIndex[outer+1];
      else
        m_end = m_id + mat.m_innerNonZeros[outer];
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    Index m_end;
};
template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
    {
      if(mat.isCompressed())
        m_id = mat.m_outerIndex[outer+1];
      else
        m_id = m_start + mat.m_innerNonZeros[outer];
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline Index index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    const Index m_start;
};
namespace internal {
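// Shared implementation of SparseMatrix::setFromTriplets(): fill a temporary
// matrix with the opposite storage order, collapse duplicates, and let the
// final transposing assignment (mat = trMat) sort each inner vector.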
template<typename InputIterator, typename SparseMatrixType>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, int Options = 0)
{
  EIGEN_UNUSED_VARIABLE(Options);
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::Index Index;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor> trMat(mat.rows(),mat.cols());

  // pass 1: count the number of nonzeros per inner vector
  VectorXi wi(trMat.outerSize());
  wi.setZero();
  for(InputIterator it(begin); it!=end; ++it)
    wi(IsRowMajor ? it->col() : it->row())++;

  // pass 2: insert all the elements into trMat
  trMat.reserve(wi);
  for(InputIterator it(begin); it!=end; ++it)
    trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

  // pass 3: collapse duplicate entries
  trMat.sumupDuplicates();

  // pass 4: the transposing assignment sorts each inner vector
  mat = trMat;
}
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets(begin, end, *this);
}
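// Sums up duplicate (outer, inner) entries in place; wi maps each inner index
// to the position of its first occurrence within the current inner vector. The
// matrix is left in compressed mode.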
template<typename Scalar, int _Options, typename _Index>
void SparseMatrix<Scalar,_Options,_Index>::sumupDuplicates()
{
  eigen_assert(!isCompressed());
  VectorXi wi(innerSize());
  wi.fill(-1);
  Index count = 0;
  for(int j=0; j<outerSize(); ++j)
  {
    Index start  = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // entry already met in this inner vector: accumulate it
        m_data.value(wi(i)) += m_data.value(k);
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;
  // the matrix is now in compressed mode
  delete[] m_innerNonZeros;
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
#endif // EIGEN_SPARSEMATRIX_H