10 #ifndef EIGEN_SPARSE_COMPRESSED_BASE_H 11 #define EIGEN_SPARSE_COMPRESSED_BASE_H 15 template<
typename Derived>
class SparseCompressedBase;
19 template<
typename Derived>
20 struct traits<SparseCompressedBase<Derived> > : traits<Derived>
25 template<
typename Derived>
26 class SparseCompressedBase
27 :
public SparseMatrixBase<Derived>
30 typedef SparseMatrixBase<Derived> Base;
31 EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
32 using Base::operator=;
33 using Base::IsRowMajor;
36 class ReverseInnerIterator;
39 typedef typename Base::IndexVector IndexVector;
46 inline Index nonZeros()
const 48 if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
49 return derived().nonZeros();
50 else if(isCompressed())
51 return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
52 else if(derived().outerSize()==0)
55 return innerNonZeros().sum();
61 inline const Scalar* valuePtr()
const {
return derived().valuePtr(); }
65 inline Scalar* valuePtr() {
return derived().valuePtr(); }
70 inline const StorageIndex* innerIndexPtr()
const {
return derived().innerIndexPtr(); }
74 inline StorageIndex* innerIndexPtr() {
return derived().innerIndexPtr(); }
80 inline const StorageIndex* outerIndexPtr()
const {
return derived().outerIndexPtr(); }
85 inline StorageIndex* outerIndexPtr() {
return derived().outerIndexPtr(); }
90 inline const StorageIndex* innerNonZeroPtr()
const {
return derived().innerNonZeroPtr(); }
94 inline StorageIndex* innerNonZeroPtr() {
return derived().innerNonZeroPtr(); }
97 inline bool isCompressed()
const {
return innerNonZeroPtr()==0; }
101 SparseCompressedBase() {}
103 template<
typename OtherDerived>
explicit SparseCompressedBase(
const SparseCompressedBase<OtherDerived>&);
106 template<
typename Derived>
107 class SparseCompressedBase<Derived>::InnerIterator
110 InnerIterator(
const SparseCompressedBase& mat, Index outer)
111 : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
113 if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
116 m_end = mat.nonZeros();
120 m_id = mat.outerIndexPtr()[outer];
121 if(mat.isCompressed())
122 m_end = mat.outerIndexPtr()[outer+1];
124 m_end = m_id + mat.innerNonZeroPtr()[outer];
128 explicit InnerIterator(
const SparseCompressedBase& mat)
129 : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
131 EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
134 explicit InnerIterator(
const internal::CompressedStorage<Scalar,StorageIndex>& data)
135 : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_id(0), m_end(data.size())
137 EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
140 inline InnerIterator& operator++() { m_id++;
return *
this; }
142 inline const Scalar& value()
const {
return m_values[m_id]; }
143 inline Scalar& valueRef() {
return const_cast<Scalar&
>(m_values[m_id]); }
145 inline StorageIndex index()
const {
return m_indices[m_id]; }
146 inline Index outer()
const {
return m_outer.value(); }
147 inline Index row()
const {
return IsRowMajor ? m_outer.value() : index(); }
148 inline Index col()
const {
return IsRowMajor ? index() : m_outer.value(); }
150 inline operator bool()
const {
return (m_id < m_end); }
153 const Scalar* m_values;
154 const StorageIndex* m_indices;
155 const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
162 template<
typename T> InnerIterator(
const SparseMatrixBase<T>&, Index outer);
165 template<
typename Derived>
166 class SparseCompressedBase<Derived>::ReverseInnerIterator
169 ReverseInnerIterator(
const SparseCompressedBase& mat, Index outer)
170 : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
172 if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
175 m_id = mat.nonZeros();
179 m_start.value() = mat.outerIndexPtr()[outer];
180 if(mat.isCompressed())
181 m_id = mat.outerIndexPtr()[outer+1];
183 m_id = m_start.value() + mat.innerNonZeroPtr()[outer];
187 explicit ReverseInnerIterator(
const SparseCompressedBase& mat)
188 : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
190 EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
193 explicit ReverseInnerIterator(
const internal::CompressedStorage<Scalar,StorageIndex>& data)
194 : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_start(0), m_id(data.size())
196 EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
199 inline ReverseInnerIterator& operator--() { --m_id;
return *
this; }
201 inline const Scalar& value()
const {
return m_values[m_id-1]; }
202 inline Scalar& valueRef() {
return const_cast<Scalar&
>(m_values[m_id-1]); }
204 inline StorageIndex index()
const {
return m_indices[m_id-1]; }
205 inline Index outer()
const {
return m_outer.value(); }
206 inline Index row()
const {
return IsRowMajor ? m_outer.value() : index(); }
207 inline Index col()
const {
return IsRowMajor ? index() : m_outer.value(); }
209 inline operator bool()
const {
return (m_id > m_start.value()); }
212 const Scalar* m_values;
213 const StorageIndex* m_indices;
214 const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
216 const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_start;
221 template<
typename Derived>
222 struct evaluator<SparseCompressedBase<Derived> >
223 : evaluator_base<Derived>
225 typedef typename Derived::Scalar Scalar;
226 typedef typename Derived::InnerIterator InnerIterator;
227 typedef typename Derived::ReverseInnerIterator ReverseInnerIterator;
230 CoeffReadCost = NumTraits<Scalar>::ReadCost,
231 Flags = Derived::Flags
234 evaluator() : m_matrix(0)
236 EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
238 explicit evaluator(
const Derived &mat) : m_matrix(&mat)
240 EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
243 inline Index nonZerosEstimate()
const {
244 return m_matrix->nonZeros();
247 operator Derived&() {
return m_matrix->const_cast_derived(); }
248 operator const Derived&()
const {
return *m_matrix; }
250 typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
251 Scalar coeff(Index row, Index col)
const 252 {
return m_matrix->coeff(row,col); }
254 Scalar& coeffRef(Index row, Index col)
256 eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());
258 const Index outer = Derived::IsRowMajor ? row : col;
259 const Index inner = Derived::IsRowMajor ? col : row;
261 Index start = m_matrix->outerIndexPtr()[outer];
262 Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
263 eigen_assert(end>start &&
"you are using a non finalized sparse matrix or written coefficient does not exist");
264 const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end,inner)
265 - m_matrix->innerIndexPtr();
266 eigen_assert((p<end) && (m_matrix->innerIndexPtr()[p]==inner) &&
"written coefficient does not exist");
267 return m_matrix->const_cast_derived().valuePtr()[p];
270 const Derived *m_matrix;
#endif // EIGEN_SPARSE_COMPRESSED_BASE_H