#ifdef DEAL_II_WITH_PETSC
SparseMatrix::SparseMatrix()
  : communicator(MPI_COMM_SELF)
{
  // just like for vectors: since we create an empty matrix, we might as
  // well make it sequential
  const int            m = 0, n = 0, n_nonzero_per_row = 0;
  const PetscErrorCode ierr = MatCreateSeqAIJ(
    PETSC_COMM_SELF, m, n, n_nonzero_per_row, nullptr, &matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));
}
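// Usage note (a sketch, not part of this file; `communicator`, `dsp`,
// `rows_per_process`, `cols_per_process` and `this_process` are assumed to
// exist in the caller): a default-constructed matrix is an empty 0x0
// sequential matrix and only becomes a distributed matrix once one of the
// reinit() overloads below is called, e.g.
//
//   PETScWrappers::MPI::SparseMatrix A;
//   A.reinit(communicator, dsp, rows_per_process, cols_per_process,
//            this_process);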
template <typename SparsityPatternType>
SparseMatrix::SparseMatrix(
  const MPI_Comm &              communicator,
  const SparsityPatternType &   sparsity_pattern,
  const std::vector<size_type> &local_rows_per_process,
  const std::vector<size_type> &local_columns_per_process,
  const unsigned int            this_process,
  const bool                    preset_nonzero_locations)
  : communicator(communicator)
{
  do_reinit(sparsity_pattern,
            local_rows_per_process,
            local_columns_per_process,
            this_process,
            preset_nonzero_locations);
}
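// A minimal usage sketch (the names dsp, rows_per_process and
// cols_per_process are assumptions of this example, not of this file):
// every process must pass identical per-process row/column counts, one
// entry per MPI rank, e.g. gathered with Utilities::MPI::all_gather:
//
//   const unsigned int this_process =
//     Utilities::MPI::this_mpi_process(communicator);
//   PETScWrappers::MPI::SparseMatrix A(communicator,
//                                      dsp,
//                                      rows_per_process,
//                                      cols_per_process,
//                                      this_process);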
void
SparseMatrix::copy_from(const SparseMatrix &other)
{
  if (&other == this)
    return;

  this->communicator = other.communicator;

  PetscErrorCode ierr = destroy_matrix(matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  // duplicate the layout of the other matrix, then copy its values
  ierr = MatDuplicate(other.matrix, MAT_DO_NOT_COPY_VALUES, &matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatCopy(other.matrix, matrix, SAME_NONZERO_PATTERN);
  AssertThrow(ierr == 0, ExcPETScError(ierr));
}
template <typename SparsityPatternType>
void
SparseMatrix::reinit(
  const MPI_Comm &              communicator,
  const SparsityPatternType &   sparsity_pattern,
  const std::vector<size_type> &local_rows_per_process,
  const std::vector<size_type> &local_columns_per_process,
  const unsigned int            this_process,
  const bool                    preset_nonzero_locations)
{
  this->communicator = communicator;

  // get rid of the old matrix and generate a new one
  const PetscErrorCode ierr = destroy_matrix(matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  do_reinit(sparsity_pattern,
            local_rows_per_process,
            local_columns_per_process,
            this_process,
            preset_nonzero_locations);
}
template <typename SparsityPatternType>
void
SparseMatrix::reinit(const IndexSet &           local_rows,
                     const IndexSet &           local_columns,
                     const SparsityPatternType &sparsity_pattern,
                     const MPI_Comm &           communicator)
{
  this->communicator = communicator;

  // get rid of the old matrix and generate a new one
  const PetscErrorCode ierr = destroy_matrix(matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  do_reinit(local_rows, local_columns, sparsity_pattern);
}
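// Usage sketch for the IndexSet-based overload (illustrative only;
// `dof_handler`, `dsp` and `A` are assumptions of this example). The two
// index sets must describe contiguous, ascending one-to-one partitions,
// as asserted in do_reinit() below:
//
//   const IndexSet locally_owned = dof_handler.locally_owned_dofs();
//   A.reinit(locally_owned, locally_owned, dsp, communicator);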
template <typename SparsityPatternType>
void
SparseMatrix::do_reinit(const IndexSet &           local_rows,
                        const IndexSet &           local_columns,
                        const SparsityPatternType &sparsity_pattern)
{
  Assert(sparsity_pattern.n_rows() == local_rows.size(),
         ExcMessage(
           "SparsityPattern and IndexSet have different number of rows"));
  Assert(sparsity_pattern.n_cols() == local_columns.size(),
         ExcMessage(
           "SparsityPattern and IndexSet have different number of columns"));
  Assert(local_rows.is_contiguous() && local_columns.is_contiguous(),
         ExcMessage("PETSc only supports contiguous row/column ranges"));
  Assert(local_rows.is_ascending_and_one_to_one(communicator),
         ExcNotImplemented());
#  ifdef DEBUG
  {
    // check the index sets: each row and each column has to be owned by
    // exactly one process
    const size_type row_owners =
      Utilities::MPI::sum(local_rows.n_elements(), communicator);
    const size_type col_owners =
      Utilities::MPI::sum(local_columns.n_elements(), communicator);
    Assert(row_owners == sparsity_pattern.n_rows(),
           ExcMessage(
             std::string(
               "Each row has to be owned by exactly one owner (n_rows()=") +
             std::to_string(sparsity_pattern.n_rows()) +
             " but sum(local_rows.n_elements())=" +
             std::to_string(row_owners) + ")"));
    Assert(col_owners == sparsity_pattern.n_cols(),
           ExcMessage(
             std::string(
               "Each column has to be owned by exactly one owner (n_cols()=") +
             std::to_string(sparsity_pattern.n_cols()) +
             " but sum(local_columns.n_elements())=" +
             std::to_string(col_owners) + ")"));
  }
#  endif
  // create the matrix; we do not set row lengths here but instead hand
  // the exact sparsity pattern to PETSc below
  PetscErrorCode ierr = MatCreate(communicator, &matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatSetSizes(matrix,
                     local_rows.n_elements(),
                     local_columns.n_elements(),
                     sparsity_pattern.n_rows(),
                     sparsity_pattern.n_cols());
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatSetType(matrix, MATMPIAIJ);
  AssertThrow(ierr == 0, ExcPETScError(ierr));
  // next preset the exact nonzero locations: convert the locally owned
  // rows of the sparsity pattern into CSR format and hand that to PETSc
  if (local_rows.n_elements() > 0)
    {
      const PetscInt local_row_start = local_rows.nth_index_in_set(0);
      const PetscInt local_row_end =
        local_row_start + local_rows.n_elements();

      // set up the row-start and column-number arrays for the rows stored
      // on this processor, with one dummy entry at the end so that PETSc
      // does not read past the end
      std::vector<PetscInt> rowstart_in_window(
        local_row_end - local_row_start + 1, 0);
      std::vector<PetscInt> colnums_in_window;

      {
        unsigned int n_cols = 0;
        for (PetscInt i = local_row_start; i < local_row_end; ++i)
          {
            const PetscInt row_length = sparsity_pattern.row_length(i);
            rowstart_in_window[i + 1 - local_row_start] =
              rowstart_in_window[i - local_row_start] + row_length;
            n_cols += row_length;
          }
        colnums_in_window.resize(n_cols + 1, -1);
      }

      // now copy over the column indices from the sparsity pattern
      {
        PetscInt *ptr = colnums_in_window.data();
        for (PetscInt i = local_row_start; i < local_row_end; ++i)
          for (typename SparsityPatternType::iterator p =
                 sparsity_pattern.begin(i);
               p != sparsity_pattern.end(i);
               ++p, ++ptr)
            *ptr = p->column();
      }

      // then call the PETSc function that summarily allocates these
      // entries
      ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                          rowstart_in_window.data(),
                                          colnums_in_window.data(),
                                          nullptr);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
  else
    {
      // this process owns no rows at all: pass an empty CSR structure
      PetscInt i = 0;
      ierr = MatMPIAIJSetPreallocationCSR(matrix, &i, &i, nullptr);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
  compress(VectorOperation::insert);

  {
    close_matrix(matrix);
    set_keep_zero_rows(matrix);
  }
}
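// To illustrate the CSR layout handed to MatMPIAIJSetPreallocationCSR
// above (a worked example, not code from this file): if this process owns
// rows {3,4} of a pattern whose row 3 has entries in columns {1,3} and
// row 4 in columns {2,3,4}, then
//
//   rowstart_in_window = {0, 2, 5}           // prefix sums of row lengths
//   colnums_in_window  = {1, 3, 2, 3, 4, -1} // -1 is the dummy entry
//
// PETSc interprets the columns of local row i as the half-open range
// [rowstart[i], rowstart[i+1]) of colnums.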
template <typename SparsityPatternType>
void
SparseMatrix::do_reinit(
  const SparsityPatternType &   sparsity_pattern,
  const std::vector<size_type> &local_rows_per_process,
  const std::vector<size_type> &local_columns_per_process,
  const unsigned int            this_process,
  const bool                    preset_nonzero_locations)
{
  Assert(local_rows_per_process.size() == local_columns_per_process.size(),
         ExcDimensionMismatch(local_rows_per_process.size(),
                              local_columns_per_process.size()));
  Assert(this_process < local_rows_per_process.size(), ExcInternalError());
  // for each locally owned row we have to count how many entries of the
  // sparsity pattern lie in the locally owned column range and how many
  // do not; for this, first find out which rows are ours
  size_type local_row_start = 0;
  for (unsigned int p = 0; p < this_process; ++p)
    local_row_start += local_rows_per_process[p];
  const size_type local_row_end =
    local_row_start + local_rows_per_process[this_process];
  // create the matrix; the row lengths are set later via the exact
  // sparsity pattern
  PetscErrorCode ierr = MatCreate(communicator, &matrix);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatSetSizes(matrix,
                     local_rows_per_process[this_process],
                     local_columns_per_process[this_process],
                     sparsity_pattern.n_rows(),
                     sparsity_pattern.n_cols());
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatSetType(matrix, MATMPIAIJ);
  AssertThrow(ierr == 0, ExcPETScError(ierr));
  // if requested, preset the exact nonzero locations given by the
  // sparsity pattern; this does not avoid any memory allocations, but it
  // suppresses some searches when the entries are actually set later
  if (preset_nonzero_locations == true)
    {
      // set up the CSR arrays for the locally stored rows, with one dummy
      // entry at the end so that PETSc does not read past the end
      std::vector<PetscInt> rowstart_in_window(
        local_row_end - local_row_start + 1, 0);
      std::vector<PetscInt> colnums_in_window;

      {
        size_type n_cols = 0;
        for (size_type i = local_row_start; i < local_row_end; ++i)
          {
            const PetscInt row_length = sparsity_pattern.row_length(i);
            rowstart_in_window[i + 1 - local_row_start] =
              rowstart_in_window[i - local_row_start] + row_length;
            n_cols += row_length;
          }
        colnums_in_window.resize(n_cols + 1, -1);
      }

      // now copy over the column indices from the sparsity pattern
      {
        PetscInt *ptr = colnums_in_window.data();
        for (size_type i = local_row_start; i < local_row_end; ++i)
          for (typename SparsityPatternType::iterator p =
                 sparsity_pattern.begin(i);
               p != sparsity_pattern.end(i);
               ++p, ++ptr)
            *ptr = p->column();
      }

      // then call the PETSc function that summarily allocates these
      // entries
      ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                          rowstart_in_window.data(),
                                          colnums_in_window.data(),
                                          nullptr);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      compress(VectorOperation::insert);
    }
}
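// Design note with a small sketch (illustrative; the indices and value
// are made up): with preset_nonzero_locations == true the nonzero
// structure is fixed after this call, so subsequent set()/add() calls
// should only touch locations that are part of the sparsity pattern:
//
//   A.set(i, j, 1.0); // fine if (i,j) is in the sparsity pattern
//   A.compress(VectorOperation::insert);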
#  ifndef DOXYGEN
// explicit instantiations for the two supported sparsity pattern types
template SparseMatrix::SparseMatrix(const MPI_Comm &,
                                    const SparsityPattern &,
                                    const std::vector<size_type> &,
                                    const std::vector<size_type> &,
                                    const unsigned int,
                                    const bool);
template SparseMatrix::SparseMatrix(const MPI_Comm &,
                                    const DynamicSparsityPattern &,
                                    const std::vector<size_type> &,
                                    const std::vector<size_type> &,
                                    const unsigned int,
                                    const bool);

template void
SparseMatrix::reinit(const MPI_Comm &,
                     const SparsityPattern &,
                     const std::vector<size_type> &,
                     const std::vector<size_type> &,
                     const unsigned int,
                     const bool);
template void
SparseMatrix::reinit(const MPI_Comm &,
                     const DynamicSparsityPattern &,
                     const std::vector<size_type> &,
                     const std::vector<size_type> &,
                     const unsigned int,
                     const bool);

template void
SparseMatrix::do_reinit(const SparsityPattern &,
                        const std::vector<size_type> &,
                        const std::vector<size_type> &,
                        const unsigned int,
                        const bool);
template void
SparseMatrix::do_reinit(const DynamicSparsityPattern &,
                        const std::vector<size_type> &,
                        const std::vector<size_type> &,
                        const unsigned int,
                        const bool);
#  endif
IndexSet
SparseMatrix::locally_owned_domain_indices() const
{
  PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
  PetscErrorCode ierr;

  ierr = MatGetSize(matrix, &n_rows, &n_cols);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatGetOwnershipRangeColumn(matrix, &min, &max);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  Assert(n_loc_cols == max - min,
         ExcMessage(
           "PETSc is requiring non-contiguous memory allocation."));

  IndexSet indices(n_cols);
  indices.add_range(min, max);
  indices.compress();

  return indices;
}
IndexSet
SparseMatrix::locally_owned_range_indices() const
{
  PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
  PetscErrorCode ierr;

  ierr = MatGetSize(matrix, &n_rows, &n_cols);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  ierr = MatGetOwnershipRange(matrix, &min, &max);
  AssertThrow(ierr == 0, ExcPETScError(ierr));

  Assert(n_loc_rows == max - min,
         ExcMessage(
           "PETSc is requiring non-contiguous memory allocation."));

  IndexSet indices(n_rows);
  indices.add_range(min, max);
  indices.compress();

  return indices;
}
#endif // DEAL_II_WITH_PETSC