#include <deal.II/lac/petsc_sparse_matrix.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/base/mpi.h>
#  include <deal.II/lac/dynamic_sparsity_pattern.h>
#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_compatibility.h>
#  include <deal.II/lac/petsc_vector.h>
#  include <deal.II/lac/sparsity_pattern.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    SparseMatrix::SparseMatrix()
      : communicator(MPI_COMM_SELF)
    {
      // just as for vectors: since we create an empty matrix, we can as
      // well make it sequential
      const int            m = 0, n = 0, n_nonzero_per_row = 0;
      const PetscErrorCode ierr = MatCreateSeqAIJ(
        PETSC_COMM_SELF, m, n, n_nonzero_per_row, nullptr, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
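    // A minimal usage sketch for the default constructor above
    // (illustrative, not part of this file): the resulting object is an
    // empty 0 x 0 sequential matrix; a later reinit() call turns it into a
    // distributed one.
    //
    //   PETScWrappers::MPI::SparseMatrix matrix; // empty, on MPI_COMM_SELF
    //   // ... later:
    //   // matrix.reinit(communicator, m, n, local_rows, local_columns,
    //   //               n_nonzero_per_row);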
    SparseMatrix::SparseMatrix(const MPI_Comm &communicator,
                               const size_type m,
                               const size_type n,
                               const size_type local_rows,
                               const size_type local_columns,
                               const size_type n_nonzero_per_row,
                               const bool      is_symmetric,
                               const size_type n_offdiag_nonzero_per_row)
      : communicator(communicator)
    {
      do_reinit(m, n, local_rows, local_columns,
                n_nonzero_per_row, is_symmetric, n_offdiag_nonzero_per_row);
    }
    SparseMatrix::SparseMatrix(
      const MPI_Comm &              communicator,
      const size_type               m,
      const size_type               n,
      const size_type               local_rows,
      const size_type               local_columns,
      const std::vector<size_type> &row_lengths,
      const bool                    is_symmetric,
      const std::vector<size_type> &offdiag_row_lengths)
      : communicator(communicator)
    {
      do_reinit(m, n, local_rows, local_columns,
                row_lengths, is_symmetric, offdiag_row_lengths);
    }
    template <typename SparsityPatternType>
    SparseMatrix::SparseMatrix(
      const MPI_Comm &              communicator,
      const SparsityPatternType &   sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
      : communicator(communicator)
    {
      do_reinit(sparsity_pattern,
                local_rows_per_process,
                local_columns_per_process,
                this_process,
                preset_nonzero_locations);
    }
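    // A minimal usage sketch for the constructor above (illustrative only;
    // `dsp`, `rows_per_process`, and `cols_per_process` are hypothetical
    // names, and error handling is omitted):
    //
    //   DynamicSparsityPattern dsp(n_rows, n_cols);
    //   // ... fill dsp, e.g. via DoFTools::make_sparsity_pattern() ...
    //   const unsigned int my_rank =
    //     Utilities::MPI::this_mpi_process(MPI_COMM_WORLD);
    //   PETScWrappers::MPI::SparseMatrix matrix(
    //     MPI_COMM_WORLD, dsp, rows_per_process, cols_per_process,
    //     my_rank, /*preset_nonzero_locations=*/true);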
    void
    SparseMatrix::reinit(const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      // destroy the old matrix and take over the structure (but not the
      // values) of the other one
      PetscErrorCode ierr = destroy_matrix(matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatDuplicate(other.matrix, MAT_DO_NOT_COPY_VALUES, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
    void
    SparseMatrix::copy_from(const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      // in contrast to reinit(other) above, this also copies the values,
      // assuming both matrices share the same sparsity pattern
      const PetscErrorCode ierr =
        MatCopy(other.matrix, matrix, SAME_NONZERO_PATTERN);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
    void
    SparseMatrix::reinit(const MPI_Comm &communicator,
                         const size_type m,
                         const size_type n,
                         const size_type local_rows,
                         const size_type local_columns,
                         const size_type n_nonzero_per_row,
                         const bool      is_symmetric,
                         const size_type n_offdiag_nonzero_per_row)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix(matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      do_reinit(m, n, local_rows, local_columns,
                n_nonzero_per_row, is_symmetric, n_offdiag_nonzero_per_row);
    }
    void
    SparseMatrix::reinit(const MPI_Comm &              communicator,
                         const size_type               m,
                         const size_type               n,
                         const size_type               local_rows,
                         const size_type               local_columns,
                         const std::vector<size_type> &row_lengths,
                         const bool                    is_symmetric,
                         const std::vector<size_type> &offdiag_row_lengths)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix(matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      do_reinit(m, n, local_rows, local_columns,
                row_lengths, is_symmetric, offdiag_row_lengths);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(
      const MPI_Comm &              communicator,
      const SparsityPatternType &   sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix(matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      do_reinit(sparsity_pattern,
                local_rows_per_process,
                local_columns_per_process,
                this_process,
                preset_nonzero_locations);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(const IndexSet &           local_rows,
                         const IndexSet &           local_columns,
                         const SparsityPatternType &sparsity_pattern,
                         const MPI_Comm &           communicator)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix(matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      do_reinit(local_rows, local_columns, sparsity_pattern);
    }
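    // A minimal usage sketch for the IndexSet-based reinit() above, in the
    // style of deal.II's parallel tutorial programs (illustrative only;
    // `dof_handler` and `dsp` are hypothetical names):
    //
    //   const IndexSet locally_owned = dof_handler.locally_owned_dofs();
    //   DynamicSparsityPattern dsp(dof_handler.n_dofs());
    //   DoFTools::make_sparsity_pattern(dof_handler, dsp);
    //   PETScWrappers::MPI::SparseMatrix system_matrix;
    //   system_matrix.reinit(locally_owned, locally_owned, dsp,
    //                        MPI_COMM_WORLD);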
    void
    SparseMatrix::do_reinit(const size_type m,
                            const size_type n,
                            const size_type local_rows,
                            const size_type local_columns,
                            const size_type n_nonzero_per_row,
                            const bool      is_symmetric,
                            const size_type n_offdiag_nonzero_per_row)
    {
      Assert(local_rows <= m, ExcLocalRowsTooLarge(local_rows, m));

      // use the call sequence that indicates only a maximal number of
      // elements per row for all rows globally
      const PetscErrorCode ierr = MatCreateAIJ(communicator,
                                               local_rows,
                                               local_columns,
                                               m,
                                               n,
                                               n_nonzero_per_row,
                                               nullptr,
                                               n_offdiag_nonzero_per_row,
                                               nullptr,
                                               &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      set_matrix_option(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option(matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
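    // Note on the preallocation arguments above (illustrative): in PETSc's
    // MATMPIAIJ format each process stores its rows split into a "diagonal"
    // block (columns it also owns) and an "off-diagonal" block (all other
    // columns). n_nonzero_per_row bounds the entries per row in the former,
    // n_offdiag_nonzero_per_row in the latter. For example, on a 2-process
    // split of an 8 x 8 matrix with 4 rows/columns per process, an entry
    // (1,6) set on process 0 counts toward the off-diagonal budget.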
    void
    SparseMatrix::do_reinit(const size_type               m,
                            const size_type               n,
                            const size_type               local_rows,
                            const size_type               local_columns,
                            const std::vector<size_type> &row_lengths,
                            const bool                    is_symmetric,
                            const std::vector<size_type> &offdiag_row_lengths)
    {
      Assert(local_rows <= m, ExcLocalRowsTooLarge(local_rows, m));
      Assert(row_lengths.size() == m,
             ExcDimensionMismatch(row_lengths.size(), m));

      // if a row length exceeds the number of local columns, PETSc's matrix
      // creation fails, so check this up front
      for (size_type i = 0; i < row_lengths.size(); ++i)
        Assert(row_lengths[i] <= local_columns,
               ExcIndexRange(row_lengths[i], 1, local_columns + 1));

      // use the call sequence that indicates a maximal number of elements
      // for each row individually. annoyingly, deal.II uses unsigned
      // integers for cases like this, while PETSc wants to see signed
      // integers, so we have to convert
      const std::vector<PetscInt> int_row_lengths(row_lengths.begin(),
                                                  row_lengths.end());
      const std::vector<PetscInt> int_offdiag_row_lengths(
        offdiag_row_lengths.begin(), offdiag_row_lengths.end());

      const PetscErrorCode ierr =
        MatCreateAIJ(communicator,
                     local_rows,
                     local_columns,
                     m,
                     n,
                     0,
                     int_row_lengths.data(),
                     0,
                     offdiag_row_lengths.size() ?
                       int_offdiag_row_lengths.data() :
                       nullptr,
                     &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      set_matrix_option(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option(matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
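    // Worked example for the per-row preallocation above (illustrative):
    //
    //   std::vector<size_type> row_lengths = {2, 3, 1};
    //   const std::vector<PetscInt> int_row_lengths(row_lengths.begin(),
    //                                               row_lengths.end());
    //   // int_row_lengths now holds {2, 3, 1} as signed PetscInt values
    //
    // PETSc then reserves at most 2, 3, and 1 entries in the diagonal block
    // of the three local rows; passing 0 as the scalar d_nz/o_nz arguments
    // tells PETSc to rely on the arrays alone.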
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit(const IndexSet &           local_rows,
                            const IndexSet &           local_columns,
                            const SparsityPatternType &sparsity_pattern)
    {
      Assert(sparsity_pattern.n_rows() == local_rows.size(),
             ExcMessage(
               "SparsityPattern and IndexSet have different number of rows"));
      Assert(
        sparsity_pattern.n_cols() == local_columns.size(),
        ExcMessage(
          "SparsityPattern and IndexSet have different number of columns"));
      Assert(local_rows.is_contiguous() && local_columns.is_contiguous(),
             ExcMessage("PETSc only supports contiguous row/column ranges"));
      Assert(local_rows.is_ascending_and_one_to_one(communicator),
             ExcNotImplemented());
#  ifdef DEBUG
      {
        // check the index sets: each row and each column has to be owned by
        // exactly one process
        const types::global_dof_index row_owners =
          Utilities::MPI::sum(local_rows.n_elements(), communicator);
        const types::global_dof_index col_owners =
          Utilities::MPI::sum(local_columns.n_elements(), communicator);
        Assert(row_owners == sparsity_pattern.n_rows(),
               ExcMessage(
                 std::string(
                   "Each row has to be owned by exactly one owner (n_rows()=") +
                 std::to_string(sparsity_pattern.n_rows()) +
                 " but sum(local_rows.n_elements())=" +
                 std::to_string(row_owners) + ")"));
        Assert(
          col_owners == sparsity_pattern.n_cols(),
          ExcMessage(
            std::string(
              "Each column has to be owned by exactly one owner (n_cols()=") +
            std::to_string(sparsity_pattern.n_cols()) +
            " but sum(local_columns.n_elements())=" +
            std::to_string(col_owners) + ")"));
      }
#  endif
      // create the matrix. We do not set row lengths here but preallocate
      // from the sparsity pattern below.
      PetscErrorCode ierr = MatCreate(communicator, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetSizes(matrix,
                         local_rows.n_elements(),
                         local_columns.n_elements(),
                         sparsity_pattern.n_rows(),
                         sparsity_pattern.n_cols());
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetType(matrix, MATMPIAIJ);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      // next preset the exact nonzero locations of the matrix. this does
      // not avoid memory allocations, but it at least avoids searches later
      // on; the key is that MatMPIAIJSetPreallocationCSR lets us hand PETSc
      // entire rows at once rather than single entries
      if (local_rows.n_elements() > 0)
        {
          const PetscInt local_row_start = local_rows.nth_index_in_set(0);
          const PetscInt local_row_end =
            local_row_start + local_rows.n_elements();

          // first set up the row-start and column-number arrays for the
          // rows stored on this processor, with one dummy entry at the end
          // to make sure PETSc does not read past the end
          std::vector<PetscInt> rowstart_in_window(
            local_row_end - local_row_start + 1, 0),
            colnums_in_window;
          {
            unsigned int n_cols = 0;
            for (PetscInt i = local_row_start; i < local_row_end; ++i)
              {
                const PetscInt row_length = sparsity_pattern.row_length(i);
                rowstart_in_window[i + 1 - local_row_start] =
                  rowstart_in_window[i - local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize(n_cols + 1, -1);
          }
          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = colnums_in_window.data();
            for (PetscInt i = local_row_start; i < local_row_end; ++i)
              for (typename SparsityPatternType::iterator p =
                     sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i);
                   ++p, ++ptr)
                *ptr = p->column();
          }
          // then call the petsc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
      else
        {
          // this process owns no rows; pass a single zero-initialized
          // row-start entry so PETSc still sees a valid (empty) CSR layout
          PetscInt i = 0;
          ierr = MatMPIAIJSetPreallocationCSR(matrix, &i, &i, nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
      compress(VectorOperation::insert);

      {
        close_matrix(matrix);
        set_keep_zero_rows(matrix);
      }
    }
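    // Worked example of the CSR arrays built above (illustrative): for a
    // local window of two rows whose pattern holds columns {0,1} in the
    // first row and {1,2} in the second,
    //
    //   rowstart_in_window = {0, 2, 4}          // plus the trailing slot
    //   colnums_in_window  = {0, 1, 1, 2, -1}   // -1 is the dummy entry
    //
    // i.e. row i's column indices live in the half-open range
    // [rowstart_in_window[i], rowstart_in_window[i+1]).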
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit(
      const SparsityPatternType &   sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
    {
      Assert(local_rows_per_process.size() == local_columns_per_process.size(),
             ExcDimensionMismatch(local_rows_per_process.size(),
                                  local_columns_per_process.size()));
      Assert(this_process < local_rows_per_process.size(), ExcInternalError());
      assert_is_compressed();
      // for each row that we own locally, we have to count how many of the
      // entries in the sparsity pattern lie in the column range we own, and
      // how many do not. for this, we first have to know which ranges are
      // ours
      size_type local_row_start = 0;
      size_type local_col_start = 0;
      for (unsigned int p = 0; p < this_process; ++p)
        {
          local_row_start += local_rows_per_process[p];
          local_col_start += local_columns_per_process[p];
        }
      const size_type local_row_end =
        local_row_start + local_rows_per_process[this_process];
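      // Worked example for the ownership computation above (illustrative):
      // with local_rows_per_process = {3, 4, 5} and this_process = 1, the
      // loop yields local_row_start = 3, so local_row_end = 3 + 4 = 7 and
      // this process owns the half-open row range [3, 7).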
      // create the matrix. We do not set row lengths here but preallocate
      // from the sparsity pattern below.
      PetscErrorCode ierr = MatCreate(communicator, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetSizes(matrix,
                         local_rows_per_process[this_process],
                         local_columns_per_process[this_process],
                         sparsity_pattern.n_rows(),
                         sparsity_pattern.n_cols());
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetType(matrix, MATMPIAIJ);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      // if requested, preset the exact nonzero locations given by the
      // sparsity pattern. this does not avoid memory allocations, but it
      // saves searches when entries are later written
      if (preset_nonzero_locations == true)
        {
          // first set up the row-start and column-number arrays for the
          // rows stored on this processor, with one dummy entry at the end
          // to make sure PETSc does not read past the end
          std::vector<PetscInt> rowstart_in_window(
            local_row_end - local_row_start + 1, 0),
            colnums_in_window;
          {
            size_type n_cols = 0;
            for (size_type i = local_row_start; i < local_row_end; ++i)
              {
                const size_type row_length = sparsity_pattern.row_length(i);
                rowstart_in_window[i + 1 - local_row_start] =
                  rowstart_in_window[i - local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize(n_cols + 1, -1);
          }
          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = colnums_in_window.data();
            for (size_type i = local_row_start; i < local_row_end; ++i)
              for (typename SparsityPatternType::iterator p =
                     sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i);
                   ++p, ++ptr)
                *ptr = p->column();
          }
          // then call the petsc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));

          compress(VectorOperation::insert);

          {
            close_matrix(matrix);
            set_keep_zero_rows(matrix);
          }
        }
    }
    // explicit instantiations for SparsityPattern and DynamicSparsityPattern
    template SparseMatrix::SparseMatrix(
      const MPI_Comm &, const SparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);
    template SparseMatrix::SparseMatrix(
      const MPI_Comm &, const DynamicSparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);

    template void
    SparseMatrix::reinit(
      const MPI_Comm &, const SparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);
    template void
    SparseMatrix::reinit(
      const MPI_Comm &, const DynamicSparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);

    template void
    SparseMatrix::reinit(const IndexSet &,
                         const IndexSet &,
                         const SparsityPattern &,
                         const MPI_Comm &);
    template void
    SparseMatrix::reinit(const IndexSet &,
                         const IndexSet &,
                         const DynamicSparsityPattern &,
                         const MPI_Comm &);

    template void
    SparseMatrix::do_reinit(
      const SparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);
    template void
    SparseMatrix::do_reinit(
      const DynamicSparsityPattern &,
      const std::vector<size_type> &, const std::vector<size_type> &,
      const unsigned int, const bool);

    template void
    SparseMatrix::do_reinit(const IndexSet &,
                            const IndexSet &,
                            const SparsityPattern &);
    template void
    SparseMatrix::do_reinit(const IndexSet &,
                            const IndexSet &,
                            const DynamicSparsityPattern &);
    IndexSet
    SparseMatrix::locally_owned_domain_indices() const
    {
      PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize(matrix, &n_rows, &n_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatGetOwnershipRangeColumn(matrix, &min, &max);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      Assert(n_loc_cols == max - min,
             ExcMessage(
               "PETSc is requiring non-contiguous memory allocation."));

      IndexSet indices(n_cols);
      indices.add_range(min, max);
      indices.compress();

      return indices;
    }
    IndexSet
    SparseMatrix::locally_owned_range_indices() const
    {
      PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize(matrix, &n_rows, &n_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatGetOwnershipRange(matrix, &min, &max);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      Assert(n_loc_rows == max - min,
             ExcMessage(
               "PETSc is requiring non-contiguous memory allocation."));

      IndexSet indices(n_rows);
      indices.add_range(min, max);
      indices.compress();

      return indices;
    }
  } // namespace MPI
} // namespace PETScWrappers

DEAL_II_NAMESPACE_CLOSE
#endif // DEAL_II_WITH_PETSC