#include <deal.II/lac/petsc_parallel_sparse_matrix.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/base/mpi.h>
#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_compatibility.h>
#  include <deal.II/lac/petsc_parallel_vector.h>
#  include <deal.II/lac/sparsity_pattern.h>
#  include <deal.II/lac/dynamic_sparsity_pattern.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    SparseMatrix::SparseMatrix ()
      : communicator (MPI_COMM_SELF)
    {
      // just as for vectors: since we create an empty matrix, we may as
      // well make it sequential
      const int m = 0, n = 0, n_nonzero_per_row = 0;
      const PetscErrorCode ierr
        = MatCreateSeqAIJ (PETSC_COMM_SELF, m, n, n_nonzero_per_row,
                           nullptr, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
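    // Illustration (not part of the original file): a default-constructed
    // matrix is an empty sequential PETSc matrix, so the usual pattern is to
    // declare it first and size it later through one of the reinit()
    // overloads. The index set, sparsity pattern, and communicator in this
    // sketch are assumed to exist in the calling code:
    //
    //   PETScWrappers::MPI::SparseMatrix system_matrix; // empty, sequential
    //   system_matrix.reinit (locally_owned_dofs, locally_owned_dofs,
    //                         dynamic_sparsity_pattern, mpi_communicator);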
    SparseMatrix::SparseMatrix (const MPI_Comm &communicator,
                                const size_type m, const size_type n,
                                const size_type local_rows,
                                const size_type local_columns,
                                const size_type n_nonzero_per_row,
                                const bool      is_symmetric,
                                const size_type n_offdiag_nonzero_per_row)
      : communicator (communicator)
    {
      do_reinit (m, n, local_rows, local_columns, n_nonzero_per_row,
                 is_symmetric, n_offdiag_nonzero_per_row);
    }
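    // Illustration (not part of the original file): constructing a matrix
    // from global sizes plus a uniform per-row nonzero estimate. All numbers
    // here are made-up example values:
    //
    //   // 1000x1000 matrix, 500 rows/columns on this rank, at most 5
    //   // entries per row, of which at most 2 fall outside the locally
    //   // owned column range
    //   PETScWrappers::MPI::SparseMatrix A (mpi_communicator,
    //                                       1000, 1000, 500, 500,
    //                                       /*n_nonzero_per_row=*/5,
    //                                       /*is_symmetric=*/false,
    //                                       /*n_offdiag_nonzero_per_row=*/2);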
    SparseMatrix::SparseMatrix (const MPI_Comm &communicator,
                                const size_type m, const size_type n,
                                const size_type local_rows,
                                const size_type local_columns,
                                const std::vector<size_type> &row_lengths,
                                const bool is_symmetric,
                                const std::vector<size_type> &offdiag_row_lengths)
      : communicator (communicator)
    {
      do_reinit (m, n, local_rows, local_columns, row_lengths,
                 is_symmetric, offdiag_row_lengths);
    }
    template <typename SparsityPatternType>
    SparseMatrix::SparseMatrix (const MPI_Comm               &communicator,
                                const SparsityPatternType    &sparsity_pattern,
                                const std::vector<size_type> &local_rows_per_process,
                                const std::vector<size_type> &local_columns_per_process,
                                const unsigned int            this_process,
                                const bool                    preset_nonzero_locations)
      : communicator (communicator)
    {
      do_reinit (sparsity_pattern, local_rows_per_process,
                 local_columns_per_process, this_process,
                 preset_nonzero_locations);
    }
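    // Illustration (not part of the original file): every process passes the
    // same global sparsity pattern together with the per-process row/column
    // counts. `n_procs`, `rank`, and the global sizes are assumed to come
    // from the caller, and the sizes are assumed to divide evenly:
    //
    //   std::vector<PETScWrappers::MPI::SparseMatrix::size_type>
    //     rows_per_process (n_procs, n_global_rows / n_procs),
    //     cols_per_process (n_procs, n_global_cols / n_procs);
    //   PETScWrappers::MPI::SparseMatrix A (mpi_communicator, pattern,
    //                                       rows_per_process,
    //                                       cols_per_process, rank,
    //                                       /*preset_nonzero_locations=*/true);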
    void
    SparseMatrix::reinit (const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      // destroy the old matrix and duplicate the nonzero pattern (but not
      // the values) of the other one
      PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatDuplicate (other.matrix, MAT_DO_NOT_COPY_VALUES, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
    void
    SparseMatrix::copy_from (const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      const PetscErrorCode ierr = MatCopy (other.matrix, matrix,
                                           SAME_NONZERO_PATTERN);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
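    // Illustration (not part of the original file): reinit(other) duplicates
    // only the sparsity layout (MAT_DO_NOT_COPY_VALUES), while copy_from()
    // calls MatCopy with SAME_NONZERO_PATTERN and therefore requires the
    // destination to already have the other matrix's layout:
    //
    //   PETScWrappers::MPI::SparseMatrix B;
    //   B.reinit (A);    // same pattern as A, values left at zero
    //   B.copy_from (A); // now also copy A's values into B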
    void
    SparseMatrix::reinit (const MPI_Comm &communicator,
                          const size_type m, const size_type n,
                          const size_type local_rows,
                          const size_type local_columns,
                          const size_type n_nonzero_per_row,
                          const bool      is_symmetric,
                          const size_type n_offdiag_nonzero_per_row)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (m, n, local_rows, local_columns, n_nonzero_per_row,
                 is_symmetric, n_offdiag_nonzero_per_row);
    }
    void
    SparseMatrix::reinit (const MPI_Comm &communicator,
                          const size_type m, const size_type n,
                          const size_type local_rows,
                          const size_type local_columns,
                          const std::vector<size_type> &row_lengths,
                          const bool is_symmetric,
                          const std::vector<size_type> &offdiag_row_lengths)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (m, n, local_rows, local_columns, row_lengths,
                 is_symmetric, offdiag_row_lengths);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit (const MPI_Comm               &communicator,
                          const SparsityPatternType    &sparsity_pattern,
                          const std::vector<size_type> &local_rows_per_process,
                          const std::vector<size_type> &local_columns_per_process,
                          const unsigned int            this_process,
                          const bool                    preset_nonzero_locations)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (sparsity_pattern, local_rows_per_process,
                 local_columns_per_process, this_process,
                 preset_nonzero_locations);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit (const IndexSet            &local_rows,
                          const IndexSet            &local_columns,
                          const SparsityPatternType &sparsity_pattern,
                          const MPI_Comm            &communicator)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (local_rows, local_columns, sparsity_pattern);
    }
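    // Illustration (not part of the original file): this is the overload a
    // typical parallel deal.II program calls once the locally owned index
    // set and a DynamicSparsityPattern are available. `dof_handler` and
    // `mpi_communicator` are assumed to exist in the caller:
    //
    //   const IndexSet owned = dof_handler.locally_owned_dofs();
    //   DynamicSparsityPattern dsp (owned.size());
    //   DoFTools::make_sparsity_pattern (dof_handler, dsp);
    //   PETScWrappers::MPI::SparseMatrix system_matrix;
    //   system_matrix.reinit (owned, owned, dsp, mpi_communicator);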
    void
    SparseMatrix::do_reinit (const size_type m, const size_type n,
                             const size_type local_rows,
                             const size_type local_columns,
                             const size_type n_nonzero_per_row,
                             const bool      is_symmetric,
                             const size_type n_offdiag_nonzero_per_row)
    {
      Assert (local_rows <= m, ExcLocalRowsTooLarge (local_rows, m));

      // use the call sequence that indicates only a maximal number of
      // elements per row for all rows globally
      const PetscErrorCode ierr = MatCreateAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   n_nonzero_per_row, nullptr,
                                   n_offdiag_nonzero_per_row, nullptr,
                                   &matrix);
      set_matrix_option (matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option (matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
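    // Note (illustration, not part of the original file): in MatCreateAIJ
    // the two nonzero counts refer to PETSc's split storage. Entries whose
    // column index falls into the caller's own column range belong to the
    // "diagonal block"; everything else is "off-diagonal". For example, if a
    // rank owns columns [100,200) and one of its rows has entries in columns
    // 5, 120, and 150, that row contributes 2 diagonal-block and 1
    // off-diagonal-block nonzeros to the preallocation counts.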
    void
    SparseMatrix::do_reinit (const size_type m, const size_type n,
                             const size_type local_rows,
                             const size_type local_columns,
                             const std::vector<size_type> &row_lengths,
                             const bool is_symmetric,
                             const std::vector<size_type> &offdiag_row_lengths)
    {
      Assert (local_rows <= m, ExcLocalRowsTooLarge (local_rows, m));

      Assert (row_lengths.size() == m,
              ExcDimensionMismatch (row_lengths.size(), m));

      // if local_columns is smaller than one of the row lengths,
      // MatCreateAIJ throws an error; in that case one has to use a
      // sequential PETScWrappers::SparseMatrix instead
      for (size_type i = 0; i < row_lengths.size(); ++i)
        Assert (row_lengths[i] <= local_columns,
                ExcIndexRange (row_lengths[i], 1, local_columns+1));

      // use the call sequence that indicates a maximal number of elements
      // for each row individually. deal.II stores the row lengths in
      // unsigned integer types while PETSc wants signed PetscInt, so convert:
      const std::vector<PetscInt> int_row_lengths (row_lengths.begin(),
                                                   row_lengths.end());
      const std::vector<PetscInt> int_offdiag_row_lengths (offdiag_row_lengths.begin(),
                                                           offdiag_row_lengths.end());

      const PetscErrorCode ierr = MatCreateAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   0, int_row_lengths.data(),
                                   0,
                                   offdiag_row_lengths.size() ?
                                     int_offdiag_row_lengths.data() : nullptr,
                                   &matrix);
      set_matrix_option (matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option (matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
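    // Illustration (not part of the original file): with the vector-based
    // overload, entry i of row_lengths (and offdiag_row_lengths) bounds the
    // number of diagonal-block (and off-diagonal-block) nonzeros of global
    // row i. For a 1d three-point stencil one might write:
    //
    //   std::vector<PETScWrappers::MPI::SparseMatrix::size_type>
    //     row_lengths (m, 3);                          // <=3 entries per row
    //   row_lengths.front() = row_lengths.back() = 2;  // boundary rows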
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit (const IndexSet            &local_rows,
                             const IndexSet            &local_columns,
                             const SparsityPatternType &sparsity_pattern)
    {
      Assert (sparsity_pattern.n_rows() == local_rows.size(),
              ExcMessage ("SparsityPattern and IndexSet have different number of rows"));
      Assert (sparsity_pattern.n_cols() == local_columns.size(),
              ExcMessage ("SparsityPattern and IndexSet have different number of columns"));
      Assert (local_rows.is_contiguous() && local_columns.is_contiguous(),
              ExcMessage ("PETSc only supports contiguous row/column ranges"));
      Assert (local_rows.is_ascending_and_one_to_one (communicator),
              ExcNotImplemented());

#ifdef DEBUG
      {
        // check that all processes together own every row and every column
        // exactly once
        const size_type row_owners
          = Utilities::MPI::sum (local_rows.n_elements(), communicator);
        const size_type col_owners
          = Utilities::MPI::sum (local_columns.n_elements(), communicator);
        Assert (row_owners == sparsity_pattern.n_rows(),
                ExcMessage (std::string ("Each row has to be owned by exactly one owner (n_rows()=")
                            + Utilities::to_string (sparsity_pattern.n_rows())
                            + " but sum(local_rows.n_elements())="
                            + Utilities::to_string (row_owners) + ")"));
        Assert (col_owners == sparsity_pattern.n_cols(),
                ExcMessage (std::string ("Each column has to be owned by exactly one owner (n_cols()=")
                            + Utilities::to_string (sparsity_pattern.n_cols())
                            + " but sum(local_columns.n_elements())="
                            + Utilities::to_string (col_owners) + ")"));
      }
#endif

      // create the matrix; we do not set row lengths here but provide the
      // correct sparsity pattern below
      PetscErrorCode ierr = MatCreate (communicator, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetSizes (matrix,
                          local_rows.n_elements(),
                          local_columns.n_elements(),
                          sparsity_pattern.n_rows(),
                          sparsity_pattern.n_cols());
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetType (matrix, MATMPIAIJ);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // convert the locally owned part of the sparsity pattern into CSR form
      // and hand it to MatMPIAIJSetPreallocationCSR
      if (local_rows.n_elements() > 0)
        {
          const PetscInt local_row_start = local_rows.nth_index_in_set (0);
          const PetscInt
          local_row_end = local_row_start + local_rows.n_elements();

          // first set up the row-start and column-number arrays for the rows
          // stored on this processor; keep one dummy entry at the end so
          // PETSc does not read past the end
          std::vector<PetscInt>
          rowstart_in_window (local_row_end - local_row_start + 1, 0),
                             colnums_in_window;
          {
            unsigned int n_cols = 0;
            for (PetscInt i = local_row_start; i < local_row_end; ++i)
              {
                const PetscInt row_length = sparsity_pattern.row_length (i);
                rowstart_in_window[i+1-local_row_start]
                  = rowstart_in_window[i-local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize (n_cols+1, -1);
          }

          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = colnums_in_window.data();
            for (PetscInt i = local_row_start; i < local_row_end; ++i)
              for (typename SparsityPatternType::iterator
                   p = sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i); ++p, ++ptr)
                *ptr = p->column();
          }

          // then call the PETSc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR (matrix,
                                               rowstart_in_window.data(),
                                               colnums_in_window.data(),
                                               nullptr);
          AssertThrow (ierr == 0, ExcPETScError (ierr));
        }
      else
        {
          // a process without locally owned rows still has to take part in
          // the collective call, with an empty CSR structure
          PetscInt i = 0;
          ierr = MatMPIAIJSetPreallocationCSR (matrix, &i, &i, nullptr);
          AssertThrow (ierr == 0, ExcPETScError (ierr));
        }
      compress (dealii::VectorOperation::insert);

      {
        close_matrix (matrix);
        set_keep_zero_rows (matrix);
      }
    }
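    // Worked example (illustration, not part of the original file): suppose
    // this rank owns rows 2..3 of a pattern whose row 2 has entries in
    // columns {1,2,3} and row 3 in columns {2,3}. Then
    //
    //   rowstart_in_window = {0, 3, 5}         // prefix sums of row lengths
    //   colnums_in_window  = {1,2,3, 2,3, -1}  // concatenated columns + dummy
    //
    // which is exactly the local CSR layout MatMPIAIJSetPreallocationCSR
    // expects.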
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit (const SparsityPatternType    &sparsity_pattern,
                             const std::vector<size_type> &local_rows_per_process,
                             const std::vector<size_type> &local_columns_per_process,
                             const unsigned int            this_process,
                             const bool                    preset_nonzero_locations)
    {
      Assert (local_rows_per_process.size() == local_columns_per_process.size(),
              ExcDimensionMismatch (local_rows_per_process.size(),
                                    local_columns_per_process.size()));
      Assert (this_process < local_rows_per_process.size(),
              ExcInternalError());

      // determine the range of global rows and columns owned by this process
      size_type local_row_start = 0, local_col_start = 0;
      for (unsigned int p = 0; p < this_process; ++p)
        {
          local_row_start += local_rows_per_process[p];
          local_col_start += local_columns_per_process[p];
        }
      const size_type
      local_row_end = local_row_start + local_rows_per_process[this_process];

      // create the matrix; we do not set row lengths here but provide the
      // correct sparsity pattern below
      PetscErrorCode ierr = MatCreate (communicator, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetSizes (matrix,
                          local_rows_per_process[this_process],
                          local_columns_per_process[this_process],
                          sparsity_pattern.n_rows(),
                          sparsity_pattern.n_cols());
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetType (matrix, MATMPIAIJ);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // if requested, tell PETSc about all nonzero locations up front, which
      // avoids repeated memory allocations while the matrix is filled
      if (preset_nonzero_locations == true)
        {
          // set up the row-start and column-number arrays for the locally
          // stored rows, again with a dummy entry at the end so PETSc does
          // not read past the end
          std::vector<PetscInt>
          rowstart_in_window (local_row_end - local_row_start + 1, 0),
                             colnums_in_window;
          {
            size_type n_cols = 0;
            for (size_type i = local_row_start; i < local_row_end; ++i)
              {
                const size_type row_length = sparsity_pattern.row_length (i);
                rowstart_in_window[i+1-local_row_start]
                  = rowstart_in_window[i-local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize (n_cols+1, -1);
          }

          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = colnums_in_window.data();
            for (size_type i = local_row_start; i < local_row_end; ++i)
              for (typename SparsityPatternType::iterator
                   p = sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i); ++p, ++ptr)
                *ptr = p->column();
          }

          // then call the PETSc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR (matrix,
                                               rowstart_in_window.data(),
                                               colnums_in_window.data(),
                                               nullptr);
          AssertThrow (ierr == 0, ExcPETScError (ierr));

          compress (dealii::VectorOperation::insert);

          {
            close_matrix (matrix);
            set_keep_zero_rows (matrix);
          }
        }
    }
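    // Note (illustration, not part of the original file): with
    // preset_nonzero_locations == false the matrix is created without a
    // preset pattern, and PETSc allocates memory incrementally as entries
    // are inserted. That is far slower for finite element assembly, so one
    // almost always passes true here.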
    // explicit instantiations
    template
    SparseMatrix::SparseMatrix (const MPI_Comm &,
                                const SparsityPattern &,
                                const std::vector<size_type> &,
                                const std::vector<size_type> &,
                                const unsigned int,
                                const bool);
    template
    SparseMatrix::SparseMatrix (const MPI_Comm &,
                                const DynamicSparsityPattern &,
                                const std::vector<size_type> &,
                                const std::vector<size_type> &,
                                const unsigned int,
                                const bool);

    template void
    SparseMatrix::reinit (const MPI_Comm &,
                          const SparsityPattern &,
                          const std::vector<size_type> &,
                          const std::vector<size_type> &,
                          const unsigned int,
                          const bool);
    template void
    SparseMatrix::reinit (const MPI_Comm &,
                          const DynamicSparsityPattern &,
                          const std::vector<size_type> &,
                          const std::vector<size_type> &,
                          const unsigned int,
                          const bool);

    template void
    SparseMatrix::do_reinit (const SparsityPattern &,
                             const std::vector<size_type> &,
                             const std::vector<size_type> &,
                             const unsigned int,
                             const bool);
    template void
    SparseMatrix::do_reinit (const DynamicSparsityPattern &,
                             const std::vector<size_type> &,
                             const std::vector<size_type> &,
                             const unsigned int,
                             const bool);
    IndexSet
    SparseMatrix::locally_owned_domain_indices () const
    {
      PetscInt n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize (matrix, &n_rows, &n_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetLocalSize (matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetOwnershipRangeColumn (matrix, &min, &max);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      Assert (n_loc_cols == max-min,
              ExcMessage ("PETSc is requiring non contiguous memory allocation."));

      IndexSet indices (n_cols);
      indices.add_range (min, max);
      indices.compress ();

      return indices;
    }
    IndexSet
    SparseMatrix::locally_owned_range_indices () const
    {
      PetscInt n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize (matrix, &n_rows, &n_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetLocalSize (matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetOwnershipRange (matrix, &min, &max);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      Assert (n_loc_rows == max-min,
              ExcMessage ("PETSc is requiring non contiguous memory allocation."));

      IndexSet indices (n_rows);
      indices.add_range (min, max);
      indices.compress ();

      return indices;
    }
  }
}


DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC