#ifdef DEAL_II_WITH_PETSC
      const int            m = 0, n = 0, n_nonzero_per_row = 0;
      const PetscErrorCode ierr = MatCreateSeqAIJ(
        PETSC_COMM_SELF, m, n, n_nonzero_per_row, nullptr, &matrix);
      PetscErrorCode ierr = MatDestroy(&matrix);
    template <typename SparsityPatternType>
    SparseMatrix::SparseMatrix(
      const MPI_Comm                communicator,
      const SparsityPatternType    &sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
    {
      do_reinit(communicator,
                sparsity_pattern,
                local_rows_per_process,
                local_columns_per_process,
                this_process,
                preset_nonzero_locations);
    }
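    // A hedged usage sketch (not part of this file): one way user code might
    // drive the constructor above with a hand-built sparsity pattern. The 4x4
    // tridiagonal pattern and the even row split are made-up example values,
    // and the sketch assumes a deal.II build configured with PETSc and MPI.
    //
    // #include <deal.II/base/mpi.h>
    // #include <deal.II/lac/dynamic_sparsity_pattern.h>
    // #include <deal.II/lac/petsc_sparse_matrix.h>
    // #include <algorithm>
    // #include <vector>
    //
    // int main(int argc, char **argv)
    // {
    //   using namespace dealii;
    //   Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);
    //
    //   const MPI_Comm     communicator = MPI_COMM_WORLD;
    //   const unsigned int my_rank = Utilities::MPI::this_mpi_process(communicator);
    //   const unsigned int n_ranks = Utilities::MPI::n_mpi_processes(communicator);
    //
    //   // 4x4 tridiagonal sparsity pattern, built identically on every process.
    //   const unsigned int     n = 4;
    //   DynamicSparsityPattern dsp(n, n);
    //   for (unsigned int i = 0; i < n; ++i)
    //     for (unsigned int j = (i == 0 ? 0 : i - 1); j <= std::min(i + 1, n - 1); ++j)
    //       dsp.add(i, j);
    //
    //   // Rows (and columns) per process: distribute the n rows as evenly as possible.
    //   std::vector<PETScWrappers::MPI::SparseMatrix::size_type> rows_per_process(
    //     n_ranks, n / n_ranks);
    //   for (unsigned int p = 0; p < n % n_ranks; ++p)
    //     ++rows_per_process[p];
    //
    //   PETScWrappers::MPI::SparseMatrix matrix(
    //     communicator, dsp, rows_per_process, rows_per_process, my_rank);
    // }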
    void
    SparseMatrix::copy_from(const SparseMatrix &other)
    {
      PetscErrorCode ierr = MatDestroy(&matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatDuplicate(other.matrix, MAT_DO_NOT_COPY_VALUES, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(const IndexSet &local_rows,
                         const IndexSet &local_active_rows,
                         const IndexSet &local_columns,
                         const IndexSet &local_active_columns,
                         const SparsityPatternType &sparsity_pattern,
                         const MPI_Comm communicator)
    {
      const PetscErrorCode ierr = MatDestroy(&matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      do_reinit(communicator, local_rows, local_active_rows,
                local_columns, local_active_columns, sparsity_pattern);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(
      const MPI_Comm                communicator,
      const SparsityPatternType    &sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
    {
      const PetscErrorCode ierr = MatDestroy(&matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      do_reinit(communicator,
                sparsity_pattern,
                local_rows_per_process,
                local_columns_per_process,
                this_process,
                preset_nonzero_locations);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(const IndexSet &local_rows,
                         const SparsityPatternType &sparsity_pattern,
                         const MPI_Comm communicator)
    {
      do_reinit(communicator, local_rows, local_rows, sparsity_pattern);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit(const IndexSet &local_rows,
                         const IndexSet &local_columns,
                         const SparsityPatternType &sparsity_pattern,
                         const MPI_Comm communicator)
    {
      const PetscErrorCode ierr = MatDestroy(&matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      do_reinit(communicator, local_rows, local_columns, sparsity_pattern);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit(const MPI_Comm             communicator,
                            const IndexSet            &local_rows,
                            const IndexSet            &local_columns,
                            const SparsityPatternType &sparsity_pattern)
    {
      Assert(sparsity_pattern.n_rows() == local_rows.size(),
             ExcMessage(
               "SparsityPattern and IndexSet have different number of rows"));
      Assert(
        sparsity_pattern.n_cols() == local_columns.size(),
        ExcMessage(
          "SparsityPattern and IndexSet have different number of columns"));
      Assert(local_rows.is_contiguous() && local_columns.is_contiguous(),
             ExcMessage(
               "PETSc only supports contiguous row/column ranges"));

      const size_type row_owners =
        Utilities::MPI::sum(local_rows.n_elements(), communicator);
      const size_type col_owners =
        Utilities::MPI::sum(local_columns.n_elements(), communicator);
      Assert(row_owners == sparsity_pattern.n_rows(),
             ExcMessage(
               std::string(
                 "Each row has to be owned by exactly one owner (n_rows()=") +
               std::to_string(sparsity_pattern.n_rows()) +
               " but sum(local_rows.n_elements())=" +
               std::to_string(row_owners) + ")"));
      Assert(
        col_owners == sparsity_pattern.n_cols(),
        ExcMessage(
          std::string(
            "Each column has to be owned by exactly one owner (n_cols()=") +
          std::to_string(sparsity_pattern.n_cols()) +
          " but sum(local_columns.n_elements())=" +
          std::to_string(col_owners) + ")"));

      PetscErrorCode ierr = MatCreate(communicator, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetSizes(matrix,
                         local_rows.n_elements(),
                         local_columns.n_elements(),
                         sparsity_pattern.n_rows(),
                         sparsity_pattern.n_cols());
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetType(matrix, MATAIJ);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      if (local_rows.n_elements() > 0)
        {
          const PetscInt local_row_start = local_rows.nth_index_in_set(0);
          const PetscInt local_row_end =
            local_row_start + local_rows.n_elements();

          std::vector<PetscInt>
            rowstart_in_window(local_row_end - local_row_start + 1, 0),
            colnums_in_window;

          unsigned int n_cols = 0;
          for (PetscInt i = local_row_start; i < local_row_end; ++i)
            {
              const PetscInt row_length = sparsity_pattern.row_length(i);
              rowstart_in_window[i + 1 - local_row_start] =
                rowstart_in_window[i - local_row_start] + row_length;
              n_cols += row_length;
            }
          colnums_in_window.resize(n_cols + 1, -1);

          PetscInt *ptr = colnums_in_window.data();
          for (PetscInt i = local_row_start; i < local_row_end; ++i)
            for (typename SparsityPatternType::iterator p =
                   sparsity_pattern.begin(i);
                 p != sparsity_pattern.end(i);
                 ++p, ++ptr)
              *ptr = p->column();

          ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          ierr = MatSeqAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
      else
        {
          PetscInt i = 0;
          ierr = MatSeqAIJSetPreallocationCSR(matrix, &i, &i, nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          ierr = MatMPIAIJSetPreallocationCSR(matrix, &i, &i, nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
    }
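    // The calls above are plain PETSc; the following self-contained sketch
    // (independent of deal.II) shows the same MatCreate / MatSetSizes /
    // MatSetType / *SetPreallocationCSR sequence for a small serial matrix.
    // The 4x4 tridiagonal CSR arrays and all names are made-up example values,
    // and PetscCall() assumes a recent PETSc (3.18 or newer); the file above
    // uses the older ierr/AssertThrow idiom instead.
    //
    // #include <petscmat.h>
    // #include <vector>
    //
    // int main(int argc, char **argv)
    // {
    //   PetscCall(PetscInitialize(&argc, &argv, nullptr, nullptr));
    //
    //   // CSR description of a 4x4 tridiagonal pattern: row i owns entries
    //   // colnums[rowstart[i] .. rowstart[i+1]-1].
    //   const PetscInt              n        = 4;
    //   const std::vector<PetscInt> rowstart = {0, 2, 5, 8, 10};
    //   const std::vector<PetscInt> colnums  = {0, 1, 0, 1, 2, 1, 2, 3, 2, 3};
    //
    //   Mat matrix;
    //   PetscCall(MatCreate(PETSC_COMM_SELF, &matrix));
    //   PetscCall(MatSetSizes(matrix, n, n, n, n));
    //   PetscCall(MatSetType(matrix, MATAIJ));
    //
    //   // MATAIJ resolves to SEQAIJ on a one-process communicator and to
    //   // MPIAIJ otherwise; the preallocation routine that does not match the
    //   // actual type is a no-op, which is why the file above calls both.
    //   PetscCall(MatSeqAIJSetPreallocationCSR(
    //     matrix, rowstart.data(), colnums.data(), nullptr));
    //   PetscCall(MatMPIAIJSetPreallocationCSR(
    //     matrix, rowstart.data(), colnums.data(), nullptr));
    //
    //   PetscCall(MatDestroy(&matrix));
    //   PetscCall(PetscFinalize());
    //   return 0;
    // }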
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit(
      const MPI_Comm                communicator,
      const SparsityPatternType    &sparsity_pattern,
      const std::vector<size_type> &local_rows_per_process,
      const std::vector<size_type> &local_columns_per_process,
      const unsigned int            this_process,
      const bool                    preset_nonzero_locations)
    {
      Assert(local_rows_per_process.size() == local_columns_per_process.size(),
             ExcDimensionMismatch(local_rows_per_process.size(),
                                  local_columns_per_process.size()));

      size_type local_row_start = 0;
      for (unsigned int p = 0; p < this_process; ++p)
        local_row_start += local_rows_per_process[p];
      const size_type local_row_end =
        local_row_start + local_rows_per_process[this_process];

      PetscErrorCode ierr = MatCreate(communicator, &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetSizes(matrix,
                         local_rows_per_process[this_process],
                         local_columns_per_process[this_process],
                         sparsity_pattern.n_rows(),
                         sparsity_pattern.n_cols());
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatSetType(matrix, MATAIJ);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      if (preset_nonzero_locations == true)
        {
          std::vector<PetscInt>
            rowstart_in_window(local_row_end - local_row_start + 1, 0),
            colnums_in_window;

          unsigned int n_cols = 0;
          for (size_type i = local_row_start; i < local_row_end; ++i)
            {
              const PetscInt row_length = sparsity_pattern.row_length(i);
              rowstart_in_window[i + 1 - local_row_start] =
                rowstart_in_window[i - local_row_start] + row_length;
              n_cols += row_length;
            }
          colnums_in_window.resize(n_cols + 1, -1);

          PetscInt *ptr = colnums_in_window.data();
          for (size_type i = local_row_start; i < local_row_end; ++i)
            for (typename SparsityPatternType::iterator p =
                   sparsity_pattern.begin(i);
                 p != sparsity_pattern.end(i);
                 ++p, ++ptr)
              *ptr = p->column();

          ierr = MatSeqAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          ierr = MatMPIAIJSetPreallocationCSR(matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit(const MPI_Comm             communicator,
                            const IndexSet            &local_rows,
                            const IndexSet            &local_active_rows,
                            const IndexSet            &local_columns,
                            const IndexSet            &local_active_columns,
                            const SparsityPatternType &sparsity_pattern)
    {
#  if DEAL_II_PETSC_VERSION_GTE(3, 10, 0)
      Assert(sparsity_pattern.n_rows() == local_rows.size(),
             ExcMessage(
               "SparsityPattern and IndexSet have different number of rows."));
      Assert(
        sparsity_pattern.n_cols() == local_columns.size(),
        ExcMessage(
          "SparsityPattern and IndexSet have different number of columns"));
      Assert(local_rows.is_contiguous() && local_columns.is_contiguous(),
             ExcMessage(
               "PETSc only supports contiguous row/column ranges"));

      const size_type row_owners =
        Utilities::MPI::sum(local_rows.n_elements(), communicator);
      const size_type col_owners =
        Utilities::MPI::sum(local_columns.n_elements(), communicator);
      Assert(row_owners == sparsity_pattern.n_rows(),
             ExcMessage(
               std::string(
                 "Each row has to be owned by exactly one owner (n_rows()=") +
               std::to_string(sparsity_pattern.n_rows()) +
               " but sum(local_rows.n_elements())=" +
               std::to_string(row_owners) + ")"));
      Assert(
        col_owners == sparsity_pattern.n_cols(),
        ExcMessage(
          std::string(
            "Each column has to be owned by exactly one owner (n_cols()=") +
          std::to_string(sparsity_pattern.n_cols()) +
          " but sum(local_columns.n_elements())=" +
          std::to_string(col_owners) + ")"));

      PetscErrorCode ierr;

      // translate the locally active rows/columns into global index arrays
      const PetscInt n_local_active_rows = local_active_rows.n_elements();
      const PetscInt n_local_active_cols = local_active_columns.n_elements();
      std::vector<PetscInt> idx_glob_row(n_local_active_rows);
      std::vector<PetscInt> idx_glob_col(n_local_active_cols);
      for (PetscInt k = 0; k < n_local_active_rows; ++k)
        idx_glob_row[k] = local_active_rows.nth_index_in_set(k);
      for (PetscInt k = 0; k < n_local_active_cols; ++k)
        idx_glob_col[k] = local_active_columns.nth_index_in_set(k);

      IS is_glob_row, is_glob_col;

      ISLocalToGlobalMapping l2gmap_row;
      ierr = ISCreateGeneral(communicator,
                             n_local_active_rows,
                             idx_glob_row.data(),
                             PETSC_COPY_VALUES,
                             &is_glob_row);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = ISLocalToGlobalMappingCreateIS(is_glob_row, &l2gmap_row);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = ISDestroy(&is_glob_row);
      ierr =
        ISLocalToGlobalMappingViewFromOptions(l2gmap_row, nullptr, "-view_map");

      ISLocalToGlobalMapping l2gmap_col;
      ierr = ISCreateGeneral(communicator,
                             n_local_active_cols,
                             idx_glob_col.data(),
                             PETSC_COPY_VALUES,
                             &is_glob_col);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = ISLocalToGlobalMappingCreateIS(is_glob_col, &l2gmap_col);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = ISDestroy(&is_glob_col);
      ierr =
        ISLocalToGlobalMappingViewFromOptions(l2gmap_col, nullptr, "-view_map");

      ierr = MatCreateIS(communicator,
                         1,
                         local_rows.n_elements(),
                         local_columns.n_elements(),
                         sparsity_pattern.n_rows(),
                         sparsity_pattern.n_cols(),
                         l2gmap_row,
                         l2gmap_col,
                         &matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = ISLocalToGlobalMappingDestroy(&l2gmap_row);
      ierr = ISLocalToGlobalMappingDestroy(&l2gmap_col);

      // preallocate the local (sequential) block of the MATIS matrix
      Mat local_matrix;
      ierr = MatISGetLocalMat(matrix, &local_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = MatSetType(local_matrix, MATSEQAIJ);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      const PetscInt local_row_start = 0;
      const PetscInt local_row_end   = local_active_rows.n_elements();

      if (local_row_end > local_row_start)
        {
          std::vector<PetscInt> rowstart_in_window(local_row_end -
                                                     local_row_start + 1,
                                                   0),
            colnums_in_window;
          unsigned int global_row_index = 0;

          unsigned int n_cols = 0;
          for (PetscInt i = local_row_start; i < local_row_end; ++i)
            {
              global_row_index = local_active_rows.nth_index_in_set(i);
              const PetscInt row_length =
                sparsity_pattern.row_length(global_row_index);
              rowstart_in_window[i + 1 - local_row_start] =
                rowstart_in_window[i - local_row_start] + row_length;
              n_cols += row_length;
            }
          colnums_in_window.resize(n_cols + 1, -1);

          // map global column indices to local (active) column indices
          std::map<unsigned int, unsigned int> loc_act_cols_inv;
          for (unsigned int i = 0; i < local_active_columns.n_elements(); ++i)
            loc_act_cols_inv[local_active_columns.nth_index_in_set(i)] = i;

          PetscInt *ptr = colnums_in_window.data();
          for (PetscInt i = local_row_start; i < local_row_end; ++i)
            {
              global_row_index = local_active_rows.nth_index_in_set(i);
              for (typename SparsityPatternType::iterator p =
                     sparsity_pattern.begin(global_row_index);
                   p != sparsity_pattern.end(global_row_index);
                   ++p, ++ptr)
                *ptr = loc_act_cols_inv[p->column()];
            }

          ierr = MatSeqAIJSetPreallocationCSR(local_matrix,
                                              rowstart_in_window.data(),
                                              colnums_in_window.data(),
                                              nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }
      else
        {
          PetscInt i = 0;
          ierr = MatSeqAIJSetPreallocationCSR(local_matrix, &i, &i, nullptr);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
        }

      ierr = MatISRestoreLocalMat(matrix, &local_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
#  else
      (void)communicator;
      (void)local_rows;
      (void)local_active_rows;
      (void)local_columns;
      (void)local_active_columns;
      (void)sparsity_pattern;
      AssertThrow(false,
                  ExcMessage(
                    "BDDC preconditioner requires PETSc 3.10.0 or newer"));
#  endif
    }
    // explicit template instantiations of the constructor, reinit(), and
    // do_reinit() overloads above follow here; the repeated
    // "const std::vector<size_type> &" arguments belong to these
    // instantiation declarations
    IndexSet
    SparseMatrix::locally_owned_domain_indices() const
    {
      PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr = MatGetSize(matrix, &n_rows, &n_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = MatGetOwnershipRangeColumn(matrix, &min, &max);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      Assert(n_loc_cols == max - min,
             ExcMessage(
               "PETSc is requiring non contiguous memory allocation."));
      IndexSet indices(n_cols);
      indices.add_range(min, max);
      indices.compress();
      return indices;
    }
    IndexSet
    SparseMatrix::locally_owned_range_indices() const
    {
      PetscInt       n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr = MatGetSize(matrix, &n_rows, &n_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = MatGetLocalSize(matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      ierr = MatGetOwnershipRange(matrix, &min, &max);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      Assert(n_loc_rows == max - min,
             ExcMessage(
               "PETSc is requiring non contiguous memory allocation."));
      IndexSet indices(n_rows);
      indices.add_range(min, max);
      indices.compress();
      return indices;
    }