#include <deal.II/lac/petsc_parallel_sparse_matrix.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/base/mpi.h>
#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_compatibility.h>
#  include <deal.II/lac/petsc_vector.h>
#  include <deal.II/lac/sparsity_pattern.h>
#  include <deal.II/lac/dynamic_sparsity_pattern.h>

DEAL_II_NAMESPACE_OPEN
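// This translation unit implements PETScWrappers::MPI::SparseMatrix, the
// deal.II wrapper around PETSc's parallel (MPIAIJ) sparse matrix format.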
namespace PETScWrappers
{
  namespace MPI
  {
    SparseMatrix::SparseMatrix ()
      :
      communicator (MPI_COMM_SELF)
    {
      // since we create an empty matrix, we can just as well make it
      // sequential
      const int m = 0, n = 0, n_nonzero_per_row = 0;
      const PetscErrorCode ierr
        = MatCreateSeqAIJ (PETSC_COMM_SELF, m, n, n_nonzero_per_row,
                           0, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
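    // the remaining constructors only store the communicator and forward
    // their arguments to the matching do_reinit() overload, which performs
    // the actual PETSc matrix creation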
    SparseMatrix::SparseMatrix (const MPI_Comm  &communicator,
                                const size_type  m,
                                const size_type  n,
                                const size_type  local_rows,
                                const size_type  local_columns,
                                const size_type  n_nonzero_per_row,
                                const bool       is_symmetric,
                                const size_type  n_offdiag_nonzero_per_row)
      :
      communicator (communicator)
    {
      do_reinit (m, n, local_rows, local_columns,
                 n_nonzero_per_row, is_symmetric,
                 n_offdiag_nonzero_per_row);
    }
    SparseMatrix::SparseMatrix (const MPI_Comm               &communicator,
                                const size_type               m,
                                const size_type               n,
                                const size_type               local_rows,
                                const size_type               local_columns,
                                const std::vector<size_type> &row_lengths,
                                const bool                    is_symmetric,
                                const std::vector<size_type> &offdiag_row_lengths)
      :
      communicator (communicator)
    {
      do_reinit (m, n, local_rows, local_columns,
                 row_lengths, is_symmetric, offdiag_row_lengths);
    }
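    // constructor from an existing sparsity pattern, together with a
    // description of how many rows and columns each process owns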
    template <typename SparsityPatternType>
    SparseMatrix::
    SparseMatrix (const MPI_Comm               &communicator,
                  const SparsityPatternType    &sparsity_pattern,
                  const std::vector<size_type> &local_rows_per_process,
                  const std::vector<size_type> &local_columns_per_process,
                  const unsigned int            this_process,
                  const bool                    preset_nonzero_locations)
      :
      communicator (communicator)
    {
      do_reinit (sparsity_pattern, local_rows_per_process,
                 local_columns_per_process, this_process,
                 preset_nonzero_locations);
    }
    void
    SparseMatrix::reinit (const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatDuplicate (other.matrix, MAT_DO_NOT_COPY_VALUES, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
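    // copy_from() copies only the matrix values: MatCopy with
    // SAME_NONZERO_PATTERN lets PETSc assume that both matrices were
    // allocated with identical sparsity patterns, which is the caller's
    // responsibility to ensure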
    void
    SparseMatrix::copy_from (const SparseMatrix &other)
    {
      if (&other == this)
        return;

      this->communicator = other.communicator;

      const PetscErrorCode ierr = MatCopy (other.matrix, matrix,
                                           SAME_NONZERO_PATTERN);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
    }
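    // like the constructors above, the reinit() overloads destroy the old
    // matrix and defer to do_reinit() for the actual work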
    void
    SparseMatrix::reinit (const MPI_Comm  &communicator,
                          const size_type  m,
                          const size_type  n,
                          const size_type  local_rows,
                          const size_type  local_columns,
                          const size_type  n_nonzero_per_row,
                          const bool       is_symmetric,
                          const size_type  n_offdiag_nonzero_per_row)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (m, n, local_rows, local_columns,
                 n_nonzero_per_row, is_symmetric,
                 n_offdiag_nonzero_per_row);
    }
    void
    SparseMatrix::reinit (const MPI_Comm               &communicator,
                          const size_type               m,
                          const size_type               n,
                          const size_type               local_rows,
                          const size_type               local_columns,
                          const std::vector<size_type> &row_lengths,
                          const bool                    is_symmetric,
                          const std::vector<size_type> &offdiag_row_lengths)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (m, n, local_rows, local_columns,
                 row_lengths, is_symmetric, offdiag_row_lengths);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit (const MPI_Comm               &communicator,
                          const SparsityPatternType    &sparsity_pattern,
                          const std::vector<size_type> &local_rows_per_process,
                          const std::vector<size_type> &local_columns_per_process,
                          const unsigned int            this_process,
                          const bool                    preset_nonzero_locations)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (sparsity_pattern, local_rows_per_process,
                 local_columns_per_process, this_process,
                 preset_nonzero_locations);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::reinit (const IndexSet            &local_rows,
                          const IndexSet            &local_columns,
                          const SparsityPatternType &sparsity_pattern,
                          const MPI_Comm            &communicator)
    {
      this->communicator = communicator;

      // get rid of the old matrix and generate a new one
      const PetscErrorCode ierr = destroy_matrix (matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      do_reinit (local_rows, local_columns, sparsity_pattern);
    }
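    // The do_reinit() functions below issue the actual PETSc calls. As a
    // usage sketch only (assuming a DynamicSparsityPattern `dsp` and the
    // locally owned row/column IndexSets of the linear system), user code
    // typically reaches this point via
    //
    //   PETScWrappers::MPI::SparseMatrix system_matrix;
    //   system_matrix.reinit (locally_owned_rows, locally_owned_columns,
    //                         dsp, mpi_communicator);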
    void
    SparseMatrix::do_reinit (const size_type m,
                             const size_type n,
                             const size_type local_rows,
                             const size_type local_columns,
                             const size_type n_nonzero_per_row,
                             const bool      is_symmetric,
                             const size_type n_offdiag_nonzero_per_row)
    {
      Assert (local_rows <= m, ExcLocalRowsTooLarge (local_rows, m));

      // use the call sequence that indicates only a maximal number of
      // elements per row for all rows globally
#if DEAL_II_PETSC_VERSION_LT(3,3,0)
      const PetscErrorCode ierr = MatCreateMPIAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   n_nonzero_per_row, 0,
                                   n_offdiag_nonzero_per_row, 0,
                                   &matrix);
#else
      const PetscErrorCode ierr = MatCreateAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   n_nonzero_per_row, 0,
                                   n_offdiag_nonzero_per_row, 0,
                                   &matrix);
      set_matrix_option (matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
#endif
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option (matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
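    // In PETSc's MPIAIJ format, each process stores a "diagonal" block
    // (entries whose columns lie inside its own column range) and an
    // "off-diagonal" block (all other columns); the offdiag_row_lengths
    // argument of the following variant preallocates the latter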
    void
    SparseMatrix::do_reinit (const size_type               m,
                             const size_type               n,
                             const size_type               local_rows,
                             const size_type               local_columns,
                             const std::vector<size_type> &row_lengths,
                             const bool                    is_symmetric,
                             const std::vector<size_type> &offdiag_row_lengths)
    {
      Assert (local_rows <= m, ExcLocalRowsTooLarge (local_rows, m));

      Assert (row_lengths.size() == m,
              ExcDimensionMismatch (row_lengths.size(), m));

      // if local_columns is smaller than one of the row lengths,
      // MatCreateMPIAIJ throws an error, so catch this case early
      for (size_type i=0; i<row_lengths.size(); ++i)
        Assert (row_lengths[i] <= local_columns,
                ExcIndexRange (row_lengths[i], 1, local_columns+1));

      // use the call sequence that indicates a maximal number of elements
      // for each row individually. deal.II stores row lengths as unsigned
      // integers while PETSc wants to see signed PetscInt, so we have to
      // convert
      const std::vector<PetscInt> int_row_lengths (row_lengths.begin(),
                                                   row_lengths.end());
      const std::vector<PetscInt> int_offdiag_row_lengths (offdiag_row_lengths.begin(),
                                                           offdiag_row_lengths.end());

#if DEAL_II_PETSC_VERSION_LT(3,3,0)
      const PetscErrorCode ierr = MatCreateMPIAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   0, &int_row_lengths[0],
                                   0,
                                   offdiag_row_lengths.size() ? &int_offdiag_row_lengths[0] : 0,
                                   &matrix);
#else
      const PetscErrorCode ierr = MatCreateAIJ
                                  (communicator,
                                   local_rows, local_columns,
                                   m, n,
                                   0, &int_row_lengths[0],
                                   0,
                                   offdiag_row_lengths.size() ? &int_offdiag_row_lengths[0] : 0,
                                   &matrix);
      set_matrix_option (matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
#endif
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      // set the symmetric flag, if so requested
      if (is_symmetric == true)
        set_matrix_option (matrix, MAT_SYMMETRIC, PETSC_TRUE);
    }
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit (const IndexSet            &local_rows,
                             const IndexSet            &local_columns,
                             const SparsityPatternType &sparsity_pattern)
    {
      Assert (sparsity_pattern.n_rows() == local_rows.size(),
              ExcMessage ("SparsityPattern and IndexSet have different number of rows"));
      Assert (sparsity_pattern.n_cols() == local_columns.size(),
              ExcMessage ("SparsityPattern and IndexSet have different number of columns"));
      Assert (local_rows.is_contiguous() && local_columns.is_contiguous(),
              ExcMessage ("PETSc only supports contiguous row/column ranges"));
      Assert (local_rows.is_ascending_and_one_to_one (communicator),
              ExcNotImplemented());
#ifdef DEBUG
      {
        const types::global_dof_index row_owners
          = Utilities::MPI::sum (local_rows.n_elements(), communicator);
        const types::global_dof_index col_owners
          = Utilities::MPI::sum (local_columns.n_elements(), communicator);
        Assert (row_owners == sparsity_pattern.n_rows(),
                ExcMessage (std::string ("Each row has to be owned by exactly one owner (n_rows()=")
                            + Utilities::to_string (sparsity_pattern.n_rows())
                            + " but sum(local_rows.n_elements())="
                            + Utilities::to_string (row_owners)
                            + ")"));
        Assert (col_owners == sparsity_pattern.n_cols(),
                ExcMessage (std::string ("Each column has to be owned by exactly one owner (n_cols()=")
                            + Utilities::to_string (sparsity_pattern.n_cols())
                            + " but sum(local_columns.n_elements())="
                            + Utilities::to_string (col_owners)
                            + ")"));
      }
#endif

      // create the matrix. we do not set row lengths here but provide the
      // correct sparsity pattern later
      PetscErrorCode ierr = MatCreate (communicator, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetSizes (matrix,
                          local_rows.n_elements(),
                          local_columns.n_elements(),
                          sparsity_pattern.n_rows(),
                          sparsity_pattern.n_cols());
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetType (matrix, MATMPIAIJ);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
      // next preset the given matrix entries with zeros via
      // MatMPIAIJSetPreallocationCSR. if this process owns no rows, we
      // still have to make the call, with an empty CSR array
      if (local_rows.n_elements() > 0)
        {
          const PetscInt local_row_start = local_rows.nth_index_in_set (0);
          const PetscInt local_row_end
            = local_row_start + local_rows.n_elements();

          // first set up the column number array for the rows to be stored
          // on the local processor. have one dummy entry at the end to
          // make sure PETSc doesn't read past the end
          std::vector<PetscInt>
          rowstart_in_window (local_row_end - local_row_start + 1, 0),
          colnums_in_window;
          {
            unsigned int n_cols = 0;
            for (PetscInt i=local_row_start; i<local_row_end; ++i)
              {
                const PetscInt row_length = sparsity_pattern.row_length (i);
                rowstart_in_window[i+1-local_row_start]
                  = rowstart_in_window[i-local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize (n_cols+1, -1);
          }

          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = &colnums_in_window[0];
            for (PetscInt i=local_row_start; i<local_row_end; ++i)
              for (typename SparsityPatternType::iterator p=sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i); ++p, ++ptr)
                *ptr = p->column();
          }

          // then call the PETSc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR (matrix,
                                               &rowstart_in_window[0],
                                               &colnums_in_window[0],
                                               0);
          AssertThrow (ierr == 0, ExcPETScError (ierr));
        }
      else
        {
          PetscInt i = 0;
          ierr = MatMPIAIJSetPreallocationCSR (matrix, &i, &i, 0);
          AssertThrow (ierr == 0, ExcPETScError (ierr));
        }
      compress (dealii::VectorOperation::insert);

      {
        close_matrix (matrix);
        set_keep_zero_rows (matrix);
      }
    }
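    // the following do_reinit() variant takes the number of locally owned
    // rows and columns of every process. for PETSc versions before 2.3.3,
    // we have to count for each local row how many entries fall inside and
    // outside the local column window before creating the matrix; newer
    // versions are given the full CSR pattern afterwards instead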
    template <typename SparsityPatternType>
    void
    SparseMatrix::do_reinit (const SparsityPatternType    &sparsity_pattern,
                             const std::vector<size_type> &local_rows_per_process,
                             const std::vector<size_type> &local_columns_per_process,
                             const unsigned int            this_process,
                             const bool                    preset_nonzero_locations)
    {
      Assert (local_rows_per_process.size() == local_columns_per_process.size(),
              ExcDimensionMismatch (local_rows_per_process.size(),
                                    local_columns_per_process.size()));
      Assert (this_process < local_rows_per_process.size(),
              ExcInternalError());
      assert_is_compressed ();

      // first find out which rows and columns are ours
      size_type local_row_start = 0;
      size_type local_col_start = 0;
      for (unsigned int p=0; p<this_process; ++p)
        {
          local_row_start += local_rows_per_process[p];
          local_col_start += local_columns_per_process[p];
        }
      const size_type local_row_end
        = local_row_start + local_rows_per_process[this_process];

#if DEAL_II_PETSC_VERSION_LT(2,3,3)
      // the old way of creating the matrix: we need the row lengths up
      // front; starting with PETSc 2.3.3 this can be skipped
      const size_type local_col_end
        = local_col_start + local_columns_per_process[this_process];

      // count the elements in- and out-of-window for the rows we own
      std::vector<PetscInt>
      row_lengths_in_window (local_row_end - local_row_start),
      row_lengths_out_of_window (local_row_end - local_row_start);
      for (size_type row = local_row_start; row<local_row_end; ++row)
        for (size_type c=0; c<sparsity_pattern.row_length(row); ++c)
          {
            const size_type column = sparsity_pattern.column_number (row, c);

            if ((column >= local_col_start) &&
                (column < local_col_end))
              ++row_lengths_in_window[row-local_row_start];
            else
              ++row_lengths_out_of_window[row-local_row_start];
          }
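      // note that PETSc wants the per-row counts indexed relative to the
      // first locally owned row, i.e., the arrays start at local row zero
      // rather than at the global row number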
      PetscErrorCode ierr = MatCreateMPIAIJ (communicator,
                                             local_rows_per_process[this_process],
                                             local_columns_per_process[this_process],
                                             sparsity_pattern.n_rows(),
                                             sparsity_pattern.n_cols(),
                                             0, &row_lengths_in_window[0],
                                             0, &row_lengths_out_of_window[0],
                                             &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

#else //PETSC_VERSION>=2.3.3
      // create the matrix. we do not set row lengths here but provide the
      // correct sparsity pattern later
      PetscErrorCode ierr = MatCreate (communicator, &matrix);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetSizes (matrix,
                          local_rows_per_process[this_process],
                          local_columns_per_process[this_process],
                          sparsity_pattern.n_rows(),
                          sparsity_pattern.n_cols());
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatSetType (matrix, MATMPIAIJ);
      AssertThrow (ierr == 0, ExcPETScError (ierr));
#endif
      // next preset the given matrix entries with zeros, if the user
      // requested so. this doesn't avoid any memory allocations, but it at
      // least avoids some searches later on. the key is that we can use
      // the matrix set routines that set an entire row at once, not a
      // single entry at a time
      if (preset_nonzero_locations == true)
        {
          // MatMPIAIJSetPreallocationCSR can be used to allocate the
          // sparsity pattern of a matrix if it is already available. first
          // set up the column number array for the rows to be stored on
          // the local processor, with one dummy entry at the end so that
          // PETSc doesn't read past the end
          std::vector<PetscInt>
          rowstart_in_window (local_row_end - local_row_start + 1, 0),
          colnums_in_window;
          {
            size_type n_cols = 0;
            for (size_type i=local_row_start; i<local_row_end; ++i)
              {
                const size_type row_length = sparsity_pattern.row_length (i);
                rowstart_in_window[i+1-local_row_start]
                  = rowstart_in_window[i-local_row_start] + row_length;
                n_cols += row_length;
              }
            colnums_in_window.resize (n_cols+1, -1);
          }

          // now copy over the information from the sparsity pattern
          {
            PetscInt *ptr = &colnums_in_window[0];
            for (size_type i=local_row_start; i<local_row_end; ++i)
              for (typename SparsityPatternType::iterator p=sparsity_pattern.begin(i);
                   p != sparsity_pattern.end(i); ++p, ++ptr)
                *ptr = p->column();
          }

          // then call the PETSc function that summarily allocates these
          // entries
          ierr = MatMPIAIJSetPreallocationCSR (matrix,
                                               &rowstart_in_window[0],
                                               &colnums_in_window[0],
                                               0);
          AssertThrow (ierr == 0, ExcPETScError (ierr));
#if DEAL_II_PETSC_VERSION_LT(2,3,3)
          // for old PETSc versions we also have to force actual allocation
          // of the entries by writing (necessarily nonzero!) dummy values
          // into them, one row at a time; otherwise elements would be
          // allocated and accessed in random order, which PETSc handles
          // unbearably slowly
          {
            const std::vector<PetscScalar>
            values (sparsity_pattern.max_entries_per_row(), 1.);

            for (size_type i=local_row_start; i<local_row_end; ++i)
              {
                PetscInt petsc_i = i;
                ierr = MatSetValues (matrix, 1, &petsc_i,
                                     sparsity_pattern.row_length (i),
                                     &colnums_in_window[rowstart_in_window[i-local_row_start]],
                                     &values[0], INSERT_VALUES);
                AssertThrow (ierr == 0, ExcPETScError (ierr));
              }
          }

          compress (VectorOperation::insert);

          // set the dummy entries set above back to zero
          *this = 0;
#endif // version <=2.3.3

          compress (VectorOperation::insert);

          {
            close_matrix (matrix);
            set_keep_zero_rows (matrix);
          }
        }
    }

    // explicit instantiations
    template
    SparseMatrix::SparseMatrix (const MPI_Comm &,
                                const SparsityPattern &,
                                const std::vector<size_type> &,
                                const std::vector<size_type> &,
                                const unsigned int,
                                const bool);
    template
    SparseMatrix::SparseMatrix (const MPI_Comm &,
                                const DynamicSparsityPattern &,
                                const std::vector<size_type> &,
                                const std::vector<size_type> &,
                                const unsigned int,
                                const bool);

    template void
    SparseMatrix::reinit (const MPI_Comm &,
                          const SparsityPattern &,
                          const std::vector<size_type> &,
                          const std::vector<size_type> &,
                          const unsigned int,
                          const bool);
    template void
    SparseMatrix::reinit (const MPI_Comm &,
                          const DynamicSparsityPattern &,
                          const std::vector<size_type> &,
                          const std::vector<size_type> &,
                          const unsigned int,
                          const bool);

    template void
    SparseMatrix::reinit (const IndexSet &,
                          const IndexSet &,
                          const SparsityPattern &,
                          const MPI_Comm &);
    template void
    SparseMatrix::reinit (const IndexSet &,
                          const IndexSet &,
                          const DynamicSparsityPattern &,
                          const MPI_Comm &);

    template void
    SparseMatrix::do_reinit (const SparsityPattern &,
                             const std::vector<size_type> &,
                             const std::vector<size_type> &,
                             const unsigned int,
                             const bool);
    template void
    SparseMatrix::do_reinit (const DynamicSparsityPattern &,
                             const std::vector<size_type> &,
                             const std::vector<size_type> &,
                             const unsigned int,
                             const bool);

    template void
    SparseMatrix::do_reinit (const IndexSet &,
                             const IndexSet &,
                             const SparsityPattern &);
    template void
    SparseMatrix::do_reinit (const IndexSet &,
                             const IndexSet &,
                             const DynamicSparsityPattern &);
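    // query PETSc for the locally owned column range of the matrix; PETSc
    // hands it back as a single contiguous interval [min,max), which we
    // verify below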
    IndexSet
    SparseMatrix::locally_owned_domain_indices () const
    {
#if DEAL_II_PETSC_VERSION_LT(3,3,0)
      Assert (false, ExcNotImplemented());
      return IndexSet();
#else
      PetscInt n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize (matrix, &n_rows, &n_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetLocalSize (matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetOwnershipRangeColumn (matrix, &min, &max);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      Assert (n_loc_cols == max-min,
              ExcMessage ("PETSc is requiring non contiguous memory allocation."));

      IndexSet indices (n_cols);
      indices.add_range (min, max);
      indices.compress ();

      return indices;
#endif
    }
    IndexSet
    SparseMatrix::locally_owned_range_indices () const
    {
#if DEAL_II_PETSC_VERSION_LT(3,3,0)
      Assert (false, ExcNotImplemented());
      return IndexSet();
#else
      PetscInt n_rows, n_cols, n_loc_rows, n_loc_cols, min, max;
      PetscErrorCode ierr;

      ierr = MatGetSize (matrix, &n_rows, &n_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetLocalSize (matrix, &n_loc_rows, &n_loc_cols);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      ierr = MatGetOwnershipRange (matrix, &min, &max);
      AssertThrow (ierr == 0, ExcPETScError (ierr));

      Assert (n_loc_rows == max-min,
              ExcMessage ("PETSc is requiring non contiguous memory allocation."));

      IndexSet indices (n_rows);
      indices.add_range (min, max);
      indices.compress ();

      return indices;
#endif
    }
  }
}

DEAL_II_NAMESPACE_CLOSE
#endif // DEAL_II_WITH_PETSC