Reference documentation for deal.II version 9.6.0
petsc_parallel_block_sparse_matrix.cc
// ------------------------------------------------------------------------
//
// SPDX-License-Identifier: LGPL-2.1-or-later
// Copyright (C) 2004 - 2023 by the deal.II authors
//
// This file is part of the deal.II library.
//
// Part of the source code is dual licensed under Apache-2.0 WITH
// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
// governing the source code and code contributions can be found in
// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
//
// ------------------------------------------------------------------------

#include <deal.II/lac/petsc_block_sparse_matrix.h>
#include <deal.II/lac/petsc_compatibility.h>

#ifdef DEAL_II_WITH_PETSC

namespace
{
  // A dummy utility routine to create an empty matrix in case we import
  // a MATNEST with NULL blocks
  static Mat
  create_dummy_mat(MPI_Comm comm,
                   PetscInt lr,
                   PetscInt gr,
                   PetscInt lc,
                   PetscInt gc)
  {
    Mat            dummy;
    PetscErrorCode ierr;

    ierr = MatCreate(comm, &dummy);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatSetSizes(dummy, lr, lc, gr, gc);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatSetType(dummy, MATAIJ);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatSeqAIJSetPreallocation(dummy, 0, nullptr);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatMPIAIJSetPreallocation(dummy, 0, nullptr, 0, nullptr);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatSetUp(dummy);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatSetOption(dummy, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatAssemblyBegin(dummy, MAT_FINAL_ASSEMBLY);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    ierr = MatAssemblyEnd(dummy, MAT_FINAL_ASSEMBLY);
    AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
    return dummy;
  }
} // namespace

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    BlockSparseMatrix &
    BlockSparseMatrix::operator=(const BlockSparseMatrix &m)
    {
      BaseClass::operator=(m);

      return *this;
    }



    BlockSparseMatrix::~BlockSparseMatrix()
    {
      PetscErrorCode ierr = MatDestroy(&petsc_nest_matrix);
      (void)ierr;
      AssertNothrow(ierr == 0, ExcPETScError(ierr));
    }


#  ifndef DOXYGEN
    void
    BlockSparseMatrix::reinit(const size_type n_block_rows,
                              const size_type n_block_columns)
    {
      // first delete previous content of
      // the subobjects array
      clear();

      // then resize. set sizes of blocks to
      // zero. user will later have to call
      // collect_sizes for this
      this->sub_objects.reinit(n_block_rows, n_block_columns);
      this->row_block_indices.reinit(n_block_rows, 0);
      this->column_block_indices.reinit(n_block_columns, 0);

      // and reinitialize the blocks
      for (size_type r = 0; r < this->n_block_rows(); ++r)
        for (size_type c = 0; c < this->n_block_cols(); ++c)
          {
            BlockType *p            = new BlockType();
            this->sub_objects[r][c] = p;
          }
    }
#  endif


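    // Set up the block matrix from one IndexSet per block row and per block
    // column plus a block sparsity pattern: each block (r,c) is initialized
    // from (rows[r], cols[c], bdsp.block(r,c)), and collect_sizes() then
    // computes the global block structure and the nested PETSc matrix.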
    void
    BlockSparseMatrix::reinit(const std::vector<IndexSet>       &rows,
                              const std::vector<IndexSet>       &cols,
                              const BlockDynamicSparsityPattern &bdsp,
                              const MPI_Comm                     com)
    {
      Assert(rows.size() == bdsp.n_block_rows(), ExcMessage("invalid size"));
      Assert(cols.size() == bdsp.n_block_cols(), ExcMessage("invalid size"));


      clear();
      this->sub_objects.reinit(bdsp.n_block_rows(), bdsp.n_block_cols());

      std::vector<types::global_dof_index> row_sizes;
      for (unsigned int r = 0; r < bdsp.n_block_rows(); ++r)
        row_sizes.push_back(bdsp.block(r, 0).n_rows());
      this->row_block_indices.reinit(row_sizes);

      std::vector<types::global_dof_index> col_sizes;
      for (unsigned int c = 0; c < bdsp.n_block_cols(); ++c)
        col_sizes.push_back(bdsp.block(0, c).n_cols());
      this->column_block_indices.reinit(col_sizes);

      for (unsigned int r = 0; r < this->n_block_rows(); ++r)
        for (unsigned int c = 0; c < this->n_block_cols(); ++c)
          {
            Assert(rows[r].size() == bdsp.block(r, c).n_rows(),
                   ExcMessage("invalid size"));
            Assert(cols[c].size() == bdsp.block(r, c).n_cols(),
                   ExcMessage("invalid size"));

            BlockType *p = new BlockType();
            p->reinit(rows[r], cols[c], bdsp.block(r, c), com);
            this->sub_objects[r][c] = p;
          }

      this->collect_sizes();
    }

    void
    BlockSparseMatrix::reinit(const std::vector<IndexSet>       &sizes,
                              const BlockDynamicSparsityPattern &bdsp,
                              const MPI_Comm                     com)
    {
      reinit(sizes, sizes, bdsp, com);
    }



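    // For block positions that were left empty (nullptr), create placeholder
    // PETSc matrices with no entries, inferring global and local sizes from
    // the non-empty blocks in the same block row and block column. The base
    // class requires every block to be a valid matrix, even though MATNEST
    // itself could cope with NULL blocks.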
    void
    BlockSparseMatrix::create_empty_matrices_if_needed()
    {
      auto m = this->n_block_rows();
      auto n = this->n_block_cols();
      PetscErrorCode ierr;

      // Create empty matrices if needed
      // This is needed by the base class
      // not by MATNEST
      std::vector<size_type> row_sizes(m, size_type(-1));
      std::vector<size_type> col_sizes(n, size_type(-1));
      std::vector<size_type> row_local_sizes(m, size_type(-1));
      std::vector<size_type> col_local_sizes(n, size_type(-1));
      MPI_Comm               comm = MPI_COMM_NULL;
      for (size_type r = 0; r < m; r++)
        {
          for (size_type c = 0; c < n; c++)
            {
              if (this->sub_objects[r][c])
                {
                  comm         = this->sub_objects[r][c]->get_mpi_communicator();
                  row_sizes[r] = this->sub_objects[r][c]->m();
                  col_sizes[c] = this->sub_objects[r][c]->n();
                  row_local_sizes[r] = this->sub_objects[r][c]->local_size();
                  col_local_sizes[c] =
                    this->sub_objects[r][c]->local_domain_size();
                }
            }
        }
      for (size_type r = 0; r < m; r++)
        {
          for (size_type c = 0; c < n; c++)
            {
              if (!this->sub_objects[r][c])
                {
                  Assert(
                    row_sizes[r] != size_type(-1),
                    ExcMessage(
                      "When passing empty sub-blocks of a block matrix, you need to make "
                      "sure that at least one block in each block row and block column is "
                      "non-empty. However, block row " +
                      std::to_string(r) +
                      " is completely empty "
                      "and so it is not possible to determine how many rows it should have."));
                  Assert(
                    col_sizes[c] != size_type(-1),
                    ExcMessage(
                      "When passing empty sub-blocks of a block matrix, you need to make "
                      "sure that at least one block in each block row and block column is "
                      "non-empty. However, block column " +
                      std::to_string(c) +
                      " is completely empty "
                      "and so it is not possible to determine how many columns it should have."));
                  Mat dummy = ::create_dummy_mat(
                    comm,
                    static_cast<PetscInt>(row_local_sizes[r]),
                    static_cast<PetscInt>(row_sizes[r]),
                    static_cast<PetscInt>(col_local_sizes[c]),
                    static_cast<PetscInt>(col_sizes[c]));
                  this->sub_objects[r][c] = new BlockType(dummy);

                  // the new object got a reference on dummy, we can safely
                  // call destroy here
                  ierr = MatDestroy(&dummy);
                  AssertThrow(ierr == 0, ExcPETScError(ierr));
                }
            }
        }
    }


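    // Make sure every block position holds an actual matrix (creating empty
    // placeholders where necessary), let the base class compute the global
    // block structure, and rebuild the nested PETSc matrix representing the
    // whole block system.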
    void
    BlockSparseMatrix::collect_sizes()
    {
      this->create_empty_matrices_if_needed();
      BaseClass::collect_sizes();
      this->setup_nest_matrix();
    }

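    // Collect the PETSc Mat objects of all blocks and combine them into a
    // single MATNEST matrix, so that the whole block system can be handed
    // to PETSc as one object.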
    void
    BlockSparseMatrix::setup_nest_matrix()
    {
      auto m = this->n_block_rows();
      auto n = this->n_block_cols();
      PetscErrorCode ierr;

      MPI_Comm comm = PETSC_COMM_SELF;

      ierr = MatDestroy(&petsc_nest_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      std::vector<Mat> psub_objects(m * n);
      for (unsigned int r = 0; r < m; r++)
        for (unsigned int c = 0; c < n; c++)
          {
            comm                    = this->sub_objects[r][c]->get_mpi_communicator();
            psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
          }
      ierr = MatCreateNest(
        comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
      AssertThrow(ierr == 0, ExcPETScError(ierr));

      ierr = MatNestSetVecType(petsc_nest_matrix, VECNEST);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
    }



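    // Finish any pending insert/add operations in the individual blocks and
    // mark the nested PETSc matrix as changed.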
    void
    BlockSparseMatrix::compress(VectorOperation::values operation)
    {
      BaseClass::compress(operation);
      petsc_increment_state_counter(petsc_nest_matrix);
    }



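    // Return, for each block column, the set of matrix columns (the domain
    // of the matrix-vector product) owned by the current MPI process.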
    std::vector<IndexSet>
    BlockSparseMatrix::locally_owned_domain_indices() const
    {
      std::vector<IndexSet> index_sets;

      for (unsigned int i = 0; i < this->n_block_cols(); ++i)
        index_sets.push_back(this->block(0, i).locally_owned_domain_indices());

      return index_sets;
    }



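    // Return, for each block row, the set of matrix rows (the range of the
    // matrix-vector product) owned by the current MPI process.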
    std::vector<IndexSet>
    BlockSparseMatrix::locally_owned_range_indices() const
    {
      std::vector<IndexSet> index_sets;

      for (unsigned int i = 0; i < this->n_block_rows(); ++i)
        index_sets.push_back(this->block(i, 0).locally_owned_range_indices());

      return index_sets;
    }



    std::uint64_t
    BlockSparseMatrix::n_nonzero_elements() const
    {
      std::uint64_t n_nonzero = 0;
      for (size_type rows = 0; rows < this->n_block_rows(); ++rows)
        for (size_type cols = 0; cols < this->n_block_cols(); ++cols)
          n_nonzero += this->block(rows, cols).n_nonzero_elements();

      return n_nonzero;
    }



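    // The following members expose the underlying PETSc representation:
    // the communicator of the MATNEST object, and the MATNEST matrix itself
    // (read-only via the conversion operator, writable via petsc_matrix()).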
    MPI_Comm
    BlockSparseMatrix::get_mpi_communicator() const
    {
      return PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_matrix));
    }

    BlockSparseMatrix::operator const Mat &() const
    {
      return petsc_nest_matrix;
    }



    Mat &
    BlockSparseMatrix::petsc_matrix()
    {
      return petsc_nest_matrix;
    }

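    // Import an existing PETSc matrix A. If A is of type MATNEST, its
    // sub-matrices become the blocks of this object (NULL blocks are replaced
    // by empty placeholder matrices); otherwise A is wrapped as a single 1x1
    // block. Where possible the given MATNEST is reused directly, otherwise
    // a new nest matrix is built from the blocks.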
    void
    BlockSparseMatrix::reinit(Mat A)
    {
      clear();

      PetscBool isnest;
      PetscInt  nr = 1, nc = 1;

      PetscErrorCode ierr =
        PetscObjectTypeCompare(reinterpret_cast<PetscObject>(A),
                               MATNEST,
                               &isnest);
      AssertThrow(ierr == 0, ExcPETScError(ierr));
      std::vector<Mat> mats;
      bool             need_empty_matrices = false;
      if (isnest)
        {
          ierr = MatNestGetSize(A, &nr, &nc);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          for (PetscInt i = 0; i < nr; ++i)
            {
              for (PetscInt j = 0; j < nc; ++j)
                {
                  Mat sA;
                  ierr = MatNestGetSubMat(A, i, j, &sA);
                  mats.push_back(sA);
                  if (!sA)
                    need_empty_matrices = true;
                }
            }
        }
      else
        {
          mats.push_back(A);
        }

      std::vector<size_type> r_block_sizes(nr, 0);
      std::vector<size_type> c_block_sizes(nc, 0);
      this->row_block_indices.reinit(r_block_sizes);
      this->column_block_indices.reinit(c_block_sizes);
      this->sub_objects.reinit(nr, nc);
      for (PetscInt i = 0; i < nr; ++i)
        {
          for (PetscInt j = 0; j < nc; ++j)
            {
              if (mats[i * nc + j])
                this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
              else
                this->sub_objects[i][j] = nullptr;
            }
        }
      if (need_empty_matrices)
        this->create_empty_matrices_if_needed();

      BaseClass::collect_sizes();
      if (need_empty_matrices || !isnest)
        {
          this->setup_nest_matrix();
        }
      else
        {
          ierr = PetscObjectReference(reinterpret_cast<PetscObject>(A));
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          PetscErrorCode ierr = MatDestroy(&petsc_nest_matrix);
          AssertThrow(ierr == 0, ExcPETScError(ierr));
          petsc_nest_matrix = A;
        }
    }

  } // namespace MPI
} // namespace PETScWrappers



DEAL_II_NAMESPACE_CLOSE

#endif
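
A minimal usage sketch (not part of the file above), assuming the caller has already built one locally owned IndexSet per block and a matching BlockDynamicSparsityPattern for a parallel block system; the names build_block_matrix, owned_partitioning, and dsp are hypothetical. It only exercises the reinit() and compress() members implemented in this file:

#include <deal.II/base/index_set.h>

#include <deal.II/lac/block_dynamic_sparsity_pattern.h>
#include <deal.II/lac/petsc_block_sparse_matrix.h>

#include <vector>

using namespace dealii;

void
build_block_matrix(const std::vector<IndexSet>       &owned_partitioning,
                   const BlockDynamicSparsityPattern &dsp,
                   const MPI_Comm                     communicator)
{
  PETScWrappers::MPI::BlockSparseMatrix system_matrix;

  // One IndexSet per block row/column; the per-block couplings come from the
  // sparsity pattern. collect_sizes() is called internally by reinit().
  system_matrix.reinit(owned_partitioning, dsp, communicator);

  // ... assemble entries into system_matrix.block(i, j) ...

  // Finalize parallel assembly of all blocks.
  system_matrix.compress(VectorOperation::add);
}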