petsc_block_vector.h
// ------------------------------------------------------------------------
//
// SPDX-License-Identifier: LGPL-2.1-or-later
// Copyright (C) 2004 - 2024 by the deal.II authors
//
// This file is part of the deal.II library.
//
// Part of the source code is dual licensed under Apache-2.0 WITH
// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
// governing the source code and code contributions can be found in
// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
//
// ------------------------------------------------------------------------

#ifndef dealii_petsc_block_vector_h
#define dealii_petsc_block_vector_h


#include <deal.II/base/config.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/block_indices.h>
#  include <deal.II/lac/block_vector_base.h>
#  include <deal.II/lac/petsc_vector.h>
#  include <deal.II/lac/vector_type_traits.h>

#  include <cstddef>

DEAL_II_NAMESPACE_OPEN
namespace PETScWrappers
{
  // forward declaration
  class BlockVector;

  namespace MPI
  {
    /**
     * An implementation of block vectors based on the parallel vector class
     * PETScWrappers::MPI::Vector. The individual blocks are deal.II vector
     * objects; in addition, the whole block vector is mirrored as a single
     * PETSc VECNEST object.
     */
    class BlockVector : public BlockVectorBase<Vector>
    {
    public:
      /** Typedef the base class for simpler access to its own alias. */
      using BaseClass = BlockVectorBase<Vector>;

      /** Typedef the type of the underlying vector. */
      using BlockType = BaseClass::BlockType;

      /** Import the alias from the base class. */
      using value_type      = BaseClass::value_type;
      using pointer         = BaseClass::pointer;
      using const_pointer   = BaseClass::const_pointer;
      using reference       = BaseClass::reference;
      using const_reference = BaseClass::const_reference;
      using size_type       = BaseClass::size_type;
      using iterator        = BaseClass::iterator;
      using const_iterator  = BaseClass::const_iterator;

      /** Default constructor. Generate an empty vector without any blocks. */
      BlockVector();

      /**
       * Constructor. Generate a block vector with @p n_blocks blocks, each of
       * which is a parallel vector across @p communicator with @p block_size
       * elements, of which @p locally_owned_size elements are stored on the
       * present process.
       */
      explicit BlockVector(const unsigned int n_blocks,
                           const MPI_Comm     communicator,
                           const size_type    block_size,
                           const size_type    locally_owned_size);

      /** Copy constructor. Copy all properties and elements of @p V. */
      BlockVector(const BlockVector &V);

      /** Constructor with one global and one local size per block. */
      BlockVector(const std::vector<size_type> &block_sizes,
                  const MPI_Comm                communicator,
                  const std::vector<size_type> &local_elements);

      /** Create a vector with one block per entry of @p parallel_partitioning. */
      explicit BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                           const MPI_Comm communicator = MPI_COMM_WORLD);

      /** Same as above, but include ghost elements. */
      BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                  const std::vector<IndexSet> &ghost_indices,
                  const MPI_Comm               communicator);

      /**
       * Create a BlockVector from an existing PETSc Vec. The number of blocks
       * is inferred if @p v is of type VECNEST; otherwise the block vector
       * has a single block.
       */
      explicit BlockVector(Vec v);

      /** Create a BlockVector from an array of PETSc vectors. */
      template <std::size_t num_blocks>
      explicit BlockVector(const std::array<Vec, num_blocks> &);

      /** Destructor. Clears memory. */
      ~BlockVector() override;

      /** Fill all locally stored components with the given scalar value. */
      BlockVector &
      operator=(const value_type s);

      /** Copy operator for arguments of the same type. */
      BlockVector &
      operator=(const BlockVector &V);

      /** Associate an existing PETSc Vec with this object. */
      void
      reinit(Vec v);

      /**
       * Reinitialize to @p n_blocks blocks, each of global size
       * @p block_size with @p locally_owned_size locally stored elements.
       */
      void
      reinit(const unsigned int n_blocks,
             const MPI_Comm     communicator,
             const size_type    block_size,
             const size_type    locally_owned_size,
             const bool         omit_zeroing_entries = false);

      /** Same as above, but with individual sizes for each block. */
      void
      reinit(const std::vector<size_type> &block_sizes,
             const MPI_Comm                communicator,
             const std::vector<size_type> &locally_owned_sizes,
             const bool                    omit_zeroing_entries = false);

      /** Change the dimensions to those of the vector @p V. */
      void
      reinit(const BlockVector &V, const bool omit_zeroing_entries = false);

      /** Reinitialize using one IndexSet of locally owned elements per block. */
      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const MPI_Comm               communicator);

      /** Same as above, but include ghost entries. */
      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const std::vector<IndexSet> &ghost_entries,
             const MPI_Comm               communicator);

      /** Initialize each block from the corresponding parallel partitioner. */
      void
      reinit(
        const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                  &partitioners,
        const bool make_ghosted = true);

      /**
       * Collect the sizes of the sub-objects in internal arrays. Must be
       * called every time the sizes of the blocks change.
       */
      void
      collect_sizes();

      /** Call compress() on all sub-blocks of the vector. */
      void
      compress(VectorOperation::values operation);

      /** Change the number of blocks to @p num_blocks, each of size zero. */
      void
      reinit(const unsigned int num_blocks);

      /** Return whether the vector contains ghost elements. */
      bool
      has_ghost_elements() const;

      /** Return the MPI communicator in use with this vector. */
      MPI_Comm
      get_mpi_communicator() const;

      /** Conversion to the underlying PETSc VECNEST vector. */
      operator const Vec &() const;

      /** Return a reference to the underlying PETSc VECNEST vector. */
      Vec &
      petsc_vector();

      /** Swap the contents of this vector with those of @p v. */
      void
      swap(BlockVector &v) noexcept;

      /** Print to a stream. */
      void
      print(std::ostream      &out,
            const unsigned int precision  = 3,
            const bool         scientific = true,
            const bool         across     = true) const;

      /** Exception. */
      DeclException0(ExcNonMatchingBlockVectors);

    private:
      /**
       * A PETSc Vec object that describes the entire block vector as a single
       * "nested" (VECNEST) vector whose blocks are the blocks of this vector.
       */
      Vec petsc_nest_vector;

      /** Utility to set up the VECNEST object. */
      void
      setup_nest_vec();
    };
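
    // The following is an illustrative usage sketch, not part of the library
    // interface: create a two-block vector in which every block stores 100
    // locally owned elements per process, write to the owned entries, and
    // publish the changes. All sizes are made up for the example.
    //
    // @code
    //   const MPI_Comm     comm    = MPI_COMM_WORLD;
    //   const unsigned int n_procs = Utilities::MPI::n_mpi_processes(comm);
    //
    //   PETScWrappers::MPI::BlockVector v(/*n_blocks=*/2,
    //                                     comm,
    //                                     /*block_size=*/100 * n_procs,
    //                                     /*locally_owned_size=*/100);
    //   for (const auto i : v.block(0).locally_owned_elements())
    //     v.block(0)[i] = 1.0;
    //   v.compress(VectorOperation::insert);
    // @endcode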

    /*--------------------- Inline functions --------------------------------*/


    inline BlockVector::BlockVector()
      : BlockVectorBase<Vector>()
      , petsc_nest_vector(nullptr)
    {}



    inline BlockVector::BlockVector(const unsigned int n_blocks,
                                    const MPI_Comm     communicator,
                                    const size_type    block_size,
                                    const size_type    locally_owned_size)
      : BlockVector()
    {
      reinit(n_blocks, communicator, block_size, locally_owned_size);
    }



    inline BlockVector::BlockVector(
      const std::vector<size_type> &block_sizes,
      const MPI_Comm                communicator,
      const std::vector<size_type> &local_elements)
      : BlockVector()
    {
      reinit(block_sizes, communicator, local_elements, false);
    }



    inline BlockVector::BlockVector(const BlockVector &v)
      : BlockVector()
    {
      this->block_indices = v.block_indices;

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i] = v.components[i];

      this->collect_sizes();
    }



    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, communicator);
    }



    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const std::vector<IndexSet> &ghost_indices,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, ghost_indices, communicator);
    }



    inline BlockVector::BlockVector(Vec v)
      : BlockVector()
    {
      this->reinit(v);
    }



    template <std::size_t num_blocks>
    inline BlockVector::BlockVector(const std::array<Vec, num_blocks> &arrayV)
      : BlockVector()
    {
      this->block_indices.reinit(num_blocks, 0);

      this->components.resize(num_blocks);
      // use std::size_t to match num_blocks and avoid a signed/unsigned
      // comparison
      for (std::size_t i = 0; i < num_blocks; ++i)
        this->components[i].reinit(arrayV[i]);
      this->collect_sizes();
    }
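
    // Illustrative sketch for the constructor above (assumes PETSc has been
    // initialized, e.g. through Utilities::MPI::MPI_InitFinalize): wrap two
    // existing PETSc Vecs into one block vector. Cleanup of the raw handles
    // is omitted here; it follows the usual PETSc reference-counting rules.
    //
    // @code
    //   std::array<Vec, 2> raw;
    //   VecCreateMPI(MPI_COMM_WORLD, PETSC_DECIDE, 100, &raw[0]);
    //   VecCreateMPI(MPI_COMM_WORLD, PETSC_DECIDE, 200, &raw[1]);
    //
    //   PETScWrappers::MPI::BlockVector v(raw); // v now has two blocks
    // @endcode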



    inline BlockVector &
    BlockVector::operator=(const value_type s)
    {
      BaseClass::operator=(s);
      return *this;
    }



    inline BlockVector &
    BlockVector::operator=(const BlockVector &v)
    {
      // we only allow assignment to vectors with the same number of blocks
      // or to an empty BlockVector
      Assert(this->n_blocks() == 0 || this->n_blocks() == v.n_blocks(),
             ExcDimensionMismatch(this->n_blocks(), v.n_blocks()));

      if (this->n_blocks() != v.n_blocks())
        this->block_indices = v.block_indices;

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i] = v.components[i];

      this->collect_sizes();

      return *this;
    }


    inline void
    BlockVector::reinit(const unsigned int n_blocks,
                        const MPI_Comm     communicator,
                        const size_type    block_size,
                        const size_type    locally_owned_size,
                        const bool         omit_zeroing_entries)
    {
      reinit(std::vector<size_type>(n_blocks, block_size),
             communicator,
             std::vector<size_type>(n_blocks, locally_owned_size),
             omit_zeroing_entries);
    }
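
    // In other words, the overload above is shorthand for the
    // vector-of-sizes overload with all blocks equal: given an existing
    // BlockVector v and a communicator, the two calls in this sketch are
    // equivalent (the sizes are illustrative).
    //
    // @code
    //   v.reinit(3, communicator, 90, 30);
    //   v.reinit(std::vector<size_type>(3, 90),
    //            communicator,
    //            std::vector<size_type>(3, 30));
    // @endcode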



    inline void
    BlockVector::reinit(const std::vector<size_type> &block_sizes,
                        const MPI_Comm                communicator,
                        const std::vector<size_type> &locally_owned_sizes,
                        const bool                    omit_zeroing_entries)
    {
      this->block_indices.reinit(block_sizes);

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(communicator,
                                   block_sizes[i],
                                   locally_owned_sizes[i],
                                   omit_zeroing_entries);

      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const BlockVector &v, const bool omit_zeroing_entries)
    {
      if (this->n_blocks() != v.n_blocks())
        this->block_indices = v.block_indices;

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(v.components[i], omit_zeroing_entries);

      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const MPI_Comm               communicator)
    {
      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i], communicator);

      // update block_indices content
      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const std::vector<IndexSet> &ghost_entries,
                        const MPI_Comm               communicator)
    {
      AssertDimension(parallel_partitioning.size(), ghost_entries.size());

      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i],
                                   ghost_entries[i],
                                   communicator);

      // update block_indices content
      this->collect_sizes();
    }
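
    // Illustrative sketch for the ghosted overload above: each process owns
    // a contiguous range of every block and can additionally read one
    // element owned by the next process. The indices are made up for the
    // example and it assumes more than one MPI process.
    //
    // @code
    //   const MPI_Comm     comm = MPI_COMM_WORLD;
    //   const unsigned int rank = Utilities::MPI::this_mpi_process(comm);
    //   const unsigned int size = Utilities::MPI::n_mpi_processes(comm);
    //
    //   IndexSet owned(100 * size);
    //   owned.add_range(100 * rank, 100 * (rank + 1));
    //
    //   IndexSet ghosts(100 * size);
    //   ghosts.add_index((100 * (rank + 1)) % (100 * size));
    //
    //   PETScWrappers::MPI::BlockVector v;
    //   v.reinit(std::vector<IndexSet>(2, owned),
    //            std::vector<IndexSet>(2, ghosts),
    //            comm);
    //   // now v.has_ghost_elements() returns true
    // @endcode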



    inline void
    BlockVector::reinit(
      const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                &partitioners,
      const bool make_ghosted)
    {
      // update the number of blocks
      this->block_indices.reinit(partitioners.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(partitioners[i], make_ghosted);

      // update block_indices content
      this->collect_sizes();
    }



    inline MPI_Comm
    BlockVector::get_mpi_communicator() const
    {
      return PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_vector));
    }



    inline bool
    BlockVector::has_ghost_elements() const
    {
      bool ghosted = block(0).has_ghost_elements();
      if constexpr (running_in_debug_mode())
        {
          // all blocks must agree on their ghost status
          for (unsigned int i = 0; i < this->n_blocks(); ++i)
            Assert(block(i).has_ghost_elements() == ghosted,
                   ExcInternalError());
        }
      return ghosted;
    }



    inline void
    BlockVector::swap(BlockVector &v) noexcept
    {
      std::swap(this->components, v.components);
      std::swap(this->petsc_nest_vector, v.petsc_nest_vector);

      ::dealii::swap(this->block_indices, v.block_indices);
    }



    inline void
    BlockVector::print(std::ostream      &out,
                       const unsigned int precision,
                       const bool         scientific,
                       const bool         across) const
    {
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        {
          if (across)
            out << 'C' << i << ':';
          else
            out << "Component " << i << std::endl;
          this->components[i].print(out, precision, scientific, across);
        }
    }



    /**
     * Global function which overloads the default implementation of the C++
     * standard library which uses a temporary object. The function simply
     * exchanges the data of the two vectors.
     */
    inline void
    swap(BlockVector &u, BlockVector &v) noexcept
    {
      u.swap(v);
    }
  } // namespace MPI

} // namespace PETScWrappers

namespace internal
{
  namespace LinearOperatorImplementation
  {
    template <typename>
    class ReinitHelper;

    /**
     * A helper class used internally in linear_operator.h. Specialization for
     * PETScWrappers::MPI::BlockVector.
     */
    template <>
    class ReinitHelper<PETScWrappers::MPI::BlockVector>
    {
    public:
      template <typename Matrix>
      static void
      reinit_range_vector(const Matrix                    &matrix,
                          PETScWrappers::MPI::BlockVector &v,
                          bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_range_indices(),
                 matrix.get_mpi_communicator());
      }

      template <typename Matrix>
      static void
      reinit_domain_vector(const Matrix                    &matrix,
                           PETScWrappers::MPI::BlockVector &v,
                           bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_domain_indices(),
                 matrix.get_mpi_communicator());
      }
    };

  } // namespace LinearOperatorImplementation
} // namespace internal
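
// The ReinitHelper specialization above is what lets the LinearOperator
// machinery create compatible block vectors from a block matrix. An
// illustrative sketch of typical use (assumes <deal.II/lac/linear_operator.h>
// is included and A is an assembled PETScWrappers::MPI::BlockSparseMatrix):
//
// @code
//   using BV = PETScWrappers::MPI::BlockVector;
//
//   const auto op = linear_operator<BV>(A);
//
//   BV x, y;
//   op.reinit_domain_vector(x, /*omit_zeroing_entries=*/false);
//   op.reinit_range_vector(y, /*omit_zeroing_entries=*/false);
//
//   x = 1.0;
//   y = op * x; // matrix-vector product through the operator
// @endcode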


/**
 * Declare dealii::PETScWrappers::MPI::BlockVector as distributed vector.
 */
template <>
struct is_serial_vector<PETScWrappers::MPI::BlockVector> : std::false_type
{};


DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC

#endif