Reference documentation for deal.II version 9.6.0
petsc_block_vector.h
// ------------------------------------------------------------------------
//
// SPDX-License-Identifier: LGPL-2.1-or-later
// Copyright (C) 2004 - 2024 by the deal.II authors
//
// This file is part of the deal.II library.
//
// Part of the source code is dual licensed under Apache-2.0 WITH
// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
// governing the source code and code contributions can be found in
// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
//
// ------------------------------------------------------------------------

#ifndef dealii_petsc_block_vector_h
#define dealii_petsc_block_vector_h


#include <deal.II/base/config.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/block_indices.h>
#  include <deal.II/lac/block_vector_base.h>
#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_vector.h>
#  include <deal.II/lac/vector_type_traits.h>

DEAL_II_NAMESPACE_OPEN
namespace PETScWrappers
{
  // forward declaration
  class BlockVector;

  namespace MPI
  {
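    /*
     * Parallel block vector built on the PETSc-based MPI vector class. The
     * snippet below is a minimal usage sketch (not part of the library
     * documentation); the IndexSet objects `owned_block_0` and
     * `owned_block_1` and the index `i` are placeholder names for a locally
     * owned partition of each block.
     *
     * @code
     * std::vector<IndexSet> owned = {owned_block_0, owned_block_1};
     * PETScWrappers::MPI::BlockVector v(owned, MPI_COMM_WORLD);
     *
     * v.block(0)(i) = 1.0;                 // write a locally owned entry
     * v.compress(VectorOperation::insert); // exchange data between processes
     * @endcode
     */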
    class BlockVector : public BlockVectorBase<Vector>
    {
    public:
      // Typedef the base class for simpler access to its own aliases, and
      // import those aliases.
      using BaseClass = BlockVectorBase<Vector>;
      using BlockType = BaseClass::BlockType;

      using value_type      = BaseClass::value_type;
      using pointer         = BaseClass::pointer;
      using const_pointer   = BaseClass::const_pointer;
      using reference       = BaseClass::reference;
      using const_reference = BaseClass::const_reference;
      using size_type       = BaseClass::size_type;
      using iterator        = BaseClass::iterator;
      using const_iterator  = BaseClass::const_iterator;

      // Default constructor: generate an empty vector without any blocks.
      BlockVector();

      // Generate a block vector with n_blocks blocks, each a parallel vector
      // across the given communicator with block_size elements, of which
      // locally_owned_size elements are stored on the current process.
      explicit BlockVector(const unsigned int n_blocks,
                           const MPI_Comm     communicator,
                           const size_type    block_size,
                           const size_type    locally_owned_size);

      // Copy constructor.
      BlockVector(const BlockVector &V);

      // One block per entry of block_sizes, with the given locally owned
      // sizes.
      BlockVector(const std::vector<size_type> &block_sizes,
                  const MPI_Comm                communicator,
                  const std::vector<size_type> &local_elements);

      // One block per entry of parallel_partitioning.
      explicit BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                           const MPI_Comm communicator = MPI_COMM_WORLD);

      // Same as above, but including ghost indices.
      BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                  const std::vector<IndexSet> &ghost_indices,
                  const MPI_Comm               communicator);

      // Create a block vector from an existing nested PETSc vector.
      explicit BlockVector(Vec v);

      // Create a block vector from an array of PETSc vectors.
      template <size_t num_blocks>
      explicit BlockVector(const std::array<Vec, num_blocks> &);

      ~BlockVector() override;

      // Fill all components with the scalar value s.
      BlockVector &
      operator=(const value_type s);

      // Copy assignment.
      BlockVector &
      operator=(const BlockVector &V);

      // Reinitialize from an existing nested PETSc vector.
      void
      reinit(Vec v);

      void
      reinit(const unsigned int n_blocks,
             const MPI_Comm     communicator,
             const size_type    block_size,
             const size_type    locally_owned_size,
             const bool         omit_zeroing_entries = false);

      void
      reinit(const std::vector<size_type> &block_sizes,
             const MPI_Comm                communicator,
             const std::vector<size_type> &locally_owned_sizes,
             const bool                    omit_zeroing_entries = false);

      // Copy the layout of V.
      void
      reinit(const BlockVector &V, const bool omit_zeroing_entries = false);

      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const MPI_Comm               communicator);

      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const std::vector<IndexSet> &ghost_entries,
             const MPI_Comm               communicator);

      void
      reinit(
        const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                  &partitioners,
        const bool make_ghosted = true);

      // Collect the sizes of the sub-objects; must be called whenever the
      // sizes of the blocks have changed.
      void
      collect_sizes();

      // Call compress() on all blocks of the vector.
      void
      compress(VectorOperation::values operation);

      // Change the number of blocks; the new blocks have zero size.
      void
      reinit(const unsigned int num_blocks);

      bool
      has_ghost_elements() const;

      MPI_Comm
      get_mpi_communicator() const;

      // Conversion to the underlying nested PETSc Vec.
      operator const Vec &() const;

      Vec &
      petsc_vector();

      void
      swap(BlockVector &v);

      void
      print(std::ostream      &out,
            const unsigned int precision  = 3,
            const bool         scientific = true,
            const bool         across     = true) const;

      // Exception.
      DeclException0(ExcNonMatchingBlockVectors);

    private:
      // A PETSc Vec object that describes the entire block vector.
      Vec petsc_nest_vector;

      // Utility to set up the nested PETSc vector.
      void
      setup_nest_vec();
    };

    /*--------------------- Inline functions --------------------------------*/


    inline BlockVector::BlockVector()
      : BlockVectorBase<Vector>()
      , petsc_nest_vector(nullptr)
    {}


    inline BlockVector::BlockVector(const unsigned int n_blocks,
                                    const MPI_Comm     communicator,
                                    const size_type    block_size,
                                    const size_type    locally_owned_size)
      : BlockVector()
    {
      reinit(n_blocks, communicator, block_size, locally_owned_size);
    }


    inline BlockVector::BlockVector(
      const std::vector<size_type> &block_sizes,
      const MPI_Comm                communicator,
      const std::vector<size_type> &local_elements)
      : BlockVector()
    {
      reinit(block_sizes, communicator, local_elements, false);
    }


    inline BlockVector::BlockVector(const BlockVector &v)
      : BlockVector()
    {
      this->block_indices = v.get_block_indices();

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i] = v.components[i];

      this->collect_sizes();
    }


    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, communicator);
    }



    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const std::vector<IndexSet> &ghost_indices,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, ghost_indices, communicator);
    }


    inline BlockVector::BlockVector(Vec v)
      : BlockVector()
    {
      this->reinit(v);
    }


    template <size_t num_blocks>
    inline BlockVector::BlockVector(const std::array<Vec, num_blocks> &arrayV)
      : BlockVector()
    {
      this->block_indices.reinit(num_blocks, 0);

      this->components.resize(num_blocks);
      for (auto i = 0; i < num_blocks; ++i)
        this->components[i].reinit(arrayV[i]);
      this->collect_sizes();
    }


    inline BlockVector &
    BlockVector::operator=(const value_type s)
    {
      BaseClass::operator=(s);
      return *this;
    }
476
477
478 inline BlockVector &
480 {
481 // we only allow assignment to vectors with the same number of blocks
482 // or to an empty BlockVector
483 Assert(this->n_blocks() == 0 || this->n_blocks() == v.n_blocks(),
485
486 if (this->n_blocks() != v.n_blocks())
488
489 this->components.resize(this->n_blocks());
490 for (unsigned int i = 0; i < this->n_blocks(); ++i)
491 this->components[i] = v.components[i];
492
493 this->collect_sizes();
494
495 return *this;
496 }
497


    inline void
    BlockVector::reinit(const unsigned int n_blocks,
                        const MPI_Comm     communicator,
                        const size_type    block_size,
                        const size_type    locally_owned_size,
                        const bool         omit_zeroing_entries)
    {
      reinit(std::vector<size_type>(n_blocks, block_size),
             communicator,
             std::vector<size_type>(n_blocks, locally_owned_size),
             omit_zeroing_entries);
    }


    inline void
    BlockVector::reinit(const std::vector<size_type> &block_sizes,
                        const MPI_Comm                communicator,
                        const std::vector<size_type> &locally_owned_sizes,
                        const bool                    omit_zeroing_entries)
    {
      this->block_indices.reinit(block_sizes);

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(communicator,
                                   block_sizes[i],
                                   locally_owned_sizes[i],
                                   omit_zeroing_entries);

      this->collect_sizes();
    }

    inline void
    BlockVector::reinit(const BlockVector &v, const bool omit_zeroing_entries)
    {
      if (this->n_blocks() != v.n_blocks())
        this->block_indices = v.get_block_indices();

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(v.components[i], omit_zeroing_entries);

      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const MPI_Comm               communicator)
    {
      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i], communicator);

      // update block_indices content
      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const std::vector<IndexSet> &ghost_entries,
                        const MPI_Comm               communicator)
    {
      AssertDimension(parallel_partitioning.size(), ghost_entries.size());

      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i],
                                   ghost_entries[i],
                                   communicator);

      // update block_indices content
      this->collect_sizes();
    }
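
    /*
     * A minimal sketch (not part of the library sources) of how the ghosted
     * reinit() above is typically used: `owned_0`, `owned_1`, `relevant_0`
     * and `relevant_1` are placeholder names for per-block IndexSet objects
     * describing the locally owned and the locally relevant (ghosted) index
     * ranges.
     *
     * @code
     * std::vector<IndexSet> owned    = {owned_0, owned_1};
     * std::vector<IndexSet> relevant = {relevant_0, relevant_1};
     *
     * PETScWrappers::MPI::BlockVector ghosted_v;
     * ghosted_v.reinit(owned, relevant, MPI_COMM_WORLD);
     * // ghosted_v.has_ghost_elements() now returns true.
     * @endcode
     */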


    inline void
    BlockVector::reinit(
      const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                &partitioners,
      const bool make_ghosted)
    {
      // update the number of blocks
      this->block_indices.reinit(partitioners.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(partitioners[i], make_ghosted);

      // update block_indices content
      this->collect_sizes();
    }


    inline MPI_Comm
    BlockVector::get_mpi_communicator() const
    {
      return PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_vector));
    }


    inline bool
    BlockVector::has_ghost_elements() const
    {
      bool ghosted = block(0).has_ghost_elements();
#  ifdef DEBUG
      // in debug mode, check that all blocks agree on whether they are ghosted
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        Assert(block(i).has_ghost_elements() == ghosted, ExcInternalError());
#  endif
      return ghosted;
    }


    inline void
    BlockVector::swap(BlockVector &v)
    {
      std::swap(this->components, v.components);
      std::swap(this->petsc_nest_vector, v.petsc_nest_vector);

      std::swap(this->block_indices, v.block_indices);
    }


    inline void
    BlockVector::print(std::ostream      &out,
                       const unsigned int precision,
                       const bool         scientific,
                       const bool         across) const
    {
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        {
          if (across)
            out << 'C' << i << ':';
          else
            out << "Component " << i << std::endl;
          this->components[i].print(out, precision, scientific, across);
        }
    }


    /**
     * Global function that exchanges the data of two block vectors by
     * calling BlockVector::swap().
     */
    inline void
    swap(BlockVector &u, BlockVector &v)
    {
      u.swap(v);
    }
  } // namespace MPI

} // namespace PETScWrappers

namespace internal
{
  namespace LinearOperatorImplementation
  {
    template <typename>
    class ReinitHelper;

    /**
     * A helper class used internally by the LinearOperator framework to
     * initialize range and domain vectors of type
     * PETScWrappers::MPI::BlockVector from a given matrix.
     */
    template <>
    class ReinitHelper<PETScWrappers::MPI::BlockVector>
    {
    public:
      template <typename Matrix>
      static void
      reinit_range_vector(const Matrix                    &matrix,
                          PETScWrappers::MPI::BlockVector &v,
                          bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_range_indices(),
                 matrix.get_mpi_communicator());
      }

      template <typename Matrix>
      static void
      reinit_domain_vector(const Matrix                    &matrix,
                           PETScWrappers::MPI::BlockVector &v,
                           bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_domain_indices(),
                 matrix.get_mpi_communicator());
      }
    };

  } // namespace LinearOperatorImplementation
} // namespace internal


/**
 * Declare dealii::PETScWrappers::MPI::BlockVector as a distributed
 * (non-serial) vector type.
 */
template <>
struct is_serial_vector<PETScWrappers::MPI::BlockVector> : std::false_type
{};
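
/*
 * A minimal sketch (not part of the library sources) of what the trait
 * specialization above can be used for: generic code can query at compile
 * time whether a vector type is serial or distributed.
 *
 * @code
 * static_assert(is_serial_vector<PETScWrappers::MPI::BlockVector>::value ==
 *                 false,
 *               "PETSc MPI block vectors are distributed vectors");
 * @endcode
 */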

DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC

#endif