Reference documentation for deal.II version GIT relicensing-362-gab68047079 2024-04-11 17:50:02+00:00
petsc_block_vector.h
// ------------------------------------------------------------------------
//
// SPDX-License-Identifier: LGPL-2.1-or-later
// Copyright (C) 2004 - 2023 by the deal.II authors
//
// This file is part of the deal.II library.
//
// Part of the source code is dual licensed under Apache-2.0 WITH
// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
// governing the source code and code contributions can be found in
// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
//
// ------------------------------------------------------------------------

#ifndef dealii_petsc_block_vector_h
#define dealii_petsc_block_vector_h


#include <deal.II/base/config.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/block_indices.h>
#  include <deal.II/lac/block_vector_base.h>
#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_vector.h>
#  include <deal.II/lac/vector_type_traits.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  // forward declaration
  class BlockVector;

  namespace MPI
  {
    class BlockVector : public BlockVectorBase<Vector>
    {
    public:
      using BaseClass = BlockVectorBase<Vector>;

      using BlockType = BaseClass::BlockType;

      using value_type      = BaseClass::value_type;
      using pointer         = BaseClass::pointer;
      using const_pointer   = BaseClass::const_pointer;
      using reference       = BaseClass::reference;
      using const_reference = BaseClass::const_reference;
      using size_type       = BaseClass::size_type;
      using iterator        = BaseClass::iterator;
      using const_iterator  = BaseClass::const_iterator;

      BlockVector();

      explicit BlockVector(const unsigned int n_blocks,
                           const MPI_Comm     communicator,
                           const size_type    block_size,
                           const size_type    locally_owned_size);

      BlockVector(const BlockVector &V);

      BlockVector(const std::vector<size_type> &block_sizes,
                  const MPI_Comm                communicator,
                  const std::vector<size_type> &local_elements);

      explicit BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                           const MPI_Comm communicator = MPI_COMM_WORLD);

      BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                  const std::vector<IndexSet> &ghost_indices,
                  const MPI_Comm               communicator);

      explicit BlockVector(Vec v);

      template <size_t num_blocks>
      explicit BlockVector(const std::array<Vec, num_blocks> &);

      ~BlockVector() override;

      BlockVector &
      operator=(const value_type s);

      BlockVector &
      operator=(const BlockVector &V);

      void
      reinit(Vec v);

      void
      reinit(const unsigned int n_blocks,
             const MPI_Comm     communicator,
             const size_type    block_size,
             const size_type    locally_owned_size,
             const bool         omit_zeroing_entries = false);

      void
      reinit(const std::vector<size_type> &block_sizes,
             const MPI_Comm                communicator,
             const std::vector<size_type> &locally_owned_sizes,
             const bool                    omit_zeroing_entries = false);

      void
      reinit(const BlockVector &V, const bool omit_zeroing_entries = false);

      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const MPI_Comm               communicator);

      void
      reinit(const std::vector<IndexSet> &parallel_partitioning,
             const std::vector<IndexSet> &ghost_entries,
             const MPI_Comm               communicator);

      void
      reinit(
        const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                  &partitioners,
        const bool make_ghosted = true);

      void
      collect_sizes();

      void
      compress(VectorOperation::values operation);

      void
      reinit(const unsigned int num_blocks);

      bool
      has_ghost_elements() const;

      MPI_Comm
      get_mpi_communicator() const;

      operator const Vec &() const;

      Vec &
      petsc_vector();

      void
      swap(BlockVector &v);

      void
      print(std::ostream      &out,
            const unsigned int precision  = 3,
            const bool         scientific = true,
            const bool         across     = true) const;

      DeclException0(ExcIteratorRangeDoesNotMatchVectorSize);

      DeclException0(ExcNonMatchingBlockVectors);

    private:
      Vec petsc_nest_vector;

      void
      setup_nest_vec();
    };

    /*--------------------- Inline functions --------------------------------*/


    inline BlockVector::BlockVector()
      : BlockVectorBase<Vector>()
      , petsc_nest_vector(nullptr)
    {}


    inline BlockVector::BlockVector(const unsigned int n_blocks,
                                    const MPI_Comm     communicator,
                                    const size_type    block_size,
                                    const size_type    locally_owned_size)
      : BlockVector()
    {
      reinit(n_blocks, communicator, block_size, locally_owned_size);
    }


    inline BlockVector::BlockVector(
      const std::vector<size_type> &block_sizes,
      const MPI_Comm                communicator,
      const std::vector<size_type> &local_elements)
      : BlockVector()
    {
      reinit(block_sizes, communicator, local_elements, false);
    }


    inline BlockVector::BlockVector(const BlockVector &v)
      : BlockVector()
    {
      this->block_indices = v.get_block_indices();

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i] = v.components[i];

      this->collect_sizes();
    }


    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, communicator);
    }


    inline BlockVector::BlockVector(
      const std::vector<IndexSet> &parallel_partitioning,
      const std::vector<IndexSet> &ghost_indices,
      const MPI_Comm               communicator)
      : BlockVector()
    {
      reinit(parallel_partitioning, ghost_indices, communicator);
    }


    inline BlockVector::BlockVector(Vec v)
      : BlockVector()
    {
      this->reinit(v);
    }


    template <size_t num_blocks>
    inline BlockVector::BlockVector(const std::array<Vec, num_blocks> &arrayV)
      : BlockVector()
    {
      this->block_indices.reinit(num_blocks, 0);

      this->components.resize(num_blocks);
      for (auto i = 0; i < num_blocks; ++i)
        this->components[i].reinit(arrayV[i]);
      this->collect_sizes();
    }


    inline BlockVector &
    BlockVector::operator=(const value_type s)
    {
      BaseClass::operator=(s);
      return *this;
    }


    inline BlockVector &
    BlockVector::operator=(const BlockVector &v)
    {
      // we only allow assignment to vectors with the same number of blocks
      // or to an empty BlockVector
      Assert(this->n_blocks() == 0 || this->n_blocks() == v.n_blocks(),
             ExcDimensionMismatch(this->n_blocks(), v.n_blocks()));

      if (this->n_blocks() != v.n_blocks())
        this->block_indices = v.get_block_indices();

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i] = v.components[i];

      this->collect_sizes();

      return *this;
    }


    inline void
    BlockVector::reinit(const unsigned int n_blocks,
                        const MPI_Comm     communicator,
                        const size_type    block_size,
                        const size_type    locally_owned_size,
                        const bool         omit_zeroing_entries)
    {
      reinit(std::vector<size_type>(n_blocks, block_size),
             communicator,
             std::vector<size_type>(n_blocks, locally_owned_size),
             omit_zeroing_entries);
    }


    inline void
    BlockVector::reinit(const std::vector<size_type> &block_sizes,
                        const MPI_Comm                communicator,
                        const std::vector<size_type> &locally_owned_sizes,
                        const bool                    omit_zeroing_entries)
    {
      this->block_indices.reinit(block_sizes);

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(communicator,
                                   block_sizes[i],
                                   locally_owned_sizes[i],
                                   omit_zeroing_entries);

      this->collect_sizes();
    }

    inline void
    BlockVector::reinit(const BlockVector &v, const bool omit_zeroing_entries)
    {
      if (this->n_blocks() != v.n_blocks())
        this->block_indices = v.get_block_indices();

      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(v.components[i], omit_zeroing_entries);

      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const MPI_Comm               communicator)
    {
      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i], communicator);

      // update block_indices content
      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                        const std::vector<IndexSet> &ghost_entries,
                        const MPI_Comm               communicator)
    {
      AssertDimension(parallel_partitioning.size(), ghost_entries.size());

      // update the number of blocks
      this->block_indices.reinit(parallel_partitioning.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(parallel_partitioning[i],
                                   ghost_entries[i],
                                   communicator);

      // update block_indices content
      this->collect_sizes();
    }


    inline void
    BlockVector::reinit(
      const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                &partitioners,
      const bool make_ghosted)
    {
      // update the number of blocks
      this->block_indices.reinit(partitioners.size(), 0);

      // initialize each block
      this->components.resize(this->n_blocks());
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        this->components[i].reinit(partitioners[i], make_ghosted);

      // update block_indices content
      this->collect_sizes();
    }


    inline MPI_Comm
    BlockVector::get_mpi_communicator() const
    {
      return PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_vector));
    }


    inline bool
    BlockVector::has_ghost_elements() const
    {
      bool ghosted = block(0).has_ghost_elements();
#  ifdef DEBUG
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        Assert(block(i).has_ghost_elements() == ghosted, ExcInternalError());
#  endif
      return ghosted;
    }


    inline void
    BlockVector::swap(BlockVector &v)
    {
      std::swap(this->components, v.components);
      std::swap(this->petsc_nest_vector, v.petsc_nest_vector);
      std::swap(this->block_indices, v.block_indices);
    }


    inline void
    BlockVector::print(std::ostream      &out,
                       const unsigned int precision,
                       const bool         scientific,
                       const bool         across) const
    {
      for (unsigned int i = 0; i < this->n_blocks(); ++i)
        {
          if (across)
            out << 'C' << i << ':';
          else
            out << "Component " << i << std::endl;
          this->components[i].print(out, precision, scientific, across);
        }
    }


    inline void
    swap(BlockVector &u, BlockVector &v)
    {
      u.swap(v);
    }
  } // namespace MPI

} // namespace PETScWrappers

namespace internal
{
  namespace LinearOperatorImplementation
  {
    template <typename>
    class ReinitHelper;

    template <>
    class ReinitHelper<PETScWrappers::MPI::BlockVector>
    {
    public:
      template <typename Matrix>
      static void
      reinit_range_vector(const Matrix                    &matrix,
                          PETScWrappers::MPI::BlockVector &v,
                          bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_range_indices(),
                 matrix.get_mpi_communicator());
      }

      template <typename Matrix>
      static void
      reinit_domain_vector(const Matrix                    &matrix,
                           PETScWrappers::MPI::BlockVector &v,
                           bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_domain_indices(),
                 matrix.get_mpi_communicator());
      }
    };

  } // namespace LinearOperatorImplementation
} // namespace internal

template <>
struct is_serial_vector<PETScWrappers::MPI::BlockVector> : std::false_type
{};


DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC

#endif
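
To illustrate how the class declared above is typically used, here is a minimal, hypothetical sketch (not part of the header): it builds a two-block PETScWrappers::MPI::BlockVector from a vector of IndexSet objects, writes into the locally owned rows of each block, and finalizes the data exchange with compress(). The function name fill_example_vector, the block sizes, and the partitioning lambda are made up for illustration; the sketch assumes deal.II was configured with PETSc and that MPI has already been initialized (for example through Utilities::MPI::MPI_InitFinalize).

#include <deal.II/base/index_set.h>
#include <deal.II/base/mpi.h>
#include <deal.II/lac/petsc_block_vector.h>

#include <vector>

using namespace dealii;

// Hypothetical helper: create and fill a two-block parallel PETSc vector.
void fill_example_vector(const MPI_Comm communicator)
{
  const unsigned int n_procs = Utilities::MPI::n_mpi_processes(communicator);
  const unsigned int rank    = Utilities::MPI::this_mpi_process(communicator);

  // Split [0,size) into contiguous, non-overlapping chunks, one per process.
  const auto partition = [&](const types::global_dof_index size) {
    IndexSet owned(size);
    owned.add_range(size * rank / n_procs, size * (rank + 1) / n_procs);
    return owned;
  };

  // Block 0 has 100 global entries, block 1 has 50 (sizes are arbitrary).
  const std::vector<IndexSet> locally_owned = {partition(100), partition(50)};

  // Construct the block vector from the parallel partitioning, set the
  // locally owned entries of each block, and exchange the data.
  PETScWrappers::MPI::BlockVector v(locally_owned, communicator);
  for (unsigned int b = 0; b < v.n_blocks(); ++b)
    for (const auto i : locally_owned[b])
      v.block(b)[i] = 1.0;
  v.compress(VectorOperation::insert);
}

The ghosted constructor and reinit() overloads that additionally take a std::vector<IndexSet> of ghost indices follow the same pattern, but produce a vector whose blocks also store read-only copies of the listed ghost entries.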