#ifndef dealii_petsc_block_vector_h
#define dealii_petsc_block_vector_h

#ifdef DEAL_II_WITH_PETSC
    /**
     * Constructor. Generate a block vector with @p n_blocks blocks, each of
     * which is a parallel vector across @p communicator with @p block_size
     * elements, of which @p locally_owned_size elements are stored on the
     * present process.
     */
    explicit BlockVector(const unsigned int n_blocks,
                         const MPI_Comm     communicator,
                         const size_type    block_size,
                         const size_type    locally_owned_size);

    /**
     * Constructor. Set the number of blocks to <tt>block_sizes.size()</tt>
     * and initialize block i with <tt>block_sizes[i]</tt> elements, of which
     * <tt>local_elements[i]</tt> are stored on the present process.
     */
    BlockVector(const std::vector<size_type> &block_sizes,
                const MPI_Comm                communicator,
                const std::vector<size_type> &local_elements);

    /**
     * Create a BlockVector with as many blocks as there are entries in
     * @p parallel_partitioning, each initialized with the given IndexSet.
     */
    explicit BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                         const MPI_Comm communicator = MPI_COMM_WORLD);

    /**
     * Same as above, but include ghost elements.
     */
    BlockVector(const std::vector<IndexSet> &parallel_partitioning,
                const std::vector<IndexSet> &ghost_indices,
                const MPI_Comm               communicator);

    /**
     * Create a BlockVector from an array of PETSc Vec objects.
     */
    template <size_t num_blocks>
    explicit BlockVector(const std::array<Vec, num_blocks> &);
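
    /**
     * Usage sketch (the names <code>owned_u</code> and <code>owned_p</code>
     * are illustrative, not part of this interface): given one IndexSet per
     * block that describes the locally owned elements, a parallel block
     * vector can be created directly from that partitioning.
     * @code
     * std::vector<IndexSet> owned_per_block = {owned_u, owned_p};
     * PETScWrappers::MPI::BlockVector x(owned_per_block, MPI_COMM_WORLD);
     * @endcode
     */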
    /**
     * Reinitialize the vector to contain @p n_blocks blocks of size
     * @p block_size, each storing @p locally_owned_size elements on the
     * present process.
     */
    void
    reinit(const unsigned int n_blocks,
           const MPI_Comm     communicator,
           const size_type    block_size,
           const size_type    locally_owned_size,
           const bool         omit_zeroing_entries = false);

    /**
     * Reinitialize to <tt>block_sizes.size()</tt> blocks, with block i of
     * size <tt>block_sizes[i]</tt>, of which <tt>locally_owned_sizes[i]</tt>
     * elements are stored on the present process.
     */
    void
    reinit(const std::vector<size_type> &block_sizes,
           const MPI_Comm                communicator,
           const std::vector<size_type> &locally_owned_sizes,
           const bool                    omit_zeroing_entries = false);

    /**
     * Change the dimension to that of the vector @p v. The elements of
     * @p v are not copied.
     */
    void
    reinit(const BlockVector &v, const bool omit_zeroing_entries = false);

    /**
     * Reinitialize using one IndexSet per block.
     */
    void
    reinit(const std::vector<IndexSet> &parallel_partitioning,
           const MPI_Comm               communicator);

    /**
     * Same as above, but include ghost entries.
     */
    void
    reinit(const std::vector<IndexSet> &parallel_partitioning,
           const std::vector<IndexSet> &ghost_entries,
           const MPI_Comm               communicator);

    /**
     * Initialize each block from a Utilities::MPI::Partitioner; if
     * @p make_ghosted is true, the vector is created with ghost entries.
     */
    void
    reinit(
      const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
                &partitioners,
      const bool make_ghosted = true);

    /**
     * Change the number of blocks to @p num_blocks. The individual blocks
     * get initialized with zero size.
     */
    void
    reinit(const unsigned int num_blocks);
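
    /**
     * Usage sketch (illustrative, reusing the index sets from the example
     * above): an existing vector can be given a new block structure in
     * place, with or without ghost entries.
     * @code
     * PETScWrappers::MPI::BlockVector x;
     * x.reinit(owned_per_block, communicator);                   // no ghosts
     * x.reinit(owned_per_block, ghosts_per_block, communicator); // ghosted
     * @endcode
     */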
    /**
     * Conversion operator returning the underlying PETSc nest vector.
     */
    operator const Vec &() const;
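
    /**
     * Usage sketch (illustrative): the conversion operator exposes the
     * underlying PETSc nest vector, which can be handed to native PETSc
     * routines.
     * @code
     * const Vec &petsc_vec = x; // x is a BlockVector
     * PetscReal  norm;
     * VecNorm(petsc_vec, NORM_2, &norm);
     * @endcode
     */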
    /**
     * Print to a stream.
     */
    void
    print(std::ostream      &out,
          const unsigned int precision  = 3,
          const bool         scientific = true,
          const bool         across     = true) const;
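
    /**
     * For example (illustrative), to print each block on its own line with
     * six digits of precision:
     * @code
     * x.print(std::cout, 6, true, false);
     * @endcode
     */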
  inline BlockVector::BlockVector()
    : BlockVectorBase<Vector>()
    , petsc_nest_vector(nullptr)
  {}



  inline BlockVector::BlockVector(const unsigned int n_blocks,
                                  const MPI_Comm     communicator,
                                  const size_type    block_size,
                                  const size_type    locally_owned_size)
    : BlockVector()
  {
    reinit(n_blocks, communicator, block_size, locally_owned_size);
  }



  inline BlockVector::BlockVector(const std::vector<size_type> &block_sizes,
                                  const MPI_Comm                communicator,
                                  const std::vector<size_type> &local_elements)
    : BlockVector()
  {
    reinit(block_sizes, communicator, local_elements, false);
  }


  inline BlockVector::BlockVector(const BlockVector &v)
    : BlockVector()
  {
    this->block_indices = v.block_indices;

    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i] = v.components[i];

    this->collect_sizes();
  }


  inline BlockVector::BlockVector(
    const std::vector<IndexSet> &parallel_partitioning,
    const MPI_Comm               communicator)
    : BlockVector()
  {
    reinit(parallel_partitioning, communicator);
  }


  inline BlockVector::BlockVector(
    const std::vector<IndexSet> &parallel_partitioning,
    const std::vector<IndexSet> &ghost_indices,
    const MPI_Comm               communicator)
    : BlockVector()
  {
    reinit(parallel_partitioning, ghost_indices, communicator);
  }


  template <size_t num_blocks>
  inline BlockVector::BlockVector(const std::array<Vec, num_blocks> &arrayV)
    : BlockVector()
  {
    this->block_indices.reinit(num_blocks, 0);

    this->components.resize(num_blocks);
    for (std::size_t i = 0; i < num_blocks; ++i)
      this->components[i].reinit(arrayV[i]);

    this->collect_sizes();
  }


  inline BlockVector &
  BlockVector::operator=(const BlockVector &v)
  {
    // we only allow assignment to vectors with the same number of blocks
    // or to an empty vector
    Assert(this->n_blocks() == 0 || this->n_blocks() == v.n_blocks(),
           ExcDimensionMismatch(this->n_blocks(), v.n_blocks()));

    if (this->n_blocks() != v.n_blocks())
      this->block_indices = v.block_indices;

    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i] = v.components[i];

    this->collect_sizes();

    return *this;
  }


  inline void
  BlockVector::reinit(const unsigned int n_blocks,
                      const MPI_Comm     communicator,
                      const size_type    block_size,
                      const size_type    locally_owned_size,
                      const bool         omit_zeroing_entries)
  {
    reinit(std::vector<size_type>(n_blocks, block_size),
           communicator,
           std::vector<size_type>(n_blocks, locally_owned_size),
           omit_zeroing_entries);
  }


  inline void
  BlockVector::reinit(const std::vector<size_type> &block_sizes,
                      const MPI_Comm                communicator,
                      const std::vector<size_type> &locally_owned_sizes,
                      const bool                    omit_zeroing_entries)
  {
    // update the number of blocks
    this->block_indices.reinit(block_sizes);

    // initialize each block
    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i].reinit(communicator,
                                 block_sizes[i],
                                 locally_owned_sizes[i],
                                 omit_zeroing_entries);
  }


  inline void
  BlockVector::reinit(const BlockVector &v, const bool omit_zeroing_entries)
  {
    this->block_indices = v.get_block_indices();

    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i].reinit(v.components[i], omit_zeroing_entries);
  }


  inline void
  BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                      const MPI_Comm               communicator)
  {
    // update the number of blocks
    this->block_indices.reinit(parallel_partitioning.size(), 0);

    // initialize each block
    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i].reinit(parallel_partitioning[i], communicator);

    // update block_indices content
    this->collect_sizes();
  }


  inline void
  BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
                      const std::vector<IndexSet> &ghost_entries,
                      const MPI_Comm               communicator)
  {
    AssertDimension(parallel_partitioning.size(), ghost_entries.size());

    // update the number of blocks
    this->block_indices.reinit(parallel_partitioning.size(), 0);

    // initialize each block
    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i].reinit(parallel_partitioning[i],
                                 ghost_entries[i],
                                 communicator);

    // update block_indices content
    this->collect_sizes();
  }


  inline void
  BlockVector::reinit(
    const std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>>
              &partitioners,
    const bool make_ghosted)
  {
    // update the number of blocks
    this->block_indices.reinit(partitioners.size(), 0);

    // initialize each block
    this->components.resize(this->n_blocks());
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      this->components[i].reinit(partitioners[i], make_ghosted);

    // update block_indices content
    this->collect_sizes();
  }
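

  // Usage sketch for the Partitioner-based reinit above (illustrative
  // names; `owned_u`, `ghost_u`, `owned_p`, `ghost_p` are assumed
  // IndexSets): each Partitioner bundles the locally owned and ghost
  // indices of one block.
  //
  // @code
  // std::vector<std::shared_ptr<const Utilities::MPI::Partitioner>> parts;
  // parts.emplace_back(std::make_shared<const Utilities::MPI::Partitioner>(
  //   owned_u, ghost_u, communicator));
  // parts.emplace_back(std::make_shared<const Utilities::MPI::Partitioner>(
  //   owned_p, ghost_p, communicator));
  //
  // PETScWrappers::MPI::BlockVector x;
  // x.reinit(parts, /*make_ghosted=*/true);
  // @endcode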


  inline bool
  BlockVector::has_ghost_elements() const
  {
    bool ghosted = block(0).has_ghost_elements();
#  ifdef DEBUG
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      Assert(block(i).has_ghost_elements() == ghosted, ExcInternalError());
#  endif
    return ghosted;
  }


  inline void
  BlockVector::print(std::ostream      &out,
                     const unsigned int precision,
                     const bool         scientific,
                     const bool         across) const
  {
    for (unsigned int i = 0; i < this->n_blocks(); ++i)
      {
        if (across)
          out << 'C' << i << ':';
        else
          out << "Component " << i << std::endl;
        this->components[i].print(out, precision, scientific, across);
      }
  }


namespace internal
{
  namespace LinearOperatorImplementation
  {
    /**
     * A helper class used internally in linear_operator.h, specialized for
     * PETScWrappers::MPI::BlockVector.
     */
    template <>
    class ReinitHelper<PETScWrappers::MPI::BlockVector>
    {
    public:
      template <typename Matrix>
      static void
      reinit_range_vector(const Matrix                    &matrix,
                          PETScWrappers::MPI::BlockVector &v,
                          bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_range_indices(),
                 matrix.get_mpi_communicator());
      }

      template <typename Matrix>
      static void
      reinit_domain_vector(const Matrix                    &matrix,
                           PETScWrappers::MPI::BlockVector &v,
                           bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_domain_indices(),
                 matrix.get_mpi_communicator());
      }
    };
  } // namespace LinearOperatorImplementation
} // namespace internal
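

// Usage sketch (illustrative; `A` stands for a block matrix such as a
// PETScWrappers::MPI::BlockSparseMatrix): the helpers above size a vector
// compatibly with a matrix, which amounts to
//
// @code
// PETScWrappers::MPI::BlockVector v;
// v.reinit(A.locally_owned_range_indices(), A.get_mpi_communicator());
// @endcode

#endif // DEAL_II_WITH_PETSC

#endif // dealii_petsc_block_vector_h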