Reference documentation for deal.II version 8.5.1
tria_base.cc
// ---------------------------------------------------------------------
//
// Copyright (C) 2015 - 2017 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------


#include <deal.II/base/utilities.h>
#include <deal.II/base/memory_consumption.h>
#include <deal.II/base/logstream.h>
#include <deal.II/lac/sparsity_tools.h>
#include <deal.II/lac/sparsity_pattern.h>
#include <deal.II/lac/vector_memory.h>
#include <deal.II/grid/tria.h>
#include <deal.II/grid/tria_accessor.h>
#include <deal.II/grid/tria_iterator.h>
#include <deal.II/grid/grid_tools.h>
#include <deal.II/distributed/tria_base.h>


#include <algorithm>
#include <numeric>
#include <iostream>
#include <fstream>


DEAL_II_NAMESPACE_OPEN

namespace parallel
{
  template <int dim, int spacedim>
  Triangulation<dim,spacedim>::Triangulation (MPI_Comm mpi_communicator,
                                              const typename dealii::Triangulation<dim,spacedim>::MeshSmoothing smooth_grid,
                                              const bool check_for_distorted_cells)
    :
    dealii::Triangulation<dim,spacedim>(smooth_grid,check_for_distorted_cells),
    mpi_communicator (mpi_communicator),
    my_subdomain (Utilities::MPI::this_mpi_process (this->mpi_communicator)),
    n_subdomains(Utilities::MPI::n_mpi_processes (this->mpi_communicator))
  {
#ifndef DEAL_II_WITH_MPI
    Assert(false, ExcMessage("You compiled deal.II without MPI support, for "
                             "which parallel::Triangulation is not available."));
#endif
    number_cache.n_locally_owned_active_cells.resize (n_subdomains);
  }


  template <int dim, int spacedim>
  void
  Triangulation<dim,spacedim>::
  copy_triangulation (const dealii::Triangulation<dim, spacedim> &other_tria)
  {
#ifndef DEAL_II_WITH_MPI
    (void)other_tria;
    Assert(false, ExcMessage("You compiled deal.II without MPI support, for "
                             "which parallel::Triangulation is not available."));
#else
    dealii::Triangulation<dim,spacedim>::copy_triangulation (other_tria);

    if (const dealii::parallel::Triangulation<dim,spacedim> *
        other_tria_x = dynamic_cast<const dealii::parallel::Triangulation<dim,spacedim> *>(&other_tria))
      {
        mpi_communicator = other_tria_x->get_communicator();

        // release unused vector memory because we will have very different
        // vectors now
        dealii::internal::GrowingVectorMemory::release_all_unused_memory();
      }
#endif
  }


  template <int dim, int spacedim>
  std::size_t
  Triangulation<dim,spacedim>::memory_consumption () const
  {
    std::size_t mem=
      this->dealii::Triangulation<dim,spacedim>::memory_consumption()
      + MemoryConsumption::memory_consumption(mpi_communicator)
      + MemoryConsumption::memory_consumption(my_subdomain)
      + MemoryConsumption::memory_consumption(number_cache.n_locally_owned_active_cells)
      + MemoryConsumption::memory_consumption(number_cache.n_global_active_cells)
      + MemoryConsumption::memory_consumption(number_cache.n_global_levels);
    return mem;
  }

  template <int dim, int spacedim>
  Triangulation<dim,spacedim>::~Triangulation ()
  {
    // release unused vector memory because the vector layout is going to look
    // very different now
    dealii::internal::GrowingVectorMemory::release_all_unused_memory();
  }

  template <int dim, int spacedim>
  Triangulation<dim,spacedim>::NumberCache::NumberCache()
    :
    n_global_active_cells(0),
    n_global_levels(0)
  {}

  template <int dim, int spacedim>
  unsigned int
  Triangulation<dim,spacedim>::n_locally_owned_active_cells () const
  {
    return number_cache.n_locally_owned_active_cells[my_subdomain];
  }

  template <int dim, int spacedim>
  unsigned int
  Triangulation<dim,spacedim>::n_global_levels () const
  {
    return number_cache.n_global_levels;
  }

  template <int dim, int spacedim>
  types::global_dof_index
  Triangulation<dim,spacedim>::n_global_active_cells () const
  {
    return number_cache.n_global_active_cells;
  }

  template <int dim, int spacedim>
  const std::vector<unsigned int> &
  Triangulation<dim,spacedim>::n_locally_owned_active_cells_per_processor () const
  {
    return number_cache.n_locally_owned_active_cells;
  }

  template <int dim, int spacedim>
  MPI_Comm
  Triangulation<dim,spacedim>::get_communicator () const
  {
    return mpi_communicator;
  }

#ifdef DEAL_II_WITH_MPI
  template <int dim, int spacedim>
  void
  Triangulation<dim,spacedim>::update_number_cache ()
  {
    Assert (number_cache.n_locally_owned_active_cells.size()
            ==
            Utilities::MPI::n_mpi_processes (this->mpi_communicator),
            ExcInternalError());

    std::fill (number_cache.n_locally_owned_active_cells.begin(),
               number_cache.n_locally_owned_active_cells.end(),
               0);

    number_cache.ghost_owners.clear ();
    number_cache.level_ghost_owners.clear ();

    if (this->n_levels() == 0)
      {
        // Skip communication done below if we do not have any cells
        // (meaning the Triangulation is empty on all processors). This will
        // happen when called from the destructor of Triangulation, which
        // can get called during exception handling causing a hang in this
        // function.
        number_cache.n_global_active_cells = 0;
        number_cache.n_global_levels = 0;
        return;
      }


    {
      // find ghost owners
      for (typename Triangulation<dim,spacedim>::active_cell_iterator
           cell = this->begin_active();
           cell != this->end();
           ++cell)
        if (cell->is_ghost())
          number_cache.ghost_owners.insert(cell->subdomain_id());

      Assert(number_cache.ghost_owners.size() < Utilities::MPI::n_mpi_processes(mpi_communicator), ExcInternalError());
    }

    if (this->n_levels() > 0)
      for (typename Triangulation<dim,spacedim>::active_cell_iterator
           cell = this->begin_active();
           cell != this->end(); ++cell)
        if (cell->subdomain_id() == my_subdomain)
          ++number_cache.n_locally_owned_active_cells[my_subdomain];

    unsigned int send_value
      = number_cache.n_locally_owned_active_cells[my_subdomain];
    const int ierr = MPI_Allgather (&send_value,
                                    1,
                                    MPI_UNSIGNED,
                                    &number_cache.n_locally_owned_active_cells[0],
                                    1,
                                    MPI_UNSIGNED,
                                    this->mpi_communicator);
    AssertThrowMPI(ierr);

    number_cache.n_global_active_cells
      = std::accumulate (number_cache.n_locally_owned_active_cells.begin(),
                         number_cache.n_locally_owned_active_cells.end(),
                         /* ensure sum is computed with correct data type:*/
                         static_cast<types::global_dof_index>(0));
    number_cache.n_global_levels = Utilities::MPI::max(this->n_levels(), this->mpi_communicator);
  }
#else
  template <int dim, int spacedim>
  void
  Triangulation<dim,spacedim>::update_number_cache ()
  {
    Assert (false, ExcNotImplemented());
  }

#endif

  template <int dim, int spacedim>
  types::subdomain_id
  Triangulation<dim,spacedim>::locally_owned_subdomain () const
  {
    return my_subdomain;
  }

  template <int dim, int spacedim>
  const std::set<types::subdomain_id> &
  Triangulation<dim,spacedim>::ghost_owners () const
  {
    return number_cache.ghost_owners;
  }

  template <int dim, int spacedim>
  const std::set<types::subdomain_id> &
  Triangulation<dim,spacedim>::level_ghost_owners () const
  {
    return number_cache.level_ghost_owners;
  }

} // end namespace parallel




/*-------------- Explicit Instantiations -------------------------------*/
#include "tria_base.inst"

DEAL_II_NAMESPACE_CLOSE
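
The file above only implements the common base class; in user code one normally works with a derived class such as parallel::distributed::Triangulation, which inherits the accessors defined here. The following is a minimal usage sketch, not part of tria_base.cc: it assumes a deal.II 8.5 installation configured with MPI (and p4est for the distributed triangulation), and the hyper_cube mesh, the refinement depth, and the variable names are purely illustrative.

#include <deal.II/base/mpi.h>
#include <deal.II/distributed/tria.h>
#include <deal.II/grid/grid_generator.h>

#include <iostream>

int main (int argc, char *argv[])
{
  // initialize MPI (and finalize it automatically at the end of main)
  dealii::Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv, 1);

  // parallel::distributed::Triangulation derives from parallel::Triangulation,
  // so the member functions implemented in tria_base.cc are available on it
  dealii::parallel::distributed::Triangulation<2> triangulation (MPI_COMM_WORLD);
  dealii::GridGenerator::hyper_cube (triangulation);
  triangulation.refine_global (4);

  // query the information cached by update_number_cache()
  std::cout << "rank "  << triangulation.locally_owned_subdomain()
            << " owns " << triangulation.n_locally_owned_active_cells()
            << " of "   << triangulation.n_global_active_cells()
            << " active cells; number of ghost neighbors: "
            << triangulation.ghost_owners().size()
            << std::endl;

  return 0;
}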
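
For reference, here is a stand-alone sketch of the communication pattern that update_number_cache() uses to compute the global active cell count: each rank contributes one unsigned int via MPI_Allgather, and every rank then sums the gathered vector into a wider integer type (the role played by types::global_dof_index above). This is plain MPI, not deal.II code, and all names in it are illustrative.

#include <mpi.h>

#include <cstdint>
#include <iostream>
#include <numeric>
#include <vector>

int main (int argc, char *argv[])
{
  MPI_Init (&argc, &argv);

  int n_ranks, my_rank;
  MPI_Comm_size (MPI_COMM_WORLD, &n_ranks);
  MPI_Comm_rank (MPI_COMM_WORLD, &my_rank);

  // stand-in for the number of locally owned active cells on this rank
  const unsigned int n_local = 100 + 10 * my_rank;

  // gather one value per rank, so every rank ends up with the full vector
  std::vector<unsigned int> n_per_rank (n_ranks, 0);
  MPI_Allgather (&n_local, 1, MPI_UNSIGNED,
                 &n_per_rank[0], 1, MPI_UNSIGNED,
                 MPI_COMM_WORLD);

  // accumulate with a 64-bit type so that large meshes do not overflow,
  // mirroring the static_cast<types::global_dof_index>(0) in the file above
  const std::uint64_t n_global =
    std::accumulate (n_per_rank.begin(), n_per_rank.end(),
                     static_cast<std::uint64_t>(0));

  if (my_rank == 0)
    std::cout << "global count: " << n_global << std::endl;

  MPI_Finalize ();
  return 0;
}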