DEAL_II_NAMESPACE_OPEN

// Forward declaration of GridGenerator::hyper_cube(), used below to create a
// dummy coarse mesh on ranks that do not own any coarse cells.
namespace GridGenerator
{
  template <int dim, int spacedim>
  void
  hyper_cube(Triangulation<dim, spacedim> &tria,
             const double left, const double right, const bool colorize);
} // namespace GridGenerator

namespace parallel
{
  namespace fullydistributed
  {
    // Constructor: store the communicator and install the default z-order
    // partitioner.
    template <int dim, int spacedim>
    Triangulation<dim, spacedim>::Triangulation(
      const MPI_Comm &mpi_communicator)
      : parallel::DistributedTriangulationBase<dim, spacedim>(mpi_communicator)
      , settings(TriangulationDescription::Settings::default_setting)
      , partitioner([](dealii::Triangulation<dim, spacedim> &tria,
                       const unsigned int n_partitions) {
          GridTools::partition_triangulation_zorder(n_partitions, tria);
        })
      , currently_processing_create_triangulation_for_internal_usage(false)
      , currently_processing_prepare_coarsening_and_refinement_for_internal_usage(
          false)
    {}
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::create_triangulation(
      const TriangulationDescription::Description<dim, spacedim>
        &construction_data)
    {
      // Check that the description has been created for this communicator.
      Assert(construction_data.comm == this->mpi_communicator,
             ExcMessage("MPI communicators do not match!"));

      // Store the settings internally.
      settings = construction_data.settings;

      // Set the mesh smoothing: when a multigrid hierarchy is requested, the
      // level difference at vertices has to be limited.
      if (settings &
          TriangulationDescription::Settings::construct_multigrid_hierarchy)
        this->set_mesh_smoothing(
          static_cast<
            typename dealii::Triangulation<dim, spacedim>::MeshSmoothing>(
            dealii::Triangulation<dim, spacedim>::none |
            Triangulation<dim, spacedim>::limit_level_difference_at_vertices));
      else
        this->set_mesh_smoothing(
          static_cast<
            typename dealii::Triangulation<dim, spacedim>::MeshSmoothing>(
            dealii::Triangulation<dim, spacedim>::none));

      this->set_mesh_smoothing(construction_data.smoothing);

      // Clear the mappings between coarse-cell ids and coarse-cell indices.
      this->coarse_cell_id_to_coarse_cell_index_vector.clear();
      this->coarse_cell_index_to_coarse_cell_id_vector.clear();
      // No locally relevant coarse-grid cells have been provided: create a
      // dummy hypercube and mark it as artificial.
      if (construction_data.coarse_cell_vertices.empty())
        {
          currently_processing_create_triangulation_for_internal_usage = true;
          GridGenerator::hyper_cube(*this, 0, 1, false);
          currently_processing_create_triangulation_for_internal_usage = false;

          auto cell = this->begin();
          cell->set_subdomain_id(dealii::numbers::artificial_subdomain_id);
          cell->set_level_subdomain_id(
            dealii::numbers::artificial_subdomain_id);

          // Dummy mapping between coarse-cell ids and coarse-cell indices.
          this->coarse_cell_id_to_coarse_cell_index_vector.emplace_back(
            numbers::invalid_coarse_cell_id, 0);
          this->coarse_cell_index_to_coarse_cell_id_vector.emplace_back(
            numbers::invalid_coarse_cell_id);
        }
      else
        {
          // 1) store the `coarse-cell index to coarse-cell id` mapping
          this->coarse_cell_index_to_coarse_cell_id_vector =
            construction_data.coarse_cell_index_to_coarse_cell_id;

          // 2) set up the inverse `coarse-cell id to coarse-cell index`
          //    mapping (an std::map keeps the entries sorted by id)
          std::map<types::coarse_cell_id, unsigned int>
            coarse_cell_id_to_coarse_cell_index_vector;
          for (unsigned int i = 0;
               i < construction_data.coarse_cell_index_to_coarse_cell_id.size();
               ++i)
            coarse_cell_id_to_coarse_cell_index_vector
              [construction_data.coarse_cell_index_to_coarse_cell_id[i]] = i;

          for (auto i : coarse_cell_id_to_coarse_cell_index_vector)
            this->coarse_cell_id_to_coarse_cell_index_vector.emplace_back(i);

          // 3) create the locally relevant part of the triangulation
          currently_processing_prepare_coarsening_and_refinement_for_internal_usage =
            true;
          currently_processing_create_triangulation_for_internal_usage = true;

          dealii::Triangulation<dim, spacedim>::create_triangulation(
            construction_data);

          currently_processing_prepare_coarsening_and_refinement_for_internal_usage =
            false;
          currently_processing_create_triangulation_for_internal_usage = false;
          // Sort the cell infos on each level by coarse-cell index and,
          // within a coarse cell, by cell id (work on a copy, since
          // construction_data is const).
          auto cell_infos = construction_data.cell_infos;
          for (auto &cell_info : cell_infos)
            std::sort(cell_info.begin(),
                      cell_info.end(),
                      [&](const auto &a, const auto &b) {
                        const CellId a_id(a.id);
                        const CellId b_id(b.id);

                        const auto a_coarse_cell_index =
                          this->coarse_cell_id_to_coarse_cell_index(
                            a_id.get_coarse_cell_id());
                        const auto b_coarse_cell_index =
                          this->coarse_cell_id_to_coarse_cell_index(
                            b_id.get_coarse_cell_id());

                        if (a_coarse_cell_index != b_coarse_cell_index)
                          return a_coarse_cell_index < b_coarse_cell_index;
                        else
                          return a_id < b_id;
                      });
          // Mark all cells artificial, then set the actual
          // (level_)subdomain_ids from the sorted cell infos.
          for (const auto &cell : this->cell_iterators())
            {
              if (cell->is_active())
                cell->set_subdomain_id(
                  dealii::numbers::artificial_subdomain_id);
              cell->set_level_subdomain_id(
                dealii::numbers::artificial_subdomain_id);
            }

          for (unsigned int level = 0; level < cell_infos.size(); ++level)
            {
              auto cell      = this->begin(level);
              auto cell_info = cell_infos[level].begin();
              for (; cell_info != cell_infos[level].end(); ++cell_info)
                {
                  // advance to the cell with the matching id
                  while (cell_info->id != cell->id().template to_binary<dim>())
                    ++cell;

                  if (cell->is_active())
                    cell->set_subdomain_id(cell_info->subdomain_id);

                  if (settings & TriangulationDescription::Settings::
                                   construct_multigrid_hierarchy)
                    cell->set_level_subdomain_id(cell_info->level_subdomain_id);
                }
            }
        }

      this->update_number_cache();
      this->update_cell_relations();
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::create_triangulation(
      const std::vector<Point<spacedim>>       &vertices,
      const std::vector<dealii::CellData<dim>> &cells,
      const SubCellData                        &subcelldata)
    {
      Assert(
        currently_processing_create_triangulation_for_internal_usage,
        ExcMessage(
          "You have called the method parallel::fullydistributed::Triangulation::create_triangulation() \n"
          "that takes 3 arguments. If you have not called this function directly, \n"
          "it might have been called via a function from the GridGenerator or GridIn \n"
          "namespace. To be able to set up a fully-distributed Triangulation with these \n"
          "utility functions nevertheless, please follow these three steps:\n"
          "  1) call the utility function for a (serial) Triangulation, \n"
          "     a parallel::shared::Triangulation, or a parallel::distributed::Triangulation object,\n"
          "  2) use the functions TriangulationDescription::Utilities::create_description_from_triangulation() \n"
          "     or ::create_description_from_triangulation_in_groups() to create the \n"
          "     description of the local partition, and\n"
          "  3) pass the created description to parallel::fullydistributed::Triangulation::create_triangulation()."));

      dealii::Triangulation<dim, spacedim>::create_triangulation(vertices,
                                                                 cells,
                                                                 subcelldata);
    }
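    // A minimal sketch of the workflow described in the message above
    // (illustrative only; it assumes a unit cube built with
    // GridGenerator::hyper_cube() and a communicator `comm`, but any serial
    // or parallel source triangulation works the same way):
    //
    //   dealii::Triangulation<dim, spacedim> serial_tria;
    //   GridGenerator::hyper_cube(serial_tria);
    //   serial_tria.refine_global(3);
    //
    //   const auto description = TriangulationDescription::Utilities::
    //     create_description_from_triangulation(serial_tria, comm);
    //
    //   parallel::fullydistributed::Triangulation<dim, spacedim> tria(comm);
    //   tria.create_triangulation(description);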
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::copy_triangulation(
      const dealii::Triangulation<dim, spacedim> &other_tria)
    {
      // Pointer to the triangulation for which the construction data will be
      // created (normally the input triangulation, but a serial triangulation
      // first has to be partitioned and copied).
      const dealii::Triangulation<dim, spacedim> *other_tria_ptr = &other_tria;

      dealii::Triangulation<dim, spacedim> serial_tria;
      if (dynamic_cast<const dealii::parallel::TriangulationBase<dim, spacedim>
                         *>(&other_tria) == nullptr)
        {
          serial_tria.copy_triangulation(other_tria);

          // partition the triangulation (and, if requested, its levels)
          this->partitioner(serial_tria,
                            dealii::Utilities::MPI::n_mpi_processes(
                              this->mpi_communicator));

          if (this->is_multilevel_hierarchy_constructed())
            GridTools::partition_multigrid_levels(serial_tria);

          other_tria_ptr = &serial_tria;
        }

      // Create the construction data and use it to set up this triangulation.
      const auto construction_data = TriangulationDescription::Utilities::
        create_description_from_triangulation(*other_tria_ptr,
                                              this->mpi_communicator,
                                              settings);

      this->create_triangulation(construction_data);
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::set_partitioner(
      const std::function<void(dealii::Triangulation<dim, spacedim> &,
                               const unsigned int)>  &partitioner,
      const TriangulationDescription::Settings       &settings)
    {
      this->partitioner = partitioner;
      this->settings    = settings;
    }
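    // A minimal usage sketch (illustrative only, `tria` being an assumed
    // instance of this class): install a custom partitioner before
    // copy_triangulation() is called, here simply the z-order partitioner
    // from GridTools.
    //
    //   tria.set_partitioner(
    //     [](dealii::Triangulation<dim, spacedim> &t, const unsigned int n) {
    //       GridTools::partition_triangulation_zorder(n, t);
    //     },
    //     TriangulationDescription::Settings::default_setting);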
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::set_partitioner(
      const RepartitioningPolicyTools::Base<dim, spacedim> &partitioner,
      const TriangulationDescription::Settings             &settings)
    {
      this->partitioner_distributed = &partitioner;
      this->settings                = settings;
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::repartition()
    {
      // signal that repartitioning is about to happen
      this->signals.pre_distributed_repartition();

      // create the construction data with the help of the distributed
      // partitioner
      const auto construction_data = TriangulationDescription::Utilities::
        create_description_from_triangulation(
          *this,
          this->partitioner_distributed->partition(*this),
          settings);

      // clear the old content and set up the new triangulation
      this->clear();
      this->coarse_cell_id_to_coarse_cell_index_vector.clear();
      this->coarse_cell_index_to_coarse_cell_id_vector.clear();

      this->create_triangulation(construction_data);

      // signal that repartitioning has finished
      this->signals.post_distributed_repartition();
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::execute_coarsening_and_refinement()
    {
      Assert(false, ExcNotImplemented());
    }


    template <int dim, int spacedim>
    bool
    Triangulation<dim, spacedim>::prepare_coarsening_and_refinement()
    {
      Assert(
        currently_processing_prepare_coarsening_and_refinement_for_internal_usage,
        ExcMessage("No coarsening and refinement is supported!"));

      return dealii::Triangulation<dim, spacedim>::
        prepare_coarsening_and_refinement();
    }
    template <int dim, int spacedim>
    std::size_t
    Triangulation<dim, spacedim>::memory_consumption() const
    {
      const std::size_t mem =
        this->dealii::parallel::TriangulationBase<dim, spacedim>::
          memory_consumption() +
        MemoryConsumption::memory_consumption(
          coarse_cell_id_to_coarse_cell_index_vector) +
        MemoryConsumption::memory_consumption(
          coarse_cell_index_to_coarse_cell_id_vector);
      return mem;
    }
    template <int dim, int spacedim>
    bool
    Triangulation<dim, spacedim>::is_multilevel_hierarchy_constructed() const
    {
      return (settings &
              TriangulationDescription::Settings::construct_multigrid_hierarchy);
    }
    template <int dim, int spacedim>
    unsigned int
    Triangulation<dim, spacedim>::coarse_cell_id_to_coarse_cell_index(
      const types::coarse_cell_id coarse_cell_id) const
    {
      // binary search in the (sorted) id-to-index vector
      const auto coarse_cell_index = std::lower_bound(
        coarse_cell_id_to_coarse_cell_index_vector.begin(),
        coarse_cell_id_to_coarse_cell_index_vector.end(),
        coarse_cell_id,
        [](const std::pair<types::coarse_cell_id, unsigned int> &pair,
           const types::coarse_cell_id &val) { return pair.first < val; });
      Assert(coarse_cell_index !=
               coarse_cell_id_to_coarse_cell_index_vector.cend(),
             ExcMessage("Coarse cell index not found!"));
      return coarse_cell_index->second;
    }
    template <int dim, int spacedim>
    types::coarse_cell_id
    Triangulation<dim, spacedim>::coarse_cell_index_to_coarse_cell_id(
      const unsigned int coarse_cell_index) const
    {
      AssertIndexRange(coarse_cell_index,
                       coarse_cell_index_to_coarse_cell_id_vector.size());

      const auto coarse_cell_id =
        coarse_cell_index_to_coarse_cell_id_vector[coarse_cell_index];
      AssertThrow(coarse_cell_id != numbers::invalid_coarse_cell_id,
                  ExcMessage("You are trying to access a dummy cell!"));
      return coarse_cell_id;
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::update_cell_relations()
    {
      // Rebuild the list of locally owned cells and their status.
      this->local_cell_relations.clear();
      this->local_cell_relations.reserve(this->n_locally_owned_active_cells());

      for (const auto &cell : this->active_cell_iterators())
        if (cell->is_locally_owned())
          this->local_cell_relations.emplace_back(
            cell, Triangulation<dim, spacedim>::CELL_PERSIST);
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::save(const std::string &filename) const
    {
#ifdef DEAL_II_WITH_MPI
      AssertThrow(this->cell_attached_data.pack_callbacks_variable.size() == 0,
                  ExcNotImplemented());
      Assert(
        this->cell_attached_data.n_attached_deserialize == 0,
        ExcMessage(
          "Not all SolutionTransfer objects have been deserialized after the last call to load()."));
      Assert(this->n_cells() > 0,
             ExcMessage("Can not save() an empty Triangulation."));

      const unsigned int myrank =
        Utilities::MPI::this_mpi_process(this->mpi_communicator);
      const unsigned int mpisize =
        Utilities::MPI::n_mpi_processes(this->mpi_communicator);

      // Compute the global index of the first locally owned active cell.
      unsigned int n_locally_owned_cells = this->n_locally_owned_active_cells();
      unsigned int global_first_cell     = 0;

      int ierr = MPI_Exscan(&n_locally_owned_cells, &global_first_cell, 1,
                            MPI_UNSIGNED, MPI_SUM, this->mpi_communicator);
      AssertThrowMPI(ierr);

      global_first_cell *= sizeof(unsigned int);

      // Rank 0 writes a small header file with the meta data.
      if (myrank == 0)
        {
          std::string   fname = std::string(filename) + ".info";
          std::ofstream f(fname.c_str());
          f << "version nproc n_attached_fixed_size_objs n_attached_variable_size_objs n_global_active_cells"
            << std::endl
            << 4 << " " << mpisize << " "
            << this->cell_attached_data.pack_callbacks_fixed.size() << " "
            << this->cell_attached_data.pack_callbacks_variable.size() << " "
            << this->n_global_active_cells() << std::endl;
        }

      // Save the data attached through SolutionTransfer and friends.
      this->save_attached_data(global_first_cell,
                               this->n_global_active_cells(),
                               filename);

      // Write the packed triangulation description with MPI I/O: the file
      // starts with one std::uint64_t per rank holding the local buffer size,
      // followed by all buffers concatenated in rank order.
      {
        MPI_Info info;
        int      ierr = MPI_Info_create(&info);
        AssertThrowMPI(ierr);

        const std::string fname_tria = filename + "_triangulation.data";

        MPI_File fh;
        ierr = MPI_File_open(this->mpi_communicator, fname_tria.c_str(),
                             MPI_MODE_CREATE | MPI_MODE_WRONLY, info, &fh);
        AssertThrowMPI(ierr);

        // Delete any previous file contents; the barrier makes sure that no
        // rank starts writing before the truncation has finished.
        ierr = MPI_File_set_size(fh, 0);
        AssertThrowMPI(ierr);
        ierr = MPI_Barrier(this->mpi_communicator);
        AssertThrowMPI(ierr);
        ierr = MPI_Info_free(&info);
        AssertThrowMPI(ierr);

        // Create and pack the construction data of the local partition.
        const auto construction_data = TriangulationDescription::Utilities::
          create_description_from_triangulation(*this,
                                                this->mpi_communicator,
                                                this->settings);

        std::vector<char> buffer;
        dealii::Utilities::pack(construction_data, buffer, false);

        const std::uint64_t buffer_size = buffer.size();
        std::uint64_t       offset      = 0;

        ierr = MPI_Exscan(&buffer_size, &offset, 1,
                          Utilities::MPI::mpi_type_id_for_type<std::uint64_t>,
                          MPI_SUM, this->mpi_communicator);
        AssertThrowMPI(ierr);

        // Write the local buffer size into the header ...
        ierr =
          MPI_File_write_at(fh, myrank * sizeof(std::uint64_t), &buffer_size, 1,
                            Utilities::MPI::mpi_type_id_for_type<std::uint64_t>,
                            MPI_STATUS_IGNORE);
        AssertThrowMPI(ierr);

        // ... and the buffer itself behind the header.
        const std::uint64_t global_position =
          mpisize * sizeof(std::uint64_t) + offset;

        ierr = Utilities::MPI::LargeCount::File_write_at_c(
          fh, global_position, buffer.data(), buffer.size(), MPI_CHAR,
          MPI_STATUS_IGNORE);
        AssertThrowMPI(ierr);

        ierr = MPI_File_close(&fh);
        AssertThrowMPI(ierr);
      }
#else
      (void)filename;
      AssertThrow(false, ExcNeedsMPI());
#endif
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::load(const std::string &filename)
    {
#ifdef DEAL_II_WITH_MPI
      Assert(this->n_cells() == 0,
             ExcMessage("load() only works if the Triangulation is empty!"));

      // Compute the global index of the first locally owned active cell.
      unsigned int n_locally_owned_cells = this->n_locally_owned_active_cells();
      unsigned int global_first_cell     = 0;

      int ierr = MPI_Exscan(&n_locally_owned_cells, &global_first_cell, 1,
                            MPI_UNSIGNED, MPI_SUM, this->mpi_communicator);
      AssertThrowMPI(ierr);

      global_first_cell *= sizeof(unsigned int);

      // Read the meta data written by save().
      unsigned int version, numcpus, attached_count_fixed,
        attached_count_variable, n_global_active_cells;
      {
        std::string   fname = std::string(filename) + ".info";
        std::ifstream f(fname.c_str());
        AssertThrow(f.fail() == false, ExcIO());
        std::string firstline;
        getline(f, firstline); // skip the header line
        f >> version >> numcpus >> attached_count_fixed >>
          attached_count_variable >> n_global_active_cells;
      }

      AssertThrow(version == 4,
                  ExcMessage("Incompatible version found in .info file."));
      Assert(this->n_global_active_cells() == n_global_active_cells,
             ExcMessage("Number of global active cells differ!"));

      // Read the packed triangulation description with MPI I/O and use it to
      // create the triangulation.
      {
        const unsigned int myrank =
          Utilities::MPI::this_mpi_process(this->mpi_communicator);
        const unsigned int mpisize =
          Utilities::MPI::n_mpi_processes(this->mpi_communicator);
        AssertDimension(numcpus, mpisize);

        MPI_Info info;
        int      ierr = MPI_Info_create(&info);
        AssertThrowMPI(ierr);

        const std::string fname_tria = filename + "_triangulation.data";

        MPI_File fh;
        ierr = MPI_File_open(this->mpi_communicator, fname_tria.c_str(),
                             MPI_MODE_RDONLY, info, &fh);
        AssertThrowMPI(ierr);

        ierr = MPI_Info_free(&info);
        AssertThrowMPI(ierr);

        // Read the size of the local buffer from the header ...
        std::uint64_t buffer_size;

        ierr =
          MPI_File_read_at(fh, myrank * sizeof(std::uint64_t), &buffer_size, 1,
                           Utilities::MPI::mpi_type_id_for_type<std::uint64_t>,
                           MPI_STATUS_IGNORE);
        AssertThrowMPI(ierr);

        // ... compute where the local buffer starts ...
        std::uint64_t offset = 0;

        ierr = MPI_Exscan(&buffer_size, &offset, 1,
                          Utilities::MPI::mpi_type_id_for_type<std::uint64_t>,
                          MPI_SUM, this->mpi_communicator);
        AssertThrowMPI(ierr);

        const std::uint64_t global_position =
          mpisize * sizeof(std::uint64_t) + offset;

        // ... and read the buffer itself.
        std::vector<char> buffer(buffer_size);

        ierr = Utilities::MPI::LargeCount::File_read_at_c(
          fh, global_position, buffer.data(), buffer.size(), MPI_CHAR,
          MPI_STATUS_IGNORE);
        AssertThrowMPI(ierr);

        ierr = MPI_File_close(&fh);
        AssertThrowMPI(ierr);

        auto construction_data = dealii::Utilities::template unpack<
          TriangulationDescription::Description<dim, spacedim>>(buffer, false);

        construction_data.comm = this->mpi_communicator;

        this->create_triangulation(construction_data);
      }

      // Finally, load the attached data and bring the caches up to date.
      this->cell_attached_data.n_attached_data_sets = 0;
      this->cell_attached_data.n_attached_deserialize =
        attached_count_fixed + attached_count_variable;

      this->load_attached_data(global_first_cell,
                               this->n_global_active_cells(),
                               this->n_locally_owned_active_cells(),
                               filename,
                               attached_count_fixed,
                               attached_count_variable);

      this->update_cell_relations();
      this->update_periodic_face_map();
      this->update_number_cache();
#else
      (void)filename;
      AssertThrow(false, ExcNeedsMPI());
#endif
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::load(const std::string &filename,
                                       const bool         autopartition)
    {
      (void)autopartition;
      this->load(filename);
    }
    template <int dim, int spacedim>
    void
    Triangulation<dim, spacedim>::update_number_cache()
    {
      dealii::parallel::TriangulationBase<dim, spacedim>::update_number_cache();

      // Determine the number of global coarse cells: the maximum coarse-cell
      // id over all non-artificial cells and all ranks, plus one.
      types::coarse_cell_id number_of_global_coarse_cells = 0;

      for (const auto &cell : this->active_cell_iterators())
        if (!cell->is_artificial())
          number_of_global_coarse_cells =
            std::max(number_of_global_coarse_cells,
                     cell->id().get_coarse_cell_id());

      number_of_global_coarse_cells =
        Utilities::MPI::max(number_of_global_coarse_cells,
                            this->mpi_communicator) +
        1;

      this->number_cache.number_of_global_coarse_cells =
        number_of_global_coarse_cells;
    }
  } // namespace fullydistributed
} // namespace parallel



/*-------------- Explicit Instantiations -------------------------------*/
#include "fully_distributed_tria.inst"


DEAL_II_NAMESPACE_CLOSE