// ------------------------------------------------------------------------
//
// SPDX-License-Identifier: LGPL-2.1-or-later
// Copyright (C) 2004 - 2024 by the deal.II authors
//
// This file is part of the deal.II library.
//
// Part of the source code is dual licensed under Apache-2.0 WITH
// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
// governing the source code and code contributions can be found in
// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
//
// ------------------------------------------------------------------------


#include <deal.II/lac/petsc_solver.h>

#ifdef DEAL_II_WITH_PETSC

#  include <deal.II/lac/exceptions.h>
#  include <deal.II/lac/petsc_compatibility.h>
#  include <deal.II/lac/petsc_matrix_base.h>
#  include <deal.II/lac/petsc_precondition.h>
#  include <deal.II/lac/petsc_vector_base.h>

// Shorthand notation for PETSc error codes.
#  define AssertPETSc(code)                          \
    do                                               \
      {                                              \
        PetscErrorCode ierr = (code);                \
        AssertThrow(ierr == 0, ExcPETScError(ierr)); \
      }                                              \
    while (false)
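
// For example, a call such as
//
//   AssertPETSc(KSPSetFromOptions(ksp));
//
// throws an ExcPETScError exception if PETSc returns a nonzero error code,
// instead of silently ignoring the failure.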

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  SolverBase::SolverBase()
    : ksp(nullptr)
    , solver_control(nullptr)
  {}



  SolverBase::SolverBase(SolverControl &cn)
    : ksp(nullptr)
    , solver_control(&cn)
  {}


  void
  SolverBase::set_solver_type(KSP &) const
  {}



  SolverBase::~SolverBase()
  {
    AssertPETSc(KSPDestroy(&ksp));
  }


  KSP
  SolverBase::petsc_ksp()
  {
    return ksp;
  }



  SolverBase::operator KSP() const
  {
    return ksp;
  }


  void
  SolverBase::solve(const MatrixBase       &A,
                    VectorBase             &x,
                    const VectorBase       &b,
                    const PreconditionBase &preconditioner)
  {
    // first create a solver object if this
    // is necessary
    if (ksp == nullptr)
      {
        initialize_ksp_with_comm(A.get_mpi_communicator());

        // let derived classes set the solver
        // type, and the preconditioning
        // object set the type of
        // preconditioner
        set_solver_type(ksp);

        AssertPETSc(KSPSetPC(ksp, preconditioner.get_pc()));

        /*
         * by default we set up the preconditioner only once.
         * this can be overridden by command line.
         */
        AssertPETSc(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
      }

    // setting the preconditioner overwrites the used matrices.
    // hence, we need to set the matrices after the preconditioner.
    Mat B;
    AssertPETSc(KSPGetOperators(ksp, nullptr, &B));
    AssertPETSc(KSPSetOperators(ksp, A, B));

    // set the command line option prefix name
    AssertPETSc(KSPSetOptionsPrefix(ksp, prefix_name.c_str()));

    // set the command line options provided
    // by the user to override the defaults
    AssertPETSc(KSPSetFromOptions(ksp));

    // then do the real work: set up solver
    // internal data and solve the
    // system.
    AssertPETSc(KSPSetUp(ksp));

    AssertPETSc(KSPSolve(ksp, b, x));

    // in case of failure: throw
    // exception
    if (solver_control &&
        solver_control->last_check() != SolverControl::success)
      AssertThrow(false,
                  SolverControl::NoConvergence(solver_control->last_step(),
                                               solver_control->last_value()));
    // otherwise exit as normal
  }
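
  /*
   * Usage sketch: user code normally calls this function through one of the
   * derived solver classes. Assuming a PETScWrappers matrix A and vectors x
   * and b that have already been set up by the caller, a typical call
   * sequence looks like
   *
   *   SolverControl                     control(1000, 1e-10);
   *   PETScWrappers::SolverCG           solver(control);
   *   PETScWrappers::PreconditionJacobi preconditioner(A);
   *   solver.solve(A, x, b, preconditioner);
   *
   * where A, x, and b stand for objects provided by the calling program.
   */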


  void
  SolverBase::set_prefix(const std::string &prefix)
  {
    prefix_name = prefix;
  }
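
  /*
   * As an illustration, assuming a solver object named "solver": calling
   *
   *   solver.set_prefix("stokes_");
   *
   * before solve() lets the solver be reconfigured at run time through
   * prefixed PETSc command line options such as
   *
   *   -stokes_ksp_type gmres -stokes_ksp_rtol 1e-8
   *
   * because solve() hands the prefix to KSPSetOptionsPrefix() and then calls
   * KSPSetFromOptions().
   */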


  void
  SolverBase::reset()
  {
    AssertPETSc(KSPDestroy(&ksp));
  }


  SolverControl &
  SolverBase::control() const
  {
    AssertThrow(
      solver_control,
      ExcMessage(
        "You need to create the solver with a SolverControl object if you want to call the function that returns it."));
    return *solver_control;
  }


  PetscErrorCode
  SolverBase::convergence_test(KSP /*ksp*/,
                               const PetscInt      iteration,
                               const PetscReal     residual_norm,
                               KSPConvergedReason *reason,
                               void               *solver_control_x)
  {
    PetscFunctionBeginUser;
    SolverControl &solver_control =
      *reinterpret_cast<SolverControl *>(solver_control_x);

    const SolverControl::State state =
      solver_control.check(iteration, residual_norm);

    switch (state)
      {
        case ::dealii::SolverControl::iterate:
          *reason = KSP_CONVERGED_ITERATING;
          break;

        case ::dealii::SolverControl::success:
          *reason = KSP_CONVERGED_RTOL;
          break;

        case ::dealii::SolverControl::failure:
          if (solver_control.last_step() > solver_control.max_steps())
            *reason = KSP_DIVERGED_ITS;
          else
            *reason = KSP_DIVERGED_DTOL;
          break;

        default:
          DEAL_II_NOT_IMPLEMENTED();
      }

    PetscFunctionReturn(PETSC_SUCCESS);
  }



  void
  SolverBase::initialize_ksp_with_comm(const MPI_Comm comm)
  {
    // Create the PETSc KSP object
    AssertPETSc(KSPCreate(comm, &ksp));

    // then a convergence monitor
    // function that simply
    // checks with the solver_control
    // object we have in this object for
    // convergence
    perhaps_set_convergence_test();
  }


  void
  SolverBase::perhaps_set_convergence_test() const
  {
    if (ksp && solver_control)
      AssertPETSc(
        KSPSetConvergenceTest(ksp, &convergence_test, solver_control, nullptr));
  }
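
  /*
   * With this convergence test registered, PETSc hands every iteration
   * number and residual norm to SolverControl::check(). For instance, a
   * control object created as
   *
   *   SolverControl control(200, 1e-12);
   *
   * lets the KSP report convergence once the residual norm drops below
   * 1e-12, and divergence once more than 200 iterations have been taken,
   * following the mapping in convergence_test() above.
   */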


  void
  SolverBase::initialize(const PreconditionBase &preconditioner)
  {
    initialize_ksp_with_comm(preconditioner.get_mpi_communicator());

    // let derived classes set the solver
    // type, and the preconditioning
    // object set the type of
    // preconditioner
    set_solver_type(ksp);

    // set the command line options provided
    // by the user to override the defaults
    AssertPETSc(KSPSetFromOptions(ksp));
  }


  /* ---------------------- SolverRichardson ------------------------ */

  SolverRichardson::AdditionalData::AdditionalData(const double omega)
    : omega(omega)
  {}



  SolverRichardson::SolverRichardson(SolverControl        &cn,
                                     const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}



  SolverRichardson::SolverRichardson(SolverControl &cn,
                                     const MPI_Comm,
                                     const AdditionalData &data)
    : SolverRichardson(cn, data)
  {}


  void
  SolverRichardson::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPRICHARDSON));

    // set the damping factor from the data
    AssertPETSc(KSPRichardsonSetScale(ksp, additional_data.omega));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));

    // Hand over the absolute
    // tolerance and the maximum
    // iteration number to the PETSc
    // convergence criterion. The
    // custom deal.II SolverControl
    // object is ignored by the PETSc
    // Richardson method (when no
    // PETSc monitoring is present),
    // since in this case PETSc
    // uses a faster version of
    // the Richardson iteration,
    // where no residual is
    // available.
    AssertPETSc(KSPSetTolerances(ksp,
                                 PETSC_DEFAULT,
                                 this->solver_control->tolerance(),
                                 PETSC_DEFAULT,
                                 this->solver_control->max_steps() + 1));
  }
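
  /*
   * In other words, a control object constructed as, for example,
   *
   *   SolverControl control(100, 1e-10);
   *
   * translates into a PETSc absolute tolerance of 1e-10 and an iteration
   * limit of 101, while the relative and divergence tolerances keep their
   * PETSC_DEFAULT values.
   */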


  /* ---------------------- SolverChebychev ------------------------ */

  SolverChebychev::SolverChebychev(SolverControl        &cn,
                                   const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverChebychev::SolverChebychev(SolverControl &cn,
                                   const MPI_Comm,
                                   const AdditionalData &data)
    : SolverChebychev(cn, data)
  {}


  void
  SolverChebychev::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPCHEBYSHEV));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverCG ------------------------ */

  SolverCG::SolverCG(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverCG::SolverCG(SolverControl &cn,
                     const MPI_Comm,
                     const AdditionalData &data)
    : SolverCG(cn, data)
  {}


  void
  SolverCG::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPCG));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverBiCG ------------------------ */

  SolverBiCG::SolverBiCG(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverBiCG::SolverBiCG(SolverControl &cn,
                         const MPI_Comm,
                         const AdditionalData &data)
    : SolverBiCG(cn, data)
  {}


  void
  SolverBiCG::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPBICG));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverGMRES ------------------------ */

  SolverGMRES::AdditionalData::AdditionalData(
    const unsigned int restart_parameter,
    const bool         right_preconditioning)
    : restart_parameter(restart_parameter)
    , right_preconditioning(right_preconditioning)
  {}



  SolverGMRES::SolverGMRES(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverGMRES::SolverGMRES(SolverControl &cn,
                           const MPI_Comm,
                           const AdditionalData &data)
    : SolverGMRES(cn, data)
  {}


  void
  SolverGMRES::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPGMRES));

    AssertPETSc(KSPGMRESSetRestart(ksp, additional_data.restart_parameter));

    // Set preconditioning side to right
    if (additional_data.right_preconditioning)
      {
        AssertPETSc(KSPSetPCSide(ksp, PC_RIGHT));
      }

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }
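
  /*
   * Usage sketch: the restart length and the preconditioning side are
   * selected through the AdditionalData object. Assuming a SolverControl
   * object named "control",
   *
   *   PETScWrappers::SolverGMRES::AdditionalData data(50, true);
   *   PETScWrappers::SolverGMRES                 solver(control, data);
   *
   * requests a restart length of 50 and right preconditioning; the defaults
   * are a restart length of 30 and left preconditioning.
   */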


  /* ---------------------- SolverBicgstab ------------------------ */

  SolverBicgstab::SolverBicgstab(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverBicgstab::SolverBicgstab(SolverControl &cn,
                                 const MPI_Comm,
                                 const AdditionalData &data)
    : SolverBicgstab(cn, data)
  {}


  void
  SolverBicgstab::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPBCGS));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverCGS ------------------------ */

  SolverCGS::SolverCGS(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverCGS::SolverCGS(SolverControl &cn,
                       const MPI_Comm,
                       const AdditionalData &data)
    : SolverCGS(cn, data)
  {}


  void
  SolverCGS::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPCGS));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverTFQMR ------------------------ */

  SolverTFQMR::SolverTFQMR(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverTFQMR::SolverTFQMR(SolverControl &cn,
                           const MPI_Comm,
                           const AdditionalData &data)
    : SolverTFQMR(cn, data)
  {}


  void
  SolverTFQMR::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPTFQMR));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverTCQMR ------------------------ */

  SolverTCQMR::SolverTCQMR(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverTCQMR::SolverTCQMR(SolverControl &cn,
                           const MPI_Comm,
                           const AdditionalData &data)
    : SolverTCQMR(cn, data)
  {}


  void
  SolverTCQMR::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPTCQMR));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverCR ------------------------ */

  SolverCR::SolverCR(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverCR::SolverCR(SolverControl &cn,
                     const MPI_Comm,
                     const AdditionalData &data)
    : SolverCR(cn, data)
  {}


  void
  SolverCR::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPCR));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  }


  /* ---------------------- SolverLSQR ------------------------ */

  SolverLSQR::SolverLSQR(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}



  SolverLSQR::SolverLSQR(SolverControl &cn,
                         const MPI_Comm,
                         const AdditionalData &data)
    : SolverLSQR(cn, data)
  {}



  void
  SolverLSQR::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPLSQR));

    // in the deal.II solvers, we always
    // honor the initial guess in the
    // solution vector. do so here as well:
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));

    // The KSPLSQR implementation overwrites the user-defined
    // convergence test at creation (i.e. KSPSetType) time.
    // This is probably a bad design decision in PETSc.
    // Anyway, here we make sure we use our own convergence
    // test.
    perhaps_set_convergence_test();
  }


  /* ---------------------- SolverPreOnly ------------------------ */

  SolverPreOnly::SolverPreOnly(SolverControl &cn, const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
  {}


  SolverPreOnly::SolverPreOnly(SolverControl &cn,
                               const MPI_Comm,
                               const AdditionalData &data)
    : SolverPreOnly(cn, data)
  {}


  void
  SolverPreOnly::set_solver_type(KSP &ksp) const
  {
    AssertPETSc(KSPSetType(ksp, KSPPREONLY));

    // The KSPPREONLY solver of
    // PETSc never calls the convergence
    // monitor, which leads to failure
    // even when everything was ok.
    // Therefore the SolverControl status
    // is set to some nice values, which
    // guarantee a nice result at the end
    // of the solution process.
    solver_control->check(1, 0.0);

    // Using the PREONLY solver with
    // a nonzero initial guess leads
    // PETSc to produce some error messages.
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
  }


  /* ---------------------- SparseDirectMUMPS ------------------------ */

  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl        &cn,
                                       const AdditionalData &data)
    : SolverBase(cn)
    , additional_data(data)
    , symmetric_mode(false)
  {}



  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn,
                                       const MPI_Comm,
                                       const AdditionalData &data)
    : SparseDirectMUMPS(cn, data)
  {}



  void
  SparseDirectMUMPS::set_solver_type(KSP &ksp) const
  {
    /*
     * KSPPREONLY implements a stub method that applies only the
     * preconditioner. Its use is due to SparseDirectMUMPS being a direct
     * (rather than iterative) solver
     */
    AssertPETSc(KSPSetType(ksp, KSPPREONLY));

    /*
     * The KSPPREONLY solver of PETSc never calls the convergence monitor,
     * which leads to failure even when everything was ok. Therefore, the
     * SolverControl status is set to some nice values, which guarantee a
     * nice result at the end of the solution process.
     */
    solver_control->check(1, 0.0);

    /*
     * Using a PREONLY solver with a nonzero initial guess leads PETSc to
     * produce some error messages.
     */
    AssertPETSc(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
  }


  void
  SparseDirectMUMPS::solve(const MatrixBase &A,
                           VectorBase       &x,
                           const VectorBase &b)
  {
#  ifdef DEAL_II_PETSC_WITH_MUMPS
    /*
     * creating a solver object if this is necessary
     */
    if (ksp == nullptr)
      {
        initialize_ksp_with_comm(A.get_mpi_communicator());

        /*
         * setting the solver type
         */
        set_solver_type(ksp);

        /*
         * set the matrices involved. the last argument is irrelevant here,
         * since we use the solver only once anyway
         */
        AssertPETSc(KSPSetOperators(ksp, A, A));

        /*
         * getting the associated preconditioner context
         */
        PC pc;
        AssertPETSc(KSPGetPC(ksp, &pc));

        /*
         * build PETSc PC for particular PCLU or PCCHOLESKY preconditioner
         * depending on whether the symmetric mode has been set
         */
        if (symmetric_mode)
          AssertPETSc(PCSetType(pc, PCCHOLESKY));
        else
          AssertPETSc(PCSetType(pc, PCLU));

        /*
         * set the software that is to be used to perform the lu
         * factorization here we start to see differences with the base
         * class solve function
         */
#  if DEAL_II_PETSC_VERSION_LT(3, 9, 0)
        AssertPETSc(PCFactorSetMatSolverPackage(pc, MATSOLVERMUMPS));
#  else
        AssertPETSc(PCFactorSetMatSolverType(pc, MATSOLVERMUMPS));
#  endif

        /*
         * set up the package to call for the factorization
         */
#  if DEAL_II_PETSC_VERSION_LT(3, 9, 0)
        AssertPETSc(PCFactorSetUpMatSolverPackage(pc));
#  else
        AssertPETSc(PCFactorSetUpMatSolverType(pc));
#  endif

        /*
         * get the factored matrix F from the preconditioner context.
         */
        Mat F;
        AssertPETSc(PCFactorGetMatrix(pc, &F));

        /*
         * pass control parameters to MUMPS.
         * Setting entry 7 of MUMPS ICNTL array to a value
         * of 2. This sets use of Approximate Minimum Fill (AMF)
         */
        AssertPETSc(MatMumpsSetIcntl(F, 7, 2));

        /*
         * by default we set up the preconditioner only once.
         * this can be overridden by command line.
         */
        AssertPETSc(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
      }

    /*
     * set the matrices involved. the last argument is irrelevant here,
     * since we use the solver only once anyway
     */
    AssertPETSc(KSPSetOperators(ksp, A, A));

    /*
     * set the command line option prefix name
     */
    AssertPETSc(KSPSetOptionsPrefix(ksp, prefix_name.c_str()));

    /*
     * set the command line options provided by the user to override
     * the defaults
     */
    AssertPETSc(KSPSetFromOptions(ksp));

    /*
     * solve the linear system
     */
    AssertPETSc(KSPSolve(ksp, b, x));

    /*
     * in case of failure throw exception
     */
    if (solver_control &&
        solver_control->last_check() != SolverControl::success)
      {
        AssertThrow(false,
                    SolverControl::NoConvergence(solver_control->last_step(),
                                                 solver_control->last_value()));
      }

#  else // DEAL_II_PETSC_WITH_MUMPS
    Assert(
      false,
      ExcMessage(
        "Your PETSc installation does not include a copy of "
        "the MUMPS package necessary for this solver. You will need to configure "
        "PETSc so that it includes MUMPS, recompile it, and then re-configure "
        "and recompile deal.II as well."));

    // Cast to void to silence compiler warnings
    (void)A;
    (void)x;
    (void)b;
#  endif
  }


  void
  SparseDirectMUMPS::set_symmetric_mode(const bool matrix_is_symmetric)
  {
    symmetric_mode = matrix_is_symmetric;
  }
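
  /*
   * Usage sketch: the direct solver is driven like the iterative solvers,
   * except that no preconditioner is passed and the symmetric mode can be
   * chosen before the solve. Assuming a PETScWrappers matrix A and vectors x
   * and b provided by the caller,
   *
   *   SolverControl                    control;
   *   PETScWrappers::SparseDirectMUMPS solver(control);
   *   solver.set_symmetric_mode(true); // factor with Cholesky instead of LU
   *   solver.solve(A, x, b);
   */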

} // namespace PETScWrappers


DEAL_II_NAMESPACE_CLOSE

#endif // DEAL_II_WITH_PETSC