LCOV - code coverage report
Current view: top level - src/solvers - petsc_dm_wrapper.C (source / functions)
Test: libMesh/libmesh: #4229 (6a9aeb) with base 727f46
Date: 2025-08-19 19:27:09
Coverage: Lines: 313 of 372 hit (84.1 %) | Functions: 16 of 20 hit (80.0 %)
Legend: Lines: hit | not hit

          Line data    Source code
       1             : // The libMesh Finite Element Library.
       2             : // Copyright (C) 2002-2025 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
       3             : 
       4             : // This library is free software; you can redistribute it and/or
       5             : // modify it under the terms of the GNU Lesser General Public
       6             : // License as published by the Free Software Foundation; either
       7             : // version 2.1 of the License, or (at your option) any later version.
       8             : 
       9             : // This library is distributed in the hope that it will be useful,
      10             : // but WITHOUT ANY WARRANTY; without even the implied warranty of
      11             : // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      12             : // Lesser General Public License for more details.
      13             : 
      14             : // You should have received a copy of the GNU Lesser General Public
      15             : // License along with this library; if not, write to the Free Software
      16             : // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
      17             : 
      18             : #include "libmesh/libmesh_common.h"
      19             : #include "libmesh/petsc_macro.h"
      20             : 
      21             : #ifdef LIBMESH_HAVE_PETSC
      22             : #if !PETSC_VERSION_LESS_THAN(3,7,3)
      23             : #if defined(LIBMESH_ENABLE_AMR) && defined(LIBMESH_HAVE_METAPHYSICL)
      24             : 
      25             : #include "libmesh/ignore_warnings.h"
      26             : #include <petscsf.h>
      27             : #if PETSC_VERSION_LESS_THAN(3,12,0)
      28             : #include <petsc/private/dmimpl.h>
      29             : #endif
      30             : #include <petscdmshell.h>
      31             : #include "libmesh/restore_warnings.h"
      32             : 
      33             : #include "libmesh/petsc_dm_wrapper.h"
      34             : #include "libmesh/libmesh_logging.h"
      35             : #include "libmesh/system.h"
      36             : #include "libmesh/mesh.h"
      37             : #include "libmesh/mesh_base.h"
      38             : #include "libmesh/mesh_refinement.h"
      39             : #include "libmesh/mesh_tools.h"
      40             : #include "libmesh/partitioner.h"
      41             : #include "libmesh/dof_map.h"
      42             : #include "libmesh/elem.h"
      43             : #include "libmesh/petsc_matrix.h"
      44             : 
      45             : namespace libMesh
      46             : {
      47             : 
      48             :   //--------------------------------------------------------------------
      49             :   // Functions with C linkage to pass to PETSc.  PETSc will call these
      50             :   // methods as needed.
      51             :   //
      52             :   // Since they must have C linkage they have no knowledge of a namespace.
      53             :   // We give them an obscure name to avoid namespace pollution.
      54             :   //--------------------------------------------------------------------
      55             :   extern "C"
      56             :   {
      57             : 
      58             :     //! Help PETSc create a subDM given a global dm when using fieldsplit
      59             : #if PETSC_VERSION_LESS_THAN(3,9,0)
      60             :     PetscErrorCode libmesh_petsc_DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
      61             : #else
      62         144 :     PetscErrorCode libmesh_petsc_DMCreateSubDM(DM dm, PetscInt numFields, const PetscInt fields[], IS *is, DM *subdm)
      63             : #endif
      64             :     {
      65             :       PetscFunctionBegin;
      66             : 
      67             :       // Basically, we copy the PETSc ShellCreateSubDM implementation,
      68             :       // but also need to set the embedding dim and also propagate
      69             :       // the relevant function pointers to the subDM for GMG purposes.
       70             :       // Since this is called by PETSc, we have to pull some of this info
      71             :       // from the context in the DM.
      72             : 
      73             :       // First, retrieve our context
      74         144 :       void * ctx = nullptr;
      75         144 :       LibmeshPetscCallQ(DMShellGetContext(dm, & ctx));
      76          48 :       libmesh_assert(ctx);
      77         144 :       PetscDMContext * p_ctx = static_cast<PetscDMContext * >(ctx);
      78             : 
      79         144 :       if (subdm)
      80             :         {
      81         144 :           LibmeshPetscCallQ(DMShellCreate(PetscObjectComm((PetscObject) dm), subdm));
      82             : 
      83             :           // Set the DM embedding dimension to help PetscDS (Discrete System)
      84         144 :           LibmeshPetscCallQ(DMSetCoordinateDim(*subdm, p_ctx->mesh_dim));
      85             : 
      86             :           // Now set the function pointers for the subDM
      87             :           // Some DMShellGet* functions only exist with PETSc >= 3.12.0.
      88             : 
      89             :           // Set Coarsen function pointer
      90             : #if PETSC_VERSION_LESS_THAN(3,12,0)
      91          96 :           if (dm->ops->coarsen)
      92          96 :             LibmeshPetscCallQ(DMShellSetCoarsen(*subdm, dm->ops->coarsen));
      93             : #else
      94          48 :           PetscErrorCode (*coarsen)(DM,MPI_Comm,DM*) = nullptr;
      95          48 :           LibmeshPetscCallQ(DMShellGetCoarsen(dm, &coarsen));
      96          48 :           if (coarsen)
      97          48 :             LibmeshPetscCallQ(DMShellSetCoarsen(*subdm, coarsen));
      98             : #endif
      99             : 
     100             :           // Set Refine function pointer
     101             : #if PETSC_VERSION_LESS_THAN(3,12,0)
     102          96 :           if (dm->ops->refine)
     103          96 :             LibmeshPetscCallQ(DMShellSetRefine(*subdm, dm->ops->refine));
     104             : #else
     105          48 :           PetscErrorCode (*refine)(DM,MPI_Comm,DM*) = nullptr;
     106          48 :           LibmeshPetscCallQ(DMShellGetRefine(dm, &refine));
     107             : 
     108          48 :           if (refine)
     109          48 :             LibmeshPetscCallQ(DMShellSetRefine(*subdm, refine));
     110             : #endif
     111             : 
     112             :           // Set Interpolation function pointer
     113             : #if PETSC_VERSION_LESS_THAN(3,12,0)
     114          96 :           if (dm->ops->createinterpolation)
     115          96 :             LibmeshPetscCallQ(DMShellSetCreateInterpolation(*subdm, dm->ops->createinterpolation));
     116             : #else
     117          48 :           PetscErrorCode (*interp)(DM,DM,Mat*,Vec*) = nullptr;
     118          48 :           LibmeshPetscCallQ(DMShellGetCreateInterpolation(dm, &interp));
     119          48 :           if (interp)
     120          48 :             LibmeshPetscCallQ(DMShellSetCreateInterpolation(*subdm, interp));
     121             : #endif
     122             : 
     123             :           // Set Restriction function pointer
     124             : #if PETSC_VERSION_LESS_THAN(3,12,0)
     125          96 :           if (dm->ops->createrestriction)
     126           0 :             LibmeshPetscCallQ(DMShellSetCreateRestriction(*subdm, dm->ops->createrestriction));
     127             : #else
     128          48 :           PetscErrorCode (*createrestriction)(DM,DM,Mat*) = nullptr;
     129          48 :           LibmeshPetscCallQ(DMShellGetCreateRestriction(dm, &createrestriction));
     130          48 :           if (createrestriction)
     131           0 :             LibmeshPetscCallQ(DMShellSetCreateRestriction(*subdm, createrestriction));
     132             : #endif
     133             : 
     134             :           // Set CreateSubDM function pointer
     135             : #if PETSC_VERSION_LESS_THAN(3,12,0)
     136          96 :           if (dm->ops->createsubdm)
     137          96 :             LibmeshPetscCallQ(DMShellSetCreateSubDM(*subdm, dm->ops->createsubdm));
     138             : #else
     139          48 :           PetscErrorCode (*createsubdm)(DM,PetscInt,const PetscInt[],IS*,DM*) = nullptr;
     140          48 :           LibmeshPetscCallQ(DMShellGetCreateSubDM(dm, &createsubdm));
     141          48 :           if (createsubdm)
     142          48 :             LibmeshPetscCallQ(DMShellSetCreateSubDM(*subdm, createsubdm));
     143             : #endif
     144             :           // Set Context pointer
     145         144 :           if (ctx)
     146         144 :             LibmeshPetscCallQ(DMShellSetContext(*subdm, ctx));
     147             : #if PETSC_VERSION_LESS_THAN(3,11,0)
     148             :           // Lastly, Compute the subsection for the subDM
     149             :           LibmeshPetscCallQ(DMCreateSubDM_Section_Private(dm, numFields, fields, is, subdm));
     150             : #elif PETSC_RELEASE_LESS_THAN(3, 21, 0)
     151         144 :           LibmeshPetscCallQ(DMCreateSectionSubDM(dm, numFields, fields, is, subdm));
     152             : #else
     153             :           LibmeshPetscCallQ(DMCreateSectionSubDM(dm, numFields, fields, NULL, NULL, is, subdm));
     154             : #endif
     155             :         }
     156             : 
     157         144 :       PetscFunctionReturn(LIBMESH_PETSC_SUCCESS);
     158             :     }
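
    // For reference, a minimal sketch (illustrative only, not part of this
    // file) of how PETSc reaches the callback above: with fieldsplit
    // active, PETSc asks the shell DM for a subDM via DMCreateSubDM(),
    // which dispatches to libmesh_petsc_DMCreateSubDM(). The field index
    // below is hypothetical.
#if 0
    DM subdm = nullptr;
    IS is = nullptr;
    const PetscInt fields[] = {0}; // hypothetical: split out field 0 only
    LibmeshPetscCallQ(DMCreateSubDM(dm, 1, fields, &is, &subdm));
    // ... hand subdm to the split's inner solver, then clean up ...
    LibmeshPetscCallQ(ISDestroy(&is));
    LibmeshPetscCallQ(DMDestroy(&subdm));
#endif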
     159             : 
     160             :     //! Help PETSc identify the finer DM given a dmc
     161           0 :     PetscErrorCode libmesh_petsc_DMRefine(DM dmc, MPI_Comm /*comm*/, DM * dmf)
     162             :     {
     163             :       PetscFunctionBegin;
     164             : 
     165           0 :       libmesh_assert(dmc);
     166           0 :       libmesh_assert(dmf);
     167             : 
     168             :       // extract our context from the incoming dmc
     169           0 :       void * ctx_c = nullptr;
     170           0 :       LibmeshPetscCallQ(DMShellGetContext(dmc, & ctx_c));
     171           0 :       libmesh_assert(ctx_c);
     172           0 :       PetscDMContext * p_ctx = static_cast<PetscDMContext * >(ctx_c);
     173             : 
     174             :       // check / set the finer DM
     175           0 :       libmesh_assert(p_ctx->finer_dm);
     176           0 :       libmesh_assert(*(p_ctx->finer_dm));
     177           0 :       *(dmf) = *(p_ctx->finer_dm);
     178             : 
     179           0 :       PetscFunctionReturn(LIBMESH_PETSC_SUCCESS);
     180             :     }
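
    // Sketch (illustrative only): PETSc walks up the hierarchy by calling
    // DMRefine() on the shell DM, which lands in the callback above and
    // simply hands back the pre-built finer DM stored in the context.
#if 0
    DM dmf = nullptr;
    LibmeshPetscCallQ(DMRefine(dmc, PetscObjectComm((PetscObject)dmc), &dmf));
#endif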
     181             : 
     182             :     //! Help PETSc identify the coarser DM dmc given the fine DM dmf
     183          24 :     PetscErrorCode libmesh_petsc_DMCoarsen(DM dmf, MPI_Comm /*comm*/, DM * dmc)
     184             :     {
     185             :       PetscFunctionBegin;
     186             : 
     187           8 :       libmesh_assert(dmc);
     188           8 :       libmesh_assert(dmf);
     189             : 
     190             :       // Extract our context from the incoming dmf
     191          24 :       void * ctx_f = nullptr;
     192          24 :       LibmeshPetscCallQ(DMShellGetContext(dmf, &ctx_f));
     193           8 :       libmesh_assert(ctx_f);
     194          24 :       PetscDMContext * p_ctx_f = static_cast<PetscDMContext*>(ctx_f);
     195             : 
     196             :       // First, ensure that there exists a coarse DM that we want to
     197             :       // set. There ought to be as we created it while walking the
     198             :       // hierarchy.
     199           8 :       libmesh_assert(p_ctx_f->coarser_dm);
     200           8 :       libmesh_assert(*(p_ctx_f->coarser_dm));
     201             : 
     202             :       // In situations using fieldsplit we need to provide a coarser
     203             :       // DM which only has the relevant subfields in it. Since we
     204             :       // create global DMs for each mesh level, we need to also create
      205             :       // fewer than all the fields are used, we need to create the
     206             :       // less than all the fields are used, we need to create the
     207             :       // proper subDMs. We get the number of fields and their names
     208             :       // from the incoming fine DM and the global reference DM
     209             :       PetscInt nfieldsf, nfieldsg;
     210             :       char ** fieldnamesf;
     211             :       char ** fieldnamesg;
     212             : 
     213           8 :       libmesh_assert(p_ctx_f->global_dm);
     214          24 :       DM * globaldm = p_ctx_f->global_dm;
     215          24 :       LibmeshPetscCallQ(DMCreateFieldIS(dmf, &nfieldsf, &fieldnamesf, nullptr));
     216          24 :       LibmeshPetscCallQ(DMCreateFieldIS(*globaldm, &nfieldsg, &fieldnamesg, nullptr));
     217             : 
     218             :       // If the probed number of fields is less than the number of
     219             :       // global fields, this amounts to PETSc 'indicating' to us we
     220             :       // are doing FS. So, we must create subDMs for the coarser
     221             :       // DMs.
     222          24 :       if ( nfieldsf < nfieldsg )
     223             :         {
     224          24 :           p_ctx_f->subfields.clear();
     225          24 :           p_ctx_f->subfields.resize(nfieldsf);
     226             : 
     227             :           // To select the subDM fields we match fine grid DM field
     228             :           //  names to their global DM counterparts. Since PETSc can
     229             :           //  internally reassign field numbering under a fieldsplit,
     230             :           //  we must extract subsections via the field names. This is
      231             :           //  admittedly gross, but such is life.
     232          72 :           for (int i = 0; i < nfieldsf ; i++)
     233             :             {
     234         192 :               for (int j = 0; j < nfieldsg ;j++)
     235         144 :                 if ( strcmp( fieldnamesg[j], fieldnamesf[i] ) == 0 )
     236          64 :                   p_ctx_f->subfields[i] = j;
     237             :             }
     238             : 
     239             :           // Next, for the found fields we create a subDM
     240             :           DM subdm;
     241          24 :           LibmeshPetscCallQ(libmesh_petsc_DMCreateSubDM(*(p_ctx_f->coarser_dm), nfieldsf,
     242             :                                                         p_ctx_f->subfields.data(), nullptr, &subdm));
     243             : 
     244             :           // Extract our coarse context from the created subDM so we
     245             :           // can set its subfields for use in createInterp.
     246          24 :           void * ctx_c = nullptr;
     247          24 :           LibmeshPetscCallQ(DMShellGetContext(subdm, &ctx_c));
     248           8 :           libmesh_assert(ctx_c);
     249          24 :           PetscDMContext * p_ctx_c = static_cast<PetscDMContext*>(ctx_c);
     250             : 
     251             :           // propagate subfield info to subDM
     252          24 :           p_ctx_c->subfields = p_ctx_f->subfields;
     253             : 
     254             :           // return created subDM to PETSc
     255          24 :           *(dmc) = subdm;
     256             :         }
     257             :       else {
     258             :         // No fieldsplit was requested so set the coarser DM to the
     259             :         // global coarser DM.
     260           0 :         *(dmc) = *(p_ctx_f->coarser_dm);
     261             :       }
     262             : 
     263          24 :       PetscFunctionReturn(LIBMESH_PETSC_SUCCESS);
     264             :     }
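
    // Worked example (hypothetical field names): if the global DM carries
    // fields {"u","v","p"} and PETSc hands us a fine DM carrying only
    // {"v","p"}, the name-matching loop above yields subfields = {1, 2},
    // and the coarse subDM is then built from exactly those two fields of
    // the coarser global DM.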
     265             : 
      266             :     //! Function given to PETSc that supplies the Interpolation Matrix between two DMs
     267             :     PetscErrorCode
     268          24 :     libmesh_petsc_DMCreateInterpolation (DM dmc /*coarse*/, DM dmf /*fine*/,
     269             :                                          Mat * mat ,Vec * vec)
     270             :     {
     271             :       PetscFunctionBegin;
     272             : 
     273           8 :       libmesh_assert(dmc);
     274           8 :       libmesh_assert(dmf);
     275           8 :       libmesh_assert(mat);
     276           8 :       libmesh_assert(vec); // Optional scaling (not needed for mg)
     277             : 
     278             :       // Get a communicator from incoming DM
     279             :       MPI_Comm comm;
     280          24 :       LibmeshPetscCallQ(PetscObjectGetComm((PetscObject)dmc, &comm));
     281             : 
     282             :       // Extract our coarse context from the incoming DM
     283          24 :       void * ctx_c = nullptr;
     284          24 :       LibmeshPetscCallQ(DMShellGetContext(dmc, &ctx_c));
     285           8 :       libmesh_assert(ctx_c);
     286          24 :       PetscDMContext * p_ctx_c = static_cast<PetscDMContext*>(ctx_c);
     287             : 
     288             :       // Extract our fine context from the incoming DM
     289          24 :       void * ctx_f = nullptr;
     290          24 :       LibmeshPetscCallQ(DMShellGetContext(dmf, &ctx_f));
     291           8 :       libmesh_assert(ctx_f);
     292          24 :       PetscDMContext * p_ctx_f = static_cast<PetscDMContext*>(ctx_f);
     293             : 
     294             :       // Check for existing global projection matrix
     295           8 :       libmesh_assert(p_ctx_c->K_interp_ptr);
     296             : 
      297             :       // If we're doing fieldsplit we need to construct sub projection
      298             :       // matrices. We compare the incoming DM's number of fields to the
      299             :       // global DM's in order to determine whether a subprojection is needed.
     300             :       PetscInt nfieldsf, nfieldsg;
     301             : 
     302           8 :       libmesh_assert(p_ctx_c->global_dm);
     303          24 :       DM * globaldm = p_ctx_c->global_dm;
     304             : 
     305          24 :       LibmeshPetscCallQ(DMCreateFieldIS(dmf, &nfieldsf, nullptr, nullptr));
     306          24 :       LibmeshPetscCallQ(DMCreateFieldIS(*globaldm, &nfieldsg, nullptr, nullptr));
     307             : 
     308             :       // If the probed number of fields is less than the number of
     309             :       // global fields, this amounts to PETSc 'indicating' to us we
     310             :       // are doing FS.
     311          24 :       if ( nfieldsf < nfieldsg)
     312             :         {
     313             :           // Loop over the fields and merge their index sets.
     314          24 :           std::vector<std::vector<numeric_index_type>> allrows,allcols;
     315          16 :           std::vector<numeric_index_type> rows,cols;
     316          24 :           allrows = p_ctx_f->dof_vec;
     317          24 :           allcols = p_ctx_c->dof_vec;
     318             : 
      319             :           // For libMesh's internal submatrix extraction we need to merge
      320             :           // all field dofs and then sort the vectors so that they match
      321             :           // the projection matrix ordering.
     322          24 :           const int n_subfields = p_ctx_f->subfields.size();
     323          24 :           if ( n_subfields >= 1 )
     324             :             {
     325          72 :               for (int i : p_ctx_f->subfields)
     326             :                 {
     327          64 :                   rows.insert(rows.end(), allrows[i].begin(), allrows[i].end());
     328          64 :                   cols.insert(cols.end(), allcols[i].begin(), allcols[i].end());
     329             :                 }
     330          24 :               std::sort(rows.begin(),rows.end());
     331          24 :               std::sort(cols.begin(),cols.end());
     332             :             }
     333             : 
     334             :           // Now that we have merged the fine and coarse index sets
      335             :           // we're ready to make the submatrix and pass it off to PETSc.
     336          24 :           p_ctx_c->K_interp_ptr->create_submatrix (*p_ctx_c->K_sub_interp_ptr, rows, cols);
     337             : 
     338             :           // return to PETSc the created submatrix
     339          24 :           *(mat) = p_ctx_c->K_sub_interp_ptr->mat();
     340             : 
     341           8 :         } // endif less incoming DM fields than global DM fields
     342             :       else
     343             :         {
     344             :           // We are not doing fieldsplit, so return global projection
     345           0 :           *(mat) = p_ctx_c->K_interp_ptr->mat();
     346             :         }
     347             : 
      348             :       // Vec scaling isn't needed, so we're done.
     349          24 :       *(vec) = LIBMESH_PETSC_NULLPTR;
     350             : 
     351          24 :       PetscFunctionReturn(LIBMESH_PETSC_SUCCESS);
     352             :     } // end libmesh_petsc_DMCreateInterpolation
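
    // Sketch (illustrative only): during PCMG setup PETSc requests each
    // level transfer operator via DMCreateInterpolation(), which lands in
    // the callback above; the optional scaling vector comes back null.
#if 0
    Mat interp = nullptr;
    Vec scale = nullptr;
    LibmeshPetscCallQ(DMCreateInterpolation(dmc, dmf, &interp, &scale));
#endif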
     353             : 
      354             :     //! Function given to PETSc that supplies the Restriction Matrix between two DMs
     355             :     PetscErrorCode
     356           0 :     libmesh_petsc_DMCreateRestriction (DM dmc /*coarse*/, DM dmf/*fine*/, Mat * mat)
     357             :     {
     358             :       PetscFunctionBegin;
     359             : 
     360           0 :       libmesh_assert(dmc);
     361           0 :       libmesh_assert(dmf);
     362           0 :       libmesh_assert(mat);
     363             : 
     364             :       // get a communicator from incoming DM
     365             :       MPI_Comm comm;
     366           0 :       LibmeshPetscCallQ(PetscObjectGetComm((PetscObject)dmc, &comm));
     367             : 
     368             :       // extract our fine context from the incoming DM
     369           0 :       void * ctx_f = nullptr;
     370           0 :       LibmeshPetscCallQ(DMShellGetContext(dmf, &ctx_f));
     371           0 :       libmesh_assert(ctx_f);
     372           0 :       PetscDMContext * p_ctx_f = static_cast<PetscDMContext*>(ctx_f);
     373             : 
     374             :       // check / give PETSc its matrix
     375           0 :       libmesh_assert(p_ctx_f->K_restrict_ptr);
     376           0 :       *(mat) = p_ctx_f->K_restrict_ptr->mat();
     377             : 
     378           0 :       PetscFunctionReturn(LIBMESH_PETSC_SUCCESS);
     379             :     }
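
    // Note: libMesh does not currently register this restriction callback
    // (see init_petscdm() below, where supply_restriction is false);
    // coarse-level operators instead come from Galerkin-type products,
    // which PETSc can form itself, e.g. via the run-time option
    // -pc_mg_galerkin.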
     380             : 
     381             :   } // end extern C functions
     382             : 
     383             : 
     384        5346 :   PetscDMWrapper::~PetscDMWrapper() = default;
     385             : 
     386           0 :   void PetscDMWrapper::clear()
     387             :   {
     388             :     // Calls custom deleters
     389           0 :     _dms.clear();
     390           0 :     _sections.clear();
     391             : 
     392             :     // We don't manage the lifetime of the PetscSF objects
     393           0 :     _star_forests.clear();
     394             : 
     395             :     // The relevant C++ destructors are called for these objects
     396             :     // automatically.
     397           0 :     _pmtx_vec.clear();
     398           0 :     _subpmtx_vec.clear();
     399           0 :     _vec_vec.clear();
     400             : 
     401             :     // These members are trivially clear()able.
     402           0 :     _ctx_vec.clear();
     403           0 :     _mesh_dof_sizes.clear();
     404           0 :     _mesh_dof_loc_sizes.clear();
     405           0 :   }
     406             : 
     407         280 :   unsigned int PetscDMWrapper::init_petscdm(System & system)
     408             :   {
     409          16 :     LOG_SCOPE ("init_and_attach_petscdm()", "PetscDMWrapper");
     410             : 
     411          16 :     MeshBase & mesh = system.get_mesh();   // Convenience
     412         288 :     MeshRefinement mesh_refinement(mesh); // Used for swapping between grids
     413             : 
     414             :     // There's no need for these code paths while traversing the hierarchy
     415           8 :     mesh.allow_renumbering(false);
     416           8 :     mesh.allow_remote_element_removal(false);
     417         280 :     mesh.partitioner() = nullptr;
     418             : 
      419             :     // First walk over the active local elements and see the maximum number of MG levels we can construct
     420             : /*
     421             :     unsigned int n_levels = 0;
     422             :     for ( auto & elem : mesh.active_local_element_ptr_range() )
     423             :       {
     424             :         if ( elem->level() > n_levels )
     425             :           n_levels = elem->level();
     426             :       }
     427             :     // On coarse grids some processors may have no active local elements,
     428             :     // these processors shouldn't make projections
     429             :     if (n_levels >= 1)
     430             :       n_levels += 1;
     431             : */
     432             : 
     433         280 :     unsigned int n_levels = MeshTools::n_levels(mesh);
     434             : 
     435             :     // How many MG levels did the user request?
     436           8 :     unsigned int usr_requested_mg_lvls = 0;
     437         544 :     usr_requested_mg_lvls = command_line_next("-pc_mg_levels", usr_requested_mg_lvls);
     438             : 
      439             :     // Only construct the requested number of levels, if the user actually requested any
     440          16 :     if ( usr_requested_mg_lvls != 0 )
     441             :       {
      442             :         // Don't request more than the available number of mesh levels, and require at least 2 levels
     443           4 :         libmesh_assert_less_equal( usr_requested_mg_lvls, n_levels );
     444           4 :         libmesh_assert( usr_requested_mg_lvls > 1 );
     445             : 
     446           4 :         n_levels = usr_requested_mg_lvls;
     447             :       }
     448             :     else
     449             :       {
     450             :         // if -pc_mg_levels is not specified we just construct fieldsplit related
     451             :         // structures on the finest mesh.
     452           4 :         n_levels = 1;
     453             :       }
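
    // Example invocation (hypothetical executable name): request a
    // three-level multigrid hierarchy on the command line with
    //   ./solver -pc_type mg -pc_mg_levels 3 --node-major-dofs
    // (--node-major-dofs is required by build_section(); see below).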
     454             : 
     455             : 
     456             :     // Init data structures: data[0] ~ coarse grid, data[n_levels-1] ~ fine grid
     457         280 :     this->init_dm_data(n_levels, system.comm());
     458             : 
      459             :     // Step 1. Contract: ensure all active elements have no children.
     460         280 :     mesh.contract();
     461             : 
      462             :     // Start on the finest grid. Construct DM data and stash some info for
     463             :     // later projection_matrix and vec sizing
     464         840 :     for(unsigned int level = n_levels; level >= 1; level--)
     465             :       {
     466             :         // Save the n_fine_dofs before coarsening for later projection matrix sizing
     467         576 :         _mesh_dof_sizes[level-1] = system.get_dof_map().n_dofs();
     468         576 :         _mesh_dof_loc_sizes[level-1] = system.get_dof_map().n_local_dofs();
     469             : 
     470             :         // Get refs to things we will fill
     471          16 :         DM & dm = this->get_dm(level-1);
     472          16 :         PetscSection & section = this->get_section(level-1);
     473          16 :         PetscSF & star_forest = this->get_star_forest(level-1);
     474             : 
     475             :         // The shell will contain other DM info
     476         560 :         LibmeshPetscCall2(system.comm(), DMShellCreate(system.comm().get(), &dm));
     477             : 
     478             :         // Set the DM embedding dimension to help PetscDS (Discrete System)
     479         560 :         LibmeshPetscCall2(system.comm(), DMSetCoordinateDim(dm, mesh.mesh_dimension()));
     480             : 
     481             :         // Build the PetscSection and attach it to the DM
     482         560 :         this->build_section(system, section);
     483             : #if PETSC_VERSION_LESS_THAN(3,10,0)
     484             :         LibmeshPetscCall2(system.comm(), DMSetDefaultSection(dm, section));
     485             : #elif PETSC_VERSION_LESS_THAN(3,23,0)
     486         560 :         LibmeshPetscCall2(system.comm(), DMSetSection(dm, section));
     487             : #else
     488             :         LibmeshPetscCall2(system.comm(), DMSetLocalSection(dm, section));
     489             : #endif
     490             : 
     491             :         // We only need to build the star forest if we're in a parallel environment
     492         560 :         if (system.n_processors() > 1)
     493             :           {
     494             :             // Build the PetscSF and attach it to the DM
     495         552 :             this->build_sf(system, star_forest);
     496             : #if PETSC_VERSION_LESS_THAN(3,12,0)
     497          32 :             LibmeshPetscCall2(system.comm(), DMSetDefaultSF(dm, star_forest));
     498             : #else
     499         520 :             LibmeshPetscCall2(system.comm(), DMSetSectionSF(dm, star_forest));
     500             : #endif
     501             :           }
     502             : 
     503             :         // Set PETSC's Restriction, Interpolation, Coarsen and Refine functions for the current DM
     504         560 :         LibmeshPetscCall2(system.comm(), DMShellSetCreateInterpolation ( dm, libmesh_petsc_DMCreateInterpolation ));
     505             : 
      506             :         // Not implemented. For now we rely on Galerkin-style restrictions.
     507          16 :         bool supply_restriction = false;
     508          16 :         if (supply_restriction)
     509           0 :           LibmeshPetscCall2(system.comm(), DMShellSetCreateRestriction ( dm, libmesh_petsc_DMCreateRestriction  ));
     510             : 
     511         560 :         LibmeshPetscCall2(system.comm(), DMShellSetCoarsen ( dm, libmesh_petsc_DMCoarsen ));
     512             : 
     513         560 :         LibmeshPetscCall2(system.comm(), DMShellSetRefine ( dm, libmesh_petsc_DMRefine ));
     514             : 
     515         560 :         LibmeshPetscCall2(system.comm(), DMShellSetCreateSubDM(dm, libmesh_petsc_DMCreateSubDM));
     516             : 
     517             :         // Uniformly coarsen if not the coarsest grid and distribute dof info.
     518         560 :         if ( level != 1 )
     519             :           {
     520         280 :             LOG_CALL("PDM_coarsen", "PetscDMWrapper", mesh_refinement.uniformly_coarsen(1));
     521         280 :             LOG_CALL("PDM_dist_dof", "PetscDMWrapper", system.get_dof_map().distribute_dofs(mesh));
     522         280 :             LOG_CALL("PDM_prep_send", "PetscDMWrapper", system.get_dof_map().prepare_send_list());
     523             :           }
     524             :       } // End PETSc data structure creation
     525             : 
     526             :     // Now fill the corresponding internal PetscDMContext for each created DM
     527         840 :     for( unsigned int i=1; i <= n_levels; i++ )
     528             :       {
     529             :         // Set context dimension
     530         560 :         _ctx_vec[i-1].mesh_dim = mesh.mesh_dimension();
     531             : 
     532             :         // Create and attach a sized vector to the current ctx
     533         592 :         _vec_vec[i-1]->init( _mesh_dof_sizes[i-1] );
     534         576 :         _ctx_vec[i-1].current_vec = _vec_vec[i-1].get();
     535             : 
     536             :         // Set a global DM to be used as reference when using fieldsplit
     537         560 :         _ctx_vec[i-1].global_dm = &(this->get_dm(n_levels-1));
     538             : 
     539         560 :         if (n_levels > 1 )
     540             :           {
     541             :             // Set pointers to surrounding dm levels to help PETSc refine/coarsen
      542         420 :             if ( i == 1 ) // we're at the coarsest mesh
     543             :               {
     544         140 :                 _ctx_vec[i-1].coarser_dm = nullptr;
     545         140 :                 _ctx_vec[i-1].finer_dm   = _dms[1].get();
     546             :               }
      547         280 :             else if( i == n_levels ) // we're at the finest mesh
     548             :               {
     549         144 :                 _ctx_vec[i-1].coarser_dm = _dms[_dms.size() - 2].get();
     550         140 :                 _ctx_vec[i-1].finer_dm   = nullptr;
     551             :               }
      552             :             else // we're in the middle of the hierarchy
     553             :               {
     554         140 :                 _ctx_vec[i-1].coarser_dm = _dms[i-2].get();
     555         144 :                 _ctx_vec[i-1].finer_dm   = _dms[i].get();
     556             :               }
     557             :           }
     558             :       } // End context creation
     559             : 
     560             :     // Attach a vector and context to each DM
     561          16 :     if ( n_levels >= 1 )
     562             :       {
     563         840 :         for ( unsigned int i = 1; i <= n_levels ; ++i)
     564             :           {
     565         560 :             DM & dm = this->get_dm(i-1);
     566             : 
     567         576 :             LibmeshPetscCall2(system.comm(), DMShellSetGlobalVector( dm, _ctx_vec[i-1].current_vec->vec() ));
     568             : 
     569         576 :             LibmeshPetscCall2(system.comm(), DMShellSetContext( dm, &_ctx_vec[i-1] ));
     570             :           }
     571             :       }
     572             : 
      573             :     // DM structures created; now we need projection matrices if GMG is requested.
      574             :     // To prepare for projection creation, go to the second-coarsest mesh so we can
      575             :     // utilize old_dof_indices information in the projection creation.
     576         280 :     if (n_levels > 1 )
     577             :       {
     578             :         // First, stash the coarse dof indices for FS purposes
     579           4 :         unsigned int n_vars  = system.n_vars();
     580         140 :         _ctx_vec[0].dof_vec.resize(n_vars);
     581             : 
     582         560 :         for( unsigned int v = 0; v < n_vars; v++ )
     583             :           {
     584          24 :             std::vector<numeric_index_type> di;
     585         420 :             system.get_dof_map().local_variable_indices(di, system.get_mesh(), v);
     586         432 :             _ctx_vec[0].dof_vec[v] = di;
     587             :           }
     588             : 
     589         140 :         LOG_CALL ("PDM_refine", "PetscDMWrapper", mesh_refinement.uniformly_refine(1));
     590         140 :         LOG_CALL ("PDM_dist_dof", "PetscDMWrapper", system.get_dof_map().distribute_dofs(mesh));
     591         140 :         LOG_CALL ("PDM_cnstrnts", "PetscDMWrapper", system.reinit_constraints());
     592         140 :         LOG_CALL ("PDM_prep_send", "PetscDMWrapper", system.get_dof_map().prepare_send_list());
     593             :       }
     594             : 
     595             :     // Create the Interpolation Matrices between adjacent mesh levels
     596         560 :     for ( unsigned int i = 1 ; i < n_levels ; ++i )
     597             :       {
     598           8 :         if ( i != n_levels )
     599             :           {
     600             :             // Stash the rest of the dof indices
     601           8 :             unsigned int n_vars  = system.n_vars();
     602         288 :             _ctx_vec[i].dof_vec.resize(n_vars);
     603             : 
     604        1120 :             for( unsigned int v = 0; v < n_vars; v++ )
     605             :               {
     606          48 :                 std::vector<numeric_index_type> di;
     607         840 :                 system.get_dof_map().local_variable_indices(di, system.get_mesh(), v);
     608         888 :                 _ctx_vec[i].dof_vec[v] = di;
     609             :               }
     610             : 
     611         288 :             unsigned int ndofs_c = _mesh_dof_sizes[i-1];
     612         280 :             unsigned int ndofs_f = _mesh_dof_sizes[i];
     613             : 
     614             :             // Create the Interpolation matrix and set its pointer
     615         288 :             _ctx_vec[i-1].K_interp_ptr = _pmtx_vec[i-1].get();
     616         288 :             _ctx_vec[i-1].K_sub_interp_ptr = _subpmtx_vec[i-1].get();
     617             : 
     618           8 :             unsigned int ndofs_local     = system.get_dof_map().n_local_dofs();
     619         272 :             unsigned int ndofs_old_first = system.get_dof_map().first_old_dof();
     620         272 :             unsigned int ndofs_old_end   = system.get_dof_map().end_old_dof();
     621         280 :             unsigned int ndofs_old_size  = ndofs_old_end - ndofs_old_first;
     622             : 
     623             :             // Init and zero the matrix
     624         280 :             _ctx_vec[i-1].K_interp_ptr->init(ndofs_f, ndofs_c, ndofs_local, ndofs_old_size, 30 , 20);
     625             : 
     626             :             // Disable Mat destruction since PETSc destroys these for us
     627         288 :             _ctx_vec[i-1].K_interp_ptr->set_destroy_mat_on_exit(false);
     628         288 :             _ctx_vec[i-1].K_sub_interp_ptr->set_destroy_mat_on_exit(false);
     629             : 
     630             :             // TODO: Projection matrix sparsity pattern?
     631             :             //MatSetOption(_ctx_vec[i-1].K_interp_ptr->mat(), MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
     632             : 
     633             :             // Compute the interpolation matrix and set K_interp_ptr
     634         288 :             LOG_CALL ("PDM_proj_mat", "PetscDMWrapper", system.projection_matrix(*_ctx_vec[i-1].K_interp_ptr));
     635             : 
     636             :             // Always close matrix that contains altered data
     637         288 :             _ctx_vec[i-1].K_interp_ptr->close();
     638             :           }
     639             : 
     640             :         // Move to next grid to make next projection
     641         280 :         if ( i != n_levels - 1 )
     642             :           {
     643         140 :             LOG_CALL ("PDM_refine", "PetscDMWrapper", mesh_refinement.uniformly_refine(1));
     644         140 :             LOG_CALL ("PDM_dist_dof", "PetscDMWrapper", system.get_dof_map().distribute_dofs(mesh));
     645         140 :             LOG_CALL ("PDM_cnstrnts", "PetscDMWrapper", system.reinit_constraints());
     646         140 :             LOG_CALL ("PDM_prep_send", "PetscDMWrapper", system.get_dof_map().prepare_send_list());
     647             :           }
     648             :       } // End create transfer operators. System back at the finest grid
     649             : 
     650         288 :     return n_levels;
     651         264 :   }
     652             : 
     653         280 :   void PetscDMWrapper::init_and_attach_petscdm(System & system, SNES snes)
     654             :   {
     655         280 :     const auto n_levels = this->init_petscdm(system);
     656             : 
     657             :     // Lastly, give SNES the finest level DM
     658         280 :     DM & dm = this->get_dm(n_levels-1);
     659         280 :     LibmeshPetscCall2(system.comm(), SNESSetDM(snes, dm));
     660         280 :   }
     661             : 
     662           0 :   void PetscDMWrapper::init_and_attach_petscdm(System & system, KSP ksp)
     663             :   {
     664           0 :     const auto n_levels = this->init_petscdm(system);
     665             : 
     666             :     // Lastly, give KSP the finest level DM
     667           0 :     DM & dm = this->get_dm(n_levels-1);
     668           0 :     LibmeshPetscCall2(system.comm(), KSPSetDM(ksp, dm));
     669           0 :   }
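
  // Usage sketch (illustrative only): a solver attaches the hierarchy
  // before solving. The wrapper owns the DMs, contexts, and projection
  // matrices, so it must outlive the solve.
#if 0
  PetscDMWrapper dm_wrapper;
  dm_wrapper.init_and_attach_petscdm(system, snes);
  // ... SNESSolve(...) ...
#endif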
     670             : 
     671         560 :   void PetscDMWrapper::build_section( const System & system, PetscSection & section )
     672             :   {
     673          32 :     LOG_SCOPE ("build_section()", "PetscDMWrapper");
     674             : 
     675         560 :     LibmeshPetscCall2(system.comm(), PetscSectionCreate(system.comm().get(),&section));
     676             : 
     677             :     // Tell the PetscSection about all of our System variables
     678         560 :     LibmeshPetscCall2(system.comm(), PetscSectionSetNumFields(section,system.n_vars()));
     679             : 
     680             :     // Set the actual names of all the field variables
     681        2240 :     for (auto v : make_range(system.n_vars()))
     682        1680 :       LibmeshPetscCall2(system.comm(), PetscSectionSetFieldName( section, v, system.variable_name(v).c_str() ));
     683             : 
      684             :     // For building the section, we need to create a local-to-global map
     685             :     // of local "point" ids to the libMesh global id of that point.
     686             :     // A "point" in PETSc nomenclature is a geometric object that can have
     687             :     // dofs associated with it, e.g. Node, Edge, Face, Elem.
     688             :     // The numbering PETSc expects is continuous for the local numbering.
      689             :     // Since we're only using this interface for solvers, we can just
      690             :     // assign any local id to each of the global ids. But the numbering is local
     691             :     // so we don't need to worry about processor numbering for the local
     692             :     // point ids.
     693          32 :     std::unordered_map<dof_id_type,dof_id_type> node_map;
     694          32 :     std::unordered_map<dof_id_type,dof_id_type> elem_map;
     695          32 :     std::map<dof_id_type,unsigned int> scalar_map;
     696             : 
     697             :     // First we tell the PetscSection about all of our points that have
     698             :     // dofs associated with them.
     699         560 :     this->set_point_range_in_section(system, section, node_map, elem_map, scalar_map);
     700             : 
     701             :     // Now we can build up the dofs per "point" in the PetscSection
     702         560 :     this->add_dofs_to_section(system, section, node_map, elem_map, scalar_map);
     703             : 
     704             :     // Final setup of PetscSection
     705             :     // Until Matt Knepley finishes implementing the commented out function
     706             :     // below, the PetscSection will be assuming node-major ordering
     707             :     // so let's throw an error if the user tries to use this without
     708             :     // node-major order
     709        1104 :     libmesh_error_msg_if(!libMesh::on_command_line("--node-major-dofs"),
     710             :                          "ERROR: Must use --node-major-dofs with PetscSection!");
     711             : 
     712             :     //else if (!system.identify_variable_groups())
     713             :     //  ierr = PetscSectionSetUseFieldOffsets(section,PETSC_TRUE);LIBMESH_CHKERR(ierr);
     714             :     //else
     715             :     //  {
     716             :     //    std::string msg = "ERROR: Only node-major or var-major ordering supported for PetscSection!\n";
     717             :     //    msg += "       var-group-major ordering not supported!\n";
     718             :     //    msg += "       Must use --node-major-dofs or set System::identify_variable_groups() = false!\n";
     719             :     //    libmesh_error_msg(msg);
     720             :     //  }
     721             : 
     722         560 :     LibmeshPetscCall2(system.comm(), PetscSectionSetUp(section));
     723             : 
     724             :     // Sanity checking at least that local_n_dofs match
     725          16 :     libmesh_assert_equal_to(system.n_local_dofs(),this->check_section_n_dofs(section));
     726         560 :   }
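
  // For reference, the calls above follow PETSc's standard PetscSection
  // pattern (sketch with hypothetical comm/n_points and a single scalar
  // field; error checking omitted):
#if 0
  PetscSection s;
  PetscSectionCreate(comm, &s);
  PetscSectionSetNumFields(s, 1);
  PetscSectionSetFieldName(s, 0, "u");
  PetscSectionSetChart(s, 0, n_points);    // local "points" are [0, n_points)
  for (PetscInt p = 0; p < n_points; ++p)
    {
      PetscSectionSetDof(s, p, 1);         // one dof total at point p ...
      PetscSectionSetFieldDof(s, p, 0, 1); // ... belonging to field 0
    }
  PetscSectionSetUp(s);
#endif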
     727             : 
     728         552 :   void PetscDMWrapper::build_sf( const System & system, PetscSF & star_forest )
     729             :   {
     730          32 :     LOG_SCOPE ("build_sf()", "PetscDMWrapper");
     731             : 
     732          16 :     const DofMap & dof_map = system.get_dof_map();
     733             : 
     734          16 :     const std::vector<dof_id_type> & send_list = dof_map.get_send_list();
     735             : 
     736             :     // Number of ghost dofs that send information to this processor
     737          32 :     const PetscInt n_leaves = cast_int<PetscInt>(send_list.size());
     738             : 
      739             :     // Number of local dofs, including ghost dofs
     740         552 :     const PetscInt n_roots = dof_map.n_local_dofs() + n_leaves;
     741             : 
     742             :     // This is the vector of dof indices coming from other processors
     743             :     // We need to give this to the PetscSF
     744             :     // We'll be extra paranoid about this ugly double cast
     745             :     static_assert(sizeof(PetscInt) == sizeof(dof_id_type),"PetscInt is not a dof_id_type!");
     746          16 :     PetscInt * local_dofs = reinterpret_cast<PetscInt *>(const_cast<dof_id_type *>(send_list.data()));
     747             : 
     748             : #ifdef DEBUG
     749             :     // PETSc 3.18 and above don't want duplicate entries here ... and
     750             :     // frankly we shouldn't have duplicates in the first place!
     751             :     {
     752          32 :       std::set<dof_id_type> send_set(send_list.begin(), send_list.end());
     753          16 :       libmesh_assert_equal_to(send_list.size(), send_set.size());
     754             :     }
     755             : #endif
     756             : 
     757             :     // This is the vector of PetscSFNode's for the local_dofs.
      758             :     // For each entry in local_dofs, we have to supply the rank from which
     759             :     // that dof stems and its local index on that rank.
     760             :     // PETSc documentation here:
     761             :     // http://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PetscSF/PetscSFNode.html
     762         568 :     std::vector<PetscSFNode> sf_nodes(send_list.size());
     763             : 
     764       90643 :     for (auto i : index_range(send_list))
     765             :       {
     766       90091 :         dof_id_type incoming_dof = send_list[i];
     767             : 
     768       90091 :         const processor_id_type rank = dof_map.dof_owner(incoming_dof);
     769             : 
     770             :         // Dofs are sorted and continuous on the processor so local index
     771             :         // is counted up from the first dof on the processor.
     772       90091 :         PetscInt index = incoming_dof - dof_map.first_dof(rank);
     773             : 
     774       90091 :         sf_nodes[i].rank  = rank; /* Rank of owner */
     775       90091 :         sf_nodes[i].index = index;/* Index of dof on rank */
     776             :       }
     777             : 
     778          32 :     PetscSFNode * remote_dofs = sf_nodes.data();
     779             : 
     780         552 :     LibmeshPetscCall2(system.comm(), PetscSFCreate(system.comm().get(), &star_forest));
     781             : 
     782             :     // TODO: We should create pointers to arrays so we don't have to copy
     783             :     //       and then can use PETSC_OWN_POINTER where PETSc will take ownership
     784             :     //       and delete the memory for us. But then we'd have to use PetscMalloc.
     785         552 :     LibmeshPetscCall2(system.comm(), PetscSFSetGraph(star_forest,
     786             :                                                      n_roots,
     787             :                                                      n_leaves,
     788             :                                                      local_dofs,
     789             :                                                      PETSC_COPY_VALUES,
     790             :                                                      remote_dofs,
     791             :                                                      PETSC_COPY_VALUES));
     792         552 :   }
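
  // Worked example (hypothetical, 2 ranks): suppose rank 1 owns 5 dofs and
  // its send_list is {3, 4}, both owned by rank 0 whose first_dof is 0.
  // Then on rank 1: n_leaves = 2, n_roots = 5 + 2 = 7, and
  //   sf_nodes[0] = {rank 0, index 3},  sf_nodes[1] = {rank 0, index 4},
  // i.e. each ghost dof points back at its owner's local slot.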
     793             : 
     794         560 :   void PetscDMWrapper::set_point_range_in_section (const System & system,
     795             :                                                    PetscSection & section,
     796             :                                                    std::unordered_map<dof_id_type,dof_id_type> & node_map,
     797             :                                                    std::unordered_map<dof_id_type,dof_id_type> & elem_map,
     798             :                                                    std::map<dof_id_type,unsigned int> & scalar_map)
     799             :   {
     800             :     // We're expecting this to be empty coming in
     801          16 :     libmesh_assert(node_map.empty());
     802             : 
     803             :     // We need to count up the number of active "points" on this processor.
     804             :     // Nominally, a "point" in PETSc parlance is a geometric object that can
      805             :     // hold DoFs, e.g. node, edge, face, elem. Since we handle the mesh and are only
     806             :     // interested in solvers, then the only thing PETSc needs is a unique *local* number
     807             :     // for each "point" that has active DoFs; note however this local numbering
     808             :     // we construct must be continuous.
     809             :     //
     810             :     // In libMesh, for most finite elements, we just associate those DoFs with the
      811             :     // geometric nodes. So we can loop over the nodes on this processor and check
      812             :     // if any of the fields have active DoFs on that node.
     813             :     // If so, then we tell PETSc about that "point". At this stage, we just need
     814             :     // to count up how many active "points" we have and cache the local number to global id
     815             :     // mapping.
     816             : 
     817             : 
     818             :     // These will be our local counters. pstart should always be zero.
     819             :     // pend will track our local "point" count.
      820             :     // If we're on a processor that coarsened the mesh to have no local elements,
     821             :     // we should make an empty PetscSection. An empty PetscSection is specified
     822             :     // by passing [0,0) to the PetscSectionSetChart call at the end. So, if we
     823             :     // have nothing on this processor, these are the correct values to pass to
     824             :     // PETSc.
     825          16 :     dof_id_type pstart = 0;
     826         560 :     dof_id_type pend = 0;
     827             : 
     828          32 :     const MeshBase & mesh = system.get_mesh();
     829             : 
     830          16 :     const DofMap & dof_map = system.get_dof_map();
     831             : 
      832             :     // If we don't have any local dofs, then there's nothing to tell the PetscSection
     833         560 :     if (dof_map.n_local_dofs() > 0)
     834             :       {
     835             :         // Conservative estimate of space needed so we don't thrash
     836          32 :         node_map.reserve(mesh.n_local_nodes());
     837          32 :         elem_map.reserve(mesh.n_active_local_elem());
     838             : 
     839             :         // We loop over active elements and then cache the global/local node mapping to make sure
     840             :         // we only count active nodes. For example, if we're calling this function and we're
      841             :         // not on the finest level in the Mesh tree, we don't want to include nodes of child elements
     842             :         // that aren't active on this level.
     843       34934 :         for (const auto & elem : mesh.active_local_element_ptr_range())
     844             :           {
     845      187812 :             for (const Node & node : elem->node_ref_range())
     846             :               {
      847             :                 // Get the global id of this local node.
     848             : 
     849             :                 // Only register nodes with the PetscSection if they have dofs that belong to
      850             :                 // this processor. Even though these are active local elements, the dofs associated
     851             :                 // with the node may belong to a different processor. The processor who owns
     852             :                 // those dofs will register that node with the PetscSection on that processor.
     853       30456 :                 std::vector<dof_id_type> node_dof_indices;
     854      167508 :                 dof_map.dof_indices( &node, node_dof_indices );
     855      167508 :                 if( !node_dof_indices.empty() && dof_map.local_index(node_dof_indices[0]) )
     856             :                   {
     857             : #ifndef NDEBUG
     858             :                     // We're assuming that if the first dof for this node belongs to this processor,
     859             :                     // then all of them do.
     860       51280 :                     for( auto dof : node_dof_indices )
     861       36372 :                       libmesh_assert(dof_map.local_index(dof));
     862             : #endif
     863             :                     // Cache the global/local mapping if we haven't already
     864             :                     // Then increment our local count
     865      156436 :                     dof_id_type node_id = node.id();
     866       29816 :                     if( node_map.count(node_id) == 0 )
     867             :                       {
     868        7184 :                         node_map.emplace(node_id, pend);
     869       79024 :                         pend++;
     870             :                       }
     871             :                   }
     872             :               }
     873             : 
     874             :             // Some finite elements, e.g. Hierarchic, associate element interior DoFs with the element
      875             :             // rather than the node (since we ought to be able to use Hierarchic elements on a QUAD4,
      876             :             // which has no interior node). Thus, we also need to check element interiors for DoFs
      877             :             // and, if the finite element has them, count the Elem in our
     878             :             // "point" accounting.
     879       18612 :             if( elem->n_dofs(system.number()) > 0 )
     880             :               {
     881           0 :                 dof_id_type elem_id = elem->id();
     882           0 :                 elem_map.emplace(elem_id, pend);
     883           0 :                 pend++;
     884             :               }
     885         523 :           }
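        // Illustrative example (editorial note, not in the source): on a processor
        // whose dofs touch 3 locally-owned nodes and one element carrying interior
        // dofs, and assuming pstart == 0, the loop above leaves
        // node_map = {n0->0, n1->1, n2->2}, elem_map = {e0->3}, and
        // pend = pstart + 4, i.e. four local "points" for the chart set below.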
     886             : 
     887             :         // SCALAR dofs live on the "last" processor, so we only do this work there, and only if any exist
     888         555 :         if( dof_map.n_SCALAR_dofs() > 0 && (system.processor_id() == (system.n_processors()-1)) )
     889             :           {
     890             :             // Loop through all the variables and cache the SCALAR ones. We cache the
     891             :             // SCALAR variable index along with the local point id to make it easier when
     892             :             // we have to register dofs with the PetscSection.
     893           0 :             for (auto v : make_range(system.n_vars()))
     894             :               {
     895           0 :                 if( system.variable(v).type().family == SCALAR )
     896             :                   {
     897           0 :                     scalar_map.emplace(pend, v);
     898           0 :                     pend++;
     899             :                   }
     900             :               }
     901             :           }
     902             : 
     903             :       }
     904             : 
     905         560 :     LibmeshPetscCall2(system.comm(), PetscSectionSetChart(section, pstart, pend));
     906         560 :   }
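  // Sketch of the ownership test used in the loop above, restated as a
  // hypothetical free function (not part of libMesh): a node is registered as
  // a "point" only when its first dof is locally indexed, relying on the
  // assumption that all dofs at a node share one owner.
  static bool example_node_is_locally_owned (const DofMap & dof_map,
                                             const Node & node)
  {
    std::vector<dof_id_type> dofs;
    dof_map.dof_indices(&node, dofs);   // all dof indices at this node
    return !dofs.empty() && dof_map.local_index(dofs[0]);
  }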
     907             : 
     908         560 :   void PetscDMWrapper::add_dofs_to_section (const System & system,
     909             :                                             PetscSection & section,
     910             :                                             const std::unordered_map<dof_id_type,dof_id_type> & node_map,
     911             :                                             const std::unordered_map<dof_id_type,dof_id_type> & elem_map,
     912             :                                             const std::map<dof_id_type,unsigned int> & scalar_map)
     913             :   {
     914          32 :     const MeshBase & mesh = system.get_mesh();
     915             : 
     916             :     // Now we go through and add dof information for each "point".
     917             :     //
     918             :     // In libMesh, for most finite elements, we just associate those DoFs with the
     919             :     // geometric nodes. So we can loop over the nodes we cached in the node_map and
     920             :     // add the DoFs for each field at that node. We need to give PETSc the local id
     921             :     // we built up in the node_map.
     922       79584 :     for (const auto [global_node_id, local_node_id] : node_map )
     923             :       {
     924        7184 :         libmesh_assert( mesh.query_node_ptr(global_node_id) );
     925             : 
     926       79024 :         const Node & node = mesh.node_ref(global_node_id);
     927             : 
     928       79024 :         this->add_dofs_helper(system,node,local_node_id,section);
     929             :       }
     930             : 
     931             :     // Some finite element types associate dofs with the element itself. So now we go
     932             :     // through those, using the Elem as the "point" we add to the PetscSection along
     933             :     // with its accompanying dofs.
     934         560 :     for (const auto [global_elem_id, local_elem_id] : elem_map )
     935             :       {
     936           0 :         libmesh_assert( mesh.query_elem_ptr(global_elem_id) );
     937             : 
     938           0 :         const Elem & elem = mesh.elem_ref(global_elem_id);
     939             : 
     940           0 :         this->add_dofs_helper(system,elem,local_elem_id,section);
     941             :       }
     942             : 
     943             :     // Now add any SCALAR dofs to the PetscSection.
     944             :     // SCALAR dofs live on the "last" processor, so we only do this work there, and only if any exist.
     945         576 :     if (system.processor_id() == (system.n_processors()-1))
     946             :       {
     947          88 :         for (const auto [local_id, scalar_var] : scalar_map )
     948             :           {
     949             :             // The number of SCALAR dofs comes from the variable order
     950           0 :             const int n_dofs = system.variable(scalar_var).type().order.get_order();
     951             : 
     952           0 :             LibmeshPetscCall2(system.comm(), PetscSectionSetFieldDof( section, local_id, scalar_var, n_dofs ));
     953             : 
     954             :             // In the SCALAR case, there are no other variables associated with the "point",
     955             :             // so the total number of dofs on the point is the same as that for the field
     956           0 :             LibmeshPetscCall2(system.comm(), PetscSectionSetDof( section, local_id, n_dofs ));
     957             :           }
     958             :       }
     959             : 
     960         560 :   }
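  // For reference, a condensed sketch (hypothetical helper, not the wrapper's
  // actual code) of the complete PetscSection construction sequence that the
  // functions above perform in pieces.  `n_dofs_at` is an assumed callback
  // supplying the dof count for a field at a point; error checking is omitted.
  static void example_build_section (MPI_Comm comm, PetscInt pstart, PetscInt pend,
                                     PetscInt n_fields,
                                     PetscInt (*n_dofs_at)(PetscInt p, PetscInt f))
  {
    PetscSection s;
    PetscSectionCreate(comm, &s);
    PetscSectionSetNumFields(s, n_fields);
    PetscSectionSetChart(s, pstart, pend);       // half-open range of local points
    for (PetscInt p = pstart; p < pend; p++)
      {
        PetscInt total = 0;
        for (PetscInt f = 0; f < n_fields; f++)
          {
            const PetscInt nf = n_dofs_at(p, f);
            PetscSectionSetFieldDof(s, p, f, nf); // per-field dofs at this point
            total += nf;
          }
        PetscSectionSetDof(s, p, total);          // total dofs at this point
      }
    PetscSectionSetUp(s);     // compute offsets; section is now usable
    PetscSectionDestroy(&s);  // sketch only; real code would keep the section
  }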
     961             : 
     962       79024 :   void PetscDMWrapper::add_dofs_helper (const System & system,
     963             :                                         const DofObject & dof_object,
     964             :                                         dof_id_type local_id,
     965             :                                         PetscSection & section)
     966             :   {
     967        7184 :     unsigned int total_n_dofs_at_dofobject = 0;
     968             : 
     969             :     // We are assuming variables are also numbered 0 to n_vars()-1
     970      316096 :     for (auto v : make_range(system.n_vars()))
     971             :       {
     972      237072 :         unsigned int n_dofs_at_dofobject = dof_object.n_dofs(system.number(), v);
     973             : 
     974      237072 :         if( n_dofs_at_dofobject > 0 )
     975             :           {
     976      178992 :             LibmeshPetscCall2(system.comm(), PetscSectionSetFieldDof( section,
     977             :                                                                       local_id,
     978             :                                                                       v,
     979             :                                                                       n_dofs_at_dofobject ));
     980             : 
     981      178992 :             total_n_dofs_at_dofobject += n_dofs_at_dofobject;
     982             :           }
     983             :       }
     984             : 
     985        7184 :     libmesh_assert_equal_to(total_n_dofs_at_dofobject, dof_object.n_dofs(system.number()));
     986             : 
     987       79024 :     LibmeshPetscCall2(system.comm(), PetscSectionSetDof( section, local_id, total_n_dofs_at_dofobject ));
     988       79024 :   }
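  // A small post-hoc consistency check (hypothetical, for illustration): for
  // every point in the chart, the per-field dof counts set by add_dofs_helper
  // must sum to the total set by PetscSectionSetDof.  Error checking omitted.
  static void example_check_field_sums (PetscSection s, PetscInt n_fields)
  {
    PetscInt pstart, pend;
    PetscSectionGetChart(s, &pstart, &pend);
    for (PetscInt p = pstart; p < pend; p++)
      {
        PetscInt total = 0, sum = 0, nf = 0;
        PetscSectionGetDof(s, p, &total);
        for (PetscInt f = 0; f < n_fields; f++)
          {
            PetscSectionGetFieldDof(s, p, f, &nf);
            sum += nf;
          }
        libmesh_assert_equal_to(sum, total);
      }
  }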
     989             : 
     990             : 
     991          16 :   dof_id_type PetscDMWrapper::check_section_n_dofs( PetscSection & section )
     992             :   {
     993          16 :     PetscInt n_local_dofs = 0;
     994             : 
     995             :     // Grab the starting and ending points from the section
     996             :     PetscInt pstart, pend;
     997          16 :     PetscErrorCode ierr = PetscSectionGetChart(section, &pstart, &pend);
     998          16 :     if (ierr)
     999           0 :       libmesh_error();
    1000             : 
    1001             :     // Count up the n_dofs for each point from the section
    1002        7200 :     for( PetscInt p = pstart; p < pend; p++ )
    1003             :       {
    1004             :         PetscInt n_dofs;
    1005        7184 :         ierr = PetscSectionGetDof(section,p,&n_dofs);
    1006        7184 :         if (ierr)
    1007           0 :           libmesh_error();
    1008        7184 :         n_local_dofs += n_dofs;
    1009             :       }
    1010             : 
    1011             :     static_assert(sizeof(PetscInt) == sizeof(dof_id_type),"PetscInt is not a dof_id_type!");
    1012          16 :     return n_local_dofs;
    1013             :   }
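  // An equivalent one-call alternative (sketch): once a section has been set
  // up, PETSc can report the same total directly via PetscSectionGetStorageSize,
  // which we believe matches the manual sum in check_section_n_dofs above.
  static PetscInt example_storage_size (PetscSection s)
  {
    PetscInt n = 0;
    PetscSectionGetStorageSize(s, &n);   // sum of dofs over the chart
    return n;
  }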
    1014             : 
    1015         280 :   void PetscDMWrapper::init_dm_data(unsigned int n_levels, const Parallel::Communicator & comm)
    1016             :   {
    1017         280 :     _dms.resize(n_levels);
    1018         280 :     _sections.resize(n_levels);
    1019         280 :     _star_forests.resize(n_levels);
    1020         280 :     _ctx_vec.resize(n_levels);
    1021         280 :     _pmtx_vec.resize(n_levels);
    1022         280 :     _subpmtx_vec.resize(n_levels);
    1023         280 :     _vec_vec.resize(n_levels);
    1024         280 :     _mesh_dof_sizes.resize(n_levels);
    1025         280 :     _mesh_dof_loc_sizes.resize(n_levels);
    1026             : 
    1027         840 :     for( unsigned int i = 0; i < n_levels; i++ )
    1028             :       {
    1029             :         // Call C++ object constructors
    1030         560 :         _pmtx_vec[i] = std::make_unique<PetscMatrix<Number>>(comm);
    1031         560 :         _subpmtx_vec[i] = std::make_unique<PetscMatrix<Number>>(comm);
    1032        1104 :         _vec_vec[i] = std::make_unique<PetscVector<Number>>(comm);
    1033             :       }
    1034         280 :   }
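  // For context, a minimal sketch (hypothetical, not the wrapper's actual
  // setup) of pairing one shell DM with an opaque per-level context, as the
  // per-level containers sized above suggest; error checking omitted.
  static void example_wire_level_dm (MPI_Comm comm, void * level_ctx)
  {
    DM dm;
    DMShellCreate(comm, &dm);           // one shell DM per mesh level
    DMShellSetContext(dm, level_ctx);   // stash the level's context for callbacks
    DMDestroy(&dm);                     // sketch only; real code retains the DM
  }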
    1035             : 
    1036             : } // end namespace libMesh
    1037             : 
    1038             : #endif // #if LIBMESH_ENABLE_AMR && LIBMESH_HAVE_METAPHYSICL
    1039             : #endif // PETSC_VERSION
    1040             : #endif // LIBMESH_HAVE_PETSC

Generated by: LCOV version 1.14