15 #include "libmesh/mesh_tools.h" 16 #include "libmesh/linear_partitioner.h" 17 #include "libmesh/elem.h" 18 #include "libmesh/mesh_base.h" 19 #include "libmesh/petsc_solver_exception.h" 32 MooseEnum partPackage(
"parmetis ptscotch chaco party hierarch",
"parmetis",
false);
36 "The external package is used for partitioning the mesh via PETSc");
39 "num_cores_per_compute_node",
41 "Number of cores per compute node for hierarchical partitioning");
  params.addParam<bool>("apply_element_weight",
                        false,
                        "Indicate if we are going to apply element weights to partitioners");
48 "apply_side_weight",
false,
"Indicate if we are going to apply side weights to partitioners");
51 "Partition mesh using external packages via PETSc MatPartitioning interface");
PetscExternalPartitioner::PetscExternalPartitioner(const InputParameters & params)
  : MoosePartitioner(params),
    _part_package(params.get<MooseEnum>("part_package")),
    _apply_element_weight(params.get<bool>("apply_element_weight")),
    _apply_side_weight(params.get<bool>("apply_side_weight")),
    _num_parts_per_compute_node(params.get<dof_id_type>("num_cores_per_compute_node"))
{
}
std::unique_ptr<Partitioner>
PetscExternalPartitioner::clone() const
{
  return _app.getFactory().clone(*this);
}

void
PetscExternalPartitioner::preLinearPartition(MeshBase & mesh)
{
  // Temporarily swap in a linear partitioner so the mesh ends up with the
  // distributed element layout the parallel partitioners expect
  auto old_partitioner = std::move(mesh.partitioner());
  mesh.partitioner() = std::make_unique<LinearPartitioner>();
  mesh.partition(n_processors());
  mesh.partitioner() = std::move(old_partitioner);
}
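// The parallel graph partitioners consume a distributed dual graph. On a
// replicated mesh we therefore run a cheap linear pre-partition first; a
// distributed mesh is already laid out across processors, and for a single
// partition libMesh can handle everything itself.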
void
PetscExternalPartitioner::partition(MeshBase & mesh, const unsigned int n_parts)
{
  if (mesh.is_replicated() && n_parts > 1)
    preLinearPartition(mesh);

  // ParMETIS is known to perform poorly when each partition would receive
  // only a handful of elements
  if (_part_package == "parmetis" && mesh.n_elem() / n_parts < 28)
  {
    Moose::out << "Average number of elements per partition (" << mesh.n_elem() / n_parts
               << ") is less than 28. We are switching from ParMETIS to PTScotch for the "
                  "partitioning."
               << std::endl;
    _part_package = "ptscotch";
  }

  Partitioner::partition(mesh, n_parts);
}
void
PetscExternalPartitioner::_do_partition(MeshBase & mesh, const unsigned int n)
{
  dof_id_type num_edges, num_local_elems, local_elem_id, nj, side;
  std::vector<dof_id_type> side_weights;
  std::vector<dof_id_type> elem_weights;
  std::vector<dof_id_type> partition;

  // Build the dual graph of the mesh: one vertex per active local element,
  // one edge per pair of neighboring active elements
  build_graph(mesh);
  num_local_elems = _dual_graph.size();
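  // Element weights bias how many elements each partition receives; side
  // weights bias which element faces the partitioner prefers to cut.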
  elem_weights.clear();
  if (_apply_element_weight)
  {
    elem_weights.resize(num_local_elems);

    for (dof_id_type k = 0; k < num_local_elems; k++)
    {
      mooseAssert(k < _local_id_to_elem.size(),
                  "Local element id " << k << " is not smaller than "
                                      << _local_id_to_elem.size());
      elem_weights[k] = computeElementWeight(*_local_id_to_elem[k]);
    }
  }
  side_weights.clear();
  if (_apply_side_weight)
  {
    // One weight per dual-graph edge
    num_edges = 0;
    for (auto & row : _dual_graph)
      num_edges += row.size();

    side_weights.resize(num_edges);

    local_elem_id = 0;
    nj = 0;
    for (auto & row : _dual_graph)
    {
      mooseAssert(local_elem_id < _local_id_to_elem.size(),
                  "Local element id " << local_elem_id << " is not smaller than "
                                      << _local_id_to_elem.size());
      Elem * elem = _local_id_to_elem[local_elem_id++];
      unsigned int n_neighbors = 0;

      side = 0;
      for (auto neighbor : elem->neighbor_ptr_range())
      {
        // Boundary sides have no neighbor and do not appear in the dual graph
        if (neighbor != nullptr && neighbor->active())
        {
          side_weights[nj++] = computeSideWeight(*elem, side);
          n_neighbors++;
        }
        side++;
      }

      if (n_neighbors != row.size())
        mooseError(
            "Cannot construct dual graph correctly since the number of neighbors is inconsistent");
    }
  }

  partitionGraph(comm(),
                 _dual_graph,
                 elem_weights,
                 side_weights,
                 n,
                 _num_parts_per_compute_node,
                 _part_package,
                 partition);

  assign_partitioning(mesh, partition);
}
void
PetscExternalPartitioner::partitionGraph(const Parallel::Communicator & comm,
                                         const std::vector<std::vector<dof_id_type>> & graph,
                                         const std::vector<dof_id_type> & elem_weights,
                                         const std::vector<dof_id_type> & side_weights,
                                         const dof_id_type num_parts,
                                         const dof_id_type num_parts_per_compute_node,
                                         const std::string & part_package,
                                         std::vector<dof_id_type> & partition)
{
  Mat dual;
  PetscInt num_local_elems, num_elems, *xadj = nullptr, *adjncy = nullptr, i, *values = nullptr,
      *petsc_elem_weights = nullptr;
  const PetscInt * parts;
  MatPartitioning part;
  IS is;

  num_elems = num_local_elems = graph.size();

  // MatCreateMPIAdj needs the global number of graph vertices
  comm.sum(num_elems);
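  // CSR layout: xadj[k] is the offset of row k in adjncy, so the neighbors of
  // local element k live in adjncy[xadj[k]] .. adjncy[xadj[k + 1] - 1].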
  LibmeshPetscCallA(comm.get(), PetscCalloc1(num_local_elems + 1, &xadj));

  // Reuse num_local_elems as the running row counter; PetscCalloc1 zeroed the
  // array, so xadj[0] is already 0
  num_local_elems = 0;
  for (auto & row : graph)
  {
    num_local_elems++;
    xadj[num_local_elems] = xadj[num_local_elems - 1] + row.size();
  }
  LibmeshPetscCallA(comm.get(), PetscCalloc1(xadj[num_local_elems], &adjncy));

  // Flatten the neighbor lists into the column array
  i = 0;
  for (auto & row : graph)
    for (auto elem : row)
      adjncy[i++] = elem;
  if (!i)
    mooseAssert(!side_weights.size(),
                "No side weights should be provided since there are no neighbors at all");

  if (side_weights.size())
  {
    mooseAssert((PetscInt)side_weights.size() == i,
                "Side weight size " << side_weights.size()
                                    << " does not match with adjacency matrix size " << i);

    LibmeshPetscCallA(comm.get(), PetscCalloc1(side_weights.size(), &values));
    i = 0;
    for (auto weight : side_weights)
      values[i++] = weight;
  }

  // Build the distributed adjacency matrix; PETSc takes ownership of xadj,
  // adjncy, and values and will free them with the matrix
  LibmeshPetscCallA(
      comm.get(),
      MatCreateMPIAdj(comm.get(), num_local_elems, num_elems, xadj, adjncy, values, &dual));
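  // The MatPartitioning object wraps the external packages (ParMETIS,
  // PTScotch, Chaco, Party, or the hierarchical scheme) behind one interface.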
  LibmeshPetscCallA(comm.get(), MatPartitioningCreate(comm.get(), &part));
#if !PETSC_VERSION_LESS_THAN(3, 12, 3)
  LibmeshPetscCallA(comm.get(), MatPartitioningSetUseEdgeWeights(part, PETSC_TRUE));
#endif
  LibmeshPetscCallA(comm.get(), MatPartitioningSetAdjacency(part, dual));
  if (!num_local_elems)
    mooseAssert(!elem_weights.size(),
                "No element weights should be provided since there are no elements at all");

  if (elem_weights.size())
  {
    mooseAssert((PetscInt)elem_weights.size() == num_local_elems,
                "Element weight size " << elem_weights.size()
                                       << " does not match with the number of local elements "
                                       << num_local_elems);

    LibmeshPetscCallA(comm.get(), PetscCalloc1(elem_weights.size(), &petsc_elem_weights));
    i = 0;
    for (auto weight : elem_weights)
      petsc_elem_weights[i++] = weight;

    // PETSc takes ownership of the weight array and frees it for us
    LibmeshPetscCallA(comm.get(), MatPartitioningSetVertexWeights(part, petsc_elem_weights));
  }
  LibmeshPetscCallA(comm.get(), MatPartitioningSetNParts(part, num_parts));

#if PETSC_VERSION_LESS_THAN(3, 9, 2)
  mooseAssert(part_package != "party", "PETSc-3.9.3 or higher is required for using party");
#endif

#if PETSC_VERSION_LESS_THAN(3, 9, 0)
  mooseAssert(part_package != "chaco", "PETSc-3.9.0 or higher is required for using chaco");
#endif

  LibmeshPetscCallA(comm.get(), MatPartitioningSetType(part, part_package.c_str()));

  if (part_package == "hierarch")
    LibmeshPetscCallA(
        comm.get(), MatPartitioningHierarchicalSetNfineparts(part, num_parts_per_compute_node));
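  // Command-line options (e.g. -mat_partitioning_type) can still override the
  // settings made above.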
  LibmeshPetscCallA(comm.get(), MatPartitioningSetFromOptions(part));
  LibmeshPetscCallA(comm.get(), MatPartitioningApply(part, &is));

  LibmeshPetscCallA(comm.get(), ISGetIndices(is, &parts));
  // Copy the partition ids out of the index set before releasing it
  partition.resize(num_local_elems);
  for (i = 0; i < num_local_elems; i++)
    partition[i] = parts[i];

  LibmeshPetscCallA(comm.get(), ISRestoreIndices(is, &parts));
  LibmeshPetscCallA(comm.get(), MatPartitioningDestroy(&part));
  LibmeshPetscCallA(comm.get(), MatDestroy(&dual));
  LibmeshPetscCallA(comm.get(), ISDestroy(&is));
}