//* This file is part of the MOOSE framework
//* https://mooseframework.inl.gov
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

#include "PetscExternalPartitioner.h"

#include "GeneratedMesh.h"
#include "MooseApp.h"

#include "libmesh/mesh_tools.h"
#include "libmesh/linear_partitioner.h"
#include "libmesh/elem.h"
#include "libmesh/mesh_base.h"
#include "libmesh/petsc_solver_exception.h"

#include <memory>

using namespace libMesh;

registerMooseObject("MooseApp", PetscExternalPartitioner);

InputParameters
PetscExternalPartitioner::validParams()
{
  InputParameters params = MoosePartitioner::validParams();

  MooseEnum partPackage("parmetis ptscotch chaco party hierarch", "parmetis", false);

  params.addParam<MooseEnum>("part_package",
                             partPackage,
                             "The external package used for partitioning the mesh via PETSc");

  params.addParam<processor_id_type>(
      "num_cores_per_compute_node",
      1,
      "Number of cores per compute node for hierarchical partitioning");

  params.addParam<bool>("apply_element_weight",
                        false,
                        "Indicate if we are going to apply element weights to the partitioner");

  params.addParam<bool>("apply_side_weight",
                        false,
                        "Indicate if we are going to apply side weights to the partitioner");

  params.addClassDescription(
      "Partition the mesh using external packages via the PETSc MatPartitioning interface");

  return params;
}
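
// For reference, a typical way to select this partitioner from a MOOSE input
// file (a sketch only; the [Mesh]/[Partitioner] block layout follows standard
// MOOSE syntax, and the mesh settings shown are illustrative assumptions):
//
//   [Mesh]
//     type = GeneratedMesh
//     dim = 2
//     nx = 100
//     ny = 100
//     [Partitioner]
//       type = PetscExternalPartitioner
//       part_package = ptscotch
//       apply_element_weight = false
//       apply_side_weight = false
//     []
//   []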

PetscExternalPartitioner::PetscExternalPartitioner(const InputParameters & params)
  : MoosePartitioner(params),
    _part_package(params.get<MooseEnum>("part_package")),
    _apply_element_weight(params.get<bool>("apply_element_weight")),
    _apply_side_weight(params.get<bool>("apply_side_weight")),
    _num_parts_per_compute_node(params.get<processor_id_type>("num_cores_per_compute_node"))
{
  // Chaco and party only accept unweighted graphs
  if ((_apply_element_weight || _apply_side_weight) &&
      (_part_package == "chaco" || _part_package == "party"))
    mooseError(_part_package, " does not support weighted graphs");
}

std::unique_ptr<Partitioner>
PetscExternalPartitioner::clone() const
{
  return _app.getFactory().clone(*this);
}

void
PetscExternalPartitioner::preLinearPartition(MeshBase & mesh)
{
  // Temporarily cache the old partitioner
  auto old_partitioner = std::move(mesh.partitioner());
  // Create a linear partitioner
  mesh.partitioner() = std::make_unique<LinearPartitioner>();
  // Partition mesh
  mesh.partition(n_processors());
  // Restore the old partitioner
  mesh.partitioner() = std::move(old_partitioner);
}

void
PetscExternalPartitioner::partition(MeshBase & mesh, const unsigned int n_parts)
{
  // We want to use a parallel partitioner that requires a distributed graph.
  // Simply calling a linear partitioner first provides us with that distributed graph.
  // We should not do anything when using a distributed mesh, since the mesh itself
  // is already distributed.
  // When n_parts = 1, we do not need to run any partitioner; instead, we let libMesh
  // handle this case.
  if (mesh.is_replicated() && n_parts > 1)
    preLinearPartition(mesh);

  // Heuristic: when partitions would contain very few elements, switch from
  // ParMETIS to PTScotch, unless the user explicitly requested a package
  if (!isParamSetByUser("part_package") && (mesh.n_elem() / n_parts < 28) &&
      _part_package == "parmetis")
  {
    Moose::out
        << "Average number of elements per partition (" << mesh.n_elem() / n_parts
        << ") is less than 28. We are switching from ParMETIS to PTScotch for the partitioning."
        << std::endl;
    _part_package = "ptscotch";
  }

  Partitioner::partition(mesh, n_parts);
}

void
PetscExternalPartitioner::_do_partition(MeshBase & mesh, const unsigned int n_parts)
{
  initialize(mesh);

  dof_id_type num_edges, num_local_elems, local_elem_id, nj, side;
  std::vector<dof_id_type> side_weights;
  std::vector<dof_id_type> elem_weights;

  // Call libMesh to build the dual graph of the mesh
  build_graph(mesh);
  num_local_elems = _dual_graph.size();

  elem_weights.clear();
  if (_apply_element_weight)
    elem_weights.resize(num_local_elems);

  num_edges = 0;
  // Compute the element weights
  for (dof_id_type k = 0; k < num_local_elems; k++)
  {
    num_edges += _dual_graph[k].size();
    if (_apply_element_weight)
    {
      // Get the original element
      mooseAssert(k < static_cast<dof_id_type>(_local_id_to_elem.size()),
                  "Local element id " << k << " is not smaller than " << _local_id_to_elem.size());
      auto elem = _local_id_to_elem[k];

      elem_weights[k] = computeElementWeight(*elem);
    }
  }

  side_weights.clear();
  // Edge weights represent the communication cost between neighboring elements
  if (_apply_side_weight)
    side_weights.resize(num_edges);

  local_elem_id = 0;
  nj = 0;
  for (auto & row : _dual_graph)
  {
    mooseAssert(local_elem_id < static_cast<dof_id_type>(_local_id_to_elem.size()),
                "Local element id " << local_elem_id << " is not smaller than "
                                    << _local_id_to_elem.size());
    auto elem = _local_id_to_elem[local_elem_id];
    unsigned int n_neighbors = 0;

    side = 0;
    for (auto neighbor : elem->neighbor_ptr_range())
    {
      // Skip boundary sides since they do not connect to anything
      if (neighbor != nullptr && neighbor->active())
      {
        if (_apply_side_weight)
          side_weights[nj] = computeSideWeight(*elem, side);

        nj++;
        n_neighbors++;
      }

      side++;
    }
    if (n_neighbors != row.size())
      mooseError(
          "Cannot construct dual graph correctly since the number of neighbors is inconsistent");

    local_elem_id++;
  }

  std::vector<dof_id_type> partition;
  // Partition the graph
  partitionGraph(comm(),
                 _dual_graph,
                 elem_weights,
                 side_weights,
                 n_parts,
                 _num_parts_per_compute_node,
                 _part_package,
                 partition);
  // Assign the partitioning to the mesh
  assign_partitioning(mesh, partition);
}

void
PetscExternalPartitioner::partitionGraph(const Parallel::Communicator & comm,
                                         const std::vector<std::vector<dof_id_type>> & graph,
                                         const std::vector<dof_id_type> & elem_weights,
                                         const std::vector<dof_id_type> & side_weights,
                                         const dof_id_type num_parts,
                                         const dof_id_type num_parts_per_compute_node,
                                         const std::string & part_package,
                                         std::vector<dof_id_type> & partition)
{
  Mat dual;
  PetscInt num_local_elems, num_elems, *xadj = nullptr, *adjncy = nullptr, i, *values = nullptr,
                                       *petsc_elem_weights = nullptr;
  const PetscInt * parts;
  MatPartitioning part;
  IS is;

  // Number of local elements
  num_elems = num_local_elems = graph.size();
  // Figure out the total number of elements across all processors
  comm.sum(num_elems);
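
  // Build the adjacency structure in the compressed-row (CSR) layout that
  // MatCreateMPIAdj expects: xadj[k] holds the offset into adjncy at which the
  // neighbor list of local element k begins. For example, the local graph
  //   {1}, {0, 2}, {1}
  // (a 1D chain of three elements) is encoded as
  //   xadj   = [0, 1, 3, 4]
  //   adjncy = [1, 0, 2, 1]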
  LibmeshPetscCallA(comm.get(), PetscCalloc1(num_local_elems + 1, &xadj));

  num_local_elems = 0;
  xadj[0] = 0;
  for (auto & row : graph)
  {
    num_local_elems++;
    xadj[num_local_elems] = xadj[num_local_elems - 1] + row.size();
  }

  LibmeshPetscCallA(comm.get(), PetscCalloc1(xadj[num_local_elems], &adjncy));

  // Fill up the adjacency array
  i = 0;
  for (auto & row : graph)
    for (auto elem : row)
      adjncy[i++] = elem;

  // If there are no neighbors at all, no side weights should be provided
  if (!i)
  {
    mooseAssert(!side_weights.size(),
                "No side weights should be provided since there are no neighbors at all");
  }

  // Copy over the side (edge) weights
  if (side_weights.size())
  {
    mooseAssert((PetscInt)side_weights.size() == i,
                "Side weight size " << side_weights.size()
                                    << " does not match with adjacency matrix size " << i);
    LibmeshPetscCallA(comm.get(), PetscCalloc1(side_weights.size(), &values));
    i = 0;
    for (auto weight : side_weights)
      values[i++] = weight;
  }
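
  // Note: MatCreateMPIAdj does not copy xadj, adjncy, or values; the matrix
  // takes ownership of these PetscCalloc'd arrays and frees them when it is
  // destroyed, which is why they are never freed explicitly here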
  LibmeshPetscCallA(
      comm.get(),
      MatCreateMPIAdj(comm.get(), num_local_elems, num_elems, xadj, adjncy, values, &dual));

  LibmeshPetscCallA(comm.get(), MatPartitioningCreate(comm.get(), &part));
#if !PETSC_VERSION_LESS_THAN(3, 12, 3)
  // Edge weights are ignored by PETSc unless explicitly enabled
  LibmeshPetscCallA(comm.get(), MatPartitioningSetUseEdgeWeights(part, PETSC_TRUE));
#endif
  LibmeshPetscCallA(comm.get(), MatPartitioningSetAdjacency(part, dual));

  if (!num_local_elems)
  {
    mooseAssert(!elem_weights.size(),
                "No element weights should be provided since there are no elements at all");
  }

  // Handle element weights
  if (elem_weights.size())
  {
    mooseAssert((PetscInt)elem_weights.size() == num_local_elems,
                "Element weight size " << elem_weights.size()
                                       << " does not match with the number of local elements "
                                       << num_local_elems);

    LibmeshPetscCallA(comm.get(), PetscCalloc1(elem_weights.size(), &petsc_elem_weights));
    i = 0;
    for (auto weight : elem_weights)
      petsc_elem_weights[i++] = weight;

    LibmeshPetscCallA(comm.get(), MatPartitioningSetVertexWeights(part, petsc_elem_weights));
  }

  LibmeshPetscCallA(comm.get(), MatPartitioningSetNParts(part, num_parts));
#if PETSC_VERSION_LESS_THAN(3, 9, 2)
  mooseAssert(part_package != "party", "PETSc-3.9.2 or higher is required for using party");
#endif
#if PETSC_VERSION_LESS_THAN(3, 9, 0)
  mooseAssert(part_package != "chaco", "PETSc-3.9.0 or higher is required for using chaco");
#endif
  LibmeshPetscCallA(comm.get(), MatPartitioningSetType(part, part_package.c_str()));
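  // For hierarchical partitioning, PETSc first partitions across compute nodes
  // and then subdivides each coarse part into num_parts_per_compute_node fine
  // parts, matching the machine's node/core layout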
  if (part_package == "hierarch")
    LibmeshPetscCallA(comm.get(),
                      MatPartitioningHierarchicalSetNfineparts(part, num_parts_per_compute_node));

  LibmeshPetscCallA(comm.get(), MatPartitioningSetFromOptions(part));
  LibmeshPetscCallA(comm.get(), MatPartitioningApply(part, &is));

  LibmeshPetscCallA(comm.get(), ISGetIndices(is, &parts));

  // Copy the resulting partition back into the output vector
  partition.resize(num_local_elems);
  for (i = 0; i < num_local_elems; i++)
    partition[i] = parts[i];

  LibmeshPetscCallA(comm.get(), ISRestoreIndices(is, &parts));
  LibmeshPetscCallA(comm.get(), MatPartitioningDestroy(&part));
  LibmeshPetscCallA(comm.get(), MatDestroy(&dual));
  LibmeshPetscCallA(comm.get(), ISDestroy(&is));
}

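// Default weights: every element and every side costs the same, which reduces
// to an unweighted partition. Derived classes can override these two methods
// to bias the partitioner, e.g., toward balancing expensive elements.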
dof_id_type
PetscExternalPartitioner::computeElementWeight(Elem & /*elem*/)
{
  return 1;
}

dof_id_type
PetscExternalPartitioner::computeSideWeight(Elem & /*elem*/, unsigned int /*side*/)
{
  return 1;
}