Line data Source code
1 : // The libMesh Finite Element Library.
2 : // Copyright (C) 2002-2025 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 :
4 : // This library is free software; you can redistribute it and/or
5 : // modify it under the terms of the GNU Lesser General Public
6 : // License as published by the Free Software Foundation; either
7 : // version 2.1 of the License, or (at your option) any later version.
8 :
9 : // This library is distributed in the hope that it will be useful,
10 : // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 : // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 : // Lesser General Public License for more details.
13 :
14 : // You should have received a copy of the GNU Lesser General Public
15 : // License along with this library; if not, write to the Free Software
16 : // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 :
18 :
19 :
20 : #include "libmesh/libmesh_config.h"
21 :
22 : #ifdef LIBMESH_HAVE_PETSC
23 :
24 : // Local includes
25 : #include "libmesh/petsc_matrix.h"
26 :
27 : // libMesh includes
28 : #include "libmesh/dof_map.h"
29 : #include "libmesh/dense_matrix.h"
30 : #include "libmesh/libmesh_logging.h"
31 : #include "libmesh/petsc_vector.h"
32 : #include "libmesh/parallel.h"
33 : #include "libmesh/utility.h"
34 : #include "libmesh/wrapped_petsc.h"
35 :
36 : // C++ includes
37 : #ifdef LIBMESH_HAVE_UNISTD_H
38 : #include <unistd.h> // mkstemp
39 : #endif
40 : #include <fstream>
41 :
42 : #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
43 :
44 : namespace
45 : {
46 : using namespace libMesh;
47 :
48 : // The historic libMesh n_nz & n_oz arrays are set up for PETSc's AIJ format.
49 : // However, when the blocksize is > 1, we need to transform these into
50 : // their BAIJ counterparts.
51 : inline
52 : void transform_preallocation_arrays (const PetscInt blocksize,
53 : const std::vector<numeric_index_type> & n_nz,
54 : const std::vector<numeric_index_type> & n_oz,
55 : std::vector<numeric_index_type> & b_n_nz,
56 : std::vector<numeric_index_type> & b_n_oz)
57 : {
58 : libmesh_assert_equal_to (n_nz.size(), n_oz.size());
59 : libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
60 :
61 : b_n_nz.clear(); /**/ b_n_nz.reserve(n_nz.size()/blocksize);
62 : b_n_oz.clear(); /**/ b_n_oz.reserve(n_oz.size()/blocksize);
63 :
64 : for (std::size_t nn=0, nnzs=n_nz.size(); nn<nnzs; nn += blocksize)
65 : {
66 : b_n_nz.push_back (n_nz[nn]/blocksize);
67 : b_n_oz.push_back (n_oz[nn]/blocksize);
68 : }
69 : }
70 : }
71 :
72 : #endif
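// A minimal usage sketch of transform_preallocation_arrays() above, with
// made-up numbers (not taken from any particular mesh). Each block row keeps
// the count of its first scalar row, divided by the blocksize:
//
//   const PetscInt blocksize = 2;
//   std::vector<numeric_index_type> n_nz = {4, 4, 6, 6};  // per scalar row
//   std::vector<numeric_index_type> n_oz = {2, 2, 0, 0};
//   std::vector<numeric_index_type> b_n_nz, b_n_oz;
//   transform_preallocation_arrays(blocksize, n_nz, n_oz, b_n_nz, b_n_oz);
//   // now b_n_nz == {2, 3} and b_n_oz == {1, 0}, i.e. per block row, in blocks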
73 :
74 :
75 :
76 : namespace libMesh
77 : {
78 :
79 :
80 : //-----------------------------------------------------------------------
81 : // PetscMatrix members
82 :
83 :
84 : // Constructor
85 : template <typename T>
86 27771 : PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in) :
87 27771 : PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
88 : {
89 27771 : }
90 :
91 :
92 :
93 : // Constructor taking an existing Mat and, via the destroy_on_exit flag,
94 : // optionally the responsibility for destroying it
95 : template <typename T>
96 2108 : PetscMatrix<T>::PetscMatrix(Mat mat_in,
97 : const Parallel::Communicator & comm_in,
98 : const bool destroy_on_exit) :
99 2108 : PetscMatrixBase<T>(mat_in, comm_in, destroy_on_exit)
100 : {
101 : MatType mat_type;
102 2108 : LibmeshPetscCall(MatGetType(mat_in, &mat_type));
103 : PetscBool is_hypre;
104 2108 : LibmeshPetscCall(PetscStrcmp(mat_type, MATHYPRE, &is_hypre));
105 2108 : if (is_hypre == PETSC_TRUE)
106 0 : _mat_type = HYPRE;
107 : else
108 2108 : _mat_type = AIJ;
109 2108 : }
110 :
111 :
112 : // Constructor taking global and local dimensions and, optionally,
113 : // number of on- and off-diagonal non-zeros and a block size
114 : template <typename T>
115 0 : PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in,
116 : const numeric_index_type m_in,
117 : const numeric_index_type n_in,
118 : const numeric_index_type m_l,
119 : const numeric_index_type n_l,
120 : const numeric_index_type n_nz,
121 : const numeric_index_type n_oz,
122 : const numeric_index_type blocksize_in) :
123 0 : PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
124 : {
125 0 : this->init(m_in, n_in, m_l, n_l, n_nz, n_oz, blocksize_in);
126 0 : }
127 :
128 :
129 : // Destructor
130 : template <typename T>
131 28231 : PetscMatrix<T>::~PetscMatrix() = default;
132 :
133 : template <typename T>
134 : void
135 45772 : PetscMatrix<T>::init_without_preallocation (const numeric_index_type m_in,
136 : const numeric_index_type n_in,
137 : const numeric_index_type m_l,
138 : const numeric_index_type n_l,
139 : const numeric_index_type blocksize_in)
140 : {
141 : // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
142 1446 : libmesh_ignore(blocksize_in);
143 :
144 : // Clear initialized matrices
145 45772 : if (this->initialized())
146 280 : this->clear();
147 :
148 45772 : PetscInt m_global = static_cast<PetscInt>(m_in);
149 45772 : PetscInt n_global = static_cast<PetscInt>(n_in);
150 45772 : PetscInt m_local = static_cast<PetscInt>(m_l);
151 45772 : PetscInt n_local = static_cast<PetscInt>(n_l);
152 :
153 45772 : LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
154 45772 : LibmeshPetscCall(MatSetSizes(this->_mat, m_local, n_local, m_global, n_global));
155 45772 : PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
156 45772 : LibmeshPetscCall(MatSetBlockSize(this->_mat,blocksize));
157 45772 : LibmeshPetscCall(MatSetOptionsPrefix(this->_mat, ""));
158 :
159 : #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
160 : if (blocksize > 1)
161 : {
162 : // specified blocksize, bs>1.
163 : // double check sizes.
164 : libmesh_assert_equal_to (m_local % blocksize, 0);
165 : libmesh_assert_equal_to (n_local % blocksize, 0);
166 : libmesh_assert_equal_to (m_global % blocksize, 0);
167 : libmesh_assert_equal_to (n_global % blocksize, 0);
170 :
171 : LibmeshPetscCall(MatSetType(this->_mat, MATBAIJ)); // Automatically chooses seqbaij or mpibaij
172 :
173 : // MatSetFromOptions needs to happen before Preallocation routines
174 : // since MatSetFromOptions can change matrix type and remove incompatible
175 : // preallocation
176 : LibmeshPetscCall(MatSetFromOptions(this->_mat));
177 : }
178 : else
179 : #endif
180 : {
181 45772 : switch (this->_mat_type) {
182 45772 : case AIJ:
183 45772 : LibmeshPetscCall(MatSetType(this->_mat, MATAIJ)); // Automatically chooses seqaij or mpiaij
184 :
185 : // MatSetFromOptions needs to happen before Preallocation routines
186 : // since MatSetFromOptions can change matrix type and remove incompatible
187 : // preallocation
188 45772 : LibmeshPetscCall(MatSetFromOptions(this->_mat));
189 1446 : break;
190 :
191 0 : case HYPRE:
192 : #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
193 0 : LibmeshPetscCall(MatSetType(this->_mat, MATHYPRE));
194 :
195 : // MatSetFromOptions needs to happen before Preallocation routines
196 : // since MatSetFromOptions can change matrix type and remove incompatible
197 : // preallocation
198 0 : LibmeshPetscCall(MatSetFromOptions(this->_mat));
199 : #else
200 : libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
201 : #endif
202 0 : break;
203 :
204 0 : default: libmesh_error_msg("Unsupported petsc matrix type");
205 : }
206 : }
207 :
208 45772 : this->set_context ();
209 45772 : }
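// Because MatSetFromOptions() is honored above, the matrix type chosen here is
// only a default; it can still be overridden at run time through PETSc's options
// database. For example (hypothetical command line; the available types depend
// on the PETSc build):
//
//   ./my_app -mat_type aijcusparse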
210 :
211 :
212 :
213 : template <typename T>
214 3158 : void PetscMatrix<T>::init (const numeric_index_type m_in,
215 : const numeric_index_type n_in,
216 : const numeric_index_type m_l,
217 : const numeric_index_type n_l,
218 : const numeric_index_type nnz,
219 : const numeric_index_type noz,
220 : const numeric_index_type blocksize_in)
221 : {
222 3158 : this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
223 :
224 3158 : PetscInt n_nz = static_cast<PetscInt>(nnz);
225 3158 : PetscInt n_oz = static_cast<PetscInt>(noz);
226 :
227 : #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
228 : if (blocksize > 1)
229 : {
230 : LibmeshPetscCall(MatSeqBAIJSetPreallocation(this->_mat, blocksize, n_nz/blocksize, NULL));
231 : LibmeshPetscCall(MatMPIBAIJSetPreallocation(this->_mat, blocksize,
232 : n_nz/blocksize, NULL,
233 : n_oz/blocksize, NULL));
234 : }
235 : else
236 : #endif
237 : {
238 3158 : switch (this->_mat_type) {
239 3158 : case AIJ:
240 3158 : LibmeshPetscCall(MatSeqAIJSetPreallocation(this->_mat, n_nz, NULL));
241 3158 : LibmeshPetscCall(MatMPIAIJSetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
242 88 : break;
243 :
244 0 : case HYPRE:
245 : #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
246 0 : LibmeshPetscCall(MatHYPRESetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
247 : #else
248 : libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
249 : #endif
250 0 : break;
251 :
252 0 : default: libmesh_error_msg("Unsupported petsc matrix type");
253 : }
254 : }
255 :
256 3158 : this->finish_initialization();
257 3158 : }
258 :
259 : template <typename T>
260 42482 : void PetscMatrix<T>::preallocate(const numeric_index_type libmesh_dbg_var(m_l),
261 : const std::vector<numeric_index_type> & n_nz,
262 : const std::vector<numeric_index_type> & n_oz,
263 : const numeric_index_type blocksize_in)
264 : {
265 : // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
266 1358 : libmesh_assert_equal_to (n_nz.size(), m_l);
267 1358 : libmesh_assert_equal_to (n_oz.size(), m_l);
268 : // Avoid unused warnings when not configured with block storage
269 1358 : libmesh_ignore(blocksize_in);
270 :
271 : #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
272 : PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
273 :
274 : if (blocksize > 1)
275 : {
276 : // transform the per-entry n_nz and n_oz arrays into their block counterparts.
277 : std::vector<numeric_index_type> b_n_nz, b_n_oz;
278 :
279 : transform_preallocation_arrays (blocksize,
280 : n_nz, n_oz,
281 : b_n_nz, b_n_oz);
282 :
283 : LibmeshPetscCall(MatSeqBAIJSetPreallocation (this->_mat,
284 : blocksize,
285 : 0,
286 : numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data())));
287 :
288 : LibmeshPetscCall(MatMPIBAIJSetPreallocation (this->_mat,
289 : blocksize,
290 : 0,
291 : numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()),
292 : 0,
293 : numeric_petsc_cast(b_n_oz.empty() ? nullptr : b_n_oz.data())));
294 : }
295 : else
296 : #endif
297 : {
298 42482 : switch (this->_mat_type) {
299 1358 : case AIJ:
300 76976 : LibmeshPetscCall(MatSeqAIJSetPreallocation (this->_mat,
301 : 0,
302 : numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data())));
303 111470 : LibmeshPetscCall(MatMPIAIJSetPreallocation (this->_mat,
304 : 0,
305 : numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
306 : 0,
307 : numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
308 1358 : break;
309 :
310 0 : case HYPRE:
311 : #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
312 0 : LibmeshPetscCall(MatHYPRESetPreallocation (this->_mat,
313 : 0,
314 : numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
315 : 0,
316 : numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
317 : #else
318 : libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
319 : #endif
320 0 : break;
321 :
322 0 : default: libmesh_error_msg("Unsupported petsc matrix type");
323 : }
324 :
325 : }
326 42482 : }
327 :
328 : template <typename T>
329 45772 : void PetscMatrix<T>::finish_initialization()
330 : {
331 : // Make it an error for PETSc to allocate new nonzero entries during assembly. For old PETSc
332 : // versions this option must be set after preallocation for MPIAIJ matrices
333 45772 : LibmeshPetscCall(MatSetOption(this->_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
334 45772 : this->_is_initialized = true;
335 45772 : }
336 :
337 : template <typename T>
338 490 : void PetscMatrix<T>::init (const numeric_index_type m_in,
339 : const numeric_index_type n_in,
340 : const numeric_index_type m_l,
341 : const numeric_index_type n_l,
342 : const std::vector<numeric_index_type> & n_nz,
343 : const std::vector<numeric_index_type> & n_oz,
344 : const numeric_index_type blocksize_in)
345 : {
346 490 : this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
347 490 : this->preallocate(m_l, n_nz, n_oz, blocksize_in);
348 :
349 490 : this->finish_initialization();
350 490 : }
351 :
352 :
353 : template <typename T>
354 42124 : void PetscMatrix<T>::init (const ParallelType libmesh_dbg_var(type))
355 : {
356 1344 : libmesh_assert(this->_dof_map);
357 :
358 42124 : const numeric_index_type m_in = this->_dof_map->n_dofs();
359 1344 : const numeric_index_type m_l = this->_dof_map->n_local_dofs();
360 1344 : if (m_in != m_l)
361 1312 : libmesh_assert(type != SERIAL);
362 :
363 42124 : const auto blocksize = this->_dof_map->block_size();
364 :
365 42124 : this->init_without_preallocation(m_in, m_in, m_l, m_l, blocksize);
366 42124 : if (!this->_use_hash_table)
367 : {
368 41992 : const std::vector<numeric_index_type> & n_nz = this->_sp->get_n_nz();
369 1344 : const std::vector<numeric_index_type> & n_oz = this->_sp->get_n_oz();
370 :
371 41992 : this->preallocate(m_l, n_nz, n_oz, blocksize);
372 : }
373 :
374 42124 : this->finish_initialization();
375 42124 : }
376 :
377 :
378 : template <typename T>
379 0 : void PetscMatrix<T>::update_preallocation_and_zero ()
380 : {
381 0 : libmesh_not_implemented();
382 : }
383 :
384 : template <typename T>
385 0 : void PetscMatrix<T>::reset_preallocation()
386 : {
387 0 : semiparallel_only();
388 :
389 : #if !PETSC_VERSION_LESS_THAN(3,9,0)
390 0 : libmesh_assert (this->initialized());
391 :
392 0 : LibmeshPetscCall(MatResetPreallocation(this->_mat));
393 : #else
394 : libmesh_warning("Your version of PETSc doesn't support resetting of "
395 : "preallocation, so we will use your most recent sparsity "
396 : "pattern. This may result in a degradation of performance\n");
397 : #endif
398 0 : }
399 :
400 : template <typename T>
401 455082 : void PetscMatrix<T>::zero ()
402 : {
403 12170 : libmesh_assert (this->initialized());
404 :
405 12170 : semiparallel_only();
406 :
407 : PetscInt m_l, n_l;
408 :
409 455082 : LibmeshPetscCall(MatGetLocalSize(this->_mat,&m_l,&n_l));
410 :
411 455082 : if (n_l)
412 422742 : LibmeshPetscCall(MatZeroEntries(this->_mat));
413 455082 : }
414 :
415 : template <typename T>
416 0 : void PetscMatrix<T>::zero_rows (std::vector<numeric_index_type> & rows, T diag_value)
417 : {
418 0 : libmesh_assert (this->initialized());
419 :
420 0 : semiparallel_only();
421 :
422 : // As of petsc-dev at the time of 3.1.0, MatZeroRows now takes two additional
423 : // optional arguments. The optional arguments (x,b) can be used to specify the
424 : // solutions for the zeroed rows (x) and right hand side (b) to update.
425 : // Could be useful for setting boundary conditions...
426 0 : if (!rows.empty())
427 0 : LibmeshPetscCall(MatZeroRows(this->_mat, cast_int<PetscInt>(rows.size()),
428 : numeric_petsc_cast(rows.data()), PS(diag_value),
429 : NULL, NULL));
430 : else
431 0 : LibmeshPetscCall(MatZeroRows(this->_mat, 0, NULL, PS(diag_value), NULL, NULL));
432 0 : }
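// A hedged sketch of the (x,b) usage mentioned in the comment above, calling
// PETSc directly rather than any libMesh interface. With x_vec holding the
// desired boundary values and b_vec the right-hand side, MatZeroRows() zeroes
// the listed rows, places diag_value on the diagonal, and sets b_vec on those
// rows to diag_value * x_vec, so the zeroed equations enforce the values in x_vec:
//
//   LibmeshPetscCall(MatZeroRows(mat, n_rows, rows, diag_value, x_vec, b_vec));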
433 :
434 : template <typename T>
435 70 : std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::zero_clone () const
436 : {
437 70 : libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
438 :
439 : // Copy the nonzero pattern only
440 : Mat copy;
441 70 : LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, ©));
442 :
443 : // Call wrapping PetscMatrix constructor, have it take over
444 : // ownership.
445 72 : auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
446 70 : ret->set_destroy_mat_on_exit(true);
447 :
448 72 : return ret;
449 : }
450 :
451 :
452 :
453 : template <typename T>
454 210 : std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::clone () const
455 : {
456 210 : libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
457 :
458 : // Copy the nonzero pattern and numerical values
459 : Mat copy;
460 210 : LibmeshPetscCall(MatDuplicate(this->_mat, MAT_COPY_VALUES, ©));
461 :
462 : // Call wrapping PetscMatrix constructor, have it take over
463 : // ownership.
464 216 : auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
465 210 : ret->set_destroy_mat_on_exit(true);
466 :
467 216 : return ret;
468 : }
469 :
470 :
471 :
472 : template <typename T>
473 : template <NormType N>
474 2524 : Real PetscMatrix<T>::norm () const
475 : {
476 74 : libmesh_assert (this->initialized());
477 :
478 74 : semiparallel_only();
479 :
480 : PetscReal petsc_value;
481 : Real value;
482 :
483 74 : libmesh_assert (this->closed());
484 :
485 2524 : LibmeshPetscCall(MatNorm(this->_mat, N, &petsc_value));
486 :
487 2524 : value = static_cast<Real>(petsc_value);
488 :
489 2524 : return value;
490 : }
491 : template <typename T>
492 1334 : Real PetscMatrix<T>::l1_norm () const
493 : {
494 1334 : return PetscMatrix<T>::norm<NORM_1>();
495 : }
496 : template <typename T>
497 0 : Real PetscMatrix<T>::frobenius_norm () const
498 : {
499 0 : return PetscMatrix<T>::norm<NORM_FROBENIUS>();
500 : }
501 : template <typename T>
502 1264 : Real PetscMatrix<T>::linfty_norm () const
503 : {
504 1264 : return PetscMatrix<T>::norm<NORM_INFINITY>();
505 : }
506 :
507 :
508 :
509 : template <typename T>
510 140 : void PetscMatrix<T>::print_matlab (const std::string & name) const
511 : {
512 4 : libmesh_assert (this->initialized());
513 :
514 4 : semiparallel_only();
515 :
516 140 : if (!this->closed())
517 : {
518 : libmesh_deprecated();
519 : libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
520 : "Please update your code, as this warning will become an error in a future release.");
521 0 : const_cast<PetscMatrix<T> *>(this)->close();
522 : }
523 :
524 : // Create an ASCII file containing the matrix
525 : // if a filename was provided.
526 140 : if (name != "")
527 : {
528 8 : WrappedPetsc<PetscViewer> petsc_viewer;
529 :
530 140 : LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
531 : name.c_str(),
532 : petsc_viewer.get()));
533 :
534 : #if PETSC_VERSION_LESS_THAN(3,7,0)
535 : LibmeshPetscCall(PetscViewerSetFormat (petsc_viewer,
536 : PETSC_VIEWER_ASCII_MATLAB));
537 : #else
538 140 : LibmeshPetscCall(PetscViewerPushFormat (petsc_viewer,
539 : PETSC_VIEWER_ASCII_MATLAB));
540 : #endif
541 :
542 140 : LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
543 : }
544 :
545 : // Otherwise the matrix will be dumped to the screen.
546 : else
547 : {
548 : #if PETSC_VERSION_LESS_THAN(3,7,0)
549 : LibmeshPetscCall(PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
550 : PETSC_VIEWER_ASCII_MATLAB));
551 : #else
552 0 : LibmeshPetscCall(PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
553 : PETSC_VIEWER_ASCII_MATLAB));
554 : #endif
555 :
556 0 : LibmeshPetscCall(MatView (this->_mat, PETSC_VIEWER_STDOUT_WORLD));
557 : }
558 140 : }
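// Usage sketch (hypothetical caller code): the file written by print_matlab()
// is an ASCII MATLAB/Octave script which, when run, defines the matrix in the
// workspace for inspection:
//
//   A.print_matlab("jacobian.m");   // A is an assembled (closed) PetscMatrix<Number>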
559 :
560 :
561 :
562 :
563 :
564 : template <typename T>
565 0 : void PetscMatrix<T>::print_personal(std::ostream & os) const
566 : {
567 0 : libmesh_assert (this->initialized());
568 :
569 : // Routine must be called in parallel on parallel matrices
570 : // and serial on serial matrices.
571 0 : semiparallel_only();
572 :
573 : // #ifndef NDEBUG
574 : // if (os != std::cout)
575 : // libMesh::err << "Warning! PETSc can only print to std::cout!" << std::endl;
576 : // #endif
577 :
578 : // Matrix must be in an assembled state to be printed
579 0 : if (!this->closed())
580 : {
581 : libmesh_deprecated();
582 : libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_personal().\n"
583 : "Please update your code, as this warning will become an error in a future release.");
584 0 : const_cast<PetscMatrix<T> *>(this)->close();
585 : }
586 :
587 : // Print to screen if ostream is stdout
588 0 : if (os.rdbuf() == std::cout.rdbuf())
589 0 : LibmeshPetscCall(MatView(this->_mat, NULL));
590 :
591 : // Otherwise, print to the requested file, in a roundabout way...
592 : else
593 : {
594 : // We will create a temporary filename, and file, for PETSc to
595 : // write to.
596 0 : std::string temp_filename;
597 :
598 : {
599 : // Template for temporary filename
600 0 : char c[] = "temp_petsc_matrix.XXXXXX";
601 :
602 : // Generate temporary, unique filename only on processor 0. We will
603 : // use this filename for PetscViewerASCIIOpen, before copying it into
604 : // the user's stream
605 0 : if (this->processor_id() == 0)
606 : {
607 0 : int fd = mkstemp(c);
608 :
609 : // Check to see that mkstemp did not fail.
610 0 : libmesh_error_msg_if(fd == -1, "mkstemp failed in PetscMatrix::print_personal()");
611 :
612 : // mkstemp returns a file descriptor for an open file,
613 : // so let's close it before we hand it to PETSc!
614 0 : ::close (fd);
615 : }
616 :
617 : // Store temporary filename as string, makes it easier to broadcast
618 0 : temp_filename = c;
619 : }
620 :
621 : // Now broadcast the filename from processor 0 to all processors.
622 0 : this->comm().broadcast(temp_filename);
623 :
624 : // PetscViewer object for passing to MatView
625 : PetscViewer petsc_viewer;
626 :
627 : // This PETSc function only takes a string and handles the opening/closing
628 : // of the file internally. Since print_personal() takes a reference to
629 : // an ostream, we have to do an extra step... print_personal() should probably
630 : // have a version that takes a string to get rid of this problem.
631 0 : LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
632 : temp_filename.c_str(),
633 : &petsc_viewer));
634 :
635 : // Probably don't need to set the format if it's default...
636 : // ierr = PetscViewerSetFormat (petsc_viewer,
637 : // PETSC_VIEWER_DEFAULT);
638 : // LIBMESH_CHKERR(ierr);
639 :
640 : // Finally print the matrix using the viewer
641 0 : LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
642 :
643 0 : if (this->processor_id() == 0)
644 : {
645 : // Now the inefficient bit: open temp_filename as an ostream and copy the contents
646 : // into the user's desired ostream. We can't just do a direct file copy, we don't even have the filename!
647 0 : std::ifstream input_stream(temp_filename.c_str());
648 0 : os << input_stream.rdbuf(); // The "most elegant" way to copy one stream into another.
649 : // os.close(); // close not defined in ostream
650 :
651 : // Now remove the temporary file
652 0 : input_stream.close();
653 0 : std::remove(temp_filename.c_str());
654 0 : }
655 : }
656 0 : }
657 :
658 :
659 :
660 : template <typename T>
661 210 : void PetscMatrix<T>::_petsc_viewer(const std::string & filename,
662 : PetscViewerType viewertype,
663 : PetscFileMode filemode)
664 : {
665 6 : parallel_object_only();
666 :
667 : // We'll get matrix sizes from the file, but we need to at least
668 : // have a Mat object
669 210 : if (!this->initialized())
670 : {
671 70 : LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
672 70 : this->_is_initialized = true;
673 : }
674 :
675 : PetscViewer viewer;
676 210 : LibmeshPetscCall(PetscViewerCreate(this->comm().get(), &viewer));
677 210 : LibmeshPetscCall(PetscViewerSetType(viewer, viewertype));
678 210 : LibmeshPetscCall(PetscViewerSetFromOptions(viewer));
679 210 : LibmeshPetscCall(PetscViewerFileSetMode(viewer, filemode));
680 210 : LibmeshPetscCall(PetscViewerFileSetName(viewer, filename.c_str()));
681 210 : if (filemode == FILE_MODE_READ)
682 140 : LibmeshPetscCall(MatLoad(this->_mat, viewer));
683 : else
684 70 : LibmeshPetscCall(MatView(this->_mat, viewer));
685 210 : LibmeshPetscCall(PetscViewerDestroy(&viewer));
686 210 : }
687 :
688 :
689 :
690 : template <typename T>
691 70 : void PetscMatrix<T>::print_petsc_binary(const std::string & filename) const
692 : {
693 2 : libmesh_assert (this->initialized());
694 :
695 : // Our helper here isn't const-correct for writes
696 2 : PetscMatrix<T> * nonconst_this = const_cast<PetscMatrix<T>*>(this);
697 70 : nonconst_this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_WRITE);
698 70 : }
699 :
700 :
701 :
702 : template <typename T>
703 0 : void PetscMatrix<T>::print_petsc_hdf5(const std::string & filename) const
704 : {
705 0 : libmesh_assert (this->initialized());
706 :
707 : // Our helper here isn't const-correct for writes
708 0 : PetscMatrix<T> * nonconst_this = const_cast<PetscMatrix<T>*>(this);
709 0 : nonconst_this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_WRITE);
710 0 : }
711 :
712 :
713 :
714 : template <typename T>
715 140 : void PetscMatrix<T>::read_petsc_binary(const std::string & filename)
716 : {
717 8 : LOG_SCOPE("read_petsc_binary()", "PetscMatrix");
718 :
719 140 : this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_READ);
720 140 : }
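// Usage sketch (hypothetical caller code): round-tripping a matrix through
// PETSc's binary viewer with the helpers above. Sizes are read back from the
// file by MatLoad() inside _petsc_viewer():
//
//   A.print_petsc_binary("A.petsc");   // A must already be assembled
//   PetscMatrix<Number> B(A.comm());
//   B.read_petsc_binary("A.petsc");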
721 :
722 :
723 :
724 : template <typename T>
725 0 : void PetscMatrix<T>::read_petsc_hdf5(const std::string & filename)
726 : {
727 0 : LOG_SCOPE("read_petsc_hdf5()", "PetscMatrix");
728 :
729 0 : this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_READ);
730 0 : }
731 :
732 :
733 :
734 : template <typename T>
735 42134767 : void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
736 : const std::vector<numeric_index_type> & rows,
737 : const std::vector<numeric_index_type> & cols)
738 : {
739 3283512 : libmesh_assert (this->initialized());
740 :
741 6567024 : const numeric_index_type n_rows = dm.m();
742 6567024 : const numeric_index_type n_cols = dm.n();
743 :
744 3283512 : libmesh_assert_equal_to (rows.size(), n_rows);
745 3283512 : libmesh_assert_equal_to (cols.size(), n_cols);
746 :
747 45418279 : std::scoped_lock lock(this->_petsc_matrix_mutex);
748 42134767 : LibmeshPetscCall(MatSetValues(this->_mat,
749 : n_rows, numeric_petsc_cast(rows.data()),
750 : n_cols, numeric_petsc_cast(cols.data()),
751 : pPS(const_cast<T*>(dm.get_values().data())),
752 : ADD_VALUES));
753 42134767 : }
754 :
755 :
756 :
757 :
758 :
759 :
760 : template <typename T>
761 0 : void PetscMatrix<T>::add_block_matrix(const DenseMatrix<T> & dm,
762 : const std::vector<numeric_index_type> & brows,
763 : const std::vector<numeric_index_type> & bcols)
764 : {
765 0 : libmesh_assert (this->initialized());
766 :
767 : const numeric_index_type n_brows =
768 0 : cast_int<numeric_index_type>(brows.size());
769 : const numeric_index_type n_bcols =
770 0 : cast_int<numeric_index_type>(bcols.size());
771 :
772 : #ifndef NDEBUG
773 0 : const numeric_index_type n_rows =
774 0 : cast_int<numeric_index_type>(dm.m());
775 0 : const numeric_index_type n_cols =
776 0 : cast_int<numeric_index_type>(dm.n());
777 0 : const numeric_index_type blocksize = n_rows / n_brows;
778 :
779 0 : libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
780 0 : libmesh_assert_equal_to (blocksize*n_brows, n_rows);
781 0 : libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
782 :
783 : PetscInt petsc_blocksize;
784 0 : LibmeshPetscCall(MatGetBlockSize(this->_mat, &petsc_blocksize));
785 0 : libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
786 : #endif
787 :
788 0 : std::scoped_lock lock(this->_petsc_matrix_mutex);
789 : // These casts are required for PETSc <= 2.1.5
790 0 : LibmeshPetscCall(MatSetValuesBlocked(this->_mat,
791 : n_brows, numeric_petsc_cast(brows.data()),
792 : n_bcols, numeric_petsc_cast(bcols.data()),
793 : pPS(const_cast<T*>(dm.get_values().data())),
794 : ADD_VALUES));
795 0 : }
796 :
797 :
798 :
799 :
800 :
801 : template <typename T>
802 594 : void PetscMatrix<T>::_get_submatrix(SparseMatrix<T> & submatrix,
803 : const std::vector<numeric_index_type> & rows,
804 : const std::vector<numeric_index_type> & cols,
805 : const bool reuse_submatrix) const
806 : {
807 594 : if (!this->closed())
808 : {
809 : libmesh_deprecated();
810 : libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
811 : "Please update your code, as this warning will become an error in a future release.");
812 0 : const_cast<PetscMatrix<T> *>(this)->close();
813 : }
814 :
815 16 : semiparallel_only();
816 :
817 : // Make sure the SparseMatrix passed in is really a PetscMatrix
818 16 : PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
819 :
820 : // If we're not reusing submatrix and submatrix is already initialized
821 : // then we need to clear it, otherwise we get a memory leak.
822 594 : if (!reuse_submatrix && submatrix.initialized())
823 24 : submatrix.clear();
824 :
825 : // Construct row and column index sets.
826 32 : WrappedPetsc<IS> isrow;
827 610 : LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
828 : cast_int<PetscInt>(rows.size()),
829 : numeric_petsc_cast(rows.data()),
830 : PETSC_USE_POINTER,
831 : isrow.get()));
832 :
833 32 : WrappedPetsc<IS> iscol;
834 610 : LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
835 : cast_int<PetscInt>(cols.size()),
836 : numeric_petsc_cast(cols.data()),
837 : PETSC_USE_POINTER,
838 : iscol.get()));
839 :
840 : // Extract submatrix
841 1172 : LibmeshPetscCall(LibMeshCreateSubMatrix(this->_mat,
842 : isrow,
843 : iscol,
844 : (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
845 : (&petsc_submatrix->_mat)));
846 :
847 : // Specify that the new submatrix is initialized and close it.
848 594 : petsc_submatrix->_is_initialized = true;
849 594 : petsc_submatrix->close();
850 594 : }
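// Usage sketch (hypothetical caller code): _get_submatrix() is normally reached
// through the public SparseMatrix::create_submatrix() interface, e.g. to extract
// the coupling between two index sets (rows/cols hold global ids chosen by the
// caller):
//
//   PetscMatrix<Number> sub(A.comm());
//   A.create_submatrix(sub, rows, cols);   // sub is initialized and closed on return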
851 :
852 : template <typename T>
853 0 : void PetscMatrix<T>::create_submatrix_nosort(SparseMatrix<T> & submatrix,
854 : const std::vector<numeric_index_type> & rows,
855 : const std::vector<numeric_index_type> & cols) const
856 : {
857 0 : if (!this->closed())
858 : {
859 : libmesh_deprecated();
860 : libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix_nosort().\n"
861 : "Please update your code, as this warning will become an error in a future release.");
862 0 : const_cast<PetscMatrix<T> *>(this)->close();
863 : }
864 :
865 : // Make sure the SparseMatrix passed in is really a PetscMatrix
866 0 : PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
867 :
868 0 : LibmeshPetscCall(MatZeroEntries(petsc_submatrix->_mat));
869 :
870 0 : PetscInt pc_ncols = 0;
871 : const PetscInt * pc_cols;
872 : const PetscScalar * pc_vals;
873 :
874 : // data for creating the submatrix
875 0 : std::vector<PetscInt> sub_cols;
876 0 : std::vector<PetscScalar> sub_vals;
877 :
878 0 : for (auto i : index_range(rows))
879 : {
880 0 : PetscInt sub_rid[] = {static_cast<PetscInt>(i)};
881 0 : PetscInt rid = static_cast<PetscInt>(rows[i]);
882 : // only get values from local rows, and set them in the corresponding columns of the submatrix
883 0 : if (rows[i]>= this->row_start() && rows[i]< this->row_stop())
884 : {
885 : // get one row of data from the original matrix
886 0 : LibmeshPetscCall(MatGetRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
887 : // extract data from certain cols, save the indices and entries sub_cols and sub_vals
888 0 : for (auto j : index_range(cols))
889 : {
890 0 : for (unsigned int idx = 0; idx< static_cast<unsigned int>(pc_ncols); idx++)
891 : {
892 0 : if (pc_cols[idx] == static_cast<PetscInt>(cols[j]))
893 : {
894 0 : sub_cols.push_back(static_cast<PetscInt>(j));
895 0 : sub_vals.push_back(pc_vals[idx]);
896 : }
897 : }
898 : }
899 : // set values
900 0 : LibmeshPetscCall(MatSetValues(petsc_submatrix->_mat,
901 : 1,
902 : sub_rid,
903 : static_cast<PetscInt>(sub_vals.size()),
904 : sub_cols.data(),
905 : sub_vals.data(),
906 : INSERT_VALUES));
907 0 : LibmeshPetscCall(MatRestoreRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
908 : // clear data for this row
909 0 : sub_cols.clear();
910 0 : sub_vals.clear();
911 : }
912 : }
913 0 : MatAssemblyBeginEnd(petsc_submatrix->comm(), petsc_submatrix->_mat, MAT_FINAL_ASSEMBLY);
914 : // Specify that the new submatrix is initialized and close it.
915 0 : petsc_submatrix->_is_initialized = true;
916 0 : petsc_submatrix->close();
917 0 : }
918 :
919 :
920 : template <typename T>
921 0 : void PetscMatrix<T>::get_diagonal (NumericVector<T> & dest) const
922 : {
923 : // Make sure the NumericVector passed in is really a PetscVector
924 0 : PetscVector<T> & petsc_dest = cast_ref<PetscVector<T> &>(dest);
925 :
926 : // Needs a const_cast since PETSc does not work with const.
927 0 : LibmeshPetscCall(MatGetDiagonal(const_cast<PetscMatrix<T> *>(this)->mat(),petsc_dest.vec()));
928 0 : }
929 :
930 :
931 :
932 : template <typename T>
933 280 : void PetscMatrix<T>::get_transpose (SparseMatrix<T> & dest) const
934 : {
935 : // Make sure the SparseMatrix passed in is really a PetscMatrix
936 8 : PetscMatrix<T> & petsc_dest = cast_ref<PetscMatrix<T> &>(dest);
937 :
938 : // If we aren't reusing the matrix then need to clear dest,
939 : // otherwise we get a memory leak
940 280 : if (&petsc_dest != this)
941 70 : dest.clear();
942 :
943 280 : if (&petsc_dest == this)
944 : // The MAT_REUSE_MATRIX flag was replaced by MAT_INPLACE_MATRIX
945 : // in PETSc 3.7.0
946 : #if PETSC_VERSION_LESS_THAN(3,7,0)
947 : LibmeshPetscCall(MatTranspose(this->_mat,MAT_REUSE_MATRIX, &petsc_dest._mat));
948 : #else
949 210 : LibmeshPetscCall(MatTranspose(this->_mat, MAT_INPLACE_MATRIX, &petsc_dest._mat));
950 : #endif
951 : else
952 70 : LibmeshPetscCall(MatTranspose(this->_mat,MAT_INITIAL_MATRIX, &petsc_dest._mat));
953 :
954 : // Specify that the transposed matrix is initialized and close it.
955 280 : petsc_dest._is_initialized = true;
956 280 : petsc_dest.close();
957 280 : }
958 :
959 :
960 :
961 : template <typename T>
962 0 : void PetscMatrix<T>::flush ()
963 : {
964 0 : semiparallel_only();
965 :
966 0 : MatAssemblyBeginEnd(this->comm(), this->_mat, MAT_FLUSH_ASSEMBLY);
967 0 : }
968 :
969 :
970 :
971 : template <typename T>
972 1225247 : void PetscMatrix<T>::set (const numeric_index_type i,
973 : const numeric_index_type j,
974 : const T value)
975 : {
976 104383 : libmesh_assert (this->initialized());
977 :
978 1225247 : PetscInt i_val=i, j_val=j;
979 :
980 1225247 : PetscScalar petsc_value = static_cast<PetscScalar>(value);
981 1329630 : std::scoped_lock lock(this->_petsc_matrix_mutex);
982 1225247 : LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
983 : &petsc_value, INSERT_VALUES));
984 1225247 : }
985 :
986 :
987 :
988 : template <typename T>
989 481487 : void PetscMatrix<T>::add (const numeric_index_type i,
990 : const numeric_index_type j,
991 : const T value)
992 : {
993 43234 : libmesh_assert (this->initialized());
994 :
995 481487 : PetscInt i_val=i, j_val=j;
996 :
997 481487 : PetscScalar petsc_value = static_cast<PetscScalar>(value);
998 524721 : std::scoped_lock lock(this->_petsc_matrix_mutex);
999 481487 : LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
1000 : &petsc_value, ADD_VALUES));
1001 481487 : }
1002 :
1003 :
1004 :
1005 : template <typename T>
1006 41509240 : void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
1007 : const std::vector<numeric_index_type> & dof_indices)
1008 : {
1009 41509240 : this->add_matrix (dm, dof_indices, dof_indices);
1010 41509240 : }
1011 :
1012 :
1013 :
1014 :
1015 :
1016 :
1017 :
1018 : template <typename T>
1019 571953 : void PetscMatrix<T>::add (const T a_in, const SparseMatrix<T> & X_in)
1020 : {
1021 16336 : libmesh_assert (this->initialized());
1022 :
1023 : // sanity check. but this cannot avoid
1024 : // crash due to incompatible sparsity structure...
1025 16336 : libmesh_assert_equal_to (this->m(), X_in.m());
1026 16336 : libmesh_assert_equal_to (this->n(), X_in.n());
1027 :
1028 16336 : PetscScalar a = static_cast<PetscScalar> (a_in);
1029 16336 : const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1030 :
1031 16336 : libmesh_assert (X);
1032 :
1033 : // the matrix from which we copy the values has to be assembled/closed
1034 16336 : libmesh_assert(X->closed());
1035 :
1036 16336 : semiparallel_only();
1037 :
1038 571953 : LibmeshPetscCall(MatAXPY(this->_mat, a, X->_mat, DIFFERENT_NONZERO_PATTERN));
1039 571953 : }
1040 :
1041 :
1042 : template <typename T>
1043 0 : void PetscMatrix<T>::matrix_matrix_mult (SparseMatrix<T> & X_in, SparseMatrix<T> & Y_out, bool reuse)
1044 : {
1045 0 : libmesh_assert (this->initialized());
1046 :
1047 : // sanity check
1048 : // we do not check the Y_out size here as we will initialize & close it at the end
1049 0 : libmesh_assert_equal_to (this->n(), X_in.m());
1050 :
1051 0 : const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1052 0 : PetscMatrix<T> * Y = cast_ptr<PetscMatrix<T> *> (&Y_out);
1053 :
1054 : // the matrix from which we copy the values has to be assembled/closed
1055 0 : libmesh_assert(X->closed());
1056 :
1057 0 : semiparallel_only();
1058 :
1059 0 : if (reuse)
1060 0 : LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_REUSE_MATRIX, PETSC_DEFAULT, &Y->_mat));
1061 : else
1062 : {
1063 0 : Y->clear();
1064 0 : LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y->_mat));
1065 : }
1066 : // Specify that the new matrix is initialized
1067 : // We do not close it here as `MatMatMult` ensures Y being closed
1068 0 : Y->_is_initialized = true;
1069 0 : }
1070 :
1071 : template <typename T>
1072 : void
1073 0 : PetscMatrix<T>::add_sparse_matrix (const SparseMatrix<T> & spm,
1074 : const std::map<numeric_index_type, numeric_index_type> & row_ltog,
1075 : const std::map<numeric_index_type, numeric_index_type> & col_ltog,
1076 : const T scalar)
1077 : {
1078 : // In parallel the size of spm is usually greater than that of row_ltog and col_ltog, since the maps contain only indices owned by this processor.
1079 : // Also, we want to allow adding only certain parts of spm to _mat.
1080 0 : libmesh_assert_greater_equal(spm.m(), row_ltog.size());
1081 0 : libmesh_assert_greater_equal(spm.n(), col_ltog.size());
1082 :
1083 : // make sure matrix has larger size than spm
1084 0 : libmesh_assert_greater_equal(this->m(), spm.m());
1085 0 : libmesh_assert_greater_equal(this->n(), spm.n());
1086 :
1087 0 : if (!this->closed())
1088 0 : this->close();
1089 :
1090 0 : auto pscm = cast_ptr<const PetscMatrix<T> *>(&spm);
1091 :
1092 0 : PetscInt ncols = 0;
1093 :
1094 : const PetscInt * lcols;
1095 : const PetscScalar * vals;
1096 :
1097 0 : std::vector<PetscInt> gcols;
1098 0 : std::vector<PetscScalar> values;
1099 :
1100 0 : for (auto ltog : row_ltog)
1101 : {
1102 0 : PetscInt grow[] = {static_cast<PetscInt>(ltog.second)}; // global row index
1103 :
1104 0 : LibmeshPetscCall(MatGetRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1105 :
1106 : // get global indices (gcols) from lcols, increment values = vals*scalar
1107 0 : gcols.resize(ncols);
1108 0 : values.resize(ncols);
1109 0 : for (auto i : index_range(gcols))
1110 : {
1111 0 : gcols[i] = libmesh_map_find(col_ltog, lcols[i]);
1112 0 : values[i] = PS(scalar) * vals[i];
1113 : }
1114 :
1115 0 : LibmeshPetscCall(MatSetValues(this->_mat, 1, grow, ncols, gcols.data(), values.data(), ADD_VALUES));
1116 0 : LibmeshPetscCall(MatRestoreRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1117 : }
1118 : // Note: We are not closing the matrix because it is expensive to do so when adding multiple sparse matrices.
1119 : // Remember to manually close the matrix once all changes to the matrix have been made.
1120 0 : }
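// Usage sketch (hypothetical caller code): per the note above, close() is
// deferred until all contributions have been accumulated:
//
//   A.add_sparse_matrix(B, row_map_b, col_map_b,  1.0);
//   A.add_sparse_matrix(C, row_map_c, col_map_c, -2.0);
//   A.close();   // assemble once, after the last contribution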
1121 :
1122 : template <typename T>
1123 130 : T PetscMatrix<T>::operator () (const numeric_index_type i_in,
1124 : const numeric_index_type j_in) const
1125 : {
1126 0 : libmesh_assert (this->initialized());
1127 :
1128 : // If the entry is not in the sparse matrix, it is 0.
1129 130 : T value=0.;
1130 :
1131 : PetscInt
1132 130 : i_val=static_cast<PetscInt>(i_in),
1133 130 : j_val=static_cast<PetscInt>(j_in);
1134 :
1135 :
1136 : // the matrix needs to be closed for this to work
1137 : // this->close();
1138 : // but closing it is a semiparallel operation; we want operator()
1139 : // to run on one processor.
1140 0 : libmesh_assert(this->closed());
1141 :
1142 130 : LibmeshPetscCall(MatGetValue(this->_mat, i_val, j_val, &value));
1143 :
1144 130 : return value;
1145 : }
1146 :
1147 : template <typename T>
1148 8408 : void PetscMatrix<T>::get_row (numeric_index_type i_in,
1149 : std::vector<numeric_index_type> & indices,
1150 : std::vector<T> & values) const
1151 : {
1152 612 : libmesh_assert (this->initialized());
1153 :
1154 : const PetscScalar * petsc_row;
1155 : const PetscInt * petsc_cols;
1156 :
1157 : PetscInt
1158 8408 : ncols=0,
1159 8408 : i_val = static_cast<PetscInt>(i_in);
1160 :
1161 : // the matrix needs to be closed for this to work
1162 : // this->close();
1163 : // but closing it is a semiparallel operation; we want get_row()
1164 : // to run on one processor.
1165 612 : libmesh_assert(this->closed());
1166 :
1167 : // PETSc makes no effort at being thread safe. Helgrind complains about
1168 : // possible data races even just in PetscFunctionBegin (due to things
1169 : // like stack counter incrementing). Perhaps we could ignore
1170 : // this, but there are legitimate data races for Mat data members like
1171 : // mat->getrowactive between MatGetRow and MatRestoreRow. Moreover,
1172 : // there could be a write into mat->rowvalues during MatGetRow from
1173 : // one thread while we are attempting to read from mat->rowvalues
1174 : // (through petsc_cols) during data copy in another thread. So
1175 : // the safe thing to do is to lock the whole method
1176 :
1177 9020 : std::lock_guard<std::mutex> lock(_petsc_matrix_mutex);
1178 :
1179 8408 : LibmeshPetscCall(MatGetRow(this->_mat, i_val, &ncols, &petsc_cols, &petsc_row));
1180 :
1181 : // Copy the data
1182 8408 : indices.resize(static_cast<std::size_t>(ncols));
1183 8408 : values.resize(static_cast<std::size_t>(ncols));
1184 :
1185 64570 : for (auto i : index_range(indices))
1186 : {
1187 56162 : indices[i] = static_cast<numeric_index_type>(petsc_cols[i]);
1188 60142 : values[i] = static_cast<T>(petsc_row[i]);
1189 : }
1190 :
1191 8408 : LibmeshPetscCall(MatRestoreRow(this->_mat, i_val,
1192 : &ncols, &petsc_cols, &petsc_row));
1193 8408 : }
1194 :
1195 :
1196 :
1197 : template <typename T>
1198 264 : PetscMatrix<T> & PetscMatrix<T>::operator= (const PetscMatrix<T> & v)
1199 : {
1200 0 : semiparallel_only();
1201 :
1202 264 : if (this->_mat)
1203 : {
1204 : PetscBool assembled;
1205 264 : LibmeshPetscCall(MatAssembled(this->_mat, &assembled));
1206 : #ifndef NDEBUG
1207 0 : const bool cxx_assembled = (assembled == PETSC_TRUE) ? true : false;
1208 0 : libmesh_assert(this->_communicator.verify(cxx_assembled));
1209 : #endif
1210 :
1211 264 : if (!assembled)
1212 : // MatCopy does not work with an unassembled matrix. We could use MatDuplicate but then we
1213 : // would have to destroy the matrix we manage and others might be relying on that data. So
1214 : // we just assemble here regardless of the preceding level of matrix fill
1215 0 : this->close();
1216 264 : LibmeshPetscCall(MatCopy(v._mat, this->_mat, DIFFERENT_NONZERO_PATTERN));
1217 : }
1218 : else
1219 0 : LibmeshPetscCall(MatDuplicate(v._mat, MAT_COPY_VALUES, &this->_mat));
1220 :
1221 264 : this->_is_initialized = true;
1222 :
1223 264 : return *this;
1224 : }
1225 :
1226 : template <typename T>
1227 264 : SparseMatrix<T> & PetscMatrix<T>::operator= (const SparseMatrix<T> & v)
1228 : {
1229 264 : *this = cast_ref<const PetscMatrix<T> &>(v);
1230 264 : return *this;
1231 : }
1232 :
1233 : template <typename T>
1234 70 : void PetscMatrix<T>::scale(const T scale)
1235 : {
1236 2 : libmesh_assert(this->closed());
1237 :
1238 70 : LibmeshPetscCall(MatScale(this->_mat, PS(scale)));
1239 70 : }
1240 :
1241 : template <typename T>
1242 268 : bool PetscMatrix<T>::supports_hash_table() const
1243 : {
1244 : #if PETSC_RELEASE_LESS_THAN(3,19,0)
1245 4 : return false;
1246 : #else
1247 264 : return true;
1248 : #endif
1249 : }
1250 :
1251 : #if PETSC_RELEASE_GREATER_EQUALS(3,23,0)
1252 : template <typename T>
1253 : std::unique_ptr<PetscMatrix<T>>
1254 : PetscMatrix<T>::copy_from_hash ()
1255 : {
1256 : Mat xaij;
1257 : libmesh_assert(this->initialized());
1258 : libmesh_assert(!this->closed());
1259 : LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, &xaij));
1260 : LibmeshPetscCall(MatCopyHashToXAIJ(this->_mat, xaij));
1261 : return std::make_unique<PetscMatrix<T>>(xaij, this->comm(), /*destroy_on_exit=*/true);
1262 : }
1263 : #endif
1264 :
1265 : template <typename T>
1266 : void
1267 0 : PetscMatrix<T>::restore_original_nonzero_pattern()
1268 : {
1269 0 : semiparallel_only();
1270 :
1271 0 : if (this->_use_hash_table)
1272 : #if PETSC_RELEASE_GREATER_EQUALS(3, 23, 0)
1273 : // This performs MatReset plus re-establishes the hash table
1274 : LibmeshPetscCall(MatResetHash(this->_mat));
1275 : #else
1276 0 : libmesh_error_msg("Resetting hash tables not supported until PETSc version 3.23");
1277 : #endif
1278 : else
1279 0 : this->reset_preallocation();
1280 0 : }
1281 :
1282 : //------------------------------------------------------------------
1283 : // Explicit instantiations
1284 : template class LIBMESH_EXPORT PetscMatrix<Number>;
1285 :
1286 : } // namespace libMesh
1287 :
1288 :
1289 : #endif // #ifdef LIBMESH_HAVE_PETSC
|