libMesh
petsc_matrix.C
1 // The libMesh Finite Element Library.
2 // Copyright (C) 2002-2025 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 
4 // This library is free software; you can redistribute it and/or
5 // modify it under the terms of the GNU Lesser General Public
6 // License as published by the Free Software Foundation; either
7 // version 2.1 of the License, or (at your option) any later version.
8 
9 // This library is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 // Lesser General Public License for more details.
13 
14 // You should have received a copy of the GNU Lesser General Public
15 // License along with this library; if not, write to the Free Software
16 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 
18 
19 
20 #include "libmesh/libmesh_config.h"
21 
22 #ifdef LIBMESH_HAVE_PETSC
23 
24 // Local includes
25 #include "libmesh/petsc_matrix.h"
26 
27 // libMesh includes
28 #include "libmesh/dof_map.h"
29 #include "libmesh/dense_matrix.h"
30 #include "libmesh/libmesh_logging.h"
31 #include "libmesh/petsc_vector.h"
32 #include "libmesh/parallel.h"
33 #include "libmesh/utility.h"
34 #include "libmesh/wrapped_petsc.h"
35 
36 // C++ includes
37 #ifdef LIBMESH_HAVE_UNISTD_H
38 #include <unistd.h> // mkstemp
39 #endif
40 #include <fstream>
41 
42 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
43 
44 namespace
45 {
46 using namespace libMesh;
47 
48 // historic libMesh n_nz & n_oz arrays are set up for PETSc's AIJ format.
49 // however, when the blocksize is >1, we need to transform these into
50 // their BAIJ counterparts.
51 inline
52 void transform_preallocation_arrays (const PetscInt blocksize,
53  const std::vector<numeric_index_type> & n_nz,
54  const std::vector<numeric_index_type> & n_oz,
55  std::vector<numeric_index_type> & b_n_nz,
56  std::vector<numeric_index_type> & b_n_oz)
57 {
58  libmesh_assert_equal_to (n_nz.size(), n_oz.size());
59  libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
60 
61  b_n_nz.clear(); b_n_nz.reserve(n_nz.size()/blocksize);
62  b_n_oz.clear(); b_n_oz.reserve(n_oz.size()/blocksize);
63 
64  for (std::size_t nn=0, nnzs=n_nz.size(); nn<nnzs; nn += blocksize)
65  {
66  b_n_nz.push_back (n_nz[nn]/blocksize);
67  b_n_oz.push_back (n_oz[nn]/blocksize);
68  }
69 }
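// A worked example of the transformation above (illustrative numbers only):
// with blocksize 2, per-row counts n_nz = {4, 4, 6, 6} and n_oz = {2, 2, 0, 0},
// only the first row of each block is sampled, giving block-row counts
// b_n_nz = {4/2, 6/2} = {2, 3} and b_n_oz = {2/2, 0/2} = {1, 0}, i.e. the
// number of on- and off-diagonal *blocks* coupled to each block row.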
70 }
71 
72 #endif
73 
74 
75 
76 namespace libMesh
77 {
78 
79 
80 //-----------------------------------------------------------------------
81 // PetscMatrix members
82 
83 
84 // Constructor
85 template <typename T>
86 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in) :
87  PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
88 {
89 }
90 
91 
92 
93 // Constructor taking an existing Mat but not the responsibility
94 // for destroying it
95 template <typename T>
96 PetscMatrix<T>::PetscMatrix(Mat mat_in,
97  const Parallel::Communicator & comm_in,
98  const bool destroy_on_exit) :
99  PetscMatrixBase<T>(mat_in, comm_in, destroy_on_exit)
100 {
101  MatType mat_type;
102  LibmeshPetscCall(MatGetType(mat_in, &mat_type));
103  PetscBool is_hypre;
104  LibmeshPetscCall(PetscStrcmp(mat_type, MATHYPRE, &is_hypre));
105  if (is_hypre == PETSC_TRUE)
106  _mat_type = HYPRE;
107  else
108  _mat_type = AIJ;
109 }
110 
111 
112 // Constructor taking global and local dimensions and, optionally,
113 // number of on- and off-diagonal non-zeros and a block size
114 template <typename T>
115 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in,
116  const numeric_index_type m_in,
117  const numeric_index_type n_in,
118  const numeric_index_type m_l,
119  const numeric_index_type n_l,
120  const numeric_index_type n_nz,
121  const numeric_index_type n_oz,
122  const numeric_index_type blocksize_in) :
123  PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
124 {
125  this->init(m_in, n_in, m_l, n_l, n_nz, n_oz, blocksize_in);
126 }
127 
128 
129 // Destructor
130 template <typename T>
131 PetscMatrix<T>::~PetscMatrix() = default;
132 
133 template <typename T>
134 void
135 PetscMatrix<T>::init_without_preallocation (const numeric_index_type m_in,
136  const numeric_index_type n_in,
137  const numeric_index_type m_l,
138  const numeric_index_type n_l,
139  const numeric_index_type blocksize_in)
140 {
141  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
142  libmesh_ignore(blocksize_in);
143 
144  // Clear initialized matrices
145  if (this->initialized())
146  this->clear();
147 
148  PetscInt m_global = static_cast<PetscInt>(m_in);
149  PetscInt n_global = static_cast<PetscInt>(n_in);
150  PetscInt m_local = static_cast<PetscInt>(m_l);
151  PetscInt n_local = static_cast<PetscInt>(n_l);
152 
153  LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
154  LibmeshPetscCall(MatSetSizes(this->_mat, m_local, n_local, m_global, n_global));
155  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
156  LibmeshPetscCall(MatSetBlockSize(this->_mat,blocksize));
157  LibmeshPetscCall(MatSetOptionsPrefix(this->_mat, ""));
158 
159 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
160  if (blocksize > 1)
161  {
162  // specified blocksize, bs>1.
163  // double check sizes.
164  libmesh_assert_equal_to (m_local % blocksize, 0);
165  libmesh_assert_equal_to (n_local % blocksize, 0);
166  libmesh_assert_equal_to (m_global % blocksize, 0);
167  libmesh_assert_equal_to (n_global % blocksize, 0);
168  libmesh_assert_equal_to (n_nz % blocksize, 0);
169  libmesh_assert_equal_to (n_oz % blocksize, 0);
170 
171  LibmeshPetscCall(MatSetType(this->_mat, MATBAIJ)); // Automatically chooses seqbaij or mpibaij
172 
173  // MatSetFromOptions needs to happen before Preallocation routines
174  // since MatSetFromOptions can change matrix type and remove incompatible
175  // preallocation
176  LibmeshPetscCall(MatSetFromOptions(this->_mat));
177  }
178  else
179 #endif
180  {
181  switch (this->_mat_type) {
182  case AIJ:
183  LibmeshPetscCall(MatSetType(this->_mat, MATAIJ)); // Automatically chooses seqaij or mpiaij
184 
185  // MatSetFromOptions needs to happen before Preallocation routines
186  // since MatSetFromOptions can change matrix type and remove incompatible
187  // preallocation
188  LibmeshPetscCall(MatSetFromOptions(this->_mat));
189  break;
190 
191  case HYPRE:
192 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
193  LibmeshPetscCall(MatSetType(this->_mat, MATHYPRE));
194 
195  // MatSetFromOptions needs to happen before Preallocation routines
196  // since MatSetFromOptions can change matrix type and remove incompatible
197  // preallocation
198  LibmeshPetscCall(MatSetFromOptions(this->_mat));
199 #else
200  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
201 #endif
202  break;
203 
204  default: libmesh_error_msg("Unsupported petsc matrix type");
205  }
206  }
207 
208  this->set_context ();
209 }
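// Hedged usage note (not part of the library source): because the options
// prefix is left empty and MatSetFromOptions() runs before any preallocation,
// a runtime flag such as
//   -mat_type mpiaij
// can still override the type selected above; PETSc's type-specific
// preallocation routines act as no-ops on a matrix of a different type.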
210 
211 
212 
213 template <typename T>
214 void PetscMatrix<T>::init (const numeric_index_type m_in,
215  const numeric_index_type n_in,
216  const numeric_index_type m_l,
217  const numeric_index_type n_l,
218  const numeric_index_type nnz,
219  const numeric_index_type noz,
220  const numeric_index_type blocksize_in)
221 {
222  this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
223 
224  PetscInt n_nz = static_cast<PetscInt>(nnz);
225  PetscInt n_oz = static_cast<PetscInt>(noz);
226 
227 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
228  if (blocksize > 1)
229  {
230  LibmeshPetscCall(MatSeqBAIJSetPreallocation(this->_mat, blocksize, n_nz/blocksize, NULL));
231  LibmeshPetscCall(MatMPIBAIJSetPreallocation(this->_mat, blocksize,
232  n_nz/blocksize, NULL,
233  n_oz/blocksize, NULL));
234  }
235  else
236 #endif
237  {
238  switch (this->_mat_type) {
239  case AIJ:
240  LibmeshPetscCall(MatSeqAIJSetPreallocation(this->_mat, n_nz, NULL));
241  LibmeshPetscCall(MatMPIAIJSetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
242  break;
243 
244  case HYPRE:
245 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
246  LibmeshPetscCall(MatHYPRESetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
247 #else
248  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
249 #endif
250  break;
251 
252  default: libmesh_error_msg("Unsupported petsc matrix type");
253  }
254  }
255 
256  this->finish_initialization();
257 }
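// A usage sketch with hypothetical sizes (not part of the library source):
// build a square matrix with a uniform estimate of 30 on-diagonal and 10
// off-diagonal nonzeros per local row, then assemble and close it.
//
//   PetscMatrix<Number> A(comm);
//   A.init(/*m*/ n_dofs, /*n*/ n_dofs,
//          /*m_l*/ n_local_dofs, /*n_l*/ n_local_dofs,
//          /*nnz*/ 30, /*noz*/ 10);
//   // ... A.add_matrix(...), A.add(...), etc. ...
//   A.close();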
258 
259 template <typename T>
260 void PetscMatrix<T>::preallocate(const numeric_index_type libmesh_dbg_var(m_l),
261  const std::vector<numeric_index_type> & n_nz,
262  const std::vector<numeric_index_type> & n_oz,
263  const numeric_index_type blocksize_in)
264 {
265  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
266  libmesh_assert_equal_to (n_nz.size(), m_l);
267  libmesh_assert_equal_to (n_oz.size(), m_l);
268  // Avoid unused warnings when not configured with block storage
269  libmesh_ignore(blocksize_in);
270 
271 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
272  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
273 
274  if (blocksize > 1)
275  {
276  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
277  std::vector<numeric_index_type> b_n_nz, b_n_oz;
278 
279  transform_preallocation_arrays (blocksize,
280  n_nz, n_oz,
281  b_n_nz, b_n_oz);
282 
283  LibmeshPetscCall(MatSeqBAIJSetPreallocation (this->_mat,
284  blocksize,
285  0,
286  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data())));
287 
288  LibmeshPetscCall(MatMPIBAIJSetPreallocation (this->_mat,
289  blocksize,
290  0,
291  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()),
292  0,
293  numeric_petsc_cast(b_n_oz.empty() ? nullptr : b_n_oz.data())));
294  }
295  else
296 #endif
297  {
298  switch (this->_mat_type) {
299  case AIJ:
300  LibmeshPetscCall(MatSeqAIJSetPreallocation (this->_mat,
301  0,
302  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data())));
303  LibmeshPetscCall(MatMPIAIJSetPreallocation (this->_mat,
304  0,
305  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
306  0,
307  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
308  break;
309 
310  case HYPRE:
311 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
312  LibmeshPetscCall(MatHYPRESetPreallocation (this->_mat,
313  0,
314  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
315  0,
316  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
317 #else
318  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
319 #endif
320  break;
321 
322  default: libmesh_error_msg("Unsupported petsc matrix type");
323  }
324 
325  }
326 }
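// For reference, a worked example of the n_nz/n_oz convention used here
// (assumed 2-rank decomposition, not part of the library source): for a 1D
// tridiagonal operator with rows 0-4 on rank 0 and rows 5-9 on rank 1, local
// row 4 of rank 0 couples to columns 3 and 4 (locally owned, "on-diagonal")
// and to column 5 (owned by rank 1, "off-diagonal"), so n_nz[4] = 2 and
// n_oz[4] = 1, while interior rows have n_nz = 3 and n_oz = 0.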
327 
328 template <typename T>
329 void PetscMatrix<T>::finish_initialization ()
330 {
331  // Make it an error for PETSc to allocate new nonzero entries during assembly. For old PETSc
332  // versions this option must be set after preallocation for MPIAIJ matrices
333  LibmeshPetscCall(MatSetOption(this->_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
334  this->_is_initialized = true;
335 }
336 
337 template <typename T>
338 void PetscMatrix<T>::init (const numeric_index_type m_in,
339  const numeric_index_type n_in,
340  const numeric_index_type m_l,
341  const numeric_index_type n_l,
342  const std::vector<numeric_index_type> & n_nz,
343  const std::vector<numeric_index_type> & n_oz,
344  const numeric_index_type blocksize_in)
345 {
346  this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
347  this->preallocate(m_l, n_nz, n_oz, blocksize_in);
348 
349  this->finish_initialization();
350 }
351 
352 
353 template <typename T>
354 void PetscMatrix<T>::init (const ParallelType libmesh_dbg_var(type))
355 {
356  libmesh_assert(this->_dof_map);
357 
358  const numeric_index_type m_in = this->_dof_map->n_dofs();
359  const numeric_index_type m_l = this->_dof_map->n_local_dofs();
360  if (m_in != m_l)
361  libmesh_assert(type != SERIAL);
362 
363  const auto blocksize = this->_dof_map->block_size();
364 
365  this->init_without_preallocation(m_in, m_in, m_l, m_l, blocksize);
366  if (!this->_use_hash_table)
367  {
368  const std::vector<numeric_index_type> & n_nz = this->_sp->get_n_nz();
369  const std::vector<numeric_index_type> & n_oz = this->_sp->get_n_oz();
370 
371  this->preallocate(m_l, n_nz, n_oz, blocksize);
372  }
373 
374  this->finish_initialization();
375 }
376 
377 
378 template <typename T>
379 void PetscMatrix<T>::update_preallocation_and_zero ()
380 {
381  libmesh_not_implemented();
382 }
383 
384 template <typename T>
385 void PetscMatrix<T>::reset_preallocation ()
386 {
387  semiparallel_only();
388 
389 #if !PETSC_VERSION_LESS_THAN(3,9,0)
390  libmesh_assert (this->initialized());
391 
392  LibmeshPetscCall(MatResetPreallocation(this->_mat));
393 #else
394  libmesh_warning("Your version of PETSc doesn't support resetting of "
395  "preallocation, so we will use your most recent sparsity "
396  "pattern. This may result in a degradation of performance\n");
397 #endif
398 }
399 
400 template <typename T>
401 void PetscMatrix<T>::zero ()
402 {
403  libmesh_assert (this->initialized());
404 
405  semiparallel_only();
406 
407  PetscInt m_l, n_l;
408 
409  LibmeshPetscCall(MatGetLocalSize(this->_mat,&m_l,&n_l));
410 
411  if (n_l)
412  LibmeshPetscCall(MatZeroEntries(this->_mat));
413 }
414 
415 template <typename T>
416 void PetscMatrix<T>::zero_rows (std::vector<numeric_index_type> & rows, T diag_value)
417 {
418  libmesh_assert (this->initialized());
419 
420  semiparallel_only();
421 
422  // As of petsc-dev at the time of 3.1.0, MatZeroRows now takes two additional
423  // optional arguments. The optional arguments (x,b) can be used to specify the
424  // solutions for the zeroed rows (x) and right hand side (b) to update.
425  // Could be useful for setting boundary conditions...
426  if (!rows.empty())
427  LibmeshPetscCall(MatZeroRows(this->_mat, cast_int<PetscInt>(rows.size()),
428  numeric_petsc_cast(rows.data()), PS(diag_value),
429  NULL, NULL));
430  else
431  LibmeshPetscCall(MatZeroRows(this->_mat, 0, NULL, PS(diag_value), NULL, NULL));
432 }
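// A typical use of zero_rows() is imposing Dirichlet constraints after
// assembly; a sketch with hypothetical names (bc_dofs, rhs, boundary_value):
//
//   A.zero_rows(bc_dofs, /*diag_value*/ 1.0);
//   for (auto dof : bc_dofs)
//     rhs.set(dof, boundary_value(dof));
//   rhs.close();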
433 
434 template <typename T>
435 std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::zero_clone () const
436 {
437  libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
438 
439  // Copy the nonzero pattern only
440  Mat copy;
441  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, &copy));
442 
443  // Call wrapping PetscMatrix constructor, have it take over
444  // ownership.
445  auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
446  ret->set_destroy_mat_on_exit(true);
447 
448  return ret;
449 }
450 
451 
452 
453 template <typename T>
454 std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::clone () const
455 {
456  libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
457 
458  // Copy the nonzero pattern and numerical values
459  Mat copy;
460  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_COPY_VALUES, &copy));
461 
462  // Call wrapping PetscMatrix constructor, have it take over
463  // ownership.
464  auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
465  ret->set_destroy_mat_on_exit(true);
466 
467  return ret;
468 }
469 
470 
471 
472 template <typename T>
473 template <NormType N>
474 Real PetscMatrix<T>::norm () const
475 {
476  libmesh_assert (this->initialized());
477 
478  semiparallel_only();
479 
480  PetscReal petsc_value;
481  Real value;
482 
483  libmesh_assert (this->closed());
484 
485  LibmeshPetscCall(MatNorm(this->_mat, N, &petsc_value));
486 
487  value = static_cast<Real>(petsc_value);
488 
489  return value;
490 }
491 template <typename T>
492 Real PetscMatrix<T>::l1_norm () const
493 {
494  return PetscMatrix<T>::norm<NORM_1>();
495 }
496 template <typename T>
497 Real PetscMatrix<T>::frobenius_norm () const
498 {
499  return PetscMatrix<T>::norm<NORM_FROBENIUS>();
500 }
501 template <typename T>
502 Real PetscMatrix<T>::linfty_norm () const
503 {
504  return PetscMatrix<T>::norm<NORM_INFINITY>();
505 }
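// For reference (standard PETSc MatNorm conventions): NORM_1 is the maximum
// absolute column sum, NORM_INFINITY the maximum absolute row sum, and
// NORM_FROBENIUS the root of the sum of squared entries:
//   ||A||_1 = max_j sum_i |a_ij|, ||A||_inf = max_i sum_j |a_ij|,
//   ||A||_F = sqrt(sum_{i,j} |a_ij|^2)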
506 
507 
508 
509 template <typename T>
510 void PetscMatrix<T>::print_matlab (const std::string & name) const
511 {
512  libmesh_assert (this->initialized());
513 
514  semiparallel_only();
515 
516  if (!this->closed())
517  {
518  libmesh_deprecated();
519  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
520  "Please update your code, as this warning will become an error in a future release.");
521  const_cast<PetscMatrix<T> *>(this)->close();
522  }
523 
524  WrappedPetsc<PetscViewer> petsc_viewer;
525  LibmeshPetscCall(PetscViewerCreate (this->comm().get(),
526  petsc_viewer.get()));
527 
528  // Create an ASCII file containing the matrix
529  // if a filename was provided.
530  if (name != "")
531  {
532  LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
533  name.c_str(),
534  petsc_viewer.get()));
535 
536 #if PETSC_VERSION_LESS_THAN(3,7,0)
537  LibmeshPetscCall(PetscViewerSetFormat (petsc_viewer,
538  PETSC_VIEWER_ASCII_MATLAB));
539 #else
540  LibmeshPetscCall(PetscViewerPushFormat (petsc_viewer,
541  PETSC_VIEWER_ASCII_MATLAB));
542 #endif
543 
544  LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
545  }
546 
547  // Otherwise the matrix will be dumped to the screen.
548  else
549  {
550 #if PETSC_VERSION_LESS_THAN(3,7,0)
551  LibmeshPetscCall(PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
552  PETSC_VIEWER_ASCII_MATLAB));
553 #else
554  LibmeshPetscCall(PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
555  PETSC_VIEWER_ASCII_MATLAB));
556 #endif
557 
558  LibmeshPetscCall(MatView (this->_mat, PETSC_VIEWER_STDOUT_WORLD));
559  }
560 }
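// Usage sketch (hypothetical filename, not part of the library source):
//
//   A.print_matlab("jacobian.m");
//
// writes the assembled matrix in PETSc's ASCII MATLAB format, which can be
// executed from MATLAB/Octave to reconstruct the sparse matrix for inspection.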
561 
562 
563 
564 
565 
566 template <typename T>
567 void PetscMatrix<T>::print_personal(std::ostream & os) const
568 {
569  libmesh_assert (this->initialized());
570 
571  // Routine must be called in parallel on parallel matrices
572  // and serial on serial matrices.
573  semiparallel_only();
574 
575  // #ifndef NDEBUG
576  // if (os != std::cout)
577  // libMesh::err << "Warning! PETSc can only print to std::cout!" << std::endl;
578  // #endif
579 
580  // Matrix must be in an assembled state to be printed
581  if (!this->closed())
582  {
583  libmesh_deprecated();
584  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_personal().\n"
585  "Please update your code, as this warning will become an error in a future release.");
586  const_cast<PetscMatrix<T> *>(this)->close();
587  }
588 
589  // Print to screen if ostream is stdout
590  if (os.rdbuf() == std::cout.rdbuf())
591  LibmeshPetscCall(MatView(this->_mat, NULL));
592 
593  // Otherwise, print to the requested file, in a roundabout way...
594  else
595  {
596  // We will create a temporary filename, and file, for PETSc to
597  // write to.
598  std::string temp_filename;
599 
600  {
601  // Template for temporary filename
602  char c[] = "temp_petsc_matrix.XXXXXX";
603 
604  // Generate temporary, unique filename only on processor 0. We will
605  // use this filename for PetscViewerASCIIOpen, before copying it into
606  // the user's stream
607  if (this->processor_id() == 0)
608  {
609  int fd = mkstemp(c);
610 
611  // Check to see that mkstemp did not fail.
612  libmesh_error_msg_if(fd == -1, "mkstemp failed in PetscMatrix::print_personal()");
613 
614  // mkstemp returns a file descriptor for an open file,
615  // so let's close it before we hand it to PETSc!
616  ::close (fd);
617  }
618 
619  // Store temporary filename as string, makes it easier to broadcast
620  temp_filename = c;
621  }
622 
623  // Now broadcast the filename from processor 0 to all processors.
624  this->comm().broadcast(temp_filename);
625 
626  // PetscViewer object for passing to MatView
627  PetscViewer petsc_viewer;
628 
629  // This PETSc function only takes a string and handles the opening/closing
630  // of the file internally. Since print_personal() takes a reference to
631  // an ostream, we have to do an extra step... print_personal() should probably
632  // have a version that takes a string to get rid of this problem.
633  LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
634  temp_filename.c_str(),
635  &petsc_viewer));
636 
637  // Probably don't need to set the format if it's default...
638  // ierr = PetscViewerSetFormat (petsc_viewer,
639  // PETSC_VIEWER_DEFAULT);
640  // LIBMESH_CHKERR(ierr);
641 
642  // Finally print the matrix using the viewer
643  LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
644 
645  if (this->processor_id() == 0)
646  {
647  // Now the inefficient bit: open temp_filename as an ostream and copy the contents
648  // into the user's desired ostream. We can't just do a direct file copy, we don't even have the filename!
649  std::ifstream input_stream(temp_filename.c_str());
650  os << input_stream.rdbuf(); // The "most elegant" way to copy one stream into another.
651  // os.close(); // close not defined in ostream
652 
653  // Now remove the temporary file
654  input_stream.close();
655  std::remove(temp_filename.c_str());
656  }
657  }
658 }
659 
660 
661 
662 template <typename T>
663 void PetscMatrix<T>::_petsc_viewer(const std::string & filename,
664  PetscViewerType viewertype,
665  PetscFileMode filemode)
666 {
667  parallel_object_only();
668 
669  // We'll get matrix sizes from the file, but we need to at least
670  // have a Mat object
671  if (!this->initialized())
672  {
673  LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
674  this->_is_initialized = true;
675  }
676 
677  PetscViewer viewer;
678  LibmeshPetscCall(PetscViewerCreate(this->comm().get(), &viewer));
679  LibmeshPetscCall(PetscViewerSetType(viewer, viewertype));
680  LibmeshPetscCall(PetscViewerSetFromOptions(viewer));
681  LibmeshPetscCall(PetscViewerFileSetMode(viewer, filemode));
682  LibmeshPetscCall(PetscViewerFileSetName(viewer, filename.c_str()));
683  if (filemode == FILE_MODE_READ)
684  LibmeshPetscCall(MatLoad(this->_mat, viewer));
685  else
686  LibmeshPetscCall(MatView(this->_mat, viewer));
687  LibmeshPetscCall(PetscViewerDestroy(&viewer));
688 }
689 
690 
691 
692 template <typename T>
693 void PetscMatrix<T>::print_petsc_binary(const std::string & filename)
694 {
695  libmesh_assert (this->initialized());
696 
697  this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_WRITE);
698 }
699 
700 
701 
702 template <typename T>
703 void PetscMatrix<T>::print_petsc_hdf5(const std::string & filename)
704 {
705  libmesh_assert (this->initialized());
706 
707  this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_WRITE);
708 }
709 
710 
711 
712 template <typename T>
713 void PetscMatrix<T>::read_petsc_binary(const std::string & filename)
714 {
715  LOG_SCOPE("read_petsc_binary()", "PetscMatrix");
716 
717  this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_READ);
718 }
719 
720 
721 
722 template <typename T>
723 void PetscMatrix<T>::read_petsc_hdf5(const std::string & filename)
724 {
725  LOG_SCOPE("read_petsc_hdf5()", "PetscMatrix");
726 
727  this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_READ);
728 }
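// A round-trip sketch with hypothetical filenames (not part of the library
// source): dump an assembled matrix and reload it later, e.g. to reproduce a
// solve outside the full application.
//
//   A.print_petsc_binary("A.petsc");   // A must already be initialized/assembled
//   ...
//   PetscMatrix<Number> B(comm);
//   B.read_petsc_binary("A.petsc");    // sizes are taken from the file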
729 
730 
731 
732 template <typename T>
733 void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
734  const std::vector<numeric_index_type> & rows,
735  const std::vector<numeric_index_type> & cols)
736 {
737  libmesh_assert (this->initialized());
738 
739  const numeric_index_type n_rows = dm.m();
740  const numeric_index_type n_cols = dm.n();
741 
742  libmesh_assert_equal_to (rows.size(), n_rows);
743  libmesh_assert_equal_to (cols.size(), n_cols);
744 
745  std::scoped_lock lock(this->_petsc_matrix_mutex);
746  LibmeshPetscCall(MatSetValues(this->_mat,
747  n_rows, numeric_petsc_cast(rows.data()),
748  n_cols, numeric_petsc_cast(cols.data()),
749  pPS(const_cast<T*>(dm.get_values().data())),
750  ADD_VALUES));
751 }
752 
753 
754 
755 
756 
757 
758 template <typename T>
759 void PetscMatrix<T>::add_block_matrix (const DenseMatrix<T> & dm,
760  const std::vector<numeric_index_type> & brows,
761  const std::vector<numeric_index_type> & bcols)
762 {
763  libmesh_assert (this->initialized());
764 
765  const numeric_index_type n_brows =
766  cast_int<numeric_index_type>(brows.size());
767  const numeric_index_type n_bcols =
768  cast_int<numeric_index_type>(bcols.size());
769 
770 #ifndef NDEBUG
771  const numeric_index_type n_rows =
772  cast_int<numeric_index_type>(dm.m());
773  const numeric_index_type n_cols =
774  cast_int<numeric_index_type>(dm.n());
775  const numeric_index_type blocksize = n_rows / n_brows;
776 
777  libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
778  libmesh_assert_equal_to (blocksize*n_brows, n_rows);
779  libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
780 
781  PetscInt petsc_blocksize;
782  LibmeshPetscCall(MatGetBlockSize(this->_mat, &petsc_blocksize));
783  libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
784 #endif
785 
786  std::scoped_lock lock(this->_petsc_matrix_mutex);
787  // These casts are required for PETSc <= 2.1.5
788  LibmeshPetscCall(MatSetValuesBlocked(this->_mat,
789  n_brows, numeric_petsc_cast(brows.data()),
790  n_bcols, numeric_petsc_cast(bcols.data()),
791  pPS(const_cast<T*>(dm.get_values().data())),
792  ADD_VALUES));
793 }
794 
795 
796 
797 
798 
799 template <typename T>
800 void PetscMatrix<T>::_get_submatrix (SparseMatrix<T> & submatrix,
801  const std::vector<numeric_index_type> & rows,
802  const std::vector<numeric_index_type> & cols,
803  const bool reuse_submatrix) const
804 {
805  if (!this->closed())
806  {
807  libmesh_deprecated();
808  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
809  "Please update your code, as this warning will become an error in a future release.");
810  const_cast<PetscMatrix<T> *>(this)->close();
811  }
812 
813  semiparallel_only();
814 
815  // Make sure the SparseMatrix passed in is really a PetscMatrix
816  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
817 
818  // If we're not reusing submatrix and submatrix is already initialized
819  // then we need to clear it, otherwise we get a memory leak.
820  if (!reuse_submatrix && submatrix.initialized())
821  submatrix.clear();
822 
823  // Construct row and column index sets.
824  WrappedPetsc<IS> isrow;
825  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
826  cast_int<PetscInt>(rows.size()),
827  numeric_petsc_cast(rows.data()),
828  PETSC_USE_POINTER,
829  isrow.get()));
830 
831  WrappedPetsc<IS> iscol;
832  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
833  cast_int<PetscInt>(cols.size()),
834  numeric_petsc_cast(cols.data()),
835  PETSC_USE_POINTER,
836  iscol.get()));
837 
838  // Extract submatrix
839  LibmeshPetscCall(LibMeshCreateSubMatrix(this->_mat,
840  isrow,
841  iscol,
842  (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
843  (&petsc_submatrix->_mat)));
844 
845  // Specify that the new submatrix is initialized and close it.
846  petsc_submatrix->_is_initialized = true;
847  petsc_submatrix->close();
848 }
849 
850 template <typename T>
851 void PetscMatrix<T>::create_submatrix_nosort (SparseMatrix<T> & submatrix,
852  const std::vector<numeric_index_type> & rows,
853  const std::vector<numeric_index_type> & cols) const
854 {
855  if (!this->closed())
856  {
857  libmesh_deprecated();
858  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix_nosort().\n"
859  "Please update your code, as this warning will become an error in a future release.");
860  const_cast<PetscMatrix<T> *>(this)->close();
861  }
862 
863  // Make sure the SparseMatrix passed in is really a PetscMatrix
864  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
865 
866  LibmeshPetscCall(MatZeroEntries(petsc_submatrix->_mat));
867 
868  PetscInt pc_ncols = 0;
869  const PetscInt * pc_cols;
870  const PetscScalar * pc_vals;
871 
872  // // data for creating the submatrix
873  std::vector<PetscInt> sub_cols;
874  std::vector<PetscScalar> sub_vals;
875 
876  for (auto i : index_range(rows))
877  {
878  PetscInt sub_rid[] = {static_cast<PetscInt>(i)};
879  PetscInt rid = static_cast<PetscInt>(rows[i]);
880  // only get value from local rows, and set values to the corresponding columns in the submatrix
881  if (rows[i]>= this->row_start() && rows[i]< this->row_stop())
882  {
883  // get one row of data from the original matrix
884  LibmeshPetscCall(MatGetRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
885  // extract data from certain cols, save the indices and entries sub_cols and sub_vals
886  for (auto j : index_range(cols))
887  {
888  for (unsigned int idx = 0; idx< static_cast<unsigned int>(pc_ncols); idx++)
889  {
890  if (pc_cols[idx] == static_cast<PetscInt>(cols[j]))
891  {
892  sub_cols.push_back(static_cast<PetscInt>(j));
893  sub_vals.push_back(pc_vals[idx]);
894  }
895  }
896  }
897  // set values
898  LibmeshPetscCall(MatSetValues(petsc_submatrix->_mat,
899  1,
900  sub_rid,
901  static_cast<PetscInt>(sub_vals.size()),
902  sub_cols.data(),
903  sub_vals.data(),
904  INSERT_VALUES));
905  LibmeshPetscCall(MatRestoreRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
906  // clear data for this row
907  sub_cols.clear();
908  sub_vals.clear();
909  }
910  }
911  MatAssemblyBeginEnd(petsc_submatrix->comm(), petsc_submatrix->_mat, MAT_FINAL_ASSEMBLY);
912  // Specify that the new submatrix is initialized and close it.
913  petsc_submatrix->_is_initialized = true;
914  petsc_submatrix->close();
915 }
916 
917 
918 template <typename T>
919 void PetscMatrix<T>::get_diagonal (NumericVector<T> & dest) const
920 {
921  // Make sure the NumericVector passed in is really a PetscVector
922  PetscVector<T> & petsc_dest = cast_ref<PetscVector<T> &>(dest);
923 
924  // Needs a const_cast since PETSc does not work with const.
925  LibmeshPetscCall(MatGetDiagonal(const_cast<PetscMatrix<T> *>(this)->mat(),petsc_dest.vec()));
926 }
927 
928 
929 
930 template <typename T>
931 void PetscMatrix<T>::get_transpose (SparseMatrix<T> & dest) const
932 {
933  // Make sure the SparseMatrix passed in is really a PetscMatrix
934  PetscMatrix<T> & petsc_dest = cast_ref<PetscMatrix<T> &>(dest);
935 
936  // If we aren't reusing the matrix then need to clear dest,
937  // otherwise we get a memory leak
938  if (&petsc_dest != this)
939  dest.clear();
940 
941  if (&petsc_dest == this)
942  // The MAT_REUSE_MATRIX flag was replaced by MAT_INPLACE_MATRIX
943  // in PETSc 3.7.0
944 #if PETSC_VERSION_LESS_THAN(3,7,0)
945  LibmeshPetscCall(MatTranspose(this->_mat,MAT_REUSE_MATRIX, &petsc_dest._mat));
946 #else
947  LibmeshPetscCall(MatTranspose(this->_mat, MAT_INPLACE_MATRIX, &petsc_dest._mat));
948 #endif
949  else
950  LibmeshPetscCall(MatTranspose(this->_mat,MAT_INITIAL_MATRIX, &petsc_dest._mat));
951 
952  // Specify that the transposed matrix is initialized and close it.
953  petsc_dest._is_initialized = true;
954  petsc_dest.close();
955 }
956 
957 
958 
959 template <typename T>
960 void PetscMatrix<T>::flush ()
961 {
962  semiparallel_only();
963 
964  MatAssemblyBeginEnd(this->comm(), this->_mat, MAT_FLUSH_ASSEMBLY);
965 }
966 
967 
968 
969 template <typename T>
970 void PetscMatrix<T>::set (const numeric_index_type i,
971  const numeric_index_type j,
972  const T value)
973 {
974  libmesh_assert (this->initialized());
975 
976  PetscInt i_val=i, j_val=j;
977 
978  PetscScalar petsc_value = static_cast<PetscScalar>(value);
979  std::scoped_lock lock(this->_petsc_matrix_mutex);
980  LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
981  &petsc_value, INSERT_VALUES));
982 }
983 
984 
985 
986 template <typename T>
987 void PetscMatrix<T>::add (const numeric_index_type i,
988  const numeric_index_type j,
989  const T value)
990 {
991  libmesh_assert (this->initialized());
992 
993  PetscInt i_val=i, j_val=j;
994 
995  PetscScalar petsc_value = static_cast<PetscScalar>(value);
996  std::scoped_lock lock(this->_petsc_matrix_mutex);
997  LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
998  &petsc_value, ADD_VALUES));
999 }
1000 
1001 
1002 
1003 template <typename T>
1004 void PetscMatrix<T>::add_matrix (const DenseMatrix<T> & dm,
1005  const std::vector<numeric_index_type> & dof_indices)
1006 {
1007  this->add_matrix (dm, dof_indices, dof_indices);
1008 }
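// The dof_indices overload above is the usual finite element assembly path;
// a sketch with a hypothetical element loop (mesh, dof_map, matrix are
// assumed to exist):
//
//   DenseMatrix<Number> Ke;
//   std::vector<dof_id_type> dof_indices;
//   for (const auto & elem : mesh.active_local_element_ptr_range())
//     {
//       dof_map.dof_indices(elem, dof_indices);
//       Ke.resize(dof_indices.size(), dof_indices.size());
//       // ... integrate the element contribution into Ke ...
//       matrix.add_matrix(Ke, dof_indices);
//     }
//   matrix.close();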
1009 
1010 
1011 
1012 
1013 
1014 
1015 
1016 template <typename T>
1017 void PetscMatrix<T>::add (const T a_in, const SparseMatrix<T> & X_in)
1018 {
1019  libmesh_assert (this->initialized());
1020 
1021  // sanity check. but this cannot avoid
1022  // crash due to incompatible sparsity structure...
1023  libmesh_assert_equal_to (this->m(), X_in.m());
1024  libmesh_assert_equal_to (this->n(), X_in.n());
1025 
1026  PetscScalar a = static_cast<PetscScalar> (a_in);
1027  const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1028 
1029  libmesh_assert (X);
1030 
1031  // the matrix from which we copy the values has to be assembled/closed
1032  libmesh_assert(X->closed());
1033 
1034  semiparallel_only();
1035 
1036  LibmeshPetscCall(MatAXPY(this->_mat, a, X->_mat, DIFFERENT_NONZERO_PATTERN));
1037 }
1038 
1039 
1040 template <typename T>
1041 void PetscMatrix<T>::matrix_matrix_mult (SparseMatrix<T> & X_in, SparseMatrix<T> & Y_out, bool reuse)
1042 {
1043  libmesh_assert (this->initialized());
1044 
1045  // sanity check
1046  // we do not check the Y_out size here as we will initialize & close it at the end
1047  libmesh_assert_equal_to (this->n(), X_in.m());
1048 
1049  const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1050  PetscMatrix<T> * Y = cast_ptr<PetscMatrix<T> *> (&Y_out);
1051 
1052  // the matrix from which we copy the values has to be assembled/closed
1053  libmesh_assert(X->closed());
1054 
1055  semiparallel_only();
1056 
1057  if (reuse)
1058  LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_REUSE_MATRIX, PETSC_DEFAULT, &Y->_mat));
1059  else
1060  {
1061  Y->clear();
1062  LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y->_mat));
1063  }
1064  // Specify that the new matrix is initialized
1065  // We do not close it here as `MatMatMult` ensures Y being closed
1066  Y->_is_initialized = true;
1067 }
1068 
1069 template <typename T>
1070 void
1071 PetscMatrix<T>::add_sparse_matrix (const SparseMatrix<T> & spm,
1072  const std::map<numeric_index_type, numeric_index_type> & row_ltog,
1073  const std::map<numeric_index_type, numeric_index_type> & col_ltog,
1074  const T scalar)
1075 {
1076  // size of spm is usually greater than row_ltog and col_ltog in parallel as the indices are owned by the processor
1077  // also, we should allow adding certain parts of spm to _mat
1078  libmesh_assert_greater_equal(spm.m(), row_ltog.size());
1079  libmesh_assert_greater_equal(spm.n(), col_ltog.size());
1080 
1081  // make sure matrix has larger size than spm
1082  libmesh_assert_greater_equal(this->m(), spm.m());
1083  libmesh_assert_greater_equal(this->n(), spm.n());
1084 
1085  if (!this->closed())
1086  this->close();
1087 
1088  auto pscm = cast_ptr<const PetscMatrix<T> *>(&spm);
1089 
1090  PetscInt ncols = 0;
1091 
1092  const PetscInt * lcols;
1093  const PetscScalar * vals;
1094 
1095  std::vector<PetscInt> gcols;
1096  std::vector<PetscScalar> values;
1097 
1098  for (auto ltog : row_ltog)
1099  {
1100  PetscInt grow[] = {static_cast<PetscInt>(ltog.second)}; // global row index
1101 
1102  LibmeshPetscCall(MatGetRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1103 
1104  // get global indices (gcols) from lcols, increment values = vals*scalar
1105  gcols.resize(ncols);
1106  values.resize(ncols);
1107  for (auto i : index_range(gcols))
1108  {
1109  gcols[i] = libmesh_map_find(col_ltog, lcols[i]);
1110  values[i] = PS(scalar) * vals[i];
1111  }
1112 
1113  LibmeshPetscCall(MatSetValues(this->_mat, 1, grow, ncols, gcols.data(), values.data(), ADD_VALUES));
1114  LibmeshPetscCall(MatRestoreRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1115  }
1116  // Note: We are not closing the matrix because it is expensive to do so when adding multiple sparse matrices.
1117  // Remember to manually close the matrix once all changes to the matrix have been made.
1118 }
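// A sketch of the intended call pattern (hypothetical matrices and index
// maps): accumulate several scaled sub-matrices, then close once at the end,
// since add_sparse_matrix() deliberately leaves the matrix open.
//
//   A.add_sparse_matrix(B, row_ltog_b, col_ltog_b, /*scalar*/ 2.0);
//   A.add_sparse_matrix(C, row_ltog_c, col_ltog_c, /*scalar*/ -1.0);
//   A.close();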
1119 
1120 template <typename T>
1121 T PetscMatrix<T>::operator () (const numeric_index_type i_in,
1122  const numeric_index_type j_in) const
1123 {
1124  libmesh_assert (this->initialized());
1125 
1126  // PETSc 2.2.1 & newer
1127  const PetscScalar * petsc_row;
1128  const PetscInt * petsc_cols;
1129 
1130  // If the entry is not in the sparse matrix, it is 0.
1131  T value=0.;
1132 
1133  PetscInt
1134  ncols=0,
1135  i_val=static_cast<PetscInt>(i_in),
1136  j_val=static_cast<PetscInt>(j_in);
1137 
1138 
1139  // the matrix needs to be closed for this to work
1140  // this->close();
1141  // but closing it is a semiparallel operation; we want operator()
1142  // to run on one processor.
1143  libmesh_assert(this->closed());
1144 
1145  LibmeshPetscCall(MatGetRow(this->_mat, i_val, &ncols, &petsc_cols, &petsc_row));
1146 
1147  // Perform a binary search to find the contiguous index in
1148  // petsc_cols (resp. petsc_row) corresponding to global index j_val
1149  std::pair<const PetscInt *, const PetscInt *> p =
1150  std::equal_range (petsc_cols, petsc_cols + ncols, j_val);
1151 
1152  // Found an entry for j_val
1153  if (p.first != p.second)
1154  {
1155  // The entry in the contiguous row corresponding
1156  // to the j_val column of interest
1157  const std::size_t j =
1158  std::distance (const_cast<PetscInt *>(petsc_cols),
1159  const_cast<PetscInt *>(p.first));
1160 
1161  libmesh_assert_less (j, ncols);
1162  libmesh_assert_equal_to (petsc_cols[j], j_val);
1163 
1164  value = static_cast<T> (petsc_row[j]);
1165  }
1166 
1167  LibmeshPetscCall(MatRestoreRow(this->_mat, i_val,
1168  &ncols, &petsc_cols, &petsc_row));
1169 
1170  return value;
1171 }
1172 
1173 template <typename T>
1174 void PetscMatrix<T>::get_row (numeric_index_type i_in,
1175  std::vector<numeric_index_type> & indices,
1176  std::vector<T> & values) const
1177 {
1178  libmesh_assert (this->initialized());
1179 
1180  const PetscScalar * petsc_row;
1181  const PetscInt * petsc_cols;
1182 
1183  PetscInt
1184  ncols=0,
1185  i_val = static_cast<PetscInt>(i_in);
1186 
1187  // the matrix needs to be closed for this to work
1188  // this->close();
1189  // but closing it is a semiparallel operation; we want operator()
1190  // to run on one processor.
1191  libmesh_assert(this->closed());
1192 
1193  // PETSc makes no effort at being thread safe. Helgrind complains about
1194  // possible data races even just in PetscFunctionBegin (due to things
1195  // like stack counter incrementing). Perhaps we could ignore
1196  // this, but there are legitimate data races for Mat data members like
1197  // mat->getrowactive between MatGetRow and MatRestoreRow. Moreover,
1198  // there could be a write into mat->rowvalues during MatGetRow from
1199  // one thread while we are attempting to read from mat->rowvalues
1200  // (through petsc_cols) during data copy in another thread. So
1201  // the safe thing to do is to lock the whole method
1202 
1203  std::lock_guard<std::mutex> lock(_petsc_matrix_mutex);
1204 
1205  LibmeshPetscCall(MatGetRow(this->_mat, i_val, &ncols, &petsc_cols, &petsc_row));
1206 
1207  // Copy the data
1208  indices.resize(static_cast<std::size_t>(ncols));
1209  values.resize(static_cast<std::size_t>(ncols));
1210 
1211  for (auto i : index_range(indices))
1212  {
1213  indices[i] = static_cast<numeric_index_type>(petsc_cols[i]);
1214  values[i] = static_cast<T>(petsc_row[i]);
1215  }
1216 
1217  LibmeshPetscCall(MatRestoreRow(this->_mat, i_val,
1218  &ncols, &petsc_cols, &petsc_row));
1219 }
1220 
1221 
1222 
1223 template <typename T>
1224 PetscMatrix<T> & PetscMatrix<T>::operator= (const PetscMatrix<T> & v)
1225 {
1226  semiparallel_only();
1227 
1228  if (this->_mat)
1229  {
1230  PetscBool assembled;
1231  LibmeshPetscCall(MatAssembled(this->_mat, &assembled));
1232 #ifndef NDEBUG
1233  const bool cxx_assembled = (assembled == PETSC_TRUE) ? true : false;
1234  libmesh_assert(this->_communicator.verify(cxx_assembled));
1235 #endif
1236 
1237  if (!assembled)
1238  // MatCopy does not work with an unassembled matrix. We could use MatDuplicate but then we
1239  // would have to destroy the matrix we manage and others might be relying on that data. So
1240  // we just assemble here regardless of the preceding level of matrix fill
1241  this->close();
1242  LibmeshPetscCall(MatCopy(v._mat, this->_mat, DIFFERENT_NONZERO_PATTERN));
1243  }
1244  else
1245  LibmeshPetscCall(MatDuplicate(v._mat, MAT_COPY_VALUES, &this->_mat));
1246 
1247  this->_is_initialized = true;
1248 
1249  return *this;
1250 }
1251 
1252 template <typename T>
1253 SparseMatrix<T> & PetscMatrix<T>::operator= (const SparseMatrix<T> & v)
1254 {
1255  *this = cast_ref<const PetscMatrix<T> &>(v);
1256  return *this;
1257 }
1258 
1259 template <typename T>
1260 void PetscMatrix<T>::scale (const T scale)
1261 {
1262  libmesh_assert(this->closed());
1263 
1264  LibmeshPetscCall(MatScale(this->_mat, PS(scale)));
1265 }
1266 
1267 template <typename T>
1268 bool PetscMatrix<T>::supports_hash_table () const
1269 {
1270 #if PETSC_RELEASE_LESS_THAN(3,19,0)
1271  return false;
1272 #else
1273  return true;
1274 #endif
1275 }
1276 
1277 #if PETSC_RELEASE_GREATER_EQUALS(3,23,0)
1278 template <typename T>
1279 std::unique_ptr<PetscMatrix<T>>
1280 PetscMatrix<T>::copy_from_hash ()
1281 {
1282  Mat xaij;
1283  libmesh_assert(this->initialized());
1284  libmesh_assert(!this->closed());
1285  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, &xaij));
1286  LibmeshPetscCall(MatCopyHashToXAIJ(this->_mat, xaij));
1287  return std::make_unique<PetscMatrix<T>>(xaij, this->comm(), /*destroy_on_exit=*/true);
1288 }
1289 #endif
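// A sketch of the hash-table assembly path enabled by copy_from_hash()
// (requires PETSc >= 3.23; assumes the SparseMatrix::use_hash_table() toggle,
// which is declared elsewhere):
//
//   A.use_hash_table(true);        // assemble into PETSc's hash table,
//                                  // no preallocation required
//   // ... add_matrix()/add() as usual, leaving A unclosed ...
//   auto B = A.copy_from_hash();   // B is an assembled (X)AIJ duplicate with
//                                  // the accumulated sparsity and values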
1290 
1291 template <typename T>
1292 void
1293 PetscMatrix<T>::restore_original_nonzero_pattern ()
1294 {
1295  semiparallel_only();
1296 
1297  if (this->_use_hash_table)
1298 #if PETSC_RELEASE_GREATER_EQUALS(3, 23, 0)
1299  // This performs MatReset plus re-establishes the hash table
1300  LibmeshPetscCall(MatResetHash(this->_mat));
1301 #else
1302  libmesh_error_msg("Resetting hash tables not supported until PETSc version 3.23");
1303 #endif
1304  else
1305  this->reset_preallocation();
1306 }
1307 
1308 //------------------------------------------------------------------
1309 // Explicit instantiations
1310 template class LIBMESH_EXPORT PetscMatrix<Number>;
1311 
1312 } // namespace libMesh
1313 
1314 
1315 #endif // #ifdef LIBMESH_HAVE_PETSC