petsc_matrix.C
1 // The libMesh Finite Element Library.
2 // Copyright (C) 2002-2025 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 
4 // This library is free software; you can redistribute it and/or
5 // modify it under the terms of the GNU Lesser General Public
6 // License as published by the Free Software Foundation; either
7 // version 2.1 of the License, or (at your option) any later version.
8 
9 // This library is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 // Lesser General Public License for more details.
13 
14 // You should have received a copy of the GNU Lesser General Public
15 // License along with this library; if not, write to the Free Software
16 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 
18 
19 
20 #include "libmesh/libmesh_config.h"
21 
22 #ifdef LIBMESH_HAVE_PETSC
23 
24 // Local includes
25 #include "libmesh/petsc_matrix.h"
26 
27 // libMesh includes
28 #include "libmesh/dof_map.h"
29 #include "libmesh/dense_matrix.h"
30 #include "libmesh/libmesh_logging.h"
31 #include "libmesh/petsc_vector.h"
32 #include "libmesh/parallel.h"
33 #include "libmesh/utility.h"
34 #include "libmesh/wrapped_petsc.h"
35 
36 // C++ includes
37 #ifdef LIBMESH_HAVE_UNISTD_H
38 #include <unistd.h> // mkstemp
39 #endif
40 #include <fstream>
41 
42 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
43 
44 namespace
45 {
46 using namespace libMesh;
47 
48 // historic libMesh n_nz & n_oz arrays are set up for PETSc's AIJ format.
49 // however, when the blocksize is >1, we need to transform these into
50 // their BAIJ counterparts.
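// For example (a worked sketch of the transform below): with blocksize 2 and
// per-row arrays n_nz = {4, 4, 6, 6}, n_oz = {2, 2, 0, 0}, the per-block-row
// arrays become b_n_nz = {2, 3} and b_n_oz = {1, 0} -- one entry per block
// row, taken from the first row of each block and divided by the blocksize.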
51 inline
52 void transform_preallocation_arrays (const PetscInt blocksize,
53  const std::vector<numeric_index_type> & n_nz,
54  const std::vector<numeric_index_type> & n_oz,
55  std::vector<numeric_index_type> & b_n_nz,
56  std::vector<numeric_index_type> & b_n_oz)
57 {
58  libmesh_assert_equal_to (n_nz.size(), n_oz.size());
59  libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
60 
61  b_n_nz.clear(); b_n_nz.reserve(n_nz.size()/blocksize);
62  b_n_oz.clear(); b_n_oz.reserve(n_oz.size()/blocksize);
63 
64  for (std::size_t nn=0, nnzs=n_nz.size(); nn<nnzs; nn += blocksize)
65  {
66  b_n_nz.push_back (n_nz[nn]/blocksize);
67  b_n_oz.push_back (n_oz[nn]/blocksize);
68  }
69 }
70 }
71 
72 #endif
73 
74 
75 
76 namespace libMesh
77 {
78 
79 
80 //-----------------------------------------------------------------------
81 // PetscMatrix members
82 
83 
84 // Constructor
85 template <typename T>
86 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in) :
87  PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
88 {
89 }
90 
91 
92 
93 // Constructor taking an existing Mat but not the responsibility
94 // for destroying it
95 template <typename T>
96 PetscMatrix<T>::PetscMatrix(Mat mat_in,
97  const Parallel::Communicator & comm_in,
98  const bool destroy_on_exit) :
99  PetscMatrixBase<T>(mat_in, comm_in, destroy_on_exit)
100 {
101  MatType mat_type;
102  LibmeshPetscCall(MatGetType(mat_in, &mat_type));
103  PetscBool is_hypre;
104  LibmeshPetscCall(PetscStrcmp(mat_type, MATHYPRE, &is_hypre));
105  if (is_hypre == PETSC_TRUE)
106  _mat_type = HYPRE;
107  else
108  _mat_type = AIJ;
109 }
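// A hypothetical usage sketch (names are illustrative, not from this file):
// wrap a Mat created by user code without giving libMesh ownership of it:
//
//   Mat raw_mat;  // created and assembled elsewhere via the PETSc C API
//   PetscMatrix<Number> wrapper(raw_mat, comm, /*destroy_on_exit=*/false);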
110 
111 
112 // Constructor taking global and local dimensions and, optionally,
113 // number of on- and off-diagonal non-zeros and a block size
114 template <typename T>
115 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in,
116  const numeric_index_type m_in,
117  const numeric_index_type n_in,
118  const numeric_index_type m_l,
119  const numeric_index_type n_l,
120  const numeric_index_type n_nz,
121  const numeric_index_type n_oz,
122  const numeric_index_type blocksize_in) :
123  PetscMatrixBase<T>(comm_in), _mat_type(AIJ)
124 {
125  this->init(m_in, n_in, m_l, n_l, n_nz, n_oz, blocksize_in);
126 }
127 
128 
129 // Destructor
130 template <typename T>
131 PetscMatrix<T>::~PetscMatrix() = default;
132 
133 template <typename T>
134 void
135 PetscMatrix<T>::init_without_preallocation (const numeric_index_type m_in,
136  const numeric_index_type n_in,
137  const numeric_index_type m_l,
138  const numeric_index_type n_l,
139  const numeric_index_type blocksize_in)
140 {
141  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
142  libmesh_ignore(blocksize_in);
143 
144  // Clear initialized matrices
145  if (this->initialized())
146  this->clear();
147 
148  PetscInt m_global = static_cast<PetscInt>(m_in);
149  PetscInt n_global = static_cast<PetscInt>(n_in);
150  PetscInt m_local = static_cast<PetscInt>(m_l);
151  PetscInt n_local = static_cast<PetscInt>(n_l);
152 
153  LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
154  LibmeshPetscCall(MatSetSizes(this->_mat, m_local, n_local, m_global, n_global));
155  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
156  LibmeshPetscCall(MatSetBlockSize(this->_mat,blocksize));
157  LibmeshPetscCall(MatSetOptionsPrefix(this->_mat, ""));
158 
159 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
160  if (blocksize > 1)
161  {
162  // specified blocksize, bs>1.
163  // double check sizes.
164  libmesh_assert_equal_to (m_local % blocksize, 0);
165  libmesh_assert_equal_to (n_local % blocksize, 0);
166  libmesh_assert_equal_to (m_global % blocksize, 0);
167  libmesh_assert_equal_to (n_global % blocksize, 0);
170 
171  LibmeshPetscCall(MatSetType(this->_mat, MATBAIJ)); // Automatically chooses seqbaij or mpibaij
172 
173  // MatSetFromOptions needs to happen before Preallocation routines
174  // since MatSetFromOptions can change matrix type and remove incompatible
175  // preallocation
176  LibmeshPetscCall(MatSetFromOptions(this->_mat));
177  }
178  else
179 #endif
180  {
181  switch (this->_mat_type) {
182  case AIJ:
183  LibmeshPetscCall(MatSetType(this->_mat, MATAIJ)); // Automatically chooses seqaij or mpiaij
184 
185  // MatSetFromOptions needs to happen before Preallocation routines
186  // since MatSetFromOptions can change matrix type and remove incompatible
187  // preallocation
188  LibmeshPetscCall(MatSetFromOptions(this->_mat));
189  break;
190 
191  case HYPRE:
192 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
193  LibmeshPetscCall(MatSetType(this->_mat, MATHYPRE));
194 
195  // MatSetFromOptions needs to happen before Preallocation routines
196  // since MatSetFromOptions can change matrix type and remove incompatible
197  // preallocation
198  LibmeshPetscCall(MatSetFromOptions(this->_mat));
199 #else
200  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
201 #endif
202  break;
203 
204  default: libmesh_error_msg("Unsupported petsc matrix type");
205  }
206  }
207 
208  this->set_context ();
209 }
210 
211 
212 
213 template <typename T>
214 void PetscMatrix<T>::init (const numeric_index_type m_in,
215  const numeric_index_type n_in,
216  const numeric_index_type m_l,
217  const numeric_index_type n_l,
218  const numeric_index_type nnz,
219  const numeric_index_type noz,
220  const numeric_index_type blocksize_in)
221 {
222  this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
223 
224  PetscInt n_nz = static_cast<PetscInt>(nnz);
225  PetscInt n_oz = static_cast<PetscInt>(noz);
226  [[maybe_unused]] PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
227 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
228  if (blocksize > 1)
229  {
230  LibmeshPetscCall(MatSeqBAIJSetPreallocation(this->_mat, blocksize, n_nz/blocksize, NULL));
231  LibmeshPetscCall(MatMPIBAIJSetPreallocation(this->_mat, blocksize,
232  n_nz/blocksize, NULL,
233  n_oz/blocksize, NULL));
234  }
235  else
236 #endif
237  {
238  switch (this->_mat_type) {
239  case AIJ:
240  LibmeshPetscCall(MatSeqAIJSetPreallocation(this->_mat, n_nz, NULL));
241  LibmeshPetscCall(MatMPIAIJSetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
242  break;
243 
244  case HYPRE:
245 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
246  LibmeshPetscCall(MatHYPRESetPreallocation(this->_mat, n_nz, NULL, n_oz, NULL));
247 #else
248  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
249 #endif
250  break;
251 
252  default: libmesh_error_msg("Unsupported petsc matrix type");
253  }
254  }
255 
256  this->finish_initialization();
257 }
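// A minimal usage sketch (sizes and names are illustrative): create a square
// matrix distributed over the communicator, preallocating roughly 30
// on-processor and 10 off-processor nonzeros per row:
//
//   PetscMatrix<Number> A(comm);
//   A.init(n_dofs, n_dofs, n_local_dofs, n_local_dofs, 30, 10);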
258 
259 template <typename T>
260 void PetscMatrix<T>::preallocate(const numeric_index_type libmesh_dbg_var(m_l),
261  const std::vector<numeric_index_type> & n_nz,
262  const std::vector<numeric_index_type> & n_oz,
263  const numeric_index_type blocksize_in)
264 {
265  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
266  libmesh_assert_equal_to (n_nz.size(), m_l);
267  libmesh_assert_equal_to (n_oz.size(), m_l);
268  // Avoid unused warnings when not configured with block storage
269  libmesh_ignore(blocksize_in);
270 
271 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
272  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
273 
274  if (blocksize > 1)
275  {
276  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
277  std::vector<numeric_index_type> b_n_nz, b_n_oz;
278 
279  transform_preallocation_arrays (blocksize,
280  n_nz, n_oz,
281  b_n_nz, b_n_oz);
282 
283  LibmeshPetscCall(MatSeqBAIJSetPreallocation (this->_mat,
284  blocksize,
285  0,
286  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data())));
287 
288  LibmeshPetscCall(MatMPIBAIJSetPreallocation (this->_mat,
289  blocksize,
290  0,
291  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()),
292  0,
293  numeric_petsc_cast(b_n_oz.empty() ? nullptr : b_n_oz.data())));
294  }
295  else
296 #endif
297  {
298  switch (this->_mat_type) {
299  case AIJ:
300  LibmeshPetscCall(MatSeqAIJSetPreallocation (this->_mat,
301  0,
302  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data())));
303  LibmeshPetscCall(MatMPIAIJSetPreallocation (this->_mat,
304  0,
305  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
306  0,
307  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
308  break;
309 
310  case HYPRE:
311 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
312  LibmeshPetscCall(MatHYPRESetPreallocation (this->_mat,
313  0,
314  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
315  0,
316  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data())));
317 #else
318  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
319 #endif
320  break;
321 
322  default: libmesh_error_msg("Unsupported petsc matrix type");
323  }
324 
325  }
326 }
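// Note: in the DofMap-driven init(ParallelType) further below, the per-row
// n_nz/n_oz arrays passed to this method come from the attached sparsity
// pattern (this->_sp->get_n_nz() and get_n_oz()), so each local row gets its
// own on- and off-processor nonzero count.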
327 
328 template <typename T>
329 void PetscMatrix<T>::finish_initialization ()
330 {
331  // Make it an error for PETSc to allocate new nonzero entries during assembly. For old PETSc
332  // versions this option must be set after preallocation for MPIAIJ matrices
333  LibmeshPetscCall(MatSetOption(this->_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
334  this->_is_initialized = true;
335 }
336 
337 template <typename T>
338 void PetscMatrix<T>::init (const numeric_index_type m_in,
339  const numeric_index_type n_in,
340  const numeric_index_type m_l,
341  const numeric_index_type n_l,
342  const std::vector<numeric_index_type> & n_nz,
343  const std::vector<numeric_index_type> & n_oz,
344  const numeric_index_type blocksize_in)
345 {
346  this->init_without_preallocation(m_in, n_in, m_l, n_l, blocksize_in);
347  this->preallocate(m_l, n_nz, n_oz, blocksize_in);
348 
349  this->finish_initialization();
350 }
351 
352 
353 template <typename T>
354 void PetscMatrix<T>::init (const ParallelType libmesh_dbg_var(type))
355 {
356  libmesh_assert(this->_dof_map);
357 
358  const numeric_index_type m_in = this->_dof_map->n_dofs();
359  const numeric_index_type m_l = this->_dof_map->n_local_dofs();
360  if (m_in != m_l)
361  libmesh_assert(type != SERIAL);
362 
363  const auto blocksize = this->_dof_map->block_size();
364 
365  this->init_without_preallocation(m_in, m_in, m_l, m_l, blocksize);
366  if (!this->_use_hash_table)
367  {
368  const std::vector<numeric_index_type> & n_nz = this->_sp->get_n_nz();
369  const std::vector<numeric_index_type> & n_oz = this->_sp->get_n_oz();
370 
371  this->preallocate(m_l, n_nz, n_oz, blocksize);
372  }
373 
374  this->finish_initialization();
375 }
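// If hash-table assembly is enabled (this->_use_hash_table), the
// preallocation step above is skipped; values are then accumulated in PETSc's
// hash-based storage and, with a recent enough PETSc, can be converted to a
// standard AIJ matrix via copy_from_hash() (guarded for PETSc >= 3.23 near
// the end of this file).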
376 
377 
378 template <typename T>
379 void PetscMatrix<T>::update_preallocation_and_zero ()
380 {
381  libmesh_not_implemented();
382 }
383 
384 template <typename T>
385 void PetscMatrix<T>::reset_preallocation ()
386 {
387  semiparallel_only();
388 
389 #if !PETSC_VERSION_LESS_THAN(3,9,0)
390  libmesh_assert (this->initialized());
391 
392  LibmeshPetscCall(MatResetPreallocation(this->_mat));
393 #else
394  libmesh_warning("Your version of PETSc doesn't support resetting of "
395  "preallocation, so we will use your most recent sparsity "
396  "pattern. This may result in a degradation of performance\n");
397 #endif
398 }
399 
400 template <typename T>
401 void PetscMatrix<T>::zero ()
402 {
403  libmesh_assert (this->initialized());
404 
405  semiparallel_only();
406 
407  PetscInt m_l, n_l;
408 
409  LibmeshPetscCall(MatGetLocalSize(this->_mat,&m_l,&n_l));
410 
411  if (n_l)
412  LibmeshPetscCall(MatZeroEntries(this->_mat));
413 }
414 
415 template <typename T>
416 void PetscMatrix<T>::zero_rows (std::vector<numeric_index_type> & rows, T diag_value)
417 {
418  libmesh_assert (this->initialized());
419 
420  semiparallel_only();
421 
422  // As of petsc-dev at the time of 3.1.0, MatZeroRows now takes two additional
423  // optional arguments. The optional arguments (x,b) can be used to specify the
424  // solutions for the zeroed rows (x) and right hand side (b) to update.
425  // Could be useful for setting boundary conditions...
426  if (!rows.empty())
427  LibmeshPetscCall(MatZeroRows(this->_mat, cast_int<PetscInt>(rows.size()),
428  numeric_petsc_cast(rows.data()), PS(diag_value),
429  NULL, NULL));
430  else
431  LibmeshPetscCall(MatZeroRows(this->_mat, 0, NULL, PS(diag_value), NULL, NULL));
432 }
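// A hypothetical sketch of the boundary-condition use mentioned above: zero
// the constrained rows and place 1 on their diagonal entries:
//
//   std::vector<numeric_index_type> bc_rows = {0, 5, 42};
//   A.zero_rows(bc_rows, 1.0);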
433 
434 template <typename T>
435 std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::zero_clone () const
436 {
437  libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
438 
439  // Copy the nonzero pattern only
440  Mat copy;
441  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, &copy));
442 
443  // Call wrapping PetscMatrix constructor, have it take over
444  // ownership.
445  auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
446  ret->set_destroy_mat_on_exit(true);
447 
448  return ret;
449 }
450 
451 
452 
453 template <typename T>
454 std::unique_ptr<SparseMatrix<T>> PetscMatrix<T>::clone () const
455 {
456  libmesh_error_msg_if(!this->closed(), "Matrix must be closed before it can be cloned!");
457 
458  // Copy the nonzero pattern and numerical values
459  Mat copy;
460  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_COPY_VALUES, &copy));
461 
462  // Call wrapping PetscMatrix constructor, have it take over
463  // ownership.
464  auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
465  ret->set_destroy_mat_on_exit(true);
466 
467  return ret;
468 }
469 
470 
471 
472 template <typename T>
473 template <NormType N>
474 Real PetscMatrix<T>::norm () const
475 {
476  libmesh_assert (this->initialized());
477 
478  semiparallel_only();
479 
480  PetscReal petsc_value;
481  Real value;
482 
483  libmesh_assert (this->closed());
484 
485  LibmeshPetscCall(MatNorm(this->_mat, N, &petsc_value));
486 
487  value = static_cast<Real>(petsc_value);
488 
489  return value;
490 }
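// The template parameter maps directly onto PETSc's MatNorm(): NORM_1 gives
// the maximum absolute column sum, NORM_INFINITY the maximum absolute row
// sum, and NORM_FROBENIUS the square root of the sum of squared entries.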
491 template <typename T>
492 Real PetscMatrix<T>::l1_norm () const
493 {
494  return PetscMatrix<T>::norm<NORM_1>();
495 }
496 template <typename T>
497 Real PetscMatrix<T>::frobenius_norm () const
498 {
499  return PetscMatrix<T>::norm<NORM_FROBENIUS>();
500 }
501 template <typename T>
502 Real PetscMatrix<T>::linfty_norm () const
503 {
504  return PetscMatrix<T>::norm<NORM_INFINITY>();
505 }
506 
507 
508 
509 template <typename T>
510 void PetscMatrix<T>::print_matlab (const std::string & name) const
511 {
512  libmesh_assert (this->initialized());
513 
514  semiparallel_only();
515 
516  if (!this->closed())
517  {
518  libmesh_deprecated();
519  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
520  "Please update your code, as this warning will become an error in a future release.");
521  const_cast<PetscMatrix<T> *>(this)->close();
522  }
523 
524  // Create an ASCII file containing the matrix
525  // if a filename was provided.
526  if (name != "")
527  {
528  WrappedPetsc<PetscViewer> petsc_viewer;
529 
530  LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
531  name.c_str(),
532  petsc_viewer.get()));
533 
534 #if PETSC_VERSION_LESS_THAN(3,7,0)
535  LibmeshPetscCall(PetscViewerSetFormat (petsc_viewer,
536  PETSC_VIEWER_ASCII_MATLAB));
537 #else
538  LibmeshPetscCall(PetscViewerPushFormat (petsc_viewer,
539  PETSC_VIEWER_ASCII_MATLAB));
540 #endif
541 
542  LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
543  }
544 
545  // Otherwise the matrix will be dumped to the screen.
546  else
547  {
548 #if PETSC_VERSION_LESS_THAN(3,7,0)
549  LibmeshPetscCall(PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
550  PETSC_VIEWER_ASCII_MATLAB));
551 #else
552  LibmeshPetscCall(PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
553  PETSC_VIEWER_ASCII_MATLAB));
554 #endif
555 
556  LibmeshPetscCall(MatView (this->_mat, PETSC_VIEWER_STDOUT_WORLD));
557  }
558 }
559 
560 
561 
562 
563 
564 template <typename T>
565 void PetscMatrix<T>::print_personal(std::ostream & os) const
566 {
567  libmesh_assert (this->initialized());
568 
569  // Routine must be called in parallel on parallel matrices
570  // and serial on serial matrices.
571  semiparallel_only();
572 
573  // #ifndef NDEBUG
574  // if (os != std::cout)
575  // libMesh::err << "Warning! PETSc can only print to std::cout!" << std::endl;
576  // #endif
577 
578  // Matrix must be in an assembled state to be printed
579  if (!this->closed())
580  {
581  libmesh_deprecated();
582  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_personal().\n"
583  "Please update your code, as this warning will become an error in a future release.");
584  const_cast<PetscMatrix<T> *>(this)->close();
585  }
586 
587  // Print to screen if ostream is stdout
588  if (os.rdbuf() == std::cout.rdbuf())
589  LibmeshPetscCall(MatView(this->_mat, NULL));
590 
591  // Otherwise, print to the requested file, in a roundabout way...
592  else
593  {
594  // We will create a temporary filename, and file, for PETSc to
595  // write to.
596  std::string temp_filename;
597 
598  {
599  // Template for temporary filename
600  char c[] = "temp_petsc_matrix.XXXXXX";
601 
602  // Generate temporary, unique filename only on processor 0. We will
603  // use this filename for PetscViewerASCIIOpen, before copying it into
604  // the user's stream
605  if (this->processor_id() == 0)
606  {
607  int fd = mkstemp(c);
608 
609  // Check to see that mkstemp did not fail.
610  libmesh_error_msg_if(fd == -1, "mkstemp failed in PetscMatrix::print_personal()");
611 
612  // mkstemp returns a file descriptor for an open file,
613  // so let's close it before we hand it to PETSc!
614  ::close (fd);
615  }
616 
617  // Store temporary filename as string, makes it easier to broadcast
618  temp_filename = c;
619  }
620 
621  // Now broadcast the filename from processor 0 to all processors.
622  this->comm().broadcast(temp_filename);
623 
624  // PetscViewer object for passing to MatView
625  PetscViewer petsc_viewer;
626 
627  // This PETSc function only takes a string and handles the opening/closing
628  // of the file internally. Since print_personal() takes a reference to
629  // an ostream, we have to do an extra step... print_personal() should probably
630  // have a version that takes a string to get rid of this problem.
631  LibmeshPetscCall(PetscViewerASCIIOpen( this->comm().get(),
632  temp_filename.c_str(),
633  &petsc_viewer));
634 
635  // Probably don't need to set the format if it's default...
636  // ierr = PetscViewerSetFormat (petsc_viewer,
637  // PETSC_VIEWER_DEFAULT);
638  // LIBMESH_CHKERR(ierr);
639 
640  // Finally print the matrix using the viewer
641  LibmeshPetscCall(MatView (this->_mat, petsc_viewer));
642 
643  if (this->processor_id() == 0)
644  {
645  // Now the inefficient bit: open temp_filename as an ostream and copy the contents
646  // into the user's desired ostream. We can't just do a direct file copy, we don't even have the filename!
647  std::ifstream input_stream(temp_filename.c_str());
648  os << input_stream.rdbuf(); // The "most elegant" way to copy one stream into another.
649  // os.close(); // close not defined in ostream
650 
651  // Now remove the temporary file
652  input_stream.close();
653  std::remove(temp_filename.c_str());
654  }
655  }
656 }
657 
658 
659 
660 template <typename T>
661 void PetscMatrix<T>::_petsc_viewer(const std::string & filename,
662  PetscViewerType viewertype,
663  PetscFileMode filemode)
664 {
665  parallel_object_only();
666 
667  // We'll get matrix sizes from the file, but we need to at least
668  // have a Mat object
669  if (!this->initialized())
670  {
671  LibmeshPetscCall(MatCreate(this->comm().get(), &this->_mat));
672  this->_is_initialized = true;
673  }
674 
675  PetscViewer viewer;
676  LibmeshPetscCall(PetscViewerCreate(this->comm().get(), &viewer));
677  LibmeshPetscCall(PetscViewerSetType(viewer, viewertype));
678  LibmeshPetscCall(PetscViewerSetFromOptions(viewer));
679  LibmeshPetscCall(PetscViewerFileSetMode(viewer, filemode));
680  LibmeshPetscCall(PetscViewerFileSetName(viewer, filename.c_str()));
681  if (filemode == FILE_MODE_READ)
682  LibmeshPetscCall(MatLoad(this->_mat, viewer));
683  else
684  LibmeshPetscCall(MatView(this->_mat, viewer));
685  LibmeshPetscCall(PetscViewerDestroy(&viewer));
686 }
687 
688 
689 
690 template <typename T>
691 void PetscMatrix<T>::print_petsc_binary(const std::string & filename)
692 {
693  libmesh_assert (this->initialized());
694 
695  this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_WRITE);
696 }
697 
698 
699 
700 template <typename T>
701 void PetscMatrix<T>::print_petsc_hdf5(const std::string & filename)
702 {
703  libmesh_assert (this->initialized());
704 
705  this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_WRITE);
706 }
707 
708 
709 
710 template <typename T>
711 void PetscMatrix<T>::read_petsc_binary(const std::string & filename)
712 {
713  LOG_SCOPE("read_petsc_binary()", "PetscMatrix");
714 
715  this->_petsc_viewer(filename, PETSCVIEWERBINARY, FILE_MODE_READ);
716 }
717 
718 
719 
720 template <typename T>
721 void PetscMatrix<T>::read_petsc_hdf5(const std::string & filename)
722 {
723  LOG_SCOPE("read_petsc_hdf5()", "PetscMatrix");
724 
725  this->_petsc_viewer(filename, PETSCVIEWERHDF5, FILE_MODE_READ);
726 }
727 
728 
729 
730 template <typename T>
731 void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
732  const std::vector<numeric_index_type> & rows,
733  const std::vector<numeric_index_type> & cols)
734 {
735  libmesh_assert (this->initialized());
736 
737  const numeric_index_type n_rows = dm.m();
738  const numeric_index_type n_cols = dm.n();
739 
740  libmesh_assert_equal_to (rows.size(), n_rows);
741  libmesh_assert_equal_to (cols.size(), n_cols);
742 
743  std::scoped_lock lock(this->_petsc_matrix_mutex);
744  LibmeshPetscCall(MatSetValues(this->_mat,
745  n_rows, numeric_petsc_cast(rows.data()),
746  n_cols, numeric_petsc_cast(cols.data()),
747  pPS(const_cast<T*>(dm.get_values().data())),
748  ADD_VALUES));
749 }
750 
751 
752 
753 
754 
755 
756 template <typename T>
757 void PetscMatrix<T>::add_block_matrix(const DenseMatrix<T> & dm,
758  const std::vector<numeric_index_type> & brows,
759  const std::vector<numeric_index_type> & bcols)
760 {
761  libmesh_assert (this->initialized());
762 
763  const numeric_index_type n_brows =
764  cast_int<numeric_index_type>(brows.size());
765  const numeric_index_type n_bcols =
766  cast_int<numeric_index_type>(bcols.size());
767 
768 #ifndef NDEBUG
769  const numeric_index_type n_rows =
770  cast_int<numeric_index_type>(dm.m());
771  const numeric_index_type n_cols =
772  cast_int<numeric_index_type>(dm.n());
773  const numeric_index_type blocksize = n_rows / n_brows;
774 
775  libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
776  libmesh_assert_equal_to (blocksize*n_brows, n_rows);
777  libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
778 
779  PetscInt petsc_blocksize;
780  LibmeshPetscCall(MatGetBlockSize(this->_mat, &petsc_blocksize));
781  libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
782 #endif
783 
784  std::scoped_lock lock(this->_petsc_matrix_mutex);
785  // These casts are required for PETSc <= 2.1.5
786  LibmeshPetscCall(MatSetValuesBlocked(this->_mat,
787  n_brows, numeric_petsc_cast(brows.data()),
788  n_bcols, numeric_petsc_cast(bcols.data()),
789  pPS(const_cast<T*>(dm.get_values().data())),
790  ADD_VALUES));
791 }
792 
793 
794 
795 
796 
797 template <typename T>
798 void PetscMatrix<T>::_get_submatrix (SparseMatrix<T> & submatrix,
799  const std::vector<numeric_index_type> & rows,
800  const std::vector<numeric_index_type> & cols,
801  const bool reuse_submatrix) const
802 {
803  if (!this->closed())
804  {
805  libmesh_deprecated();
806  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
807  "Please update your code, as this warning will become an error in a future release.");
808  const_cast<PetscMatrix<T> *>(this)->close();
809  }
810 
811  semiparallel_only();
812 
813  // Make sure the SparseMatrix passed in is really a PetscMatrix
814  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
815 
816  // If we're not reusing submatrix and submatrix is already initialized
817  // then we need to clear it, otherwise we get a memory leak.
818  if (!reuse_submatrix && submatrix.initialized())
819  submatrix.clear();
820 
821  // Construct row and column index sets.
822  WrappedPetsc<IS> isrow;
823  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
824  cast_int<PetscInt>(rows.size()),
825  numeric_petsc_cast(rows.data()),
826  PETSC_USE_POINTER,
827  isrow.get()));
828 
829  WrappedPetsc<IS> iscol;
830  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
831  cast_int<PetscInt>(cols.size()),
832  numeric_petsc_cast(cols.data()),
833  PETSC_USE_POINTER,
834  iscol.get()));
835 
836  // Extract submatrix
837  LibmeshPetscCall(LibMeshCreateSubMatrix(this->_mat,
838  isrow,
839  iscol,
840  (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
841  (&petsc_submatrix->_mat)));
842 
843  // Specify that the new submatrix is initialized and close it.
844  petsc_submatrix->_is_initialized = true;
845  petsc_submatrix->close();
846 }
847 
848 template <typename T>
849 void PetscMatrix<T>::create_submatrix_nosort(SparseMatrix<T> & submatrix,
850  const std::vector<numeric_index_type> & rows,
851  const std::vector<numeric_index_type> & cols) const
852 {
853  if (!this->closed())
854  {
855  libmesh_deprecated();
856  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix_nosort().\n"
857  "Please update your code, as this warning will become an error in a future release.");
858  const_cast<PetscMatrix<T> *>(this)->close();
859  }
860 
861  // Make sure the SparseMatrix passed in is really a PetscMatrix
862  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
863 
864  LibmeshPetscCall(MatZeroEntries(petsc_submatrix->_mat));
865 
866  PetscInt pc_ncols = 0;
867  const PetscInt * pc_cols;
868  const PetscScalar * pc_vals;
869 
870  // data for creating the submatrix
871  std::vector<PetscInt> sub_cols;
872  std::vector<PetscScalar> sub_vals;
873 
874  for (auto i : index_range(rows))
875  {
876  PetscInt sub_rid[] = {static_cast<PetscInt>(i)};
877  PetscInt rid = static_cast<PetscInt>(rows[i]);
878  // only get value from local rows, and set values to the corresponding columns in the submatrix
879  if (rows[i]>= this->row_start() && rows[i]< this->row_stop())
880  {
881  // get one row of data from the original matrix
882  LibmeshPetscCall(MatGetRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
883  // extract data from certain cols, save the indices and entries sub_cols and sub_vals
884  for (auto j : index_range(cols))
885  {
886  for (unsigned int idx = 0; idx< static_cast<unsigned int>(pc_ncols); idx++)
887  {
888  if (pc_cols[idx] == static_cast<PetscInt>(cols[j]))
889  {
890  sub_cols.push_back(static_cast<PetscInt>(j));
891  sub_vals.push_back(pc_vals[idx]);
892  }
893  }
894  }
895  // set values
896  LibmeshPetscCall(MatSetValues(petsc_submatrix->_mat,
897  1,
898  sub_rid,
899  static_cast<PetscInt>(sub_vals.size()),
900  sub_cols.data(),
901  sub_vals.data(),
902  INSERT_VALUES));
903  LibmeshPetscCall(MatRestoreRow(this->_mat, rid, &pc_ncols, &pc_cols, &pc_vals));
904  // clear data for this row
905  sub_cols.clear();
906  sub_vals.clear();
907  }
908  }
909  MatAssemblyBeginEnd(petsc_submatrix->comm(), petsc_submatrix->_mat, MAT_FINAL_ASSEMBLY);
910  // Specify that the new submatrix is initialized and close it.
911  petsc_submatrix->_is_initialized = true;
912  petsc_submatrix->close();
913 }
914 
915 
916 template <typename T>
917 void PetscMatrix<T>::get_diagonal (NumericVector<T> & dest) const
918 {
919  // Make sure the NumericVector passed in is really a PetscVector
920  PetscVector<T> & petsc_dest = cast_ref<PetscVector<T> &>(dest);
921 
922  // Needs a const_cast since PETSc does not work with const.
923  LibmeshPetscCall(MatGetDiagonal(const_cast<PetscMatrix<T> *>(this)->mat(),petsc_dest.vec()));
924 }
925 
926 
927 
928 template <typename T>
929 void PetscMatrix<T>::get_transpose (SparseMatrix<T> & dest) const
930 {
931  // Make sure the SparseMatrix passed in is really a PetscMatrix
932  PetscMatrix<T> & petsc_dest = cast_ref<PetscMatrix<T> &>(dest);
933 
934  // If we aren't reusing the matrix then need to clear dest,
935  // otherwise we get a memory leak
936  if (&petsc_dest != this)
937  dest.clear();
938 
939  if (&petsc_dest == this)
940  // The MAT_REUSE_MATRIX flag was replaced by MAT_INPLACE_MATRIX
941  // in PETSc 3.7.0
942 #if PETSC_VERSION_LESS_THAN(3,7,0)
943  LibmeshPetscCall(MatTranspose(this->_mat,MAT_REUSE_MATRIX, &petsc_dest._mat));
944 #else
945  LibmeshPetscCall(MatTranspose(this->_mat, MAT_INPLACE_MATRIX, &petsc_dest._mat));
946 #endif
947  else
948  LibmeshPetscCall(MatTranspose(this->_mat,MAT_INITIAL_MATRIX, &petsc_dest._mat));
949 
950  // Specify that the transposed matrix is initialized and close it.
951  petsc_dest._is_initialized = true;
952  petsc_dest.close();
953 }
954 
955 
956 
957 template <typename T>
958 void PetscMatrix<T>::flush ()
959 {
960  semiparallel_only();
961 
962  MatAssemblyBeginEnd(this->comm(), this->_mat, MAT_FLUSH_ASSEMBLY);
963 }
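// Unlike close(), which finalizes assembly with MAT_FINAL_ASSEMBLY, flush()
// only pushes currently stashed values using MAT_FLUSH_ASSEMBLY -- the PETSc
// idiom that lets assembly continue afterwards, e.g. when switching between
// ADD_VALUES and INSERT_VALUES, without compressing the matrix storage.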
964 
965 
966 
967 template <typename T>
968 void PetscMatrix<T>::set (const numeric_index_type i,
969  const numeric_index_type j,
970  const T value)
971 {
972  libmesh_assert (this->initialized());
973 
974  PetscInt i_val=i, j_val=j;
975 
976  PetscScalar petsc_value = static_cast<PetscScalar>(value);
977  std::scoped_lock lock(this->_petsc_matrix_mutex);
978  LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
979  &petsc_value, INSERT_VALUES));
980 }
981 
982 
983 
984 template <typename T>
985 void PetscMatrix<T>::add (const numeric_index_type i,
986  const numeric_index_type j,
987  const T value)
988 {
989  libmesh_assert (this->initialized());
990 
991  PetscInt i_val=i, j_val=j;
992 
993  PetscScalar petsc_value = static_cast<PetscScalar>(value);
994  std::scoped_lock lock(this->_petsc_matrix_mutex);
995  LibmeshPetscCall(MatSetValues(this->_mat, 1, &i_val, 1, &j_val,
996  &petsc_value, ADD_VALUES));
997 }
998 
999 
1000 
1001 template <typename T>
1002 void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
1003  const std::vector<numeric_index_type> & dof_indices)
1004 {
1005  this->add_matrix (dm, dof_indices, dof_indices);
1006 }
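// This is the overload typically used in an element assembly loop; a
// hypothetical sketch (Ke and dof_indices belong to the current element):
//
//   A.add_matrix(Ke, dof_indices);  // adds Ke(i,j) to A(dof_indices[i], dof_indices[j])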
1007 
1008 
1009 
1010 
1011 
1012 
1013 
1014 template <typename T>
1015 void PetscMatrix<T>::add (const T a_in, const SparseMatrix<T> & X_in)
1016 {
1017  libmesh_assert (this->initialized());
1018 
1019  // sanity check. but this cannot avoid
1020  // crash due to incompatible sparsity structure...
1021  libmesh_assert_equal_to (this->m(), X_in.m());
1022  libmesh_assert_equal_to (this->n(), X_in.n());
1023 
1024  PetscScalar a = static_cast<PetscScalar> (a_in);
1025  const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1026 
1027  libmesh_assert (X);
1028 
1029  // the matrix from which we copy the values has to be assembled/closed
1030  libmesh_assert(X->closed());
1031 
1032  semiparallel_only();
1033 
1034  LibmeshPetscCall(MatAXPY(this->_mat, a, X->_mat, DIFFERENT_NONZERO_PATTERN));
1035 }
1036 
1037 
1038 template <typename T>
1039 void PetscMatrix<T>::matrix_matrix_mult (SparseMatrix<T> & X_in, SparseMatrix<T> & Y_out, bool reuse)
1040 {
1041  libmesh_assert (this->initialized());
1042 
1043  // sanity check
1044  // we do not check the Y_out size here as we will initialize & close it at the end
1045  libmesh_assert_equal_to (this->n(), X_in.m());
1046 
1047  const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1048  PetscMatrix<T> * Y = cast_ptr<PetscMatrix<T> *> (&Y_out);
1049 
1050  // the matrix from which we copy the values has to be assembled/closed
1051  libmesh_assert(X->closed());
1052 
1053  semiparallel_only();
1054 
1055  if (reuse)
1056  LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_REUSE_MATRIX, PETSC_DEFAULT, &Y->_mat));
1057  else
1058  {
1059  Y->clear();
1060  LibmeshPetscCall(MatMatMult(this->_mat, X->_mat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y->_mat));
1061  }
1062  // Specify that the new matrix is initialized
1063  // We do not close it here as `MatMatMult` ensures Y being closed
1064  Y->_is_initialized = true;
1065 }
1066 
1067 template <typename T>
1068 void
1069 PetscMatrix<T>::add_sparse_matrix (const SparseMatrix<T> & spm,
1070  const std::map<numeric_index_type, numeric_index_type> & row_ltog,
1071  const std::map<numeric_index_type, numeric_index_type> & col_ltog,
1072  const T scalar)
1073 {
1074  // size of spm is usually greater than row_ltog and col_ltog in parallel as the indices are owned by the processor
1075  // also, we should allow adding certain parts of spm to _mat
1076  libmesh_assert_greater_equal(spm.m(), row_ltog.size());
1077  libmesh_assert_greater_equal(spm.n(), col_ltog.size());
1078 
1079  // make sure matrix has larger size than spm
1080  libmesh_assert_greater_equal(this->m(), spm.m());
1081  libmesh_assert_greater_equal(this->n(), spm.n());
1082 
1083  if (!this->closed())
1084  this->close();
1085 
1086  auto pscm = cast_ptr<const PetscMatrix<T> *>(&spm);
1087 
1088  PetscInt ncols = 0;
1089 
1090  const PetscInt * lcols;
1091  const PetscScalar * vals;
1092 
1093  std::vector<PetscInt> gcols;
1094  std::vector<PetscScalar> values;
1095 
1096  for (auto ltog : row_ltog)
1097  {
1098  PetscInt grow[] = {static_cast<PetscInt>(ltog.second)}; // global row index
1099 
1100  LibmeshPetscCall(MatGetRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1101 
1102  // get global indices (gcols) from lcols, increment values = vals*scalar
1103  gcols.resize(ncols);
1104  values.resize(ncols);
1105  for (auto i : index_range(gcols))
1106  {
1107  gcols[i] = libmesh_map_find(col_ltog, lcols[i]);
1108  values[i] = PS(scalar) * vals[i];
1109  }
1110 
1111  LibmeshPetscCall(MatSetValues(this->_mat, 1, grow, ncols, gcols.data(), values.data(), ADD_VALUES));
1112  LibmeshPetscCall(MatRestoreRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals));
1113  }
1114  // Note: We are not closing the matrix because it is expensive to do so when adding multiple sparse matrices.
1115  // Remember to manually close the matrix once all changes to the matrix have been made.
1116 }
1117 
1118 template <typename T>
1119 T PetscMatrix<T>::operator () (const numeric_index_type i_in,
1120  const numeric_index_type j_in) const
1121 {
1122  libmesh_assert (this->initialized());
1123 
1124  // If the entry is not in the sparse matrix, it is 0.
1125  T value=0.;
1126 
1127  PetscInt
1128  i_val=static_cast<PetscInt>(i_in),
1129  j_val=static_cast<PetscInt>(j_in);
1130 
1131 
1132  // the matrix needs to be closed for this to work
1133  // this->close();
1134  // but closing it is a semiparallel operation; we want operator()
1135  // to run on one processor.
1136  libmesh_assert(this->closed());
1137 
1138  LibmeshPetscCall(MatGetValue(this->_mat, i_val, j_val, &value));
1139 
1140  return value;
1141 }
1142 
1143 template <typename T>
1144 void PetscMatrix<T>::get_row(numeric_index_type i_in,
1145  std::vector<numeric_index_type> & indices,
1146  std::vector<T> & values) const
1147 {
1148  libmesh_assert (this->initialized());
1149 
1150  const PetscScalar * petsc_row;
1151  const PetscInt * petsc_cols;
1152 
1153  PetscInt
1154  ncols=0,
1155  i_val = static_cast<PetscInt>(i_in);
1156 
1157  // the matrix needs to be closed for this to work
1158  // this->close();
1159  // but closing it is a semiparallel operation; we want operator()
1160  // to run on one processor.
1161  libmesh_assert(this->closed());
1162 
1163  // PETSc makes no effort at being thread safe. Helgrind complains about
1164  // possible data races even just in PetscFunctionBegin (due to things
1165  // like stack counter incrementing). Perhaps we could ignore
1166  // this, but there are legitimate data races for Mat data members like
1167  // mat->getrowactive between MatGetRow and MatRestoreRow. Moreover,
1168  // there could be a write into mat->rowvalues during MatGetRow from
1169  // one thread while we are attempting to read from mat->rowvalues
1170  // (through petsc_cols) during data copy in another thread. So
1171  // the safe thing to do is to lock the whole method
1172 
1173  std::lock_guard<std::mutex> lock(_petsc_matrix_mutex);
1174 
1175  LibmeshPetscCall(MatGetRow(this->_mat, i_val, &ncols, &petsc_cols, &petsc_row));
1176 
1177  // Copy the data
1178  indices.resize(static_cast<std::size_t>(ncols));
1179  values.resize(static_cast<std::size_t>(ncols));
1180 
1181  for (auto i : index_range(indices))
1182  {
1183  indices[i] = static_cast<numeric_index_type>(petsc_cols[i]);
1184  values[i] = static_cast<T>(petsc_row[i]);
1185  }
1186 
1187  LibmeshPetscCall(MatRestoreRow(this->_mat, i_val,
1188  &ncols, &petsc_cols, &petsc_row));
1189 }
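// A hypothetical read-only sketch: fetch the column indices and values of one
// locally owned row of a closed matrix:
//
//   std::vector<numeric_index_type> cols;
//   std::vector<Number> vals;
//   A.get_row(row, cols, vals);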
1190 
1191 
1192 
1193 template <typename T>
1194 PetscMatrix<T> & PetscMatrix<T>::operator= (const PetscMatrix<T> & v)
1195 {
1196  semiparallel_only();
1197 
1198  if (this->_mat)
1199  {
1200  PetscBool assembled;
1201  LibmeshPetscCall(MatAssembled(this->_mat, &assembled));
1202 #ifndef NDEBUG
1203  const bool cxx_assembled = (assembled == PETSC_TRUE) ? true : false;
1204  libmesh_assert(this->_communicator.verify(cxx_assembled));
1205 #endif
1206 
1207  if (!assembled)
1208  // MatCopy does not work with an unassembled matrix. We could use MatDuplicate but then we
1209  // would have to destroy the matrix we manage and others might be relying on that data. So
1210  // we just assemble here regardless of the preceding level of matrix fill
1211  this->close();
1212  LibmeshPetscCall(MatCopy(v._mat, this->_mat, DIFFERENT_NONZERO_PATTERN));
1213  }
1214  else
1215  LibmeshPetscCall(MatDuplicate(v._mat, MAT_COPY_VALUES, &this->_mat));
1216 
1217  this->_is_initialized = true;
1218 
1219  return *this;
1220 }
1221 
1222 template <typename T>
1223 SparseMatrix<T> & PetscMatrix<T>::operator= (const SparseMatrix<T> & v)
1224 {
1225  *this = cast_ref<const PetscMatrix<T> &>(v);
1226  return *this;
1227 }
1228 
1229 template <typename T>
1230 void PetscMatrix<T>::scale (const T scale)
1231 {
1232  libmesh_assert(this->closed());
1233 
1234  LibmeshPetscCall(MatScale(this->_mat, PS(scale)));
1235 }
1236 
1237 template <typename T>
1238 bool PetscMatrix<T>::supports_hash_table () const
1239 {
1240 #if PETSC_RELEASE_LESS_THAN(3,19,0)
1241  return false;
1242 #else
1243  return true;
1244 #endif
1245 }
1246 
1247 #if PETSC_RELEASE_GREATER_EQUALS(3,23,0)
1248 template <typename T>
1249 std::unique_ptr<PetscMatrix<T>>
1250 PetscMatrix<T>::copy_from_hash ()
1251 {
1252  Mat xaij;
1253  libmesh_assert(this->initialized());
1254  libmesh_assert(!this->closed());
1255  LibmeshPetscCall(MatDuplicate(this->_mat, MAT_DO_NOT_COPY_VALUES, &xaij));
1256  LibmeshPetscCall(MatCopyHashToXAIJ(this->_mat, xaij));
1257  return std::make_unique<PetscMatrix<T>>(xaij, this->comm(), /*destroy_on_exit=*/true);
1258 }
1259 #endif
1260 
1261 template <typename T>
1262 void
1263 PetscMatrix<T>::restore_original_nonzero_pattern ()
1264 {
1265  semiparallel_only();
1266 
1267  if (this->_use_hash_table)
1268 #if PETSC_RELEASE_GREATER_EQUALS(3, 23, 0)
1269  // This performs MatReset plus re-establishes the hash table
1270  LibmeshPetscCall(MatResetHash(this->_mat));
1271 #else
1272  libmesh_error_msg("Resetting hash tables not supported until PETSc version 3.23");
1273 #endif
1274  else
1275  this->reset_preallocation();
1276 }
1277 
1278 //------------------------------------------------------------------
1279 // Explicit instantiations
1280 template class LIBMESH_EXPORT PetscMatrix<Number>;
1281 
1282 } // namespace libMesh
1283 
1284 
1285 #endif // #ifdef LIBMESH_HAVE_PETSC
Defines an enum for parallel data structure types.