petsc_matrix.C
1 // The libMesh Finite Element Library.
2 // Copyright (C) 2002-2019 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 
4 // This library is free software; you can redistribute it and/or
5 // modify it under the terms of the GNU Lesser General Public
6 // License as published by the Free Software Foundation; either
7 // version 2.1 of the License, or (at your option) any later version.
8 
9 // This library is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 // Lesser General Public License for more details.
13 
14 // You should have received a copy of the GNU Lesser General Public
15 // License along with this library; if not, write to the Free Software
16 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 
18 
19 // C++ includes
20 #include <unistd.h> // mkstemp
21 #include <fstream>
22 
23 #include "libmesh/libmesh_config.h"
24 
25 #ifdef LIBMESH_HAVE_PETSC
26 
27 // Local includes
28 #include "libmesh/petsc_matrix.h"
29 #include "libmesh/dof_map.h"
30 #include "libmesh/dense_matrix.h"
31 #include "libmesh/petsc_vector.h"
32 #include "libmesh/parallel.h"
33 
34 
35 // For some reason, the blocked matrix API calls below seem to break with PETSC 3.2 & presumably earlier.
36 // For example:
37 // [0]PETSC ERROR: --------------------- Error Message ------------------------------------
38 // [0]PETSC ERROR: Nonconforming object sizes!
39 // [0]PETSC ERROR: Attempt to set block size 3 with BAIJ 1!
40 // [0]PETSC ERROR: ------------------------------------------------------------------------
41 // so as a cowardly workaround disable the functionality in this translation unit for older PETSc's
42 #if PETSC_VERSION_LESS_THAN(3,3,0)
43 # undef LIBMESH_ENABLE_BLOCKED_STORAGE
44 #endif
45 
46 
47 
48 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
49 
50 namespace
51 {
52 using namespace libMesh;
53 
54 // historic libMesh n_nz & n_oz arrays are set up for PETSc's AIJ format.
55 // however, when the blocksize is >1, we need to transform these into
56 // their BAIJ counterparts.
57 inline
58 void transform_preallocation_arrays (const PetscInt blocksize,
59  const std::vector<numeric_index_type> & n_nz,
60  const std::vector<numeric_index_type> & n_oz,
61  std::vector<numeric_index_type> & b_n_nz,
62  std::vector<numeric_index_type> & b_n_oz)
63 {
64  libmesh_assert_equal_to (n_nz.size(), n_oz.size());
65  libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
66 
67  b_n_nz.clear(); b_n_nz.reserve(n_nz.size()/blocksize);
68  b_n_oz.clear(); b_n_oz.reserve(n_oz.size()/blocksize);
69 
70  for (std::size_t nn=0, nnzs=n_nz.size(); nn<nnzs; nn += blocksize)
71  {
72  b_n_nz.push_back (n_nz[nn]/blocksize);
73  b_n_oz.push_back (n_oz[nn]/blocksize);
74  }
75 }
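// Worked example (illustration only, not part of the original source):
// with blocksize = 3 and per-row counts
//   n_nz = {9, 9, 9,  6, 6, 6},   n_oz = {3, 3, 3,  0, 0, 0}
// the first row of each block is sampled and divided by the blocksize,
// giving the block-row counts
//   b_n_nz = {3, 2},   b_n_oz = {1, 0}.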
76 }
77 
78 #endif
79 
80 
81 
82 namespace libMesh
83 {
84 
85 
86 //-----------------------------------------------------------------------
87 // PetscMatrix members
88 
89 
90 // Constructor
91 template <typename T>
92 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in) :
93  SparseMatrix<T>(comm_in),
94  _destroy_mat_on_exit(true),
95  _mat_type(AIJ)
96 {}
97 
98 
99 
100 // Constructor taking an existing Mat but not the responsibility
101 // for destroying it
102 template <typename T>
103 PetscMatrix<T>::PetscMatrix(Mat mat_in,
104  const Parallel::Communicator & comm_in) :
105  SparseMatrix<T>(comm_in),
106  _destroy_mat_on_exit(false),
107  _mat_type(AIJ)
108 {
109  this->_mat = mat_in;
110  this->_is_initialized = true;
111 }
112 
113 
114 
115 // Destructor
116 template <typename T>
117 PetscMatrix<T>::~PetscMatrix()
118 {
119  this->clear();
120 }
121 
122 template <typename T>
123 void PetscMatrix<T>::set_matrix_type(PetscMatrixType mat_type)
124 {
125  _mat_type = mat_type;
126 }
127 
128 template <typename T>
129 void PetscMatrix<T>::init (const numeric_index_type m_in,
130  const numeric_index_type n_in,
131  const numeric_index_type m_l,
132  const numeric_index_type n_l,
133  const numeric_index_type nnz,
134  const numeric_index_type noz,
135  const numeric_index_type blocksize_in)
136 {
137  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
138  libmesh_ignore(blocksize_in);
139 
140  // Clear initialized matrices
141  if (this->initialized())
142  this->clear();
143 
144  this->_is_initialized = true;
145 
146 
147  PetscErrorCode ierr = 0;
148  PetscInt m_global = static_cast<PetscInt>(m_in);
149  PetscInt n_global = static_cast<PetscInt>(n_in);
150  PetscInt m_local = static_cast<PetscInt>(m_l);
151  PetscInt n_local = static_cast<PetscInt>(n_l);
152  PetscInt n_nz = static_cast<PetscInt>(nnz);
153  PetscInt n_oz = static_cast<PetscInt>(noz);
154 
155  ierr = MatCreate(this->comm().get(), &_mat);
156  LIBMESH_CHKERR(ierr);
157  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
158  LIBMESH_CHKERR(ierr);
159  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
160  ierr = MatSetBlockSize(_mat,blocksize);
161  LIBMESH_CHKERR(ierr);
162 
163 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
164  if (blocksize > 1)
165  {
166  // specified blocksize, bs>1.
167  // double check sizes.
168  libmesh_assert_equal_to (m_local % blocksize, 0);
169  libmesh_assert_equal_to (n_local % blocksize, 0);
170  libmesh_assert_equal_to (m_global % blocksize, 0);
171  libmesh_assert_equal_to (n_global % blocksize, 0);
172  libmesh_assert_equal_to (n_nz % blocksize, 0);
173  libmesh_assert_equal_to (n_oz % blocksize, 0);
174 
175  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
176  LIBMESH_CHKERR(ierr);
177  ierr = MatSeqBAIJSetPreallocation(_mat, blocksize, n_nz/blocksize, NULL);
178  LIBMESH_CHKERR(ierr);
179  ierr = MatMPIBAIJSetPreallocation(_mat, blocksize,
180  n_nz/blocksize, NULL,
181  n_oz/blocksize, NULL);
182  LIBMESH_CHKERR(ierr);
183  }
184  else
185 #endif
186  {
187  switch (_mat_type) {
188  case AIJ:
189  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
190  LIBMESH_CHKERR(ierr);
191  ierr = MatSeqAIJSetPreallocation(_mat, n_nz, NULL);
192  LIBMESH_CHKERR(ierr);
193  ierr = MatMPIAIJSetPreallocation(_mat, n_nz, NULL, n_oz, NULL);
194  LIBMESH_CHKERR(ierr);
195  break;
196 
197  case HYPRE:
198 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
199  ierr = MatSetType(_mat, MATHYPRE);
200  LIBMESH_CHKERR(ierr);
201  ierr = MatHYPRESetPreallocation(_mat, n_nz, NULL, n_oz, NULL);
202  LIBMESH_CHKERR(ierr);
203 #else
204  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
205 #endif
206  break;
207 
208  default: libmesh_error_msg("Unsupported petsc matrix type");
209  }
210  }
211 
212  // Make it an error for PETSc to allocate new nonzero entries during assembly
213 #if PETSC_VERSION_LESS_THAN(3,0,0)
214  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR);
215 #else
216  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
217 #endif
218  LIBMESH_CHKERR(ierr);
219 
220  // Is prefix information available somewhere? Perhaps pass in the system name?
221  ierr = MatSetOptionsPrefix(_mat, "");
222  LIBMESH_CHKERR(ierr);
223  ierr = MatSetFromOptions(_mat);
224  LIBMESH_CHKERR(ierr);
225 
226  this->zero ();
227 }
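// Usage sketch (illustration only; the names, sizes, and nonzero counts
// below are hypothetical):
//   PetscMatrix<Number> mat(comm);
//   // 100x100 matrix stored on a single processor, with roughly 10
//   // on-diagonal and 2 off-diagonal nonzeros expected per row:
//   mat.init(/*m=*/100, /*n=*/100, /*m_l=*/100, /*n_l=*/100,
//            /*nnz=*/10, /*noz=*/2);
//   mat.add(0, 0, 1.);
//   mat.close();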
228 
229 
230 template <typename T>
231 void PetscMatrix<T>::init (const numeric_index_type m_in,
232  const numeric_index_type n_in,
233  const numeric_index_type m_l,
234  const numeric_index_type n_l,
235  const std::vector<numeric_index_type> & n_nz,
236  const std::vector<numeric_index_type> & n_oz,
237  const numeric_index_type blocksize_in)
238 {
239  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
240  libmesh_ignore(blocksize_in);
241 
242  // Clear initialized matrices
243  if (this->initialized())
244  this->clear();
245 
246  this->_is_initialized = true;
247 
248  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
249  libmesh_assert_equal_to (n_nz.size(), m_l);
250  libmesh_assert_equal_to (n_oz.size(), m_l);
251 
252  PetscErrorCode ierr = 0;
253  PetscInt m_global = static_cast<PetscInt>(m_in);
254  PetscInt n_global = static_cast<PetscInt>(n_in);
255  PetscInt m_local = static_cast<PetscInt>(m_l);
256  PetscInt n_local = static_cast<PetscInt>(n_l);
257 
258  ierr = MatCreate(this->comm().get(), &_mat);
259  LIBMESH_CHKERR(ierr);
260  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
261  LIBMESH_CHKERR(ierr);
262  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
263  ierr = MatSetBlockSize(_mat,blocksize);
264  LIBMESH_CHKERR(ierr);
265 
266 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
267  if (blocksize > 1)
268  {
269  // specified blocksize, bs>1.
270  // double check sizes.
271  libmesh_assert_equal_to (m_local % blocksize, 0);
272  libmesh_assert_equal_to (n_local % blocksize, 0);
273  libmesh_assert_equal_to (m_global % blocksize, 0);
274  libmesh_assert_equal_to (n_global % blocksize, 0);
275 
276  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
277  LIBMESH_CHKERR(ierr);
278 
279  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
280  std::vector<numeric_index_type> b_n_nz, b_n_oz;
281 
282  transform_preallocation_arrays (blocksize,
283  n_nz, n_oz,
284  b_n_nz, b_n_oz);
285 
286  ierr = MatSeqBAIJSetPreallocation (_mat,
287  blocksize,
288  0,
289  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()));
290  LIBMESH_CHKERR(ierr);
291 
292  ierr = MatMPIBAIJSetPreallocation (_mat,
293  blocksize,
294  0,
295  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()),
296  0,
297  numeric_petsc_cast(b_n_oz.empty() ? nullptr : b_n_oz.data()));
298  LIBMESH_CHKERR(ierr);
299  }
300  else
301 #endif
302  {
303  switch (_mat_type) {
304  case AIJ:
305  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
306  LIBMESH_CHKERR(ierr);
307  ierr = MatSeqAIJSetPreallocation (_mat,
308  0,
309  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()));
310  LIBMESH_CHKERR(ierr);
311  ierr = MatMPIAIJSetPreallocation (_mat,
312  0,
313  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
314  0,
315  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data()));
316  break;
317 
318  case HYPRE:
319 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
320  ierr = MatSetType(_mat, MATHYPRE);
321  LIBMESH_CHKERR(ierr);
322  ierr = MatHYPRESetPreallocation (_mat,
323  0,
324  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
325  0,
326  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data()));
327  LIBMESH_CHKERR(ierr);
328 #else
329  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
330 #endif
331  break;
332 
333  default: libmesh_error_msg("Unsupported petsc matrix type");
334  }
335 
336  }
337 
338  // Make it an error for PETSc to allocate new nonzero entries during assembly
339 #if PETSC_VERSION_LESS_THAN(3,0,0)
340  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR);
341 #else
342  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
343 #endif
344  LIBMESH_CHKERR(ierr);
345 
346  // Is prefix information available somewhere? Perhaps pass in the system name?
347  ierr = MatSetOptionsPrefix(_mat, "");
348  LIBMESH_CHKERR(ierr);
349  ierr = MatSetFromOptions(_mat);
350  LIBMESH_CHKERR(ierr);
351 
352 
353  this->zero();
354 }
355 
356 
357 template <typename T>
358 void PetscMatrix<T>::init ()
359 {
360  libmesh_assert(this->_dof_map);
361 
362  // Clear initialized matrices
363  if (this->initialized())
364  this->clear();
365 
366  this->_is_initialized = true;
367 
368 
369  const numeric_index_type my_m = this->_dof_map->n_dofs();
370  const numeric_index_type my_n = my_m;
371  const numeric_index_type n_l = this->_dof_map->n_dofs_on_processor(this->processor_id());
372  const numeric_index_type m_l = n_l;
373 
374 
375  const std::vector<numeric_index_type> & n_nz = this->_dof_map->get_n_nz();
376  const std::vector<numeric_index_type> & n_oz = this->_dof_map->get_n_oz();
377 
378  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
379  libmesh_assert_equal_to (n_nz.size(), m_l);
380  libmesh_assert_equal_to (n_oz.size(), m_l);
381 
382  PetscErrorCode ierr = 0;
383  PetscInt m_global = static_cast<PetscInt>(my_m);
384  PetscInt n_global = static_cast<PetscInt>(my_n);
385  PetscInt m_local = static_cast<PetscInt>(m_l);
386  PetscInt n_local = static_cast<PetscInt>(n_l);
387 
388  ierr = MatCreate(this->comm().get(), &_mat);
389  LIBMESH_CHKERR(ierr);
390  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
391  LIBMESH_CHKERR(ierr);
392  PetscInt blocksize = static_cast<PetscInt>(this->_dof_map->block_size());
393  ierr = MatSetBlockSize(_mat,blocksize);
394  LIBMESH_CHKERR(ierr);
395 
396 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
397  if (blocksize > 1)
398  {
399  // specified blocksize, bs>1.
400  // double check sizes.
401  libmesh_assert_equal_to (m_local % blocksize, 0);
402  libmesh_assert_equal_to (n_local % blocksize, 0);
403  libmesh_assert_equal_to (m_global % blocksize, 0);
404  libmesh_assert_equal_to (n_global % blocksize, 0);
405 
406  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
407  LIBMESH_CHKERR(ierr);
408 
409  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
410  std::vector<numeric_index_type> b_n_nz, b_n_oz;
411 
412  transform_preallocation_arrays (blocksize,
413  n_nz, n_oz,
414  b_n_nz, b_n_oz);
415 
416  ierr = MatSeqBAIJSetPreallocation (_mat,
417  blocksize,
418  0,
419  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()));
420  LIBMESH_CHKERR(ierr);
421 
422  ierr = MatMPIBAIJSetPreallocation (_mat,
423  blocksize,
424  0,
425  numeric_petsc_cast(b_n_nz.empty() ? nullptr : b_n_nz.data()),
426  0,
427  numeric_petsc_cast(b_n_oz.empty() ? nullptr : b_n_oz.data()));
428  LIBMESH_CHKERR(ierr);
429  }
430  else
431 #endif
432  {
433  switch (_mat_type) {
434  case AIJ:
435  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
436  LIBMESH_CHKERR(ierr);
437  ierr = MatSeqAIJSetPreallocation (_mat,
438  0,
439  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()));
440  LIBMESH_CHKERR(ierr);
441  ierr = MatMPIAIJSetPreallocation (_mat,
442  0,
443  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
444  0,
445  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data()));
446  break;
447 
448  case HYPRE:
449 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
450  ierr = MatSetType(_mat, MATHYPRE);
451  LIBMESH_CHKERR(ierr);
452  ierr = MatHYPRESetPreallocation (_mat,
453  0,
454  numeric_petsc_cast(n_nz.empty() ? nullptr : n_nz.data()),
455  0,
456  numeric_petsc_cast(n_oz.empty() ? nullptr : n_oz.data()));
457  LIBMESH_CHKERR(ierr);
458 #else
459  libmesh_error_msg("PETSc 3.9.4 or higher with hypre is required for MatHypre");
460 #endif
461  break;
462 
463  default: libmesh_error_msg("Unsupported petsc matrix type");
464  }
465  }
466 
467  // Make it an error for PETSc to allocate new nonzero entries during assembly
468 #if PETSC_VERSION_LESS_THAN(3,0,0)
469  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR);
470 #else
471  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
472 #endif
473  LIBMESH_CHKERR(ierr);
474 
475  // Is prefix information available somewhere? Perhaps pass in the system name?
476  ierr = MatSetOptionsPrefix(_mat, "");
477  LIBMESH_CHKERR(ierr);
478  ierr = MatSetFromOptions(_mat);
479  LIBMESH_CHKERR(ierr);
480 
481  this->zero();
482 }
483 
484 
485 template <typename T>
486 void PetscMatrix<T>::update_preallocation_and_zero ()
487 {
488  libmesh_not_implemented();
489 }
490 
491 template <typename T>
492 void PetscMatrix<T>::reset_preallocation()
493 {
494 #if !PETSC_VERSION_LESS_THAN(3,9,0)
495  libmesh_assert (this->initialized());
496 
497  auto ierr = MatResetPreallocation(_mat);
498  LIBMESH_CHKERR(ierr);
499 #else
500  libmesh_warning("Your version of PETSc doesn't support resetting of "
501  "preallocation, so we will use your most recent sparsity "
502  "pattern. This may result in a degradation of performance\n");
503 #endif
504 }
505 
506 template <typename T>
507 void PetscMatrix<T>::zero ()
508 {
509  libmesh_assert (this->initialized());
510 
511  semiparallel_only();
512 
513  PetscErrorCode ierr=0;
514 
515  PetscInt m_l, n_l;
516 
517  ierr = MatGetLocalSize(_mat,&m_l,&n_l);
518  LIBMESH_CHKERR(ierr);
519 
520  if (n_l)
521  {
522  ierr = MatZeroEntries(_mat);
523  LIBMESH_CHKERR(ierr);
524  }
525 }
526 
527 template <typename T>
528 void PetscMatrix<T>::zero_rows (std::vector<numeric_index_type> & rows, T diag_value)
529 {
530  libmesh_assert (this->initialized());
531 
532  semiparallel_only();
533 
534  PetscErrorCode ierr=0;
535 
536 #if PETSC_RELEASE_LESS_THAN(3,1,1)
537  if (!rows.empty())
538  ierr = MatZeroRows(_mat, rows.size(),
539  numeric_petsc_cast(rows.data()), diag_value);
540  else
541  ierr = MatZeroRows(_mat, 0, NULL, diag_value);
542 #else
543  // As of petsc-dev at the time of 3.1.0, MatZeroRows now takes two additional
544  // optional arguments. The optional arguments (x,b) can be used to specify the
545  // solutions for the zeroed rows (x) and right hand side (b) to update.
546  // Could be useful for setting boundary conditions...
547  if (!rows.empty())
548  ierr = MatZeroRows(_mat, cast_int<PetscInt>(rows.size()),
549  numeric_petsc_cast(rows.data()), PS(diag_value),
550  NULL, NULL);
551  else
552  ierr = MatZeroRows(_mat, 0, NULL, PS(diag_value), NULL,
553  NULL);
554 #endif
555 
556  LIBMESH_CHKERR(ierr);
557 }
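// Usage sketch (illustration only): one way to impose simple Dirichlet-type
// constraints is to zero the constrained rows and put 1 on the diagonal,
// then set the matching right-hand-side entries to the prescribed values:
//   std::vector<numeric_index_type> constrained_rows = {0, 5, 17};
//   mat.zero_rows(constrained_rows, 1.);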
558 
559 template <typename T>
560 void PetscMatrix<T>::clear ()
561 {
562  PetscErrorCode ierr=0;
563 
564  if ((this->initialized()) && (this->_destroy_mat_on_exit))
565  {
566  semiparallel_only();
567 
568  ierr = LibMeshMatDestroy (&_mat);
569  LIBMESH_CHKERR(ierr);
570 
571  this->_is_initialized = false;
572  }
573 }
574 
575 
576 
577 template <typename T>
578 Real PetscMatrix<T>::l1_norm () const
579 {
580  libmesh_assert (this->initialized());
581 
582  semiparallel_only();
583 
584  PetscErrorCode ierr=0;
585  PetscReal petsc_value;
586  Real value;
587 
588  libmesh_assert (this->closed());
589 
590  ierr = MatNorm(_mat, NORM_1, &petsc_value);
591  LIBMESH_CHKERR(ierr);
592 
593  value = static_cast<Real>(petsc_value);
594 
595  return value;
596 }
597 
598 
599 
600 template <typename T>
601 Real PetscMatrix<T>::linfty_norm () const
602 {
603  libmesh_assert (this->initialized());
604 
605  semiparallel_only();
606 
607  PetscErrorCode ierr=0;
608  PetscReal petsc_value;
609  Real value;
610 
611  libmesh_assert (this->closed());
612 
613  ierr = MatNorm(_mat, NORM_INFINITY, &petsc_value);
614  LIBMESH_CHKERR(ierr);
615 
616  value = static_cast<Real>(petsc_value);
617 
618  return value;
619 }
620 
621 
622 
623 template <typename T>
624 void PetscMatrix<T>::print_matlab (const std::string & name) const
625 {
626  libmesh_assert (this->initialized());
627 
628  semiparallel_only();
629 
630  if (!this->closed())
631  {
632  libmesh_deprecated();
633  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
634  "Please update your code, as this warning will become an error in a future release.");
635  const_cast<PetscMatrix<T> *>(this)->close();
636  }
637 
638  PetscErrorCode ierr=0;
639  PetscViewer petsc_viewer;
640 
641 
642  ierr = PetscViewerCreate (this->comm().get(),
643  &petsc_viewer);
644  LIBMESH_CHKERR(ierr);
645 
650  if (name != "")
651  {
652  ierr = PetscViewerASCIIOpen( this->comm().get(),
653  name.c_str(),
654  &petsc_viewer);
655  LIBMESH_CHKERR(ierr);
656 
657 #if PETSC_VERSION_LESS_THAN(3,7,0)
658  ierr = PetscViewerSetFormat (petsc_viewer,
659  PETSC_VIEWER_ASCII_MATLAB);
660 #else
661  ierr = PetscViewerPushFormat (petsc_viewer,
662  PETSC_VIEWER_ASCII_MATLAB);
663 #endif
664 
665  LIBMESH_CHKERR(ierr);
666 
667  ierr = MatView (_mat, petsc_viewer);
668  LIBMESH_CHKERR(ierr);
669  }
670 
674  else
675  {
676 #if PETSC_VERSION_LESS_THAN(3,7,0)
677  ierr = PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
678  PETSC_VIEWER_ASCII_MATLAB);
679 #else
680  ierr = PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
681  PETSC_VIEWER_ASCII_MATLAB);
682 #endif
683 
684  LIBMESH_CHKERR(ierr);
685 
686  ierr = MatView (_mat, PETSC_VIEWER_STDOUT_WORLD);
687  LIBMESH_CHKERR(ierr);
688  }
689 
690 
694  ierr = LibMeshPetscViewerDestroy (&petsc_viewer);
695  LIBMESH_CHKERR(ierr);
696 }
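// Usage sketch (illustration only): writing the assembled matrix in
// MATLAB/Octave-readable ASCII form to a hypothetical file "K.m":
//   mat.close();
//   mat.print_matlab("K.m");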
697 
698 
699 
700 
701 
702 template <typename T>
703 void PetscMatrix<T>::print_personal(std::ostream & os) const
704 {
705  libmesh_assert (this->initialized());
706 
707  // Routine must be called in parallel on parallel matrices
708  // and serial on serial matrices.
709  semiparallel_only();
710 
711  // #ifndef NDEBUG
712  // if (os != std::cout)
713  // libMesh::err << "Warning! PETSc can only print to std::cout!" << std::endl;
714  // #endif
715 
716  // Matrix must be in an assembled state to be printed
717  if (!this->closed())
718  {
719  libmesh_deprecated();
720  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_personal().\n"
721  "Please update your code, as this warning will become an error in a future release.");
722  const_cast<PetscMatrix<T> *>(this)->close();
723  }
724 
725  PetscErrorCode ierr=0;
726 
727  // Print to screen if ostream is stdout
728  if (os.rdbuf() == std::cout.rdbuf())
729  {
730  ierr = MatView(_mat, PETSC_VIEWER_STDOUT_SELF);
731  LIBMESH_CHKERR(ierr);
732  }
733 
734  // Otherwise, print to the requested file, in a roundabout way...
735  else
736  {
737  // We will create a temporary filename, and file, for PETSc to
738  // write to.
739  std::string temp_filename;
740 
741  {
742  // Template for temporary filename
743  char c[] = "temp_petsc_matrix.XXXXXX";
744 
745  // Generate temporary, unique filename only on processor 0. We will
746  // use this filename for PetscViewerASCIIOpen, before copying it into
747  // the user's stream
748  if (this->processor_id() == 0)
749  {
750  int fd = mkstemp(c);
751 
752  // Check to see that mkstemp did not fail.
753  if (fd == -1)
754  libmesh_error_msg("mkstemp failed in PetscMatrix::print_personal()");
755 
756  // mkstemp returns a file descriptor for an open file,
757  // so let's close it before we hand it to PETSc!
758  ::close (fd);
759  }
760 
761  // Store temporary filename as string, makes it easier to broadcast
762  temp_filename = c;
763  }
764 
765  // Now broadcast the filename from processor 0 to all processors.
766  this->comm().broadcast(temp_filename);
767 
768  // PetscViewer object for passing to MatView
769  PetscViewer petsc_viewer;
770 
771  // This PETSc function only takes a string and handles the opening/closing
772  // of the file internally. Since print_personal() takes a reference to
773  // an ostream, we have to do an extra step... print_personal() should probably
774  // have a version that takes a string to get rid of this problem.
775  ierr = PetscViewerASCIIOpen( this->comm().get(),
776  temp_filename.c_str(),
777  &petsc_viewer);
778  LIBMESH_CHKERR(ierr);
779 
780  // Probably don't need to set the format if it's default...
781  // ierr = PetscViewerSetFormat (petsc_viewer,
782  // PETSC_VIEWER_DEFAULT);
783  // LIBMESH_CHKERR(ierr);
784 
785  // Finally print the matrix using the viewer
786  ierr = MatView (_mat, petsc_viewer);
787  LIBMESH_CHKERR(ierr);
788 
789  if (this->processor_id() == 0)
790  {
791  // Now the inefficient bit: open temp_filename as an ostream and copy the contents
792  // into the user's desired ostream. We can't just do a direct file copy, we don't even have the filename!
793  std::ifstream input_stream(temp_filename.c_str());
794  os << input_stream.rdbuf(); // The "most elegant" way to copy one stream into another.
795  // os.close(); // close not defined in ostream
796 
797  // Now remove the temporary file
798  input_stream.close();
799  std::remove(temp_filename.c_str());
800  }
801  }
802 }
803 
804 
805 
806 
807 
808 
809 template <typename T>
810 void PetscMatrix<T>::add_matrix (const DenseMatrix<T> & dm,
811  const std::vector<numeric_index_type> & rows,
812  const std::vector<numeric_index_type> & cols)
813 {
814  libmesh_assert (this->initialized());
815 
816  const numeric_index_type n_rows = dm.m();
817  const numeric_index_type n_cols = dm.n();
818 
819  libmesh_assert_equal_to (rows.size(), n_rows);
820  libmesh_assert_equal_to (cols.size(), n_cols);
821 
822  PetscErrorCode ierr=0;
823  ierr = MatSetValues(_mat,
824  n_rows, numeric_petsc_cast(rows.data()),
825  n_cols, numeric_petsc_cast(cols.data()),
826  pPS(const_cast<T*>(dm.get_values().data())),
827  ADD_VALUES);
828  LIBMESH_CHKERR(ierr);
829 }
830 
831 
832 
833 
834 
835 
836 template <typename T>
837 void PetscMatrix<T>::add_block_matrix (const DenseMatrix<T> & dm,
838  const std::vector<numeric_index_type> & brows,
839  const std::vector<numeric_index_type> & bcols)
840 {
841  libmesh_assert (this->initialized());
842 
843  const numeric_index_type n_brows =
844  cast_int<numeric_index_type>(brows.size());
845  const numeric_index_type n_bcols =
846  cast_int<numeric_index_type>(bcols.size());
847 
848  PetscErrorCode ierr=0;
849 
850 #ifndef NDEBUG
851  const numeric_index_type n_rows =
852  cast_int<numeric_index_type>(dm.m());
853  const numeric_index_type n_cols =
854  cast_int<numeric_index_type>(dm.n());
855  const numeric_index_type blocksize = n_rows / n_brows;
856 
857  libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
858  libmesh_assert_equal_to (blocksize*n_brows, n_rows);
859  libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
860 
861  PetscInt petsc_blocksize;
862  ierr = MatGetBlockSize(_mat, &petsc_blocksize);
863  LIBMESH_CHKERR(ierr);
864  libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
865 #endif
866 
867  // These casts are required for PETSc <= 2.1.5
868  ierr = MatSetValuesBlocked(_mat,
869  n_brows, numeric_petsc_cast(brows.data()),
870  n_bcols, numeric_petsc_cast(bcols.data()),
871  pPS(const_cast<T*>(dm.get_values().data())),
872  ADD_VALUES);
873  LIBMESH_CHKERR(ierr);
874 }
875 
876 
877 
878 
879 
880 template <typename T>
881 void PetscMatrix<T>::_get_submatrix (SparseMatrix<T> & submatrix,
882  const std::vector<numeric_index_type> & rows,
883  const std::vector<numeric_index_type> & cols,
884  const bool reuse_submatrix) const
885 {
886  if (!this->closed())
887  {
888  libmesh_deprecated();
889  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
890  "Please update your code, as this warning will become an error in a future release.");
891  const_cast<PetscMatrix<T> *>(this)->close();
892  }
893 
894  // Make sure the SparseMatrix passed in is really a PetscMatrix
895  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
896 
897  // If we're not reusing submatrix and submatrix is already initialized
898  // then we need to clear it, otherwise we get a memory leak.
899  if (!reuse_submatrix && submatrix.initialized())
900  submatrix.clear();
901 
902  // Construct row and column index sets.
903  PetscErrorCode ierr=0;
904  IS isrow, iscol;
905 
906  ierr = ISCreateLibMesh(this->comm().get(),
907  cast_int<PetscInt>(rows.size()),
908  numeric_petsc_cast(rows.data()),
909  PETSC_USE_POINTER,
910  &isrow); LIBMESH_CHKERR(ierr);
911 
912  ierr = ISCreateLibMesh(this->comm().get(),
913  cast_int<PetscInt>(cols.size()),
914  numeric_petsc_cast(cols.data()),
915  PETSC_USE_POINTER,
916  &iscol); LIBMESH_CHKERR(ierr);
917 
918  // Extract submatrix
919  ierr = LibMeshCreateSubMatrix(_mat,
920  isrow,
921  iscol,
922 #if PETSC_RELEASE_LESS_THAN(3,0,1)
923  PETSC_DECIDE,
924 #endif
925  (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
926  &(petsc_submatrix->_mat)); LIBMESH_CHKERR(ierr);
927 
928  // Specify that the new submatrix is initialized and close it.
929  petsc_submatrix->_is_initialized = true;
930  petsc_submatrix->close();
931 
932  // Clean up PETSc data structures
933  ierr = LibMeshISDestroy(&isrow); LIBMESH_CHKERR(ierr);
934  ierr = LibMeshISDestroy(&iscol); LIBMESH_CHKERR(ierr);
935 }
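// Usage sketch (illustration only): the protected _get_submatrix() above is
// normally reached through the public SparseMatrix::create_submatrix():
//   PetscMatrix<Number> sub(mat.comm());
//   std::vector<numeric_index_type> idx = {0, 1, 2};
//   mat.create_submatrix(sub, idx, idx);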
936 
937 
938 
939 template <typename T>
940 void PetscMatrix<T>::get_diagonal (NumericVector<T> & dest) const
941 {
942  // Make sure the NumericVector passed in is really a PetscVector
943  PetscVector<T> & petsc_dest = cast_ref<PetscVector<T> &>(dest);
944 
945  // Needs a const_cast since PETSc does not work with const.
946  PetscErrorCode ierr =
947  MatGetDiagonal(const_cast<PetscMatrix<T> *>(this)->mat(),petsc_dest.vec()); LIBMESH_CHKERR(ierr);
948 }
949 
950 
951 
952 template <typename T>
953 void PetscMatrix<T>::get_transpose (SparseMatrix<T> & dest) const
954 {
955  // Make sure the SparseMatrix passed in is really a PetscMatrix
956  PetscMatrix<T> & petsc_dest = cast_ref<PetscMatrix<T> &>(dest);
957 
958  // If we aren't reusing the matrix then need to clear dest,
959  // otherwise we get a memory leak
960  if (&petsc_dest != this)
961  dest.clear();
962 
963  PetscErrorCode ierr;
964 #if PETSC_VERSION_LESS_THAN(3,0,0)
965  if (&petsc_dest == this)
966  ierr = MatTranspose(_mat,NULL);
967  else
968  ierr = MatTranspose(_mat,&petsc_dest._mat);
969  LIBMESH_CHKERR(ierr);
970 #else
971  // FIXME - we can probably use MAT_REUSE_MATRIX in more situations
972  if (&petsc_dest == this)
973  ierr = MatTranspose(_mat,MAT_REUSE_MATRIX,&petsc_dest._mat);
974  else
975  ierr = MatTranspose(_mat,MAT_INITIAL_MATRIX,&petsc_dest._mat);
976  LIBMESH_CHKERR(ierr);
977 #endif
978 
979  // Specify that the transposed matrix is initialized and close it.
980  petsc_dest._is_initialized = true;
981  petsc_dest.close();
982 }
983 
984 
985 
986 
987 
988 template <typename T>
989 void PetscMatrix<T>::close ()
990 {
991  semiparallel_only();
992 
993  // BSK - 1/19/2004
994  // strictly this check should be OK, but it seems to
995  // fail on matrix-free matrices. Do they falsely
996  // state they are assembled? Check with the developers...
997  // if (this->closed())
998  // return;
999 
1000  PetscErrorCode ierr=0;
1001 
1002  ierr = MatAssemblyBegin (_mat, MAT_FINAL_ASSEMBLY);
1003  LIBMESH_CHKERR(ierr);
1004  ierr = MatAssemblyEnd (_mat, MAT_FINAL_ASSEMBLY);
1005  LIBMESH_CHKERR(ierr);
1006 }
1007 
1008 template <typename T>
1009 void PetscMatrix<T>::flush ()
1010 {
1011  semiparallel_only();
1012 
1013  PetscErrorCode ierr=0;
1014 
1015  ierr = MatAssemblyBegin (_mat, MAT_FLUSH_ASSEMBLY);
1016  LIBMESH_CHKERR(ierr);
1017  ierr = MatAssemblyEnd (_mat, MAT_FLUSH_ASSEMBLY);
1018  LIBMESH_CHKERR(ierr);
1019 }
1020 
1021 
1022 
1023 template <typename T>
1024 numeric_index_type PetscMatrix<T>::m () const
1025 {
1026  libmesh_assert (this->initialized());
1027 
1028  PetscInt petsc_m=0, petsc_n=0;
1029  PetscErrorCode ierr=0;
1030 
1031  ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
1032  LIBMESH_CHKERR(ierr);
1033 
1034  return static_cast<numeric_index_type>(petsc_m);
1035 }
1036 
1037 template <typename T>
1038 numeric_index_type PetscMatrix<T>::local_m () const
1039 {
1040  libmesh_assert (this->initialized());
1041 
1042  PetscInt m = 0;
1043 
1044  auto ierr = MatGetLocalSize (_mat, &m, NULL);
1045  LIBMESH_CHKERR(ierr);
1046 
1047  return static_cast<numeric_index_type>(m);
1048 }
1049 
1050 template <typename T>
1051 numeric_index_type PetscMatrix<T>::n () const
1052 {
1053  libmesh_assert (this->initialized());
1054 
1055  PetscInt petsc_m=0, petsc_n=0;
1056  PetscErrorCode ierr=0;
1057 
1058  ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
1059  LIBMESH_CHKERR(ierr);
1060 
1061  return static_cast<numeric_index_type>(petsc_n);
1062 }
1063 
1064 template <typename T>
1065 numeric_index_type PetscMatrix<T>::local_n () const
1066 {
1067  libmesh_assert (this->initialized());
1068 
1069  PetscInt n = 0;
1070 
1071  auto ierr = MatGetLocalSize (_mat, NULL, &n);
1072  LIBMESH_CHKERR(ierr);
1073 
1074  return static_cast<numeric_index_type>(n);
1075 }
1076 
1077 template <typename T>
1078 void PetscMatrix<T>::get_local_size (numeric_index_type & m,
1079  numeric_index_type & n) const
1080 {
1081  libmesh_assert (this->initialized());
1082 
1083  PetscInt petsc_m = 0, petsc_n = 0;
1084 
1085  auto ierr = MatGetLocalSize (_mat, &petsc_m, &petsc_n);
1086  LIBMESH_CHKERR(ierr);
1087 
1088  m = static_cast<numeric_index_type>(petsc_m);
1089  n = static_cast<numeric_index_type>(petsc_n);
1090 }
1091 
1092 template <typename T>
1093 numeric_index_type PetscMatrix<T>::row_start () const
1094 {
1095  libmesh_assert (this->initialized());
1096 
1097  PetscInt start=0, stop=0;
1098  PetscErrorCode ierr=0;
1099 
1100  ierr = MatGetOwnershipRange(_mat, &start, &stop);
1101  LIBMESH_CHKERR(ierr);
1102 
1103  return static_cast<numeric_index_type>(start);
1104 }
1105 
1106 
1107 
1108 template <typename T>
1109 numeric_index_type PetscMatrix<T>::row_stop () const
1110 {
1111  libmesh_assert (this->initialized());
1112 
1113  PetscInt start=0, stop=0;
1114  PetscErrorCode ierr=0;
1115 
1116  ierr = MatGetOwnershipRange(_mat, &start, &stop);
1117  LIBMESH_CHKERR(ierr);
1118 
1119  return static_cast<numeric_index_type>(stop);
1120 }
1121 
1122 
1123 
1124 template <typename T>
1125 void PetscMatrix<T>::set (const numeric_index_type i,
1126  const numeric_index_type j,
1127  const T value)
1128 {
1129  libmesh_assert (this->initialized());
1130 
1131  PetscErrorCode ierr=0;
1132  PetscInt i_val=i, j_val=j;
1133 
1134  PetscScalar petsc_value = static_cast<PetscScalar>(value);
1135  ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1136  &petsc_value, INSERT_VALUES);
1137  LIBMESH_CHKERR(ierr);
1138 }
1139 
1140 
1141 
1142 template <typename T>
1143 void PetscMatrix<T>::add (const numeric_index_type i,
1144  const numeric_index_type j,
1145  const T value)
1146 {
1147  libmesh_assert (this->initialized());
1148 
1149  PetscErrorCode ierr=0;
1150  PetscInt i_val=i, j_val=j;
1151 
1152  PetscScalar petsc_value = static_cast<PetscScalar>(value);
1153  ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1154  &petsc_value, ADD_VALUES);
1155  LIBMESH_CHKERR(ierr);
1156 }
1157 
1158 
1159 
1160 template <typename T>
1161 void PetscMatrix<T>::add_matrix (const DenseMatrix<T> & dm,
1162  const std::vector<numeric_index_type> & dof_indices)
1163 {
1164  this->add_matrix (dm, dof_indices, dof_indices);
1165 }
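// Usage sketch (illustration only): typical element-matrix assembly, where
// Ke and dof_indices stand in for hypothetical per-element data:
//   DenseMatrix<Number> Ke(n_dofs, n_dofs);
//   std::vector<numeric_index_type> dof_indices;
//   // ... fill Ke and dof_indices for the current element ...
//   mat.add_matrix(Ke, dof_indices);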
1166 
1167 
1168 
1169 
1170 
1171 
1172 
1173 template <typename T>
1174 void PetscMatrix<T>::add (const T a_in, const SparseMatrix<T> & X_in)
1175 {
1176  libmesh_assert (this->initialized());
1177 
1178  // sanity check. but this cannot avoid
1179  // crash due to incompatible sparsity structure...
1180  libmesh_assert_equal_to (this->m(), X_in.m());
1181  libmesh_assert_equal_to (this->n(), X_in.n());
1182 
1183  PetscScalar a = static_cast<PetscScalar> (a_in);
1184  const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1185 
1186  libmesh_assert (X);
1187 
1188  PetscErrorCode ierr=0;
1189 
1190  // the matrix from which we copy the values has to be assembled/closed
1191  libmesh_assert(X->closed());
1192 
1193  semiparallel_only();
1194 
1195  ierr = MatAXPY(_mat, a, X->_mat, DIFFERENT_NONZERO_PATTERN);
1196  LIBMESH_CHKERR(ierr);
1197 }
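// Usage sketch (illustration only): accumulating A <- A + 2*B for two
// assembled matrices with compatible sparsity patterns:
//   A.add(2., B);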
1198 
1199 
1200 
1201 
1202 template <typename T>
1203 T PetscMatrix<T>::operator () (const numeric_index_type i_in,
1204  const numeric_index_type j_in) const
1205 {
1206  libmesh_assert (this->initialized());
1207 
1208  // PETSc 2.2.1 & newer
1209  const PetscScalar * petsc_row;
1210  const PetscInt * petsc_cols;
1211 
1212  // If the entry is not in the sparse matrix, it is 0.
1213  T value=0.;
1214 
1215  PetscErrorCode
1216  ierr=0;
1217  PetscInt
1218  ncols=0,
1219  i_val=static_cast<PetscInt>(i_in),
1220  j_val=static_cast<PetscInt>(j_in);
1221 
1222 
1223  // the matrix needs to be closed for this to work
1224  // this->close();
1225  // but closing it is a semiparallel operation; we want operator()
1226  // to run on one processor.
1227  libmesh_assert(this->closed());
1228 
1229  ierr = MatGetRow(_mat, i_val, &ncols, &petsc_cols, &petsc_row);
1230  LIBMESH_CHKERR(ierr);
1231 
1232  // Perform a binary search to find the contiguous index in
1233  // petsc_cols (resp. petsc_row) corresponding to global index j_val
1234  std::pair<const PetscInt *, const PetscInt *> p =
1235  std::equal_range (petsc_cols, petsc_cols + ncols, j_val);
1236 
1237  // Found an entry for j_val
1238  if (p.first != p.second)
1239  {
1240  // The entry in the contiguous row corresponding
1241  // to the j_val column of interest
1242  const std::size_t j =
1243  std::distance (const_cast<PetscInt *>(petsc_cols),
1244  const_cast<PetscInt *>(p.first));
1245 
1246  libmesh_assert_less (j, ncols);
1247  libmesh_assert_equal_to (petsc_cols[j], j_val);
1248 
1249  value = static_cast<T> (petsc_row[j]);
1250  }
1251 
1252  ierr = MatRestoreRow(_mat, i_val,
1253  &ncols, &petsc_cols, &petsc_row);
1254  LIBMESH_CHKERR(ierr);
1255 
1256  return value;
1257 }
1258 
1259 template <typename T>
1260 void PetscMatrix<T>::get_row (numeric_index_type i_in,
1261  std::vector<numeric_index_type> & indices,
1262  std::vector<T> & values) const
1263 {
1264  libmesh_assert (this->initialized());
1265 
1266  const PetscScalar * petsc_row;
1267  const PetscInt * petsc_cols;
1268 
1269  PetscErrorCode ierr=0;
1270  PetscInt
1271  ncols=0,
1272  i_val = static_cast<PetscInt>(i_in);
1273 
1274  // the matrix needs to be closed for this to work
1275  // this->close();
1276  // but closing it is a semiparallel operation; we want operator()
1277  // to run on one processor.
1278  libmesh_assert(this->closed());
1279 
1280  // PETSc makes no effort at being thread safe. Helgrind complains about
1281  // possible data races even just in PetscFunctionBegin (due to things
1282  // like stack counter incrementing). Perhaps we could ignore
1283  // this, but there are legitimate data races for Mat data members like
1284  // mat->getrowactive between MatGetRow and MatRestoreRow. Moreover,
1285  // there could be a write into mat->rowvalues during MatGetRow from
1286  // one thread while we are attempting to read from mat->rowvalues
1287  // (through petsc_cols) during data copy in another thread. So
1288  // the safe thing to do is to lock the whole method
1289 
1290 #ifdef LIBMESH_HAVE_CXX11_THREAD
1291  std::lock_guard<std::mutex>
1292 #else
1293  Threads::spin_mutex::scoped_lock
1294 #endif
1295  lock(_petsc_matrix_mutex);
1296 
1297  ierr = MatGetRow(_mat, i_val, &ncols, &petsc_cols, &petsc_row);
1298  LIBMESH_CHKERR(ierr);
1299 
1300  // Copy the data
1301  indices.resize(static_cast<std::size_t>(ncols));
1302  values.resize(static_cast<std::size_t>(ncols));
1303 
1304  for (auto i : index_range(indices))
1305  {
1306  indices[i] = static_cast<numeric_index_type>(petsc_cols[i]);
1307  values[i] = static_cast<T>(petsc_row[i]);
1308  }
1309 
1310  ierr = MatRestoreRow(_mat, i_val,
1311  &ncols, &petsc_cols, &petsc_row);
1312  LIBMESH_CHKERR(ierr);
1313 }
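// Usage sketch (illustration only): reading back the column indices and
// values of one locally owned row of a closed matrix:
//   std::vector<numeric_index_type> cols;
//   std::vector<Number> vals;
//   mat.get_row(mat.row_start(), cols, vals);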
1314 
1315 
1316 template <typename T>
1317 bool PetscMatrix<T>::closed() const
1318 {
1319  libmesh_assert (this->initialized());
1320 
1321  PetscErrorCode ierr=0;
1322  PetscBool assembled;
1323 
1324  ierr = MatAssembled(_mat, &assembled);
1325  LIBMESH_CHKERR(ierr);
1326 
1327  return (assembled == PETSC_TRUE);
1328 }
1329 
1330 template <typename T>
1331 void PetscMatrix<T>::set_destroy_mat_on_exit (bool destroy)
1332 {
1333  this->_destroy_mat_on_exit = destroy;
1334 }
1335 
1336 
1337 template <typename T>
1338 void PetscMatrix<T>::swap (PetscMatrix<T> & m_in)
1339 {
1340  std::swap(_mat, m_in._mat);
1341  std::swap(_destroy_mat_on_exit, m_in._destroy_mat_on_exit);
1342 }
1343 
1344 
1345 
1346 //------------------------------------------------------------------
1347 // Explicit instantiations
1348 template class PetscMatrix<Number>;
1349 
1350 } // namespace libMesh
1351 
1352 
1353 #endif // #ifdef LIBMESH_HAVE_PETSC