libMesh
petsc_vector.C
1 // The libMesh Finite Element Library.
2 // Copyright (C) 2002-2025 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 
4 // This library is free software; you can redistribute it and/or
5 // modify it under the terms of the GNU Lesser General Public
6 // License as published by the Free Software Foundation; either
7 // version 2.1 of the License, or (at your option) any later version.
8 
9 // This library is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 // Lesser General Public License for more details.
13 
14 // You should have received a copy of the GNU Lesser General Public
15 // License along with this library; if not, write to the Free Software
16 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 
18 #include "libmesh/petsc_vector.h"
19 
20 #ifdef LIBMESH_HAVE_PETSC
21 
22 // libMesh includes
23 #include "libmesh/petsc_matrix_base.h"
24 #include "libmesh/dense_subvector.h"
25 #include "libmesh/dense_vector.h"
26 #include "libmesh/int_range.h"
27 #include "libmesh/petsc_macro.h"
28 #include "libmesh/wrapped_petsc.h"
29 
30 // TIMPI includes
31 #include "timpi/op_function.h"
32 #include "timpi/parallel_implementation.h"
33 #include "timpi/standard_type.h"
34 
35 // C++ includes
36 #include <numeric> // std::iota
37 
38 namespace libMesh
39 {
40 //-----------------------------------------------------------------------
41 // PetscVector members
42 
43 template <typename T>
44 T PetscVector<T>::sum () const
45 {
46  this->_restore_array();
47  libmesh_assert(this->closed());
48 
49  PetscScalar value=0.;
50 
51  LibmeshPetscCall(VecSum (_vec, &value));
52 
53  return static_cast<T>(value);
54 }
55 
56 
57 
58 template <typename T>
59 template <NormType N>
60 Real PetscVector<T>::norm () const
61 {
62  parallel_object_only();
63 
64  this->_restore_array();
65  libmesh_assert(this->closed());
66 
67  PetscReal value=0.;
68 
69  LibmeshPetscCall(VecNorm (_vec, N, &value));
70 
71  return static_cast<Real>(value);
72 }
73 template <typename T>
74 Real PetscVector<T>::l1_norm () const
75 {
76  return PetscVector<T>::norm<NORM_1>();
77 }
78 template <typename T>
79 Real PetscVector<T>::l2_norm () const
80 {
81  return PetscVector<T>::norm<NORM_2>();
82 }
83 template <typename T>
84 Real PetscVector<T>::linfty_norm () const
85 {
86  return PetscVector<T>::norm<NORM_INFINITY>();
87 }
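// Usage sketch (not from the original source; assumes a vector `v` that has
// already been init()'d and close()'d): the three norms above are thin
// wrappers around VecNorm() via the templated norm<NormType>() helper.
//
//   Real l1  = v.l1_norm();      // VecNorm(_vec, NORM_1, ...)
//   Real l2  = v.l2_norm();      // VecNorm(_vec, NORM_2, ...)
//   Real max = v.linfty_norm();  // VecNorm(_vec, NORM_INFINITY, ...)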
88 
89 
90 
91 template <typename T>
92 NumericVector<T> &
93 PetscVector<T>::operator += (const NumericVector<T> & v)
94 {
95  parallel_object_only();
96 
97  this->_restore_array();
98  libmesh_assert(this->closed());
99 
100  this->add(1., v);
101 
102  return *this;
103 }
104 
105 
106 
107 template <typename T>
108 NumericVector<T> &
109 PetscVector<T>::operator -= (const NumericVector<T> & v)
110 {
111  parallel_object_only();
112 
113  this->_restore_array();
114  libmesh_assert(this->closed());
115 
116  this->add(-1., v);
117 
118  return *this;
119 }
120 
121 
122 
123 template <typename T>
124 void PetscVector<T>::set (const numeric_index_type i, const T value)
125 {
126  this->_restore_array();
127  libmesh_assert_less (i, size());
128 
129  PetscInt i_val = static_cast<PetscInt>(i);
130  PetscScalar petsc_value = PS(value);
131 
132  std::scoped_lock lock(this->_numeric_vector_mutex);
133  LibmeshPetscCall(VecSetValues (_vec, 1, &i_val, &petsc_value, INSERT_VALUES));
134 
135  this->_is_closed = false;
136 }
137 
138 
139 
140 template <typename T>
141 void PetscVector<T>::reciprocal()
142 {
143  parallel_object_only();
144 
145 
146  // VecReciprocal has been in PETSc since at least 2.3.3 days
147  LibmeshPetscCall(VecReciprocal(_vec));
148 }
149 
150 
151 
152 template <typename T>
153 void PetscVector<T>::conjugate()
154 {
155  parallel_object_only();
156 
157 
158  // We just call the PETSc VecConjugate
159  LibmeshPetscCall(VecConjugate(_vec));
160 }
161 
162 
163 
164 template <typename T>
165 void PetscVector<T>::add (const numeric_index_type i, const T value)
166 {
167  this->_restore_array();
168  libmesh_assert_less (i, size());
169 
170  PetscInt i_val = static_cast<PetscInt>(i);
171  PetscScalar petsc_value = PS(value);
172 
173  std::scoped_lock lock(this->_numeric_vector_mutex);
174  LibmeshPetscCall(VecSetValues (_vec, 1, &i_val, &petsc_value, ADD_VALUES));
175 
176  this->_is_closed = false;
177 }
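// Usage sketch (assumed indices, not part of the original source): set() and
// add() above only stage values through VecSetValues() and clear _is_closed,
// so the vector must be close()'d again before its entries are read.
//
//   x.add(i, 2.0);   // x(i) += 2  (ADD_VALUES)
//   x.add(j, 3.0);   // x(j) += 3
//   x.close();       // flush staged values across processors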
178 
179 
180 
181 template <typename T>
182 void PetscVector<T>::add_vector (const T * v,
183  const std::vector<numeric_index_type> & dof_indices)
184 {
185  // If we aren't adding anything just return
186  if (dof_indices.empty())
187  return;
188 
189  this->_restore_array();
190 
191  const PetscInt * i_val = reinterpret_cast<const PetscInt *>(dof_indices.data());
192  const PetscScalar * petsc_value = pPS(v);
193 
194  std::scoped_lock lock(this->_numeric_vector_mutex);
195  LibmeshPetscCall(VecSetValues (_vec, cast_int<PetscInt>(dof_indices.size()),
196  i_val, petsc_value, ADD_VALUES));
197 
198  this->_is_closed = false;
199 }
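// Usage sketch (assumed names, not from the original source): this pointer
// overload is what element-by-element assembly typically reaches when adding
// a local contribution into the global vector.
//
//   DenseVector<Number> Fe(n_dofs);           // element right-hand side
//   std::vector<numeric_index_type> dofs;     // e.g. filled by dof_map.dof_indices(elem, dofs)
//   rhs.add_vector(Fe.get_values().data(), dofs);
//   rhs.close();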
200 
201 
202 
203 template <typename T>
204 void PetscVector<T>::add_vector (const NumericVector<T> & v_in,
205  const SparseMatrix<T> & A_in)
206 {
207  parallel_object_only();
208 
209  this->_restore_array();
210  // Make sure the data passed in are really of Petsc types
211  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
212  const PetscMatrixBase<T> * A = cast_ptr<const PetscMatrixBase<T> *>(&A_in);
213 
214 
215  // We shouldn't close() the matrix for you, as that would potentially modify the state of a const object.
216  if (!A->closed())
217  {
218  libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector(v, A).\n"
219  "Please update your code, as this warning will become an error in a future release.");
220  libmesh_deprecated();
221  const_cast<PetscMatrixBase<T> *>(A)->close();
222  }
223 
224  // The const_cast<> is not elegant, but it is required since PETSc
225  // expects a non-const Mat.
226  LibmeshPetscCall(MatMultAdd(const_cast<PetscMatrixBase<T> *>(A)->mat(), v->_vec, _vec, _vec));
227 }
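// Note: MatMultAdd(A, x, y, w) computes w = y + A*x, so with y == w == _vec
// the call above performs *this += A * v_in, which is the documented
// NumericVector::add_vector(v, A) contract.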
228 
229 
230 
231 template <typename T>
232 void PetscVector<T>::add_vector_transpose (const NumericVector<T> & v_in,
233  const SparseMatrix<T> & A_in)
234 {
235  parallel_object_only();
236 
237  this->_restore_array();
238  // Make sure the data passed in are really of Petsc types
239  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
240  const PetscMatrixBase<T> * A = cast_ptr<const PetscMatrixBase<T> *>(&A_in);
241 
242 
243  // We shouldn't close() the matrix for you, as that would potentially modify the state of a const object.
244  if (!A->closed())
245  {
246  libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector_transpose(v, A).\n"
247  "Please update your code, as this warning will become an error in a future release.");
248  libmesh_deprecated();
249  const_cast<PetscMatrixBase<T> *>(A)->close();
250  }
251 
252  // The const_cast<> is not elegant, but it is required since PETSc
253  // expects a non-const Mat.
254  LibmeshPetscCall(MatMultTransposeAdd(const_cast<PetscMatrixBase<T> *>(A)->mat(), v->_vec, _vec, _vec));
255 }
256 
257 
258 
259 template <typename T>
260 void PetscVector<T>::add_vector_conjugate_transpose (const NumericVector<T> & v_in,
261  const SparseMatrix<T> & A_in)
262 {
263  parallel_object_only();
264 
265  this->_restore_array();
266  // Make sure the data passed in are really of Petsc types
267  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
268  const PetscMatrixBase<T> * A = cast_ptr<const PetscMatrixBase<T> *>(&A_in);
269 
270  // We shouldn't close() the matrix for you, as that would potentially modify the state of a const object.
271  if (!A->closed())
272  {
273  libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector_conjugate_transpose(v, A).\n"
274  "Please update your code, as this warning will become an error in a future release.");
275  libmesh_deprecated();
276  const_cast<PetscMatrixBase<T> *>(A)->close();
277  }
278 
279  // Store a temporary copy since MatMultHermitianTransposeAdd doesn't seem to work
280  // TODO: Find out why MatMultHermitianTransposeAdd doesn't work, might be a PETSc bug?
281  std::unique_ptr<NumericVector<Number>> this_clone = this->clone();
282 
283  // The const_cast<> is not elegant, but it is required since PETSc
284  // expects a non-const Mat.
285  LibmeshPetscCall(MatMultHermitianTranspose(const_cast<PetscMatrixBase<T> *>(A)->mat(), v->_vec, _vec));
286 
287  // Add the temporary copy to the matvec result
288  this->add(1., *this_clone);
289 }
290 
291 
292 
293 template <typename T>
294 void PetscVector<T>::add (const T v_in)
295 {
296  this->_get_array(false);
297 
298  for (numeric_index_type i=0; i<_local_size; i++)
299  _values[i] += PetscScalar(v_in);
300 }
301 
302 
303 
304 template <typename T>
305 void PetscVector<T>::add (const NumericVector<T> & v)
306 {
307  parallel_object_only();
308 
309  this->add (1., v);
310 }
311 
312 
313 
314 template <typename T>
315 void PetscVector<T>::add (const T a_in, const NumericVector<T> & v_in)
316 {
317  parallel_object_only();
318 
319  this->_restore_array();
320 
321  // VecAXPY doesn't support &x==&y
322  if (this == &v_in)
323  {
324  this->scale(a_in+1);
325  return;
326  }
327 
328  PetscScalar a = PS(a_in);
329 
330  // Make sure the NumericVector passed in is really a PetscVector
331  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
332  v->_restore_array();
333 
334  libmesh_assert_equal_to (this->size(), v->size());
335 
336  LibmeshPetscCall(VecAXPY(_vec, a, v->vec()));
337 
338  libmesh_assert(this->comm().verify(int(this->type())));
339 
340  if (this->type() == GHOSTED)
341  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
342 
343  this->_is_closed = true;
344 }
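// Note on the self-add special case above: VecAXPY(y, a, x) requires x != y,
// so v.add(a, v) is rewritten as v.scale(a + 1), which is algebraically
// identical: v + a*v == (1 + a)*v.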
345 
346 
347 
348 template <typename T>
349 void PetscVector<T>::insert (const T * v,
350  const std::vector<numeric_index_type> & dof_indices)
351 {
352  if (dof_indices.empty())
353  return;
354 
355  this->_restore_array();
356 
357  PetscInt * idx_values = numeric_petsc_cast(dof_indices.data());
358  std::scoped_lock lock(this->_numeric_vector_mutex);
359  LibmeshPetscCall(VecSetValues (_vec, cast_int<PetscInt>(dof_indices.size()),
360  idx_values, pPS(v), INSERT_VALUES));
361 
362  this->_is_closed = false;
363 }
364 
365 
366 
367 template <typename T>
368 void PetscVector<T>::scale (const T factor_in)
369 {
370  parallel_object_only();
371 
372  this->_restore_array();
373 
374  PetscScalar factor = PS(factor_in);
375 
376  LibmeshPetscCall(VecScale(_vec, factor));
377 
378  libmesh_assert(this->comm().verify(int(this->type())));
379 
380  if (this->type() == GHOSTED)
381  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
382 }
383 
384 template <typename T>
385 NumericVector<T> & PetscVector<T>::operator *= (const NumericVector<T> & v)
386 {
387  parallel_object_only();
388 
389 
390  const PetscVector<T> * v_vec = cast_ptr<const PetscVector<T> *>(&v);
391 
392  LibmeshPetscCall(VecPointwiseMult(_vec, _vec, v_vec->_vec));
393 
394  return *this;
395 }
396 
397 template <typename T>
398 NumericVector<T> & PetscVector<T>::operator /= (const NumericVector<T> & v)
399 {
400  parallel_object_only();
401 
402 
403  const PetscVector<T> * v_vec = cast_ptr<const PetscVector<T> *>(&v);
404 
405  LibmeshPetscCall(VecPointwiseDivide(_vec, _vec, v_vec->_vec));
406 
407  return *this;
408 }
409 
410 template <typename T>
411 void PetscVector<T>::abs()
412 {
413  parallel_object_only();
414 
415  this->_restore_array();
416 
417  LibmeshPetscCall(VecAbs(_vec));
418 
419  libmesh_assert(this->comm().verify(int(this->type())));
420 
421  if (this->type() == GHOSTED)
422  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
423 }
424 
425 template <typename T>
426 T PetscVector<T>::dot (const NumericVector<T> & v_in) const
427 {
428  parallel_object_only();
429 
430  this->_restore_array();
431 
432  // Error flag
433 
434  // Return value
435  PetscScalar value=0.;
436 
437  // Make sure the NumericVector passed in is really a PetscVector
438  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
439 
440  // 2.3.x (at least) style. Untested for previous versions.
441  LibmeshPetscCall(VecDot(this->_vec, v->_vec, &value));
442 
443  return static_cast<T>(value);
444 }
445 
446 template <typename T>
447 T PetscVector<T>::indefinite_dot (const NumericVector<T> & v_in) const
448 {
449  parallel_object_only();
450 
451  this->_restore_array();
452 
453  // Error flag
454 
455  // Return value
456  PetscScalar value=0.;
457 
458  // Make sure the NumericVector passed in is really a PetscVector
459  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
460 
461  // 2.3.x (at least) style. Untested for previous versions.
462  LibmeshPetscCall(VecTDot(this->_vec, v->_vec, &value));
463 
464  return static_cast<T>(value);
465 }
466 
467 
468 template <typename T>
469 NumericVector<T> &
470 PetscVector<T>::operator = (const T s_in)
471 {
472  parallel_object_only();
473 
474  this->_restore_array();
475  libmesh_assert(this->closed());
476 
477  PetscScalar s = PS(s_in);
478 
479  if (this->size() != 0)
480  {
481  LibmeshPetscCall(VecSet(_vec, s));
482 
483  libmesh_assert(this->comm().verify(int(this->type())));
484 
485  if (this->type() == GHOSTED)
486  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
487  }
488 
489  return *this;
490 }
491 
492 
493 
494 template <typename T>
495 NumericVector<T> &
496 PetscVector<T>::operator = (const NumericVector<T> & v_in)
497 {
498  parallel_object_only();
499 
500  // Make sure the NumericVector passed in is really a PetscVector
501  const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
502 
503  *this = *v;
504 
505  return *this;
506 }
507 
508 
509 
510 template <typename T>
511 PetscVector<T> &
512 PetscVector<T>::operator = (const PetscVector<T> & v)
513 {
514  parallel_object_only();
515 
516  this->_restore_array();
517  v._restore_array();
518 
519  libmesh_assert_equal_to (this->size(), v.size());
520  libmesh_assert_equal_to (this->local_size(), v.local_size());
521  libmesh_assert (v.closed());
522 
523 
524  LibmeshPetscCall(VecCopy (v._vec, this->_vec));
525 
526  libmesh_assert(this->comm().verify(int(this->type())));
527 
528  if (this->type() == GHOSTED)
529  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
530 
531  this->_is_closed = true;
532 
533  return *this;
534 }
535 
536 
537 
538 template <typename T>
539 NumericVector<T> &
540 PetscVector<T>::operator = (const std::vector<T> & v)
541 {
542  parallel_object_only();
543 
544  this->_get_array(false);
545 
546  /**
547  * Case 1: The vector is the same size as the global
548  * vector; copy only the entries in our local range.
549  */
550  if (this->size() == v.size())
551  {
552  numeric_index_type first = first_local_index();
553  numeric_index_type last = last_local_index();
554  for (numeric_index_type i=0; i<last-first; i++)
555  _values[i] = PS(v[first + i]);
556  }
557 
558  /**
559  * Case 2: The vector is the same size as our local
560  * piece; copy directly into the local storage.
561  */
562  else
563  {
564  for (numeric_index_type i=0; i<_local_size; i++)
565  _values[i] = PS(v[i]);
566  }
567 
568  // Make sure ghost dofs are up to date
569  if (this->type() == GHOSTED)
570  this->close();
571 
572  return *this;
573 }
574 
575 
576 
577 template <typename T>
578 void PetscVector<T>::localize (NumericVector<T> & v_local_in) const
579 {
580  parallel_object_only();
581 
582  this->_restore_array();
583 
584  // Make sure the NumericVector passed in is really a PetscVector
585  PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
586 
587  libmesh_assert(v_local);
588  // v_local_in should be closed
589  libmesh_assert(v_local->closed());
590  libmesh_assert_equal_to (v_local->size(), this->size());
591  // 1) v_local_in is a large vector to hold the whole world
592  // 2) v_local_in is a ghosted vector
593  // 3) v_local_in is a parallel vector
594  // Cases 2) and 3) should be scalable
595  libmesh_assert(this->size()==v_local->local_size() || this->local_size()==v_local->local_size());
596 
597 
598  if (v_local->type() == SERIAL && this->size() == v_local->local_size())
599  {
600  WrappedPetsc<VecScatter> scatter;
601  LibmeshPetscCall(VecScatterCreateToAll(_vec, scatter.get(), nullptr));
602  VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
603  }
604  // Two vectors have the same size, and we should just do a simple copy.
605  // v_local could be either a parallel or ghosted vector
606  else if (this->local_size() == v_local->local_size())
607  LibmeshPetscCall(VecCopy(_vec,v_local->_vec));
608  else
609  libmesh_error_msg("Vectors are inconsistent");
610 
611  // Make sure ghost dofs are up to date
612  // We do not call "close" here to save a global reduction
613  if (v_local->type() == GHOSTED)
614  VecGhostUpdateBeginEnd(this->comm(), v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
615 }
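// Usage sketch (assumed setup, not from the original source): localizing a
// PARALLEL vector into a SERIAL vector gives every processor a full copy.
//
//   PetscVector<Number> global(comm, PARALLEL);
//   PetscVector<Number> local(comm, SERIAL);
//   global.init(N, n_local);
//   local.init(N, false, SERIAL);
//   // ... assemble and close() global ...
//   global.localize(local);   // VecScatterCreateToAll + scatter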
616 
617 
618 
619 template <typename T>
620 void PetscVector<T>::localize (NumericVector<T> & v_local_in,
621  const std::vector<numeric_index_type> & send_list) const
622 {
623  parallel_object_only();
624 
625  libmesh_assert(this->comm().verify(int(this->type())));
626  libmesh_assert(this->comm().verify(int(v_local_in.type())));
627 
628  // FIXME: Workaround for a strange bug at large-scale.
629  // If we have ghosting, PETSc lets us just copy the solution, and
630  // doing so avoids a segfault?
631  if (v_local_in.type() == GHOSTED &&
632  this->type() == PARALLEL)
633  {
634  v_local_in = *this;
635  return;
636  }
637 
638  // Normal code path begins here
639 
640  this->_restore_array();
641 
642  // Make sure the NumericVector passed in is really a PetscVector
643  PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
644 
645  libmesh_assert(v_local);
646  libmesh_assert_equal_to (v_local->size(), this->size());
647  libmesh_assert_less_equal (send_list.size(), v_local->size());
648 
649  const numeric_index_type n_sl =
650  cast_int<numeric_index_type>(send_list.size());
651 
652  std::vector<PetscInt> idx(n_sl + this->local_size());
653  for (numeric_index_type i=0; i<n_sl; i++)
654  idx[i] = static_cast<PetscInt>(send_list[i]);
655  for (auto i : make_range(this->local_size()))
656  idx[n_sl+i] = i + this->first_local_index();
657 
658  // Create the index set & scatter objects
659  WrappedPetsc<IS> is;
660  PetscInt * idxptr = idx.empty() ? nullptr : idx.data();
661  LibmeshPetscCall(ISCreateGeneral(this->comm().get(), n_sl+this->local_size(),
662  idxptr, PETSC_USE_POINTER, is.get()));
663 
664  WrappedPetsc<VecScatter> scatter;
665  LibmeshPetscCall(VecScatterCreate(_vec, is,
666  v_local->_vec, is,
667  scatter.get()));
668 
669  // Perform the scatter
670  VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
671 
672  // Make sure ghost dofs are up to date
673  if (v_local->type() == GHOSTED)
674  v_local->close();
675 }
676 
677 
678 
679 template <typename T>
680 void PetscVector<T>::localize (std::vector<T> & v_local,
681  const std::vector<numeric_index_type> & indices) const
682 {
683  parallel_object_only();
684 
685  // Error code used to check the status of all PETSc function calls.
686 
687  // Create a sequential destination Vec with the right number of entries on each proc.
688  WrappedPetsc<Vec> dest;
689  LibmeshPetscCall(VecCreateSeq(PETSC_COMM_SELF, cast_int<PetscInt>(indices.size()), dest.get()));
690 
691  // Create an IS using the libmesh routine. PETSc does not own the
692  // IS memory in this case, it is automatically cleaned up by the
693  // std::vector destructor.
694  PetscInt * idxptr =
695  indices.empty() ? nullptr : numeric_petsc_cast(indices.data());
696  WrappedPetsc<IS> is;
697  LibmeshPetscCall(ISCreateGeneral(this->comm().get(), cast_int<PetscInt>(indices.size()), idxptr,
698  PETSC_USE_POINTER, is.get()));
699 
700  // Create the VecScatter object. "PETSC_NULL" means "use the identity IS".
701  WrappedPetsc<VecScatter> scatter;
702  LibmeshPetscCall(VecScatterCreate(_vec,
703  /*src is=*/is,
704  /*dest vec=*/dest,
705  /*dest is=*/LIBMESH_PETSC_NULLPTR,
706  scatter.get()));
707 
708  // Do the scatter
709  VecScatterBeginEnd(this->comm(), scatter, _vec, dest, INSERT_VALUES, SCATTER_FORWARD);
710 
711  // Get access to the values stored in dest.
712  PetscScalar * values;
713  LibmeshPetscCall(VecGetArray (dest, &values));
714 
715  // Store values into the provided v_local. Make sure there is enough
716  // space reserved and then clear out any existing entries before
717  // inserting.
718  v_local.reserve(indices.size());
719  v_local.clear();
720  v_local.insert(v_local.begin(), values, values+indices.size());
721 
722  // We are done using it, so restore the array.
723  LibmeshPetscCall(VecRestoreArray (dest, &values));
724 }
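// Usage sketch (assumed indices, not part of the original source): this
// overload gathers an arbitrary subset of entries into a plain std::vector,
// one value per requested index, regardless of which processor owns it.
//
//   std::vector<numeric_index_type> wanted = {0, 17, 42};  // hypothetical indices
//   std::vector<Number> vals;
//   x.localize(vals, wanted);   // vals.size() == wanted.size() afterwards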
725 
726 
727 
728 template <typename T>
729 void PetscVector<T>::localize (const numeric_index_type first_local_idx,
730  const numeric_index_type last_local_idx,
731  const std::vector<numeric_index_type> & send_list)
732 {
733  parallel_object_only();
734 
735  this->_restore_array();
736 
737  libmesh_assert_less_equal (send_list.size(), this->size());
738  libmesh_assert_less_equal (last_local_idx+1, this->size());
739 
740  const numeric_index_type my_size = this->size();
741  const numeric_index_type my_local_size = (last_local_idx + 1 - first_local_idx);
742 
743  // Don't bother for serial cases
744  // if ((first_local_idx == 0) &&
745  // (my_local_size == my_size))
746  // But we do need to stay in sync for degenerate cases
747  if (this->n_processors() == 1)
748  return;
749 
750 
751  // Build a parallel vector, initialize it with the local
752  // parts of (*this)
753  PetscVector<T> parallel_vec(this->comm(), PARALLEL);
754 
755  parallel_vec.init (my_size, my_local_size, true, PARALLEL);
756 
757 
758  // Copy part of *this into the parallel_vec
759  {
760  // Create idx, idx[i] = i+first_local_idx;
761  std::vector<PetscInt> idx(my_local_size);
762  std::iota (idx.begin(), idx.end(), first_local_idx);
763 
764  // Create the index set & scatter objects
765  WrappedPetsc<IS> is;
766  LibmeshPetscCall(ISCreateGeneral(this->comm().get(), my_local_size,
767  my_local_size ? idx.data() : nullptr, PETSC_USE_POINTER, is.get()));
768 
769  WrappedPetsc<VecScatter> scatter;
770  LibmeshPetscCall(VecScatterCreate(_vec, is,
771  parallel_vec._vec, is,
772  scatter.get()));
773 
774  // Perform the scatter
775  VecScatterBeginEnd(this->comm(), scatter, _vec, parallel_vec._vec, INSERT_VALUES, SCATTER_FORWARD);
776  }
777 
778  // localize like normal
779  parallel_vec.close();
780  parallel_vec.localize (*this, send_list);
781  this->close();
782 }
783 
784 
785 
786 template <typename T>
787 void PetscVector<T>::localize (std::vector<T> & v_local) const
788 {
789  parallel_object_only();
790 
791  this->_restore_array();
792 
793  // This function must be run on all processors at once
794  parallel_object_only();
795 
796  const PetscInt n = this->size();
797  const PetscInt nl = this->local_size();
798  PetscScalar * values;
799 
800  v_local.clear();
801  v_local.resize(n, 0.);
802 
803  LibmeshPetscCall(VecGetArray (_vec, &values));
804 
805  numeric_index_type ioff = first_local_index();
806 
807  for (PetscInt i=0; i<nl; i++)
808  v_local[i+ioff] = static_cast<T>(values[i]);
809 
810  LibmeshPetscCall(VecRestoreArray (_vec, &values));
811 
812  this->comm().sum(v_local);
813 }
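// Note: the overload above zero-fills a size()-long buffer, writes only the
// locally owned range, then sums across the communicator, so every processor
// ends up with the complete vector. This costs O(size()) memory per processor
// and is intended for small or debug-sized vectors.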
814 
815 
816 
817 // Full specialization for Real datatypes
818 #ifdef LIBMESH_USE_REAL_NUMBERS
819 
820 template <>
821 void PetscVector<Real>::localize_to_one (std::vector<Real> & v_local,
822  const processor_id_type
823  timpi_mpi_var(pid)) const
824 {
825  parallel_object_only();
826 
827  this->_restore_array();
828 
829  const PetscInt n = size();
830  PetscScalar * values;
831 
832  // only one processor
833  if (n_processors() == 1)
834  {
835  v_local.resize(n);
836 
837  LibmeshPetscCall(VecGetArray (_vec, &values));
838 
839  for (PetscInt i=0; i<n; i++)
840  v_local[i] = static_cast<Real>(values[i]);
841 
842  LibmeshPetscCall(VecRestoreArray (_vec, &values));
843  }
844 
845  // otherwise multiple processors
846 #ifdef LIBMESH_HAVE_MPI
847  else
848  {
849  if (pid == 0) // optimized version for localizing to 0
850  {
851  WrappedPetsc<Vec> vout;
852  WrappedPetsc<VecScatter> ctx;
853 
854  LibmeshPetscCall(VecScatterCreateToZero(_vec, ctx.get(), vout.get()));
855 
856  VecScatterBeginEnd(this->comm(), ctx, _vec, vout, INSERT_VALUES, SCATTER_FORWARD);
857 
858  if (processor_id() == 0)
859  {
860  v_local.resize(n);
861 
862  LibmeshPetscCall(VecGetArray (vout, &values));
863 
864  for (PetscInt i=0; i<n; i++)
865  v_local[i] = static_cast<Real>(values[i]);
866 
867  LibmeshPetscCall(VecRestoreArray (vout, &values));
868  }
869  }
870  else
871  {
872  v_local.resize(n);
873 
874  numeric_index_type ioff = this->first_local_index();
875  std::vector<Real> local_values (n, 0.);
876 
877  {
878  LibmeshPetscCall(VecGetArray (_vec, &values));
879 
880  const PetscInt nl = local_size();
881  for (PetscInt i=0; i<nl; i++)
882  local_values[i+ioff] = static_cast<Real>(values[i]);
883 
884  LibmeshPetscCall(VecRestoreArray (_vec, &values));
885  }
886 
887 
888  MPI_Reduce (local_values.data(), v_local.data(), n,
889  TIMPI::StandardType<Real>(),
890  TIMPI::OpFunction<Real>::sum(), pid,
891  this->comm().get());
892  }
893  }
894 #endif // LIBMESH_HAVE_MPI
895 }
896 
897 #endif
898 
899 
900 // Full specialization for Complex datatypes
901 #ifdef LIBMESH_USE_COMPLEX_NUMBERS
902 
903 template <>
904 void PetscVector<Complex>::localize_to_one (std::vector<Complex> & v_local,
905  const processor_id_type pid) const
906 {
907  parallel_object_only();
908 
909  this->_restore_array();
910 
911  const PetscInt n = size();
912  const PetscInt nl = local_size();
913  PetscScalar * values;
914 
915 
916  v_local.resize(n);
917 
918 
919  for (PetscInt i=0; i<n; i++)
920  v_local[i] = 0.;
921 
922  // only one processor
923  if (n_processors() == 1)
924  {
925  LibmeshPetscCall(VecGetArray (_vec, &values));
926 
927  for (PetscInt i=0; i<n; i++)
928  v_local[i] = static_cast<Complex>(values[i]);
929 
930  LibmeshPetscCall(VecRestoreArray (_vec, &values));
931  }
932 
933  // otherwise multiple processors
934  else
935  {
936  numeric_index_type ioff = this->first_local_index();
937 
938  /* in here the local values are stored, acting as send buffer for MPI
939  * initialize to zero, since we collect using MPI_SUM
940  */
941  std::vector<Real> real_local_values(n, 0.);
942  std::vector<Real> imag_local_values(n, 0.);
943 
944  {
945  LibmeshPetscCall(VecGetArray (_vec, &values));
946 
947  // provide my local share to the real and imag buffers
948  for (PetscInt i=0; i<nl; i++)
949  {
950  real_local_values[i+ioff] = static_cast<Complex>(values[i]).real();
951  imag_local_values[i+ioff] = static_cast<Complex>(values[i]).imag();
952  }
953 
954  LibmeshPetscCall(VecRestoreArray (_vec, &values));
955  }
956 
957  /* have buffers of the real and imaginary part of v_local.
958  * Once MPI_Reduce() collected all the real and imaginary
959  * parts in these std::vector<Real>, the values can be
960  * copied to v_local
961  */
962  std::vector<Real> real_v_local(n);
963  std::vector<Real> imag_v_local(n);
964 
965  // collect entries from other proc's in real_v_local, imag_v_local
966  MPI_Reduce (real_local_values.data(),
967  real_v_local.data(), n,
968  TIMPI::StandardType<Real>(),
969  TIMPI::OpFunction<Real>::sum(),
970  pid, this->comm().get());
971 
972  MPI_Reduce (imag_local_values.data(),
973  imag_v_local.data(), n,
974  TIMPI::StandardType<Real>(),
975  TIMPI::OpFunction<Real>::sum(),
976  pid, this->comm().get());
977 
978  // copy real_v_local and imag_v_local to v_local
979  for (PetscInt i=0; i<n; i++)
980  v_local[i] = Complex(real_v_local[i], imag_v_local[i]);
981  }
982 }
983 
984 #endif
985 
986 
987 
988 template <typename T>
989 void PetscVector<T>::pointwise_mult (const NumericVector<T> & vec1,
990  const NumericVector<T> & vec2)
991 {
992  parallel_object_only();
993 
994  this->_restore_array();
995 
996  // Convert arguments to PetscVector*.
997  const PetscVector<T> * vec1_petsc = cast_ptr<const PetscVector<T> *>(&vec1);
998  const PetscVector<T> * vec2_petsc = cast_ptr<const PetscVector<T> *>(&vec2);
999 
1000  // Call PETSc function.
1001  LibmeshPetscCall(VecPointwiseMult(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
1002 
1003  libmesh_assert(this->comm().verify(int(this->type())));
1004 
1005  if (this->type() == GHOSTED)
1006  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
1007 
1008  this->_is_closed = true;
1009 }
1010 
1011 template <typename T>
1012 void PetscVector<T>::pointwise_divide (const NumericVector<T> & vec1,
1013  const NumericVector<T> & vec2)
1014 {
1015  parallel_object_only();
1016 
1017  this->_restore_array();
1018 
1019  // Convert arguments to PetscVector*.
1020  const PetscVector<T> * const vec1_petsc = cast_ptr<const PetscVector<T> *>(&vec1);
1021  const PetscVector<T> * const vec2_petsc = cast_ptr<const PetscVector<T> *>(&vec2);
1022 
1023  // Call PETSc function.
1024  LibmeshPetscCall(VecPointwiseDivide(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
1025 
1026  libmesh_assert(this->comm().verify(int(this->type())));
1027 
1028  if (this->type() == GHOSTED)
1029  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
1030 
1031  this->_is_closed = true;
1032 }
1033 
1034 template <typename T>
1035 void PetscVector<T>::print_matlab (const std::string & name) const
1036 {
1037  parallel_object_only();
1038 
1039  this->_restore_array();
1040  libmesh_assert (this->closed());
1041 
1042 
1043  WrappedPetsc<PetscViewer> petsc_viewer;
1044  LibmeshPetscCall(PetscViewerCreate (this->comm().get(), petsc_viewer.get()));
1045 
1046  // Create an ASCII file containing the matrix
1047  // if a filename was provided.
1048  if (name != "")
1049  {
1050  LibmeshPetscCall(PetscViewerASCIIOpen(this->comm().get(),
1051  name.c_str(),
1052  petsc_viewer.get()));
1053 
1054 #if PETSC_VERSION_LESS_THAN(3,7,0)
1055  LibmeshPetscCall(PetscViewerSetFormat (petsc_viewer,
1056  PETSC_VIEWER_ASCII_MATLAB));
1057 #else
1058  LibmeshPetscCall(PetscViewerPushFormat (petsc_viewer,
1059  PETSC_VIEWER_ASCII_MATLAB));
1060 #endif
1061 
1062  LibmeshPetscCall(VecView (_vec, petsc_viewer));
1063  }
1064 
1065  // Otherwise the matrix will be dumped to the screen.
1066  else
1067  {
1068 
1069 #if PETSC_VERSION_LESS_THAN(3,7,0)
1070  LibmeshPetscCall(PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
1071  PETSC_VIEWER_ASCII_MATLAB));
1072 #else
1073  LibmeshPetscCall(PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
1074  PETSC_VIEWER_ASCII_MATLAB));
1075 #endif
1076 
1077  LibmeshPetscCall(VecView (_vec, PETSC_VIEWER_STDOUT_WORLD));
1078  }
1079 }
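// Usage sketch (assumed file name): write the vector in Matlab/Octave ASCII
// format, or dump it to stdout when no name is given.
//
//   x.print_matlab("x.m");   // creates an ASCII file loadable from Matlab
//   x.print_matlab();        // prints to PETSC_VIEWER_STDOUT_WORLD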
1080 
1081 
1082 
1083 
1084 
1085 template <typename T>
1086 void PetscVector<T>::create_subvector (NumericVector<T> & subvector,
1087  const std::vector<numeric_index_type> & rows,
1088  const bool supplying_global_rows) const
1089 {
1090  parallel_object_only();
1091 
1092  libmesh_error_msg_if(
1093  subvector.type() == GHOSTED,
1094  "We do not support scattering parallel information to ghosts for subvectors");
1095 
1096  this->_restore_array();
1097 
1098  // PETSc data structures
1099 
1100  // Make sure the passed in subvector is really a PetscVector
1101  PetscVector<T> * petsc_subvector = cast_ptr<PetscVector<T> *>(&subvector);
1102 
1103  // If the petsc_subvector is already initialized, we assume that the
1104  // user has already allocated the *correct* amount of space for it.
1105  // If not, we use the appropriate PETSc routines to initialize it.
1106  if (!petsc_subvector->initialized())
1107  {
1108  libmesh_assert(petsc_subvector->_type == AUTOMATIC || petsc_subvector->_type == PARALLEL);
1109 
1110  if (supplying_global_rows)
1111  // Initialize the petsc_subvector to have enough space to hold
1112  // the entries which will be scattered into it. Note: such an
1113  // init() function (where we let PETSc decide the number of local
1114  // entries) is not currently offered by the PetscVector
1115  // class. Should we differentiate here between sequential and
1116  // parallel vector creation based on this->n_processors() ?
1117  LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1118  PETSC_DECIDE, // n_local
1119  cast_int<PetscInt>(rows.size()), // n_global
1120  &(petsc_subvector->_vec)));
1121  else
1122  LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1123  cast_int<PetscInt>(rows.size()),
1124  PETSC_DETERMINE,
1125  &(petsc_subvector->_vec)));
1126 
1127  LibmeshPetscCall(VecSetFromOptions (petsc_subvector->_vec));
1128 
1129  // We created a parallel vector
1130  petsc_subvector->_type = PARALLEL;
1131 
1132  // Mark the subvector as initialized
1133  petsc_subvector->_is_initialized = true;
1134  }
1135  else
1136  {
1137  petsc_subvector->_restore_array();
1138  }
1139 
1140  std::vector<PetscInt> idx(rows.size());
1141  if (supplying_global_rows)
1142  std::iota (idx.begin(), idx.end(), 0);
1143  else
1144  {
1145  PetscInt start;
1146  LibmeshPetscCall(VecGetOwnershipRange(petsc_subvector->_vec, &start, nullptr));
1147  std::iota (idx.begin(), idx.end(), start);
1148  }
1149 
1150  // Construct index sets
1151  WrappedPetsc<IS> parent_is;
1152  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
1153  cast_int<PetscInt>(rows.size()),
1154  numeric_petsc_cast(rows.data()),
1155  PETSC_USE_POINTER,
1156  parent_is.get()));
1157 
1158  WrappedPetsc<IS> subvector_is;
1159  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
1160  cast_int<PetscInt>(rows.size()),
1161  idx.data(),
1162  PETSC_USE_POINTER,
1163  subvector_is.get()));
1164 
1165  // Construct the scatter object
1166  WrappedPetsc<VecScatter> scatter;
1167  LibmeshPetscCall(VecScatterCreate(this->_vec,
1168  parent_is,
1169  petsc_subvector->_vec,
1170  subvector_is,
1171  scatter.get()));
1172 
1173  // Actually perform the scatter
1174  VecScatterBeginEnd(this->comm(), scatter, this->_vec, petsc_subvector->_vec, INSERT_VALUES, SCATTER_FORWARD);
1175 
1176  petsc_subvector->_is_closed = true;
1177 }
1178 
1179 
1180 
1181 template <typename T>
1182 void PetscVector<T>::_get_array(bool read_only) const
1183 {
1184  libmesh_assert (this->initialized());
1185 
1186  bool initially_array_is_present = _array_is_present.load(std::memory_order_acquire);
1187 
1188  // If we already have a read/write array - and we're trying
1189  // to get a read only array - let's just use the read write
1190  if (initially_array_is_present && read_only && !_values_read_only)
1191  _read_only_values = _values;
1192 
1193  // If the values have already been retrieved and we're currently
1194  // trying to get a non-read only view (ie read/write) and the
1195  // values are currently read only... then we need to restore
1196  // the array first... and then retrieve it again.
1197  if (initially_array_is_present && !read_only && _values_read_only)
1198  {
1199  _restore_array();
1200  initially_array_is_present = false;
1201  }
1202 
1203  if (!initially_array_is_present)
1204  {
1205  std::scoped_lock lock(_petsc_get_restore_array_mutex);
1206  if (!_array_is_present.load(std::memory_order_relaxed))
1207  {
1208  if (this->type() != GHOSTED)
1209  {
1210  if (read_only)
1211  {
1212  LibmeshPetscCall(VecGetArrayRead(_vec, &_read_only_values));
1213  _values_read_only = true;
1214  }
1215  else
1216  {
1217  LibmeshPetscCall(VecGetArray(_vec, &_values));
1218  _values_read_only = false;
1219  }
1220  _local_size = this->local_size();
1221  }
1222  else
1223  {
1224  LibmeshPetscCall(VecGhostGetLocalForm (_vec,&_local_form));
1225 
1226  if (read_only)
1227  {
1228  LibmeshPetscCall(VecGetArrayRead(_local_form, &_read_only_values));
1229  _values_read_only = true;
1230  }
1231  else
1232  {
1233  LibmeshPetscCall(VecGetArray(_local_form, &_values));
1234  _values_read_only = false;
1235  }
1236 
1237  PetscInt my_local_size = 0;
1238  LibmeshPetscCall(VecGetLocalSize(_local_form, &my_local_size));
1239  _local_size = static_cast<numeric_index_type>(my_local_size);
1240  }
1241 
1242  { // cache ownership range
1243  PetscInt petsc_first=0, petsc_last=0;
1244  LibmeshPetscCall(VecGetOwnershipRange (_vec, &petsc_first, &petsc_last));
1245  _first = static_cast<numeric_index_type>(petsc_first);
1246  _last = static_cast<numeric_index_type>(petsc_last);
1247  }
1248  _array_is_present.store(true, std::memory_order_release);
1249  }
1250  }
1251 }
1252 
1253 
1254 
1255 template <typename T>
1256 void PetscVector<T>::_restore_array() const
1257 {
1258  libmesh_error_msg_if(_values_manually_retrieved,
1259  "PetscVector values were manually retrieved but are being automatically restored!");
1260 
1261  libmesh_assert (this->initialized());
1262  if (_array_is_present.load(std::memory_order_acquire))
1263  {
1264  std::scoped_lock lock(_petsc_get_restore_array_mutex);
1265  if (_array_is_present.load(std::memory_order_relaxed))
1266  {
1267  if (this->type() != GHOSTED)
1268  {
1269  if (_values_read_only)
1270  LibmeshPetscCall(VecRestoreArrayRead (_vec, &_read_only_values));
1271  else
1272  LibmeshPetscCall(VecRestoreArray (_vec, &_values));
1273 
1274  _values = nullptr;
1275  }
1276  else
1277  {
1278  if (_values_read_only)
1279  LibmeshPetscCall(VecRestoreArrayRead (_local_form, &_read_only_values));
1280  else
1281  LibmeshPetscCall(VecRestoreArray (_local_form, &_values));
1282 
1283  _values = nullptr;
1284  LibmeshPetscCall(VecGhostRestoreLocalForm (_vec,&_local_form));
1285  _local_form = nullptr;
1286  _local_size = 0;
1287  }
1288  _array_is_present.store(false, std::memory_order_release);
1289  }
1290  }
1291 }
1292 
1293 template <typename T>
1294 std::unique_ptr<NumericVector<T>>
1295 PetscVector<T>::get_subvector(const std::vector<numeric_index_type> & rows)
1296 {
1297  // Construct index set
1298  WrappedPetsc<IS> parent_is;
1299  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
1300  cast_int<PetscInt>(rows.size()),
1301  numeric_petsc_cast(rows.data()),
1302  PETSC_USE_POINTER,
1303  parent_is.get()));
1304 
1305  Vec subvec;
1306  LibmeshPetscCall(VecGetSubVector(_vec, parent_is, &subvec));
1307 
1308  this->_is_closed = false;
1309 
1310  return std::make_unique<PetscVector<T>>(subvec, this->comm());
1311 }
1312 
1313 template <typename T>
1314 void
1315 PetscVector<T>::restore_subvector(std::unique_ptr<NumericVector<T>> subvector,
1316  const std::vector<numeric_index_type> & rows)
1317 {
1318  auto * const petsc_subvector = cast_ptr<PetscVector<T> *>(subvector.get());
1319 
1320  // Construct index set
1321  WrappedPetsc<IS> parent_is;
1322  LibmeshPetscCall(ISCreateGeneral(this->comm().get(),
1323  cast_int<PetscInt>(rows.size()),
1324  numeric_petsc_cast(rows.data()),
1325  PETSC_USE_POINTER,
1326  parent_is.get()));
1327 
1328  Vec subvec = petsc_subvector->vec();
1329  LibmeshPetscCall(VecRestoreSubVector(_vec, parent_is, &subvec));
1330 
1331  if (this->type() == GHOSTED)
1332  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
1333 
1334  this->_is_closed = true;
1335 }
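// Usage sketch (assumed index set, not from the original source):
// get_subvector()/restore_subvector() wrap VecGetSubVector()/VecRestoreSubVector(),
// so the sub-view must be handed back before the parent vector is used again.
//
//   auto sub = x.get_subvector(rows);        // view of x at `rows`
//   sub->scale(2.);                          // operate on the view
//   x.restore_subvector(std::move(sub), rows);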
1336 
1337 //------------------------------------------------------------------
1338 // Explicit instantiations
1339 template class LIBMESH_EXPORT PetscVector<Number>;
1340 
1341 } // namespace libMesh
1342 
1343 
1344 
1345 #endif // #ifdef LIBMESH_HAVE_PETSC