petsc_matrix.C
1 // The libMesh Finite Element Library.
2 // Copyright (C) 2002-2017 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner
3 
4 // This library is free software; you can redistribute it and/or
5 // modify it under the terms of the GNU Lesser General Public
6 // License as published by the Free Software Foundation; either
7 // version 2.1 of the License, or (at your option) any later version.
8 
9 // This library is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 // Lesser General Public License for more details.
13 
14 // You should have received a copy of the GNU Lesser General Public
15 // License along with this library; if not, write to the Free Software
16 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17 
18 
19 // C++ includes
20 #include <unistd.h> // mkstemp
21 #include <fstream>
22 
23 #include "libmesh/libmesh_config.h"
24 
25 #ifdef LIBMESH_HAVE_PETSC
26 
27 // Local includes
28 #include "libmesh/petsc_matrix.h"
29 #include "libmesh/dof_map.h"
30 #include "libmesh/dense_matrix.h"
31 #include "libmesh/petsc_vector.h"
32 
33 // For some reason, the blocked matrix API calls below seem to break with PETSc 3.2 and presumably earlier.
34 // For example:
35 // [0]PETSC ERROR: --------------------- Error Message ------------------------------------
36 // [0]PETSC ERROR: Nonconforming object sizes!
37 // [0]PETSC ERROR: Attempt to set block size 3 with BAIJ 1!
38 // [0]PETSC ERROR: ------------------------------------------------------------------------
39 // so, as a cowardly workaround, disable the functionality in this translation unit for older PETSc versions
40 #if PETSC_VERSION_LESS_THAN(3,3,0)
41 # undef LIBMESH_ENABLE_BLOCKED_STORAGE
42 #endif
43 
44 
45 
46 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
47 
48 namespace
49 {
50 using namespace libMesh;
51 
52 // The historic libMesh n_nz & n_oz arrays are set up for PETSc's AIJ format.
53 // However, when the blocksize is >1, we need to transform these into
54 // their BAIJ counterparts.
55 inline
56 void transform_preallocation_arrays (const PetscInt blocksize,
57  const std::vector<numeric_index_type> & n_nz,
58  const std::vector<numeric_index_type> & n_oz,
59  std::vector<numeric_index_type> & b_n_nz,
60  std::vector<numeric_index_type> & b_n_oz)
61 {
62  libmesh_assert_equal_to (n_nz.size(), n_oz.size());
63  libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
64 
65  b_n_nz.clear(); b_n_nz.reserve(n_nz.size()/blocksize);
66  b_n_oz.clear(); b_n_oz.reserve(n_oz.size()/blocksize);
67 
68  for (std::size_t nn=0; nn<n_nz.size(); nn += blocksize)
69  {
70  b_n_nz.push_back (n_nz[nn]/blocksize);
71  b_n_oz.push_back (n_oz[nn]/blocksize);
72  }
73 }
74 }
75 
76 #endif
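
A small worked example of the transformation above (only available when blocked storage is enabled, since the helper lives in the anonymous namespace of this translation unit; the counts below are purely illustrative):

   std::vector<numeric_index_type> n_nz = {6, 6, 4, 4};  // on-processor nonzeros per scalar row
   std::vector<numeric_index_type> n_oz = {2, 2, 0, 0};  // off-processor nonzeros per scalar row
   std::vector<numeric_index_type> b_n_nz, b_n_oz;
   transform_preallocation_arrays(/*blocksize=*/2, n_nz, n_oz, b_n_nz, b_n_oz);
   // b_n_nz == {3, 2} and b_n_oz == {1, 0}: one entry per *block* row,
   // taken from the first scalar row of each block and divided by the blocksize.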
77 
78 
79 
80 namespace libMesh
81 {
82 
83 
84 //-----------------------------------------------------------------------
85 // PetscMatrix members
86 
87 
88 // Constructor
89 template <typename T>
90 PetscMatrix<T>::PetscMatrix(const Parallel::Communicator & comm_in) :
91  SparseMatrix<T>(comm_in),
92  _destroy_mat_on_exit(true)
93 {}
94 
95 
96 
97 // Constructor taking an existing Mat but not the responsibility
98 // for destroying it
99 template <typename T>
100 PetscMatrix<T>::PetscMatrix(Mat mat_in,
101  const Parallel::Communicator & comm_in) :
102  SparseMatrix<T>(comm_in),
103  _destroy_mat_on_exit(false)
104 {
105  this->_mat = mat_in;
106  this->_is_initialized = true;
107 }
108 
109 
110 
111 // Destructor
112 template <typename T>
113 PetscMatrix<T>::~PetscMatrix()
114 {
115  this->clear();
116 }
117 
118 
119 template <typename T>
120 void PetscMatrix<T>::init (const numeric_index_type m_in,
121  const numeric_index_type n_in,
122  const numeric_index_type m_l,
123  const numeric_index_type n_l,
124  const numeric_index_type nnz,
125  const numeric_index_type noz,
126  const numeric_index_type blocksize_in)
127 {
128  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
129  libmesh_ignore(blocksize_in);
130 
131  // Clear initialized matrices
132  if (this->initialized())
133  this->clear();
134 
135  this->_is_initialized = true;
136 
137 
138  PetscErrorCode ierr = 0;
139  PetscInt m_global = static_cast<PetscInt>(m_in);
140  PetscInt n_global = static_cast<PetscInt>(n_in);
141  PetscInt m_local = static_cast<PetscInt>(m_l);
142  PetscInt n_local = static_cast<PetscInt>(n_l);
143  PetscInt n_nz = static_cast<PetscInt>(nnz);
144  PetscInt n_oz = static_cast<PetscInt>(noz);
145 
146  ierr = MatCreate(this->comm().get(), &_mat);
147  LIBMESH_CHKERR(ierr);
148  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
149  LIBMESH_CHKERR(ierr);
150 
151 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
152  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
153  if (blocksize > 1)
154  {
155  // specified blocksize, bs>1.
156  // double check sizes.
157  libmesh_assert_equal_to (m_local % blocksize, 0);
158  libmesh_assert_equal_to (n_local % blocksize, 0);
159  libmesh_assert_equal_to (m_global % blocksize, 0);
160  libmesh_assert_equal_to (n_global % blocksize, 0);
161  libmesh_assert_equal_to (n_nz % blocksize, 0);
162  libmesh_assert_equal_to (n_oz % blocksize, 0);
163 
164  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
165  LIBMESH_CHKERR(ierr);
166  ierr = MatSetBlockSize(_mat, blocksize);
167  LIBMESH_CHKERR(ierr);
168  ierr = MatSeqBAIJSetPreallocation(_mat, blocksize, n_nz/blocksize, PETSC_NULL);
169  LIBMESH_CHKERR(ierr);
170  ierr = MatMPIBAIJSetPreallocation(_mat, blocksize,
171  n_nz/blocksize, PETSC_NULL,
172  n_oz/blocksize, PETSC_NULL);
173  LIBMESH_CHKERR(ierr);
174  }
175  else
176 #endif
177  {
178  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
179  LIBMESH_CHKERR(ierr);
180  ierr = MatSeqAIJSetPreallocation(_mat, n_nz, PETSC_NULL);
181  LIBMESH_CHKERR(ierr);
182  ierr = MatMPIAIJSetPreallocation(_mat, n_nz, PETSC_NULL, n_oz, PETSC_NULL);
183  LIBMESH_CHKERR(ierr);
184  }
185 
186  // Make it an error for PETSc to allocate new nonzero entries during assembly
187 #if PETSC_VERSION_LESS_THAN(3,0,0)
188  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR);
189 #else
190  ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
191 #endif
192  LIBMESH_CHKERR(ierr);
193 
194  // Is prefix information available somewhere? Perhaps pass in the system name?
195  ierr = MatSetOptionsPrefix(_mat, "");
196  LIBMESH_CHKERR(ierr);
197  ierr = MatSetFromOptions(_mat);
198  LIBMESH_CHKERR(ierr);
199 
200  this->zero ();
201 }
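
A usage sketch for this overload (not part of the file; the matrix size, per-row estimates, and the names comm and n_local are illustrative assumptions):

   #include "libmesh/petsc_matrix.h"
   using namespace libMesh;

   // Build a 100x100 matrix with n_local rows on this processor, reserving
   // roughly 10 on-processor and 5 off-processor nonzeros per row.
   PetscMatrix<Number> A(comm);              // comm: an existing Parallel::Communicator
   A.init(/*m=*/100, /*n=*/100,
          /*m_l=*/n_local, /*n_l=*/n_local,
          /*nnz=*/10, /*noz=*/5);
   A.add(0, 0, 1.);                          // accumulate entries ...
   A.close();                                // ... then assemble before use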
202 
203 
204 template <typename T>
205 void PetscMatrix<T>::init (const numeric_index_type m_in,
206  const numeric_index_type n_in,
207  const numeric_index_type m_l,
208  const numeric_index_type n_l,
209  const std::vector<numeric_index_type> & n_nz,
210  const std::vector<numeric_index_type> & n_oz,
211  const numeric_index_type blocksize_in)
212 {
213  // So compilers don't warn when !LIBMESH_ENABLE_BLOCKED_STORAGE
214  libmesh_ignore(blocksize_in);
215 
216  // Clear initialized matrices
217  if (this->initialized())
218  this->clear();
219 
220  this->_is_initialized = true;
221 
222  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
223  libmesh_assert_equal_to (n_nz.size(), m_l);
224  libmesh_assert_equal_to (n_oz.size(), m_l);
225 
226  PetscErrorCode ierr = 0;
227  PetscInt m_global = static_cast<PetscInt>(m_in);
228  PetscInt n_global = static_cast<PetscInt>(n_in);
229  PetscInt m_local = static_cast<PetscInt>(m_l);
230  PetscInt n_local = static_cast<PetscInt>(n_l);
231 
232  ierr = MatCreate(this->comm().get(), &_mat);
233  LIBMESH_CHKERR(ierr);
234  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
235  LIBMESH_CHKERR(ierr);
236 
237 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
238  PetscInt blocksize = static_cast<PetscInt>(blocksize_in);
239  if (blocksize > 1)
240  {
241  // specified blocksize, bs>1.
242  // double check sizes.
243  libmesh_assert_equal_to (m_local % blocksize, 0);
244  libmesh_assert_equal_to (n_local % blocksize, 0);
245  libmesh_assert_equal_to (m_global % blocksize, 0);
246  libmesh_assert_equal_to (n_global % blocksize, 0);
247 
248  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
249  LIBMESH_CHKERR(ierr);
250  ierr = MatSetBlockSize(_mat, blocksize);
251  LIBMESH_CHKERR(ierr);
252 
253  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
254  std::vector<numeric_index_type> b_n_nz, b_n_oz;
255 
256  transform_preallocation_arrays (blocksize,
257  n_nz, n_oz,
258  b_n_nz, b_n_oz);
259 
260  ierr = MatSeqBAIJSetPreallocation (_mat,
261  blocksize,
262  0,
263  numeric_petsc_cast(b_n_nz.empty() ? libmesh_nullptr : &b_n_nz[0]));
264  LIBMESH_CHKERR(ierr);
265 
266  ierr = MatMPIBAIJSetPreallocation (_mat,
267  blocksize,
268  0,
269  numeric_petsc_cast(b_n_nz.empty() ? libmesh_nullptr : &b_n_nz[0]),
270  0,
271  numeric_petsc_cast(b_n_oz.empty() ? libmesh_nullptr : &b_n_oz[0]));
272  LIBMESH_CHKERR(ierr);
273  }
274  else
275 #endif
276  {
277 
278  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
279  LIBMESH_CHKERR(ierr);
280  ierr = MatSeqAIJSetPreallocation (_mat,
281  0,
282  numeric_petsc_cast(n_nz.empty() ? libmesh_nullptr : &n_nz[0]));
283  LIBMESH_CHKERR(ierr);
284  ierr = MatMPIAIJSetPreallocation (_mat,
285  0,
286  numeric_petsc_cast(n_nz.empty() ? libmesh_nullptr : &n_nz[0]),
287  0,
288  numeric_petsc_cast(n_oz.empty() ? libmesh_nullptr : &n_oz[0]));
289  LIBMESH_CHKERR(ierr);
290  }
291 
292  // Is prefix information available somewhere? Perhaps pass in the system name?
293  ierr = MatSetOptionsPrefix(_mat, "");
294  LIBMESH_CHKERR(ierr);
295  ierr = MatSetFromOptions(_mat);
296  LIBMESH_CHKERR(ierr);
297 
298 
299  this->zero();
300 }
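
A sketch of the vector-based overload above (n_global, n_local and the per-row counts are illustrative assumptions); the key constraint is that both vectors carry one entry per local row:

   std::vector<numeric_index_type> nnz(n_local, 9);   // on-processor couplings per row
   std::vector<numeric_index_type> noz(n_local, 3);   // off-processor couplings per row
   PetscMatrix<Number> A(comm);
   A.init(n_global, n_global, n_local, n_local, nnz, noz);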
301 
302 
303 template <typename T>
304 void PetscMatrix<T>::init ()
305 {
306  libmesh_assert(this->_dof_map);
307 
308  // Clear initialized matrices
309  if (this->initialized())
310  this->clear();
311 
312  this->_is_initialized = true;
313 
314 
315  const numeric_index_type my_m = this->_dof_map->n_dofs();
316  const numeric_index_type my_n = my_m;
317  const numeric_index_type n_l = this->_dof_map->n_dofs_on_processor(this->processor_id());
318  const numeric_index_type m_l = n_l;
319 
320 
321  const std::vector<numeric_index_type> & n_nz = this->_dof_map->get_n_nz();
322  const std::vector<numeric_index_type> & n_oz = this->_dof_map->get_n_oz();
323 
324  // Make sure the sparsity pattern isn't empty unless the matrix is 0x0
325  libmesh_assert_equal_to (n_nz.size(), m_l);
326  libmesh_assert_equal_to (n_oz.size(), m_l);
327 
328  PetscErrorCode ierr = 0;
329  PetscInt m_global = static_cast<PetscInt>(my_m);
330  PetscInt n_global = static_cast<PetscInt>(my_n);
331  PetscInt m_local = static_cast<PetscInt>(m_l);
332  PetscInt n_local = static_cast<PetscInt>(n_l);
333 
334  ierr = MatCreate(this->comm().get(), &_mat);
335  LIBMESH_CHKERR(ierr);
336  ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
337  LIBMESH_CHKERR(ierr);
338 
339 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
340  PetscInt blocksize = static_cast<PetscInt>(this->_dof_map->block_size());
341  if (blocksize > 1)
342  {
343  // specified blocksize, bs>1.
344  // double check sizes.
345  libmesh_assert_equal_to (m_local % blocksize, 0);
346  libmesh_assert_equal_to (n_local % blocksize, 0);
347  libmesh_assert_equal_to (m_global % blocksize, 0);
348  libmesh_assert_equal_to (n_global % blocksize, 0);
349 
350  ierr = MatSetType(_mat, MATBAIJ); // Automatically chooses seqbaij or mpibaij
351  LIBMESH_CHKERR(ierr);
352  ierr = MatSetBlockSize(_mat, blocksize);
353  LIBMESH_CHKERR(ierr);
354 
355  // transform the per-entry n_nz and n_oz arrays into their block counterparts.
356  std::vector<numeric_index_type> b_n_nz, b_n_oz;
357 
358  transform_preallocation_arrays (blocksize,
359  n_nz, n_oz,
360  b_n_nz, b_n_oz);
361 
362  ierr = MatSeqBAIJSetPreallocation (_mat,
363  blocksize,
364  0,
365  numeric_petsc_cast(b_n_nz.empty() ? libmesh_nullptr : &b_n_nz[0]));
366  LIBMESH_CHKERR(ierr);
367 
368  ierr = MatMPIBAIJSetPreallocation (_mat,
369  blocksize,
370  0,
371  numeric_petsc_cast(b_n_nz.empty() ? libmesh_nullptr : &b_n_nz[0]),
372  0,
373  numeric_petsc_cast(b_n_oz.empty() ? libmesh_nullptr : &b_n_oz[0]));
374  LIBMESH_CHKERR(ierr);
375  }
376  else
377 #endif
378  {
379  // no block storage case
380  ierr = MatSetType(_mat, MATAIJ); // Automatically chooses seqaij or mpiaij
381  LIBMESH_CHKERR(ierr);
382 
383  ierr = MatSeqAIJSetPreallocation (_mat,
384  0,
385  numeric_petsc_cast(n_nz.empty() ? libmesh_nullptr : &n_nz[0]));
386  LIBMESH_CHKERR(ierr);
387  ierr = MatMPIAIJSetPreallocation (_mat,
388  0,
389  numeric_petsc_cast(n_nz.empty() ? libmesh_nullptr : &n_nz[0]),
390  0,
391  numeric_petsc_cast(n_oz.empty() ? libmesh_nullptr : &n_oz[0]));
392  LIBMESH_CHKERR(ierr);
393  }
394 
395  // Is prefix information available somewhere? Perhaps pass in the system name?
396  ierr = MatSetOptionsPrefix(_mat, "");
397  LIBMESH_CHKERR(ierr);
398  ierr = MatSetFromOptions(_mat);
399  LIBMESH_CHKERR(ierr);
400 
401  this->zero();
402 }
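
In application code this zero-argument init() is normally reached through a DofMap attached to the matrix. A minimal sketch, assuming a dof_map object with a computed sparsity pattern already exists:

   PetscMatrix<Number> A(comm);
   A.attach_dof_map(dof_map);   // sizes, n_nz/n_oz and block size now come from dof_map
   A.init();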
403 
404 
405 template <typename T>
406 void PetscMatrix<T>::update_preallocation_and_zero ()
407 {
408  libmesh_not_implemented();
409 }
410 
411 
412 template <typename T>
413 void PetscMatrix<T>::zero ()
414 {
415  libmesh_assert (this->initialized());
416 
417  semiparallel_only();
418 
419  PetscErrorCode ierr=0;
420 
421  PetscInt m_l, n_l;
422 
423  ierr = MatGetLocalSize(_mat,&m_l,&n_l);
424  LIBMESH_CHKERR(ierr);
425 
426  if (n_l)
427  {
428  ierr = MatZeroEntries(_mat);
429  LIBMESH_CHKERR(ierr);
430  }
431 }
432 
433 template <typename T>
434 void PetscMatrix<T>::zero_rows (std::vector<numeric_index_type> & rows, T diag_value)
435 {
436  libmesh_assert (this->initialized());
437 
438  semiparallel_only();
439 
440  PetscErrorCode ierr=0;
441 
442 #if PETSC_RELEASE_LESS_THAN(3,1,1)
443  if (!rows.empty())
444  ierr = MatZeroRows(_mat, rows.size(),
445  numeric_petsc_cast(&rows[0]), diag_value);
446  else
447  ierr = MatZeroRows(_mat, 0, PETSC_NULL, diag_value);
448 #else
449  // As of petsc-dev at the time of 3.1.0, MatZeroRows now takes two additional
450  // optional arguments. The optional arguments (x,b) can be used to specify the
451  // solutions for the zeroed rows (x) and right hand side (b) to update.
452  // Could be useful for setting boundary conditions...
453  if (!rows.empty())
454  ierr = MatZeroRows(_mat, cast_int<PetscInt>(rows.size()),
455  numeric_petsc_cast(&rows[0]), diag_value,
456  PETSC_NULL, PETSC_NULL);
457  else
458  ierr = MatZeroRows(_mat, 0, PETSC_NULL, diag_value, PETSC_NULL,
459  PETSC_NULL);
460 #endif
461 
462  LIBMESH_CHKERR(ierr);
463 }
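
As the comment above suggests, zero_rows() is handy for imposing Dirichlet-type constraints. A sketch (row indices illustrative; the corresponding right-hand-side adjustment is not shown):

   std::vector<numeric_index_type> bc_rows = {0, 17, 42};  // constrained dofs (illustrative)
   A.close();                    // MatZeroRows expects an assembled matrix
   A.zero_rows(bc_rows, 1.0);    // each listed row is cleared and its diagonal set to 1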
464 
465 template <typename T>
466 void PetscMatrix<T>::clear ()
467 {
468  PetscErrorCode ierr=0;
469 
470  if ((this->initialized()) && (this->_destroy_mat_on_exit))
471  {
472  semiparallel_only();
473 
474  ierr = LibMeshMatDestroy (&_mat);
475  LIBMESH_CHKERR(ierr);
476 
477  this->_is_initialized = false;
478  }
479 }
480 
481 
482 
483 template <typename T>
484 Real PetscMatrix<T>::l1_norm () const
485 {
486  libmesh_assert (this->initialized());
487 
488  semiparallel_only();
489 
490  PetscErrorCode ierr=0;
491  PetscReal petsc_value;
492  Real value;
493 
494  libmesh_assert (this->closed());
495 
496  ierr = MatNorm(_mat, NORM_1, &petsc_value);
497  LIBMESH_CHKERR(ierr);
498 
499  value = static_cast<Real>(petsc_value);
500 
501  return value;
502 }
503 
504 
505 
506 template <typename T>
507 Real PetscMatrix<T>::linfty_norm () const
508 {
509  libmesh_assert (this->initialized());
510 
511  semiparallel_only();
512 
513  PetscErrorCode ierr=0;
514  PetscReal petsc_value;
515  Real value;
516 
517  libmesh_assert (this->closed());
518 
519  ierr = MatNorm(_mat, NORM_INFINITY, &petsc_value);
520  LIBMESH_CHKERR(ierr);
521 
522  value = static_cast<Real>(petsc_value);
523 
524  return value;
525 }
526 
527 
528 
529 template <typename T>
530 void PetscMatrix<T>::print_matlab (const std::string & name) const
531 {
532  libmesh_assert (this->initialized());
533 
534  semiparallel_only();
535 
536  if (!this->closed())
537  {
538  libmesh_deprecated();
539  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
540  "Please update your code, as this warning will become an error in a future release.");
541  const_cast<PetscMatrix<T> *>(this)->close();
542  }
543 
544  PetscErrorCode ierr=0;
545  PetscViewer petsc_viewer;
546 
547 
548  ierr = PetscViewerCreate (this->comm().get(),
549  &petsc_viewer);
550  LIBMESH_CHKERR(ierr);
551 
552  /**
553   * Create an ASCII file containing the matrix
554   * if a filename was provided.
555   */
556  if (name != "")
557  {
558  ierr = PetscViewerASCIIOpen( this->comm().get(),
559  name.c_str(),
560  &petsc_viewer);
561  LIBMESH_CHKERR(ierr);
562 
563 #if PETSC_VERSION_LESS_THAN(3,7,0)
564  ierr = PetscViewerSetFormat (petsc_viewer,
565  PETSC_VIEWER_ASCII_MATLAB);
566 #else
567  ierr = PetscViewerPushFormat (petsc_viewer,
568  PETSC_VIEWER_ASCII_MATLAB);
569 #endif
570 
571  LIBMESH_CHKERR(ierr);
572 
573  ierr = MatView (_mat, petsc_viewer);
574  LIBMESH_CHKERR(ierr);
575  }
576 
577  /**
578   * Otherwise the matrix will be dumped to the screen.
579   */
580  else
581  {
582 #if PETSC_VERSION_LESS_THAN(3,7,0)
583  ierr = PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
584  PETSC_VIEWER_ASCII_MATLAB);
585 #else
586  ierr = PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
587  PETSC_VIEWER_ASCII_MATLAB);
588 #endif
589 
590  LIBMESH_CHKERR(ierr);
591 
592  ierr = MatView (_mat, PETSC_VIEWER_STDOUT_WORLD);
593  LIBMESH_CHKERR(ierr);
594  }
595 
596 
597  /**
598   * Destroy the viewer.
599   */
600  ierr = LibMeshPetscViewerDestroy (&petsc_viewer);
601  LIBMESH_CHKERR(ierr);
602 }
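
A usage sketch for print_matlab() (the file name is an illustrative assumption):

   A.close();                       // assemble first, or the deprecation warning above fires
   A.print_matlab("jacobian.m");    // ASCII output in PETSC_VIEWER_ASCII_MATLAB format
   // Running the resulting script in MATLAB/Octave reconstructs the sparse matrix.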
603 
604 
605 
606 
607 
608 template <typename T>
609 void PetscMatrix<T>::print_personal(std::ostream & os) const
610 {
611  libmesh_assert (this->initialized());
612 
613  // Routine must be called in parallel on parallel matrices
614  // and serial on serial matrices.
615  semiparallel_only();
616 
617  // #ifndef NDEBUG
618  // if (os != std::cout)
619  // libMesh::err << "Warning! PETSc can only print to std::cout!" << std::endl;
620  // #endif
621 
622  // Matrix must be in an assembled state to be printed
623  if (!this->closed())
624  {
625  libmesh_deprecated();
626  libmesh_warning("The matrix must be assembled before calling PetscMatrix::print_personal().\n"
627  "Please update your code, as this warning will become an error in a future release.");
628  const_cast<PetscMatrix<T> *>(this)->close();
629  }
630 
631  PetscErrorCode ierr=0;
632 
633  // Print to screen if ostream is stdout
634  if (os.rdbuf() == std::cout.rdbuf())
635  {
636  ierr = MatView(_mat, PETSC_VIEWER_STDOUT_SELF);
637  LIBMESH_CHKERR(ierr);
638  }
639 
640  // Otherwise, print to the requested file, in a roundabout way...
641  else
642  {
643  // We will create a temporary filename, and file, for PETSc to
644  // write to.
645  std::string temp_filename;
646 
647  {
648  // Template for temporary filename
649  char c[] = "temp_petsc_matrix.XXXXXX";
650 
651  // Generate temporary, unique filename only on processor 0. We will
652  // use this filename for PetscViewerASCIIOpen, before copying it into
653  // the user's stream
654  if (this->processor_id() == 0)
655  {
656  int fd = mkstemp(c);
657 
658  // Check to see that mkstemp did not fail.
659  if (fd == -1)
660  libmesh_error_msg("mkstemp failed in PetscMatrix::print_personal()");
661 
662  // mkstemp returns a file descriptor for an open file,
663  // so let's close it before we hand it to PETSc!
664  ::close (fd);
665  }
666 
667  // Store temporary filename as string, makes it easier to broadcast
668  temp_filename = c;
669  }
670 
671  // Now broadcast the filename from processor 0 to all processors.
672  this->comm().broadcast(temp_filename);
673 
674  // PetscViewer object for passing to MatView
675  PetscViewer petsc_viewer;
676 
677  // This PETSc function only takes a string and handles the opening/closing
678  // of the file internally. Since print_personal() takes a reference to
679  // an ostream, we have to do an extra step... print_personal() should probably
680  // have a version that takes a string to get rid of this problem.
681  ierr = PetscViewerASCIIOpen( this->comm().get(),
682  temp_filename.c_str(),
683  &petsc_viewer);
684  LIBMESH_CHKERR(ierr);
685 
686  // Probably don't need to set the format if it's default...
687  // ierr = PetscViewerSetFormat (petsc_viewer,
688  // PETSC_VIEWER_DEFAULT);
689  // LIBMESH_CHKERR(ierr);
690 
691  // Finally print the matrix using the viewer
692  ierr = MatView (_mat, petsc_viewer);
693  LIBMESH_CHKERR(ierr);
694 
695  if (this->processor_id() == 0)
696  {
697  // Now the inefficient bit: open temp_filename as an istream and copy the contents
698  // into the user's desired ostream. We can't just do a direct file copy, since we don't even have a filename for the user's stream!
699  std::ifstream input_stream(temp_filename.c_str());
700  os << input_stream.rdbuf(); // The "most elegant" way to copy one stream into another.
701  // os.close(); // close not defined in ostream
702 
703  // Now remove the temporary file
704  input_stream.close();
705  std::remove(temp_filename.c_str());
706  }
707  }
708 }
709 
710 
711 
712 
713 
714 
715 template <typename T>
716 void PetscMatrix<T>::add_matrix(const DenseMatrix<T> & dm,
717  const std::vector<numeric_index_type> & rows,
718  const std::vector<numeric_index_type> & cols)
719 {
720  libmesh_assert (this->initialized());
721 
722  const numeric_index_type n_rows = dm.m();
723  const numeric_index_type n_cols = dm.n();
724 
725  libmesh_assert_equal_to (rows.size(), n_rows);
726  libmesh_assert_equal_to (cols.size(), n_cols);
727 
728  PetscErrorCode ierr=0;
729 
730  // These casts are required for PETSc <= 2.1.5
731  ierr = MatSetValues(_mat,
732  n_rows, numeric_petsc_cast(&rows[0]),
733  n_cols, numeric_petsc_cast(&cols[0]),
734  const_cast<PetscScalar *>(&dm.get_values()[0]),
735  ADD_VALUES);
736  LIBMESH_CHKERR(ierr);
737 }
738 
739 
740 
741 
742 
743 
744 template <typename T>
745 void PetscMatrix<T>::add_block_matrix (const DenseMatrix<T> & dm,
746  const std::vector<numeric_index_type> & brows,
747  const std::vector<numeric_index_type> & bcols)
748 {
749  libmesh_assert (this->initialized());
750 
751  const numeric_index_type n_brows =
752  cast_int<numeric_index_type>(brows.size());
753  const numeric_index_type n_bcols =
754  cast_int<numeric_index_type>(bcols.size());
755 
756  PetscErrorCode ierr=0;
757 
758 #ifndef NDEBUG
759  const numeric_index_type n_rows =
760  cast_int<numeric_index_type>(dm.m());
761  const numeric_index_type n_cols =
762  cast_int<numeric_index_type>(dm.n());
763  const numeric_index_type blocksize = n_rows / n_brows;
764 
765  libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
766  libmesh_assert_equal_to (blocksize*n_brows, n_rows);
767  libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
768 
769  PetscInt petsc_blocksize;
770  ierr = MatGetBlockSize(_mat, &petsc_blocksize);
771  LIBMESH_CHKERR(ierr);
772  libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
773 #endif
774 
775  // These casts are required for PETSc <= 2.1.5
776  ierr = MatSetValuesBlocked(_mat,
777  n_brows, numeric_petsc_cast(&brows[0]),
778  n_bcols, numeric_petsc_cast(&bcols[0]),
779  const_cast<PetscScalar *>(&dm.get_values()[0]),
780  ADD_VALUES);
781  LIBMESH_CHKERR(ierr);
782 }
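
A sketch of the blocked interface (block size, indices, and matrix contents are illustrative assumptions): with block size 3, a 6x6 dense element matrix spans 2 block rows and 2 block columns, and the index vectors hold *block* indices rather than scalar dof indices:

   DenseMatrix<Number> Ke(6, 6);                     // 2 x 2 blocks of size 3
   // ... fill Ke with element contributions ...
   std::vector<numeric_index_type> bidx = {4, 9};    // block row/column indices
   A.add_block_matrix(Ke, bidx, bidx);               // requires a BAIJ matrix with bs = 3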
783 
784 
785 
786 
787 
788 template <typename T>
789 void PetscMatrix<T>::_get_submatrix (SparseMatrix<T> & submatrix,
790  const std::vector<numeric_index_type> & rows,
791  const std::vector<numeric_index_type> & cols,
792  const bool reuse_submatrix) const
793 {
794  if (!this->closed())
795  {
796  libmesh_deprecated();
797  libmesh_warning("The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
798  "Please update your code, as this warning will become an error in a future release.");
799  const_cast<PetscMatrix<T> *>(this)->close();
800  }
801 
802  // Make sure the SparseMatrix passed in is really a PetscMatrix
803  PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
804 
805  // If we're not reusing submatrix and submatrix is already initialized
806  // then we need to clear it, otherwise we get a memory leak.
807  if (!reuse_submatrix && submatrix.initialized())
808  submatrix.clear();
809 
810  // Construct row and column index sets.
811  PetscErrorCode ierr=0;
812  IS isrow, iscol;
813 
814  ierr = ISCreateLibMesh(this->comm().get(),
815  rows.size(),
816  numeric_petsc_cast(&rows[0]),
817  PETSC_USE_POINTER,
818  &isrow); LIBMESH_CHKERR(ierr);
819 
820  ierr = ISCreateLibMesh(this->comm().get(),
821  cols.size(),
822  numeric_petsc_cast(&cols[0]),
823  PETSC_USE_POINTER,
824  &iscol); LIBMESH_CHKERR(ierr);
825 
826  // Extract submatrix
827  ierr = LibMeshCreateSubMatrix(_mat,
828  isrow,
829  iscol,
830 #if PETSC_RELEASE_LESS_THAN(3,0,1)
831  PETSC_DECIDE,
832 #endif
833  (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
834  &(petsc_submatrix->_mat)); LIBMESH_CHKERR(ierr);
835 
836  // Specify that the new submatrix is initialized and close it.
837  petsc_submatrix->_is_initialized = true;
838  petsc_submatrix->close();
839 
840  // Clean up PETSc data structures
841  ierr = LibMeshISDestroy(&isrow); LIBMESH_CHKERR(ierr);
842  ierr = LibMeshISDestroy(&iscol); LIBMESH_CHKERR(ierr);
843 }
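
The routine above is reached through the public SparseMatrix::create_submatrix() interface; a sketch, with illustrative index sets:

   std::vector<numeric_index_type> rows = {0, 1, 2};
   std::vector<numeric_index_type> cols = {0, 1, 2};
   PetscMatrix<Number> sub(comm);
   A.create_submatrix(sub, rows, cols);   // sub comes back initialized and closed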
844 
845 
846 
847 template <typename T>
848 void PetscMatrix<T>::get_diagonal (NumericVector<T> & dest) const
849 {
850  // Make sure the NumericVector passed in is really a PetscVector
851  PetscVector<T> & petsc_dest = cast_ref<PetscVector<T> &>(dest);
852 
853  // Needs a const_cast since PETSc does not work with const.
854  PetscErrorCode ierr =
855  MatGetDiagonal(const_cast<PetscMatrix<T> *>(this)->mat(),petsc_dest.vec()); LIBMESH_CHKERR(ierr);
856 }
857 
858 
859 
860 template <typename T>
861 void PetscMatrix<T>::get_transpose (SparseMatrix<T> & dest) const
862 {
863  // Make sure the SparseMatrix passed in is really a PetscMatrix
864  PetscMatrix<T> & petsc_dest = cast_ref<PetscMatrix<T> &>(dest);
865 
866  // If we aren't reusing the matrix then need to clear dest,
867  // otherwise we get a memory leak
868  if (&petsc_dest != this)
869  dest.clear();
870 
871  PetscErrorCode ierr;
872 #if PETSC_VERSION_LESS_THAN(3,0,0)
873  if (&petsc_dest == this)
874  ierr = MatTranspose(_mat,PETSC_NULL);
875  else
876  ierr = MatTranspose(_mat,&petsc_dest._mat);
877  LIBMESH_CHKERR(ierr);
878 #else
879  // FIXME - we can probably use MAT_REUSE_MATRIX in more situations
880  if (&petsc_dest == this)
881  ierr = MatTranspose(_mat,MAT_REUSE_MATRIX,&petsc_dest._mat);
882  else
883  ierr = MatTranspose(_mat,MAT_INITIAL_MATRIX,&petsc_dest._mat);
884  LIBMESH_CHKERR(ierr);
885 #endif
886 
887  // Specify that the transposed matrix is initialized and close it.
888  petsc_dest._is_initialized = true;
889  petsc_dest.close();
890 }
891 
892 
893 
894 
895 
896 template <typename T>
897 void PetscMatrix<T>::close ()
898 {
899  semiparallel_only();
900 
901  // BSK - 1/19/2004
902  // strictly this check should be OK, but it seems to
903  // fail on matrix-free matrices. Do they falsely
904  // state they are assembled? Check with the developers...
905  // if (this->closed())
906  // return;
907 
908  PetscErrorCode ierr=0;
909 
910  ierr = MatAssemblyBegin (_mat, MAT_FINAL_ASSEMBLY);
911  LIBMESH_CHKERR(ierr);
912  ierr = MatAssemblyEnd (_mat, MAT_FINAL_ASSEMBLY);
913  LIBMESH_CHKERR(ierr);
914 }
915 
916 template <typename T>
917 void PetscMatrix<T>::flush ()
918 {
919  semiparallel_only();
920 
921  PetscErrorCode ierr=0;
922 
923  ierr = MatAssemblyBegin (_mat, MAT_FLUSH_ASSEMBLY);
924  LIBMESH_CHKERR(ierr);
925  ierr = MatAssemblyEnd (_mat, MAT_FLUSH_ASSEMBLY);
926  LIBMESH_CHKERR(ierr);
927 }
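
The difference between close() and flush() mirrors PETSc's MAT_FINAL_ASSEMBLY vs. MAT_FLUSH_ASSEMBLY. A sketch of when flush() is needed (indices i, j are illustrative):

   A.add(i, j, 0.5);   // ADD_VALUES phase
   A.flush();          // intermediate assembly, required before switching insertion modes
   A.set(i, j, 2.0);   // INSERT_VALUES phase
   A.close();          // final assembly before the matrix is used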
928 
929 
930 
931 template <typename T>
932 numeric_index_type PetscMatrix<T>::m () const
933 {
934  libmesh_assert (this->initialized());
935 
936  PetscInt petsc_m=0, petsc_n=0;
937  PetscErrorCode ierr=0;
938 
939  ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
940  LIBMESH_CHKERR(ierr);
941 
942  return static_cast<numeric_index_type>(petsc_m);
943 }
944 
945 
946 
947 template <typename T>
948 numeric_index_type PetscMatrix<T>::n () const
949 {
950  libmesh_assert (this->initialized());
951 
952  PetscInt petsc_m=0, petsc_n=0;
953  PetscErrorCode ierr=0;
954 
955  ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
956  LIBMESH_CHKERR(ierr);
957 
958  return static_cast<numeric_index_type>(petsc_n);
959 }
960 
961 
962 
963 template <typename T>
964 numeric_index_type PetscMatrix<T>::row_start () const
965 {
966  libmesh_assert (this->initialized());
967 
968  PetscInt start=0, stop=0;
969  PetscErrorCode ierr=0;
970 
971  ierr = MatGetOwnershipRange(_mat, &start, &stop);
972  LIBMESH_CHKERR(ierr);
973 
974  return static_cast<numeric_index_type>(start);
975 }
976 
977 
978 
979 template <typename T>
980 numeric_index_type PetscMatrix<T>::row_stop () const
981 {
982  libmesh_assert (this->initialized());
983 
984  PetscInt start=0, stop=0;
985  PetscErrorCode ierr=0;
986 
987  ierr = MatGetOwnershipRange(_mat, &start, &stop);
988  LIBMESH_CHKERR(ierr);
989 
990  return static_cast<numeric_index_type>(stop);
991 }
992 
993 
994 
995 template <typename T>
996 void PetscMatrix<T>::set (const numeric_index_type i,
997  const numeric_index_type j,
998  const T value)
999 {
1000  libmesh_assert (this->initialized());
1001 
1002  PetscErrorCode ierr=0;
1003  PetscInt i_val=i, j_val=j;
1004 
1005  PetscScalar petsc_value = static_cast<PetscScalar>(value);
1006  ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1007  &petsc_value, INSERT_VALUES);
1008  LIBMESH_CHKERR(ierr);
1009 }
1010 
1011 
1012 
1013 template <typename T>
1014 void PetscMatrix<T>::add (const numeric_index_type i,
1015  const numeric_index_type j,
1016  const T value)
1017 {
1018  libmesh_assert (this->initialized());
1019 
1020  PetscErrorCode ierr=0;
1021  PetscInt i_val=i, j_val=j;
1022 
1023  PetscScalar petsc_value = static_cast<PetscScalar>(value);
1024  ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1025  &petsc_value, ADD_VALUES);
1026  LIBMESH_CHKERR(ierr);
1027 }
1028 
1029 
1030 
1031 template <typename T>
1032 void PetscMatrix<T>::add_matrix (const DenseMatrix<T> & dm,
1033  const std::vector<numeric_index_type> & dof_indices)
1034 {
1035  this->add_matrix (dm, dof_indices, dof_indices);
1036 }
1037 
1038 
1039 
1040 
1041 
1042 
1043 
1044 template <typename T>
1045 void PetscMatrix<T>::add (const T a_in, SparseMatrix<T> & X_in)
1046 {
1047  libmesh_assert (this->initialized());
1048 
1049  // Sanity check, although this cannot prevent a
1050  // crash due to an incompatible sparsity structure...
1051  libmesh_assert_equal_to (this->m(), X_in.m());
1052  libmesh_assert_equal_to (this->n(), X_in.n());
1053 
1054  PetscScalar a = static_cast<PetscScalar> (a_in);
1055  PetscMatrix<T> * X = cast_ptr<PetscMatrix<T> *> (&X_in);
1056 
1057  libmesh_assert (X);
1058 
1059  PetscErrorCode ierr=0;
1060 
1061  // the matrix from which we copy the values has to be assembled/closed
1062  libmesh_assert(X->closed());
1063 
1064  semiparallel_only();
1065 
1066  ierr = MatAXPY(_mat, a, X->_mat, DIFFERENT_NONZERO_PATTERN);
1067  LIBMESH_CHKERR(ierr);
1068 }
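
A sketch of the scaled accumulation above, Y <- Y + a*X; both matrices are assumed assembled and of equal dimensions:

   X.close();
   Y.close();
   Y.add(2.0, X);   // MatAXPY with DIFFERENT_NONZERO_PATTERN, so differing sparsity is tolerated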
1069 
1070 
1071 
1072 
1073 template <typename T>
1074 T PetscMatrix<T>::operator () (const numeric_index_type i_in,
1075  const numeric_index_type j_in) const
1076 {
1077  libmesh_assert (this->initialized());
1078 
1079  // PETSc 2.2.1 & newer
1080  const PetscScalar * petsc_row;
1081  const PetscInt * petsc_cols;
1082 
1083  // If the entry is not in the sparse matrix, it is 0.
1084  T value=0.;
1085 
1086  PetscErrorCode
1087  ierr=0;
1088  PetscInt
1089  ncols=0,
1090  i_val=static_cast<PetscInt>(i_in),
1091  j_val=static_cast<PetscInt>(j_in);
1092 
1093 
1094  // the matrix needs to be closed for this to work
1095  // this->close();
1096  // but closing it is a semiparallel operation; we want operator()
1097  // to run on one processor.
1098  libmesh_assert(this->closed());
1099 
1100  ierr = MatGetRow(_mat, i_val, &ncols, &petsc_cols, &petsc_row);
1101  LIBMESH_CHKERR(ierr);
1102 
1103  // Perform a binary search to find the contiguous index in
1104  // petsc_cols (resp. petsc_row) corresponding to global index j_val
1105  std::pair<const PetscInt *, const PetscInt *> p =
1106  std::equal_range (&petsc_cols[0], &petsc_cols[0] + ncols, j_val);
1107 
1108  // Found an entry for j_val
1109  if (p.first != p.second)
1110  {
1111  // The entry in the contiguous row corresponding
1112  // to the j_val column of interest
1113  const std::size_t j =
1114  std::distance (const_cast<PetscInt *>(&petsc_cols[0]),
1115  const_cast<PetscInt *>(p.first));
1116 
1117  libmesh_assert_less (static_cast<PetscInt>(j), ncols);
1118  libmesh_assert_equal_to (petsc_cols[j], j_val);
1119 
1120  value = static_cast<T> (petsc_row[j]);
1121  }
1122 
1123  ierr = MatRestoreRow(_mat, i_val,
1124  &ncols, &petsc_cols, &petsc_row);
1125  LIBMESH_CHKERR(ierr);
1126 
1127  return value;
1128 }
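
A sketch of read access through operator() (indices i, j are illustrative); the matrix must be closed, and only rows owned by the calling processor may be queried:

   A.close();
   if (i >= A.row_start() && i < A.row_stop())
     libMesh::out << "A(" << i << "," << j << ") = " << A(i, j) << std::endl;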
1129 
1130 
1131 
1132 
1133 template <typename T>
1134 bool PetscMatrix<T>::closed() const
1135 {
1136  libmesh_assert (this->initialized());
1137 
1138  PetscErrorCode ierr=0;
1139  PetscBool assembled;
1140 
1141  ierr = MatAssembled(_mat, &assembled);
1142  LIBMESH_CHKERR(ierr);
1143 
1144  return (assembled == PETSC_TRUE);
1145 }
1146 
1147 
1148 
1149 template <typename T>
1150 void PetscMatrix<T>::swap(PetscMatrix<T> & m_in)
1151 {
1152  std::swap(_mat, m_in._mat);
1153  std::swap(_destroy_mat_on_exit, m_in._destroy_mat_on_exit);
1154 }
1155 
1156 
1157 
1158 //------------------------------------------------------------------
1159 // Explicit instantiations
1160 template class PetscMatrix<Number>;
1161 
1162 } // namespace libMesh
1163 
1164 
1165 #endif // #ifdef LIBMESH_HAVE_PETSC