Chaste Commit::f2ff7ee04e70ac9d06c57344df8d017dbb12b97b
PetscTools.cpp
/*

Copyright (c) 2005-2024, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "PetscTools.hpp"
#include <cassert>
#include <cstring> // For strcmp etc. Needed in gcc-4.3
#include <iostream>
#include <sstream>
#include "Exception.hpp"
#include "Warnings.hpp"

bool PetscTools::mPetscIsInitialised = false;
unsigned PetscTools::mNumProcessors = 0;
unsigned PetscTools::mRank = 0;
bool PetscTools::mIsolateProcesses = false;

#ifndef NDEBUG
// Uncomment this to trace calls to PetscTools::Barrier
//#define DEBUG_BARRIERS
#endif

#ifdef DEBUG_BARRIERS
static unsigned mNumBarriers = 0u;
#endif

void PetscTools::ResetCache()
{
    PetscBool is_there;
    PetscInitialized(&is_there);
    if (is_there)
    {
        mPetscIsInitialised = true;

        PetscInt num_procs;
        MPI_Comm_size(PETSC_COMM_WORLD, &num_procs);
        mNumProcessors = (unsigned)num_procs;

        PetscInt my_rank;
        MPI_Comm_rank(PETSC_COMM_WORLD, &my_rank);
        mRank = (unsigned)my_rank;
    }
    else
    {
        // No PETSc
        mPetscIsInitialised = false;
        mNumProcessors = 1;
        mRank = 0;
    }
}

// Information methods

bool PetscTools::IsInitialised()
{
    CheckCache();
    return mPetscIsInitialised;
}

bool PetscTools::IsSequential()
{
    CheckCache();
    return (mIsolateProcesses || mNumProcessors == 1);
}

bool PetscTools::IsParallel()
{
    CheckCache();
    return (mNumProcessors > 1);
}

bool PetscTools::IsIsolated()
{
    return mIsolateProcesses;
}

unsigned PetscTools::GetNumProcs()
{
    CheckCache();
    return mNumProcessors;
}

unsigned PetscTools::GetMyRank()
{
    CheckCache();
    return mRank;
}

bool PetscTools::AmMaster()
{
    CheckCache();
    return (mRank == MASTER_RANK || mIsolateProcesses);
}

bool PetscTools::AmTopMost()
{
    CheckCache();
    return (mRank == mNumProcessors - 1 || mIsolateProcesses);
}

// Little utility methods

void PetscTools::Barrier(const std::string callerId)
{
    CheckCache();
    if (mPetscIsInitialised && !mIsolateProcesses)
    {
#ifdef DEBUG_BARRIERS
        // "Before" is alphabetically before "Post" so that one can sort the output on process/event/barrier
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Before "
                  << "Barrier " << mNumBarriers << " \"" << callerId << "\"." << std::endl
                  << std::flush;
#endif
        PetscBarrier(PETSC_NULL);
#ifdef DEBUG_BARRIERS
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Post "
                  << "Barrier " << mNumBarriers++ << " \"" << callerId << "\"." << std::endl
                  << std::flush;
#endif
    }
}

void PetscTools::BeginRoundRobin()
{
    Barrier("PetscTools::RoundRobin"); // We want barriers both before all and after all, just in case
    const unsigned my_rank = GetMyRank();
    for (unsigned turn = 0; turn < my_rank; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}

void PetscTools::EndRoundRobin()
{
    const unsigned num_procs = GetNumProcs();
    for (unsigned turn = GetMyRank(); turn < num_procs; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}
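
/*
 * Typical use of the round-robin helpers, as a minimal sketch (the work done
 * inside the section and the AppendToLogFile helper are hypothetical): each
 * process enters the section in rank order, so only one process at a time
 * touches the shared resource.
 *
 *     PetscTools::BeginRoundRobin();
 *     AppendToLogFile("results.log"); // executed by one process at a time, in rank order
 *     PetscTools::EndRoundRobin();
 */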

void PetscTools::IsolateProcesses(bool isolate)
{
    mIsolateProcesses = isolate;
}

MPI_Comm PetscTools::GetWorld()
{
    if (mIsolateProcesses)
    {
        return PETSC_COMM_SELF;
    }
    else
    {
        return PETSC_COMM_WORLD;
    }
}

bool PetscTools::ReplicateBool(bool flag)
{
    CheckCache();
    unsigned my_flag = (unsigned)flag;
    unsigned anyones_flag_is_true = my_flag;
    if (mPetscIsInitialised && !mIsolateProcesses)
    {
        MPI_Allreduce(&my_flag, &anyones_flag_is_true, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }
    return (anyones_flag_is_true == 1);
}

void PetscTools::ReplicateException(bool flag)
{
    bool anyones_error = ReplicateBool(flag);
    if (flag)
    {
        // Return control to exception thrower
        return;
    }
    if (anyones_error)
    {
        EXCEPTION("Another process threw an exception; bailing out.");
    }
}
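
/*
 * Minimal sketch of the intended call pattern (the work inside the try block is
 * hypothetical). Every process must reach ReplicateException exactly once, since
 * the underlying ReplicateBool call is collective:
 *
 *     try
 *     {
 *         DoSomethingThatMightThrowOnOneProcess();
 *     }
 *     catch (Exception& e)
 *     {
 *         PetscTools::ReplicateException(true); // tell the other processes, then rethrow
 *         throw e;
 *     }
 *     PetscTools::ReplicateException(false); // throws here if any other process failed
 */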

// Vector & matrix creation routines

Vec PetscTools::CreateVec(int size, int localSize, bool ignoreOffProcEntries)
{
    assert(size >= 0); // There is one test where we create a zero-sized vector
    Vec ret;
    VecCreate(PETSC_COMM_WORLD, &ret);
    VecSetSizes(ret, localSize, size); // localSize usually defaults to PETSC_DECIDE
    VecSetFromOptions(ret);

    if (ignoreOffProcEntries)
    {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES);
#endif
    }

    return ret;
}

Vec PetscTools::CreateVec(std::vector<double> data)
{
    assert(data.size() > 0);
    Vec ret = CreateVec(data.size());

    double* p_ret;
    VecGetArray(ret, &p_ret);
    int lo, hi;
    VecGetOwnershipRange(ret, &lo, &hi);

    for (int global_index = lo; global_index < hi; global_index++)
    {
        int local_index = global_index - lo;
        p_ret[local_index] = data[global_index];
    }
    VecRestoreArray(ret, &p_ret);

    return ret;
}
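
/*
 * Minimal usage sketch (the data values are arbitrary): every process passes the
 * full std::vector and only copies its locally owned range into the distributed Vec.
 *
 *     std::vector<double> data = {1.0, 2.0, 3.0};
 *     Vec v = PetscTools::CreateVec(data);
 *     // ... use v ...
 *     PetscTools::Destroy(v);
 */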

Vec PetscTools::CreateAndSetVec(int size, double value)
{
    assert(size > 0);
    Vec ret = CreateVec(size);

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    VecSet(&value, ret);
#else
    VecSet(ret, value);
#endif

    return ret;
}

void PetscTools::SetupMat(Mat& rMat, int numRows, int numColumns,
                          unsigned rowPreallocation,
                          int numLocalRows,
                          int numLocalColumns,
                          bool ignoreOffProcEntries,
                          bool newAllocationError)
{
    assert(numRows > 0);
    assert(numColumns > 0);
    if ((int)rowPreallocation > numColumns)
    {
        WARNING("Preallocation failure: requested number of nonzeros per row greater than number of columns"); //+rowPreallocation+">"+numColumns);
        rowPreallocation = numColumns;
    }

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    MatCreate(PETSC_COMM_WORLD, numLocalRows, numLocalColumns, numRows, numColumns, &rMat);
#else //New API
    MatCreate(PETSC_COMM_WORLD, &rMat);
    MatSetSizes(rMat, numLocalRows, numLocalColumns, numRows, numColumns);
#endif

    if (PetscTools::IsSequential())
    {
        MatSetType(rMat, MATSEQAIJ);
        if (rowPreallocation > 0)
        {
            MatSeqAIJSetPreallocation(rMat, rowPreallocation, PETSC_NULL);
        }
    }
    else
    {
        MatSetType(rMat, MATMPIAIJ);
        if (rowPreallocation > 0)
        {
            MatMPIAIJSetPreallocation(rMat, rowPreallocation, PETSC_NULL, rowPreallocation, PETSC_NULL);
        }
    }

    MatSetFromOptions(rMat);

    if (ignoreOffProcEntries) //&& IsParallel())
    {
        if (rowPreallocation == 0)
        {
            // We aren't allowed to do non-zero allocation after setting MAT_IGNORE_OFF_PROC_ENTRIES
            WARNING("Ignoring MAT_IGNORE_OFF_PROC_ENTRIES flag because we might set non-zeroes later");
        }
        else
        {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES);
#endif
        }
    }
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 3) //PETSc 3.3 or later
    if (newAllocationError == false)
    {
        MatSetOption(rMat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
    }
#endif
}
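
/*
 * Minimal usage sketch (the sizes and preallocation values are arbitrary, and the
 * matching Destroy(Mat&) overload from PetscTools.hpp is assumed): create a square
 * 100x100 distributed matrix with 10 nonzeros preallocated per row, letting PETSc
 * choose the local row/column split.
 *
 *     Mat matrix;
 *     PetscTools::SetupMat(matrix, 100, 100, 10);
 *     // ... fill entries with MatSetValues, assemble, use ...
 *     PetscTools::Destroy(matrix);
 */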

void PetscTools::DumpPetscObject(const Mat& rMat, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    MatView(rMat, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

void PetscTools::DumpPetscObject(const Vec& rVec, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    VecView(rVec, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}
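
/*
 * Dump/read round trip, as a minimal sketch (the file path is hypothetical):
 *
 *     Vec v = PetscTools::CreateAndSetVec(10, 2.0);
 *     PetscTools::DumpPetscObject(v, "/tmp/vector.dat");
 *     Vec v_from_disk;
 *     PetscTools::ReadPetscObject(v_from_disk, "/tmp/vector.dat");
 *     // ... v_from_disk now holds the same entries as v ...
 */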

void PetscTools::ReadPetscObject(Mat& rMat, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{
    /*
     * PETSc (as of 3.1) doesn't provide any method for loading a Mat object with a user-defined parallel
     * layout, i.e. there's no equivalent to VecLoadIntoVector for Mat's.
     *
     * It seems to be in their future plans though: http://lists.mcs.anl.gov/pipermail/petsc-users/2010-March/006062.html
     */

    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);

#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
    MatCreate(PETSC_COMM_WORLD, &rMat);
    MatSetType(rMat, MATMPIAIJ);
    MatLoad(rMat, view);
#else
    MatLoad(view, MATMPIAIJ, &rMat);
#endif

    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));

    if (rParallelLayout != nullptr)
    {
        /*
         * The idea is to copy rMat into a matrix that has the appropriate
         * parallel layout. Inefficient...
         */
        PetscInt num_rows, num_local_rows;

        VecGetSize(rParallelLayout, &num_rows);
        VecGetLocalSize(rParallelLayout, &num_local_rows);

        Mat temp_mat;
        PetscTools::SetupMat(temp_mat, num_rows, num_rows, 100, num_local_rows, num_local_rows, false);

#if PETSC_VERSION_GE(3, 11, 2) // PETSc 3.11.2 or newer
        PetscTools::ChasteMatCopy(rMat, temp_mat, DIFFERENT_NONZERO_PATTERN);
#else
        MatCopy(rMat, temp_mat, DIFFERENT_NONZERO_PATTERN);
#endif

        PetscTools::Destroy(rMat);
        rMat = temp_mat;
    }
}

void PetscTools::ReadPetscObject(Vec& rVec, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);
    if (rParallelLayout == nullptr)
    {
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecCreate(PETSC_COMM_WORLD, &rVec);
        VecSetType(rVec, VECMPI);
        VecLoad(rVec, view);
#else
        VecLoad(view, VECMPI, &rVec);
#endif
    }
    else
    {
        VecDuplicate(rParallelLayout, &rVec);
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecLoad(rVec, view);
#else
        VecLoadIntoVector(view, rVec);
#endif
    }
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

bool PetscTools::HasParMetis()
{
#ifdef __INTEL_COMPILER
    // Old versions of the Intel compiler can result in a PETSC ERROR and the program aborting if ParMETIS
    // is checked for before some other calls to PETSc are made. This nasty hack ensures that the
    // HasParMetis method always works on Intel.
    Mat mat;
    PetscTools::SetupMat(mat, 1, 1, 1);
    PetscTools::Destroy(mat);
#endif

    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);

    // We are expecting an error from PETSc on systems that don't have the interface, so suppress it
    // in case it aborts
    PetscPushErrorHandler(PetscIgnoreErrorHandler, nullptr);

#if (PETSC_VERSION_MAJOR == 2 || (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR < 2))
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part, MAT_PARTITIONING_PARMETIS);
#else
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part, MATPARTITIONINGPARMETIS);
#endif

    // Stop suppressing the error
    PetscPopErrorHandler();

    // Note that this method probably leaks memory inside PETSc, because if MatPartitioningCreate fails
    // then there isn't a proper handle to destroy.
    MatPartitioningDestroy(PETSC_DESTROY_PARAM(part));

    // Get-out-of-jail-free card for Windows, where the latest configuration of the integration machine
    // shows that our implementation doesn't work as expected.
#ifdef _MSC_VER
    //\todo #2016 (or similar ticket). The method NodePartitioner::PetscMatrixPartitioning is not working in parallel
    if (parmetis_installed_error == 0)
    {
        WARN_ONCE_ONLY("The PETSc/parMETIS interface is correctly installed but does not yet work in Windows so matrix-based partitioning will be turned off.");
    }
    return false;
#endif

    return (parmetis_installed_error == 0);
}

#if PETSC_VERSION_GE(3, 11, 2) // PETSc 3.11.2 or newer
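/*
 * Row-by-row replacement for MatCopy(..., DIFFERENT_NONZERO_PATTERN), used above in
 * ReadPetscObject. The rationale is an assumption based on the version guard here: the
 * behaviour of MatCopy appears to have changed around PETSc 3.11.2, so copying entries
 * explicitly with MatGetRow/MatSetValues preserves the old semantics of filling a
 * destination matrix with a different parallel layout.
 */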
PetscErrorCode PetscTools::ChasteMatCopy(Mat A, Mat B, MatStructure str)
{
    PetscErrorCode ierr;
    PetscInt i, rstart = 0, rend = 0, nz;
    const PetscInt* cwork;
    const PetscScalar* vwork;

    PetscBool assembled;
    MatAssembled(B, &assembled);
    if (assembled == PETSC_TRUE)
    {
        ierr = MatZeroEntries(B);
        CHKERRQ(ierr);
    }

    ierr = MatGetOwnershipRange(A, &rstart, &rend);
    CHKERRQ(ierr);
    for (i = rstart; i < rend; i++)
    {
        ierr = MatGetRow(A, i, &nz, &cwork, &vwork);
        CHKERRQ(ierr);
        ierr = MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES);
        CHKERRQ(ierr);
        ierr = MatRestoreRow(A, i, &nz, &cwork, &vwork);
        CHKERRQ(ierr);
    }

    ierr = MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY);
    CHKERRQ(ierr);
    ierr = MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY);
    CHKERRQ(ierr);
    PetscFunctionReturn(0);
}
#endif