Chaste Commit::f2ff7ee04e70ac9d06c57344df8d017dbb12b97b
MonodomainPurkinjeSolver.cpp
/*

Copyright (c) 2005-2024, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "MonodomainPurkinjeSolver.hpp"
#include "MonodomainPurkinjeVolumeMassMatrixAssembler.hpp"
#include "MonodomainPurkinjeCableMassMatrixAssembler.hpp"
#include "PetscMatTools.hpp"

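// Sets up the linear system for one PDE time step. Each node carries two
// unknowns (the volume/myocardium voltage and the Purkinje voltage),
// interleaved as stripes of the solution vector. The LHS matrix is assembled
// only once (mMatrixIsConstant is set in the constructor), and the RHS is
// built matrix-based: a vector z is filled node-by-node below and then
// multiplied by the mass matrix to give b = Mz.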
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::SetupLinearSystem(Vec currentSolution, bool computeMatrix)
{
    assert(this->mpLinearSystem->rGetLhsMatrix() != NULL);
    assert(this->mpLinearSystem->rGetRhsVector() != NULL);

    /////////////////////////////////////////
    // set up LHS matrix (and mass matrix)
    /////////////////////////////////////////
    if (computeMatrix)
    {
        mpVolumeAssembler->SetMatrixToAssemble(this->mpLinearSystem->rGetLhsMatrix(), false);
        mpVolumeAssembler->AssembleMatrix();

        // The 'false' tells the cable assembler not to zero the matrix before
        // assembling, so the cable terms are added to the volume terms already present
        mpCableAssembler->SetMatrixToAssemble(this->mpLinearSystem->rGetLhsMatrix(), false);
        mpCableAssembler->AssembleMatrix();

        SetIdentityBlockToLhsMatrix();
        this->mpLinearSystem->FinaliseLhsMatrix();

        MonodomainPurkinjeVolumeMassMatrixAssembler<ELEMENT_DIM,SPACE_DIM> volume_mass_matrix_assembler(mpMixedMesh, HeartConfig::Instance()->GetUseMassLumping());
        volume_mass_matrix_assembler.SetMatrixToAssemble(mMassMatrix);
        volume_mass_matrix_assembler.Assemble();

        MonodomainPurkinjeCableMassMatrixAssembler<ELEMENT_DIM,SPACE_DIM> cable_mass_matrix_assembler(mpMixedMesh, HeartConfig::Instance()->GetUseMassLumping());
        cable_mass_matrix_assembler.SetMatrixToAssemble(mMassMatrix, false /* don't zero the matrix */);
        cable_mass_matrix_assembler.Assemble();

        PetscMatTools::Finalise(mMassMatrix);
    }

    HeartEventHandler::BeginEvent(HeartEventHandler::ASSEMBLE_RHS);

    //////////////////////////////////////////
    // Set up z in b=Mz
    //////////////////////////////////////////
    DistributedVectorFactory* p_factory = this->mpMesh->GetDistributedVectorFactory();

    // dist stripe for the current voltage
    DistributedVector distributed_current_solution = p_factory->CreateDistributedVector(currentSolution);
    DistributedVector::Stripe distributed_current_solution_volume(distributed_current_solution, 0);
    DistributedVector::Stripe distributed_current_solution_cable(distributed_current_solution, 1);
    // dist stripe for z
    DistributedVector dist_vec_matrix_based = p_factory->CreateDistributedVector(mVecForConstructingRhs);
    DistributedVector::Stripe dist_vec_matrix_based_volume(dist_vec_matrix_based, 0);
    DistributedVector::Stripe dist_vec_matrix_based_cable(dist_vec_matrix_based, 1);
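    // Each Stripe is a view onto every second entry of the interleaved vector:
    // stripe 0 reads/writes the volume voltages, stripe 1 the Purkinje voltages,
    // both indexed per node through the same iterator.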

    double Am = HeartConfig::Instance()->GetSurfaceAreaToVolumeRatio();
    double Cm = HeartConfig::Instance()->GetCapacitance();

    double Am_purkinje = HeartConfig::Instance()->GetPurkinjeSurfaceAreaToVolumeRatio();
    double Cm_purkinje = HeartConfig::Instance()->GetPurkinjeCapacitance();

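    // Each entry of z is the explicit part of the semi-implicit monodomain
    // update (the diffusion term sits in the LHS matrix assembled above):
    //     z = (Am*Cm/dt)*V_old - Am*I_ion - I_stim
    // with Am_purkinje/Cm_purkinje taking the place of Am/Cm on the Purkinje stripe.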
    for (DistributedVector::Iterator index = dist_vec_matrix_based.Begin();
         index != dist_vec_matrix_based.End();
         ++index)
    {
        double V_volume = distributed_current_solution_volume[index];
        double F_volume = - Am*this->mpMonodomainTissue->rGetIionicCacheReplicated()[index.Global]
                          - this->mpMonodomainTissue->rGetIntracellularStimulusCacheReplicated()[index.Global];
        dist_vec_matrix_based_volume[index] = Am*Cm*V_volume*PdeSimulationTime::GetPdeTimeStepInverse() + F_volume;

        // Note: a Purkinje intracellular stimulus is not defined yet
        double V_cable = distributed_current_solution_cable[index];
        double F_cable = - Am_purkinje*this->mpMonodomainTissue->rGetPurkinjeIionicCacheReplicated()[index.Global]
                         - this->mpMonodomainTissue->rGetPurkinjeIntracellularStimulusCacheReplicated()[index.Global];

        dist_vec_matrix_based_cable[index] = Am_purkinje*Cm_purkinje*V_cable*PdeSimulationTime::GetPdeTimeStepInverse() + F_cable;
    }

    dist_vec_matrix_based.Restore();

    //////////////////////////////////////////
    // b = Mz
    //////////////////////////////////////////
    MatMult(mMassMatrix, mVecForConstructingRhs, this->mpLinearSystem->rGetRhsVector());

    // Assembling the RHS is not finished yet, as Neumann BCs are added below, but
    // the event will be begun again inside mpNeumannSurfaceTermsAssembler->AssembleVector()
    HeartEventHandler::EndEvent(HeartEventHandler::ASSEMBLE_RHS);

    /////////////////////////////////////////
    // apply Neumann boundary conditions
    /////////////////////////////////////////
    mpNeumannSurfaceTermsAssembler->SetVectorToAssemble(this->mpLinearSystem->rGetRhsVector(), false /*don't zero the vector!*/);
    mpNeumannSurfaceTermsAssembler->AssembleVector();

    // finalise
    this->mpLinearSystem->FinaliseRhsVector();
}

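// Volume-mesh nodes that no Purkinje cable passes through get no contribution
// to their Purkinje row from either assembler, so that row of the LHS would be
// identically zero. This method writes 1.0 onto each such diagonal entry,
// keeping the matrix non-singular; the matching mass-matrix row is also empty,
// so the RHS entry is zero there and the solve pins those unknowns to zero.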
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::SetIdentityBlockToLhsMatrix()
{
    this->mpLinearSystem->FinaliseLhsMatrix();

    Vec diagonal;
    VecDuplicate(mVecForConstructingRhs, &diagonal);
    MatGetDiagonal(this->mpLinearSystem->rGetLhsMatrix(), diagonal);

    // If A(i,i)=0, row i must be within the block to be altered, so set A(i,i)=1
    PetscMatTools::SwitchWriteMode(this->mpLinearSystem->rGetLhsMatrix());
    PetscInt lo, hi;
    this->mpLinearSystem->GetOwnershipRange(lo, hi);
    for (int row=lo; row<hi; row++)
    {
        if (fabs(PetscVecTools::GetElement(diagonal, row)) < 1e-8)
        {
            PetscMatTools::SetElement(this->mpLinearSystem->rGetLhsMatrix(), row, row, 1.0);
        }
    }

    PetscTools::Destroy(diagonal);
}

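// One-off creation of the linear system: the KSP solver, preconditioner and
// tolerance are taken from HeartConfig, and the mass matrix and RHS work
// vector are sized for the two-unknowns-per-node layout (hence the factors
// of 2 below).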
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::InitialiseForSolve(Vec initialSolution)
{
    if (this->mpLinearSystem != NULL)
    {
        return;
    }

    // call base class version...
    AbstractDynamicLinearPdeSolver<ELEMENT_DIM,SPACE_DIM,2>::InitialiseForSolve(initialSolution);

    // ...then do a bit extra
    if (HeartConfig::Instance()->GetUseAbsoluteTolerance())
    {
        this->mpLinearSystem->SetAbsoluteTolerance(HeartConfig::Instance()->GetAbsoluteTolerance());
    }
    else
    {
        NEVER_REACHED; // relative tolerance is not used by this solver
        // this->mpLinearSystem->SetRelativeTolerance(HeartConfig::Instance()->GetRelativeTolerance());
    }

    this->mpLinearSystem->SetKspType(HeartConfig::Instance()->GetKSPSolver());
    this->mpLinearSystem->SetPcType(HeartConfig::Instance()->GetKSPPreconditioner());
    this->mpLinearSystem->SetMatrixIsSymmetric(true);
    this->mpLinearSystem->SetUseFixedNumberIterations(HeartConfig::Instance()->GetUseFixedNumberIterationsLinearSolver(), HeartConfig::Instance()->GetEvaluateNumItsEveryNSolves());

    // Initialise sizes/partitioning of mass matrix & vector, using the initial condition as a template
    VecDuplicate(initialSolution, &mVecForConstructingRhs);
    PetscInt ownership_range_lo;
    PetscInt ownership_range_hi;
    VecGetOwnershipRange(initialSolution, &ownership_range_lo, &ownership_range_hi);
    PetscInt local_size = ownership_range_hi - ownership_range_lo;
    PetscTools::SetupMat(mMassMatrix, 2*this->mpMesh->GetNumNodes(), 2*this->mpMesh->GetNumNodes(),
                         2*this->mpMesh->CalculateMaximumNodeConnectivityPerProcess(),
                         local_size, local_size);
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::PrepareForSetupLinearSystem(Vec currentSolution)
{
    // solve cell models over the coming PDE time step
    mpMonodomainTissue->SolveCellSystems(currentSolution, PdeSimulationTime::GetTime(), PdeSimulationTime::GetNextTime());
}

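// The constructor wires the solver to the mixed-dimension mesh, the tissue
// and the boundary conditions. The assemblers created here are owned by this
// class and deleted in the destructor.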
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::MonodomainPurkinjeSolver(
            MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>* pMesh,
            MonodomainTissue<ELEMENT_DIM,SPACE_DIM>* pTissue,
            BoundaryConditionsContainer<ELEMENT_DIM,SPACE_DIM,2>* pBoundaryConditions)
    : AbstractDynamicLinearPdeSolver<ELEMENT_DIM,SPACE_DIM,2>(pMesh),
      mpMixedMesh(pMesh),
      mpMonodomainTissue(pTissue),
      mpBoundaryConditions(pBoundaryConditions)
{
    assert(pTissue);
    assert(pBoundaryConditions);
    if (HeartConfig::Instance()->GetUseStateVariableInterpolation())
    {
        EXCEPTION("State-variable interpolation is not yet supported with Purkinje");
    }
    this->mMatrixIsConstant = true;

    mpVolumeAssembler = new MonodomainPurkinjeVolumeAssembler<ELEMENT_DIM,SPACE_DIM>(mpMixedMesh, this->mpMonodomainTissue);
    mpCableAssembler = new MonodomainPurkinjeCableAssembler<ELEMENT_DIM,SPACE_DIM>(mpMixedMesh);
    mpNeumannSurfaceTermsAssembler = new NaturalNeumannSurfaceTermAssembler<ELEMENT_DIM,SPACE_DIM,2>(pMesh, pBoundaryConditions);

    // Tell tissue there's no need to replicate ionic caches
    pTissue->SetCacheReplication(false);
    mVecForConstructingRhs = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
MonodomainPurkinjeSolver<ELEMENT_DIM,SPACE_DIM>::~MonodomainPurkinjeSolver()
{
    delete mpVolumeAssembler;
    delete mpCableAssembler;
    delete mpNeumannSurfaceTermsAssembler;

    if (mVecForConstructingRhs)
    {
        PetscTools::Destroy(mVecForConstructingRhs);
        PetscTools::Destroy(mMassMatrix);
    }
}

///////////////////////////////////////////////////////
// explicit instantiation
///////////////////////////////////////////////////////

template class MonodomainPurkinjeSolver<2,2>;
template class MonodomainPurkinjeSolver<3,3>;
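
///////////////////////////////////////////////////////
// usage sketch
///////////////////////////////////////////////////////
// A minimal sketch of how this solver is typically driven. The names and
// setup below are illustrative, not taken from this file; the SetTimes /
// SetTimeStep / SetInitialCondition / Solve interface is inherited from
// AbstractDynamicLinearPdeSolver:
//
//     MixedDimensionMesh<3,3> mesh(...);             // volume mesh plus cable elements
//     MonodomainTissue<3,3> tissue(&cell_factory);   // cell models, incl. Purkinje
//     BoundaryConditionsContainer<3,3,2> bcc;        // 2 unknowns per node
//
//     MonodomainPurkinjeSolver<3,3> solver(&mesh, &tissue, &bcc);
//     solver.SetTimes(0.0, 1.0);                     // ms
//     solver.SetTimeStep(0.01);
//     solver.SetInitialCondition(initial_condition); // interleaved V / V_purkinje
//     Vec result = solver.Solve();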