// Chaste Release::2018.1 -- MonodomainPurkinjeSolver.cpp
/*

Copyright (c) 2005-2018, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
35 
#include "MonodomainPurkinjeSolver.hpp"

#include "MonodomainPurkinjeCableMassMatrixAssembler.hpp"
#include "MonodomainPurkinjeVolumeMassMatrixAssembler.hpp"
#include "PetscMatTools.hpp"
43 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
45 {
46  assert(this->mpLinearSystem->rGetLhsMatrix() != NULL);
47  assert(this->mpLinearSystem->rGetRhsVector() != NULL);
48 
50  // set up LHS matrix (and mass matrix)
52  if(computeMatrix)
53  {
54  mpVolumeAssembler->SetMatrixToAssemble(this->mpLinearSystem->rGetLhsMatrix(),false);
55  mpVolumeAssembler->AssembleMatrix();
56 
57  mpCableAssembler->SetMatrixToAssemble(this->mpLinearSystem->rGetLhsMatrix(),false);
58  // False here is to say to the cable assembler don't zero the matrix before assembling,
59  // just add the terms to the previous value of the matrix.
60  mpCableAssembler->AssembleMatrix();
61 
62  SetIdentityBlockToLhsMatrix();
63  this->mpLinearSystem->FinaliseLhsMatrix();
64 
65 
66  MonodomainPurkinjeVolumeMassMatrixAssembler<ELEMENT_DIM,SPACE_DIM> volume_mass_matrix_assembler(mpMixedMesh, HeartConfig::Instance()->GetUseMassLumping());
67  volume_mass_matrix_assembler.SetMatrixToAssemble(mMassMatrix);
68  volume_mass_matrix_assembler.Assemble();
69 
70  MonodomainPurkinjeCableMassMatrixAssembler<ELEMENT_DIM,SPACE_DIM> cable_mass_matrix_assembler(mpMixedMesh, HeartConfig::Instance()->GetUseMassLumping());
71  cable_mass_matrix_assembler.SetMatrixToAssemble(mMassMatrix,false /* don't zero the matrix*/);
72  cable_mass_matrix_assembler.Assemble();
73 
74  PetscMatTools::Finalise(mMassMatrix);
75  }
76 
77  HeartEventHandler::BeginEvent(HeartEventHandler::ASSEMBLE_RHS);
78 
80  // Set up z in b=Mz
82  DistributedVectorFactory* p_factory = this->mpMesh->GetDistributedVectorFactory();
83 
84  // dist stripe for the current Voltage
85  DistributedVector distributed_current_solution = p_factory->CreateDistributedVector(currentSolution);
86  DistributedVector::Stripe distributed_current_solution_volume(distributed_current_solution, 0);
87  DistributedVector::Stripe distributed_current_solution_cable(distributed_current_solution, 1);
88  // dist stripe for z
89  DistributedVector dist_vec_matrix_based = p_factory->CreateDistributedVector(mVecForConstructingRhs);
90  DistributedVector::Stripe dist_vec_matrix_based_volume(dist_vec_matrix_based, 0);
91  DistributedVector::Stripe dist_vec_matrix_based_cable(dist_vec_matrix_based, 1);
92 
94  double Cm = HeartConfig::Instance()->GetCapacitance();
95 
97  double Cm_purkinje = HeartConfig::Instance()->GetPurkinjeCapacitance();
98 
99 
100  for (DistributedVector::Iterator index = dist_vec_matrix_based.Begin();
101  index!= dist_vec_matrix_based.End();
102  ++index)
103  {
104  double V_volume = distributed_current_solution_volume[index];
105  double F_volume = - Am*this->mpMonodomainTissue->rGetIionicCacheReplicated()[index.Global]
106  - this->mpMonodomainTissue->rGetIntracellularStimulusCacheReplicated()[index.Global];
107  dist_vec_matrix_based_volume[index] = Am*Cm*V_volume*PdeSimulationTime::GetPdeTimeStepInverse() + F_volume;
108 
109  double V_cable = distributed_current_solution_cable[index];
110  double F_cable = - Am*this->mpMonodomainTissue->rGetPurkinjeIionicCacheReplicated()[index.Global] //Purkinje intra-cell stimulus not defined yet
111  - this->mpMonodomainTissue->rGetPurkinjeIntracellularStimulusCacheReplicated()[index.Global];
112 
113  dist_vec_matrix_based_cable[index] = Am_purkinje*Cm_purkinje*V_cable*PdeSimulationTime::GetPdeTimeStepInverse() + F_cable;
114  }
115 
116  dist_vec_matrix_based.Restore();
117 
119  // b = Mz
121  MatMult(mMassMatrix, mVecForConstructingRhs, this->mpLinearSystem->rGetRhsVector());
122 
123  // assembling RHS is not finished yet, as Neumann bcs are added below, but
124  // the event will be begun again inside mpMonodomainAssembler->AssembleVector();
125  HeartEventHandler::EndEvent(HeartEventHandler::ASSEMBLE_RHS);
126 
128  // apply Neumann boundary conditions
130 
131  mpNeumannSurfaceTermsAssembler->SetVectorToAssemble(this->mpLinearSystem->rGetRhsVector(), false/*don't zero vector!*/);
132  mpNeumannSurfaceTermsAssembler->AssembleVector();
133 
134  // finalise
135  this->mpLinearSystem->FinaliseRhsVector();
136 }
137 
138 
139 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
141 {
142  this->mpLinearSystem->FinaliseLhsMatrix();
143 
144  Vec diagonal;
145  VecDuplicate(mVecForConstructingRhs, &diagonal);
146  MatGetDiagonal(this->mpLinearSystem->rGetLhsMatrix(), diagonal);
147 
148  // if A(i,i)=0, i must be within the block to be altered, so set A(i,i)=1.
149  PetscMatTools::SwitchWriteMode(this->mpLinearSystem->rGetLhsMatrix());
150  PetscInt lo, hi;
151  this->mpLinearSystem->GetOwnershipRange(lo, hi);
152  for (int row=lo; row<hi; row++)
153  {
154  if ( fabs( PetscVecTools::GetElement(diagonal, row)) < 1e-8 )
155  {
156  PetscMatTools::SetElement(this->mpLinearSystem->rGetLhsMatrix(),row, row, 1.0);
157  }
158  }
159 
160  PetscTools::Destroy(diagonal);
161 }
162 
163 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
165 {
166  if (this->mpLinearSystem != NULL)
167  {
168  return;
169  }
170 
171  // call base class version...
173 
174  //..then do a bit extra
175  if (HeartConfig::Instance()->GetUseAbsoluteTolerance())
176  {
177  this->mpLinearSystem->SetAbsoluteTolerance(HeartConfig::Instance()->GetAbsoluteTolerance());
178  }
179  else
180  {
182 // this->mpLinearSystem->SetRelativeTolerance(HeartConfig::Instance()->GetRelativeTolerance());
183  }
184 
185  this->mpLinearSystem->SetKspType(HeartConfig::Instance()->GetKSPSolver());
186  this->mpLinearSystem->SetPcType(HeartConfig::Instance()->GetKSPPreconditioner());
187  this->mpLinearSystem->SetMatrixIsSymmetric(true);
188  this->mpLinearSystem->SetUseFixedNumberIterations(HeartConfig::Instance()->GetUseFixedNumberIterationsLinearSolver(), HeartConfig::Instance()->GetEvaluateNumItsEveryNSolves());
189 
190  // Initialise sizes/partitioning of mass matrix & vector, using the initial condition as a template
191  VecDuplicate(initialSolution, &mVecForConstructingRhs);
192  PetscInt ownership_range_lo;
193  PetscInt ownership_range_hi;
194  VecGetOwnershipRange(initialSolution, &ownership_range_lo, &ownership_range_hi);
195  PetscInt local_size = ownership_range_hi - ownership_range_lo;
196  PetscTools::SetupMat(mMassMatrix, 2*this->mpMesh->GetNumNodes(), 2*this->mpMesh->GetNumNodes(),
197  2*this->mpMesh->CalculateMaximumNodeConnectivityPerProcess(),
198  local_size, local_size);
199 }
200 
201 
202 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
204 {
205  // solve cell models
206  mpMonodomainTissue->SolveCellSystems(currentSolution, PdeSimulationTime::GetTime(), PdeSimulationTime::GetNextTime());
207 }
208 
209 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
214  : AbstractDynamicLinearPdeSolver<ELEMENT_DIM,SPACE_DIM,2>(pMesh),
215  mpMixedMesh(pMesh),
216  mpMonodomainTissue(pTissue),
217  mpBoundaryConditions(pBoundaryConditions)
218 {
219  assert(pTissue);
220  assert(pBoundaryConditions);
221  if(HeartConfig::Instance()->GetUseStateVariableInterpolation())
222  {
223  EXCEPTION("State-variable interpolation is not yet supported with Purkinje");
224  }
225  this->mMatrixIsConstant = true;
226 
230 
231  // Tell tissue there's no need to replicate ionic caches
232  pTissue->SetCacheReplication(false);
233  mVecForConstructingRhs = NULL;
234 
235 }
236 
237 template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
239 {
240  delete mpVolumeAssembler;
241  delete mpCableAssembler;
242  delete mpNeumannSurfaceTermsAssembler;
243 
244  if (mVecForConstructingRhs)
245  {
246  PetscTools::Destroy(mVecForConstructingRhs);
247  PetscTools::Destroy(mMassMatrix);
248  }
249 }
250 
251 
253 // explicit instantiation
255 
256 template class MonodomainPurkinjeSolver<2,2>;
257 template class MonodomainPurkinjeSolver<3,3>;
/* NOTE(review): everything below is Doxygen cross-reference residue from the
   HTML extraction, not part of MonodomainPurkinjeSolver.cpp itself. It is
   preserved here, commented out, because it records the declarations of
   entities this file uses:

   double GetPurkinjeSurfaceAreaToVolumeRatio()
   NaturalNeumannSurfaceTermAssembler< ELEMENT_DIM, SPACE_DIM, 2 > * mpNeumannSurfaceTermsAssembler
   static double GetElement(Vec vector, PetscInt row)
   MixedDimensionMesh< ELEMENT_DIM, SPACE_DIM > * mpMixedMesh
   DistributedVector CreateDistributedVector(Vec vec, bool readOnly=false)
   virtual void InitialiseForSolve(Vec initialSolution=nullptr)
   MonodomainPurkinjeCableAssembler< ELEMENT_DIM, SPACE_DIM > * mpCableAssembler
   #define EXCEPTION(message)            [Exception.hpp:143]
   void SetupLinearSystem(Vec currentSolution, bool computeMatrix)
   #define NEVER_REACHED                 [Exception.hpp:206]
   MonodomainPurkinjeVolumeAssembler< ELEMENT_DIM, SPACE_DIM > * mpVolumeAssembler
   static double GetNextTime()
   static void SetElement(Mat matrix, PetscInt row, PetscInt col, double value)
   static void SwitchWriteMode(Mat matrix)
   static void SetupMat(Mat &rMat, int numRows, int numColumns, unsigned rowPreallocation, int numLocalRows=PETSC_DECIDE, int numLocalColumns=PETSC_DECIDE, bool ignoreOffProcEntries=true, bool newAllocationError=true)   [PetscTools.cpp:268]
   static double GetPdeTimeStepInverse()
   void PrepareForSetupLinearSystem(Vec currentSolution)
   static void Destroy(Vec &rVec)        [PetscTools.hpp:352]
   double GetPurkinjeCapacitance()
   void SetCacheReplication(bool doCacheReplication)
   static void Finalise(Mat matrix)
   double GetCapacitance() const
   double GetSurfaceAreaToVolumeRatio() const
   MonodomainPurkinjeSolver(MixedDimensionMesh< ELEMENT_DIM, SPACE_DIM > *pMesh, MonodomainTissue< ELEMENT_DIM, SPACE_DIM > *pTissue, BoundaryConditionsContainer< ELEMENT_DIM, SPACE_DIM, 2 > *pBoundaryConditions)
   void SetMatrixToAssemble(Mat &rMatToAssemble, bool zeroMatrixBeforeAssembly=true)
   MonodomainTissue< ELEMENT_DIM, SPACE_DIM > * mpMonodomainTissue
   static HeartConfig * Instance()
   static double GetTime()
   virtual void InitialiseForSolve(Vec initialSolution)
*/