DistributedVector.cpp

00001 /*
00002 
00003 Copyright (C) University of Oxford, 2005-2009
00004 
00005 University of Oxford means the Chancellor, Masters and Scholars of the
00006 University of Oxford, having an administrative office at Wellington
00007 Square, Oxford OX1 2JD, UK.
00008 
00009 This file is part of Chaste.
00010 
00011 Chaste is free software: you can redistribute it and/or modify it
00012 under the terms of the GNU Lesser General Public License as published
00013 by the Free Software Foundation, either version 2.1 of the License, or
00014 (at your option) any later version.
00015 
00016 Chaste is distributed in the hope that it will be useful, but WITHOUT
00017 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
00018 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
00019 License for more details. The offer of Chaste under the terms of the
00020 License is subject to the License being interpreted in accordance with
00021 English Law and subject to any action against the University of Oxford
00022 being under the jurisdiction of the English Courts.
00023 
00024 You should have received a copy of the GNU Lesser General Public License
00025 along with Chaste. If not, see <http://www.gnu.org/licenses/>.
00026 
00027 */
00028 
00029 
00030 #include "DistributedVector.hpp"
00031 
// Static state shared by every DistributedVector instance on this process:
// [mLo, mHi) is the half-open range of global indices owned locally, and
// mGlobalHi is the total (global) problem size.  All start at zero until
// SetProblemSize is called.
unsigned DistributedVector::mLo=0;
unsigned DistributedVector::mHi=0;
unsigned DistributedVector::mGlobalHi=0;
// Caches whether CheckForPetsc has confirmed PETSc is initialised, so the
// check runs at most once.
bool DistributedVector::mPetscStatusKnown=false;
00036 
00037 
00038 void DistributedVector::CheckForPetsc()
00039 {
00040     assert(mPetscStatusKnown==false);
00041     PetscTruth petsc_is_initialised;
00042     PetscInitialized(&petsc_is_initialised);
00043 
00044     //Tripping this assertion means that PETSc and MPI weren't intialised
00045     //A unit test should include the global fixture:
00046     //#include "PetscSetupAndFinalize.hpp"
00047     assert(petsc_is_initialised);
00048     mPetscStatusKnown=true;
00049 }
00050 
/**
 * Set the problem size, additionally specifying how many entries this
 * process should own locally.
 *
 * @param size   the global number of entries in the problem
 * @param local  the number of entries owned by this process
 *               (PETSC_DECIDE lets PETSc choose the partition)
 */
void DistributedVector::SetProblemSizePerProcessor(unsigned size, PetscInt local)
{
#ifndef NDEBUG
    // Debug builds verify PETSc is up before making any PETSc calls.
    if (!mPetscStatusKnown)
    {
        CheckForPetsc();
    }
#endif
    // Create a throwaway PETSc vector with the requested distribution purely
    // so that PETSc computes the ownership range; SetProblemSize(Vec) then
    // copies that range into the static members.
    Vec vec;
    VecCreate(PETSC_COMM_WORLD, &vec);
    VecSetSizes(vec, local, size);
    VecSetFromOptions(vec);
    SetProblemSize(vec);
    VecDestroy(vec);
}
00066 
/**
 * Set the problem size, letting PETSc decide how the entries are
 * partitioned across processes.
 *
 * @param size  the global number of entries in the problem
 */
void DistributedVector::SetProblemSize(unsigned size)
{
    SetProblemSizePerProcessor(size, PETSC_DECIDE);
}
00071 
/**
 * Set the problem size and ownership range to match an existing PETSc
 * vector's distribution.
 *
 * @param vec  a PETSc vector whose size and ownership range are copied
 *             into the static members (mLo, mHi, mGlobalHi)
 */
void DistributedVector::SetProblemSize(Vec vec)
{
#ifndef NDEBUG
    // Debug builds verify PETSc is up before making any PETSc calls.
    if (!mPetscStatusKnown)
    {
        CheckForPetsc();
    }
#endif
    // calculate my range
    PetscInt petsc_lo, petsc_hi;
    VecGetOwnershipRange(vec, &petsc_lo, &petsc_hi);
    mLo = (unsigned)petsc_lo;
    mHi = (unsigned)petsc_hi;
    // vector size
    PetscInt size;
    VecGetSize(vec, &size);
    mGlobalHi = (unsigned) size;
}
00090 
/**
 * @return the global problem size, as recorded by the last call to
 *         SetProblemSize (0 if it has never been called)
 */
unsigned DistributedVector::GetProblemSize()
{
    return mGlobalHi;
}
00095 
00096 bool DistributedVector::IsGlobalIndexLocal(unsigned globalIndex)
00097 {
00098     return (mLo<=globalIndex && globalIndex<mHi);
00099 }
00100 
/**
 * Create a new PETSc vector distributed according to the current problem
 * size and ownership range (SetProblemSize must have been called first).
 *
 * @return the new vector; the caller takes ownership and must destroy it
 */
Vec DistributedVector::CreateVec()
{
    Vec vec;
    VecCreate(PETSC_COMM_WORLD, &vec);
    // Local size mHi-mLo and global size mGlobalHi reproduce the
    // distribution recorded by SetProblemSize.
    VecSetSizes(vec, mHi-mLo, mGlobalHi);
    VecSetFromOptions(vec);
    return vec;
}
00109 
/**
 * Create a new PETSc vector holding 'stride' values per problem entry
 * (i.e. stride times the current problem size), distributed so that each
 * process owns stride*(mHi-mLo) entries.
 *
 * @param stride  number of values stored per problem entry
 * @return the new vector; the caller takes ownership and must destroy it
 */
Vec DistributedVector::CreateVec(unsigned stride)
{
    Vec vec;
    VecCreateMPI(PETSC_COMM_WORLD, stride*(mHi-mLo), stride*mGlobalHi, &vec);
    return vec;
}
00116 
00117 DistributedVector::DistributedVector(Vec vec) : mVec(vec)
00118 {
00119     VecGetArray(vec, &mpVec);
00120     PetscInt size;
00121     VecGetSize(vec, &size);
00122     mNumChunks = (unsigned) size / mGlobalHi;
00123     assert ((mNumChunks * mGlobalHi) == (unsigned)size);
00124 }
00125 
00126 double& DistributedVector::operator[](unsigned globalIndex) throw (DistributedVectorException)
00127 {
00128     assert(mNumChunks==1);
00129     if (mLo<=globalIndex && globalIndex<mHi)
00130     {
00131         return mpVec[globalIndex - mLo];
00132     }
00133     throw DistributedVectorException();
00134 }
00135 
/**
 * Hand the local array back to PETSc and assemble the vector.  Must be
 * called after modifying entries, before using the vector in PETSc
 * operations; mpVec must not be used afterwards.
 */
void DistributedVector::Restore()
{
    VecRestoreArray(mVec, &mpVec);
    VecAssemblyBegin(mVec);
    VecAssemblyEnd(mVec);
}
00142 
00143 bool DistributedVector::Iterator::operator!=(const Iterator& other)
00144 {
00145    return(Global != other.Global);
00146 }
00147 
00148 DistributedVector::Iterator& DistributedVector::Iterator::operator++()
00149 {
00150     Local++;
00151     Global++;
00152     return(*this);
00153 }
00154 
00155 DistributedVector::Iterator DistributedVector::Begin()
00156 {
00157     Iterator index;
00158     index.Local = 0;
00159     index.Global = mLo;
00160     return index;
00161 }
00162 
00163 DistributedVector::Iterator DistributedVector::End()
00164 {
00165     Iterator index;
00166     index.Local = mHi-mLo;
00167     index.Global = mHi;
00168     return index;
00169 }
00170 
/**
 * Access the entry an iterator points at.  Only valid for vectors with a
 * single chunk; the iterator must come from this vector's Begin()/End()
 * range (no bounds check is performed beyond the chunk assertion).
 *
 * @param index  an iterator into the locally owned range
 * @return a reference to the entry at the iterator's local index
 */
double& DistributedVector::operator[](Iterator index) throw (DistributedVectorException)
{
    assert(mNumChunks==1);
    return mpVec[index.Local];
}
00176 

Generated on Wed Mar 18 12:51:50 2009 for Chaste by  doxygen 1.5.5