AbstractTetrahedralMeshWriter.cpp

/*

Copyright (C) University of Oxford, 2005-2010

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.

Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.

You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.

*/

// Disable PETSc logging of MPI calls (we don't use this, anyway) to fix
// "right-hand operand of comma has no effect" warnings when building with
// PETSc 2.2.1.
#define PETSC_HAVE_BROKEN_RECURSIVE_MACRO

#include "AbstractTetrahedralMeshWriter.hpp"
#include "AbstractTetrahedralMesh.hpp"
#include "DistributedTetrahedralMesh.hpp"

#include <mpi.h> // For MPI_Send, MPI_Recv

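/**
 * Helper struct bundling the node and element iterators that the writer keeps
 * alive between successive calls to GetNextNode() and GetNextElement().
 */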
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
struct MeshWriterIterators
{
    /** Iterator over the nodes of the mesh being written */
    typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator* pNodeIter;
    /** Iterator over the elements of the mesh being written */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator* pElemIter;
};

// Implementation

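/**
 * Constructor.
 *
 * @param rDirectory  the directory in which to write the mesh files
 * @param rBaseName  the base name for the mesh files
 * @param clearOutputDir  whether to clear the output directory before writing
 */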
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AbstractTetrahedralMeshWriter(const std::string& rDirectory,
                   const std::string& rBaseName,
                   const bool clearOutputDir)
    : AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>(rDirectory, rBaseName, clearOutputDir),
      mpMesh(NULL),
      mNodesPerElement(ELEMENT_DIM+1),
      mpParallelMesh(NULL),
      mpIters(new MeshWriterIterators<ELEMENT_DIM,SPACE_DIM>),
      mpNodeMap(NULL),
      mNodeCounterForParallelMesh(0),
      mElementCounterForParallelMesh(0)
{
    mpIters->pNodeIter = NULL;
    mpIters->pElemIter = NULL;
}

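/**
 * Destructor. Frees the node and element iterators and the node map created
 * by WriteFilesUsingMesh(), and the iterator holder allocated in the constructor.
 */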
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::~AbstractTetrahedralMeshWriter()
{
    if (mpIters->pNodeIter)
    {
        delete mpIters->pNodeIter;
    }
    if (mpIters->pElemIter)
    {
        delete mpIters->pElemIter;
    }

    delete mpIters;

    if (mpNodeMap)
    {
        delete mpNodeMap;
    }
}

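/**
 * Get the coordinates of the next node to be written (master process only).
 *
 * While locally-owned nodes remain, they are read through the node iterator.
 * For a parallel mesh, once the local nodes are exhausted the coordinates are
 * received from the owning process, using the node's global index as the MPI
 * message tag.
 *
 * @return the coordinates of the next node
 */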
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
std::vector<double> AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode()
{
    // Only the master process writes, so only the master should ask for nodes
    assert(PetscTools::AmMaster());

    // If we are writing directly from a mesh...
    if (mpMesh)
    {
        std::vector<double> coords(SPACE_DIM);
        double raw_coords[SPACE_DIM];

        // Iterate over the locally-owned nodes
        if ( (*(mpIters->pNodeIter)) != mpMesh->GetNodeIteratorEnd())
        {
            // Either this is a sequential mesh (and we own all of it)
            // or it's parallel (and the master owns the first chunk)
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                coords[j] = (*(mpIters->pNodeIter))->GetPoint()[j];
            }

            // Ready for when we run out of local nodes
            mNodeCounterForParallelMesh = (*(mpIters->pNodeIter))->GetIndex() + 1;

            ++(*(mpIters->pNodeIter));
            return coords;
        }

        // If we didn't return, then the iterator has reached the end of the local
        // nodes. It must be a parallel mesh and we are expecting messages...
        assert( mpParallelMesh != NULL );

        MPI_Status status;
        // Do the receive; the sender tags the message with the node's global index
        MPI_Recv(raw_coords, SPACE_DIM, MPI_DOUBLE, MPI_ANY_SOURCE, mNodeCounterForParallelMesh, PETSC_COMM_WORLD, &status);
        assert(status.MPI_ERROR == MPI_SUCCESS);

        // Convert to std::vector on the master
        for (unsigned j=0; j<coords.size(); j++)
        {
            coords[j] = raw_coords[j];
        }

        mNodeCounterForParallelMesh++;
        return coords;
    }
    else
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextNode();
    }
}

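/**
 * Get the node indices and attribute of the next element to be written
 * (master process only).
 *
 * For a sequential mesh the element iterator is used directly. For a parallel
 * mesh the master either reads the element from its own partition, or receives
 * ELEMENT_DIM+2 unsigneds from the designated owner, tagged with
 * mNumNodes + element index; the final entry carries the element's attribute.
 *
 * @return the data for the next element
 */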
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing directly from a mesh...
    if (mpMesh)
    {
        ElementData elem_data;
        elem_data.NodeIndices.resize(mNodesPerElement);

        if ( mpParallelMesh == NULL ) // not using a parallel mesh
        {
            // Use the iterator
            assert(this->mNumElements == mpMesh->GetNumElements());

            for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(mpIters->pElemIter))->GetNodeGlobalIndex(j);
                elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            // Set attribute
            elem_data.AttributeValue = (*(mpIters->pElemIter))->GetRegion();

            ++(*(mpIters->pElemIter));

            return elem_data;
        }
        else // Parallel mesh
        {
            // Use the mElementCounterForParallelMesh variable to identify the next element
            if ( mpParallelMesh->CalculateDesignatedOwnershipOfElement( mElementCounterForParallelMesh ) == true )
            {
                // The master owns this element, so it can use local data to
                // recover the node indices and the attribute
                Element<ELEMENT_DIM, SPACE_DIM>* p_element = mpParallelMesh->GetElement(mElementCounterForParallelMesh);
                assert(elem_data.NodeIndices.size() == ELEMENT_DIM+1);
                assert( ! p_element->IsDeleted() );

                for (unsigned j=0; j<ELEMENT_DIM+1; j++)
                {
                    elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
                }
                elem_data.AttributeValue = p_element->GetRegion();
            }
            else
            {
                // The master doesn't own this element, so get it from elsewhere.
                // +2 allows for the attribute value too
                unsigned raw_indices[ELEMENT_DIM+2];
                MPI_Status status;
                MPI_Recv(raw_indices, ELEMENT_DIM+2, MPI_UNSIGNED, MPI_ANY_SOURCE,
                         this->mNumNodes + mElementCounterForParallelMesh,
                         PETSC_COMM_WORLD, &status);

                // Convert to std::vector
                for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
                {
                    elem_data.NodeIndices[j] = raw_indices[j];
                }
                // Attribute value
                elem_data.AttributeValue = raw_indices[ELEMENT_DIM+1];
            }
            // Increment the element counter
            mElementCounterForParallelMesh++;

            return elem_data; // non-master processes will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextElement();
    }
}

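/**
 * Write mesh data to files, taking the data directly from a mesh in memory.
 *
 * Sets up the node and element iterators, builds a node map if the mesh is
 * changing (so that deleted nodes are renumbered contiguously), caches the
 * boundary faces, and writes the files on the master process. If rMesh is a
 * DistributedTetrahedralMesh, delegates to WriteFilesUsingParallelMesh().
 *
 * @param rMesh the mesh to be written
 */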
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh(
      AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh)
{
    this->mpMeshReader = NULL;
    mpMesh = &rMesh;

    this->mNumNodes = mpMesh->GetNumNodes();
    this->mNumElements = mpMesh->GetNumElements();

    typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
    mpIters->pNodeIter = new NodeIterType(mpMesh->GetNodeIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElemIterType;
    mpIters->pElemIter = new ElemIterType(mpMesh->GetElementIteratorBegin());

    // Use this process's first element to gauge the size of all the elements
    if ( (*(mpIters->pElemIter)) != mpMesh->GetElementIteratorEnd())
    {
        mNodesPerElement = (*(mpIters->pElemIter))->GetNumNodes();
    }

    // Have we got a parallel mesh?
    mpParallelMesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>* >(&rMesh);

    if (mpParallelMesh != NULL)
    {
        // It's a parallel mesh
        WriteFilesUsingParallelMesh();
        return;
    }

    if (!PetscTools::AmMaster())
    {
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with the master process writing files
        return;
    }

    // Set up a node map if we might have deleted nodes
    unsigned node_map_index = 0;
    if (mpMesh->IsMeshChanging())
    {
        mpNodeMap = new NodeMap(mpMesh->GetNumAllNodes());
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            mpNodeMap->SetNewIndex(it->GetIndex(), node_map_index++);
        }
    }

    // Cache all of the boundary elements
    for (unsigned i=0; i<(unsigned)rMesh.GetNumAllBoundaryElements(); i++)
    {
        BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = rMesh.GetBoundaryElement(i);
        if (p_boundary_element->IsDeleted() == false)
        {
            std::vector<unsigned> indices(p_boundary_element->GetNumNodes());
            for (unsigned j=0; j<p_boundary_element->GetNumNodes(); j++)
            {
                unsigned old_index = p_boundary_element->GetNodeGlobalIndex(j);
                indices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            this->SetNextBoundaryFace(indices);
        }
    }
    this->WriteFiles();
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with waiting slave processes
    delete mpIters->pNodeIter;
    mpIters->pNodeIter = NULL;
    delete mpIters->pElemIter;
    mpIters->pElemIter = NULL;
}

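/**
 * Write a DistributedTetrahedralMesh by concentrating its data on the master.
 *
 * Each process sends the boundary faces it owns (tagged by face index). The
 * master then writes the files, while the slave processes stream their
 * locally-owned nodes (tagged by global node index) and elements (tagged by
 * mNumNodes + element index) to it over MPI.
 */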
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingParallelMesh()
{
    // Concentrate the mesh information on the master process

    MPI_Status status;
    double raw_coords[SPACE_DIM];

    // Concentrate and cache all of the boundary elements
    unsigned raw_face_indices[ELEMENT_DIM]; // Assuming that we don't have parallel quadratic meshes
    for (unsigned index=0; index<(unsigned)mpParallelMesh->GetNumBoundaryElements(); index++)
    {
        try
        {
            if ( mpParallelMesh->CalculateDesignatedOwnershipOfBoundaryElement( index ) == true )
            {
                BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = mpParallelMesh->GetBoundaryElement(index);
                assert(p_boundary_element->IsDeleted() == false);
                for (unsigned j=0; j<ELEMENT_DIM; j++)
                {
                    raw_face_indices[j] = p_boundary_element->GetNodeGlobalIndex(j);
                }
                MPI_Send(raw_face_indices, ELEMENT_DIM, MPI_UNSIGNED, 0, index, PETSC_COMM_WORLD);
            }
        }
        catch (Exception&)
        {
            // This process does not have a local copy of this boundary element;
            // the designated owner will send it instead
        }
        if (PetscTools::AmMaster())
        {
            MPI_Recv(raw_face_indices, ELEMENT_DIM, MPI_UNSIGNED, MPI_ANY_SOURCE, index, PETSC_COMM_WORLD, &status);
            std::vector<unsigned> indices(ELEMENT_DIM);
            for (unsigned j=0; j<indices.size(); j++)
            {
                indices[j] = raw_face_indices[j];
            }
            this->SetNextBoundaryFace(indices);
        }
    }
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh 1");

    // The master goes on to write as usual
    if (PetscTools::AmMaster())
    {
        this->WriteFiles();
    }
    else
    {
        // The slaves send their nodes and elements to the master
        typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                raw_coords[j] = it->GetPoint()[j];
            }
            MPI_Send(raw_coords, SPACE_DIM, MPI_DOUBLE, 0, it->GetIndex(), PETSC_COMM_WORLD); // Nodes are tagged with their global index
        }

        // +2 allows for the attribute value
        unsigned raw_indices[ELEMENT_DIM+2]; // Assuming that we don't have parallel quadratic elements
        typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElementIterType;
        for (ElementIterType it = mpMesh->GetElementIteratorBegin(); it != mpMesh->GetElementIteratorEnd(); ++it)
        {
            unsigned index = it->GetIndex();
            if ( mpParallelMesh->CalculateDesignatedOwnershipOfElement( index ) == true )
            {
                for (unsigned j=0; j<ELEMENT_DIM+1; j++)
                {
                    raw_indices[j] = it->GetNodeGlobalIndex(j);
                }
                // Attribute value
                raw_indices[ELEMENT_DIM+1] = it->GetRegion();
                MPI_Send(raw_indices, ELEMENT_DIM+2, MPI_UNSIGNED, 0,
                         this->mNumNodes + index, // Element tags are offset by the number of nodes
                         PETSC_COMM_WORLD);
            }
        }
    }
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh 2");
}

// Explicit instantiation

template class AbstractTetrahedralMeshWriter<1,1>;
template class AbstractTetrahedralMeshWriter<1,2>;
template class AbstractTetrahedralMeshWriter<1,3>;
template class AbstractTetrahedralMeshWriter<2,2>;
template class AbstractTetrahedralMeshWriter<2,3>;
template class AbstractTetrahedralMeshWriter<3,3>;
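
// A minimal usage sketch (illustrative only, not part of this translation unit).
// It assumes the concrete subclass TrianglesMeshWriter and a TetrahedralMesh with
// a ConstructCuboid() helper; exact headers and helper methods may differ between
// Chaste versions:
//
//     #include "TetrahedralMesh.hpp"
//     #include "TrianglesMeshWriter.hpp"
//
//     TetrahedralMesh<3,3> mesh;
//     mesh.ConstructCuboid(10, 10, 10);                  // simple structured 3D mesh
//
//     // Write the mesh files into the Chaste output directory
//     TrianglesMeshWriter<3,3> writer("mesh_writer_example", "cube", true);
//     writer.WriteFilesUsingMesh(mesh);                  // exercises the code defined above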
