AbstractTetrahedralMeshWriter.cpp

/*

Copyright (C) University of Oxford, 2005-2011

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.

Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.

You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.

*/

// Disable PETSc logging of MPI calls (we don't use this, anyway) to fix
// "right-hand operand of comma has no effect" warnings when building with
// PETSc 2.2.1.
#define PETSC_HAVE_BROKEN_RECURSIVE_MACRO

#include "AbstractTetrahedralMeshWriter.hpp"
#include "AbstractTetrahedralMesh.hpp"
#include "DistributedTetrahedralMesh.hpp"
#include "MixedDimensionMesh.hpp"
#include "Version.hpp"

#include <mpi.h>     // For MPI_Send, MPI_Recv
#include <climits>   // For UINT_MAX
#include <algorithm> // For std::sort
#include <set>
#include <vector>

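// Overview of the parallel writing strategy implemented below: the master
// process writes all the mesh files, while the other processes send it the
// nodes, elements, boundary elements and cable elements that they own via
// point-to-point MPI messages (see WriteFilesUsingParallelMesh and the
// GetNext...() methods). The message tags encode which entity is being
// transferred, so the master can post receives in exactly the order the file
// format requires.
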
/** Helper struct bundling the mesh iterators the writer keeps hold of between calls. */
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
struct MeshWriterIterators
{
    /** Iterator over the mesh's nodes. */
    typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator* pNodeIter;
    /** Iterator over the mesh's elements. */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator* pElemIter;
    /** Iterator over the mesh's boundary elements. */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator* pBoundaryElemIter;
};

// Implementation

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AbstractTetrahedralMeshWriter(const std::string &rDirectory,
                   const std::string &rBaseName,
                   const bool clearOutputDir)
    : AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>(rDirectory, rBaseName, clearOutputDir),
      mpMesh(NULL),
      mpNodeMap(NULL),
      mNodesPerElement(ELEMENT_DIM+1),
      mNodesPerBoundaryElement(ELEMENT_DIM),
      mpDistributedMesh(NULL),
      mpMixedMesh(NULL),
      mpIters(new MeshWriterIterators<ELEMENT_DIM,SPACE_DIM>),
      mNodeCounterForParallelMesh(0),
      mElementCounterForParallelMesh(0),
      mBoundaryElementCounterForParallelMesh(0),
      mCableElementCounterForParallelMesh(0),
      mFilesAreBinary(false)
{
    mpIters->pNodeIter = NULL;
    mpIters->pElemIter = NULL;
    mpIters->pBoundaryElemIter = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::~AbstractTetrahedralMeshWriter()
{
    if (mpIters->pNodeIter)
    {
        delete mpIters->pNodeIter;
    }
    if (mpIters->pElemIter)
    {
        delete mpIters->pElemIter;
    }
    if (mpIters->pBoundaryElemIter)
    {
        delete mpIters->pBoundaryElemIter;
    }

    delete mpIters;

    if (mpNodeMap)
    {
        delete mpNodeMap;
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
std::vector<double> AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode()
{
    // If we are writing from a mesh...
    assert(PetscTools::AmMaster());
    if (mpMesh)
    {
        std::vector<double> coords(SPACE_DIM);
        double raw_coords[SPACE_DIM];

        // Iterate over the locally-owned nodes
        if ( (*(mpIters->pNodeIter)) != mpMesh->GetNodeIteratorEnd())
        {
            // Either this is a sequential mesh (and we own it all)
            // or it's parallel (and the master owns the first chunk)
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                coords[j] = (*(mpIters->pNodeIter))->GetPoint()[j];
            }

            mNodeCounterForParallelMesh = (*(mpIters->pNodeIter))->GetIndex() + 1; // Ready for when we run out of local nodes

            ++(*(mpIters->pNodeIter));
            return coords;
        }

        // If we didn't return then the iterator has reached the end of the local nodes.
        // It must be a parallel mesh and we are expecting messages...

        assert( mpDistributedMesh != NULL );

        MPI_Status status;
        // Do the receive, and convert to std::vector on the master
        MPI_Recv(raw_coords, SPACE_DIM, MPI_DOUBLE, MPI_ANY_SOURCE, mNodeCounterForParallelMesh, PETSC_COMM_WORLD, &status);
        assert(status.MPI_ERROR == MPI_SUCCESS);
        for (unsigned j=0; j<coords.size(); j++)
        {
            coords[j] = raw_coords[j];
        }

        mNodeCounterForParallelMesh++;
        return coords;
    }
    else
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextNode();
    }
}
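
// Note on the node-gathering protocol above: the master streams its own
// (lowest-indexed) nodes first; once its local iterator is exhausted, each
// remaining node arrives via MPI_Recv from whichever process owns it, matched
// by using the node's global index as the message tag (see the corresponding
// MPI_Send in WriteFilesUsingParallelMesh). The master therefore receives the
// nodes in global-index order without needing to know which rank owns which
// node.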

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement()
{
    assert(PetscTools::AmMaster());
    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData elem_data;
        elem_data.NodeIndices.resize(mNodesPerElement);

        if ( mpDistributedMesh == NULL ) // not using parallel mesh
        {
            // Use the iterator
            assert(this->mNumElements==mpMesh->GetNumElements());

            for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(mpIters->pElemIter))->GetNodeGlobalIndex(j);
                elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            // Set attribute
            elem_data.AttributeValue = (*(mpIters->pElemIter))->GetRegion();

            ++(*(mpIters->pElemIter));

            return elem_data;
        }
        else // Parallel mesh
        {
            // Use the mElementCounterForParallelMesh variable to identify the next element
            if ( mpDistributedMesh->CalculateDesignatedOwnershipOfElement( mElementCounterForParallelMesh ) == true )
            {
                // Master owns this element
                Element<ELEMENT_DIM, SPACE_DIM>* p_element = mpDistributedMesh->GetElement(mElementCounterForParallelMesh);
                assert(elem_data.NodeIndices.size() == mNodesPerElement);
                assert( ! p_element->IsDeleted() );
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<mNodesPerElement; j++)
                {
                    elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
                }
                elem_data.AttributeValue = p_element->GetRegion();
            }
            else
            {
                // Master doesn't own this element.
                // +1 to allow for attribute value too
                unsigned raw_indices[mNodesPerElement+1];
                MPI_Status status;
                // Get it from elsewhere
                MPI_Recv(raw_indices, mNodesPerElement+1, MPI_UNSIGNED, MPI_ANY_SOURCE,
                         this->mNumNodes + mElementCounterForParallelMesh,
                         PETSC_COMM_WORLD, &status);
                // Convert to std::vector
                for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
                {
                    elem_data.NodeIndices[j] = raw_indices[j];
                }

                // Attribute value
                elem_data.AttributeValue = raw_indices[mNodesPerElement];
            }
            // Increment element counter
            mElementCounterForParallelMesh++;

            return elem_data; // non-master processors will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextElement();
    }
}
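
// The MPI tags used above and in WriteFilesUsingParallelMesh partition the tag
// space by entity type: nodes are tagged with their global index, elements are
// offset by mNumNodes, boundary elements by mNumNodes + mNumElements, and cable
// elements by mNumNodes + mNumElements + mNumBoundaryElements. As a purely
// illustrative example, in a mesh with 100 nodes, 50 elements and 20 boundary
// elements, element 7 travels with tag 107, boundary element 3 with tag 153 and
// cable element 2 with tag 172, so a receive for a given tag can only be
// matched by the message describing that particular entity.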

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextBoundaryElement()
{
    assert(PetscTools::AmMaster());
    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData boundary_elem_data;
        boundary_elem_data.NodeIndices.resize(mNodesPerBoundaryElement);

        if ( mpDistributedMesh == NULL ) // not using parallel mesh
        {
            // Use the iterator
            assert(this->mNumBoundaryElements==mpMesh->GetNumBoundaryElements());

            for (unsigned j=0; j<boundary_elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(*(mpIters->pBoundaryElemIter)))->GetNodeGlobalIndex(j);
                boundary_elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }

            ++(*(mpIters->pBoundaryElemIter));

            return boundary_elem_data;
        }
        else // Parallel mesh
        {
            // Use the mBoundaryElementCounterForParallelMesh variable to identify the next boundary element
            if ( mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement( mBoundaryElementCounterForParallelMesh ) == true )
            {
                // Master owns this boundary element
                BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = mpDistributedMesh->GetBoundaryElement(mBoundaryElementCounterForParallelMesh);
                assert(boundary_elem_data.NodeIndices.size() == ELEMENT_DIM);
                assert( ! p_boundary_element->IsDeleted() );
                // Master can use the local data to recover node indices
                for (unsigned j=0; j<ELEMENT_DIM; j++)
                {
                    boundary_elem_data.NodeIndices[j] = p_boundary_element->GetNodeGlobalIndex(j);
                }
            }
            else
            {
                // Master doesn't own this boundary element.
                unsigned raw_indices[ELEMENT_DIM];
                MPI_Status status;
                // Get it from elsewhere
                MPI_Recv(raw_indices, ELEMENT_DIM, MPI_UNSIGNED, MPI_ANY_SOURCE,
                         this->mNumNodes + this->mNumElements + mBoundaryElementCounterForParallelMesh,
                         PETSC_COMM_WORLD, &status);
                // Convert to std::vector
                for (unsigned j=0; j<boundary_elem_data.NodeIndices.size(); j++)
                {
                    boundary_elem_data.NodeIndices[j] = raw_indices[j];
                }
            }
            // Increment boundary element counter
            mBoundaryElementCounterForParallelMesh++;

            return boundary_elem_data; // non-master processors will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextBoundaryElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextCableElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        // Need to be using a MixedDimensionMesh or there will be no cable data
        assert(mpMixedMesh);

        ElementData elem_data;
        elem_data.NodeIndices.resize(2);

        // Use the mCableElementCounterForParallelMesh variable to identify the next cable element
        if ( mpMixedMesh->CalculateDesignatedOwnershipOfCableElement( mCableElementCounterForParallelMesh ) == true )
        {
            // Master owns this element
            Element<1, SPACE_DIM>* p_element = mpMixedMesh->GetCableElement(mCableElementCounterForParallelMesh);
            assert( ! p_element->IsDeleted() );
            // Master can use the local data to recover node indices & attribute
            for (unsigned j=0; j<2; j++)
            {
                elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
            }
            elem_data.AttributeValue = p_element->GetRegion();
        }
        else
        {
            // Master doesn't own this element.
            // Size is 3 to allow for 2 indices and the attribute value too
            unsigned raw_indices[3];
            MPI_Status status;
            // Get it from elsewhere
            MPI_Recv(raw_indices, 3, MPI_UNSIGNED, MPI_ANY_SOURCE,
                     this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + mCableElementCounterForParallelMesh,
                     PETSC_COMM_WORLD, &status);

            // Convert to ElementData (2 nodes plus an attribute value)
            for (unsigned j=0; j<2; j++)
            {
                elem_data.NodeIndices[j] = raw_indices[j];
            }
            // Attribute value
            elem_data.AttributeValue = raw_indices[2];
        }
        // Increment cable element counter
        mCableElementCounterForParallelMesh++;

        return elem_data; // non-master processors will return garbage here - but they should never write to file
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextCableElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh(
      AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh,
      bool keepOriginalElementIndexing)
{
    this->mpMeshReader = NULL;
    mpMesh = &rMesh;

    this->mNumNodes = mpMesh->GetNumNodes();
    this->mNumElements = mpMesh->GetNumElements();
    this->mNumBoundaryElements = mpMesh->GetNumBoundaryElements();
    this->mNumCableElements = mpMesh->GetNumCableElements();

    typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
    mpIters->pNodeIter = new NodeIterType(mpMesh->GetNodeIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElemIterType;
    mpIters->pElemIter = new ElemIterType(mpMesh->GetElementIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElemIterType;
    mpIters->pBoundaryElemIter = new BoundaryElemIterType(mpMesh->GetBoundaryElementIteratorBegin());

    // Use this process's first element to gauge the size of all the elements
    if ( (*(mpIters->pElemIter)) != mpMesh->GetElementIteratorEnd())
    {
        mNodesPerElement = (*(mpIters->pElemIter))->GetNumNodes();
    }

    // Use this process's first boundary element to gauge the size of all the boundary elements
    if ( (*(mpIters->pBoundaryElemIter)) != mpMesh->GetBoundaryElementIteratorEnd())
    {
        mNodesPerBoundaryElement = (*(*(mpIters->pBoundaryElemIter)))->GetNumNodes();
    }

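    // The node connectivity list (.ncl) produced by the block below starts with a
    // plain-text header of the form "<numNodes>\t<maxContainingElements>\t\tBIN\n",
    // followed by one fixed-width binary record per node: the sorted indices of the
    // elements containing that node, padded up to maxContainingElements entries with
    // UINT_MAX. For example (illustrative only), with maxContainingElements == 4, a
    // node contained in elements 3 and 17 is stored as the four unsigneds
    // {3, 17, UINT_MAX, UINT_MAX}.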
    // Connectivity file is written when we write to a binary file (only available for TrianglesMeshWriter) and if we are preserving the element order
    if (this->mFilesAreBinary && keepOriginalElementIndexing)
    {
        unsigned max_elements_all;
        if (PetscTools::IsSequential())
        {
            max_elements_all = mpMesh->CalculateMaximumContainingElementsPerProcess();
        }
        else
        {
            unsigned max_elements_per_process = mpMesh->CalculateMaximumContainingElementsPerProcess();
            MPI_Allreduce(&max_elements_per_process, &max_elements_all, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
        }

        for (unsigned writing_process=0; writing_process<PetscTools::GetNumProcs(); writing_process++)
        {
            if (PetscTools::GetMyRank() == writing_process)
            {
                std::string node_connect_list_file_name = this->mBaseName + ".ncl";
                out_stream p_ncl_file = out_stream(NULL);

                if (PetscTools::AmMaster())
                {
                    assert(writing_process==0);
                    // Open the file for the first time
                    p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name);

                    // Write the ncl header
                    *p_ncl_file << this->mNumNodes << "\t";
                    *p_ncl_file << max_elements_all << "\t";
                    *p_ncl_file << "\tBIN\n";
                }
                else
                {
                    // Append to the existing file
                    p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::app);
                }

                // Write each node's data
                unsigned default_marker = UINT_MAX;

                for (NodeIterType iter = mpMesh->GetNodeIteratorBegin();
                     iter != mpMesh->GetNodeIteratorEnd();
                     ++iter)
                {
                    // Get the containing element indices from the node's set and sort them
                    std::set<unsigned>& r_elem_set = iter->rGetContainingElementIndices();
                    std::vector<unsigned> elem_vector(r_elem_set.begin(), r_elem_set.end());
                    std::sort(elem_vector.begin(), elem_vector.end());
                    // Pad the vector with unsigned markers
                    for (unsigned elem_index=elem_vector.size(); elem_index<max_elements_all; elem_index++)
                    {
                        elem_vector.push_back(default_marker);
                    }
                    assert(elem_vector.size() == max_elements_all);
                    // Write raw data out of std::vector into the file
                    p_ncl_file->write((char*)&elem_vector[0], elem_vector.size()*sizeof(unsigned));
                }

                if (PetscTools::AmTopMost())
                {
                    *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
                }

                p_ncl_file->close();
            }

            PetscTools::Barrier("AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh");
        }
    }

    // Have we got a parallel mesh?
    mpDistributedMesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>* >(&rMesh);

    // Have we got a MixedDimensionMesh?
    mpMixedMesh = dynamic_cast<MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>* >(this->mpMesh);

    if (mpDistributedMesh != NULL)
    {
        // It's a parallel mesh
        WriteFilesUsingParallelMesh(keepOriginalElementIndexing);
        return;
    }

    if (!PetscTools::AmMaster())
    {
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with master process writing files
        return;
    }

    // Set up node map if we might have deleted nodes
    unsigned node_map_index = 0;
    if (mpMesh->IsMeshChanging())
    {
        mpNodeMap = new NodeMap(mpMesh->GetNumAllNodes());
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            mpNodeMap->SetNewIndex(it->GetIndex(), node_map_index++);
        }
    }

    this->WriteFiles();
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with waiting slave processes
    delete mpIters->pNodeIter;
    mpIters->pNodeIter = NULL;
    delete mpIters->pElemIter;
    mpIters->pElemIter = NULL;
    delete mpIters->pBoundaryElemIter;
    mpIters->pBoundaryElemIter = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingParallelMesh(bool keepOriginalElementIndexing)
{
    if (keepOriginalElementIndexing)
    {
        // Master goes on to write as usual
        if (PetscTools::AmMaster())
        {
            this->WriteFiles();
        }
        else
        {
            double raw_coords[SPACE_DIM];
            // Slaves send the nodes they own to the master
            typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
            for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
            {
                for (unsigned j=0; j<SPACE_DIM; j++)
                {
                    raw_coords[j] = it->GetPoint()[j];
                }
                MPI_Send(raw_coords, SPACE_DIM, MPI_DOUBLE, 0, it->GetIndex(), PETSC_COMM_WORLD); // Nodes sent with positive tags
            }

            // Slaves send the elements for which they are the designated owners
            // +1 allows for attribute value
            unsigned raw_indices[mNodesPerElement+1]; // Assuming that we don't have parallel quadratic elements
            typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElementIterType;
            for (ElementIterType it = mpMesh->GetElementIteratorBegin(); it != mpMesh->GetElementIteratorEnd(); ++it)
            {
                unsigned index = it->GetIndex();
                if ( mpDistributedMesh->CalculateDesignatedOwnershipOfElement( index ) == true )
                {
                    for (unsigned j=0; j<mNodesPerElement; j++)
                    {
                        raw_indices[j] = it->GetNodeGlobalIndex(j);
                    }
                    // Attribute value
                    raw_indices[mNodesPerElement] = it->GetRegion();

                    MPI_Send(raw_indices, mNodesPerElement+1, MPI_UNSIGNED, 0,
                             this->mNumNodes + index, // Elements sent with tags offset
                             PETSC_COMM_WORLD);
                }
            }

            // Slaves send the boundary faces for which they are the designated owners (not in 1-d)
            if (ELEMENT_DIM != 1)
            {
                unsigned raw_face_indices[ELEMENT_DIM]; // Assuming that we don't have parallel quadratic meshes
                typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElementIterType;
                for (BoundaryElementIterType it = mpMesh->GetBoundaryElementIteratorBegin(); it != mpMesh->GetBoundaryElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if ( mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement( index ) == true )
                    {
                        for (unsigned j=0; j<ELEMENT_DIM; j++)
                        {
                            raw_face_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        MPI_Send(raw_face_indices, ELEMENT_DIM, MPI_UNSIGNED, 0,
                                 this->mNumNodes + this->mNumElements + index, // Faces sent with tags offset even more
                                 PETSC_COMM_WORLD);
                    }
                }
            }

            // Slaves send the cable elements for which they are the designated owners
            if ( mpMixedMesh )
            {
                unsigned raw_cable_element_indices[3];
                typedef typename MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>::CableElementIterator CableElementIterType;
                for (CableElementIterType it = mpMixedMesh->GetCableElementIteratorBegin(); it != mpMixedMesh->GetCableElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if ( mpMixedMesh->CalculateDesignatedOwnershipOfCableElement( index ) == true )
                    {
                        for (unsigned j=0; j<2; j++)
                        {
                            raw_cable_element_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        raw_cable_element_indices[2] = (*it)->GetRegion();
                        MPI_Send(raw_cable_element_indices, 3, MPI_UNSIGNED, 0,
                                 this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + index, // Cable elements sent with tags offset even more
                                 PETSC_COMM_WORLD);
                    }
                }
            }
        }
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh");
    }
    else
    {
        for (unsigned writing_process=0; writing_process<PetscTools::GetNumProcs(); writing_process++)
        {
            if (PetscTools::GetMyRank() == writing_process)
            {
                if (PetscTools::AmMaster())
                {
                    // Make sure headers are written first
                    assert(PetscTools::GetMyRank() == 0);
                    CreateFilesWithHeaders();
                }

                AppendLocalDataToFiles();

                if (PetscTools::AmTopMost())
                {
                    // Make sure footers are written last
                    assert(PetscTools::GetMyRank() == PetscTools::GetNumProcs()-1);
                    WriteFilesFooter();
                }
            }
            // Process i+1 waits for process i to close the file
            PetscTools::Barrier();
        } // Loop in writing_process
    }
}
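
// Note on the two code paths above: when keepOriginalElementIndexing is true,
// the master alone writes the files, pulling remote data through the
// GetNext...() methods and the tagged MPI sends above; when it is false, each
// process appends its own local data in rank order, serialised by the barrier,
// using the CreateFilesWithHeaders()/AppendLocalDataToFiles()/WriteFilesFooter()
// hooks that concrete writers are expected to override (the default
// implementations below simply hit NEVER_REACHED).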

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::CreateFilesWithHeaders()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AppendLocalDataToFiles()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesFooter()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

// Explicit instantiation

template class AbstractTetrahedralMeshWriter<1,1>;
template class AbstractTetrahedralMeshWriter<1,2>;
template class AbstractTetrahedralMeshWriter<1,3>;
template class AbstractTetrahedralMeshWriter<2,2>;
template class AbstractTetrahedralMeshWriter<2,3>;
template class AbstractTetrahedralMeshWriter<3,3>;
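
// Typical usage (a minimal sketch, not part of this file): a concrete subclass
// such as TrianglesMeshWriter supplies WriteFiles(), and callers do something
// like
//
//     TrianglesMeshWriter<3,3> mesh_writer("output_directory", "mesh_base_name");
//     mesh_writer.WriteFilesUsingMesh(r_mesh);
//
// which works both sequentially and in parallel, with the master process
// performing the actual file output.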
