Chaste Release: 2017.1
AbstractTetrahedralMeshWriter.cpp
/*

Copyright (c) 2005-2017, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
35 
36 // Disable PETSc logging of MPI calls (we don't use this, anyway) to fix
37 // "right-hand operand of comma has no effect" warnings when building with
38 // PETSc 2.2.1.
39 #define PETSC_HAVE_BROKEN_RECURSIVE_MACRO
40 
41 #include "AbstractTetrahedralMeshWriter.hpp"
42 #include "AbstractTetrahedralMesh.hpp"
43 #include "DistributedTetrahedralMesh.hpp"
44 #include "MixedDimensionMesh.hpp"
45 #include "Version.hpp"
46 #include "Exception.hpp"
47 
48 #include <mpi.h> // For MPI_Send, MPI_Recv
49 
50 const char* MeshEventHandler::EventName[] = { "Tri write","BinTri write","VTK write","PVTK write", "node write", "ele write", "face write", "ncl write", "comm1","comm2","Total"};
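// Note: these strings are indexed by the MeshEventHandler events used throughout
// this file (NODE, ELE, FACE, NCL, ...), so their order must stay in sync with
// the event enumeration declared in MeshEventHandler.hpp.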

/**
 * Convenience collection of iterators, primarily to get compilation to happen.
 */
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
struct MeshWriterIterators
{
    /** Iterator over nodes */
    typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator* pNodeIter;
    /** Iterator over elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator* pElemIter;
    /** Iterator over boundary elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator* pBoundaryElemIter;
};

///////////////////////////////////////////////////////////////////////////////////
// Implementation
///////////////////////////////////////////////////////////////////////////////////

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AbstractTetrahedralMeshWriter(const std::string& rDirectory,
                                                                                     const std::string& rBaseName,
                                                                                     const bool clearOutputDir)
    : AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>(rDirectory, rBaseName, clearOutputDir),
      mpNodeMap(nullptr),
      mNodesPerElement(ELEMENT_DIM+1),
      mNodesPerBoundaryElement(ELEMENT_DIM),
      mpMesh(nullptr),
      mpDistributedMesh(nullptr),
      mpMixedMesh(nullptr),
      mpIters(new MeshWriterIterators<ELEMENT_DIM,SPACE_DIM>),
      mNodeCounterForParallelMesh(0),
      mElementCounterForParallelMesh(0),
      mBoundaryElementCounterForParallelMesh(0),
      mCableElementCounterForParallelMesh(0),
      mFilesAreBinary(false)
{
    mpIters->pNodeIter = nullptr;
    mpIters->pElemIter = nullptr;
    mpIters->pBoundaryElemIter = nullptr;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::~AbstractTetrahedralMeshWriter()
{
    if (mpIters->pNodeIter)
    {
        delete mpIters->pNodeIter;
    }
    if (mpIters->pElemIter)
    {
        delete mpIters->pElemIter;
    }
    if (mpIters->pBoundaryElemIter)
    {
        delete mpIters->pBoundaryElemIter;
    }

    delete mpIters;

    if (mpNodeMap)
    {
        delete mpNodeMap;
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
std::vector<double> AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode()
{
    // If we are writing from a mesh..
    assert(PetscTools::AmMaster());
    if (mpMesh)
    {
        std::vector<double> coords(SPACE_DIM);

        // Iterate over the locally-owned nodes
        if ((*(mpIters->pNodeIter)) != mpMesh->GetNodeIteratorEnd())
        {
            // Either this is a sequential mesh (and we own it all)
            // or it's parallel (and the master owns the first chunk)
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                coords[j] = (*(mpIters->pNodeIter))->GetPoint()[j];
            }

            mNodeCounterForParallelMesh = (*(mpIters->pNodeIter))->GetIndex() + 1; // Ready for when we run out of local nodes

            ++(*(mpIters->pNodeIter));
            return coords;
        }

        // If we didn't return then the iterator has reached the end of the local nodes.
        // It must be a parallel mesh and we are expecting messages...

        assert( mpDistributedMesh != nullptr );

        MPI_Status status;
        status.MPI_ERROR = MPI_SUCCESS; // For MPICH2
        // Do the receive, converting to std::vector on the master
        boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
        MPI_Recv(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, MPI_ANY_SOURCE, mNodeCounterForParallelMesh, PETSC_COMM_WORLD, &status);
        assert(status.MPI_ERROR == MPI_SUCCESS);
        for (unsigned j=0; j<coords.size(); j++)
        {
            coords[j] = raw_coords[j];
        }

        mNodeCounterForParallelMesh++;
        return coords;
    }
    else
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextNode();
    }
}
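// GetNextNode() pairs with the MPI_Ssend calls in WriteFilesUsingParallelMesh()
// below: each non-master process sends the coordinates of its nodes tagged with
// the node's global index, so the master's MPI_Recv on tag
// mNodeCounterForParallelMesh picks up exactly the next node in global order,
// from whichever rank owns it.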

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement()
{
    assert(PetscTools::AmMaster());
    // If we are writing from a mesh..
    if (mpMesh)
    {
        ElementData elem_data;
        elem_data.NodeIndices.resize(mNodesPerElement);

        if (mpDistributedMesh == nullptr) // not using parallel mesh
        {
            // Use the iterator
            assert(this->mNumElements == mpMesh->GetNumElements());

            for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(mpIters->pElemIter))->GetNodeGlobalIndex(j);
                elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            // Set attribute
            elem_data.AttributeValue = (*(mpIters->pElemIter))->GetAttribute();

            ++(*(mpIters->pElemIter));

            return elem_data;
        }
        else // Parallel mesh
        {
            // Use the mElementCounterForParallelMesh variable to identify the next element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(mElementCounterForParallelMesh) == true)
            {
                // Master owns this element
                Element<ELEMENT_DIM, SPACE_DIM>* p_element = mpDistributedMesh->GetElement(mElementCounterForParallelMesh);
                assert(elem_data.NodeIndices.size() == mNodesPerElement);
                assert(!p_element->IsDeleted());
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<mNodesPerElement; j++)
                {
                    elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
                }
                elem_data.AttributeValue = p_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this element
                UnpackElement(elem_data, mElementCounterForParallelMesh, mNodesPerElement, this->mNumNodes + mElementCounterForParallelMesh);
            }
            // Increment the element counter
            mElementCounterForParallelMesh++;

            return elem_data; // non-master processors will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextElement();
    }
}
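// Element messages are tagged with this->mNumNodes + index (see the
// PostElement() calls in WriteFilesUsingParallelMesh() below); boundary and
// cable elements add this->mNumElements and this->mNumBoundaryElements on top.
// These offsets keep node, element, face and cable message tags disjoint even
// though they all travel over PETSC_COMM_WORLD.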

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextBoundaryElement()
{
    assert(PetscTools::AmMaster());
    // If we are writing from a mesh..
    if (mpMesh)
    {
        ElementData boundary_elem_data;
        boundary_elem_data.NodeIndices.resize(mNodesPerBoundaryElement);

        if (mpDistributedMesh == nullptr) // not using parallel mesh
        {
            // Use the iterator
            assert(this->mNumBoundaryElements == mpMesh->GetNumBoundaryElements());

            for (unsigned j=0; j<boundary_elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(*(mpIters->pBoundaryElemIter)))->GetNodeGlobalIndex(j);
                boundary_elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            boundary_elem_data.AttributeValue = (*(*(mpIters->pBoundaryElemIter)))->GetAttribute();

            ++(*(mpIters->pBoundaryElemIter));
            return boundary_elem_data;
        }
        else // Parallel mesh
        {
            // Use the mBoundaryElementCounterForParallelMesh variable to identify the next boundary element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(mBoundaryElementCounterForParallelMesh) == true)
            {
                // Master owns this boundary element
                BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = mpDistributedMesh->GetBoundaryElement(mBoundaryElementCounterForParallelMesh);
                assert(boundary_elem_data.NodeIndices.size() == ELEMENT_DIM);
                assert(!p_boundary_element->IsDeleted());

                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<ELEMENT_DIM; j++)
                {
                    boundary_elem_data.NodeIndices[j] = p_boundary_element->GetNodeGlobalIndex(j);
                }
                boundary_elem_data.AttributeValue = p_boundary_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this boundary element
                UnpackElement(boundary_elem_data, mBoundaryElementCounterForParallelMesh, ELEMENT_DIM, this->mNumNodes + this->mNumElements + mBoundaryElementCounterForParallelMesh);
            }
            // Increment the boundary element counter
            mBoundaryElementCounterForParallelMesh++;

            return boundary_elem_data; // non-master processors will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextBoundaryElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextCableElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh..
    if (mpMesh)
    {
        // Need to be using a MixedDimensionMesh or there will be no cable data
        assert(mpMixedMesh);

        ElementData elem_data;
        elem_data.NodeIndices.resize(2);

        // Use the mCableElementCounterForParallelMesh variable to identify the next cable element
        if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(mCableElementCounterForParallelMesh) == true)
        {
            // Master owns this element
            Element<1, SPACE_DIM>* p_element = mpMixedMesh->GetCableElement(mCableElementCounterForParallelMesh);
            assert(!p_element->IsDeleted());

            // Master can use the local data to recover node indices & attribute
            for (unsigned j=0; j<2; j++)
            {
                elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
            }
            elem_data.AttributeValue = p_element->GetAttribute();
        }
        else
        {
            // Master doesn't own this element
            UnpackElement(elem_data, mCableElementCounterForParallelMesh, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + mCableElementCounterForParallelMesh);
        }
        // Increment the cable element counter
        mCableElementCounterForParallelMesh++;

        return elem_data; // non-master processors will return garbage here - but they should never write to file
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextCableElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteNclFile(AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
                                                                         bool invertMeshPermutation)
{
    MeshEventHandler::BeginEvent(MeshEventHandler::NCL);
    unsigned max_elements_all;
    if (PetscTools::IsSequential())
    {
        max_elements_all = rMesh.CalculateMaximumContainingElementsPerProcess();
    }
    else
    {
        unsigned max_elements_per_process = rMesh.CalculateMaximumContainingElementsPerProcess();
        MPI_Allreduce(&max_elements_per_process, &max_elements_all, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }

    std::string node_connect_list_file_name = this->mBaseName + ".ncl";
    if (invertMeshPermutation && !rMesh.rGetNodePermutation().empty())
    {
        node_connect_list_file_name += "-tmp";
    }

    PetscTools::BeginRoundRobin();
    {
        out_stream p_ncl_file = out_stream(nullptr);

        if (PetscTools::AmMaster())
        {
            // Open the file for the first time
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::trunc);

            // Write the ncl header
            *p_ncl_file << rMesh.GetNumNodes() << "\t";
            *p_ncl_file << max_elements_all << "\t";
            *p_ncl_file << "\tBIN\n";
        }
        else
        {
            // Append to the existing file
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::app);
        }

        // Write each node's data
        unsigned default_marker = UINT_MAX;

        typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
        for (NodeIterType iter = rMesh.GetNodeIteratorBegin();
             iter != rMesh.GetNodeIteratorEnd();
             ++iter)
        {
            // Get the containing element indices from the node's set and sort them
            std::set<unsigned>& r_elem_set = iter->rGetContainingElementIndices();
            std::vector<unsigned> elem_vector(r_elem_set.begin(), r_elem_set.end());
            std::sort(elem_vector.begin(), elem_vector.end());
            // Pad the vector with unsigned markers
            for (unsigned elem_index=elem_vector.size(); elem_index<max_elements_all; elem_index++)
            {
                elem_vector.push_back(default_marker);
            }
            assert(elem_vector.size() == max_elements_all);
            // Write raw data out of std::vector into the file
            if (max_elements_all > 0u)
            {
                p_ncl_file->write((char*)&elem_vector[0], elem_vector.size()*sizeof(unsigned));
            }
        }

        if (PetscTools::AmTopMost())
        {
            *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        }

        p_ncl_file->close();
    }
    PetscTools::EndRoundRobin();

    if (invertMeshPermutation && PetscTools::AmMaster() && !rMesh.rGetNodePermutation().empty() && max_elements_all > 0u)
    {
        // Open files
        const std::string real_node_connect_list_file_name = this->mBaseName + ".ncl";
        out_stream p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(real_node_connect_list_file_name, std::ios::binary | std::ios::trunc);
        FileFinder temp_ncl_path = this->mpOutputFileHandler->FindFile(node_connect_list_file_name);
        std::ifstream temp_ncl_file(temp_ncl_path.GetAbsolutePath().c_str(), std::ios::binary);
        // Copy the header
        std::string header_line;
        getline(temp_ncl_file, header_line, '\n');
        (*p_ncl_file) << header_line << "\n";
        const std::streampos data_start = temp_ncl_file.tellg();
        const std::streamoff item_width = max_elements_all * sizeof(unsigned);
        // Copy the binary data, permuted
        std::vector<unsigned> elem_vector(max_elements_all);
        for (unsigned node_index=0; node_index<rMesh.GetNumAllNodes(); node_index++)
        {
            unsigned permuted_index = rMesh.rGetNodePermutation()[node_index];
            temp_ncl_file.seekg(data_start + item_width * permuted_index, std::ios_base::beg);
            temp_ncl_file.read((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
            p_ncl_file->write((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
        }
        // Footer
        *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        p_ncl_file->close();
        // Remove the temporary file
        remove(temp_ncl_path.GetAbsolutePath().c_str());
    }
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteNclFile");
    MeshEventHandler::EndEvent(MeshEventHandler::NCL);
}
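// The resulting .ncl ("node connectivity list") file thus consists of a
// plain-text header line ("<num_nodes>\t<max_elements_all>\t\tBIN"), then one
// fixed-width binary record per node giving the indices of the elements that
// contain it, padded to max_elements_all entries with UINT_MAX, and finally a
// provenance footer.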

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh(AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
                                                                                bool keepOriginalElementIndexing)
{
    this->mpMeshReader = nullptr;
    mpMesh = &rMesh;

    this->mNumNodes = mpMesh->GetNumNodes();
    this->mNumElements = mpMesh->GetNumElements();
    this->mNumBoundaryElements = mpMesh->GetNumBoundaryElements();
    this->mNumCableElements = mpMesh->GetNumCableElements();

    typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
    mpIters->pNodeIter = new NodeIterType(mpMesh->GetNodeIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElemIterType;
    mpIters->pElemIter = new ElemIterType(mpMesh->GetElementIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElemIterType;
    mpIters->pBoundaryElemIter = new BoundaryElemIterType(mpMesh->GetBoundaryElementIteratorBegin());

    // Use this process's first element to gauge the size of all the elements
    if ((*(mpIters->pElemIter)) != mpMesh->GetElementIteratorEnd())
    {
        mNodesPerElement = (*(mpIters->pElemIter))->GetNumNodes();
    }

    // Use this process's first boundary element to gauge the size of all the boundary elements
    if ((*(mpIters->pBoundaryElemIter)) != mpMesh->GetBoundaryElementIteratorEnd())
    {
        mNodesPerBoundaryElement = (*(*(mpIters->pBoundaryElemIter)))->GetNumNodes();
    }

    // The connectivity file is written when we write to a binary file (only available for TrianglesMeshWriter) and if we are preserving the element order
    if (this->mFilesAreBinary && keepOriginalElementIndexing)
    {
        WriteNclFile(*mpMesh);
    }

    // Have we got a parallel mesh?
    mpDistributedMesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>* >(&rMesh);

    // Have we got a MixedDimensionMesh?
    mpMixedMesh = dynamic_cast<MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>* >(this->mpMesh);

    if (mpDistributedMesh != nullptr)
    {
        // It's a parallel mesh
        WriteFilesUsingParallelMesh(keepOriginalElementIndexing);
        return;
    }

    if (!PetscTools::AmMaster())
    {
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with master process writing files
        return;
    }

    // Set up node map if we might have deleted nodes
    unsigned node_map_index = 0;
    if (mpMesh->IsMeshChanging())
    {
        mpNodeMap = new NodeMap(1 + mpMesh->GetMaximumNodeIndex());
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            mpNodeMap->SetNewIndex(it->GetIndex(), node_map_index++);
        }
    }

    this->WriteFiles();
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with waiting slave processes
    delete mpIters->pNodeIter;
    mpIters->pNodeIter = nullptr;
    delete mpIters->pElemIter;
    mpIters->pElemIter = nullptr;
    delete mpIters->pBoundaryElemIter;
    mpIters->pBoundaryElemIter = nullptr;
}
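// Synchronisation note: for a non-distributed mesh only the master writes and
// every other process simply waits at the first barrier above, whereas for a
// DistributedTetrahedralMesh control passes to WriteFilesUsingParallelMesh()
// before that point and the non-master processes stream their data to the
// master instead of idling.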

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMeshReaderAndMesh(
        AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh)
{
    WriteNclFile(rMesh, true);
    this->WriteFilesUsingMeshReader(rMeshReader);
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingParallelMesh(bool keepOriginalElementIndexing)
{
    if (keepOriginalElementIndexing)
    {
        // Master goes on to write as usual
        if (PetscTools::AmMaster())
        {
            this->WriteFiles();
        }
        else
        {
//            PetscTools::Barrier("DodgyBarrierBeforeNODE");
            MeshEventHandler::BeginEvent(MeshEventHandler::NODE);
            boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
            // Slaves concentrate the Nodes
            typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
            for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
            {
                for (unsigned j=0; j<SPACE_DIM; j++)
                {
                    raw_coords[j] = it->GetPoint()[j];
                }
                MPI_Ssend(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, 0, it->GetIndex(), PETSC_COMM_WORLD); // Nodes sent with positive tags
            }
//            PetscTools::Barrier("DodgyBarrierAfterNODE");
            MeshEventHandler::EndEvent(MeshEventHandler::NODE);

            MeshEventHandler::BeginEvent(MeshEventHandler::ELE);
            // Slaves concentrate the Elements for which they are owners
            boost::scoped_array<unsigned> raw_indices(new unsigned[mNodesPerElement]); // Assuming that we don't have parallel quadratic elements
            typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElementIterType;
            for (ElementIterType it = mpMesh->GetElementIteratorBegin(); it != mpMesh->GetElementIteratorEnd(); ++it)
            {
                unsigned index = it->GetIndex();
                if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(index) == true)
                {
                    for (unsigned j=0; j<mNodesPerElement; j++)
                    {
                        raw_indices[j] = it->GetNodeGlobalIndex(j);
                    }
                    PostElement(index, raw_indices.get(), mNodesPerElement, this->mNumNodes + index, it->GetAttribute());
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterELE");
            MeshEventHandler::EndEvent(MeshEventHandler::ELE);
            MeshEventHandler::BeginEvent(MeshEventHandler::FACE);
            // Slaves concentrate the Faces for which they are owners (not in 1-d)
            if (ELEMENT_DIM != 1)
            {
                boost::scoped_array<unsigned> raw_face_indices(new unsigned[ELEMENT_DIM]); // Assuming that we don't have parallel quadratic meshes
                typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElementIterType;
                for (BoundaryElementIterType it = mpMesh->GetBoundaryElementIteratorBegin(); it != mpMesh->GetBoundaryElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(index) == true)
                    {
                        for (unsigned j=0; j<ELEMENT_DIM; j++)
                        {
                            raw_face_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_face_indices.get(), ELEMENT_DIM, this->mNumNodes + this->mNumElements + index, (*it)->GetAttribute());
                    }
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterFACE");
            MeshEventHandler::EndEvent(MeshEventHandler::FACE);

            // Slaves concentrate the cable elements for which they are owners
            if (mpMixedMesh)
            {
                typedef typename MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>::CableElementIterator CableElementIterType;
                for (CableElementIterType it = mpMixedMesh->GetCableElementIteratorBegin(); it != mpMixedMesh->GetCableElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(index) == true)
                    {
                        unsigned raw_cable_element_indices[2];
                        for (unsigned j=0; j<2; j++)
                        {
                            raw_cable_element_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_cable_element_indices, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + index, (*it)->GetAttribute());
                    }
                }
            }
        }
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh");
    }
    else
    {
        PetscTools::BeginRoundRobin();

        if (PetscTools::AmMaster())
        {
            // Make sure headers are written first
            assert(PetscTools::GetMyRank() == 0);
            CreateFilesWithHeaders();
        }

        AppendLocalDataToFiles();

        if (PetscTools::AmTopMost())
        {
            // Make sure footers are written last
            assert(PetscTools::GetMyRank() == PetscTools::GetNumProcs()-1);
            WriteFilesFooter();
        }

        PetscTools::EndRoundRobin();
    }
}
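// Of the two branches above, keepOriginalElementIndexing == true reproduces the
// original global numbering by funnelling every node, element, face and cable
// element through the master, while the round-robin branch lets each process
// append its local chunk in rank order via the CreateFilesWithHeaders() /
// AppendLocalDataToFiles() / WriteFilesFooter() hooks, avoiding the
// all-to-master communication at the cost of the original ordering.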

// LCOV_EXCL_START
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::CreateFilesWithHeaders()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}
// LCOV_EXCL_STOP


// LCOV_EXCL_START
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AppendLocalDataToFiles()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}
// LCOV_EXCL_STOP


// LCOV_EXCL_START
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesFooter()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}
// LCOV_EXCL_STOP

// Explicit instantiation
template class AbstractTetrahedralMeshWriter<1,1>;
template class AbstractTetrahedralMeshWriter<1,2>;
template class AbstractTetrahedralMeshWriter<1,3>;
template class AbstractTetrahedralMeshWriter<2,2>;
template class AbstractTetrahedralMeshWriter<2,3>;
template class AbstractTetrahedralMeshWriter<3,3>;
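A minimal usage sketch (an illustration, not part of the file above; it assumes
the concrete TrianglesMeshWriter subclass mentioned in the comments and the
mesh-construction helpers from the rest of the library):

    #include "TetrahedralMesh.hpp"
    #include "TrianglesMeshWriter.hpp"

    TetrahedralMesh<3,3> mesh;
    mesh.ConstructCuboid(10, 10, 10);          // regular 10x10x10 cube mesh
    TrianglesMeshWriter<3,3> writer("WriterOutput", "cube");
    writer.SetWriteFilesAsBinary();            // binary output; also triggers the .ncl file
    writer.WriteFilesUsingMesh(mesh);          // produces cube.node, cube.ele, cube.face (and cube.ncl)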