DistanceMapCalculator.cpp

/*

Copyright (C) University of Oxford, 2005-2010

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.

Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.

You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.

*/

#include "DistanceMapCalculator.hpp"
#include "DistributedTetrahedralMesh.hpp" // For the dynamic_cast below

#include <cfloat> // For DBL_MAX and DBL_EPSILON

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::DistanceMapCalculator(
            AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh)
    : mrMesh(rMesh),
      mWorkOnEntireMesh(true),
      mNumHalosPerProcess(NULL),
      mRoundCounter(0u)
{
    mNumNodes = mrMesh.GetNumNodes();
    DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>* p_distributed_mesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>*>(&mrMesh);
    if (PetscTools::IsSequential() || p_distributed_mesh == NULL)
    {
        // It's a non-distributed mesh: this process owns every node
        mLo = 0;
        mHi = mNumNodes;
    }
    else
    {
        // It's a parallel (distributed) mesh: p_distributed_mesh is non-null and we are running in parallel
        mWorkOnEntireMesh = false;
        mLo = mrMesh.GetDistributedVectorFactory()->GetLow();
        mHi = mrMesh.GetDistributedVectorFactory()->GetHigh();
        // Get local halo information
        p_distributed_mesh->GetHaloNodeIndices(mHaloNodeIndices);
        // Share the number of halo nodes on each process
        unsigned my_size = mHaloNodeIndices.size();
        mNumHalosPerProcess = new unsigned[PetscTools::GetNumProcs()];
        MPI_Allgather(&my_size, 1, MPI_UNSIGNED,
                      mNumHalosPerProcess, 1, MPI_UNSIGNED, PETSC_COMM_WORLD);
    }
}
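
/*
 * A minimal usage sketch, assuming a mesh (here a TetrahedralMesh<3,3>
 * called mesh) has already been constructed and source_index is a valid
 * node index; these names are illustrative only:
 *
 *     DistanceMapCalculator<3, 3> calculator(mesh);
 *     std::vector<unsigned> source_nodes(1u, source_index);
 *     std::vector<double> distances;
 *     calculator.ComputeDistanceMap(source_nodes, distances);
 *     // distances[i] now holds the computed distance from node i to the source
 */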

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::ComputeDistanceMap(
        const std::vector<unsigned>& rSourceNodeIndices,
        std::vector<double>& rNodeDistances)
{
    rNodeDistances.resize(mNumNodes);
    for (unsigned index=0; index<mNumNodes; index++)
    {
        rNodeDistances[index] = DBL_MAX;
    }
    /*
     * Witness points: for each node, the closest point found so far from
     * the original set of source points.
     */
    std::vector< c_vector<double, SPACE_DIM> > witness_points(mNumNodes);

    for (unsigned source_index=0; source_index<rSourceNodeIndices.size(); source_index++)
    {
        unsigned node_index = rSourceNodeIndices[source_index];
        PushLocal(node_index);
        rNodeDistances[node_index] = 0.0;
        // Only a process which can access this node's location is able to set the witness
        if (mLo<=node_index && node_index<mHi)
        {
            witness_points[node_index] = mrMesh.GetNode(node_index)->rGetLocation();
        }
    }

    bool non_empty_queue = true;
    mRoundCounter = 0;
    while (non_empty_queue)
    {
        WorkOnLocalQueue(witness_points, rNodeDistances);
        non_empty_queue = UpdateQueueFromRemote(witness_points, rNodeDistances);
        // Sanity check that we aren't looping many more times than the number of processes
        if (mRoundCounter++ > 3 * PetscTools::GetNumProcs())
        {
            // This line will be hit only if a parallel distributed mesh has a really bad partition
            NEVER_REACHED;
        }
    }

    if (mWorkOnEntireMesh == false)
    {
        // Update all processes with the best values from everywhere.
        // Take a local copy (MPI send and receive buffers may not alias)
        std::vector<double> local_distances = rNodeDistances;
        // Share it back into the vector
        MPI_Allreduce(&local_distances[0], &rNodeDistances[0], mNumNodes, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
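        // A sketch of an equivalent call that avoids the local copy by using
        // the standard MPI_IN_PLACE marker as the send buffer:
        //     MPI_Allreduce(MPI_IN_PLACE, &rNodeDistances[0], mNumNodes,
        //                   MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);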
    }
}


template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::UpdateQueueFromRemote(std::vector< c_vector<double, SPACE_DIM> >& rWitnessPoints,
                                                                     std::vector<double>& rNodeDistances)
{
    if (mWorkOnEntireMesh)
    {
        // Nothing to update: the whole mesh is local
        return false;
    }
    for (unsigned bcast_process=0; bcast_process<PetscTools::GetNumProcs(); bcast_process++)
    {
        // Each process packs its halo-node coordinates, distances and indices into flat arrays
        double* witness_exchange = new double[ SPACE_DIM * mNumHalosPerProcess[bcast_process] ];
        double* dist_exchange = new double[ mNumHalosPerProcess[bcast_process] ];
        unsigned* index_exchange = new unsigned[ mNumHalosPerProcess[bcast_process] ];
        if (PetscTools::GetMyRank() == bcast_process)
        {
            // Broadcaster fills the arrays
            for (unsigned index=0; index<mHaloNodeIndices.size(); index++)
            {
                for (unsigned j=0; j<SPACE_DIM; j++)
                {
                    witness_exchange[ index*SPACE_DIM + j ] = rWitnessPoints[mHaloNodeIndices[index]][j];
                }
                dist_exchange[index] = rNodeDistances[mHaloNodeIndices[index]];
                index_exchange[index] = mHaloNodeIndices[index];
            }
        }

        // Broadcast.  This could be done by casting the indices to double and packing
        // everything into a single array, which would be better for latency, but three
        // separate broadcasts are more readable.
        MPI_Bcast(witness_exchange, SPACE_DIM * mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(dist_exchange, mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(index_exchange, mNumHalosPerProcess[bcast_process], MPI_UNSIGNED,
                  bcast_process, PETSC_COMM_WORLD);
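        // A sketch of the single-buffer alternative mentioned above, with a
        // stride of SPACE_DIM + 2 doubles per halo node (coordinates, then
        // distance, then index cast to double); num_halos is illustrative
        // shorthand for mNumHalosPerProcess[bcast_process]:
        //     double* packed = new double[ (SPACE_DIM + 2) * num_halos ];
        //     ... fill as above, casting each index to double ...
        //     MPI_Bcast(packed, (SPACE_DIM + 2) * num_halos, MPI_DOUBLE,
        //               bcast_process, PETSC_COMM_WORLD);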
        if (PetscTools::GetMyRank() != bcast_process)
        {
            // Receiving processes take the updates
            for (unsigned index=0; index<mNumHalosPerProcess[bcast_process]; index++)
            {
                unsigned global_index = index_exchange[index];
                // Is it a better answer?
                if (dist_exchange[index] < rNodeDistances[global_index]*(1.0-DBL_EPSILON))
                {
                    // Copy across.  This may be unnecessary when PushLocal isn't going
                    // to push because the node isn't local.
                    rNodeDistances[global_index] = dist_exchange[index];
                    for (unsigned j=0; j<SPACE_DIM; j++)
                    {
                        rWitnessPoints[global_index][j] = witness_exchange[ index*SPACE_DIM + j ];
                    }
                    PushLocal(global_index);
                }
            }
        }
        delete[] witness_exchange;
        delete[] dist_exchange;
        delete[] index_exchange;
    }
    // Is any process's queue non-empty?
    bool non_empty_queue = PetscTools::ReplicateBool(!mActiveNodeIndexQueue.empty());
    return non_empty_queue;
}


template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::WorkOnLocalQueue(std::vector< c_vector<double, SPACE_DIM> >& rWitnessPoints,
                                                                     std::vector<double>& rNodeDistances)
{
    while (!mActiveNodeIndexQueue.empty())
    {
        // Get the next index in the queue
        unsigned current_node_index = mActiveNodeIndexQueue.front();
        mActiveNodeIndexQueue.pop();

        try
        {
            Node<SPACE_DIM>* p_current_node = mrMesh.GetNode(current_node_index);
            // Loop over the elements containing the given node
            for (typename Node<SPACE_DIM>::ContainingElementIterator element_iterator = p_current_node->ContainingElementsBegin();
                 element_iterator != p_current_node->ContainingElementsEnd();
                 ++element_iterator)
            {
                // Get a pointer to the containing element
                Element<ELEMENT_DIM, SPACE_DIM>* p_containing_element = mrMesh.GetElement(*element_iterator);

                // Loop over the nodes of the element
                for (unsigned node_local_index=0;
                     node_local_index<p_containing_element->GetNumNodes();
                     node_local_index++)
                {
                    Node<SPACE_DIM>* p_neighbour_node = p_containing_element->GetNode(node_local_index);
                    unsigned neighbour_node_index = p_neighbour_node->GetIndex();

                    // Avoid revisiting the active node
                    if (neighbour_node_index != current_node_index)
                    {
                        // Test whether the neighbour is closer to the current node's witness
                        // than to its own current witness.  The relative tolerance ensures
                        // that rounding error cannot re-queue nodes indefinitely.
                        double updated_distance = norm_2(p_neighbour_node->rGetLocation() - rWitnessPoints[current_node_index]);
                        if (updated_distance < rNodeDistances[neighbour_node_index] * (1.0-DBL_EPSILON))
                        {
                            rWitnessPoints[neighbour_node_index] = rWitnessPoints[current_node_index];
                            rNodeDistances[neighbour_node_index] = updated_distance;
                            PushLocal(neighbour_node_index);
                        }
                    }
                } // Node
            } // Element
        } // Try
        catch (Exception &e)
        {
            // A node in the local queue should always be accessible from this process
            NEVER_REACHED;
        }
    }
}
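
/*
 * Note: as implemented above, every distance is the straight-line length
 * from a node to a witness point taken from the original source set, with
 * witnesses propagated between neighbours by queue-based relaxation.  Where
 * the nearest source can reach a node by such propagation this gives the
 * exact Euclidean distance; on awkward (e.g. non-convex) geometries the
 * value is an approximation.
 */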

// Explicit instantiation

template class DistanceMapCalculator<1, 1>;
template class DistanceMapCalculator<1, 2>;
template class DistanceMapCalculator<2, 2>;
template class DistanceMapCalculator<1, 3>;
//template class DistanceMapCalculator<2, 3>;
template class DistanceMapCalculator<3, 3>;
