Chaste Release::3.1
DistanceMapCalculator.cpp
/*

Copyright (c) 2005-2012, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "DistanceMapCalculator.hpp"
#include "DistributedTetrahedralMesh.hpp" // For dynamic cast

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::DistanceMapCalculator(
            AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh)
    : mrMesh(rMesh),
      mWorkOnEntireMesh(true),
      mNumHalosPerProcess(NULL),
      mRoundCounter(0u),
      mPopCounter(0u),
      mTargetNodeIndex(UINT_MAX),
      mSingleTarget(false)
{
    mNumNodes = mrMesh.GetNumNodes();

    DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>* p_distributed_mesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>*>(&mrMesh);
    if (PetscTools::IsSequential() || p_distributed_mesh == NULL)
    {
        // It's a non-distributed mesh
        mLo = 0;
        mHi = mNumNodes;
    }
    else
    {
        // It's a parallel (distributed) mesh (p_distributed_mesh is non-null and we are running in parallel)
        mWorkOnEntireMesh = false;
        mLo = mrMesh.GetDistributedVectorFactory()->GetLow();
        mHi = mrMesh.GetDistributedVectorFactory()->GetHigh();

        // Get local halo information
        p_distributed_mesh->GetHaloNodeIndices(mHaloNodeIndices);

        // Share information on the number of halo nodes
        unsigned my_size = mHaloNodeIndices.size();
        mNumHalosPerProcess = new unsigned[PetscTools::GetNumProcs()];
        MPI_Allgather(&my_size, 1, MPI_UNSIGNED, mNumHalosPerProcess, 1, MPI_UNSIGNED, PETSC_COMM_WORLD);
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::ComputeDistanceMap(
        const std::vector<unsigned>& rSourceNodeIndices,
        std::vector<double>& rNodeDistances)
{
    rNodeDistances.resize(mNumNodes);
    for (unsigned index=0; index<mNumNodes; index++)
    {
        rNodeDistances[index] = DBL_MAX;
    }
    assert(mActivePriorityNodeIndexQueue.empty());

    if (mSingleTarget)
    {
        assert(rSourceNodeIndices.size() == 1);
        unsigned source_node_index = rSourceNodeIndices[0];

        // We need to make sure this is local, so that we can use the geometry
        if (mLo<=source_node_index && source_node_index<mHi)
        {
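            // Note: in single-target (A*) mode the stored distances include the
            // straight-line distance to the target, so the source is seeded with its
            // heuristic value rather than with zero.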
            double heuristic_correction = norm_2(mrMesh.GetNode(source_node_index)->rGetLocation()-mTargetNodePoint);
            PushLocal(heuristic_correction, source_node_index);
            rNodeDistances[source_node_index] = heuristic_correction;
        }
    }
    else
    {
        for (unsigned source_index=0; source_index<rSourceNodeIndices.size(); source_index++)
        {
            unsigned source_node_index = rSourceNodeIndices[source_index];
            PushLocal(0.0, source_node_index);
            rNodeDistances[source_node_index] = 0.0;
        }
    }

    bool non_empty_queue = true;
    mRoundCounter = 0;
    mPopCounter = 0;
    while (non_empty_queue)
    {
        bool termination = WorkOnLocalQueue(rNodeDistances);

        // Sanity check that we aren't looping more times than expected
        if (mRoundCounter++ > 3 * PetscTools::GetNumProcs())
        {
            // This line will be hit if there's a parallel distributed mesh with a really bad partition
            NEVER_REACHED;
        }
        if (mSingleTarget && PetscTools::ReplicateBool(termination))
        {
            // A single process found the target already
            break;
        }
        non_empty_queue = UpdateQueueFromRemote(rNodeDistances);
    }

    if (mWorkOnEntireMesh == false)
    {
        // Update all processes with the best values from everywhere
        // Take a local copy
        std::vector<double> local_distances = rNodeDistances;

        // Share it back into the vector
        MPI_Allreduce(&local_distances[0], &rNodeDistances[0], mNumNodes, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::UpdateQueueFromRemote(std::vector<double>& rNodeDistances)
{
    if (mWorkOnEntireMesh)
    {
        // Nothing to exchange when the whole mesh is local
        return !mActivePriorityNodeIndexQueue.empty();
    }
    for (unsigned bcast_process=0; bcast_process<PetscTools::GetNumProcs(); bcast_process++)
    {
        // Each process in turn packs its halo-node distances and global indices into flat arrays
        double* dist_exchange = new double[ mNumHalosPerProcess[bcast_process] ];
        unsigned* index_exchange = new unsigned[ mNumHalosPerProcess[bcast_process] ];
        if (PetscTools::GetMyRank() == bcast_process)
        {
            // Broadcaster fills the array
            for (unsigned index=0; index<mHaloNodeIndices.size(); index++)
            {
                dist_exchange[index] = rNodeDistances[mHaloNodeIndices[index]];
                index_exchange[index] = mHaloNodeIndices[index];
            }
        }

        /*
         * Broadcast - this could be done by casting the indices to double and
         * packing everything into a single array. That would be better for
         * latency, but this is probably more readable.
         */
        MPI_Bcast(dist_exchange, mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(index_exchange, mNumHalosPerProcess[bcast_process], MPI_UNSIGNED,
                  bcast_process, PETSC_COMM_WORLD);
        if (PetscTools::GetMyRank() != bcast_process)
        {
            // Receiving processes take updates
            for (unsigned index=0; index<mNumHalosPerProcess[bcast_process]; index++)
            {
                unsigned global_index = index_exchange[index];
                // Is it a better answer?
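                // (The factor of (1.0-2*DBL_EPSILON) means a value must improve by more
                // than round-off error before it is copied across and re-queued.)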
                if (dist_exchange[index] < rNodeDistances[global_index]*(1.0-2*DBL_EPSILON))
                {
                    // Copy across - this may be unnecessary when PushLocal isn't going to push because it's not local
                    rNodeDistances[global_index] = dist_exchange[index];
                    PushLocal(rNodeDistances[global_index], global_index);
                }
            }
        }
        delete [] dist_exchange;
        delete [] index_exchange;
    }
    // Is any queue non-empty?
    bool non_empty_queue = PetscTools::ReplicateBool(!mActivePriorityNodeIndexQueue.empty());
    return non_empty_queue;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::WorkOnLocalQueue(std::vector<double>& rNodeDistances)
{
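    // In single-target (A*) mode the local queue is drained in bursts of roughly this
    // many pops, so that updates from other processes can be folded in between bursts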
    unsigned pop_stop = mNumNodes/(PetscTools::GetNumProcs()*20);
    while (!mActivePriorityNodeIndexQueue.empty())
    {
        // Get the next index in the queue
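        // (std::priority_queue is a max-heap, so entries are pushed with negated
        // distances; negating top() recovers the smallest queued distance.)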
        unsigned current_node_index = mActivePriorityNodeIndexQueue.top().second;
        double distance_when_queued = -mActivePriorityNodeIndexQueue.top().first;
        mActivePriorityNodeIndexQueue.pop();

        // Only act on nodes which haven't been acted on already
        // (It's possible that a better distance has been found and already been dealt with)
        if (distance_when_queued == rNodeDistances[current_node_index])
        {
            mPopCounter++;
            Node<SPACE_DIM>* p_current_node = mrMesh.GetNode(current_node_index);
            double current_heuristic = 0.0;
            if (mSingleTarget)
            {
                current_heuristic = norm_2(p_current_node->rGetLocation()-mTargetNodePoint);
            }

            // Loop over the elements containing the given node
            for (typename Node<SPACE_DIM>::ContainingElementIterator element_iterator = p_current_node->ContainingElementsBegin();
                element_iterator != p_current_node->ContainingElementsEnd();
                ++element_iterator)
            {
                // Get a pointer to the containing element
                Element<ELEMENT_DIM, SPACE_DIM>* p_containing_element = mrMesh.GetElement(*element_iterator);

                // Loop over the nodes of the element
                for (unsigned node_local_index=0;
                   node_local_index<p_containing_element->GetNumNodes();
                   node_local_index++)
                {
                    Node<SPACE_DIM>* p_neighbour_node = p_containing_element->GetNode(node_local_index);
                    unsigned neighbour_node_index = p_neighbour_node->GetIndex();

                    // Avoid revisiting the active node
                    if (neighbour_node_index != current_node_index)
                    {
                        double neighbour_heuristic = 0.0;
                        if (mSingleTarget)
                        {
                            neighbour_heuristic = norm_2(p_neighbour_node->rGetLocation()-mTargetNodePoint);
                        }
                        // Test if we have found a shorter path from the source to the neighbour through the current node
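                        // With a single target this is an A* relaxation: the edge length is
                        // re-weighted by the change in heuristic; with zero heuristics it is
                        // a plain Dijkstra update.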
                        double updated_distance = rNodeDistances[current_node_index] +
                                                  norm_2(p_neighbour_node->rGetLocation() - p_current_node->rGetLocation())
                                                  - current_heuristic + neighbour_heuristic;
                        if (updated_distance < rNodeDistances[neighbour_node_index] * (1.0-2*DBL_EPSILON))
                        {
                            rNodeDistances[neighbour_node_index] = updated_distance;
                            PushLocal(updated_distance, neighbour_node_index);
                        }
                    }
                }
            }
            if (mSingleTarget)
            {
                if (current_node_index == mTargetNodeIndex)
                {
                    // Premature termination if there is a single goal in mind (and we found it)
                    return true;
                }
                if (mPopCounter%pop_stop == 0)
                {
                    // Premature termination -- pause in case another process has already done the work
                    return false;
                }
            }
        }
    }
    return false;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
double DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::SingleDistance(unsigned sourceNodeIndex, unsigned targetNodeIndex)
{
    std::vector<unsigned> source_node_index_vector;
    source_node_index_vector.push_back(sourceNodeIndex);

    mTargetNodeIndex = targetNodeIndex; // For premature termination
    mSingleTarget = true;

    // Set up information on target (for A* guidance)
    c_vector<double, SPACE_DIM> target_point = zero_vector<double>(SPACE_DIM);
    if (mrMesh.GetDistributedVectorFactory()->IsGlobalIndexLocal(mTargetNodeIndex))
    {
        // Owner of target node sets target_point (others have zero)
        target_point = mrMesh.GetNode(mTargetNodeIndex)->rGetLocation();
    }

    // Communicate the target location from whichever process owns it to everyone
    // (summing works because all other contributions are zero)
    MPI_Allreduce(&target_point[0], &mTargetNodePoint[0], SPACE_DIM, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD);

    std::vector<double> distances;
    ComputeDistanceMap(source_node_index_vector, distances);

    // Reset target, so we don't terminate early next time.
    mSingleTarget = false;

    // Make sure that there isn't a non-empty queue from a previous calculation
    if (!mActivePriorityNodeIndexQueue.empty())
    {
        mActivePriorityNodeIndexQueue = std::priority_queue<std::pair<double, unsigned> >();
    }

    return distances[targetNodeIndex];
}

// Explicit instantiation

template class DistanceMapCalculator<1, 1>;
template class DistanceMapCalculator<1, 2>;
template class DistanceMapCalculator<2, 2>;
template class DistanceMapCalculator<1, 3>;
//template class DistanceMapCalculator<2, 3>;
template class DistanceMapCalculator<3, 3>;
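
For orientation, a minimal usage sketch follows. It is not part of DistanceMapCalculator.cpp: the slab-mesh construction call and the node indices chosen are illustrative assumptions, while the calculator interface (constructor, ComputeDistanceMap, SingleDistance) is as defined above.

#include <vector>
#include "TetrahedralMesh.hpp"
#include "DistanceMapCalculator.hpp"

void ExampleDistanceMapUsage()
{
    // Hypothetical example mesh: a unit cube with spatial step 0.1 (assumed helper)
    TetrahedralMesh<3,3> mesh;
    mesh.ConstructRegularSlabMesh(0.1, 1.0, 1.0, 1.0);

    DistanceMapCalculator<3,3> calculator(mesh);

    // Geodesic distances from node 0 to every node in the mesh
    std::vector<unsigned> source_nodes(1, 0u);
    std::vector<double> distances;
    calculator.ComputeDistanceMap(source_nodes, distances);

    // Point-to-point distance, which uses the A* early-termination path above
    double distance = calculator.SingleDistance(0u, mesh.GetNumNodes() - 1);
    (void) distance;
}

Since SingleDistance reuses ComputeDistanceMap internally and resets its single-target state afterwards, repeated point-to-point queries on the same mesh can share one calculator instance.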