DistanceMapCalculator.cpp

/*

Copyright (C) University of Oxford, 2005-2011

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.

Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.

You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.

*/

#include "DistanceMapCalculator.hpp"
#include "DistributedTetrahedralMesh.hpp" // For dynamic cast

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::DistanceMapCalculator(
            AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh)
    : mrMesh(rMesh),
      mWorkOnEntireMesh(true),
      mNumHalosPerProcess(NULL),
      mRoundCounter(0u),
      mPopCounter(0u),
      mTargetNodeIndex(UINT_MAX),
      mSingleTarget(false)
{
    mNumNodes = mrMesh.GetNumNodes();

    DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>* p_distributed_mesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>*>(&mrMesh);
    if (PetscTools::IsSequential() || p_distributed_mesh == NULL)
    {
        // It's a non-distributed mesh
        mLo = 0;
        mHi = mNumNodes;
    }
    else
    {
        // It's a parallel (distributed) mesh (p_distributed_mesh is non-null and we are running in parallel)
        mWorkOnEntireMesh = false;
        mLo = mrMesh.GetDistributedVectorFactory()->GetLow();
        mHi = mrMesh.GetDistributedVectorFactory()->GetHigh();

        // Get local halo information
        p_distributed_mesh->GetHaloNodeIndices(mHaloNodeIndices);

        // Share information on the number of halo nodes
        unsigned my_size = mHaloNodeIndices.size();
        mNumHalosPerProcess = new unsigned[PetscTools::GetNumProcs()];
        MPI_Allgather(&my_size, 1, MPI_UNSIGNED, mNumHalosPerProcess, 1, MPI_UNSIGNED, PETSC_COMM_WORLD);
    }
}
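
/*
 * A minimal usage sketch (illustrative only, not part of the original file;
 * the cuboid mesh set-up is an assumption about the caller):
 *
 *     TetrahedralMesh<3,3> mesh;
 *     mesh.ConstructCuboid(10, 10, 10);             // sequential mesh, so mLo=0 and mHi=mNumNodes
 *     DistanceMapCalculator<3,3> calculator(mesh);
 *     std::vector<unsigned> sources(1, 0u);         // seed the map from node 0
 *     std::vector<double> distances;
 *     calculator.ComputeDistanceMap(sources, distances);
 *     // distances[i] now holds the geodesic (graph) distance from node 0 to node i
 */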

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::ComputeDistanceMap(
        const std::vector<unsigned>& rSourceNodeIndices,
        std::vector<double>& rNodeDistances)
{
    rNodeDistances.resize(mNumNodes);
    for (unsigned index=0; index<mNumNodes; index++)
    {
        rNodeDistances[index] = DBL_MAX;
    }
    assert(mActivePriorityNodeIndexQueue.empty());

    if (mSingleTarget)
    {
        assert(rSourceNodeIndices.size() == 1);
        unsigned source_node_index = rSourceNodeIndices[0];

        // We need to make sure this is local, so that we can use the geometry
        if (mLo<=source_node_index && source_node_index<mHi)
        {
            double heuristic_correction = norm_2(mrMesh.GetNode(source_node_index)->rGetLocation()-mTargetNodePoint);
            PushLocal(heuristic_correction, source_node_index);
            rNodeDistances[source_node_index] = heuristic_correction;
        }
    }
    else
    {
        for (unsigned source_index=0; source_index<rSourceNodeIndices.size(); source_index++)
        {
            unsigned source_node_index = rSourceNodeIndices[source_index];
            PushLocal(0.0, source_node_index);
            rNodeDistances[source_node_index] = 0.0;
        }
    }

    bool non_empty_queue = true;
    mRoundCounter = 0;
    mPopCounter = 0;
    while (non_empty_queue)
    {
        bool termination = WorkOnLocalQueue(rNodeDistances);

        // Sanity check that we aren't doing this too many times
        if (mRoundCounter++ > 3 * PetscTools::GetNumProcs())
        {
            // This line will be hit if there's a parallel distributed mesh with a really bad partition
            NEVER_REACHED;
        }
        if (mSingleTarget && PetscTools::ReplicateBool(termination))
        {
            // A single process found the target already
            break;
        }
        non_empty_queue = UpdateQueueFromRemote(rNodeDistances);
    }

    if (mWorkOnEntireMesh == false)
    {
        // Update all processes with the best values from everywhere
        // Take a local copy
        std::vector<double> local_distances = rNodeDistances;

        // Share it back into the vector
        MPI_Allreduce(&local_distances[0], &rNodeDistances[0], mNumNodes, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
    }
}
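
/*
 * Summary of the loop above (inferred from the code, not stated elsewhere):
 * each round runs a Dijkstra-like sweep of the local queue (WorkOnLocalQueue),
 * then swaps halo-node distances between processes (UpdateQueueFromRemote),
 * repeating until every process's queue is empty. In the distributed case the
 * final MPI_Allreduce with MPI_MIN takes the entry-wise minimum over
 * processes; e.g. if two processes hold {3.0, DBL_MAX} and {DBL_MAX, 4.0},
 * all processes end up with {3.0, 4.0}.
 */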

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::UpdateQueueFromRemote(std::vector<double>& rNodeDistances)
{
    if (mWorkOnEntireMesh)
    {
        // This update does nowt
        return !mActivePriorityNodeIndexQueue.empty();
    }
    for (unsigned bcast_process=0; bcast_process<PetscTools::GetNumProcs(); bcast_process++)
    {
        // Each process packs its halo-node distances and indices into 1-d arrays
        double* dist_exchange = new double[ mNumHalosPerProcess[bcast_process] ];
        unsigned* index_exchange = new unsigned[ mNumHalosPerProcess[bcast_process] ];
        if (PetscTools::GetMyRank() == bcast_process)
        {
            // Broadcaster fills the array
            for (unsigned index=0; index<mHaloNodeIndices.size(); index++)
            {
                dist_exchange[index] = rNodeDistances[mHaloNodeIndices[index]];
                index_exchange[index] = mHaloNodeIndices[index];
            }
        }

        /*
         * Broadcast - this could be done by casting indices to double and
         * packing everything into a single array. That would be better for
         * latency, but this is probably more readable.
         */
        MPI_Bcast(dist_exchange, mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(index_exchange, mNumHalosPerProcess[bcast_process], MPI_UNSIGNED,
                  bcast_process, PETSC_COMM_WORLD);
        if (PetscTools::GetMyRank() != bcast_process)
        {
            // Receiving processes take updates
            for (unsigned index=0; index<mNumHalosPerProcess[bcast_process]; index++)
            {
                unsigned global_index = index_exchange[index];
                // Is it a better answer?
                if (dist_exchange[index] < rNodeDistances[global_index]*(1.0-2*DBL_EPSILON))
                {
                    // Copy across - this may be unnecessary when PushLocal isn't going to push because it's not local
                    rNodeDistances[global_index] = dist_exchange[index];
                    PushLocal(rNodeDistances[global_index], global_index);
                }
            }
        }
        delete [] dist_exchange;
        delete [] index_exchange;
    }
    // Is any queue non-empty?
    bool non_empty_queue = PetscTools::ReplicateBool(!mActivePriorityNodeIndexQueue.empty());
    return non_empty_queue;
}
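
/*
 * Note on PushLocal (declared in DistanceMapCalculator.hpp): the queue,
 * std::priority_queue<std::pair<double, unsigned> >, is a max-heap, so
 * distances are pushed negated to make top() yield the smallest pending
 * distance; WorkOnLocalQueue undoes this with
 * -mActivePriorityNodeIndexQueue.top().first. A sketch of the likely shape
 * (the actual body lives in the header; the locality check is an assumption):
 *
 *     void PushLocal(double priority, unsigned nodeIndex)
 *     {
 *         if (mLo <= nodeIndex && nodeIndex < mHi) // only queue locally-owned nodes
 *         {
 *             mActivePriorityNodeIndexQueue.push(std::make_pair(-priority, nodeIndex));
 *         }
 *     }
 */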

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::WorkOnLocalQueue(std::vector<double>& rNodeDistances)
{
    unsigned pop_stop = mNumNodes/(PetscTools::GetNumProcs()*20);
    while (!mActivePriorityNodeIndexQueue.empty())
    {
        // Get the next index in the queue
        unsigned current_node_index = mActivePriorityNodeIndexQueue.top().second;
        double distance_when_queued = -mActivePriorityNodeIndexQueue.top().first;
        mActivePriorityNodeIndexQueue.pop();

        // Only act on nodes which haven't been acted on already
        // (It's possible that a better distance has been found and already been dealt with)
        if (distance_when_queued == rNodeDistances[current_node_index])
        {
            mPopCounter++;
            Node<SPACE_DIM>* p_current_node = mrMesh.GetNode(current_node_index);
            double current_heuristic = 0.0;
            if (mSingleTarget)
            {
                current_heuristic = norm_2(p_current_node->rGetLocation()-mTargetNodePoint);
            }

            // Loop over the elements containing the given node
            for (typename Node<SPACE_DIM>::ContainingElementIterator element_iterator = p_current_node->ContainingElementsBegin();
                element_iterator != p_current_node->ContainingElementsEnd();
                ++element_iterator)
            {
                // Get a pointer to the containing element
                Element<ELEMENT_DIM, SPACE_DIM>* p_containing_element = mrMesh.GetElement(*element_iterator);

                // Loop over the nodes of the element
                for (unsigned node_local_index=0;
                   node_local_index<p_containing_element->GetNumNodes();
                   node_local_index++)
                {
                    Node<SPACE_DIM>* p_neighbour_node = p_containing_element->GetNode(node_local_index);
                    unsigned neighbour_node_index = p_neighbour_node->GetIndex();

                    // Avoid revisiting the active node
                    if (neighbour_node_index != current_node_index)
                    {
                        double neighbour_heuristic = 0.0;
                        if (mSingleTarget)
                        {
                            neighbour_heuristic = norm_2(p_neighbour_node->rGetLocation()-mTargetNodePoint);
                        }
                        // Test if we have found a shorter path from the source to the neighbour through the current node
                        double updated_distance = rNodeDistances[current_node_index] +
                                                  norm_2(p_neighbour_node->rGetLocation() - p_current_node->rGetLocation())
                                                  - current_heuristic + neighbour_heuristic;
                        if (updated_distance < rNodeDistances[neighbour_node_index] * (1.0-2*DBL_EPSILON))
                        {
                            rNodeDistances[neighbour_node_index] = updated_distance;
                            PushLocal(updated_distance, neighbour_node_index);
                        }
                    }
                }
            }
            if (mSingleTarget)
            {
                if (current_node_index == mTargetNodeIndex)
                {
                    // Premature termination if there is a single goal in mind (and we found it)
                    return true;
                }
                if (mPopCounter%pop_stop == 0)
                {
                    // Premature termination -- in case the work has been done
                    return false;
                }
            }
        }
    }
    return false;
}
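
/*
 * Why the heuristic terms in WorkOnLocalQueue are safe (standard A*
 * reweighting, not specific to this file): with h(u) = |u - target|, each
 * edge (u,v) of length |uv| is traversed at cost
 *
 *     w'(u,v) = |uv| - h(u) + h(v) >= 0   (triangle inequality),
 *
 * so Dijkstra on the reweighted graph behaves as A* on the original. Along a
 * path from the source s the h terms telescope: seeding the source with h(s)
 * (heuristic_correction in ComputeDistanceMap) makes the stored value at any
 * node v equal dist(v) + h(v), and since h(target) = 0 the value reported at
 * the target is the true distance.
 */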

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
double DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::SingleDistance(unsigned sourceNodeIndex, unsigned targetNodeIndex)
{
    std::vector<unsigned> source_node_index_vector;
    source_node_index_vector.push_back(sourceNodeIndex);

    mTargetNodeIndex = targetNodeIndex; // For premature termination
    mSingleTarget = true;

    // Set up information on target (for A* guidance)
    c_vector<double, SPACE_DIM> target_point = zero_vector<double>(SPACE_DIM);
    if (mrMesh.GetDistributedVectorFactory()->IsGlobalIndexLocal(mTargetNodeIndex))
    {
        // Owner of target node sets target_point (others have zero)
        target_point = mrMesh.GetNode(mTargetNodeIndex)->rGetLocation();
    }

    // Communicate the target location from whichever process owns it to everyone
    MPI_Allreduce(&target_point[0], &mTargetNodePoint[0], SPACE_DIM, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD);

    std::vector<double> distances;
    ComputeDistanceMap(source_node_index_vector, distances);

    // Reset target, so we don't terminate early next time.
    mSingleTarget = false;

    // Make sure that there isn't a non-empty queue from a previous calculation
    if (!mActivePriorityNodeIndexQueue.empty())
    {
        mActivePriorityNodeIndexQueue = std::priority_queue<std::pair<double, unsigned> >();
    }

    return distances[targetNodeIndex];
}
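
/*
 * Usage sketch for SingleDistance (illustrative only; the node indices and
 * the mesh are assumptions, as in the earlier example):
 *
 *     DistanceMapCalculator<3,3> calculator(mesh);
 *     double d = calculator.SingleDistance(0u, 42u); // geodesic distance from node 0 to node 42
 *
 * Only this code path sets mSingleTarget, so the A* goal-direction and the
 * periodic pop_stop early exit in WorkOnLocalQueue apply here alone.
 */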

// Explicit instantiation

template class DistanceMapCalculator<1, 1>;
template class DistanceMapCalculator<1, 2>;
template class DistanceMapCalculator<2, 2>;
template class DistanceMapCalculator<1, 3>;
//template class DistanceMapCalculator<2, 3>;
template class DistanceMapCalculator<3, 3>;