DistanceMapCalculator.cpp

/*

Copyright (C) University of Oxford, 2005-2011

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Chaste is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.

Chaste is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details. The offer of Chaste under the terms of the
License is subject to the License being interpreted in accordance with
English Law and subject to any action against the University of Oxford
being under the jurisdiction of the English Courts.

You should have received a copy of the GNU Lesser General Public License
along with Chaste. If not, see <http://www.gnu.org/licenses/>.

*/

#include <cfloat>  // For DBL_MAX
#include <climits> // For UINT_MAX

#include "DistanceMapCalculator.hpp"
#include "DistributedTetrahedralMesh.hpp" // For dynamic cast
//#include "Debug.hpp"

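// Overview: distances are computed with Dijkstra's algorithm on the graph
// whose edges join nodes that share a containing element, so the result is a
// graph-geodesic approximation to the true geodesic distance.  In parallel,
// each process repeatedly sweeps its local priority queue and then exchanges
// distances at halo nodes until every queue has drained.  For a single
// source/target pair an A* heuristic (straight-line distance to the target)
// guides the search and allows premature termination.
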
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::DistanceMapCalculator(
            AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh)
    : mrMesh(rMesh),
      mWorkOnEntireMesh(true),
      mNumHalosPerProcess(NULL),
      mRoundCounter(0u),
      mPopCounter(0u),
      mTargetNodeIndex(UINT_MAX),
      mSingleTarget(false)
{
    mNumNodes = mrMesh.GetNumNodes();

    DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>* p_distributed_mesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>*>(&mrMesh);
    if (PetscTools::IsSequential() || p_distributed_mesh == NULL)
    {
        // It's a non-distributed mesh: this process owns every node
        mLo = 0;
        mHi = mNumNodes;
    }
    else
    {
        // It's a distributed mesh (p_distributed_mesh is non-null and we are running in parallel)
        mWorkOnEntireMesh = false;
        mLo = mrMesh.GetDistributedVectorFactory()->GetLow();
        mHi = mrMesh.GetDistributedVectorFactory()->GetHigh();
        // Get local halo information
        p_distributed_mesh->GetHaloNodeIndices(mHaloNodeIndices);
        // Share the number of halo nodes on each process
        unsigned my_size = mHaloNodeIndices.size();
        mNumHalosPerProcess = new unsigned[PetscTools::GetNumProcs()];
        MPI_Allgather(&my_size, 1, MPI_UNSIGNED,
                      mNumHalosPerProcess, 1, MPI_UNSIGNED, PETSC_COMM_WORLD);
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::ComputeDistanceMap(
        const std::vector<unsigned>& rSourceNodeIndices,
        std::vector<double>& rNodeDistances)
{
    rNodeDistances.resize(mNumNodes);
    for (unsigned index=0; index<mNumNodes; index++)
    {
        rNodeDistances[index] = DBL_MAX;
    }
    assert(mActivePriorityNodeIndexQueue.empty());

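    // Single-target (A*) mode: the values stored and queued are f = g + h,
    // where g is the path length from the source and h is the straight-line
    // distance to the target.  The source is therefore seeded with h(source);
    // at the target h = 0, so the stored value is the true path length.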
    if (mSingleTarget)
    {
        assert(rSourceNodeIndices.size() == 1);
        unsigned source_node_index = rSourceNodeIndices[0];
        // Only the process owning the source seeds it: the node's geometry is
        // needed to evaluate the heuristic
        if (mLo <= source_node_index && source_node_index < mHi)
        {
            double heuristic_correction = norm_2(mrMesh.GetNode(source_node_index)->rGetLocation() - mTargetNodePoint);
            PushLocal(heuristic_correction, source_node_index);
            rNodeDistances[source_node_index] = heuristic_correction;
        }
    }
    else
    {
        for (unsigned source_index=0; source_index<rSourceNodeIndices.size(); source_index++)
        {
            unsigned source_node_index = rSourceNodeIndices[source_index];
            PushLocal(0.0, source_node_index);
            rNodeDistances[source_node_index] = 0.0;
        }
    }

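    // Alternate sweeps of the local priority queue with exchanges of
    // halo-node distances, until every process's queue has drained (or, in
    // single-target mode, some process has popped the target).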
    bool non_empty_queue = true;
    mRoundCounter = 0;
    mPopCounter = 0;
    while (non_empty_queue)
    {
        bool termination = WorkOnLocalQueue(rNodeDistances);
        // Sanity check that we aren't looping many more times than expected
        if (mRoundCounter++ > 3 * PetscTools::GetNumProcs())
        {
            // This line will be hit if a parallel distributed mesh has a really bad partition
            NEVER_REACHED;
        }
        if (mSingleTarget && PetscTools::ReplicateBool(termination))
        {
            // A single process found the target already
            break;
        }
        non_empty_queue = UpdateQueueFromRemote(rNodeDistances);
    }

    if (mWorkOnEntireMesh == false)
    {
        // Update all processes with the best values from everywhere.
        // Take a local copy...
        std::vector<double> local_distances = rNodeDistances;
        // ...and minimise it across all processes back into the output vector
        MPI_Allreduce(&local_distances[0], &rNodeDistances[0], mNumNodes, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
    }
}

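// Share the best-known distances at halo nodes between processes, re-queueing
// any node whose distance improves.  Returns true if any process still has
// queued work.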
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::UpdateQueueFromRemote(std::vector<double>& rNodeDistances)
{
    if (mWorkOnEntireMesh)
    {
        // Nothing to exchange: this process works on the entire mesh
        return !mActivePriorityNodeIndexQueue.empty();
    }
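    // Round-robin: each process in turn broadcasts the current distances at
    // its halo nodes, and the other processes adopt any strictly better values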
    for (unsigned bcast_process=0; bcast_process<PetscTools::GetNumProcs(); bcast_process++)
    {
        // The broadcasting process packs its halo distances and node indices into flat arrays
        double* dist_exchange = new double[mNumHalosPerProcess[bcast_process]];
        unsigned* index_exchange = new unsigned[mNumHalosPerProcess[bcast_process]];
        if (PetscTools::GetMyRank() == bcast_process)
        {
            // Broadcaster fills the arrays
            for (unsigned index=0; index<mHaloNodeIndices.size(); index++)
            {
                dist_exchange[index] = rNodeDistances[mHaloNodeIndices[index]];
                index_exchange[index] = mHaloNodeIndices[index];
            }
        }

        // Broadcast.  This could be done by casting the indices to double and
        // packing everything into a single array, which would be better for
        // latency, but two broadcasts are more readable.
        MPI_Bcast(dist_exchange, mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(index_exchange, mNumHalosPerProcess[bcast_process], MPI_UNSIGNED,
                  bcast_process, PETSC_COMM_WORLD);
        if (PetscTools::GetMyRank() != bcast_process)
        {
            // Receiving processes take updates
            for (unsigned index=0; index<mNumHalosPerProcess[bcast_process]; index++)
            {
                unsigned global_index = index_exchange[index];
                // Is it a better answer?
                if (dist_exchange[index] < rNodeDistances[global_index]*(1.0-2*DBL_EPSILON))
                {
                    // Copy across.  (The copy may be unnecessary when PushLocal
                    // isn't going to push because the node isn't local.)
                    rNodeDistances[global_index] = dist_exchange[index];
                    PushLocal(rNodeDistances[global_index], global_index);
                }
            }
        }
        delete[] dist_exchange;
        delete[] index_exchange;
    }
    // Is any queue non-empty?
    bool non_empty_queue = PetscTools::ReplicateBool(!mActivePriorityNodeIndexQueue.empty());
    return non_empty_queue;
}

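// Run Dijkstra (or A* in single-target mode) over the local priority queue.
// Note that std::priority_queue is a max-heap, so PushLocal stores distances
// negated; negating again on pop retrieves the smallest distance first.
// Returns true if this process has popped the single target node.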
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::WorkOnLocalQueue(std::vector<double>& rNodeDistances)
{
    // In single-target mode, pause every pop_stop pops so that remote updates
    // can be taken on board
    unsigned pop_stop = mNumNodes/(PetscTools::GetNumProcs()*20);
    while (!mActivePriorityNodeIndexQueue.empty())
    {
        // Get the next index in the queue
        unsigned current_node_index = mActivePriorityNodeIndexQueue.top().second;
        double distance_when_queued = -mActivePriorityNodeIndexQueue.top().first;
        mActivePriorityNodeIndexQueue.pop();
        // Only act on nodes which haven't been settled already: a better
        // distance may have been found and dealt with since this entry was queued
        if (distance_when_queued == rNodeDistances[current_node_index])
        {
            mPopCounter++;
            Node<SPACE_DIM>* p_current_node = mrMesh.GetNode(current_node_index);
            double current_heuristic = 0.0;
            if (mSingleTarget)
            {
                current_heuristic = norm_2(p_current_node->rGetLocation() - mTargetNodePoint);
            }
            // Loop over the elements containing the given node
            for (typename Node<SPACE_DIM>::ContainingElementIterator element_iterator = p_current_node->ContainingElementsBegin();
                 element_iterator != p_current_node->ContainingElementsEnd();
                 ++element_iterator)
            {
                // Get a pointer to the containing element
                Element<ELEMENT_DIM, SPACE_DIM>* p_containing_element = mrMesh.GetElement(*element_iterator);

                // Loop over the nodes of the element
                for (unsigned node_local_index=0;
                     node_local_index<p_containing_element->GetNumNodes();
                     node_local_index++)
                {
                    Node<SPACE_DIM>* p_neighbour_node = p_containing_element->GetNode(node_local_index);
                    unsigned neighbour_node_index = p_neighbour_node->GetIndex();

                    // Avoid revisiting the active node
                    if (neighbour_node_index != current_node_index)
                    {
                        double neighbour_heuristic = 0.0;
                        if (mSingleTarget)
                        {
                            neighbour_heuristic = norm_2(p_neighbour_node->rGetLocation() - mTargetNodePoint);
                        }
                        // Test whether the path through the current node is shorter.
                        // Stored values are f = g + h, so the step removes the current
                        // node's heuristic and adds the neighbour's:
                        // f_neighbour = f_current - h_current + edge_length + h_neighbour
                        double updated_distance = rNodeDistances[current_node_index] +
                                                  norm_2(p_neighbour_node->rGetLocation() - p_current_node->rGetLocation())
                                                  - current_heuristic + neighbour_heuristic;
                        // The relative tolerance guards against requeueing on floating-point noise
                        if (updated_distance < rNodeDistances[neighbour_node_index] * (1.0-2*DBL_EPSILON))
                        {
                            rNodeDistances[neighbour_node_index] = updated_distance;
                            PushLocal(updated_distance, neighbour_node_index);
                        }
                    }
                } // Node
            } // Element
            if (mSingleTarget)
            {
                if (current_node_index == mTargetNodeIndex)
                {
                    // Premature termination: there is a single goal in mind and we have found it
                    return true;
                }
                // Guard against pop_stop == 0 on very small meshes
                if (pop_stop > 0u && mPopCounter % pop_stop == 0)
                {
                    // Pause periodically, in case another process has already done the work
                    return false;
                }
            }
        } // If not already settled
    } // While queue not empty
    return false;
}

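// Compute the distance between one source node and one target node, using
// the A* heuristic and premature termination set up above.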
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
double DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::SingleDistance(unsigned sourceNodeIndex, unsigned targetNodeIndex)
{
    std::vector<unsigned> source_node_index_vector;
    source_node_index_vector.push_back(sourceNodeIndex);

    mTargetNodeIndex = targetNodeIndex; // For premature termination
    mSingleTarget = true;

    // Set up information on the target (for A* guidance)
    c_vector<double, SPACE_DIM> target_point = zero_vector<double>(SPACE_DIM);
    if (mrMesh.GetDistributedVectorFactory()->IsGlobalIndexLocal(mTargetNodeIndex))
    {
        // The owner of the target node sets target_point (all others leave it zero)
        target_point = mrMesh.GetNode(mTargetNodeIndex)->rGetLocation();
    }
    // Communicate the target location from its owner to every process
    MPI_Allreduce(&target_point[0], &mTargetNodePoint[0], SPACE_DIM, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD);

    std::vector<double> distances;
    ComputeDistanceMap(source_node_index_vector, distances);

    // Reset the target, so we don't terminate early next time
    mSingleTarget = false;

    // Make sure that there isn't a non-empty queue from a previous calculation
    if (!mActivePriorityNodeIndexQueue.empty())
    {
        mActivePriorityNodeIndexQueue = std::priority_queue<std::pair<double, unsigned> >();
    }

    return distances[targetNodeIndex];
}

// Explicit instantiation

template class DistanceMapCalculator<1, 1>;
template class DistanceMapCalculator<1, 2>;
template class DistanceMapCalculator<2, 2>;
template class DistanceMapCalculator<1, 3>;
//template class DistanceMapCalculator<2, 3>;
template class DistanceMapCalculator<3, 3>;
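
// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the original file).  It assumes a
// TetrahedralMesh<3,3> built with ConstructRegularSlabMesh from the Chaste
// mesh API; any AbstractTetrahedralMesh subclass should work the same way.
//
//     #include "DistanceMapCalculator.hpp"
//     #include "TetrahedralMesh.hpp"
//
//     TetrahedralMesh<3,3> mesh;
//     mesh.ConstructRegularSlabMesh(0.1, 1.0, 1.0, 1.0); // unit cube, step 0.1
//     DistanceMapCalculator<3,3> calculator(mesh);
//
//     // Graph-geodesic distances from node 0 to every node in the mesh
//     std::vector<unsigned> sources(1, 0u);
//     std::vector<double> distances;
//     calculator.ComputeDistanceMap(sources, distances);
//
//     // Point-to-point distance, using A* with premature termination
//     double d = calculator.SingleDistance(0u, mesh.GetNumNodes() - 1);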
