Commit 033377ca authored by Markus Blatt
Prevents calling MPI_Comm_free after MPI_Finalize.

With OpenMPI there were cases where MPI_Comm_free
was called after MPI_Finalize. This was caused by the
destructor of OwnerOverlapCopyCommunication running
after MPI_Finalize.
This patch moves the destruction out of the main function
of the test. If MPI-2 functionality is available, we additionally
check whether MPI_Finalize has already been called before
freeing the communicator, and only free it if it has not.
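
For illustration only (not part of the patch, and not the actual Dune class), a minimal sketch of the same guard wrapped in a small RAII holder; the name CommHolder is invented:

#include <mpi.h>

// Owns a duplicated communicator and frees it in the destructor,
// but only while MPI is still active -- the same MPI_Finalized guard
// this commit adds to OwnerOverlapCopyCommunication.
class CommHolder {
  MPI_Comm comm_;
public:
  explicit CommHolder(MPI_Comm src) { MPI_Comm_dup(src, &comm_); }
  ~CommHolder() {
    if (comm_ == MPI_COMM_NULL)
      return;
    int wasFinalized = 0;
    MPI_Finalized(&wasFinalized);  // MPI-2: has MPI_Finalize already run?
    if (!wasFinalized)
      MPI_Comm_free(&comm_);       // safe only before MPI_Finalize
  }
};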
parent da5f82f4
@@ -662,7 +662,17 @@ namespace Dune {
     if (globalLookup_) delete globalLookup_;
     if (freecomm==true)
       if(comm!=MPI_COMM_NULL)
+        {
+#ifdef MPI_2
+          // If it is possible to query whether MPI_Finalize
+          // was called, only free the communicator before
+          // calling MPI_Finalize.
+          int wasFinalized = 0;
+          MPI_Finalized( &wasFinalized );
+          if(!wasFinalized)
+#endif
             MPI_Comm_free(&comm);
+        }
   }
 private:
......
@@ -11,20 +11,9 @@
 #include <dune/istl/schwarz.hh>
 #include "anisotropic.hh"
-int main(int argc, char** argv)
+template<int BS>
+void testHierarchy(int N)
 {
-  MPI_Init(&argc, &argv);
-  const int BS=1;
-  int N=10;
-  if(argc>1)
-    N = atoi(argv[1]);
-  int procs, rank;
-  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-  MPI_Comm_size(MPI_COMM_WORLD, &procs);
   typedef int LocalId;
   typedef int GlobalId;
   typedef Dune::OwnerOverlapCopyCommunication<LocalId,GlobalId> Communication;
@@ -63,7 +52,7 @@ int main(int argc, char** argv)
   Criterion criterion(100,4);
   Dune::Timer timer;
-  hierarchy.build<OverlapFlags>(criterion);
+  hierarchy.template build<OverlapFlags>(criterion);
   hierarchy.coarsenVector(vh);
   std::cout<<"Building hierarchy took "<<timer.elapsed()<<std::endl;
@@ -77,6 +66,24 @@ int main(int argc, char** argv)
   std::vector<std::size_t> data;
   hierarchy.getCoarsestAggregatesOnFinest(data);
 }
+int main(int argc, char** argv)
+{
+  MPI_Init(&argc, &argv);
+  const int BS=1;
+  int N=10;
+  if(argc>1)
+    N = atoi(argv[1]);
+  int procs, rank;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  MPI_Comm_size(MPI_COMM_WORLD, &procs);
+  testHierarchy<BS>(N);
+  MPI_Finalize();
......
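
The test restructuring works because the Communication object and the hierarchy now go out of scope at the end of testHierarchy, i.e. before main reaches MPI_Finalize; the call also has to be spelled hierarchy.template build<OverlapFlags>(criterion), presumably because the hierarchy's type now depends on the template parameter BS. A hypothetical minimal example of the same ordering idea (names invented, not taken from the patch):

#include <mpi.h>

// All MPI resources are created and destroyed inside this function,
// so their cleanup happens while MPI is still initialized.
void doWork() {
  MPI_Comm comm;
  MPI_Comm_dup(MPI_COMM_WORLD, &comm);
  // ... use comm ...
  MPI_Comm_free(&comm);   // runs here, before MPI_Finalize
}

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  doWork();               // every communicator lifetime ends in this call
  MPI_Finalize();         // nothing is freed after this point
  return 0;
}

Had the communicator been owned by an object declared directly in main, its destructor would only run when main returns, i.e. after MPI_Finalize, which is exactly the failure the commit message describes.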