[bugfix] Fix matrixredistribute for matrices with scalar block_type and add a test

2 files changed: +15 −10
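The hunks below generalize the test's template parameter from a block size (int BS) to the matrix block type itself, so the same test body can be instantiated for blocked and for scalar matrix entries. As a rough orientation (the type aliases below are illustrative, not part of the patch), these are the two kinds of entry types the parameter can now take:

#include <dune/common/fmatrix.hh>
#include <dune/istl/bcrsmatrix.hh>

// Blocked entries: every matrix entry is a dense BSxBS block (here 2x2),
// the only case the old testRepart<int BS> could exercise.
using BlockedMatrix = Dune::BCRSMatrix<Dune::FieldMatrix<double, 2, 2>>;

// Scalar entries ("scalar block_type"): every entry is a plain double.
// This is the case the bugfix targets and the new testRepart<double>
// instantiation covers.
using ScalarMatrix = Dune::BCRSMatrix<double>;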
@@ -34,25 +34,22 @@ void MPI_err_handler(MPI_Comm *, int *err_code, ...){
   throw MPIError(s, *err_code);
 }
-template<int BS>
+template<class MatrixBlock>
 int testRepart(int N, int coarsenTarget)
 {
   std::cout<<"==================================================="<<std::endl;
-  std::cout<<"BS="<<BS<<" N="<<N<<" coarsenTarget="<<coarsenTarget<<std::endl;
+  std::cout<<" N="<<N<<" coarsenTarget="<<coarsenTarget<<std::endl;
   int procs, rank;
   MPI_Comm_rank(MPI_COMM_WORLD, &rank);
   MPI_Comm_size(MPI_COMM_WORLD, &procs);
-  typedef Dune::FieldMatrix<double,BS,BS> MatrixBlock;
   typedef Dune::BCRSMatrix<MatrixBlock> BCRSMat;
   typedef Dune::bigunsignedint<56> GlobalId;
   typedef Dune::OwnerOverlapCopyCommunication<GlobalId> Communication;
   int n;
-  N/=BS;
   Communication comm(MPI_COMM_WORLD);
   BCRSMat mat = setupAnisotropic2d<MatrixBlock>(N, comm.indexSet(), comm.communicator(), &n, 1);
@@ -138,6 +135,8 @@ int main(int argc, char** argv)
     return 1;
   }
-  testRepart<1>(N,coarsenTarget);
+  testRepart<Dune::FieldMatrix<double, 1, 1>>(N,coarsenTarget);
+  testRepart<Dune::FieldMatrix<double, 2, 2>>(N/2,coarsenTarget);
+  testRepart<double>(N,coarsenTarget);
   MPI_Finalize();
 }
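For context, here is a minimal sketch (not part of the patch; sizes and names chosen for illustration) of the kind of matrix the new testRepart<double> instantiation ultimately feeds into the redistribution code: a BCRSMatrix whose block_type is plain double, assembled with the standard row-wise build mode.

#include <cstddef>
#include <dune/istl/bcrsmatrix.hh>

int main()
{
  using ScalarMatrix = Dune::BCRSMatrix<double>;

  const std::size_t n = 4;
  // Reserve room for a 1D Laplacian pattern: diagonal plus direct neighbours.
  ScalarMatrix mat(n, n, 3 * n - 2, ScalarMatrix::row_wise);

  // Set up the sparsity pattern row by row.
  for (auto row = mat.createbegin(); row != mat.createend(); ++row) {
    const auto i = row.index();
    if (i > 0)
      row.insert(i - 1);
    row.insert(i);
    if (i + 1 < n)
      row.insert(i + 1);
  }

  // Fill the entries; each entry is a scalar, not a FieldMatrix block.
  for (auto row = mat.begin(); row != mat.end(); ++row)
    for (auto col = row->begin(); col != row->end(); ++col)
      *col = (col.index() == row.index()) ? 2.0 : -1.0;

  return 0;
}

In the actual test the matrix is additionally set up by setupAnisotropic2d and carries parallel index information through OwnerOverlapCopyCommunication before being repartitioned.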