Commit e9fe3769 authored by Markus Blatt

Moving the documentation for the parallel index sets to dune-common.

The index sets themselves are already there.

Step 1: copying files from dune-istl to dune-common

[[Imported from SVN: r7110]]
parent feba789e
# $Id$
# also build these sub directories
-SUBDIRS = doxygen buildsystem
+SUBDIRS = doxygen buildsystem comm
# setting like in dune-web
CURDIR=doc
......
# $Id: Makefile.am 1681 2012-08-31 11:30:17Z gruenich $
SUBDIRS = figures
MPIPROGRAMS = indexset poosc08 poosc08_test
# programs just to build when "make check" is used
check_PROGRAMS = $(MPIPROGRAMS)
# list of tests to run
TESTS = $(MPIPROGRAMS)
if BUILD_DOCS
DOCFILES = communication.pdf
EXTRA_DIST = $(DOCFILES)
EXTRAINSTALL = $(DOCFILES)
endif
# setting like in dune-web
CURDIR=doc/istl/comm
BASEDIR=../../..
# install the docs
docdir=$(datadir)/doc/dune-istl/comm
include $(top_srcdir)/am/latex
include $(top_srcdir)/am/webstuff
include $(top_srcdir)/am/global-rules
poosc08_SOURCES = poosc08.cc
poosc08_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
poosc08_LDFLAGS = $(AM_LDFLAGS) \
$(DUNEMPILDFLAGS)
poosc08_LDADD = \
$(DUNEMPILIBS) \
$(LDADD)
poosc08_test_SOURCES = poosc08_test.cc
poosc08_test_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
poosc08_test_LDFLAGS = $(AM_LDFLAGS) \
$(DUNEMPILDFLAGS)
poosc08_test_LDADD = \
$(DUNEMPILIBS) \
$(LDADD)
indexset_SOURCES = indexset.cc buildindexset.hh reverse.hh
indexset_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
indexset_LDFLAGS = $(AM_LDFLAGS) \
$(DUNEMPILDFLAGS)
indexset_LDADD = \
$(DUNEMPILIBS) \
$(LDADD)
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
// $Id: buildindexset.hh 1565 2012-04-30 22:04:34Z mblatt $
#ifndef BUILDINDEXSET_HH
#define BUILDINDEXSET_HH
#include <dune/common/parallel/indexset.hh>
#include <dune/common/parallel/plocalindex.hh>
/**
* @brief Flag for marking the indices.
*/
enum Flag {owner, overlap};
// The type of local index we use
typedef Dune::ParallelLocalIndex<Flag> LocalIndex;
/**
* @brief Add indices to the example index set.
* @param comm The communicator; only its rank is used.
* @param indexSet The index set to build.
*/
template<class C, class TG, int N>
void build(C& comm, Dune::ParallelIndexSet<TG,LocalIndex,N>& indexSet)
{
// The rank of our process
int rank=comm.rank();
// Indicate that we add or remove indices.
indexSet.beginResize();
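// Between beginResize() and endResize() index pairs of the form
// (global index, ParallelLocalIndex(local index, attribute, isPublic))
// may be added. The last argument (always true here) marks an index as
// public, i.e. it may also be known to other processes. Note that the
// global indices 0 and 5 are added on both ranks, once with the owner
// and once with the overlap attribute.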
if(rank==0) {
indexSet.add(0, LocalIndex(0,overlap,true));
indexSet.add(2, LocalIndex(1,owner,true));
indexSet.add(6, LocalIndex(2,owner,true));
indexSet.add(3, LocalIndex(3,owner,true));
indexSet.add(5, LocalIndex(4,owner,true));
}
if(rank==1) {
indexSet.add(0, LocalIndex(0,owner,true));
indexSet.add(1, LocalIndex(1,owner,true));
indexSet.add(7, LocalIndex(2,owner,true));
indexSet.add(5, LocalIndex(3,overlap,true));
indexSet.add(4, LocalIndex(4,owner,true));
}
// Modification is over
indexSet.endResize();
}
#endif
@InProceedings{ISTL,
author = {Markus Blatt and Peter Bastian},
title = {The Iterative Solver Template Library},
booktitle = {Applied Parallel Computing. State of the Art in Scientific Computing},
editor = {Bo K\r{a}gstr\"om and Erik Elmroth and Jack Dongarra and Jerzy Wa\'sniewski},
year = 2007,
volume = 4699,
series = {Lecture Notes in Computer Science},
publisher = {Springer},
pages = {666--675}
}
@Article{dune08-1,
author = {Peter Bastian and Markus Blatt and Andreas Dedner and Christian Engwer and Robert Kl\"ofkorn and Mario Ohlberger and Oliver Sander},
title = { A generic grid interface for parallel and adaptive scientific computing. Part I: abstract framework},
journal = {Computing},
year = 2008,
volume = 82,
number = {2--3},
pages = {103--119}
}
@Article{dune08-2,
author = {Peter Bastian and Markus Blatt and Andreas Dedner and Christian Engwer and Robert Kl\"ofkorn and Ralf Kornhuber and Mario Ohlberger and Oliver Sander},
title = { A generic grid interface for parallel and adaptive scientific computing. Part II: implementation and test in DUNE},
journal = {Computing},
year = 2008,
volume = 82,
number = {2--3} ,
pages = {121--138}
}
@Article{ISTLParallel,
author = {Markus Blatt and Peter Bastian},
title = {On the Generic Parallelisation of Iterative Solvers for
the Finite Element Method},
journal = {Int. J. Computational Science and
Engineering},
volume = {4},
number = {1},
pages = {56--69},
year = 2008
}
@Misc{DuneWeb,
author = {DUNE},
howpublished = {\texttt{http://www.dune-project.org/}}
}
@Misc{boost_mpi,
author = {D. Gregor and M. Troyer},
title = {{B}oost.{M}{P}{I}},
howpublished = {\texttt{http://www.boost.org/}},
year = 2006
}
@PhdThesis{gerlach02:janus,
author = {Jens Gerlach},
title = {Domain Engineering and Generic Programming for Parallel Scientific Computing},
school = {TU Berlin},
year = {2002}
}
@InProceedings{giloi95:_promot,
author = {W.K. Giloi and M. Kessler and A. Schramm},
title = {PROMOTER: A High Level, Object-Parallel Programming Language},
booktitle = {Proceedings of the International Conference on High Performance Computing},
year = {1995},
address = {New Dehli, India},
month = {December}
}
@inproceedings{nolte00:_taco,
author = {J\"{o}rg Nolte and Mitsuhisa Sato and Yutaka Ishikawa},
title = {TACO -- Dynamic Distributed Collections with Templates and Topologies},
booktitle = {Euro-Par '00: Proceedings from the 6th International Euro-Par Conference on Parallel Processing},
year = {2000},
isbn = {3-540-67956-1},
pages = {1071--1080},
publisher = {Springer-Verlag},
address = {London, UK},
}
\ No newline at end of file
# $Id: $
EXTRA_DIST=darray.eps distarray.eps distindex.eps
include $(top_srcdir)/am/webstuff
include $(top_srcdir)/am/global-rules
[darray.eps: EPS figure generated with fig2dev, showing a global array "a" with global indices 0-7 distributed onto two local arrays, "a0" (local array in processor 0) and "a1" (local array in processor 1), each labelled with its local indices 0-4.]
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
// $Id: indexset.cc 1728 2012-11-06 13:06:44Z christi $
#include "config.h"
#include <dune/common/parallel/indexset.hh>
#include <dune/common/parallel/plocalindex.hh>
#include <dune/common/parallel/mpihelper.hh>
#include <iostream>
#include "buildindexset.hh"
#include "reverse.hh"
int main(int argc, char **argv)
{
// This is a parallel program, so we need to
// initialize MPI first.
Dune::MPIHelper& helper = Dune::MPIHelper::instance(argc, argv);
// The rank of our process
int rank = helper.rank();
// The type used as the local index
typedef Dune::ParallelLocalIndex<Flag> LocalIndex;
// The type used as the global index
typedef int GlobalIndex;
// The index set we use to identify the local indices with the globally
// unique ones
typedef Dune::ParallelIndexSet<GlobalIndex,LocalIndex,100> ParallelIndexSet;
// The index set
ParallelIndexSet indexSet;
build(helper, indexSet);
// Print the index set
std::cout<<indexSet<<std::endl;
reverseLocalIndex(indexSet);
// Print a header on the master process
if(rank==0)
std::cout<<"Reordered local indices:"<<std::endl;
// Wait for all processes
helper.getCollectiveCommunication().barrier();
std::cout<<indexSet<<std::endl;
// Assign new local indices
return 0;
}
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/common/parallel/mpihelper.hh> // An initializer of MPI
#include <dune/common/parallel/indexset.hh>
#include <dune/common/parallel/plocalindex.hh>
enum Flags { owner, ghost };
struct Bla
{
/** @brief The local index. */
size_t localIndex_;
/** @brief An attribute for the index. */
char attribute_;
/** @brief True if the index is also known to other processors. */
bool public_;
/**
* @brief The state of the index.
*
* Has to be one of LocalIndexState!
* @see LocalIndexState.
*/
char state_;
};
template<typename T1, typename T2>
void buildBlockedIndexSet(T1& indexset, int N, const T2& comm)
{
int rank=comm.rank();
int size=comm.size();
int localsize=N/size;
int bigger=N%size;
int start, end;
if(rank<bigger) {
start=rank*(localsize+1);
end=start+(localsize+1);
}else{
start=bigger*(localsize+1)+(rank-bigger)*localsize;
end=start+localsize;
}
indexset.beginResize();
int index=0;
int gindex=start;
typedef typename T1::LocalIndex LocalIndex;
std::cout<<sizeof(LocalIndex)<<" "<<sizeof(Bla)<<std::endl;
if(start>0)
indexset.add(gindex-1,LocalIndex(index++,ghost));
for(int i=start; i<end; i++,index++,gindex++)
indexset.add(gindex,LocalIndex(index,owner,true));
if(end<N)
indexset.add(gindex,LocalIndex(index,ghost,true));
}
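// As an illustration, assume N=100 and comm.size()==3: then localsize=33 and
// bigger=1, so rank 0 owns the global indices [0,34), rank 1 owns [34,67) and
// rank 2 owns [67,100). In addition, every rank stores the global index
// directly before and directly after its block (if it exists) as a ghost.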
int main(int argc, char** argv)
{
int n=100;
try{
using namespace Dune;
// Maybe initialize MPI
MPIHelper& helper = MPIHelper::instance(argc, argv);
std::cout << "Hello World! This is poosc08." << std::endl;
if(Dune::MPIHelper::isFake)
std::cout<< "This is a sequential program." << std::endl;
else{
typedef ParallelIndexSet<int,ParallelLocalIndex<Flags> > IndexSet;
IndexSet blockedSet;
buildBlockedIndexSet(blockedSet, n, helper.getCollectiveCommunication());
}
return 0;
}
catch (Dune::Exception &e) {
std::cerr << "Dune reported error: " << e << std::endl;
}
catch (...) {
std::cerr << "Unknown exception thrown!" << std::endl;
}
}
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/common/parallel/mpihelper.hh> // An initializer of MPI
#include <dune/common/parallel/indexset.hh>
#include <dune/common/parallel/remoteindices.hh>
#include <dune/common/parallel/communicator.hh>
#include <dune/common/parallel/plocalindex.hh>
#include <dune/common/parallel/interface.hh>
#include <dune/common/enumset.hh>
enum Flags { owner, ghost };
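// AddData and CopyData are the gather/scatter policies used with the
// BufferedCommunicator below: gather() extracts the value to send for local
// index i, scatter() stores a received value at local index i. AddData
// accumulates received values, CopyData overwrites the local value.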
template<typename T>
struct AddData {
typedef typename T::value_type IndexedType;
static const IndexedType& gather(const T& v, int i){
return v[i];
}
static void scatter(T& v, const IndexedType& item, int i){
v[i]+=item;
}
};
template<typename T>
struct CopyData {
typedef typename T::value_type IndexedType;
static const IndexedType& gather(const T& v, int i){
return v[i];
}
static void scatter(T& v, const IndexedType& item, int i){
v[i]=item;
}
};
template<class T>
void doCalculations(T& t){}
#if HAVE_MPI
void test()
{
int rank;
MPI_Comm comm=(MPI_COMM_WORLD);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
using namespace Dune;
// shortcut for index set type
typedef ParallelLocalIndex<Flags> LocalIndex;
typedef ParallelIndexSet<int, LocalIndex > PIndexSet;
PIndexSet sis;
sis.beginResize();
if(rank==0) {
sis.add(11, LocalIndex(0, ghost));
for(int i=1; i<=6; i++)
sis.add(i-1, LocalIndex(i, owner, i<=1||i>5));
sis.add(6, LocalIndex(7, ghost));
}else{
sis.add(5, LocalIndex(0, ghost));
for(int i=1; i<=6; i++)
sis.add(5+i, LocalIndex(i, owner, i<=1||i>5));
sis.add(0,LocalIndex(7, ghost));
}
sis.endResize();
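// With two processes sis now describes twelve global indices (0..11):
// rank 0 owns 0..5, rank 1 owns 6..11, and each rank additionally stores the
// first and the last index owned by the other rank as ghost copies.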
PIndexSet tis;
tis.beginResize();
int l=0;
for(int i=0; i<2; ++i)
for(int j=0; j<5; ++j) {
int g=rank*3-1+i*6+j;
if(g<0||g>11)
continue;
Flags flag=(j>0&&j<4) ? owner : ghost;
tis.add(g, LocalIndex(l++, flag));
}
tis.endResize();
std::cout<<rank<<" indexset: "<<sis<<std::endl;
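// RemoteIndices determines, by exchanging index information between the
// processes, which of the local indices are also known on other processes
// and with which attributes. riRedist relates the source set sis to the
// redistributed set tis; riS relates sis to itself.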
RemoteIndices<PIndexSet> riRedist(sis, tis, comm);
riRedist.rebuild<true>();
std::vector<int> v;
RemoteIndices<PIndexSet> riS(sis,sis, comm, v, true);
riS.rebuild<false>();
std::cout<<std::endl<<"begin"<<rank<<" riS="<<riS<<" end"<<rank<<std::endl<<std::endl;
Combine<EnumItem<Flags,ghost>,EnumItem<Flags,owner>,Flags> ghostFlags;
EnumItem<Flags,owner> ownerFlags;
Combine<EnumItem<Flags,ghost>, EnumItem<Flags,owner> > allFlags;
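// An Interface picks from the remote index information the index pairs that
// actually take part in the communication: data is sent from indices whose
// attribute matches the source flags (owner here) to remote indices whose
// attribute matches the destination flags.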
Interface infRedist;
Interface infS;
infRedist.build(riRedist, ownerFlags, allFlags);
infS.build(riS, ownerFlags, ghostFlags);
std::cout<<"inf "<<rank<<": "<<infS<<std::endl;
typedef std::vector<double> Container;
Container s(sis.size(),3), t(tis.size());
s[sis.size()-1]=-1;
BufferedCommunicator bComm;
BufferedCommunicator bCommRedist;
bComm.build(s, s, infS);
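// build() inspects the interface and allocates the message buffers;
// forward() then communicates from source to destination (here the owner
// values of s are copied onto the matching indices of the other process),
// while backward() communicates in the opposite direction.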
//bCommRedist.build(s, t, infRedist);
for(std::size_t i=0; i<sis.size(); i++)
std::cout<<s[i]<<" ";
std::cout<<std::endl;
bComm.forward<CopyData<Container> >(s,s);
for(std::size_t i=0; i<sis.size(); i++)
std::cout<<s[i]<<" ";
std::cout<<std::endl;
//bCommRedist.forward<CopyData<Container> >(s,t);
// calculate on the redistributed array
doCalculations(t);
bCommRedist.backward<AddData<Container> >(s,t);
}
#endif // HAVE_MPI
int main(int argc, char** argv)
{
try{
using namespace Dune;
#if HAVE_MPI
// Maybe initialize MPI
MPIHelper& helper = MPIHelper::instance(argc, argv);
std::cout << "Hello World! This is poosc08. rank=" <<helper.rank()<< std::endl;
test();
return 0;
#else
std::cout<< "Test poosc08_test disabled because MPI is not available." << std::endl;
return 77;
#endif // HAVE_MPI
}
catch (Dune::Exception &e) {
std::cerr << "Dune reported error: " << e << std::endl;
}
catch (...) {
std::cerr << "Unknown exception thrown!" << std::endl;
}
}
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
// $Id: reverse.hh 942 2008-09-10 18:21:57Z christi $
#ifndef REVERSE_HH
#define REVERSE_HH
#include "buildindexset.hh"
/**
* @brief Reverse the local indices of an index set.
*
* Let the index set have N entries; then index 0 will become N-1,
* index 1 will become N-2, ..., and index N-1 will become 0.
* @param indexSet The index set to reverse.
*/
template<typename TG, typename TL, int N>
void reverseLocalIndex(Dune::ParallelIndexSet<TG,TL,N>& indexSet)
{
// reverse the local indices
typedef typename Dune::ParallelIndexSet<TG,TL,N>::iterator iterator;
iterator end = indexSet.end();
size_t maxLocal = 0;
// find the maximal local index
for(iterator index = indexSet.begin(); index != end; ++index) {
// Get the local index
LocalIndex& local = index->local();
maxLocal = std::max(maxLocal, local.local());
}
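// Assign the new local indices in reverse order: the entry visited first
// gets maxLocal, the next one maxLocal-1, and so on down to 0.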
for(iterator index = indexSet.begin(); index != end; ++index) {
// Get the local index
LocalIndex& local = index->local();
local = maxLocal--;
}
}
#endif