Skip to content
Snippets Groups Projects
Commit 69aa058e authored by Markus Blatt's avatar Markus Blatt
Browse files

Make documentation more complete.

[[Imported from SVN: r1294]]
parent 71acf781
No related branches found
No related tags found
No related merge requests found
......@@ -17,3 +17,4 @@ indexset
*.rel
Makefile.dist.in
Makefile.dist
poosc08_test
......@@ -4,7 +4,7 @@ SUBDIRS = figures
# only build these programs if an MPI-implementation was found
if MPI
MPIPROGRAMS = indexset
MPIPROGRAMS = indexset poosc08 poosc08_test
endif
# programs just to build when "make check" is used
check_PROGRAMS = $(MPIPROGRAMS)
......@@ -28,6 +28,24 @@ include $(top_srcdir)/am/latex
include $(top_srcdir)/am/webstuff
include $(top_srcdir)/am/global-rules
poosc08_SOURCES = poosc08.cc
poosc08_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
poosc08_LDFLAGS = $(AM_LDFLAGS) \
$(DUNEMPILDFLAGS)
poosc08_LDADD = \
$(DUNEMPILIBS) \
$(LDADD)
poosc08_test_SOURCES = poosc08_test.cc
poosc08_test_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
poosc08_test_LDFLAGS = $(AM_LDFLAGS) \
$(DUNEMPILDFLAGS)
poosc08_test_LDADD = \
$(DUNEMPILIBS) \
$(LDADD)
indexset_SOURCES = indexset.cc buildindexset.hh reverse.hh
indexset_CPPFLAGS = $(AM_CPPFLAGS) \
$(DUNEMPICPPFLAGS)
......
@InProceedings{ISTL,
author = {Markus Blatt and Peter Bastian},
title = {The Iterative Solver Template Library},
booktitle = {Applied Parallel Computing. State of the Art in Scientific Computing},
editor = {Bo K\r{a}gstr\"om and Erik Elmroth and Jack Dongarra and Jerzy Wa\'sniewski},
year = 2007,
volume = 4699,
series = {Lecture Notes in Computer Science},
publisher = {Springer},
pages = {666--675}
}
@Article{dune08-1,
author = {Peter Bastian and Markus Blatt and Andreas Dedner and Christian Engwer and Robert Kl\"ofkorn and Mario Ohlberger and Oliver Sander},
title = {A generic grid interface for parallel and adaptive scientific computing. Part I: abstract framework},
journal = {Computing},
year = 2008,
volume = 82,
number = {2--3},
pages = {103--119}
}
@Article{dune08-2,
author = {Peter Bastian and Markus Blatt and Andreas Dedner and Christian Engwer and Robert Kl\"ofkorn and Ralf Kornhuber and Mario Ohlberger and Oliver Sander},
title = {A generic grid interface for parallel and adaptive scientific computing. Part II: implementation and test in DUNE},
journal = {Computing},
year = 2008,
volume = 82,
number = {2--3},
pages = {121--138}
}
@Article{ISTLParallel,
author = {Markus Blatt and Peter Bastian},
title = {On the Generic Parallelisation of Iterative Solvers for
the Finite Element Method},
journal = {Int. J. Computational Science and
Engineering},
volume = {4},
number = {1},
pages = {56--69},
year = 2008
}
@Misc{DuneWeb,
author = {DUNE},
howpublished = {\texttt{http://www.dune-project.org/}}
}
@Misc{boost_mpi,
author = {D. Gregor and M. Troyer},
title = {{B}oost.{M}{P}{I}},
howpublished = {\texttt{http://www.boost.org/}},
year = 2006
}
@PhdThesis{gerlach02:janus,
author = {Jens Gerlach},
title = {Domain Engineering and Generic Programming for Parallel Scientific Computing},
school = {TU Berlin},
year = {2002}
}
@InProceedings{giloi95:_promot,
author = {W.K. Giloi and M. Kessler and A. Schramm},
title = {PROMOTER: A High Level, Object-Parallel Programming Language},
booktitle = {Proceedings of the International Conference on High Performance Computing},
year = {1995},
address = {New Delhi, India},
month = {December}
}
@inproceedings{nolte00:_taco,
author = {J\"{o}rg Nolte and Mitsuhisa Sato and Yutaka Ishikawa},
title = {TACO -- Dynamic Distributed Collections with Templates and Topologies},
booktitle = {Euro-Par '00: Proceedings from the 6th International Euro-Par Conference on Parallel Processing},
year = {2000},
isbn = {3-540-67956-1},
pages = {1071--1080},
publisher = {Springer-Verlag},
address = {London, UK},
}
\ No newline at end of file
This diff is collapsed.
# $Id: $
EXTRA_DIST=darray.eps
EXTRA_DIST=darray.eps distarray.eps distindex.eps
include $(top_srcdir)/am/webstuff
include $(top_srcdir)/am/global-rules
This diff is collapsed.
This diff is collapsed.
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/mpihelper.hh> // An initializer of MPI
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/istl/indexset.hh>
#include <dune/istl/plocalindex.hh>
// Attribute of an index in the parallel index set:
// owner - the index is owned (computed) by this process,
// ghost - a local read-only copy of an index owned by another process.
enum Flags { owner, ghost };
/**
 * @brief Dummy struct mirroring the data members expected in a parallel
 *        local index.
 *
 * NOTE(review): apparently only used for a size comparison with the real
 * local index type (see the debug output in buildBlockedIndexSet); it is
 * never instantiated with meaningful values — candidate for removal,
 * verify usages first.
 */
struct Bla
{
/** @brief The local index. */
size_t localIndex_;
/** @brief An attribute for the index. */
char attribute_;
/** @brief True if the index is also known to other processors. */
bool public_;
/**
 * @brief The state of the index.
 *
 * Has to be one of LocalIndexState!
 * @see LocalIndexState.
 */
char state_;
};
/**
 * @brief Builds a blocked (consecutive) distribution of N global indices
 *        over all processes of a communicator.
 *
 * Each rank owns a consecutive range of the global indices 0..N-1; the
 * first N%size ranks get one extra index.  In addition each rank stores a
 * ghost copy of the last index of its left neighbour and of the first
 * index of its right neighbour (no wrap-around at the ends).
 *
 * @param indexset The parallel index set to fill.  Must be empty; it is
 *                 populated between beginResize()/endResize().
 * @param N        The total number of global indices to distribute.
 * @param comm     Communicator providing rank() and size().
 */
template<typename T1, typename T2>
void buildBlockedIndexSet(T1& indexset, int N, const T2& comm)
{
  int rank=comm.rank();
  int size=comm.size();
  int localsize=N/size;   // minimum number of indices per process
  int bigger=N%size;      // the first 'bigger' ranks get one extra index
  int start, end;         // global half-open range [start, end) owned here
  if(rank<bigger) {
    start=rank*(localsize+1);
    end=start+(localsize+1);
  }else{
    start=bigger*(localsize+1)+(rank-bigger)*localsize;
    end=start+localsize;
  }
  indexset.beginResize();
  int index=0;       // next free local index
  int gindex=start;  // corresponding global index
  typedef typename T1::LocalIndex LocalIndex;
  // Ghost copy of the left neighbour's last index.  Marked public (known
  // to other processes) for consistency with the right-neighbour ghost
  // entry below.
  if(start>0)
    indexset.add(gindex-1,LocalIndex(index++,ghost,true));
  // The consecutively owned block of this rank; all entries are public.
  for(int i=start; i<end; i++,index++,gindex++)
    indexset.add(gindex,LocalIndex(index,owner,true));
  // Ghost copy of the right neighbour's first index.
  if(end<N)
    indexset.add(gindex,LocalIndex(index,ghost,true));
  // Finish the resize phase; without this call the index set would stay
  // in resize mode and could not be used afterwards.
  indexset.endResize();
}
int main(int argc, char** argv)
{
int n=100;
try{
using namespace Dune;
//Maybe initialize Mpi
MPIHelper& helper = MPIHelper::instance(argc, argv);
std::cout << "Hello World! This is poosc08." << std::endl;
if(Dune::MPIHelper::isFake)
std::cout<< "This is a sequential program." << std::endl;
else{
typedef ParallelIndexSet<int,ParallelLocalIndex<Flags> > IndexSet;
IndexSet blockedSet;
buildBlockedIndexSet(blockedSet, n, helper.getCollectiveCommunication());
}
return 0;
}
catch (Dune::Exception &e) {
std::cerr << "Dune reported error: " << e << std::endl;
}
catch (...) {
std::cerr << "Unknown exception thrown!" << std::endl;
}
}
// -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
// vi: set et ts=4 sw=2 sts=2:
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <iostream>
#include <dune/common/mpihelper.hh> // An initializer of MPI
#include <dune/common/exceptions.hh> // We use exceptions
#include <dune/istl/indexset.hh>
#include <dune/istl/remoteindices.hh>
#include <dune/istl/communicator.hh>
#include <dune/istl/plocalindex.hh>
#include <dune/istl/interface.hh>
#include <dune/common/enumset.hh>
// Attribute of an index in the parallel index set:
// owner - the index is owned (computed) by this process,
// ghost - a local read-only copy of an index owned by another process.
enum Flags { owner, ghost };
/**
 * @brief Gather/scatter policy that accumulates received values.
 *
 * Used with the buffered communicator: gather() reads the value to send
 * from a container, scatter() adds a received value onto the local entry.
 *
 * @tparam T Indexed container type (e.g. std::vector<double>).
 */
template<typename T>
struct AddData {
  typedef typename T::value_type IndexedType;

  /// Read the value at position @p idx for sending.
  static double gather(const T& vec, int idx){
    return vec[idx];
  }

  /// Accumulate a received @p value onto the entry at position @p idx.
  static void scatter(T& vec, double value, int idx){
    vec[idx] += value;
  }
};
/**
 * @brief Gather/scatter policy that overwrites with received values.
 *
 * Used with the buffered communicator: gather() reads the value to send
 * from a container, scatter() replaces the local entry by the received
 * value.
 *
 * @tparam T Indexed container type (e.g. std::vector<double>).
 */
template<typename T>
struct CopyData {
  typedef typename T::value_type IndexedType;

  /// Read the value at position @p idx for sending.
  static double gather(const T& vec, int idx){
    return vec[idx];
  }

  /// Overwrite the entry at position @p idx with the received @p value.
  static void scatter(T& vec, double value, int idx){
    vec[idx] = value;
  }
};
// Placeholder for numerical work on the redistributed data; intentionally
// a no-op in this example.
template<class T>
void doCalculations(T& t){}
/**
 * @brief Hard-wired communication example on 12 global indices (0..11).
 *
 * Builds a source partition (six owned indices per rank plus ghost copies
 * of the neighbouring boundary indices), a second overlapping target
 * partition, the remote-index information and communication interfaces
 * between them, and finally performs a forward copy-communication within
 * the source partition.
 *
 * NOTE(review): the rank==0 / else branches only cover two processes —
 * the layout appears to assume exactly 2 MPI ranks; verify before running
 * with more.
 */
void test()
{
int rank;
// Communicator used for building the remote index information.
MPI_Comm comm=(MPI_COMM_WORLD);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
using namespace Dune;
// shortcut for index set type
typedef ParallelLocalIndex<Flags> LocalIndex;
typedef ParallelIndexSet<int, LocalIndex > PIndexSet;
// Source index set: each rank owns six consecutive global indices and
// stores ghost copies of the adjacent indices of the other rank
// (rank 0 additionally of global index 11, i.e. a cyclic overlap).
PIndexSet sis;
sis.beginResize();
if(rank==0) {
sis.add(11, LocalIndex(0, ghost));
for(int i=1; i<=6; i++)
// Only the block-boundary indices (i==1, i==6) are public, i.e.
// known to the other process.
sis.add(i-1, LocalIndex(i, owner, i<=1||i>5));
sis.add(6, LocalIndex(7, ghost));
}else{
sis.add(5, LocalIndex(0, ghost));
for(int i=1; i<=6; i++)
sis.add(5+i, LocalIndex(i, owner, i<=1||i>5));
sis.add(0,LocalIndex(7, ghost));
}
sis.endResize();
// Target index set: a different, overlapping partition of the same 12
// global indices, used as the redistribution target.
PIndexSet tis;
tis.beginResize();
int l=0;   // next free local index in tis
for(int i=0; i<2; ++i)
for(int j=0; j<5; ++j) {
// Global index of this entry; skip anything outside 0..11.
int g=rank*3-1+i*6+j;
if(g<0||g>11)
continue;
// The middle three indices of each strip are owned, the fringe is ghost.
Flags flag=(j>0&&j<4) ? owner : ghost;
tis.add(g, LocalIndex(l++, flag));
}
tis.endResize();
std::cout<<rank<<" isxset: "<<sis<<std::endl;
// Remote indices between source and target partition (redistribution).
RemoteIndices<PIndexSet> riRedist(sis, tis, comm);
riRedist.rebuild<true>();
// Remote indices of the source partition with itself; the neighbour
// list v is empty here.
std::vector<int> v;
RemoteIndices<PIndexSet> riS(sis,sis, comm, v, true);
riS.rebuild<false>();
std::cout<<std::endl<<"begin"<<rank<<" riS="<<riS<<" end"<<rank<<std::endl<<std::endl;
// Attribute selectors used when building the communication interfaces.
Combine<EnumItem<Flags,ghost>,EnumItem<Flags,owner>,Flags> ghostFlags;
EnumItem<Flags,owner> ownerFlags;
Combine<EnumItem<Flags,ghost>, EnumItem<Flags,owner> > allFlags;
Interface infRedist;
Interface infS;
// owner -> any for the redistribution, owner -> owner|ghost within the
// source partition.
infRedist.build(riRedist, ownerFlags, allFlags);
infS.build(riS, ownerFlags, ghostFlags);
std::cout<<"inf "<<rank<<": "<<infS<<std::endl;
typedef std::vector<double> Container;
// Source data initialized to 3; the last entry is marked with -1 so the
// effect of the communication is visible in the printed output.
Container s(sis.size(),3), t(tis.size());
s[sis.size()-1]=-1;
BufferedCommunicator bComm;
BufferedCommunicator bCommRedist;
bComm.build(s, s, infS);
// NOTE(review): the redistribution communicator is declared but all its
// uses below are commented out — presumably work in progress.
//bCommRedist.build(s, t, infRedist);
for(std::size_t i=0; i<sis.size(); i++)
std::cout<<s[i]<<" ";
std::cout<<std::endl;
// Copy owner values into the ghost copies within the source partition.
bComm.forward<CopyData<Container> >(s,s);
for(std::size_t i=0; i<sis.size(); i++)
std::cout<<s[i]<<" ";
std::cout<<std::endl;
//bCommRedist.forward<CopyData<Container> >(s,t);
// calculate on the redistributed array
doCalculations(t);
//bCommRedist.backward<AddData<Container> >(s,t);
}
/**
 * @brief Entry point of the communication test program.
 *
 * Initializes MPI via MPIHelper and runs test() in the parallel case;
 * prints a notice instead when this is a sequential (fake MPI) build.
 * Dune exceptions and unknown exceptions are caught and reported.
 */
int main(int argc, char** argv)
{
try{
using namespace Dune;
//Maybe initialize Mpi
// helper is only needed for its initialization side effect here;
// test() uses MPI_COMM_WORLD directly.
MPIHelper& helper = MPIHelper::instance(argc, argv);
// NOTE(review): the greeting still says "poosc08" although this is the
// poosc08_test program — probably a copy-and-paste leftover; confirm
// before changing the output.
std::cout << "Hello World! This is poosc08." << std::endl;
if(Dune::MPIHelper::isFake)
std::cout<< "This is a sequential program." << std::endl;
else{
test();
}
return 0;
}
catch (Dune::Exception &e) {
std::cerr << "Dune reported error: " << e << std::endl;
}
catch (...) {
std::cerr << "Unknown exception thrown!" << std::endl;
}
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment