Commit c5fb2887 authored by Timo Koch

Merge branch 'revert-f020e91b' into 'master'

Revert "Merge branch 'fix/165-py-constructor-with-communicator' into 'master'"

See merge request !699
parents f020e91b 5fa3a282
1 merge request: !699 Revert "Merge branch 'fix/165-py-constructor-with-communicator' into 'master'"
Pipeline #60664 passed
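
For context before reading the diff: the user-visible effect of this revert, taken from the yaspGrid signatures below, is that the communicator keyword introduced by fix/165-py-constructor-with-communicator is removed again, so grids are always built on the default communicator. A minimal sketch of the post-revert call (the domain values and variable names are illustrative assumptions, not part of the commit):

    # illustrative sketch, not part of the commit: calling yaspGrid after the revert
    from dune.grid import cartesianDomain, yaspGrid
    domain = cartesianDomain([0, 0], [1, 1], [4, 4])   # assumed 2-d example domain
    grid = yaspGrid(domain, dimgrid=2)                 # no 'communicator' keyword any more
    print("number of elements:", grid.size(0))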
@@ -2,9 +2,7 @@
 # SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception

 from dune.common.checkconfiguration import assertCMakeHave, ConfigurationError
-from dune.common import comm as defaultCommunicator
 from dune.typeregistry import generateTypeName
-from dune.packagemetadata import getCMakeFlags

 class CartesianDomain(tuple):
     @staticmethod
@@ -205,7 +203,7 @@ def tensorProductCoordinates(coords, offset=None, ctype='double'):
     return coords_(coords,offset)

 def yaspGrid(constructor, dimgrid=None, coordinates="equidistant", ctype=None,
-             periodic=None, overlap=None, communicator=defaultCommunicator, **param):
+             periodic=None, overlap=None, **param):
     """create a Dune::YaspGrid

     constructor: a YaspGrid coordinates object
@@ -298,38 +296,34 @@ def yaspGrid(constructor, dimgrid=None, coordinates="equidistant", ctype=None,
         # periodic and overlap are (if required) defined by the DGF reader
         periodic = None
         overlap = None
-        # check that Communicator uses default value
-        if communicator != defaultCommunicator:
-            raise ValueError("yaspGrid: construction via reader does not support user defined communicator")
     else:
         raise ValueError("yaspGrid: unsupported constructor parameter " + str(constructor))

     # compile YaspGrid for a given dimension & coordinate type
-    includes = ["dune/grid/yaspgrid.hh", "dune/grid/io/file/dgfparser/dgfyasp.hh", "dune/common/parallel/mpihelper.hh"]
+    includes = ["dune/grid/yaspgrid.hh", "dune/grid/io/file/dgfparser/dgfyasp.hh"]
     gridTypeName, _ = generateTypeName("Dune::YaspGrid", str(dimgrid), coordinates_type)
-    setupPeriodic = [ 'std::bitset<'+str(dimgrid)+'> periodic_;',
-                      'for (int i=0;i<'+str(dimgrid)+';++i) periodic_.set(i,periodic[i]);']
-    generator = setupPeriodic + [ 'return new DuneType(coordinates,periodic_,overlap,communicator);' ]
     ctor = Constructor(
-              [ "const " + coordinates_type + "& coordinates",
-                'std::array<bool, '+str(dimgrid)+'> periodic',
-                'int overlap',
-                'Dune::Communication< Dune::MPIHelper::MPICommunicator > communicator'],
-              generator,
-              [ '"coordinates"_a', '"periodic"_a', '"overlap"_a', '"communicator"_a' ])
+              [ "const " + coordinates_type + "& coordinates",
+                'std::array<bool, '+str(dimgrid)+'> periodic',
+                'int overlap' ],
+              [ 'std::bitset<'+str(dimgrid)+'> periodic_;',
+                'for (int i=0;i<'+str(dimgrid)+';++i) periodic_.set(i,periodic[i]);',
+                'return new DuneType(coordinates,periodic_,overlap);' ],
+              [ '"coordinates"_a', '"periodic"_a', '"overlap"_a' ])
     gridModule = module(includes, gridTypeName, ctor)

     # read the grid either via reader or create it directly...
     if useReader:
-        return gridModule.reader(constructor).leafView # readers don't support custom communicator
+        return gridModule.reader(constructor).leafView
     else:
-        return gridModule.HierarchicalGrid(constructor,periodic,overlap,communicator).leafView
+        return gridModule.HierarchicalGrid(constructor,periodic,overlap).leafView

 grid_registry = {
     "OneD" : onedGrid,
     "Yasp" : yaspGrid,
 }

+from dune.packagemetadata import getCMakeFlags
 try:
     if not getCMakeFlags()["HAVE_ALBERTA"]:
         raise KeyError
......
@@ -4,7 +4,6 @@
 # make sure the example works and run it during tests
 dune_python_add_test(NAME pyexample
                      SCRIPT example.py
-                     MPI_RANKS 1 2 4
                      WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
                      LABELS quick)
 dune_symlink_to_source_files(FILES example.py test2d_offset.dgf circle1storder.msh)
......
@@ -76,16 +76,3 @@ p_grid = yaspGrid( cartDomain, dimgrid=dim) #, ctype='float' )
 print("number of elements of periodic YaspGrid grid:",p_grid.size(0))
 p_grid.plot()
 runOnGrid(p_grid)
-
-# create parallel YaspGrids with user defined communicators
-def parallelGrid(comm,sz):
-    print (comm.rank, "construct a YaspGrid with tensor product coordinate type and custom communicator")
-    from dune.grid import yaspGrid, tensorProductCoordinates
-    import numpy as np
-    coords = tensorProductCoordinates([np.array([1,2,3,4,5,6,7,8,9]), np.array([10,11,33,44,45,46,47,48,109])], ctype='float')
-    ygrid = yaspGrid(coords,overlap=0,communicator=comm)
-    print(comm.rank,"number of elements of tensor YaspGrid grid:",ygrid.size(0))
-    assert(comm.size == sz)
-    assert(ygrid.size(0) == 64/sz)
-parallelGrid(dune.common.comm, dune.common.comm.size) # working with the default communicator, one distributed grid
-parallelGrid(MPI.COMM_SELF, 1) # unrelated local grids on each rank
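
The block removed above tested YaspGrid construction with user-defined communicators. After the revert the same grid can only be built on the default communicator; a sketch of what remains possible (coordinate arrays copied from the removed test, the rest is an assumed reduction, not part of the commit):

    # illustrative sketch, not part of the commit: post-revert variant of the removed test
    from dune.grid import yaspGrid, tensorProductCoordinates
    import numpy as np
    coords = tensorProductCoordinates([np.array([1,2,3,4,5,6,7,8,9]),
                                       np.array([10,11,33,44,45,46,47,48,109])], ctype='float')
    ygrid = yaspGrid(coords, overlap=0)   # the 'communicator' keyword is gone from the interface
    print("number of elements of tensor YaspGrid grid:", ygrid.size(0))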