Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • jakub.both/dune-common
  • samuel.burbulla/dune-common
  • patrick.jaap/dune-common
  • tobias.leibner/dune-common
  • alexander.mueller/dune-common
  • pipping/dune-common
  • Xinyun.Li/dune-common
  • felix.schindler/dune-common
  • simon.praetorius/dune-common
  • ani.anciaux-sedrakian/dune-common
  • henrik.stolzmann/dune-common
  • matthew.t.collins/dune-common
  • liam.keegan/dune-common
  • felix.mueller/dune-common
  • ansgar/dune-common
  • dominic/dune-common
  • lars.lubkoll/dune-common
  • exadune/dune-common
  • felix.gruber/dune-common
  • govind.sahai/dune-common
  • michael.sghaier/dune-common
  • core/dune-common
  • kilian.weishaupt/dune-common
  • markus.blatt/dune-common
  • joscha.podlesny/dune-common
  • tobias.meyer.andersen/dune-common
  • andreas.thune/dune-common
  • lars.bilke/dune-common
  • daniel.kienle/dune-common
  • lukas.renelt/dune-common
  • smuething/dune-common
  • stephan.hilb/dune-common
  • tkoch/dune-common
  • nils.dreier/dune-common
  • rene.milk/dune-common
  • lasse.hinrichsen/dune-common
  • yunus.sevinchan/dune-common
  • lisa_julia.nebel/dune-common
  • claus-justus.heine/dune-common
  • lorenzo.cerrone/dune-common
  • eduardo.bueno/dune-common
41 results
Show changes
Showing
with 2340 additions and 1434 deletions
#! /usr/bin/env python3
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception

# Adds SPDX copyright/license headers to files of a DUNE project module.
import argparse
import dataclasses
import errno
import logging
import re

# REUSE-IgnoreStart

# Command-line interface: license/copyright text, optional rule override
# (--like), log level, and the list of files to process.
parser = argparse.ArgumentParser(
    description='add SPDX header to DUNE project modules'
)
parser.add_argument(
    '--license', '-l', dest='license',
    help='SPDX license name',
    default='LicenseRef-GPL-2.0-only-with-DUNE-exception',
)
parser.add_argument(
    '--copyright', '-c', dest='copyright',
    help='copyright information',
    default='Copyright © DUNE Project contributors, see file LICENSE.md in module root',
)
parser.add_argument(
    '--like', dest='like',
    help='handle files like a file named like this',
),  # NOTE(review): trailing comma makes this a 1-tuple expression; harmless but likely unintended
parser.add_argument(
    '--log', dest='log',
    help='set log level',
),  # NOTE(review): same stray trailing comma as above
parser.add_argument(
    'files', nargs='+',
    help='files to add copyright and license headers to',
)
@dataclasses.dataclass
class Rule:
    """Describes how to embed an SPDX notice for one family of file names."""
    # Regex matched (re.search) against the file name to select this rule.
    pattern: str
    # Text emitted once before the notice (e.g. an opening comment marker).
    prefix: str = ""
    # Text emitted once after the notice (e.g. a closing marker / blank line).
    suffix: str = ""
    # Prepended to each notice line (per-line comment marker).
    line_prefix: str = ""
    # Appended to each notice line.
    line_suffix: str = ""
    # If True, write the notice into a separate "<file>.license" file
    # instead of modifying the file itself (for binary-ish formats).
    use_license_file: bool = False
    # Optional extra pattern for lines the notice must be inserted after
    # (in addition to the module-level skip_lines regex).
    skip_lines: re.Pattern = None  # NOTE(review): effectively Optional[re.Pattern]

    def match(self, filename) -> bool:
        # A rule applies when its pattern occurs anywhere in the file name.
        return bool(re.search(self.pattern, filename))
# Table of header-insertion rules; the first rule whose pattern matches the
# file name wins (see rule_for_file below).
rules = [
    # C/C++-like sources: '//' line comments.
    Rule(
        pattern=r"\.(?:c|cc|cc\.in|h|hh|hh\.in|ct|ht|t|geo)$|config\.h\.cmake$",
        line_prefix="// ",
    ),
    # reStructuredText: hide the notice inside a literal block.
    Rule(
        pattern=r"\.(?:rst|rst\.in)$",
        prefix="::\n",
        line_prefix=" ",
        suffix="\n",
    ),
    # BibTeX: @Comment entries.
    Rule(
        pattern=r"\.bib$",
        line_prefix="@Comment ",
        suffix="\n",
    ),
    # Dune grid format: '%' comments, but never before the leading "DGF" marker.
    Rule(
        pattern=r"\.dgf$",
        line_prefix="% ",
        skip_lines=re.compile("^DGF"),
    ),
    # (La)TeX sources.
    Rule(
        pattern=r"\.tex$",
        line_prefix="% ",
        suffix="\n",
    ),
    # Hash-commented files: build system, scripts, configuration.
    Rule(
        pattern=r"(?:\.amc|\.cmake|\.cmake\.in|\.gitignore|\.ini|\.pc\.in"
        r"|\.pl|\.py|\.py\.in|\.sh|\.toml|\.toml\.in|\.yml"
        r"|CMakeLists.txt|Doxylocal|dune.module|MANIFEST\.in)$",
        line_prefix="# ",
        suffix="\n",
    ),
    # Doxygen text snippets use C++-style comments.
    Rule(
        pattern=r"/doxygen/.*\.txt$",
        line_prefix="// ",
        suffix="\n",
    ),
    # Markdown / INSTALL: wrap the notice in an HTML comment.
    Rule(
        pattern=r"(?:\.md|INSTALL)$",
        prefix="<!--\n",
        suffix="-->\n\n",
    ),
    # Binary-ish formats: notice goes into a separate <file>.license file.
    Rule(
        pattern=r"\.(?:cel|eps|fig|pdf|png|svg|vrt)$",
        use_license_file=True,
    ),
    Rule(
        # gmsh's MSH file format supports comments via unknown sections
        # ($Comment ... $EndComment), but DUNE does not handle that.
        # Reference: https://gmsh.info/doc/texinfo/gmsh.html#MSH-file-format
        pattern=r"\.msh",
        use_license_file=True,
    ),
]

# Lines that must stay at the very top of any file: editor mode lines
# (emacs -*- / vi(m):) and shebangs.
skip_lines = re.compile(r'-\*-|vi(?:m)?:|^#!')
class Notice:
    """The rendered SPDX copyright and license lines for one run."""

    def __init__(self, copyright, license):
        # Pre-render the two header lines exactly as they will appear.
        self.copyright = 'SPDX-FileCopyrightInfo: ' + copyright
        self.license = 'SPDX-License-Identifier: ' + license

    def match(self, lines) -> bool:
        """Return True when any of ``lines`` already carries one of our tags."""
        needle = re.compile('(?:%s|%s)' % (re.escape(self.copyright),
                                           re.escape(self.license)))
        for line in lines:
            if needle.search(line):
                return True
        return False
def rule_for_file(filename) -> "Rule":
    """Return the first entry of ``rules`` matching ``filename``, else None."""
    matching = (rule for rule in rules if rule.match(filename))
    return next(matching, None)
def line_for_notice(lines, rule: "Rule") -> int:
    """Index of the first line where the notice may be inserted.

    Skips shebang/editor-mode lines (module-level ``skip_lines``) and any
    lines the rule itself wants skipped; falls back to appending at the end.
    """
    for index, line in enumerate(lines):
        if skip_lines.search(line):
            continue
        if rule.skip_lines is not None and rule.skip_lines.search(line):
            continue
        return index
    return len(lines)
def apply_rule_to_file(filename, rule: "Rule", notice: "Notice"):
    """Insert the SPDX ``notice`` into ``filename`` as described by ``rule``.

    For binary-ish formats the notice is written to a sibling
    "<filename>.license" file; otherwise it is inserted in-place after any
    shebang/mode lines. Files that already carry a notice are left alone.

    Bug fix: the f-string placeholders had been replaced by the literal
    text "(unknown)", so the .license file was written to a fixed wrong
    path and every log message lost the file name.
    """
    if rule.use_license_file:
        try:
            # "xt" mode: fail with EEXIST if the .license file already exists.
            with open(f"{filename}.license", "xt") as fh:
                logging.debug(f"{filename}: create separate .license file")
                print(notice.copyright, file=fh)
                print(notice.license, file=fh)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logging.info(f"{filename}: separate .license file already exists")
            else:
                raise
    else:
        with open(filename, "rt") as fh:
            lines = fh.readlines()
        if notice.match(lines):
            logging.info(f"{filename}: already contains a notice")
            return
        index = line_for_notice(lines, rule)
        logging.debug(f"{filename}: Will insert notice at line {index}")
        # Insert in reverse order so everything ends up at ``index``:
        # prefix, copyright line, license line, suffix.
        if rule.suffix:
            lines.insert(index, rule.suffix)
        lines.insert(index, f"{rule.line_prefix}{notice.license}{rule.line_suffix}\n")
        lines.insert(index, f"{rule.line_prefix}{notice.copyright}{rule.line_suffix}\n")
        if rule.prefix:
            lines.insert(index, rule.prefix)
        with open(filename, "wt") as fh:
            print(*lines, sep='', end='', file=fh)
if __name__ == '__main__':
    args = parser.parse_args()
    if args.log:
        # e.g. --log=debug selects logging.DEBUG
        logging.basicConfig(level=getattr(logging, args.log.upper()))
    notice = Notice(args.copyright, args.license)
    for filename in args.files:
        # Bug fix: log messages had their f-string placeholders replaced by
        # the literal "(unknown)"; restore the actual file name.
        logging.debug(f"Processing {filename}")
        if args.like:
            # --like: classify every file as if it had this name
            rule = rule_for_file(args.like)
        else:
            rule = rule_for_file(filename)
        if rule is None:
            logging.warning(f"{filename}: No rule found for this file. Add a rule or try --like.")
            continue
        apply_rule_to_file(filename, rule, notice)
# add-spdx() {
# local file line_prefix prefix suffix
# local copying_file="LICENSE.md"
# local license="LicenseRef-GPL-2.0-only-with-DUNE-exception"
# xxlicense="LGPL-2.1-or-later"
# for file in "$@"; do
# prefix=""
# line_prefix=""
# suffix=""
# license_file="${file}"
# case "${file}" in
# *.cc|*.hh|*config.h.cmake|*.cc.in|*.ct|*.ht|*.t) line_prefix="// " ;;
# */doxygen/*.txt) line_prefix="// "; suffix=$'\n' ;;
# *CMakeLists.txt|*.py|*.gitignore|*.yml|*Doxylocal|*.toml|*.pc.in|*dune.module|*.cmake|*.ini) line_prefix="# "; suffix=$'\n' ;;
# *.md|*INSTALL) prefix=$'<!--\n'; suffix=$'\n-->\n' ;;
# *.tex) line_prefix="% "; suffix=$'\n' ;;
# *.svg|*.png|*.eps|*.pdf) license_file="${file}.license"; file="" ;;
# esac
#
# ed ${file} <<EOT
# 0i
# ${prefix}${line_prefix}SPDX-FileCopyrightText: Copyright © DUNE Project contributors, see file ${copying_file} in module root
# ${line_prefix}SPDX-License-Identifier: ${license}${suffix}
# .
# w ${license_file}
# q
# EOT
# done
# }
# REUSE-IgnoreEnd
#!/bin/bash
# $Id: autogen.sh 5054 2008-01-08 15:06:55Z christi $
# barf on errors: abort the whole script as soon as any command fails
set -e
# Print command-line help for dune-autogen.
usage () {
  cat <<'EOF'
Usage: dune-autogen DUNE_MODULE_PATH_LIST [options]
 --ac=, --acversion=VERSION use a specific VERSION of autoconf
 --am=, --amversion=VERSION use a specific VERSION of automake
 -h, --help you already found this :-)
EOF
}
## get my name...
# Ensure dune.module declares a module name; warn (but continue) otherwise.
grep '^Module:' dune.module >/dev/null || echo "Parser Error: Module entry missing in dune.module"
name=
# Extract the value after "Module:"; handles both "Module: foo" (value lands
# in $name) and "Module:foo" (value glued to the keyword, in $head).
while read head name rest
do case "$head" in
     Module:) break;;
     Module:*) name="${head#Module:}"; break;;
   esac
   name=
done <dune.module

## dune-all.m4
# Remove stale generated m4 files from previous runs.
rm -f dune-all.m4
rm -f $name.m4

# add current dir to PATH
PATH=`dirname $0`:$PATH

# guess libtool prefix
# If the user points LIBTOOLIZE at a specific binary, derive its install
# prefix and pick up its aclocal macros as well.
if test -n "$LIBTOOLIZE"; then
  LIBTOOL_prefix=`dirname \`dirname $LIBTOOLIZE\``
  PATH=$LIBTOOL_prefix:$PATH
  ACLOCAL_FLAGS="$ACLOCAL_FLAGS -I $LIBTOOL_prefix/share/aclocal"
fi
# Parse the command line; anything that is not a recognised option is treated
# as the path of a DUNE module to scan for m4/aclocal/am resources.
for OPT in "$@"; do
  set +e
  # stolen from configure...
  # when no option is set, this returns an error code
  arg=`expr "x$OPT" : 'x[^=]*=\(.*\)'`
  set -e
  case "$OPT" in
    --ac=*|--acversion=*)
      if test "x$arg" = "x"; then
        usage;
        exit 1;
      fi
      ACVERSION=$arg
      ;;
    --am=*|--amversion=*)
      if test "x$arg" = "x"; then
        usage;
        exit 1;
      fi
      AMVERSION=$arg
      ;;
    -h|--help) usage ; exit 0 ;;
    *)
      # Module path: collect aclocal include dirs and locate the automake
      # helper directory shipped by dune-common.
      if test -d "$OPT/m4"; then
        ACLOCAL_FLAGS="$ACLOCAL_FLAGS -I $OPT/m4"
      fi
      if test -f "$OPT/dune-common.pc.in" ; then
        # if test \( -d "$OPT/am" \) -a ! \( -h "$OPT/am" \) ; then
        echo "Found am directory $OPT/am"
        am_dir="$OPT/am"
      fi
      if test -d "$OPT/share/aclocal"; then
        ACLOCAL_FLAGS="$ACLOCAL_FLAGS -I $OPT/share/aclocal"
      fi
      if test -d "$OPT/share/dune-common/am"; then
        echo "Found am directory $OPT/share/dune-common/am"
        am_dir="$OPT/share/dune-common/am"
      fi
      PATH=$OPT/bin:$PATH
      ;;
  esac
done
## report parameters
# Honour explicitly requested autoconf/automake versions and verify that the
# corresponding versioned executables actually exist on the PATH.
if test "x$ACVERSION" != "x"; then
  echo "Forcing autoconf version $ACVERSION"
  if ! which autoconf$ACVERSION > /dev/null; then
    echo
    echo "Error: Could not find autoconf$ACVERSION"
    echo " Did you specify a wrong version?"
    exit 1
  fi
fi
if test "x$AMVERSION" != "x"; then
  echo "Forcing automake version $AMVERSION"
  if ! which automake$AMVERSION > /dev/null; then
    echo
    echo "Error: Could not find automake$AMVERSION"
    echo " Did you specify a wrong version?"
    exit 1
  fi
fi

## run autotools
echo "--> dunedoxynize..."
dunedoxynize
echo "--> libtoolize..."
# this script won't rewrite the files if they already exist. This is a
# PITA when you want to upgrade libtool, thus I'm setting --force
# Prefer glibtoolize (the name used on macOS) when it is available.
if [ x`type -t glibtoolize` = xfile ]; then
  LIBTOOLIZE=glibtoolize
fi
${LIBTOOLIZE-libtoolize} --force
# writing private m4 file
echo -n "--> "
dunecontrol --only=$name m4create
# prepare everything
echo "--> aclocal..."
rm -f aclocal.m4
rm -rf autom4te.cache
aclocal$AMVERSION -I . $ACLOCAL_FLAGS
# create a link to the dune-common am directory
if [ "$name" != "dune-common" ]; then
  if [ -n "$am_dir" ] && [ -d $am_dir ]; then
    echo "--> linking dune-common/am..."
    rm -f am
    ln -s $am_dir am
  else
    echo
    echo "Error: Could not find dune-common/am!"
    usage
    exit 1
  fi
fi
# applications should provide a config.h for now
echo "--> autoheader..."
autoheader$ACVERSION
echo "--> automake..."
automake$AMVERSION -W all --add-missing
echo "--> autoconf..."
autoconf$ACVERSION

## tell the user what to do next
echo "Now run ./configure to setup $name"
#! /usr/bin/env python3
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
#
# Wrapper around CTest for DUNE
#
# CTest returns with an error status not only when tests failed, but also
# when tests were only skipped. This wrapper checks the log and returns
# successfully if no tests failed; skipped tests do not result in an error.
# This behaviour is needed in a continuous integration environment, when
# building binary packages or in other cases where the testsuite should be
# run automatically.
#
# Moreover, this script also converts the XML test report generated by CTest
# into a JUnit report file that can be consumed by a lot of reporting
# software.
#
# Author: Ansgar Burchardt <Ansgar.Burchardt@tu-dresden.de>
# Author: Steffen Müthing <steffen.muething@iwr.uni-heidelberg.de> (for the JUnit part)
import errno
import glob
import os.path
import shutil
import subprocess
import sys
import xml.etree.ElementTree as et
from pathlib import Path
import os
import re
class CTestParser:
    """Convert a CTest XML report (Testing/*/Test.xml) into a JUnit file.

    Skipped tests (SKIP_RETURN_CODE=77) are not counted as failures;
    "Required Files Missing" is reported as a compilation error.

    Fixes over the previous revision:
    - findCTestOutput called the non-existent ``list.join`` and raised
      "multiple output files" even when none were found;
    - printTest tested ``output is not None`` and therefore clobbered an
      explicitly passed output string while never reading the report's own
      captured output;
    - __init__ called ``.resolve()`` on the plain string argument instead of
      the Path, and initialised ``self.skipped`` twice.
    """

    def findCTestOutput(self):
        """Return the single Testing/*/Test.xml file written by CTest."""
        files = glob.glob("Testing/*/Test.xml")
        if not files:
            raise Exception("Found no CTest output file (Testing/*/Test.xml)")
        if len(files) > 1:
            raise Exception("Found multiple CTest output files: {}".format(", ".join(files)))
        return files[0]

    def printTest(self, test, output=None):
        """Print a human-readable summary of one <Test> element.

        output: overrides the test's captured output when given; otherwise
        the output is taken from the report itself (if present).
        """
        status = test.get("Status")
        name = test.find("Name").text
        fullName = test.find("FullName").text
        if output is None:
            value = test.find("./Results/Measurement/Value")
            output = value.text if value is not None else None
        print("======================================================================")
        print("Name: {}".format(name))
        print("FullName: {}".format(fullName))
        print("Status: {}".format(status.upper()))
        if output:
            print("Output:")
            for line in output.splitlines():
                print(" ", line)
        print()

    def __init__(self, junitpath=None):
        """Locate the CTest report and decide where the JUnit file goes.

        junitpath: explicit output path; when None, a path below the CI
        project directory (or the current directory) is derived.
        """
        self.inputpath = self.findCTestOutput()
        if junitpath is None:
            if "CI_PROJECT_DIR" in os.environ:
                buildroot = Path(os.environ["CI_PROJECT_DIR"])
                # create a slug from the project name
                name = os.environ["CI_PROJECT_NAME"].lower()
                name = re.sub(r"[^-a-z0-9]", "-", name)
                junitbasename = "{}-".format(name)
            else:
                buildroot = Path.cwd()
                junitbasename = ""
            junitdir = buildroot / "junit"
            junitdir.mkdir(parents=True, exist_ok=True)
            self.junitpath = junitdir / "{}cmake.xml".format(junitbasename)
        else:
            self.junitpath = Path(junitpath)
            junitdir = self.junitpath.resolve().parent
            junitdir.mkdir(parents=True, exist_ok=True)
        # Statistics accumulated while processing the report.
        self.tests = 0
        self.passed = 0
        self.failures = 0
        self.skipped = 0
        self.errors = 0
        self.time = 0.0

    def createJUnitSkeleton(self):
        """Create the <testsuites>/<testsuite>/<properties> skeleton."""
        self.testsuites = et.Element("testsuites")
        self.testsuite = et.SubElement(self.testsuites, "testsuite")
        self.properties = et.SubElement(self.testsuite, "properties")

    def fillJUnitStatistics(self):
        """Copy the accumulated counters into the <testsuite> attributes."""
        self.testsuite.set("name", "cmake")
        self.testsuite.set("tests", str(self.tests))
        self.testsuite.set("disabled", "0")
        self.testsuite.set("errors", str(self.errors))
        self.testsuite.set("failures", str(self.failures))
        self.testsuite.set("skipped", str(self.skipped))
        self.testsuite.set("time", str(self.time))

    def processTest(self, test):
        """Convert one <Test> element into a JUnit <testcase> and update counters."""
        testcase = et.SubElement(self.testsuite, "testcase")
        testcase.set("name", test.find("Name").text)
        testcase.set("assertions", "1")
        testcase.set("classname", "cmake")
        time = test.find("./Results/NamedMeasurement[@name='Execution Time']/Value")
        if time is not None:
            self.time += float(time.text)
            testcase.set("time", time.text)
        self.tests += 1
        outcome = test.get("Status")
        if outcome == "passed":
            testcase.set("status", "passed")
            self.passed += 1
        elif outcome == "failed":
            self.failures += 1
            testcase.set("status", "failure")
            failure = et.SubElement(testcase, "failure")
            failure.set("message", "program execution failed")
            failure.text = test.find("./Results/Measurement/Value").text
            self.printTest(test)
        elif outcome == "notrun":
            # "Completion Status" does not exist on older CMake versions,
            # so fall back to scanning the captured output text.
            try:
                status = test.find("./Results/NamedMeasurement[@name='Completion Status']/Value").text
                if status == "SKIP_RETURN_CODE=77":
                    self.skipped += 1
                    et.SubElement(testcase, "skipped")
                elif status == "Required Files Missing":
                    self.errors += 1
                    error = et.SubElement(testcase, "error")
                    error.set("message", "compilation failed")
                    error.set("type", "compilation error")
                    self.printTest(test, output="Compilation error")
                else:
                    error = et.SubElement(testcase, "error")
                    error.set("message", "unknown error during test execution")
                    error.set("type", "unknown")
                    error.text = test.find("./Results/Measurement/Value").text
                    self.errors += 1
                    self.printTest(test)
            except AttributeError:
                # Older CMake: classify by inspecting the captured output.
                output_tag = test.find("./Results/Measurement/Value")
                if output_tag is not None:
                    msg = output_tag.text
                    if "skipped" in msg:
                        self.skipped += 1
                        et.SubElement(testcase, "skipped")
                    elif "Unable to find required file" in msg:
                        self.errors += 1
                        error = et.SubElement(testcase, "error")
                        error.set("message", "compilation failed")
                        error.set("type", "compilation error")
                        self.printTest(test, output="Compilation error")
                    else:
                        error = et.SubElement(testcase, "error")
                        error.set("message", "unknown error during test execution")
                        error.set("type", "unknown")
                        error.text = msg
                        self.errors += 1
                        self.printTest(test)
                else:
                    error = et.SubElement(testcase, "error")
                    error.set("message", "unknown error during test execution")
                    error.set("type", "unknown")
                    error.text = "no message"
                    self.errors += 1
                    self.printTest(test)
        # Attach the captured output as <system-out> when present.
        output_tag = test.find("./Results/Measurement/Value")
        if output_tag is not None:
            out = et.SubElement(testcase, "system-out")
            out.text = output_tag.text

    def process(self):
        """Parse the CTest XML, write the JUnit file, return errors+failures."""
        with open(self.inputpath, "r", encoding="utf-8") as fh:
            tree = et.parse(fh)
        root = tree.getroot()
        self.createJUnitSkeleton()
        for test in root.findall(".//Testing/Test"):
            self.processTest(test)
        self.fillJUnitStatistics()
        with self.junitpath.open("wb") as fh:
            fh.write(et.tostring(self.testsuites, encoding="utf-8"))
        print("JUnit report for CTest results written to {}".format(self.junitpath))
        return self.errors + self.failures
def runCTest(argv=[]):
    """Run ctest in dashboard mode so it writes Testing/*/Test.xml.

    argv: extra command-line arguments appended to the ctest invocation.
    """
    command = [
        "ctest",
        "--output-on-failure",
        "--dashboard", "ExperimentalTest",
        "--no-compress-output",
    ]
    command += argv
    subprocess.call(command)
def checkDirectory():
    """Abort unless the current directory looks like a CMake build directory."""
    if os.path.exists("CMakeCache.txt"):
        return
    raise Exception("ERROR: dune-ctest must be run in a cmake build directory")
def removeCTestOutput():
    """Delete the Testing/ directory; a missing directory is not an error."""
    try:
        shutil.rmtree("Testing")
    except FileNotFoundError:
        # Equivalent to the errno.ENOENT check: nothing to remove is fine.
        pass
def main():
    """Run CTest, convert its report to JUnit, and exit with 0/1/127."""
    try:
        checkDirectory()
        removeCTestOutput()
        runCTest(argv=sys.argv[1:])
        reporter = CTestParser()
        failed = reporter.process()
        sys.exit(1 if failed else 0)
    except Exception as e:
        # Internal problems must not be mistaken for test failures.
        print("Internal error: {}".format(e))
        sys.exit(127)


if __name__ == "__main__":
    main()
#!/bin/sh
# dune-git-whitespace-hook
# DO NOT TOUCH THE PRECEDING LINE
# It is used by dunecontrol to enable automatic updates of the whitespace hook
#
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
#
# DUNE pre-commit hook to enforce whitespace policy
# This hook prevents adding lines with trailing whitespace and or tab characters
# in line indentation for certain files (see the TRAILING_WHITESPACE_DEFAULT and
# TAB_IN_INDENT_DEFAULT variables below for the default sets of files that will
# be checked).
# You can tell the hook which files should be inspected by setting the Git
# configuration variables "hooks.whitespace.trailing" and "hooks.whitespace.tabinindent".
# Those variables should contain valid Perl regular expressions. The names of modified
# files will be matched against those regexes.
# git-diff-index needs a valid commit to compare to
if git rev-parse --verify HEAD >/dev/null 2>&1
then
  against=HEAD
else
  # Initial commit: diff against an empty tree object
  against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
fi

# By default, we disallow trailing whitespace for the following files, but the check for C/C++ and CMake sources
# happens in the tab-in-indent check to avoid confusing users with duplicate error messages
TRAILING_WHITESPACE_DEFAULT='^(dune\.module|README|README\.SVN|COPYING|INSTALL|TODO)$|^[^/]*(\.md|\.pc\.in)$|^doc/.*\.md$'

# By default, we disallow tabs in indents and trailing whitespace in C/C++ and CMake source files
TAB_IN_INDENT_DEFAULT='(^|/)CMakeLists\.txt$|(\.cpp|\.hpp|\.cc|\.hh|\.c|\.h|\.cmake|\.sh|\.py)$'

# Get user preferences
TRAILING_WHITESPACE_FILES=$(git config hooks.whitespace.trailing)
# Set default regex for disallowing trailing whitespace if the user did not set anything.
# We need to check the return value of git-config to distinguish the case
# when the user set an empty value
# (the $? test below must directly follow the git-config call above)
if [ $? -ne 0 ];
then
  TRAILING_WHITESPACE_FILES="$TRAILING_WHITESPACE_DEFAULT"
fi

TAB_IN_INDENT_FILES=$(git config hooks.whitespace.tabinindent)
# Set default regex for disallowing tabs if the user did not set anything.
# We need to check the return value of git-config to distinguish the case
# when the user set an empty value
if [ $? -ne 0 ];
then
  TAB_IN_INDENT_FILES="$TAB_IN_INDENT_DEFAULT"
fi

# Unfortunately, we have to mess directly with the repository config file,
# as git won't honor a custom config file specified by GIT_CONFIG
# backup repository-local user setting for core.whitespace
USER_WHITESPACE=$(git config --local --get core.whitespace)
if [ $? -ne 0 ];
then
  USER_HAS_CUSTOM_WHITESPACE=0
else
  USER_HAS_CUSTOM_WHITESPACE=1
fi

# Figure out how to call xargs to make sure it won't invoke its argument with
# an empty argument list. BSD xargs will not do that by default, while GNU xargs
# needs -r to do the same. So we start by checking whether xargs does the right
# thing without options. Now there could be other obscure versions of xargs out
# there (on clusters etc.) that behave in yet another way, so we try with -r as
# well. If that fails, we throw a big error message at the user.
# In the following line, xargs should not call false, so the return value should be 0.
echo "" | xargs false
if [ $? -ne 0 ]; then
  # Let's try with -r
  echo "" | xargs -r false
  if [ $? -ne 0 ]; then
    # Houston, we have a problem
    if [ -z "$DUNE_WHITESPACE_IGNORE_XARGS" ]; then
      echo "You seem to be lacking a version of xargs that is compatible to either BSD or GNU!" 1>&2
      echo "Please file a bug report at http://dune-project.org about this issue with your exact operating system type and version!" 1>&2
      echo "You can still use this hook by setting the environment variable DUNE_WHITESPACE_IGNORE_XARGS to 1, but please be aware" 1>&2
      echo "that the hook might create false positives." 1>&2
      echo "==============================================================" 1>&2
      echo "Aborting the commit..." 1>&2
      exit 99
    else
      SILENTXARGS=xargs
    fi
  else
    SILENTXARGS="xargs -r"
  fi
else
  SILENTXARGS=xargs
fi

# State shared with the EXIT trap: fail records whether any check failed,
# done records whether the script reached its end normally.
fail=0
done=0
# Restore the user's core.whitespace setting and report an aborted run.
# Installed as an EXIT trap, so it runs on both success and failure paths.
do_cleanup()
{
  if [ $done -ne 1 ];
  then
    echo "Error while executing whitespace checking pre-commit hook!" 1>&2
    echo "There might still be whitespace errors in your commit!" 1>&2
  fi

  # Put back the repository-local core.whitespace value (or drop ours).
  if [ $USER_HAS_CUSTOM_WHITESPACE -eq 1 ];
  then
    git config --replace-all core.whitespace "$USER_WHITESPACE"
  else
    git config --unset core.whitespace
  fi

  # be nice and let the commit go through if something went wrong along the
  # way and we did not record a failure
  exit $fail
}
trap do_cleanup EXIT

# set custom value
# First pass: check only for trailing whitespace, limited to the configured
# file set (empty regex means: check everything the index knows about).
git config --replace-all core.whitespace trailing-space
if [ -z "$TRAILING_WHITESPACE_FILES" ];
then
  git diff-index --check --cached $against --
  result=$?
else
  export TRAILING_WHITESPACE_FILES
  git diff-index --cached --name-only $against \
    | perl -ne 'print if /$ENV{TRAILING_WHITESPACE_FILES}/' \
    | $SILENTXARGS git diff-index --check --cached $against --
  result=$?
fi
if [ $result -ne 0 ];
then
  fail=1
fi

# Second pass: additionally reject tab characters in indentation.
git config --replace-all core.whitespace trailing-space,tab-in-indent
if [ -z "$TAB_IN_INDENT_FILES" ];
then
  git diff-index --check --cached $against --
  result=$?
else
  export TAB_IN_INDENT_FILES
  git diff-index --cached --name-only $against \
    | perl -ne 'print if /$ENV{TAB_IN_INDENT_FILES}/' \
    | $SILENTXARGS git diff-index --check --cached $against --
  result=$?
fi
if [ $result -ne 0 ];
then
  fail=1
fi

done=1
# trap will call the cleanup code
This diff is collapsed.
#!/bin/sh
# Enable shell tracing when DEBUG=yes is set in the environment.
if [ "x$DEBUG" = "xyes" ]; then
  set -x
fi

# Output file (first argument, default doc/doxygen/Doxyfile.in) and the
# directories derived from it.
DOXYOUT=${1:-doc/doxygen/Doxyfile.in}
OUTDIR=`dirname "$DOXYOUT"`
DOXYDIR="doc/doxygen/"
# Locate the global Doxystyle file (source-tree or installed layout) and
# export its path in DOXYSTYLE. Returns 1 when no style file is found.
find_doxystyle()
{
  local candidate
  for candidate in `dirname $0`/../doc/doxygen/Doxystyle `dirname $0`/../share/dune-common/doc/doxygen/Doxystyle; do
    if [ -f "$candidate" ]; then
      export DOXYSTYLE=$candidate
      return 0
    fi
  done
  return 1
}
# Succeed when at least one argument is a DUNE module directory
# (i.e. contains a dune.module file).
test_doxylocal()
{
  until [ -z "$1" ]; do
    [ -f "$1/dune.module" ] && return 0
    shift
  done
  return 1
}
# Extract the relevant configuration from a module's Doxygen file.
# $1: module directory, $2: file name (Doxylocal | Doxyfile.in | Doxyfile).
# Only INPUT/EXAMPLE_PATH/IMAGE_PATH/PREDEFINED/EXCLUDE/EXAMPLE_PATTERNS
# assignments (plus their backslash-continuation lines) are kept, and plain
# '=' assignments are rewritten to '+=' so values append to the global style.
parse_doxyfile()
{
  # Doxylocal is used _as is_
  if [ "$2" = "Doxylocal" ]; then
    cat $1/$DOXYDIR/$2
    return
  fi
  local FILE=$1/$DOXYDIR/$2
  local FOUND=0
  local line
  # find lines that match the pattern
  # (sets the positional parameters to the matching line numbers)
  set `grep -n '^ *\(INPUT\|EXAMPLE_PATH\|IMAGE_PATH\|PREDEFINED\|EXCLUDE\|EXAMPLE_PATTERNS\) *[+=]' $FILE | cut -d: -f1`
  # search lines in $@ and proceeding lines
  grep -n . $FILE |
  sed -e 's/\\/\\\\/g' |
  while read line; do
    # line numbers are prefixed by grep -n; compare against the next match
    if [ "${line%%:*}" -eq "${1:-0}" ]; then
      FOUND=1
      # subst = by += if necessary
      start="${line%%=*}"
      case "$start" in
        *+) ;;
        *) line="$start+=${line#*=}" ;;
      esac
      shift
    fi
    if [ $FOUND -eq 1 ]; then
      echo "$line"
    else
      continue
    fi
    # check for trailing '\' (continuation lines stay in FOUND state)
    case "$line" in
      *\\) FOUND=1 ;;
      *) FOUND=0 ;;
    esac
  done | sed -e 's/^[0-9]*://'
}
# Like parse_doxyfile, but for modules given by absolute path the
# @srcdir@/@top_srcdir@ style placeholders are substituted as well.
parse_doxylocal()
{
  case "$1" in
    /*)
      srcdir=$1/$DOXYDIR
      top_srcdir=$1
      parse_doxyfile $1 $2 | sed -e "s!@\(abs_\)*srcdir@!$srcdir!" -e "s!@\(abs_\)*top_srcdir@!$top_srcdir!"
      ;;
    *)
      parse_doxyfile $1 $2
      ;;
  esac
}
# Print the value of the "Module:" field from <dir>/dune.module.
get_module_name()
{
  sed -n -e 's/^Module: *//p' $1/dune.module
}
# Assemble $DOXYOUT from the global Doxystyle plus the per-module
# configuration of every module directory passed as argument.
generate_doxyout()
{
  echo "Generating $DOXYOUT from "
  echo " global style"
  cat $DOXYSTYLE > $DOXYOUT
  while [ -n "$1" ]; do
    # Use the first of Doxylocal/Doxyfile.in/Doxyfile that exists,
    # skipping the output file itself (-ef: same file).
    for DOXY in Doxylocal Doxyfile.in Doxyfile; do
      if [ "$1/$DOXYDIR/$DOXY" -ef "$DOXYOUT" ]; then continue; fi
      if [ -f "$1/$DOXYDIR/$DOXY" ]; then
        echo " and `get_module_name $1` config"
        parse_doxylocal $1 $DOXY >> $DOXYOUT
        break
      fi
    done
    shift
  done
  echo " ... done"
}
# Bootstrap a Doxylocal file from the module's existing handwritten
# Doxyfile.in/Doxyfile (used when migrating old-style modules).
generate_doxylocal()
{
  echo "Generating $DOXYOUT from "
  for DOXY in Doxyfile.in Doxyfile; do
    if [ -f "$1/$DOXYDIR/$DOXY" ]; then
      echo " `get_module_name .` $DOXY"
      parse_doxylocal . $DOXY > $DOXYOUT
      break
    fi
  done
  echo " ... done"
}
# make sure we are in dune module
if ! [ -f dune.module ]; then
  echo "Error: dunedoxynize must be called from the top_srcdir of your module"
  exit 1
fi

# don't do anything if we have old style Doxyfile or Doxyfile.in, but no Doxylocal
if [ $# -eq 0 ] && ! [ -f "$DOXYDIR/Doxylocal" ]; then
  for DOXY in Doxyfile.in Doxyfile; do
    if [ -f "$DOXYDIR/$DOXY" ]; then
      echo "Warning: module still uses handwritten $DOXY"
      echo " You can transform it to an initial Doxylocal by calling"
      echo " dune-common/bin/dunedoxynize doc/doxygen/Doxylocal ."
      echo " in the top source directory of this module"
      exit 0
    fi
  done
  exit 0
fi

# quit if your module doesn't have any documentation
if ! [ -d "$OUTDIR" ]; then
  if [ $# -eq 0 ]; then
    exit 0
  else
    echo "Error: output directory $OUTDIR does not exist."
    # NOTE(review): this reports an error but exits with status 0 — confirm
    # whether a non-zero exit status was intended here.
    exit 0
  fi
fi

# search for doxygen style
if ! find_doxystyle; then
  echo "Error: dunedoxynize global style not found"
  exit 1
fi

# drop the parameter of the output file
if [ $# -gt 0 ]; then
  shift
fi

# make sure that there is at least one Doxylocal file
if ! test_doxylocal "${@:-.}" && [ $# -gt 0 ]; then
  echo "Error: you didn't supply any valid Doxylocal file"
  exit 1
fi

# generate either a Doxylocal template or the final Doxyfile, depending on
# the requested output file name
if [ "`basename $DOXYOUT`" = "Doxylocal" ]; then
  generate_doxylocal "${@:-.}"
else
  generate_doxyout "${@:-.}"
fi
#!/usr/bin/env python3
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
# scikit-build is required for packaging; bail out gracefully when missing.
try:
    import skbuild
except ImportError:
    print("skbuild needed for packaging, run 'pip install scikit-build'")
    import sys
    sys.exit(0)

import sys, os, io, getopt, re, shutil
import importlib, subprocess
import email.utils
from datetime import date

# make sure that 'metadata' is taken from the current `dune-common` folder
# and not some installed version which might be different from the one I'm
# packaging (by mistake). The path to `packagemetadata.py` needs to be
# added to the python path (to work here) and to the environment so that a
# later call to `python setup.py` also works.
here = os.path.dirname(os.path.abspath(__file__))
mods = os.path.join(here, "..", "python", "dune")
sys.path.append(mods)
pythonpath = mods + ":" + os.environ.get('PYTHONPATH','.')
os.environ['PYTHONPATH'] = pythonpath

try:
    from packagemetadata import metaData
except ImportError:
    # not calling from within a dune-common source module so use installed
    # version after all
    from dune.packagemetadata import metaData
def main(argv):
repositories = ["gitlab", "testpypi", "pypi"]
def usage():
return 'usage: dunepackaging.py [--upload <'+"|".join(repositories)+'> | -c | --clean | --version <version> | --onlysdist | --bdist_conda]'
try:
opts, args = getopt.getopt(argv, "hc", ["upload=", "clean", "version=", "onlysdist", "bdist_conda"])
except getopt.GetoptError:
print(usage())
sys.exit(2)
upload = False
repository = "gitlab"
clean = False
version = None
onlysdist = False
bdistconda = False
for opt, arg in opts:
if opt == '-h':
print(usage())
sys.exit(2)
elif opt in ("--upload"):
upload = True
if arg != '':
repository = arg
if repository not in repositories:
print("Specified repository must be one of: " + " ".join(repositories))
sys.exit(2)
elif opt in ("-c", "--clean"):
clean = True
elif opt in ("--version"):
version = arg
elif opt in ("--onlysdist"):
onlysdist = True
elif opt in ("--bdist_conda"):
onlysdist = True
bdistconda = True
# Remove generated files
def removeFiles():
import glob
files = ['MANIFEST', 'dist', '_skbuild', '__pycache__']
print("Remove generated files: " + ", ".join(files))
remove = ['rm', '-rf'] + files
subprocess.call(remove)
# checkout setup.py and pyproject.toml
checkout = ['git', 'checkout', 'setup.py', 'pyproject.toml']
subprocess.call(checkout)
if clean:
removeFiles()
sys.exit(0)
data, cmake_flags = metaData(version, dependencyCheck=False)
if version is None:
version = data.version
# Generate setup.py
print("Generate setup.py")
f = open("setup.py", "w")
f.write("""#
# DO NOT MODIFY THIS FILE!
# This file is autogenerated by the `dunepackaging.py` script and
# only used for the pypi dune packages. This file will not be included in
# the build directory.
#
# See https://www.dune-project.org/dev/adding_python/ for docs on
# Python packaging for Dune modules.
#
""")
f.write("import os, sys\n")
if data.name == 'dune-common':
f.write("here = os.path.dirname(os.path.abspath(__file__))\n")
f.write("mods = os.path.join(here, \"python\", \"dune\")\n")
f.write("sys.path.append(mods)\n\n")
f.write("try:\n")
f.write(" from dune.packagemetadata import metaData\n")
f.write("except ImportError:\n")
f.write(" from packagemetadata import metaData\n")
f.write("from skbuild import setup\n")
f.write("setup(**metaData('"+version+"')[1])\n")
f.close()
# Generate pyproject.toml
print("Generate pyproject.toml")
f = open("pyproject.toml", "w")
f.write("""#
# DO NOT MODIFY THIS FILE!
# This file is autogenerated by the `dunepackaging.py` script and
# only used for the pypi dune packages. This file will not be included in
# the build directory.
#
# See https://www.dune-project.org/dev/adding_python/ for docs on
# Python packaging for Dune modules.
#
# This is uses the `Python-Requires` field in the `dune.modules` file to
# populate the `requires` entry. Additional packages needed for the package
# build should be added in the `dune.modules`. These packages will then also be
# included in the package install from source.
#
""")
requires = data.asPythonRequirementString(data.depends + data.python_requires)
requires = list(set(requires)) # make requirements unique
minimal = ["pip", "setuptools", "wheel", "scikit-build", "cmake>=3.16", "ninja", "requests"]
requires += [r for r in minimal if not any([a.startswith(r) for a in requires])]
requires.sort()
f.write("[build-system]\n")
f.write("requires = "+requires.__str__()+"\n")
f.write("build-backend = 'setuptools.build_meta'\n")
f.close()
# Create source distribution and upload to repository
python = sys.executable
if upload or onlysdist:
print("Remove dist")
remove = ['rm', '-rf', 'dist']
subprocess.call(remove)
# check if we have scikit-build
import importlib
if importlib.util.find_spec("skbuild") is None:
print("Please install the pip package 'scikit-build' to build the source distribution.")
sys.exit(2)
# append hash of current git commit to README
shutil.copy('README.md', 'tmp_README.md')
githash = ['git', 'rev-parse', 'HEAD']
hash = subprocess.check_output(githash, encoding='UTF-8')
with open("README.md", "a") as f:
f.write("\n\ngit-" + hash)
print("Create source distribution")
# make sure setup.py/pyproject.toml are tracked by git so that
# they get added to the package by scikit
gitadd = ['git', 'add', 'setup.py', 'pyproject.toml']
subprocess.call(gitadd)
# run sdist
build = [python, 'setup.py', 'sdist']
subprocess.call(build, stdout=subprocess.DEVNULL)
# undo the above git add
gitreset = ['git', 'reset', 'setup.py', 'pyproject.toml']
subprocess.call(gitreset)
# restore README.md
shutil.move('tmp_README.md', 'README.md')
if not onlysdist:
# check if we have twine
import importlib
if importlib.util.find_spec("twine") is None:
print("Please install the pip package 'twine' to upload the source distribution.")
sys.exit(2)
twine = [python, '-m', 'twine', 'upload']
twine += ['--repository', repository]
twine += ['dist/*']
subprocess.call(twine)
removeFiles()
# create conda package meta.yaml (experimental)
if bdistconda:
import hashlib
remove = ['rm', '-rf', 'dist/'+data.name]
subprocess.call(remove)
mkdir = ['mkdir', 'dist/'+data.name ]
subprocess.call(mkdir)
print("Create bdist_conda (experimental)")
distfile = 'dist/'+data.name+'-'+version+'.tar.gz'
datahash = ''
with open(distfile, "rb") as include:
source = include.read()
datahash = hashlib.sha256( source ).hexdigest()
print("Generate ",'dist/'+data.name+'/meta.yaml')
f = open('dist/'+data.name+'/meta.yaml', "w")
f.write('{% set name = "' + data.name + '" %}\n')
f.write('{% set version = "' + version + '" %}\n')
f.write('{% set hash = "' + datahash + '" %}\n\n')
f.write('package:\n')
f.write(' name: "{{ name|lower }}"\n')
f.write(' version: "{{ version }}"\n\n')
f.write('source:\n')
f.write(' path: ../{{ name }}-{{ version }}/\n')
f.write(' sha256: {{ hash }}\n\n')
f.write('build:\n')
f.write(' number: 1\n')
if 'TMPDIR' in os.environ:
f.write(' script_env:\n')
f.write(' - TMPDIR=' + os.environ['TMPDIR'] +'\n')
f.write(' script: "{{ PYTHON }} -m pip install . --no-deps --ignore-installed -vv "\n\n')
f.write('requirements:\n')
requirements = ['pip', 'python', 'mkl', 'tbb', 'intel-openmp',
'libgcc-ng', 'libstdcxx-ng', 'gmp', 'scikit-build',
'mpi4py', 'matplotlib', 'numpy', 'scipy', 'ufl']
for dep in data.depends:
requirements += [dep[0]]
f.write(' host:\n')
for dep in requirements:
f.write(' - ' + dep + '\n')
f.write('\n')
f.write(' run:\n')
for dep in requirements:
f.write(' - ' + dep + '\n')
f.write('\n')
f.write('test:\n')
f.write(' imports:\n')
f.write(' - ' + data.name.replace('-','.') + '\n\n')
f.write('about:\n')
f.write(' home: '+data.url+'\n')
f.write(' license: GPLv2 with linking exception.\n')
f.write(' license_family: GPL\n')
f.write(' summary: '+data.description+'\n')
f.close()
# Entry point: forward the command-line arguments (without the program
# name) to main() when this file is executed as a script.
if __name__ == "__main__":
    main(sys.argv[1:])
This diff is collapsed.
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
try:
    from dune.common.module import resolve_dependencies, resolve_order, select_modules
except ImportError:
    # Fallback for running directly from a source checkout: locate
    # python/dune/common next to this script and import module.py from there.
    import os
    script_dir = os.path.dirname(os.path.abspath(__file__))
    fallback = os.path.join(os.path.dirname(script_dir), "python", "dune", "common")
    if not os.path.exists(os.path.join(fallback, "module.py")):
        raise
    import sys
    sys.path.append(fallback)
    from module import resolve_dependencies, resolve_order, select_modules

# Report every DUNE module found on the system.
print("Found Modules:")
print("--------------")
modules, _ = select_modules()
for desc in modules.values():
    print(repr(desc))
print()
print()

# Report the resolved dependency sets, one line per module.
print("Resolved Dependencies:")
print("----------------------")
deps = resolve_dependencies(modules)
for name, dependencies in deps.items():
    print("{}: {}".format(name, " ".join(dependencies)))
print()

# Report a dependency-respecting build order.
print("Build Order:")
print("------------")
print(" ".join(resolve_order(deps)))
#!/bin/bash
set -e
# Follow symlinks until a non-link is reached and print the result.
# Usage: canonicalname path
# Fixes over the previous version:
#  * a relative symlink target is resolved against the link's directory
#    (POSIX semantics), not against the current working directory;
#  * when readlink fails we stop following links instead of echoing from
#    inside the loop and spinning forever on the still-symlinked name.
canonicalname(){
  if test $# -ne 1; then
    echo Usage: canonicalname path >&2
    return 1
  fi
  name="$1"
  while test -L "$name"; do
    if ! test -e "$name"; then
      echo $name: file not found >&2
      return 1
    fi
    if newname="$(readlink "$name")"; then
      case "$newname" in
        /*) name="$newname" ;;                       # absolute target
        *)  name="$(dirname "$name")/$newname" ;;    # relative to link dir
      esac
    else
      # readlink failed: keep the current name and stop following
      break
    fi
  done
  echo $name
}
# Print the canonical directory containing the given path.
# Usage: canonicalpath path
# Expansions are quoted so paths containing whitespace survive word
# splitting (the previous version broke on such paths).
canonicalpath(){
  if test $# -ne 1; then
    echo Usage: canonicalpath path >&2
    return 1
  fi
  (cd "$(dirname "$(canonicalname "$1")")" && pwd)
}
# Locate the directory holding mpi-config.m4 and print it.
# Candidates are tried in order: source build (../m4), installed
# (../share/aclocal), downloaded (next to this script).  Exits with an
# error when none of them contains the file.
findm4dir() {
  BINDIR="$(canonicalpath $0)"
  for M4DIR in "$BINDIR/../m4" "$BINDIR/../share/aclocal" "$BINDIR"; do
    if test -f "$M4DIR/mpi-config.m4"; then
      echo "$M4DIR"
      return
    fi
  done
  echo "ERROR: could not find mpi-config.m4! Incomplete installation?" >&2
  exit 1
}
version=0.1
verbose=0

# Print the help text and exit with status $1.
# Fix: the option list previously omitted --ldflags although the option
# parser below accepts it.
usage()
{
cat <<EOF
Usage: mpi-config [OPTIONS] [LIBRARIES]
Options:
    [--mpicc[=COMPILER]]
    [--disable-cxx]
    [--verbose]
    [--version]
    [--mpiversion]
    [--libs]
    [--cflags]
    [--ldflags]
EOF
exit $1
}
# Require at least one option; otherwise print usage and fail.
if test $# -eq 0 ; then
  usage 1 1>&2
fi

# Parse the command line.  Output-producing options are appended to
# $tasks and executed at the end of the script; other options set
# configuration variables.
while test $# -gt 0 ; do
  # extract the value part of --opt=value arguments into $optarg
  case "$1" in
  -*=*) optarg=`echo "$1" | sed 's/[-_a-zA-Z0-9]*=//'` ;;
  *) optarg= ;;
  esac
  case $1 in
  --mpicc=*)
    MPICC=$optarg
    ;;
  --version)
    # version of this script itself (see --mpiversion for the MPI version)
    echo $version
    exit 0
    ;;
  --verbose)
    verbose=1
    ;;
  --disable-cxx)
    disablecxx=yes
    ;;
  --mpiversion)
    tasks="$tasks print_mpiversion"
    ;;
  --cflags)
    tasks="$tasks print_cflags"
    ;;
  --ldflags)
    tasks="$tasks print_ldflags"
    ;;
  --libs)
    tasks="$tasks print_libs"
    ;;
  *)
    usage 1 1>&2
    ;;
  esac
  shift
done
# default to the mpicc compiler wrapper when --mpicc= was not given
if test x$MPICC = x ; then
  MPICC=mpicc
fi

#
# LIB
#
# load mpi-config.m4
#
# find m4 file
M4DIR=`findm4dir`

# Generate shell code from the autoconf macros in mpi-config.m4: m4 is
# fed a prelude that redefines AC_DEFUN and the AC_MSG_* macros so that
# expanding MPI_CONFIG_HELPER yields plain shell functions (notably
# get_mpiparameters), which are then evaluated into the current shell.
# The AC_MSG_* replacements only print when $verbose is set.
# NOTE(review): the AC_MSG_ERROR replacement only runs `exit 1` inside
# the verbose check, so errors are silently ignored unless --verbose is
# given — confirm whether that is intended.
eval "$(
m4 -I$M4DIR <<EOF
changequote([, ])
define([AC_DEFUN],[define([\$1],[\$2])])
define([AC_MSG_CHECKING],[
  if test $verbose -gt 0; then
    echo -n "checking \$@..."
  fi
])
define([AC_MSG_RESULT],[
  if test $verbose -gt 0; then
    echo " \$@"
  fi
])
define([AC_MSG_NOTICE],[
  if test $verbose -gt 0; then
    echo "\$@"
  fi
])
define([AC_MSG_ERROR],[
  if test $verbose -gt 0; then
    echo "Error: \$@"
    exit 1
  fi
])
include([mpi-config.m4])
MPI_CONFIG_HELPER
EOF
)"
#
# output methods
#
# Each helper calls get_mpiparameters (defined by the m4 expansion above)
# and prints one aspect of the detected MPI configuration.

# Print the detected MPI version.
print_mpiversion() {
  get_mpiparameters
  echo $MPI_VERSION
}

# Print the preprocessor/compiler flags; with --disable-cxx the flags
# that switch off the MPI C++ bindings are appended.
print_cflags() {
  get_mpiparameters
  if test x$disablecxx = xyes; then
    DUNEMPICPPFLAGS="$DUNEMPICPPFLAGS $MPI_NOCXXFLAGS"
  fi
  echo $DUNEMPICPPFLAGS
}

# Print the linker flags.
print_ldflags() {
  get_mpiparameters
  echo $DUNEMPILDFLAGS
}

# Print the libraries to link against.
print_libs() {
  get_mpiparameters
  echo $DUNEMPILIBS
}

# Execute the output tasks collected while parsing the command line.
for task in $tasks; do
  eval $task
done
#!/bin/bash
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
#
# This script builds a Python package index on gitlab.dune-project.org. Such an
# index is necessary as a drop-in replacement for PyPI in continuous integration,
# when runners operate with restricted network access.
#
# Running this script requires the following prerequisites to be met:
# * Go to Gitlab Profile/Setting/Access Tokens and create a personal API access token with
# at least the `write_registry` and 'api' scope.
# * Export your token with `export TWINE_PASSWORD=<token>`
#
# This script exits upon errors
# This script exits upon errors
set -e

# We authenticate with a personal Gitlab API token. You are expected to
# have set TWINE_PASSWORD to your API token when calling this script.
export TWINE_USERNAME=__token__

# Make sure that TWINE_PASSWORD was set
# export TWINE_PASSWORD=...
if [ -z "$TWINE_PASSWORD" ]
then
  echo "TWINE_PASSWORD was not set!"
  exit 1
fi

# Create a temporary directory as workspace for this script and set up a
# throw-away virtual environment with the tooling used below.
TMPDIR=$(mktemp -d)
pushd $TMPDIR
python3 -m venv env
source env/bin/activate
python -m pip install pip-download twine
# Download the packages (and their dependencies) to mirror in the index.
# The dune packages are currently disabled; re-add them to the argument
# list below to include them:
#   dune-common dune-geometry dune-grid dune-istl dune-localfunctions
#   dune-alugrid dune-fem
# Fixes: the disabled packages used to be comment lines *inside* the
# backslash-continued command — a comment terminates the command, so the
# remaining names were executed as a separate command ('pyparsing ...').
# 'pip>=21' is now quoted; unquoted, the '>' redirected output to a file
# named '=21' instead of passing a version specifier.
pip-download -d $(pwd)/downloads \
    pyparsing \
    mpi4py \
    wheel \
    setuptools \
    jinja2 \
    portalocker \
    fenics-ufl==2019.1.0 \
    matplotlib \
    scipy \
    'pip>=21' \
    ninja \
    sortedcontainers

# Upload the packages to the index
for filename in downloads/*
do
    # NB: The 133 here is the Gitlab project ID of dune-common.
    python -m twine upload --verbose --skip-existing --repository-url https://gitlab.dune-project.org/api/v4/projects/133/packages/pypi $filename
done

# Clean up the temporary directory
popd
rm -rf $TMPDIR
#!/bin/bash
set -e
###############################################
###
### read lib
###
# Resolve "$1" to an absolute, symlink-free name and print it.
# Symlinks are followed recursively; a plain file is canonicalized by
# canonicalizing its directory, a directory by cd-ing into it and
# printing the resulting $PWD.
# Usage: canonicalname path
canonicalname(){
  if test $# -ne 1; then
    echo Usage: canonicalname path >&2
    return 1
  fi
  file="$1"
  if test ! -e "$file"; then
    echo $file: file not found >&2
    return 1
  fi
  # if this is a symlink, then follow the symlink
  if test -L "$file"; then
    fdir="`dirname \"$file\"`"
    flink="`readlink \"$file\"`"
    if test -e "$flink"; then
      # these are absolute links, or links in the CWD
      canonicalname "$flink"
    else
      # relative link target: resolve it against the link's directory
      canonicalname "$fdir/$flink"
    fi
  else
    # if this is a file, then remember the filename and
    # canonicalize the directory name
    if test -f "$file"; then
      fdir="`dirname \"$file\"`"
      fname="`basename \"$file\"`"
      fdir="`canonicalname \"$fdir\"`"
      echo "$fdir/$fname"
    fi
    # if this is a directory, then create an absolute
    # directory name and we are done
    if test -d "$file"; then
      (cd "$file"; pwd)
    fi
  fi
}
# Print the directory part of the canonicalized path.
# Usage: canonicalpath path
canonicalpath(){
  case $# in
    1)
      dirname $(canonicalname "$1")
      ;;
    *)
      echo Usage: canonicalpath path >&2
      return 1
      ;;
  esac
}
# Print "yes" when --debug occurs among the arguments, "no" otherwise.
checkdebug () {
  for arg in "$@"; do
    if test x$arg = x--debug; then
      echo yes
      return
    fi
  done
  echo no
}
# Enable shell tracing when --debug appears anywhere on the command line.
DEBUG=`checkdebug $@`
if test "x$DEBUG" = "xyes"; then
  set -x
  set -v
fi

export COMMAND_DIR="`canonicalpath $0`"

# Read the modules find part
# (presumably provides check_version and PARSE_SUGDEP_PATTERN used below
# — confirm against dunemodules.inc)
. "$COMMAND_DIR/dunemodules.inc"
#
# test version checks
#
# Report on stderr whether version $1 satisfies constraint $2, as decided
# by check_version (from dunemodules.inc).
test_version_check () {
  if check_version "$1" "$2"; then
    echo "OK: version does match (found $1, required $2)" >&2
  else
    echo "ERROR: version does not match (found $1, required $2)" >&2
    #exit 1
  fi
}
# Exercise check_version with a mix of matching and non-matching
# constraints: disjunctions, conjunctions, and the single comparison
# operators against two- and three-component versions.
test_version_check "1.2.3" ">= 1.2.5 || < 1.2.4"
test_version_check "1.2.4" ">= 1.2.5 || < 1.2.4"
test_version_check "1.2.5" ">= 1.2.5 || < 1.2.4"
test_version_check "1.2" ">= 1.2.5 || < 1.2.4"
test_version_check "1.2.3" ">= 1.2.5 && < 1.2.4"
test_version_check "1.2.4" "< 1.2.5 && >= 1.2.4"
test_version_check "1.2.3" ">= 1.2"
test_version_check "1.2.3" "= 2.4.1"
test_version_check "1.2.3" "= 1.2.3"
test_version_check "1.2.3" "> 1.2"
test_version_check "1.2.3" "= 1.2"
test_version_check "1.2.3" "< 1.2"
# Copy the global PARSE_SUGDEP_PATTERN (from dunemodules.inc) into PATTERN.
# NOTE(review): this helper appears unused in this script — parse_version
# below assigns PATTERN directly; confirm before removing.
check_pattern()
{
  PATTERN=$PARSE_SUGDEP_PATTERN
}
# Parse a dependency string such as "dune-common (>= 1.2), dune-istl (<999)"
# and echo each module name with its version constraint, consuming the
# string left to right.  Exits the script when the input does not match.
parse_version()
{
  local deps="$1"
  local name=""
  local dep=""
  local xdeps=""
  echo start: $deps
  while test -n "$deps"; do
    # PARSE_SUGDEP_PATTERN (from dunemodules.inc) captures:
    #   \1 = module name, \2 = version constraint, \3 = rest of the string
    PATTERN=$PARSE_SUGDEP_PATTERN
    name=`echo $deps | sed -e "s/$PATTERN/\1/"`
    ver=`echo $deps | sed -e "s/$PATTERN/\2/" -e 's/[()]//g'`
    xdeps=`echo $deps | sed -e "s/$PATTERN/\3/"`
    # sed made no progress: the string does not match the pattern
    if test "$deps" = "$xdeps"; then
      echo Error parsing dependency string \"$1\"
      exit 1
    fi
    deps=$xdeps
    echo $name version $ver ... $deps
  done
  echo done
}
# Exercise parse_version with a well-formed dependency string and two
# malformed ones.  NOTE(review): parse_version calls `exit 1` on a parse
# failure, so a failing line aborts the script before the following
# lines run — confirm this is the intended test behavior.
parse_version "dune-common (>= 1.2), dune-istl (<999)"
parse_version "alf (12) bart()c"
parse_version "a b () c"
#!/bin/bash
# Delegate to make: build the compile_XFAIL target (presumably the tests
# that are expected to fail to compile — confirm against the Makefile).
make compile_XFAIL
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
# Descend into the CMake modules, helper scripts and tests of this
# directory.
add_subdirectory(modules)
add_subdirectory(scripts)
add_subdirectory(test)
\ No newline at end of file
This diff is collapsed.
# SPDX-FileCopyrightInfo: Copyright © DUNE Project contributors, see file LICENSE.md in module root
# SPDX-License-Identifier: LicenseRef-GPL-2.0-only-with-DUNE-exception
# Defines the functions to use GMP
#
# .. cmake_function:: add_dune_gmp_flags
#
# .. cmake_param:: targets
# :positional:
# :single:
# :required:
#
# A list of targets to use GMP with.
#
include_guard(GLOBAL)

# set HAVE_GMP for the config.h file
set(HAVE_GMP ${GMP_FOUND})

# register all GMP related flags with the dune package-flags mechanism
# so they can be applied globally when requested
if(GMP_FOUND)
  dune_register_package_flags(
    LIBRARIES GMP::gmpxx
    COMPILE_DEFINITIONS "HAVE_GMP=1"
  )
endif()
# add_dune_gmp_flags(<targets>)
#
# Link each target in the list against GMP::gmpxx and define HAVE_GMP=1
# for it (PUBLIC, so consumers inherit both).  No-op when GMP was not
# found.  Cleanup: use foreach(IN LISTS) and bare endforeach()/endif()/
# endfunction() instead of the legacy repeated-argument forms.
function(add_dune_gmp_flags _targets)
  if(GMP_FOUND)
    foreach(_target IN LISTS _targets)
      target_link_libraries(${_target} PUBLIC GMP::gmpxx)
      target_compile_definitions(${_target} PUBLIC HAVE_GMP=1)
    endforeach()
  endif()
endfunction()
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.