From: heister Date: Tue, 28 Aug 2012 18:21:41 +0000 (+0000) Subject: add hdf5/xdmf output (work from Eric Heien) X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=3e3aa054d7e2afaa3f6bd2814e08cdadc91cdb6a;p=dealii-svn.git add hdf5/xdmf output (work from Eric Heien) git-svn-id: https://svn.dealii.org/trunk@26149 0785d39b-7218-0410-832d-ea1e28bc413d --- diff --git a/deal.II/aclocal.m4 b/deal.II/aclocal.m4 index 8fbe0c44b4..9211f854bd 100644 --- a/deal.II/aclocal.m4 +++ b/deal.II/aclocal.m4 @@ -7054,6 +7054,245 @@ AC_DEFUN(DEAL_II_CHECK_TRILINOS_HEADER_FILES, dnl CXXFLAGS="${OLD_CXXFLAGS}" ]) +dnl =========================================================================== +dnl http://www.gnu.org/software/autoconf-archive/ax_lib_hdf5.html +dnl =========================================================================== +dnl +dnl SYNOPSIS +dnl +dnl AX_LIB_HDF5([serial/parallel]) +dnl +dnl DESCRIPTION +dnl +dnl This macro provides tests of the availability of HDF5 library. +dnl +dnl The optional macro argument should be either 'serial' or 'parallel'. The +dnl former only looks for serial HDF5 installations via h5cc. The latter +dnl only looks for parallel HDF5 installations via h5pcc. If the optional +dnl argument is omitted, serial installations will be preferred over +dnl parallel ones. +dnl +dnl The macro adds a --with-hdf5 option accepting one of three values: +dnl +dnl no - do not check for the HDF5 library. +dnl yes - do check for HDF5 library in standard locations. +dnl path - complete path to where lib/libhdf5* libraries and +dnl include/H5* include files reside. +dnl +dnl If HDF5 is successfully found, this macro calls +dnl +dnl AC_SUBST(DEAL_II_HDF5_VERSION) +dnl AC_SUBST(DEAL_II_HDF5_CFLAGS) +dnl AC_SUBST(DEAL_II_HDF5_CPPFLAGS) +dnl AC_SUBST(DEAL_II_HDF5_LDFLAGS) +dnl AC_SUBST(DEAL_II_HDF5_INCDIR) +dnl AC_DEFINE(DEAL_II_HAVE_HDF5) +dnl +dnl and sets with_hdf5="yes". +dnl +dnl If HDF5 is disabled or not found, this macros sets with_hdf5="no". +dnl +dnl Your configuration script can test $with_hdf to take any further +dnl actions. HDF5_{C,CPP,LD}FLAGS may be used when building with C or C++. +dnl +dnl LICENSE +dnl +dnl Copyright (c) 2009 Timothy Brown +dnl Copyright (c) 2010 Rhys Ulerich +dnl +dnl Copying and distribution of this file, with or without modification, are +dnl permitted in any medium without royalty provided the copyright notice +dnl and this notice are preserved. This file is offered as-is, without any +dnl warranty. + +AC_DEFUN(DEAL_II_CONFIGURE_HDF5, dnl +[ + +AC_REQUIRE([AC_PROG_SED]) +AC_REQUIRE([AC_PROG_AWK]) +AC_REQUIRE([AC_PROG_GREP]) + +dnl Add a default --with-hdf5 configuration option. +AC_ARG_WITH([hdf5], + AS_HELP_STRING( + [--with-hdf5=[yes/no/PATH]], + m4_case(m4_normalize([$1]), + [serial], [location of h5cc for serial HDF5 configuration], + [parallel], [location of h5pcc for parallel HDF5 configuration], + [location of h5cc or h5pcc for HDF5 configuration]) + ), + [if test "$withval" = "no"; then + with_hdf5="no" + elif test "$withval" = "yes"; then + with_hdf5="yes" + else + with_hdf5="yes" + H5CC="$withval" + fi], + [with_hdf5="yes"] +) + +dnl Set defaults to blank +USE_CONTRIB_HDF5=no +DEAL_II_HDF5_VERSION="" +DEAL_II_HDF5_CFLAGS="" +DEAL_II_HDF5_CPPFLAGS="" +DEAL_II_HDF5_LDFLAGS="" +DEAL_II_HDF5_INCDIR="" + +dnl Try and find hdf5 compiler tools and options. +if test "$with_hdf5" = "yes"; then + if test -z "$H5CC"; then + dnl Check to see if H5CC is in the path. 
+ AC_PATH_PROGS( + [H5CC], + m4_case(m4_normalize([$1]), + [serial], [h5cc], + [parallel], [h5pcc], + [h5cc h5pcc]), + []) + else + AC_MSG_CHECKING([Using provided HDF5 C wrapper]) + AC_MSG_RESULT([$H5CC]) + fi + AC_MSG_CHECKING([for HDF5 libraries]) + if test ! -x "$H5CC"; then + AC_MSG_RESULT([no]) + AC_MSG_WARN(m4_case(m4_normalize([$1]), + [serial], [ +Unable to locate serial HDF5 compilation helper script 'h5cc'. +Please specify --with-hdf5= as the full path to h5cc. +HDF5 support is being disabled (equivalent to --with-hdf5=no). +], [parallel],[ +Unable to locate parallel HDF5 compilation helper script 'h5pcc'. +Please specify --with-hdf5= as the full path to h5pcc. +HDF5 support is being disabled (equivalent to --with-hdf5=no). +], [ +Unable to locate HDF5 compilation helper scripts 'h5cc' or 'h5pcc'. +Please specify --with-hdf5= as the full path to h5cc or h5pcc. +HDF5 support is being disabled (equivalent to --with-hdf5=no). +])) + with_hdf5="no" + else + dnl h5cc provides both AM_ and non-AM_ options + dnl depending on how it was compiled either one of + dnl these are empty. Lets roll them both into one. + + dnl Look for "HDF5 Version: X.Y.Z" + DEAL_II_HDF5_VERSION=$(eval $H5CC -showconfig | grep 'HDF5 Version:' \ + | $AWK '{print $[]3}') + +dnl A ideal situation would be where everything we needed was +dnl in the AM_* variables. However most systems are not like this +dnl and seem to have the values in the non-AM variables. +dnl +dnl We try the following to find the flags: +dnl (1) Look for "NAME:" tags +dnl (2) Look for "NAME/H5_NAME:" tags +dnl (3) Look for "AM_NAME:" tags +dnl + dnl (1) + dnl Look for "CFLAGS: " + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig | grep '\bCFLAGS:' \ + | $AWK -F: '{print $[]2}') + dnl Look for "CPPFLAGS" + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig | grep '\bCPPFLAGS:' \ + | $AWK -F: '{print $[]2}') + dnl Look for "LD_FLAGS" + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig | grep '\bLDFLAGS:' \ + | $AWK -F: '{print $[]2}') + + dnl (2) + dnl CFLAGS/H5_CFLAGS: .../.... + dnl We could use $SED with something like the following + dnl 's/CFLAGS.*\/H5_CFLAGS.*[:]\(.*\)\/\(.*\)/\1/p' + if test -z "$DEAL_II_HDF5_CFLAGS"; then + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/CFLAGS.*[:]\(.*\)\/\(.*\)/\1/p') + fi + dnl Look for "CPPFLAGS" + if test -z "$DEAL_II_HDF5_CPPFLAGS"; then + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/CPPFLAGS.*[:]\(.*\)\/\(.*\)/\1/p') + fi + dnl Look for "LD_FLAGS" + if test -z "$DEAL_II_HDF5_LDFLAGS"; then + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/LDFLAGS.*[:]\(.*\)\/\(.*\)/\1/p') + fi + + dnl (3) + dnl Check to see if these are not empty strings. If so + dnl find the AM_ versions and use them. + if test -z "$DEAL_II_HDF5_CFLAGS"; then + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_CFLAGS:' | $AWK -F: '{print $[]2}') + fi + if test -z "$DEAL_II_HDF5_CPPFLAGS"; then + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_CPPFLAGS:' | $AWK -F: '{print $[]2}') + fi + if test -z "$DEAL_II_HDF5_LDFLAGS"; then + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_LDFLAGS:' | $AWK -F: '{print $[]2}') + fi + + dnl Frustratingly, the necessary -Idir,-Ldir still may not be found! + dnl Attempt to pry any more required include directories from wrapper. 
+ for arg in `$H5CC -c -show` + do + case "$arg" in #( + -I*) echo $DEAL_II_HDF5_CPPFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \ + || DEAL_II_HDF5_CPPFLAGS="$arg $DEAL_II_HDF5_CPPFLAGS" + ;; + esac + done + for arg in `$H5CC -show` + do + case "$arg" in #( + -L*) echo $DEAL_II_HDF5_LDFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \ + || DEAL_II_HDF5_LDFLAGS="$arg $DEAL_II_HDF5_LDFLAGS" + ;; + esac + done + + AC_MSG_RESULT([yes (version $[DEAL_II_HDF5_VERSION])]) + + dnl Look for any extra libraries also needed to link properly + EXTRA_LIBS=$(eval $H5CC -showconfig | grep 'Extra libraries:'\ + | $AWK -F: '{print $[]2}') + + dnl Look for HDF5's high level library + ax_lib_hdf5_save_LDFLAGS=$LDFLAGS + ax_lib_hdf5_save_LIBS=$LIBS + LDFLAGS=$DEAL_II_HDF5_LDFLAGS + AC_HAVE_LIBRARY([hdf5_hl], + [DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS -lhdf5_hl"], + [], + [-lhdf5 $EXTRA_LIBS]) + LIBS=$ax_lib_hdf5_save_LIBS + LDFLAGS=$ax_lib_hdf5_save_LDFLAGS + + dnl Add the HDF5 library itself + DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS -lhdf5" + + dnl Add any EXTRA_LIBS afterwards + if test "$EXTRA_LIBS"; then + DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS $EXTRA_LIBS" + fi + + dnl remove "-I" from cpp flags to get include path + DEAL_II_HDF5_INCDIR=$(eval echo $DEAL_II_HDF5_CPPFLAGS | cut -c 3-) + + LDFLAGS="$LDFLAGS $DEAL_II_HDF5_LDFLAGS" + USE_CONTRIB_HDF5=yes + AC_DEFINE([DEAL_II_HAVE_HDF5], [1], [Defined if you have HDF5 support]) + fi +fi +]) + + + dnl ------------------------------------------------------------ diff --git a/deal.II/common/Make.global_options.in b/deal.II/common/Make.global_options.in index e1aa652e3f..78e5afbfca 100644 --- a/deal.II/common/Make.global_options.in +++ b/deal.II/common/Make.global_options.in @@ -76,6 +76,13 @@ DEAL_II_TRILINOS_VERSION_MINOR = @DEAL_II_TRILINOS_VERSION_MINOR@ DEAL_II_TRILINOS_VERSION_SUBMINOR = @DEAL_II_TRILINOS_VERSION_SUBMINOR@ DEAL_II_TRILINOS_LIBPREFIX = @DEAL_II_TRILINOS_LIBPREFIX@ +USE_CONTRIB_HDF5 = @USE_CONTRIB_HDF5@ +DEAL_II_HDF5_VERSION = @DEAL_II_HDF5_VERSION@ +DEAL_II_HDF5_CFLAGS = @DEAL_II_HDF5_CFLAGS@ +DEAL_II_HDF5_CPPFLAGS = @DEAL_II_HDF5_CPPFLAGS@ +DEAL_II_HDF5_LDFLAGS = @DEAL_II_HDF5_LDFLAGS@ +DEAL_II_HDF5_INCDIR = @DEAL_II_HDF5_INCDIR@ + USE_CONTRIB_BLAS = @USE_CONTRIB_BLAS@ USE_CONTRIB_LAPACK = @USE_CONTRIB_LAPACK@ @@ -271,6 +278,8 @@ include-path-slepc-conf = $(DEAL_II_SLEPC_DIR)/$(DEAL_II_PETSC_ARCH)/conf include-path-trilinos = $(DEAL_II_TRILINOS_INCDIR) include-path-mumps = $(DEAL_II_MUMPS_DIR)/include include-path-metis = $(DEAL_II_METIS_INCDIR) +include-path-hdf5 = $(DEAL_II_HDF5_INCDIR) + # include paths as command line flags. 
while compilers allow a space between # the '-I' and the actual path, we also send these flags to the @@ -316,6 +325,11 @@ ifeq ($(USE_CONTRIB_METIS),yes) INCLUDE += -I$(include-path-metis) endif +ifeq ($(USE_CONTRIB_HDF5),yes) + INCLUDE += -I$(include-path-hdf5) +endif + + ifeq ($(enable-threads),yes) INCLUDE += -I$(shell echo $D/contrib/tbb/tbb*/include) endif @@ -354,6 +368,13 @@ ifeq ($(USE_CONTRIB_PETSC),yes) CXXFLAGS.o += $(OCXX_PETSCFLAGS) endif +ifeq ($(USE_CONTRIB_HDF5),yes) + CFLAGS.g += $(DEAL_II_HDF5_CFLAGS) + CFLAGS.o += $(DEAL_II_HDF5_CFLAGS) + CXXFLAGS.g += $(DEAL_II_HDF5_CXXFLAGS) + CXXFLAGS.o += $(DEAL_II_HDF5_CXXFLAGS) +endif + ifneq ($(enable-threads),no) MT = MT else diff --git a/deal.II/configure b/deal.II/configure index 2dc9370956..a10253898c 100755 --- a/deal.II/configure +++ b/deal.II/configure @@ -644,6 +644,15 @@ USE_CONTRIB_MUMPS DEAL_II_ARPACK_ARCH DEAL_II_ARPACK_DIR USE_CONTRIB_ARPACK +DEAL_II_HDF5_INCDIR +DEAL_II_HDF5_LDFLAGS +DEAL_II_HDF5_CPPFLAGS +DEAL_II_HDF5_CFLAGS +DEAL_II_HDF5_VERSION +USE_CONTRIB_HDF5 +H5CC +AWK +SED DEAL_II_TRILINOS_STATIC DEAL_II_TRILINOS_SHARED DEAL_II_TRILINOS_LIBDIR @@ -794,6 +803,7 @@ with_slepc with_trilinos with_trilinos_include with_trilinos_libs +with_hdf5 with_arpack with_mumps with_scalapack @@ -1489,6 +1499,7 @@ Optional Packages: Specify the path to the Trilinos libraries; use this if you want to override the TRILINOS_LIBDIR environment variable. + --with-hdf5=yes/no/PATH location of h5cc or h5pcc for HDF5 configuration --with-arpack=path/to/arpack Specify the path to the ARPACK installation, for which the include directory and lib directory are @@ -11681,6 +11692,342 @@ done +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 +$as_echo_n "checking for a sed that does not truncate output... " >&6; } +if ${ac_cv_path_SED+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ + for ac_i in 1 2 3 4 5 6 7; do + ac_script="$ac_script$as_nl$ac_script" + done + echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed + { ac_script=; unset ac_script;} + if test -z "$SED"; then + ac_path_SED_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in sed gsed; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" + { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue +# Check for GNU ac_path_SED and select it if it is found. 
+ # Check for GNU $ac_path_SED +case `"$ac_path_SED" --version 2>&1` in +*GNU*) + ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo '' >> "conftest.nl" + "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_SED_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_SED="$ac_path_SED" + ac_path_SED_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_SED_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_SED"; then + as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 + fi +else + ac_cv_path_SED=$SED +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 +$as_echo "$ac_cv_path_SED" >&6; } + SED="$ac_cv_path_SED" + rm -f conftest.sed + +for ac_prog in gawk mawk nawk awk +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AWK+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AWK"; then + ac_cv_prog_AWK="$AWK" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_prog_AWK="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AWK=$ac_cv_prog_AWK +if test -n "$AWK"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 +$as_echo "$AWK" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$AWK" && break +done + + + + + + + + +# Check whether --with-hdf5 was given. +if test "${with_hdf5+set}" = set; then : + withval=$with_hdf5; if test "$withval" = "no"; then + with_hdf5="no" + elif test "$withval" = "yes"; then + with_hdf5="yes" + else + with_hdf5="yes" + H5CC="$withval" + fi +else + with_hdf5="yes" + +fi + + +USE_CONTRIB_HDF5=no +DEAL_II_HDF5_VERSION="" +DEAL_II_HDF5_CFLAGS="" +DEAL_II_HDF5_CPPFLAGS="" +DEAL_II_HDF5_LDFLAGS="" +DEAL_II_HDF5_INCDIR="" + +if test "$with_hdf5" = "yes"; then + if test -z "$H5CC"; then + for ac_prog in h5cc h5pcc +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_H5CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $H5CC in + [\\/]* | ?:[\\/]*) + ac_cv_path_H5CC="$H5CC" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + ac_cv_path_H5CC="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +H5CC=$ac_cv_path_H5CC +if test -n "$H5CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $H5CC" >&5 +$as_echo "$H5CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$H5CC" && break +done + + else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking Using provided HDF5 C wrapper" >&5 +$as_echo_n "checking Using provided HDF5 C wrapper... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $H5CC" >&5 +$as_echo "$H5CC" >&6; } + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for HDF5 libraries" >&5 +$as_echo_n "checking for HDF5 libraries... " >&6; } + if test ! -x "$H5CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: +Unable to locate HDF5 compilation helper scripts 'h5cc' or 'h5pcc'. +Please specify --with-hdf5= as the full path to h5cc or h5pcc. +HDF5 support is being disabled (equivalent to --with-hdf5=no). +" >&5 +$as_echo "$as_me: WARNING: +Unable to locate HDF5 compilation helper scripts 'h5cc' or 'h5pcc'. +Please specify --with-hdf5= as the full path to h5cc or h5pcc. +HDF5 support is being disabled (equivalent to --with-hdf5=no). +" >&2;} + with_hdf5="no" + else + + DEAL_II_HDF5_VERSION=$(eval $H5CC -showconfig | grep 'HDF5 Version:' \ + | $AWK '{print $3}') + + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig | grep '\bCFLAGS:' \ + | $AWK -F: '{print $2}') + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig | grep '\bCPPFLAGS:' \ + | $AWK -F: '{print $2}') + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig | grep '\bLDFLAGS:' \ + | $AWK -F: '{print $2}') + + if test -z "$DEAL_II_HDF5_CFLAGS"; then + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/CFLAGS.*:\(.*\)\/\(.*\)/\1/p') + fi + if test -z "$DEAL_II_HDF5_CPPFLAGS"; then + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/CPPFLAGS.*:\(.*\)\/\(.*\)/\1/p') + fi + if test -z "$DEAL_II_HDF5_LDFLAGS"; then + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig \ + | $SED -n 's/LDFLAGS.*:\(.*\)\/\(.*\)/\1/p') + fi + + if test -z "$DEAL_II_HDF5_CFLAGS"; then + DEAL_II_HDF5_CFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_CFLAGS:' | $AWK -F: '{print $2}') + fi + if test -z "$DEAL_II_HDF5_CPPFLAGS"; then + DEAL_II_HDF5_CPPFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_CPPFLAGS:' | $AWK -F: '{print $2}') + fi + if test -z "$DEAL_II_HDF5_LDFLAGS"; then + DEAL_II_HDF5_LDFLAGS=$(eval $H5CC -showconfig \ + | grep '\bAM_LDFLAGS:' | $AWK -F: '{print $2}') + fi + + for arg in `$H5CC -c -show` + do + case "$arg" in #( + -I*) echo $DEAL_II_HDF5_CPPFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \ + || DEAL_II_HDF5_CPPFLAGS="$arg $DEAL_II_HDF5_CPPFLAGS" + ;; + esac + done + for arg in `$H5CC -show` + do + case "$arg" in #( + -L*) echo $DEAL_II_HDF5_LDFLAGS | $GREP -e "$arg" 2>&1 >/dev/null \ + || DEAL_II_HDF5_LDFLAGS="$arg $DEAL_II_HDF5_LDFLAGS" + ;; + esac + done + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes (version $DEAL_II_HDF5_VERSION)" >&5 +$as_echo "yes (version $DEAL_II_HDF5_VERSION)" >&6; } + + EXTRA_LIBS=$(eval $H5CC -showconfig | grep 'Extra libraries:'\ + | $AWK -F: '{print $2}') + + 
ax_lib_hdf5_save_LDFLAGS=$LDFLAGS + ax_lib_hdf5_save_LIBS=$LIBS + LDFLAGS=$DEAL_II_HDF5_LDFLAGS + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -lhdf5_hl" >&5 +$as_echo_n "checking for main in -lhdf5_hl... " >&6; } +if ${ac_cv_lib_hdf5_hl_main+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lhdf5_hl -lhdf5 $EXTRA_LIBS $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + + +int +main () +{ +return main (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_hdf5_hl_main=yes +else + ac_cv_lib_hdf5_hl_main=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_hdf5_hl_main" >&5 +$as_echo "$ac_cv_lib_hdf5_hl_main" >&6; } +if test "x$ac_cv_lib_hdf5_hl_main" = xyes; then : + DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS -lhdf5_hl" +fi +ac_cv_lib_hdf5_hl=ac_cv_lib_hdf5_hl_main + + LIBS=$ax_lib_hdf5_save_LIBS + LDFLAGS=$ax_lib_hdf5_save_LDFLAGS + + DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS -lhdf5" + + if test "$EXTRA_LIBS"; then + DEAL_II_HDF5_LDFLAGS="$DEAL_II_HDF5_LDFLAGS $EXTRA_LIBS" + fi + + DEAL_II_HDF5_INCDIR=$(eval echo $DEAL_II_HDF5_CPPFLAGS | cut -c 3-) + + LDFLAGS="$LDFLAGS $DEAL_II_HDF5_LDFLAGS" + USE_CONTRIB_HDF5=yes + +$as_echo "#define DEAL_II_HAVE_HDF5 1" >>confdefs.h + + fi +fi + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ARPACK library directory" >&5 $as_echo_n "checking for ARPACK library directory... " >&6; } @@ -14777,6 +15124,7 @@ gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' +AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF diff --git a/deal.II/configure.in b/deal.II/configure.in index b0dd66d93d..99d56212a8 100644 --- a/deal.II/configure.in +++ b/deal.II/configure.in @@ -507,6 +507,15 @@ AC_SUBST(DEAL_II_TRILINOS_LIBDIR) AC_SUBST(DEAL_II_TRILINOS_SHARED) AC_SUBST(DEAL_II_TRILINOS_STATIC) +DEAL_II_CONFIGURE_HDF5 +AC_SUBST(USE_CONTRIB_HDF5) +AC_SUBST(DEAL_II_HDF5_VERSION) +AC_SUBST(DEAL_II_HDF5_CFLAGS) +AC_SUBST(DEAL_II_HDF5_CPPFLAGS) +AC_SUBST(DEAL_II_HDF5_LDFLAGS) +AC_SUBST(DEAL_II_HDF5_INCDIR) + + DEAL_II_CONFIGURE_ARPACK AC_SUBST(USE_CONTRIB_ARPACK) AC_SUBST(DEAL_II_ARPACK_DIR) diff --git a/deal.II/contrib/configure b/deal.II/contrib/configure index 80ce9f53e6..cd2b0fb594 100755 --- a/deal.II/contrib/configure +++ b/deal.II/contrib/configure @@ -1,5 +1,5 @@ #! /bin/sh -# From configure.in Revision. +# From configure.in Revision: 22430 . # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.68. # diff --git a/deal.II/doc/external-libs/hdf5.html b/deal.II/doc/external-libs/hdf5.html new file mode 100644 index 0000000000..e9e10181e4 --- /dev/null +++ b/deal.II/doc/external-libs/hdf5.html @@ -0,0 +1,48 @@ + + + + + The deal.II Readme on interfacing to HDF5 + + + + + + + + + + +

Using and Installing HDF5

+

HDF5
+    is a library for high-performance parallel data output. deal.II can use it to
+    write graphical output of solutions, which can then be displayed using,
+    for example, ParaView.
+

+ +

This is how you can download, configure, and build HDF5:

+wget http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.9.tar.gz
+tar xf hdf5-1.8.9.tar.gz
+cd hdf5-1.8.9/
+./configure --prefix=`pwd`/build/ --enable-parallel
+make install
+    
+
+
+    You will end up with a script called h5pcc (or h5cc if you
+    decided not to build the parallel version) in the build/bin directory.
+

Interfacing deal.II + to HDF5

+ +

To be able to use deal.II's
+      interface to HDF5, we then
+      configure deal.II with the following
+      additional option:
+      --with-hdf5=path/to/hdf5/build/bin/h5pcc
+

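+
+    For illustration, here is a minimal sketch of how the new interface
+    might be called from user code. The names data_out (an already
+    filled DataOut<dim> object), current_time, and the file names are
+    assumptions for this example:
+
+      // Write mesh, connectivity, and solution data to an HDF5 file,
+      // in parallel when HDF5 was built with parallel support.
+      data_out.write_hdf5_parallel ("solution.h5", MPI_COMM_WORLD);
+
+      // Describe the heavy data in an XDMF file that ParaView can read.
+      // Only rank 0 receives a valid XDMFEntry, and only rank 0 writes
+      // the XDMF file.
+      std::vector<XDMFEntry> xdmf_entries;
+      xdmf_entries.push_back (data_out.create_xdmf_entry ("solution.h5",
+                                                          current_time,
+                                                          MPI_COMM_WORLD));
+      data_out.write_xdmf_file (xdmf_entries, "solution.xdmf", MPI_COMM_WORLD);
+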
+ + diff --git a/deal.II/doc/news/changes.h b/deal.II/doc/news/changes.h index 9a16777177..df12f77d71 100644 --- a/deal.II/doc/news/changes.h +++ b/deal.II/doc/news/changes.h @@ -62,6 +62,11 @@ used to store boundary indicators internally.
    +
1. 
+New: we now support parallel output using HDF5/XDMF.
+
    +(Eric Heien, Timo Heister, 2012/08/28) +
  2. New: we are now compatible with Trilinos 10.4.2, 10.8.5, and 10.12.2. See the readme for more information. diff --git a/deal.II/doc/readme.html b/deal.II/doc/readme.html index f35adb466e..bbb9dc291b 100644 --- a/deal.II/doc/readme.html +++ b/deal.II/doc/readme.html @@ -730,7 +730,18 @@ this page. - + +
    HDF5
    +
+
+    This adds HDF5/XDMF graphical output capabilities to deal.II. You need to
+    install the HDF5 library
+    separately. Configure with --with-hdf5= and point it to
+    the h5pcc or h5cc script inside your HDF5 installation.
+
+    For a detailed description of how to compile HDF5 and link it with
+    deal.II, see this
+    page.
+
    diff --git a/deal.II/include/deal.II/base/config.h.in b/deal.II/include/deal.II/base/config.h.in index a0a9a5346a..37e5c6c7c6 100644 --- a/deal.II/include/deal.II/base/config.h.in +++ b/deal.II/include/deal.II/base/config.h.in @@ -131,6 +131,9 @@ libraries */ #undef DEAL_II_HAVE_DARWIN_DYNACAST_BUG +/* Defined if you have HDF5 support */ +#undef DEAL_II_HAVE_HDF5 + /* Defined if std::isfinite is available */ #undef DEAL_II_HAVE_ISFINITE diff --git a/deal.II/include/deal.II/base/data_out_base.h b/deal.II/include/deal.II/base/data_out_base.h index d126f615aa..7a0523028a 100644 --- a/deal.II/include/deal.II/base/data_out_base.h +++ b/deal.II/include/deal.II/base/data_out_base.h @@ -43,7 +43,7 @@ DEAL_II_NAMESPACE_OPEN class ParameterHandler; - +class XDMFEntry; /** * This is a base class for output of data on meshes of very general @@ -1407,7 +1407,13 @@ class DataOutBase * Output in deal.II * intermediate format. */ - deal_II_intermediate + deal_II_intermediate, + + /** + * Output in + * HDF5 format. + */ + hdf5 }; @@ -1808,7 +1814,29 @@ class DataOutBase const Deal_II_IntermediateFlags &flags, std::ostream &out); + template + static void write_hdf5_parallel ( + const std::vector > &patches, + const std::vector &data_names, + const std::vector > &vector_data_ranges, + const char* filename, + MPI_Comm comm); + + template + static XDMFEntry create_xdmf_entry (const std::vector > &patches, + const std::vector &data_names, + const std::vector > &vector_data_ranges, + const char* h5_filename, + const double cur_time, + MPI_Comm comm); + template + static void write_xdmf_file ( + const std::vector > &patches, + const std::vector &entries, + const char *filename, + MPI_Comm comm); + /** * Given an input stream that contains * data written by @@ -2462,6 +2490,15 @@ class DataOutInterface : private DataOutBase */ void write_deal_II_intermediate (std::ostream &out) const; + XDMFEntry create_xdmf_entry (const char *h5_filename, + const double cur_time, + MPI_Comm comm) const; + + void write_xdmf_file (const std::vector &entries, + const char *filename, + MPI_Comm comm) const; + + void write_hdf5_parallel (const char* filename, MPI_Comm comm) const; /** * Write data and grid to out * according to the given data @@ -3012,6 +3049,45 @@ class DataOutReader : public DataOutInterface +// A class to store relevant data to use when writing the light data XDMF file +// This should only contain valid data on the root node which writes the files, +// the rest of the nodes will have valid set to false +class XDMFEntry { +private: + // Whether this entry is valid and contains data to be written + bool valid; + // The name of the HDF5 heavy data file this entry references + std::string h5_filename; + // The simulation time associated with this entry + double entry_time; + // The number of nodes, cells and dimensionality associated with the data + unsigned int num_nodes, num_cells, dimension; + // The attributes associated with this entry and their dimension + std::map attribute_dims; + + // Small function to create indentation for XML file + std::string indent(const unsigned int indent_level) const { + std::string res = ""; + for (unsigned int i=0;i #endif +#ifdef DEAL_II_HAVE_HDF5 +#include +#endif DEAL_II_NAMESPACE_OPEN @@ -965,6 +968,75 @@ namespace std::vector vertices; std::vector cells; }; + + + class HDF5MemStream + { + public: + /** + * Constructor, storing + * persistent values for + * later use. 
+ */ + HDF5MemStream (const unsigned int local_points_cell_count[2], const unsigned int global_points_cell_offsets[2], const unsigned int dim); + + /** + * Output operator for points. + */ + template + void write_point (const unsigned int index, + const Point&); + + /** + * Do whatever is necessary to + * terminate the list of points. + * In this case, nothing. + */ + void flush_points () {}; + + /** + * Write dim-dimensional cell + * with first vertex at + * number start and further + * vertices offset by the + * specified values. Values + * not needed are ignored. + * + * The order of vertices for + * these cells in different + * dimensions is + *
      + *
    1. [0,1] + *
    2. [] + *
    3. [] + *
    + */ + template + void write_cell(const unsigned int index, + const unsigned int start, + const unsigned int x_offset, + const unsigned int y_offset, + const unsigned int z_offset); + + /** + * Do whatever is necessary to + * terminate the list of cells. + * In this case, nothing. + */ + void flush_cells () {}; + + const double *node_data(void) const { return &vertices[0]; }; + const unsigned int *cell_data(void) const { return &cells[0]; }; + + private: + /** + * A list of vertices and + * cells, used to write HDF5 data. + */ + std::vector vertices; + std::vector cells; + unsigned int cell_offset; + }; //----------------------------------------------------------------------// @@ -1463,7 +1535,48 @@ namespace return stream; } - + HDF5MemStream::HDF5MemStream(const unsigned int local_points_cell_count[2], const unsigned int global_points_cell_offsets[2], const unsigned int dim) { + unsigned int entries_per_cell = (2 << (dim-1)); + + vertices.resize(local_points_cell_count[0]*dim); + cells.resize(local_points_cell_count[1]*entries_per_cell); + cell_offset = global_points_cell_offsets[1]*entries_per_cell; + } + + template + void + HDF5MemStream::write_point (const unsigned int index, + const Point& p) + { + for (int i=0;i + void + HDF5MemStream::write_cell( + unsigned int index, + unsigned int start, + unsigned int d1, + unsigned int d2, + unsigned int d3) + { + unsigned int base_entry = index * GeometryInfo::vertices_per_cell; + cells[base_entry+0] = cell_offset+start; + cells[base_entry+1] = cell_offset+start+d1; + if (dim>=2) + { + cells[base_entry+2] = cell_offset+start+d2+d1; + cells[base_entry+3] = cell_offset+start+d2; + if (dim>=3) + { + cells[base_entry+4] = cell_offset+start+d3; + cells[base_entry+5] = cell_offset+start+d3+d1; + cells[base_entry+6] = cell_offset+start+d3+d2+d1; + cells[base_entry+7] = cell_offset+start+d3+d2; + } + } + } + template std::ostream& DXStream::operator<< (const T& t) @@ -2154,6 +2267,9 @@ parse_output_format (const std::string &format_name) if (format_name == "deal.II intermediate") return deal_II_intermediate; + + if (format_name == "hdf5") + return hdf5; AssertThrow (false, ExcMessage ("The given file format name is not recognized: <" @@ -2168,7 +2284,7 @@ parse_output_format (const std::string &format_name) std::string DataOutBase::get_output_format_names () { - return "none|dx|ucd|gnuplot|povray|eps|gmv|tecplot|tecplot_binary|vtk|vtu|deal.II intermediate"; + return "none|dx|ucd|gnuplot|povray|eps|gmv|tecplot|tecplot_binary|vtk|vtu|hdf5|deal.II intermediate"; } @@ -2203,6 +2319,8 @@ default_suffix (const OutputFormat output_format) return ".vtu"; case deal_II_intermediate: return ".d2"; + case hdf5: + return ".h5"; default: Assert (false, ExcNotImplemented()); return ""; @@ -5557,6 +5675,465 @@ write_deal_II_intermediate (std::ostream &out) const } +template +XDMFEntry DataOutInterface:: +create_xdmf_entry (const char *h5_filename, const double cur_time, MPI_Comm comm) const +{ + return DataOutBase::create_xdmf_entry(get_patches(), get_dataset_names(), get_vector_data_ranges(), + h5_filename, cur_time, comm); +} + +template +XDMFEntry DataOutBase::create_xdmf_entry (const std::vector > &patches, + const std::vector &data_names, + const std::vector > &vector_data_ranges, + const char* h5_filename, + const double cur_time, + MPI_Comm comm) +{ + unsigned int local_node_cell_count[2], global_node_cell_count[2]; + const unsigned int n_data_sets = data_names.size(); + int myrank; + +#ifndef DEAL_II_HAVE_HDF5 + AssertThrow(false, ExcMessage ("XDMF 
support requires HDF5 to be turned on.")); +#endif + AssertThrow(dim == 2 || dim == 3, ExcMessage ("XDMF only supports 2 or 3 dimensions.")); + + compute_sizes(patches, local_node_cell_count[0], local_node_cell_count[1]); + + // And compute the global total +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI + MPI_Comm_rank(comm, &myrank); + MPI_Allreduce(local_node_cell_count, global_node_cell_count, 2, MPI_UNSIGNED, MPI_SUM, comm); +#else + myrank = 0; + global_node_cell_count[0] = local_node_cell_count[0]; + global_node_cell_count[1] = local_node_cell_count[1]; +#endif + + // Output the XDMF file only on the root process + if (myrank == 0) { + XDMFEntry entry(h5_filename, cur_time, global_node_cell_count[0], global_node_cell_count[1], dim); + + // The vector names generated here must match those generated in the HDF5 file + unsigned int i, n_th_vector, data_set, pt_data_vector_dim; + std::string vector_name; + for (n_th_vector=0,data_set=0;data_set(vector_data_ranges[n_th_vector]) < data_set) n_th_vector++; + + // Determine whether the data is multiple dimensions or one + if (std_cxx1x::get<0>(vector_data_ranges[n_th_vector]) == data_set) { + // Multiple dimensions + pt_data_vector_dim = std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) - std_cxx1x::get<0>(vector_data_ranges[n_th_vector])+1; + + // Ensure the dimensionality of the data is correct + AssertThrow (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) >= std_cxx1x::get<0>(vector_data_ranges[n_th_vector]), + ExcLowerRange (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]), std_cxx1x::get<0>(vector_data_ranges[n_th_vector]))); + AssertThrow (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) < n_data_sets, + ExcIndexRange (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]), 0, n_data_sets)); + + // Determine the vector name + // Concatenate all the + // component names with double + // underscores unless a vector + // name has been specified + if (std_cxx1x::get<2>(vector_data_ranges[n_th_vector]) != "") { + vector_name = std_cxx1x::get<2>(vector_data_ranges[n_th_vector]); + } else { + vector_name = ""; + for (i=std_cxx1x::get<0>(vector_data_ranges[n_th_vector]);i(vector_data_ranges[n_th_vector]);++i) + vector_name += data_names[i] + "__"; + vector_name += data_names[std_cxx1x::get<1>(vector_data_ranges[n_th_vector])]; + } + } else { + // One dimension + pt_data_vector_dim = 1; + vector_name = data_names[data_set]; + } + + entry.add_attribute(vector_name, pt_data_vector_dim); + + // Advance the current data set + data_set += pt_data_vector_dim; + } + + return entry; + } else { + return XDMFEntry(); + } +} + +template +void DataOutInterface:: +write_xdmf_file (const std::vector &entries, const char *filename, MPI_Comm comm) const +{ + DataOutBase::write_xdmf_file(get_patches(), entries, filename, comm); +} + +template +void DataOutBase::write_xdmf_file (const std::vector > &patches, + const std::vector &entries, + const char *filename, + MPI_Comm comm) +{ + int myrank; + +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI + MPI_Comm_rank(comm, &myrank); +#else + myrank = 0; +#endif + + // Only rank 0 process writes the XDMF file + if (myrank == 0) { + std::ofstream xdmf_file(filename); + std::vector::const_iterator it; + + xdmf_file << "\n"; + xdmf_file << "\n"; + xdmf_file << "\n"; + xdmf_file << " \n"; + xdmf_file << " \n"; + + // Write out all the entries indented + for (it=entries.begin();it!=entries.end();++it) + xdmf_file << it->get_xdmf_content(3); + + xdmf_file << " \n"; + xdmf_file << " \n"; + xdmf_file << "\n"; + + xdmf_file.close(); + } +} + 
+// Get the XDMF content associated with this entry +// If the entry is not valid, this returns false +std::string XDMFEntry::get_xdmf_content(const unsigned int indent_level) const { + std::stringstream ss; + unsigned int i; + std::map::const_iterator it; + + if (!valid) return ""; + + ss << indent(indent_level+0) << "\n"; + ss << indent(indent_level+1) << "\n"; + + return ss.str(); +} + +template +void DataOutInterface:: +write_hdf5_parallel (const char* filename, MPI_Comm comm) const +{ +#ifndef DEAL_II_HAVE_HDF5 + AssertThrow(false, ExcMessage ("HDF5 support is disabled.")); +#endif + DataOutBase::write_hdf5_parallel(get_patches(), get_dataset_names(), + get_vector_data_ranges(), + filename, comm); +} + +template +void DataOutBase::write_hdf5_parallel (const std::vector > &patches, + const std::vector &data_names, + const std::vector > &vector_data_ranges, + const char* filename, + MPI_Comm comm) +{ +#ifndef DEAL_II_HAVE_HDF5 + AssertThrow(false, ExcMessage ("HDF5 support is disabled.")); +#else + int world_size; + hid_t h5_file_id, plist_id; + hid_t node_dataspace, node_dataset, node_file_dataspace, node_memory_dataspace; + hid_t cell_dataspace, cell_dataset, cell_file_dataspace, cell_memory_dataspace; + hid_t pt_data_dataspace, pt_data_dataset, pt_data_file_dataspace, pt_data_memory_dataspace; + herr_t status; + unsigned int local_node_cell_count[2], global_node_cell_count[2], global_node_cell_offsets[2]; + hsize_t count[2], offset[2], node_ds_dim[2], cell_ds_dim[2]; + const unsigned int n_data_sets = data_names.size(); + + // If HDF5 is not parallel and we're using multiple processes, abort +#ifndef H5_HAVE_PARALLEL +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI + MPI_Comm_size(comm, &world_size); + AssertThrow (world_size <= 1, + ExcMessage ("Serial HDF5 output on multiple processes is not yet supported.")); +#endif +#endif + + compute_sizes(patches, local_node_cell_count[0], local_node_cell_count[1]); + + Table<2,double> data_vectors (n_data_sets, local_node_cell_count[0]); + void (*fun_ptr) (const std::vector > &, Table<2,double> &) = &DataOutBase::template write_gmv_reorder_data_vectors; + Threads::Task<> reorder_task = Threads::new_task (fun_ptr, patches, data_vectors); + + // Create file access properties + plist_id = H5Pcreate(H5P_FILE_ACCESS); + AssertThrow(plist_id != -1, ExcIO()); + // If MPI is enabled *and* HDF5 is parallel, we can do parallel output +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI +#ifdef H5_HAVE_PARALLEL + // Set the access to use the specified MPI_Comm object + status = H5Pset_fapl_mpio(plist_id, comm, MPI_INFO_NULL); + AssertThrow(status >= 0, ExcIO()); +#endif +#endif + + // Overwrite any existing files (change this to an option?) and close the property list + h5_file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id); + AssertThrow(h5_file_id >= 0, ExcIO()); + status = H5Pclose(plist_id); + AssertThrow(status >= 0, ExcIO()); + + // Compute the global total number of nodes/cells + // And determine the offset of the data for this process +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI + MPI_Allreduce(local_node_cell_count, global_node_cell_count, 2, MPI_UNSIGNED, MPI_SUM, comm); + MPI_Scan(local_node_cell_count, global_node_cell_offsets, 2, MPI_UNSIGNED, MPI_SUM, comm); + global_node_cell_offsets[0] -= local_node_cell_count[0]; + global_node_cell_offsets[1] -= local_node_cell_count[1]; +#else + global_node_cell_offsets[0] = global_node_cell_offsets[1] = 0; +#endif + + // Write the nodes/cells to the HDF5 "stream" object. 
Record the process offset + // so that node reference indices are correctly calculated + HDF5MemStream hdf5_data(local_node_cell_count, global_node_cell_offsets, dim); + write_nodes(patches, hdf5_data); + write_cells(patches, hdf5_data); + + // Create the dataspace for the nodes and cells + node_ds_dim[0] = global_node_cell_count[0]; + node_ds_dim[1] = dim; + node_dataspace = H5Screate_simple(2, node_ds_dim, NULL); + AssertThrow(node_dataspace >= 0, ExcIO()); + + cell_ds_dim[0] = global_node_cell_count[1]; + cell_ds_dim[1] = GeometryInfo::vertices_per_cell; + cell_dataspace = H5Screate_simple(2, cell_ds_dim, NULL); + AssertThrow(cell_dataspace >= 0, ExcIO()); + + // Create the dataset for the nodes and cells + node_dataset = H5Dcreate(h5_file_id, "nodes", H5T_NATIVE_DOUBLE, node_dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + AssertThrow(node_dataset >= 0, ExcIO()); + cell_dataset = H5Dcreate(h5_file_id, "cells", H5T_NATIVE_UINT, cell_dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + AssertThrow(cell_dataset >= 0, ExcIO()); + + // Close the node and cell dataspaces since we're done with them + status = H5Sclose(node_dataspace); + AssertThrow(status >= 0, ExcIO()); + status = H5Sclose(cell_dataspace); + AssertThrow(status >= 0, ExcIO()); + + // Create the data subset we'll use to read from memory + count[0] = local_node_cell_count[0]; + count[1] = dim; + offset[0] = global_node_cell_offsets[0]; + offset[1] = 0; + node_memory_dataspace = H5Screate_simple(2, count, NULL); + AssertThrow(node_memory_dataspace >= 0, ExcIO()); + + // Select the hyperslab in the file + node_file_dataspace = H5Dget_space(node_dataset); + AssertThrow(node_file_dataspace >= 0, ExcIO()); + status = H5Sselect_hyperslab(node_file_dataspace, H5S_SELECT_SET, offset, NULL, count, NULL); + AssertThrow(status >= 0, ExcIO()); + + // And repeat for cells + count[0] = local_node_cell_count[1]; + count[1] = GeometryInfo::vertices_per_cell; + offset[0] = global_node_cell_offsets[1]; + offset[1] = 0; + cell_memory_dataspace = H5Screate_simple(2, count, NULL); + AssertThrow(cell_memory_dataspace >= 0, ExcIO()); + + cell_file_dataspace = H5Dget_space(cell_dataset); + AssertThrow(cell_file_dataspace >= 0, ExcIO()); + status = H5Sselect_hyperslab(cell_file_dataspace, H5S_SELECT_SET, offset, NULL, count, NULL); + AssertThrow(status >= 0, ExcIO()); + + // Create the property list for a collective write + plist_id = H5Pcreate(H5P_DATASET_XFER); + AssertThrow(plist_id >= 0, ExcIO()); +#ifdef DEAL_II_COMPILER_SUPPORTS_MPI +#ifdef H5_HAVE_PARALLEL + status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE); + AssertThrow(status >= 0, ExcIO()); +#endif +#endif + + // And finally, write the node data + status = H5Dwrite(node_dataset, H5T_NATIVE_DOUBLE, node_memory_dataspace, node_file_dataspace, plist_id, hdf5_data.node_data()); + AssertThrow(status >= 0, ExcIO()); + + // And the cell data + status = H5Dwrite(cell_dataset, H5T_NATIVE_UINT, cell_memory_dataspace, cell_file_dataspace, plist_id, hdf5_data.cell_data()); + AssertThrow(status >= 0, ExcIO()); + + reorder_task.join (); + + // when writing, first write out + // all vector data, then handle the + // scalar data sets that have been + // left over + unsigned int i, n, q, r, n_th_vector, data_set, pt_data_vector_dim, mem_vector_dim; + double *pt_data; + std::string vector_name; + for (n_th_vector=0,data_set=0;data_set(vector_data_ranges[n_th_vector]) < data_set) n_th_vector++; + + // Determine the dimension of this data + if (std_cxx1x::get<0>(vector_data_ranges[n_th_vector]) == 
data_set) { + // Multiple dimensions + pt_data_vector_dim = std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) - std_cxx1x::get<0>(vector_data_ranges[n_th_vector])+1; + + // Ensure the dimensionality of the data is correct + AssertThrow (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) >= std_cxx1x::get<0>(vector_data_ranges[n_th_vector]), + ExcLowerRange (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]), std_cxx1x::get<0>(vector_data_ranges[n_th_vector]))); + AssertThrow (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]) < n_data_sets, + ExcIndexRange (std_cxx1x::get<1>(vector_data_ranges[n_th_vector]), 0, n_data_sets)); + + // Determine the vector name + // Concatenate all the + // component names with double + // underscores unless a vector + // name has been specified + if (std_cxx1x::get<2>(vector_data_ranges[n_th_vector]) != "") { + vector_name = std_cxx1x::get<2>(vector_data_ranges[n_th_vector]); + } else { + vector_name = ""; + for (i=std_cxx1x::get<0>(vector_data_ranges[n_th_vector]);i(vector_data_ranges[n_th_vector]);++i) + vector_name += data_names[i] + "__"; + vector_name += data_names[std_cxx1x::get<1>(vector_data_ranges[n_th_vector])]; + } + } else { + // One dimension + pt_data_vector_dim = 1; + vector_name = data_names[data_set]; + } + + // Allocate space for the point data + // Must be either 1D or 3D + mem_vector_dim = (pt_data_vector_dim>1?3:1); + pt_data = new double[local_node_cell_count[0]*mem_vector_dim]; + + // Create the dataspace for the point data + node_ds_dim[0] = global_node_cell_count[0]; + node_ds_dim[1] = mem_vector_dim; + pt_data_dataspace = H5Screate_simple(2, node_ds_dim, NULL); + AssertThrow(pt_data_dataspace >= 0, ExcIO()); + + pt_data_dataset = H5Dcreate(h5_file_id, vector_name.c_str(), H5T_NATIVE_DOUBLE, pt_data_dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + AssertThrow(pt_data_dataset >= 0, ExcIO()); + + // Create the data subset we'll use to read from memory + count[0] = local_node_cell_count[0]; + count[1] = mem_vector_dim; + offset[0] = global_node_cell_offsets[0]; + offset[1] = 0; + pt_data_memory_dataspace = H5Screate_simple(2, count, NULL); + AssertThrow(pt_data_memory_dataspace >= 0, ExcIO()); + + // Select the hyperslab in the file + pt_data_file_dataspace = H5Dget_space(pt_data_dataset); + AssertThrow(pt_data_file_dataspace >= 0, ExcIO()); + status = H5Sselect_hyperslab(pt_data_file_dataspace, H5S_SELECT_SET, offset, NULL, count, NULL); + AssertThrow(status >= 0, ExcIO()); + + // Write point data to the memory array + r = 0; + for (i=0;i= 0, ExcIO()); + + delete pt_data; + + // Close the dataspaces + status = H5Sclose(pt_data_dataspace); + AssertThrow(status >= 0, ExcIO()); + status = H5Sclose(pt_data_memory_dataspace); + AssertThrow(status >= 0, ExcIO()); + status = H5Sclose(pt_data_file_dataspace); + AssertThrow(status >= 0, ExcIO()); + // Close the dataset + status = H5Dclose(pt_data_dataset); + AssertThrow(status >= 0, ExcIO()); + + // Advance the current data set + data_set += pt_data_vector_dim; + } + + // Close the file dataspaces + status = H5Sclose(node_file_dataspace); + AssertThrow(status >= 0, ExcIO()); + status = H5Sclose(cell_file_dataspace); + AssertThrow(status >= 0, ExcIO()); + + // Close the memory dataspaces + status = H5Sclose(node_memory_dataspace); + AssertThrow(status >= 0, ExcIO()); + status = H5Sclose(cell_memory_dataspace); + AssertThrow(status >= 0, ExcIO()); + + // Close the datasets + status = H5Dclose(node_dataset); + AssertThrow(status >= 0, ExcIO()); + status = H5Dclose(cell_dataset); + 
AssertThrow(status >= 0, ExcIO()); + + // Close the parallel access + status = H5Pclose(plist_id); + AssertThrow(status >= 0, ExcIO()); + + // Close the file + status = H5Fclose(h5_file_id); + AssertThrow(status >= 0, ExcIO()); +#endif +} + + template void