diff --git a/.gitignore b/.gitignore index 719ad1f7d42..9ebe84da2aa 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ *.pvt[ksu] *.h5m .*_license +*.mypy_cache unix/* unix-opt/* unix-gcov/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index eb0049e7a4b..64e201b2eaa 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -34,6 +34,7 @@ variables: - schedules # Set with CI/CD Pipelines - Run Pipeline - web + dependencies: [] # # This provides the basic order of operations and options template for cloud based stage 1 tests. @@ -44,6 +45,8 @@ variables: extends: .test stage: stage-1 image: jedbrown/mpich-ccache + tags: + - docker before_script: - date - echo nproc=$(nproc) @@ -55,14 +58,22 @@ variables: - echo CONFIG_OPTS=$(CONFIG_OPTS) script: - ${PYTHON} ./configure --with-debugging=0 COPTFLAGS='-O -march=native' CXXOPTFLAGS='-O -march=native' FOPTFLAGS='-O -march=native' ${CONFIG_OPTS} - - make CFLAGS=-Werror CXXFLAGS=-Werror - - make allgtests-tap search="${TEST_SEARCH}" TIMEOUT=${TIMEOUT} + - make CFLAGS=-Werror CXXFLAGS=-Werror FFLAGS=-Werror + - make CFLAGS=-Werror CXXFLAGS=-Werror FFLAGS=-Werror allgtests-tap search="${TEST_SEARCH}" TIMEOUT=${TIMEOUT} after_script: - date - CCACHE_DIR=$(pwd)/.ccache ccache --show-stats artifacts: reports: junit: arch-*/tests/testresults.xml + name: "$CI_JOB_NAME" + when: always + paths: + - arch-*/lib/petsc/conf/*.log + - arch-*/tests/testresults.xml + - arch-*/tests/test_*_tap.log + - arch-*/tests/test_*_err.log + expire_in: 4 days cache: paths: - .ccache/ @@ -115,13 +126,21 @@ checksource: - printf "./config/examples/${TEST_ARCH}.py\n" - cat ./config/examples/${TEST_ARCH}.py - ./config/examples/${TEST_ARCH}.py - - make CFLAGS=${MAKE_CFLAGS} CXXFLAGS=${MAKE_CXXFLAGS} - - make check + - make CFLAGS=${MAKE_CFLAGS} CXXFLAGS=${MAKE_CXXFLAGS} FFLAGS=${MAKE_FFLAGS} + - make CFLAGS=${MAKE_CFLAGS} CXXFLAGS=${MAKE_CXXFLAGS} FFLAGS=${MAKE_FFLAGS} check - make updatedatafiles - - make cleantest allgtests-tap TIMEOUT=${TIMEOUT} + - if [ -z ${DISABLE_TESTS+x} ]; then make CFLAGS=${MAKE_CFLAGS} CXXFLAGS=${MAKE_CXXFLAGS} FFLAGS=${MAKE_FFLAGS} cleantest allgtests-tap TIMEOUT=${TIMEOUT}; fi artifacts: reports: junit: ${TEST_ARCH}/tests/testresults.xml + name: "$CI_JOB_NAME" + when: always + paths: + - ${TEST_ARCH}/lib/petsc/conf/*.log + - ${TEST_ARCH}/tests/testresults.xml + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_tap.log + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_err.log + expire_in: 4 days variables: OPENBLAS_NUM_THREADS: 1 @@ -152,9 +171,9 @@ checksource: - grep PRETTY_NAME /etc/os-release - echo $(nproc) - ccache --zero-stats + - if [ ! 
-z ${LOAD_MODULES+x} ]; then module load ${LOAD_MODULES}; module list; fi after_script: - date - - cp configure.log ../configure.log-${CI_JOB_NAME}-${CI_PIPELINE_ID}-${CI_JOB_ID} - ccache --show-stats .linux_test: @@ -162,11 +181,13 @@ checksource: variables: MAKE_CFLAGS: -Werror MAKE_CXXFLAGS: -Werror + MAKE_FFLAGS: -Werror .freebsd_test: variables: MAKE_CFLAGS: -Werror MAKE_CXXFLAGS: -Werror + MAKE_FFLAGS: -Werror before_script: - date - hostname @@ -175,13 +196,13 @@ checksource: - ccache --zero-stats after_script: - date - - cp configure.log ../configure.log-${CI_JOB_NAME}-${CI_PIPELINE_ID}-${CI_JOB_ID} - ccache --show-stats .osx_test: variables: MAKE_CFLAGS: -Werror MAKE_CXXFLAGS: -Werror + MAKE_FFLAGS: -Werror before_script: - date - hostname @@ -190,7 +211,6 @@ checksource: - ccache --zero-stats after_script: - date - - cp configure.log ../configure.log-${CI_JOB_NAME}-${CI_PIPELINE_ID}-${CI_JOB_ID} - ccache --show-stats .opensolaris_test: @@ -201,7 +221,6 @@ checksource: - echo $(nproc) after_script: - date - - cp configure.log ../configure.log-${CI_JOB_NAME}-${CI_PIPELINE_ID}-${CI_JOB_ID} .mswin_test: before_script: @@ -211,7 +230,6 @@ checksource: - echo $(nproc) after_script: - date - - cp configure.log ../configure.log-${CI_JOB_NAME}-${CI_PIPELINE_ID}-${CI_JOB_ID} # # The following tests run as part of stage-2. @@ -227,7 +245,8 @@ linux-gcc-complex-opt: - .stage-2 - .linux_test tags: - - linux-dell + - linux-stage2 + - ci:2 variables: TEST_ARCH: arch-ci-linux-gcc-complex-opt @@ -236,7 +255,7 @@ linux-gcc-pkgs-opt: - .stage-2 - .linux_test tags: - - linux-dell + - linux-stage2 variables: TEST_ARCH: arch-ci-linux-gcc-pkgs-opt @@ -245,7 +264,7 @@ linux-gcc-quad-64idx-dbg: - .stage-2 - .linux_test tags: - - linux-dell + - linux-stage2 variables: TEST_ARCH: arch-ci-linux-gcc-quad-64idx-dbg @@ -254,7 +273,7 @@ linux-cuda-double: - .stage-2 - .linux_test tags: - - name:frog + - gpu:nvidia, os:linux, name:p1 variables: PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -cuda_initialize 0 TEST_ARCH: arch-ci-linux-cuda-double @@ -273,7 +292,7 @@ freebsd-cxx-cmplx-64idx-dbg: - .stage-2 - .freebsd_test tags: - - name:petsc-fbsd + - os:fbsd variables: TEST_ARCH: arch-ci-freebsd-cxx-cmplx-64idx-dbg @@ -291,7 +310,7 @@ osx-cxx-pkgs-opt: - .stage-3 - .osx_test tags: - - os:osx + - os:macos variables: PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -saws_port_auto_select -saws_port_auto_select_silent -vecscatter_mpi1 false -options_left false TEST_ARCH: arch-ci-osx-cxx-pkgs-opt @@ -305,6 +324,24 @@ mswin-uni: variables: TEST_ARCH: arch-ci-mswin-uni +mswin-gnu: + extends: + - .stage-2 + - .mswin_test + tags: + - name:ps5-3 + variables: + TEST_ARCH: arch-ci-mswin-gnu + MAKE_CFLAGS: -Werror + MAKE_CXXFLAGS: -Werror + MAKE_FFLAGS: -Werror + DISABLE_TESTS: 1 + artifacts: + reports: + paths: + - arch-*/lib/petsc/conf/*.log + expire_in: 4 days + # # The following tests run as part of stage-3. 
# @@ -327,7 +364,7 @@ mswin-intel: variables: TEST_ARCH: arch-ci-mswin-intel -arch-ci-mswin-opt-impi: +mswin-opt-impi: extends: - .stage-3 - .mswin_test @@ -387,7 +424,7 @@ osx-cxx-cmplx-pkgs-dbg: - .stage-3 - .osx_test tags: - - os:osx + - os:macos variables: TEST_ARCH: arch-ci-osx-cxx-cmplx-pkgs-dbg @@ -396,7 +433,7 @@ osx-dbg: - .stage-3 - .osx_test tags: - - os:osx + - os:macos variables: TEST_ARCH: arch-ci-osx-dbg @@ -405,7 +442,7 @@ osx-xsdk-opt: - .stage-3 - .osx_test tags: - - os:osx + - os:macos variables: TEST_ARCH: arch-ci-osx-xsdk-opt @@ -418,6 +455,18 @@ linux-cmplx-gcov: variables: PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -vecscatter_mpi1 false -options_left false TEST_ARCH: arch-ci-linux-cmplx-gcov + after_script: + - make gcov + artifacts: + when: always + paths: + - ${TEST_ARCH}/lib/petsc/conf/*.log + - ${TEST_ARCH}/tests/testresults.xml + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_tap.log + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_err.log + - ${TEST_ARCH}/gcov.tar.gz + expire_in: 4 days + linux-cmplx-single: extends: @@ -469,7 +518,7 @@ linux-c-exodus-dbg: - .stage-3 - .linux_test tags: - - name:frog + - gpu:nvidia, os:linux variables: PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -cuda_initialize 0 TEST_ARCH: arch-ci-linux-c-exodus-dbg @@ -479,7 +528,7 @@ linux-cuda-single: - .stage-3 - .linux_test tags: - - name:frog + - gpu:nvidia, os:linux variables: PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -cuda_initialize 0 TEST_ARCH: arch-ci-linux-cuda-single @@ -489,8 +538,9 @@ linux-viennacl: - .stage-3 - .linux_test tags: - - name:frog + - gpu:nvidia, os:linux, name:frog variables: + PETSC_OPTIONS: -check_pointer_intensity 0 -error_output_stdout -nox -nox_warning -malloc_dump -cuda_initialize 0 TEST_ARCH: arch-ci-linux-viennacl linux-opt-misc: @@ -498,7 +548,7 @@ linux-opt-misc: - .stage-3 - .linux_test tags: - - name:compute004 + - gce-nfs variables: TEST_ARCH: arch-ci-linux-opt-misc @@ -556,13 +606,25 @@ linux-gcov: - mcs-nfs variables: TEST_ARCH: arch-ci-linux-gcov + after_script: + - make gcov + artifacts: + when: always + paths: + - ${TEST_ARCH}/lib/petsc/conf/*.log + - ${TEST_ARCH}/tests/testresults.xml + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_tap.log + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_err.log + - ${TEST_ARCH}/gcov.tar.gz + expire_in: 4 days + linux-ILP64: extends: - .stage-3 - .linux_test tags: - - name:compute004 + - gce-nfs variables: TEST_ARCH: arch-ci-linux-ILP64 @@ -574,6 +636,18 @@ linux-matlab-ilp64-gcov: - mcs-nfs variables: TEST_ARCH: arch-ci-linux-matlab-ilp64-gcov + after_script: + - make gcov + artifacts: + when: always + paths: + - ${TEST_ARCH}/lib/petsc/conf/*.log + - ${TEST_ARCH}/tests/testresults.xml + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_tap.log + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_err.log + - ${TEST_ARCH}/gcov.tar.gz + expire_in: 4 days + linux-opt-cxx-quad: extends: @@ -610,13 +684,24 @@ linux-pkgs-gcov: - mcs-nfs variables: TEST_ARCH: arch-ci-linux-pkgs-gcov + after_script: + - make gcov + artifacts: + when: always + paths: + - ${TEST_ARCH}/lib/petsc/conf/*.log + - ${TEST_ARCH}/tests/testresults.xml + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_tap.log + - ${TEST_ARCH}/tests/test_${TEST_ARCH}_err.log + - ${TEST_ARCH}/gcov.tar.gz + expire_in: 4 days linux-pkgs-opt: extends: - .stage-3 - .linux_test tags: - - name:compute004 + - gce-nfs variables: PETSC_OPTIONS: -check_pointer_intensity 0 
-error_output_stdout -nox -nox_warning -malloc_dump -saws_port_auto_select -saws_port_auto_select_silent TEST_ARCH: arch-ci-linux-pkgs-opt @@ -626,7 +711,7 @@ linux-pkgs-valgrind: - .stage-3 - .linux_test tags: - - mcs-nfs,server + - valgrind variables: TEST_ARCH: arch-ci-linux-pkgs-valgrind TIMEOUT: 7200 @@ -640,14 +725,25 @@ linux-xsdk-dbg: variables: TEST_ARCH: arch-ci-linux-xsdk-dbg -linux-pardiso: +linux-intel: extends: - .stage-3 - - .linux_test + - .linux_test_noflags tags: - - mcs-nfs + - gce-nfs variables: - TEST_ARCH: arch-ci-linux-pardiso + TEST_ARCH: arch-ci-linux-intel + LOAD_MODULES: intel-mkl/19.5 intel/19.0 + +linux-intel-cmplx: + extends: + - .stage-3 + - .linux_test_noflags + tags: + - gce-nfs + variables: + TEST_ARCH: arch-ci-linux-intel-cmplx + LOAD_MODULES: intel-mkl/19.5 intel/19.0 linux-nagfor: extends: @@ -657,7 +753,57 @@ linux-nagfor: - mcs-nfs variables: TEST_ARCH: arch-ci-linux-nagfor + MAKE_FFLAGS: -DNO_WERROR_FLAG + +linux-64idx-i8: + extends: + - .stage-3 + - .linux_test + tags: + - gce-nfs + variables: + TEST_ARCH: arch-ci-linux-64idx-i8 + +linux-64idx-i8-uni: + extends: + - .stage-3 + - .linux_test + tags: + - gce-nfs + variables: + TEST_ARCH: arch-ci-linux-64idx-i8-uni +# job for analyzing the final coverage results +analyze-pipeline: + stage: .post + image: jedbrown/mpich-ccache + variables: + PYTHON: python3 + # Need a dummy TEST_ARCH that comes from artifacts + TEST_ARCH: arch-ci-linux-pkgs-gcov + only: + refs: +# Set with CI/CD Shedules - New Schedule + - schedules +# Set with CI/CD Pipelines - Run Pipeline + - web + allow_failure: true + before_script: + - date + script: + - python3 ./configure --with-mpi=0 --with-fc=0 --with-cxx=0 +# - make mergegcov LOC=$PWD + - ${PYTHON} lib/petsc/bin/maint/gcov.py --merge_gcov --loc=${PWD} --petsc_arch ${TEST_ARCH} + - ls + - ls arch*/* + artifacts: + name: "$CI_JOB_NAME" + when: always + paths: + - i*.html + - arch-*-gcov/* + expire_in: 4 days +# # # The following tests are experimental; more tests by users at other sites may be added below this. Experimental test # that fail produce a warning, but do not block execution of a pipeline. 
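# Note on the shell idiom used in the job scripts above: a minimal POSIX-shell sketch,
# not part of the patch. The variable names DISABLE_TESTS and LOAD_MODULES come from the
# job definitions; the echoed text is illustrative only.
#
#   # ${DISABLE_TESTS+x} expands to "x" whenever DISABLE_TESTS is set (even to an empty
#   # string) and to nothing when it is unset, so `[ -z ${DISABLE_TESTS+x} ]` is true
#   # exactly when the variable is unset.
#   unset DISABLE_TESTS
#   if [ -z ${DISABLE_TESTS+x} ]; then echo "would run allgtests-tap"; fi   # runs
#   DISABLE_TESTS=1
#   if [ -z ${DISABLE_TESTS+x} ]; then echo "would run allgtests-tap"; fi   # skipped
#   # the inverse test gates optional module loading:
#   LOAD_MODULES="intel-mkl/19.5 intel/19.0"
#   if [ ! -z ${LOAD_MODULES+x} ]; then echo "module load ${LOAD_MODULES}"; fi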
diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS index 51de90edecc..1bdae102243 100644 --- a/.gitlab/CODEOWNERS +++ b/.gitlab/CODEOWNERS @@ -3,16 +3,22 @@ /src/tao/ @adener @tmunson /src/ts/ @caidao22 @jedbrown @emconsta @stefanozampini @BarrySmith +src/ts/event/ @abhyshr @caidao22 @BarrySmith /src/dm/impls/plex/ @knepley @wence @haplav @tisaac /src/dm/impls/network/ @hongzhangsun @BarrySmith /src/dm/impls/stag/ @psanan +src/dm/impls/network/ @abhyshr @hongzhangsun +*/examples/*/network/ @abhyshr @hongzhangsun + /src/ksp/pc/impls/bddc/ @stefanozampini /src/ksp/pc/impls/gamg/ @markadams4 /src/ksp/pc/impls/hpddm/ @prj- @stefanozampini /src/ksp/ksp/impls/hpddm/ @prj- @stefanozampini +/src/sys/classes/viewer/ @petsc/integrators @haplav + /src/vec/is/sf/ @jedbrown @jczhang07 /src/vec/vscat/ @jczhang07 @BarrySmith diff --git a/config/BuildSystem/config/base.py b/config/BuildSystem/config/base.py index d546942af89..6fc0b42aefc 100644 --- a/config/BuildSystem/config/base.py +++ b/config/BuildSystem/config/base.py @@ -222,7 +222,7 @@ def checkExecutable(self, dir, name): def getExecutable(self, names, path = [], getFullPath = 0, useDefaultPath = 0, resultName = '', setMakeMacro = 1): '''Search for an executable in the list names - - Each name in the list is tried for each entry in the path + - Each name in the list is tried for each entry in the path until a name is located, then it stops - If found, the path is stored in the variable "name", or "resultName" if given - By default, a make macro "resultName" will hold the path''' found = 0 @@ -291,6 +291,26 @@ def getNames(name, resultName): setattr(self, varName, name+options) if setMakeMacro: self.addMakeMacro(varName.upper(), getattr(self, varName)) + else: + def logPrintFilesInPath(path): + for d in path: + try: + self.logWrite(' '+str(os.listdir(d))+'\n') + except OSError as e: + self.logWrite(' Warning accessing '+d+' gives errors: '+str(e)+'\n') + return + self.logWrite(' Unable to find programs '+str(names)+' providing listing of each search directory to help debug\n') + self.logWrite(' Path provided in Python program\n') + logPrintFilesInPath(path) + if useDefaultPath: + if os.environ['PATH'].split(os.path.pathsep): + self.logWrite(' Path provided by default path\n') + logPrintFilesInPath(os.environ['PATH'].split(os.path.pathsep)) + dirs = self.argDB['with-executables-search-path'] + if not isinstance(dirs, list): dirs = [dirs] + if dirs: + self.logWrite(' Path provided by --with-executables-search-path\n') + logPrintFilesInPath(dirs) return found def getExecutables(self, names, path = '', getFullPath = 0, useDefaultPath = 0, resultName = ''): diff --git a/config/BuildSystem/config/compilerOptions.py b/config/BuildSystem/config/compilerOptions.py index 536359ce88d..5c5be25b3f9 100644 --- a/config/BuildSystem/config/compilerOptions.py +++ b/config/BuildSystem/config/compilerOptions.py @@ -24,6 +24,8 @@ def getCFlags(self, compiler, bopt, language): # skip -fstack-protector for brew gcc - as this gives SEGV if not (config.setCompilers.Configure.isDarwin(self.log) and config.setCompilers.Configure.isGNU(compiler, self.log)): flags.extend(['-fstack-protector']) + if config.setCompilers.Configure.isDarwinCatalina(self.log) and config.setCompilers.Configure.isClang(compiler, self.log): + flags.extend(['-fno-stack-check']) flags.extend(['-mfp16-format=ieee']) # arm for utilizing 16 bit storage of floating point if config.setCompilers.Configure.isClang(compiler, self.log): flags.extend(['-Qunused-arguments']) @@ -111,6 +113,8 @@ def 
getCxxFlags(self, compiler, bopt): # skip -fstack-protector for brew gcc - as this gives SEGV if not (config.setCompilers.Configure.isDarwin(self.log) and config.setCompilers.Configure.isGNU(compiler, self.log)): flags.extend(['-fstack-protector']) + if config.setCompilers.Configure.isDarwinCatalina(self.log) and config.setCompilers.Configure.isClang(compiler, self.log): + flags.extend(['-fno-stack-check']) # The option below would prevent warnings about compiling C as C++ being deprecated, but it causes Clang to SEGV, http://llvm.org/bugs/show_bug.cgi?id=12924 # flags.extend([('-x','c++')]) if self.argDB['with-visibility']: diff --git a/config/BuildSystem/config/compilers.py b/config/BuildSystem/config/compilers.py index a91f08f319c..b4cbc183c0e 100644 --- a/config/BuildSystem/config/compilers.py +++ b/config/BuildSystem/config/compilers.py @@ -473,6 +473,7 @@ def checkCxxDialect(self): includes = """ #include #include + #include template constexpr T Cubed( T x ) { return x*x*x; } """ body = """ @@ -483,6 +484,7 @@ def checkCxxDialect(self): std::cout << x; """ body14 = """ + constexpr std::complex I(0.0,1.0); auto lambda = [](auto x, auto y) {return x + y;}; return lambda(3,4); """ diff --git a/config/BuildSystem/config/compilersFortran.py b/config/BuildSystem/config/compilersFortran.py index c12fa554608..16d861d2cf4 100644 --- a/config/BuildSystem/config/compilersFortran.py +++ b/config/BuildSystem/config/compilersFortran.py @@ -146,8 +146,11 @@ def checkFortranTypeStar(self): def checkFortran90(self): '''Determine whether the Fortran compiler handles F90''' self.pushLanguage('FC') - if self.checkLink(body = ' INTEGER, PARAMETER :: int = SELECTED_INT_KIND(8)\n INTEGER (KIND=int) :: ierr\n\n ierr = 1'): - self.addDefine('USING_F90', 1) + if self.checkLink(body = ''' + REAL(KIND=SELECTED_REAL_KIND(10)) d + INTEGER, PARAMETER :: int = SELECTED_INT_KIND(8) + INTEGER (KIND=int) :: ierr + ierr = 1'''): self.fortranIsF90 = 1 self.logPrint('Fortran compiler supports F90') else: diff --git a/config/BuildSystem/config/framework.py b/config/BuildSystem/config/framework.py index 8b25d07ebc8..fac66fa0dfd 100644 --- a/config/BuildSystem/config/framework.py +++ b/config/BuildSystem/config/framework.py @@ -1050,6 +1050,7 @@ def processChildren(num, q): # the handling of logs, error messages, and tracebacks from errors in children # does not work correctly. 
except (RuntimeError, config.base.ConfigureSetupError) as e: + tbo = sys.exc_info()[2] emsg = str(e) if not emsg.endswith('\n'): emsg = emsg+'\n' msg ='*******************************************************************************\n'\ @@ -1067,6 +1068,7 @@ def processChildren(num, q): +emsg+'*******************************************************************************\n' se = '' except ImportError as e : + tbo = sys.exc_info()[2] emsg = str(e) if not emsg.endswith('\n'): emsg = emsg+'\n' msg ='*******************************************************************************\n'\ @@ -1075,6 +1077,7 @@ def processChildren(num, q): +emsg+'*******************************************************************************\n' se = '' except OSError as e : + tbo = sys.exc_info()[2] emsg = str(e) if not emsg.endswith('\n'): emsg = emsg+'\n' msg ='*******************************************************************************\n'\ @@ -1083,6 +1086,7 @@ def processChildren(num, q): +emsg+'*******************************************************************************\n' se = '' except SystemExit as e: + tbo = sys.exc_info()[2] if e.code is None or e.code == 0: return msg ='*******************************************************************************\n'\ @@ -1090,6 +1094,7 @@ def processChildren(num, q): +'*******************************************************************************\n' se = str(e) except Exception as e: + tbo = sys.exc_info()[2] msg ='*******************************************************************************\n'\ +' CONFIGURATION CRASH (Please send configure.log to petsc-maint@mcs.anl.gov)\n' \ +'*******************************************************************************\n' @@ -1145,9 +1150,33 @@ def processChildren(num, q): def serialEvaluation(self, depGraph): import graph + ndepGraph = graph.DirectedGraph.topologicalSort(depGraph) + for child in ndepGraph: + # note, only classes derived from package.py have this attribute + if hasattr(child,'deps'): + found = 0 + if child.lookforbydefault: found = 1 + if 'download-'+child.package in self.framework.clArgDB and self.argDB['download-'+child.package]: found = 1 + if 'with-'+child.package in self.framework.clArgDB and self.argDB['with-'+child.package]: found = 1 + if 'with-'+child.package+'-lib' in self.framework.clArgDB and self.argDB['with-'+child.package+'-lib']: found = 1 + if 'with-'+child.package+'-dir' in self.framework.clArgDB and self.argDB['with-'+child.package+'-dir']: found = 1 + if not found: continue + msg = '' + for dep in child.deps: + found = 0 + if dep.lookforbydefault: found = 1 + if 'download-'+dep.package in self.framework.clArgDB and self.argDB['download-'+dep.package]: found = 1 + if 'with-'+dep.package in self.framework.clArgDB and self.argDB['with-'+dep.package]: found = 1 + if 'with-'+dep.package+'-lib' in self.framework.clArgDB and self.argDB['with-'+dep.package+'-lib']: found = 1 + if 'with-'+dep.package+'-dir' in self.framework.clArgDB and self.argDB['with-'+dep.package+'-dir']: found = 1 + if not found: msg += 'Package '+child.package+' requested but dependency '+dep.package+' not requested. 
Perhaps you want --download-'+dep.package+'\n' + if msg: raise RuntimeError(msg) + if child.cxx and ('with-cxx' in self.framework.clArgDB) and (self.argDB['with-cxx'] == '0'): raise RuntimeError('Package '+child.package+' requested requires C++ but compiler turned off.') + if child.fc and ('with-fc' in self.framework.clArgDB) and (self.argDB['with-fc'] == '0'): raise RuntimeError('Package '+child.package+' requested requires Fortran but compiler turned off.') + + depGraph = graph.DirectedGraph.topologicalSort(depGraph) totaltime = 0 starttime = time.time() - depGraph = graph.DirectedGraph.topologicalSort(depGraph) for child in depGraph: start = time.time() if not hasattr(child, '_configured'): diff --git a/config/BuildSystem/config/libraries.py b/config/BuildSystem/config/libraries.py index 39c09f321c9..8b228de6bd1 100644 --- a/config/BuildSystem/config/libraries.py +++ b/config/BuildSystem/config/libraries.py @@ -338,7 +338,7 @@ def checkDynamic(self): self.headers.check('dlfcn.h') return - def checkShared(self, includes, initFunction, checkFunction, finiFunction = None, checkLink = None, libraries = [], initArgs = '&argc, &argv', boolType = 'int', noCheckArg = 0, defaultArg = '', executor = None, timeout = 15): + def checkShared(self, includes, initFunction, checkFunction, finiFunction = None, checkLink = None, libraries = [], initArgs = '&argc, &argv', boolType = 'int', noCheckArg = 0, defaultArg = '', executor = None, timeout = 60): '''Determine whether a library is shared - initFunction(int *argc, char *argv[]) is called to initialize some static data - checkFunction(int *check) is called to verify that the static data wer set properly diff --git a/config/BuildSystem/config/package.py b/config/BuildSystem/config/package.py index 63ae9286d54..963f3f3d62d 100644 --- a/config/BuildSystem/config/package.py +++ b/config/BuildSystem/config/package.py @@ -2,6 +2,7 @@ import config.base import os +import re try: from hashlib import md5 as new_md5 @@ -55,6 +56,8 @@ def __init__(self, framework): self.liblist = [[]] # list of libraries we wish to check for (packages can override with their own generateLibList() method) self.extraLib = [] # additional libraries needed to link self.includes = [] # headers to check for + self.optionalincludes = [] # headers to check for, do not error if not found + self.foundoptionalincludes = 0 self.functions = [] # functions we wish to check for in the libraries self.functionsDefine = [] # optional functions we wish to check for in the libraries that should generate a PETSC_HAVE_ define self.functionsFortran = 0 # 1 means the symbols in self.functions are Fortran symbols, so name-mangling is done @@ -89,7 +92,7 @@ def __init__(self, framework): self.isMPI = 0 # Is an MPI implementation, needed to check for compiler wrappers self.hastests = 0 # indicates that PETSc make alltests has tests for this package - self.hastestsdatafiles = 0 # indicates that PETSc make all tests has tests for this package that require DATAFILESPATH to be set + self.hastestsdatafiles = 0 # indicates that PETSc make alltests has tests for this package that require DATAFILESPATH to be set self.makerulename = '' # some packages do too many things with the make stage; this allows a package to limit to, for example, just building the libraries self.installedpetsc = 0 self.installwithbatch = 1 # install the package even though configure in the batch mode; f2blaslapack and fblaslapack for example @@ -115,7 +118,7 @@ def __str__(self): if self.lib: output += ' Library: 
'+self.libraries.toStringNoDupes(self.lib)+'\n' if self.executablename: output += ' '+getattr(self,self.executablename)+'\n' if self.usesopenmp == 'yes': output += ' uses OpenMP; use export OMP_NUM_THREADS=
<p> or -omp_num_threads <p>
to control the number of threads\n' - if self.usesopenmp == 'unknown': output += ' Unkown if this uses OpenMP (try export OMP_NUM_THREADS=<1-4> yourprogram -log_view) \n' + if self.usesopenmp == 'unknown': output += ' Unknown if this uses OpenMP (try export OMP_NUM_THREADS=<1-4> yourprogram -log_view) \n' return output def setupDependencies(self, framework): @@ -460,9 +463,9 @@ def generateGuesses(self): else: path = None os.environ['PKG_CONFIG_PATH'] = self.argDB['with-'+self.package+'-pkg-config'] - l,err,ret = config.base.Configure.executeShellCommand('pkg-config '+self.pkgname+' --libs', timeout=5, log = self.log) + l,err,ret = config.base.Configure.executeShellCommand('pkg-config '+self.pkgname+' --libs', timeout=60, log = self.log) l = l.strip() - i,err,ret = config.base.Configure.executeShellCommand('pkg-config '+self.pkgname+' --cflags', timeout=5, log = self.log) + i,err,ret = config.base.Configure.executeShellCommand('pkg-config '+self.pkgname+' --cflags', timeout=60, log = self.log) i = i.strip() if self.argDB['with-'+self.package+'-pkg-config']: if path: os.environ['PKG_CONFIG_PATH'] = path @@ -676,8 +679,11 @@ def updateGitDir(self): try: config.base.Configure.executeShellCommand([self.sourceControl.git, '-c', 'user.name=petsc-configure', '-c', 'user.email=petsc@configure', 'stash'], cwd=self.packageDir, log = self.log) config.base.Configure.executeShellCommand([self.sourceControl.git, 'clean', '-f', '-d', '-x'], cwd=self.packageDir, log = self.log) - except: - raise RuntimeError('Unable to run git stash/clean in repository: '+self.packageDir+'.\nPerhaps its a git error!') + except RuntimeError as e: + if str(e).find("Unknown option: -c") >= 0: + self.logPrintBox('***** WARNING: Unable to "git stash". Likely due to antique git version (<1.8). 
Proceeding without stashing!') + else: + raise RuntimeError('Unable to run git stash/clean in repository: '+self.packageDir+'.\nPerhaps its a git error!') try: config.base.Configure.executeShellCommand([self.sourceControl.git, 'checkout', '-f', gitcommit_hash], cwd=self.packageDir, log = self.log) except: @@ -793,12 +799,13 @@ def checkInclude(self, incl, hfiles, otherIncludes = [], timeout = 600.0): return ret def checkPackageLink(self, includes, body, cleanup = 1, codeBegin = None, codeEnd = None, shared = 0): - oldFlags = self.compilers.CPPFLAGS + flagsArg = self.getPreprocessorFlagsArg() + oldFlags = getattr(self.compilers, flagsArg) oldLibs = self.compilers.LIBS - self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include) + setattr(self.compilers, flagsArg, oldFlags+' '+self.headers.toString(self.include)) self.compilers.LIBS = self.libraries.toString(self.lib)+' '+self.compilers.LIBS result = self.checkLink(includes, body, cleanup, codeBegin, codeEnd, shared) - self.compilers.CPPFLAGS = oldFlags + setattr(self.compilers, flagsArg,oldFlags) self.compilers.LIBS = oldLibs return result @@ -884,10 +891,14 @@ def configureLibrary(self): self.libraries.saveLog() if self.executeTest(self.libraries.check,[lib, self.functions],{'otherLibs' : self.dlib, 'fortranMangle' : self.functionsFortran, 'cxxMangle' : self.functionsCxx[0], 'prototype' : self.functionsCxx[1], 'call' : self.functionsCxx[2], 'cxxLink': self.cxx}): self.lib = lib - self.executeTest(self.libraries.check,[lib, self.functionsDefine],{'otherLibs' : self.dlib, 'fortranMangle' : self.functionsFortran, 'cxxMangle' : self.functionsCxx[0], 'prototype' : self.functionsCxx[1], 'call' : self.functionsCxx[2], 'cxxLink': self.cxx, 'functionDefine': 1}) + if self.functionsDefine: + self.executeTest(self.libraries.check,[lib, self.functionsDefine],{'otherLibs' : self.dlib, 'fortranMangle' : self.functionsFortran, 'cxxMangle' : self.functionsCxx[0], 'prototype' : self.functionsCxx[1], 'call' : self.functionsCxx[2], 'cxxLink': self.cxx, 'functionDefine': 1}) self.logWrite(self.libraries.restoreLog()) - self.logPrint('Checking for headers '+location+': '+str(incl)) - if (not self.includes) or self.checkInclude(incl, self.includes, self.dinclude, timeout = 40.0): + self.logPrint('Checking for optional headers '+str(self.optionalincludes)+' in '+location+': '+str(incl)) + if self.checkInclude(incl, self.optionalincludes, self.dinclude, timeout = 60.0): + self.foundoptionalincludes = 1 + self.logPrint('Checking for headers '+str(self.includes)+' in '+location+': '+str(incl)) + if (not self.includes) or self.checkInclude(incl, self.includes, self.dinclude, timeout = 60.0): if self.includes: self.include = testedincl self.found = 1 @@ -988,31 +999,41 @@ def infinitePatch(str): if not self.version and not self.minversion and not self.maxversion and not self.versionname: return if not self.versioninclude: - if not self.includes: return + if not self.includes: + self.log.write('For '+self.package+' unable to find version information since includes and version includes are missing skipping version check\n') + self.version = '' + return self.versioninclude = self.includes[0] - oldFlags = self.compilers.CPPFLAGS - self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include) if self.cxx: self.pushLanguage('C++') else: self.pushLanguage(self.defaultLanguage) + flagsArg = self.getPreprocessorFlagsArg() + oldFlags = getattr(self.compilers, flagsArg) + setattr(self.compilers, flagsArg, oldFlags+' '+self.headers.toString(self.include)) + 
self.compilers.saveLog() try: - output = self.outputPreprocess('#include "'+self.versioninclude+'"\nversion='+self.versionname+'\n') + output = self.outputPreprocess('#include "'+self.versioninclude+'"\n;petscpkgver('+self.versionname+');\n') + self.logWrite(self.compilers.restoreLog()) except: self.log.write('For '+self.package+' unable to run preprocessor to obtain version information, skipping version check\n') + self.logWrite(self.compilers.restoreLog()) self.popLanguage() - self.compilers.CPPFLAGS = oldFlags + setattr(self.compilers, flagsArg,oldFlags) + self.version = '' return self.popLanguage() - self.compilers.CPPFLAGS = oldFlags - loutput = output.split('\n') + setattr(self.compilers, flagsArg,oldFlags) + #strip #lines + output = re.sub('#.*\n','\n',output) + #strip newlines,spaces,quotes + output = re.sub('[\n "]*','',output) + #now split over ';' + loutput = output.split(';') version = '' for i in loutput: - if i.startswith('version='): - version = i[8:] - break - if i.startswith('version ='): - version = i[9:] + if i.find('petscpkgver') >=0: + version = i.split('(')[1].split(')')[0] break if not version: self.log.write('For '+self.package+' unable to find version information: output below, skipping version check\n') @@ -1020,7 +1041,6 @@ def infinitePatch(str): if self.requiresversion: raise RuntimeError('Configure must be able to determined the version information for '+self.name+'. It was unable to, please send configure.log to petsc-maint@mcs.anl.gov') return - version = version.replace(' ','').replace('\"','') try: self.foundversion = self.versionToStandardForm(version) self.version_tuple = self.versionToTuple(self.foundversion) @@ -1170,14 +1190,14 @@ def compilePETSc(self): if self.framework.argDB['prefix']: try: self.logPrintBox('Installing PETSc; this may take several minutes') - output,err,ret = config.package.Package.executeShellCommand(self.installDirProvider.installSudo+self.make.make+' install PETSC_DIR='+self.petscdir.dir+' PETSC_ARCH='+self.arch, cwd=self.petscdir.dir, timeout=50, log = self.log) + output,err,ret = config.package.Package.executeShellCommand(self.installDirProvider.installSudo+self.make.make+' install PETSC_DIR='+self.petscdir.dir+' PETSC_ARCH='+self.arch, cwd=self.petscdir.dir, timeout=60, log = self.log) self.log.write(output+err) except RuntimeError as e: raise RuntimeError('Error running make install on PETSc: '+str(e)) elif not self.argDB['with-batch']: try: self.logPrintBox('Testing PETSc; this may take several minutes') - output,err,ret = config.package.Package.executeShellCommand(self.make.make+' test PETSC_DIR='+self.petscdir.dir+' PETSC_ARCH='+self.arch, cwd=self.petscdir.dir, timeout=50, log = self.log) + output,err,ret = config.package.Package.executeShellCommand(self.make.make+' test PETSC_DIR='+self.petscdir.dir+' PETSC_ARCH='+self.arch, cwd=self.petscdir.dir, timeout=60, log = self.log) output = output+err self.log.write(output) if output.find('error') > -1 or output.find('Error') > -1: @@ -1516,17 +1536,17 @@ def formGNUConfigureArgs(self): args.append('F90="'+self.setCompilers.cross_fc+'"') else: args.append('F90="'+fc+'"') - args.append('F90FLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','')+'"') + args.append('F90FLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','').replace('-fdefault-integer-8','')+'"') else: args.append('--disable-f90') - args.append('FFLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','')+'"') + 
args.append('FFLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','').replace('-fdefault-integer-8','')+'"') if not self.installwithbatch and hasattr(self.setCompilers,'cross_fc'): args.append('FC="'+self.setCompilers.cross_fc+'"') args.append('F77="'+self.setCompilers.cross_fc+'"') else: args.append('FC="'+fc+'"') args.append('F77="'+fc+'"') - args.append('FCFLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','')+'"') + args.append('FCFLAGS="'+self.removeWarningFlags(self.getCompilerFlags()).replace('-Mfree','').replace('-fdefault-integer-8','')+'"') self.popLanguage() else: args.append('--disable-fortran') diff --git a/config/BuildSystem/config/packages/BlasLapack.py b/config/BuildSystem/config/packages/BlasLapack.py index 4d102b0734a..a217768108f 100644 --- a/config/BuildSystem/config/packages/BlasLapack.py +++ b/config/BuildSystem/config/packages/BlasLapack.py @@ -220,14 +220,14 @@ def generateGuesses(self): mkl = os.getenv('MKLROOT') if mkl: # Since user did not select MKL specifically first try compiler defaults and only if they fail use the MKL - yield ('Default compiler libraries', '', '','unknown','unknow') - yield ('Default compiler locations', 'libblas.a', 'liblapack.a','unknown','unknow') - yield ('Default compiler locations /usr/local/lib', os.path.join('/usr','local','lib','libblas.a'), os.path.join('/usr','local','lib','liblapack.a'),'unknown','unknow') - yield ('OpenBLAS default compiler locations', None, 'libopenblas.a','unknown','unknow') - yield ('OpenBLAS default compiler locations /usr/local/lib', None, os.path.join('/usr','local','lib','libopenblas.a'),'unknown','unknow') - yield ('Default compiler locations with gfortran', None, ['liblapack.a', 'libblas.a','libgfortran.a'],'unknown','unknow') - yield ('Default compiler locations', 'libblis.a', 'liblapack.a','unknown','unknown') - yield ('Default compiler locations /usr/local/lib', os.path.join('/usr','local','lib','libblis.a'), os.path.join('/usr','local','lib','liblapack.a'),'unknown','unknown') + yield ('Default compiler libraries', '', '','unknown','unknown') + yield ('BLIS default compiler locations', 'libblis.a', 'liblapack.a','unknown','unknown') + yield ('BLIS default compiler locations /usr/local/lib', os.path.join('/usr','local','lib','libblis.a'), os.path.join('/usr','local','lib','liblapack.a'),'unknown','unknown') + yield ('OpenBLAS default compiler locations', None, 'libopenblas.a','unknown','unknown') + yield ('OpenBLAS default compiler locations /usr/local/lib', None, os.path.join('/usr','local','lib','libopenblas.a'),'unknown','unknown') + yield ('Default compiler locations', 'libblas.a', 'liblapack.a','unknown','unknown') + yield ('Default compiler locations /usr/local/lib', os.path.join('/usr','local','lib','libblas.a'), os.path.join('/usr','local','lib','liblapack.a'),'unknown','unknown') + yield ('Default compiler locations with gfortran', None, ['liblapack.a', 'libblas.a','libgfortran.a'],'unknown','unknown') self.logWrite('Did not detect default BLAS and LAPACK locations so using the value of MKLROOT to search as --with-blas-lapack-dir='+mkl) self.argDB['with-blaslapack-dir'] = mkl @@ -297,10 +297,6 @@ def generateGuesses(self): return self.log.write('Files and directories in that directory:\n'+str(os.listdir(dir))+'\n') - yield ('User specified installation root (HPUX)', os.path.join(dir, 'libveclib.a'), os.path.join(dir, 'liblapack.a'),'32','unkown') - yield ('User specified OpenBLAS', None, os.path.join(dir, 'libopenblas.a'),'unknown','unkown') - 
yield ('User specified installation root (F2CBLASLAPACK)', os.path.join(dir,'libf2cblas.a'), os.path.join(dir, 'libf2clapack.a'),'32','no') - yield ('User specified installation root(FBLASLAPACK)', os.path.join(dir, 'libfblas.a'), os.path.join(dir, 'libflapack.a'),'32','no') # Check MATLAB [ILP64] MKL yield ('User specified MATLAB [ILP64] MKL Linux lib dir', None, [os.path.join(dir,'bin','glnxa64','mkl.so'), os.path.join(dir,'sys','os','glnxa64','libiomp5.so'), 'pthread'],'64','yes') oldFlags = self.setCompilers.LDFLAGS @@ -370,18 +366,28 @@ def generateGuesses(self): yield ('User specified AMD ACML lib dir', None, [os.path.join(dir,'lib','libacml.a'), os.path.join(dir,'lib','libacml_mv.a')],'32','unknown') yield ('User specified AMD ACML lib dir', None, os.path.join(dir,'lib','libacml_mp.a'),'32','unknown') yield ('User specified AMD ACML lib dir', None, [os.path.join(dir,'lib','libacml_mp.a'), os.path.join(dir,'lib','libacml_mv.a')],'32','unknown') + # BLIS + yield ('User specified installation root', os.path.join(dir, 'libblis.a'), os.path.join(dir, 'liblapack.a'), 'unknown', 'unknown') + # Search for OpenBLAS + yield ('User specified OpenBLAS', None, os.path.join(dir, 'libopenblas.a'),'unknown','unknown') # Search for atlas yield ('User specified ATLAS Linux installation root', [os.path.join(dir, 'libcblas.a'),os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')], [os.path.join(dir, 'liblapack.a')],'32','no') yield ('User specified ATLAS Linux installation root', [os.path.join(dir, 'libf77blas.a'), os.path.join(dir, 'libatlas.a')], [os.path.join(dir, 'liblapack.a')],'32','no') + + yield ('User specified installation root (HPUX)', os.path.join(dir, 'libveclib.a'), os.path.join(dir, 'liblapack.a'),'32','unknown') + yield ('User specified installation root (F2CBLASLAPACK)', os.path.join(dir,'libf2cblas.a'), os.path.join(dir, 'libf2clapack.a'),'32','no') + yield ('User specified installation root(FBLASLAPACK)', os.path.join(dir, 'libfblas.a'), os.path.join(dir, 'libflapack.a'),'32','no') # Search for liblapack.a and libblas.a after the implementations with more specific name to avoid # finding these in /usr/lib despite using -L while attempting to get a different library. - yield ('User specified installation root', os.path.join(dir, 'libblas.a'), os.path.join(dir, 'liblapack.a'),'unknown','unknow') - yield ('User specified installation root', os.path.join(dir, 'libblis.a'), os.path.join(dir, 'liblapack.a'),'unknown','unknown') + yield ('User specified installation root', os.path.join(dir, 'libblas.a'), os.path.join(dir, 'liblapack.a'),'unknown','unknown') raise RuntimeError('You set a value for --with-blaslapack-dir=
<dir>
, but '+self.argDB['with-blaslapack-dir']+' cannot be used\n') if self.defaultPrecision == '__float128': raise RuntimeError('__float128 precision requires f2c libraries; suggest --download-f2cblaslapack\n') + + # Try compiler defaults yield ('Default compiler libraries', '', '','unknown','unknown') + yield ('Default BLIS', 'libblis.a', 'liblapack.a','unknown','unknown') yield ('Default compiler locations', 'libblas.a', 'liblapack.a','unknown','unknown') yield ('Default OpenBLAS', None, 'libopenblas.a','unknown','unknown') # Intel on Mac diff --git a/config/BuildSystem/config/packages/Chaco.py b/config/BuildSystem/config/packages/Chaco.py index cf938e8f9d1..220704df577 100644 --- a/config/BuildSystem/config/packages/Chaco.py +++ b/config/BuildSystem/config/packages/Chaco.py @@ -42,8 +42,8 @@ def Install(self): self.setCompilers.AR+' '+self.setCompilers.AR_FLAGS+' '+'libchaco.'+ self.setCompilers.AR_LIB_SUFFIX+' `ls */*.o |grep -v main/main.o`', self.setCompilers.RANLIB+' libchaco.'+self.setCompilers.AR_LIB_SUFFIX, - [self.installSudo+'mkdir', '-p', os.path.join(self.installDir,self.libdir)], - [self.installSudo+'cp', 'libchaco.'+self.setCompilers.AR_LIB_SUFFIX, os.path.join(self.installDir,self.libdir)] + self.installSudo+'mkdir -p '+os.path.join(self.installDir,self.libdir), + self.installSudo+'cp libchaco.'+self.setCompilers.AR_LIB_SUFFIX+' '+os.path.join(self.installDir,self.libdir) ], cwd=os.path.join(self.packageDir, 'code'), timeout=2500, log = self.log) except RuntimeError as e: diff --git a/config/BuildSystem/config/packages/GLVis.py b/config/BuildSystem/config/packages/GLVis.py index b75e5ececdd..a1c32bab24b 100644 --- a/config/BuildSystem/config/packages/GLVis.py +++ b/config/BuildSystem/config/packages/GLVis.py @@ -75,10 +75,10 @@ def Install(self): self.logPrintBox('Installing GLVis; this may take several minutes') self.installDirProvider.printSudoPasswordMessage() output2,err2,ret2 = config.package.Package.executeShellCommandSeq( - [[self.installSudo+'mkdir', '-p', installBinDir], + [self.installSudo+'mkdir -p '+installBinDir, self.installSudo+'cp -f glvis '+installBinDir+'/.', self.installSudo+'chmod 750 '+installBinDir+'/glvis' - ], cwd=self.packageDir, timeout=50, log = self.log) + ], cwd=self.packageDir, timeout=60, log = self.log) except RuntimeError as e: self.logPrint('Error running make on GLVis: '+str(e)) raise RuntimeError('Error running make on GLVis') diff --git a/config/BuildSystem/config/packages/MPI.py b/config/BuildSystem/config/packages/MPI.py index 0763eb117a7..7f473fc7f90 100644 --- a/config/BuildSystem/config/packages/MPI.py +++ b/config/BuildSystem/config/packages/MPI.py @@ -55,6 +55,7 @@ def __init__(self, framework): # support MPI-3 non-blocking collectives self.support_mpi3_nbc = 0 self.mpi_pkg_version = '' + self.lookforbydefault = 1 return def setupHelp(self, help): @@ -211,24 +212,27 @@ def configureMPIEXEC(self): # TODO: This support for spaces and () should be moved to core BuildSystem self.mpiexec = self.mpiexec.replace(' ', '\\ ').replace('(', '\\(').replace(')', '\\)').replace('\ -',' -') if (hasattr(self, 'ompi_major_version') and int(self.ompi_major_version) >= 3): - (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -help all', checkCommand = noCheck, timeout = 10, log = self.log) + (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -help all', checkCommand = noCheck, timeout = 60, log = self.log, threads = 1) if out.find('--oversubscribe') >=0: self.mpiexec = self.mpiexec + ' --oversubscribe' # using mpiexec 
environmental variables make sure mpiexec matches the MPI libraries and save the variables for testing in PetscInitialize() # the variable HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE is not currently used. PetscInitialize() can check the existence of the environmental variable to # determine if the program has been started with the correct mpiexec (will only be set for parallel runs so not clear how to check appropriately) - (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -n 1 printenv', checkCommand = noCheck, timeout = 10, log = self.log) - if ret: raise RuntimeError('Unable to run '+self.mpiexec+' with option "-n 1"\n'+err) - if out.find('MPIR_CVAR_CH3') > -1: - if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH"); - self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPIR_CVAR_CH3') - elif out.find('MPIR_CVAR_CH3') > -1: - if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH"); - self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPICH') - elif out.find('OMPI_COMM_WORLD_SIZE') > -1: - if hasattr(self,'mpich_numversion'): raise RuntimeError("Your libraries are from MPICH but it appears your mpiexec is from OpenMPI"); - self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'OMP') + (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -n 1 printenv', checkCommand = noCheck, timeout = 60, threads = 1, log = self.log) + if ret: + self.logWrite('Unable to run '+self.mpiexec+' with option "-n 1 printenv"\nThis could be ok, some MPI implementations such as SGI produce a non-zero status with non-MPI programs\n'+out+err) + else: + if out.find('MPIR_CVAR_CH3') > -1: + if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH"); + self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPIR_CVAR_CH3') + elif out.find('MPIR_CVAR_CH3') > -1: + if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH"); + self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPICH') + elif out.find('OMPI_COMM_WORLD_SIZE') > -1: + if hasattr(self,'mpich_numversion'): raise RuntimeError("Your libraries are from MPICH but it appears your mpiexec is from OpenMPI"); + self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'OMP') + self.addMakeMacro('MPIEXEC', self.mpiexec) self.mpiexec = self.mpiexec + ' -n 1' @@ -236,10 +240,9 @@ def configureMPIEXEC(self): includes = '#include ' body = 'MPI_Init(0,0);\nMPI_Finalize();\n' try: - ok = self.checkRun(includes, body, executor = self.mpiexec, timeout = 20) + ok = self.checkRun(includes, body, executor = self.mpiexec, timeout = 60, threads = 1) if not ok: raise RuntimeError('Unable to run MPI program with '+self.mpiexec+' make sure this is the correct program to run MPI jobs') except RuntimeError as e: - print(str(e)) if str(e).find('Runaway process exceeded time limit') > -1: raise RuntimeError('Timeout: Unable to run MPI program with '+self.mpiexec+'\n\ (1) make sure this is the correct program to run MPI jobs\n\ @@ -284,6 +287,9 @@ def configureMPI2(self): # Even MPI_Win_create is in MPI 2.0, we do this test to supress MPIUNI, which does not support MPI one-sided. 
if self.checkLink('#include \n', 'int base[100]; MPI_Win win; if (MPI_Win_create(base,100,4,MPI_INFO_NULL,MPI_COMM_WORLD,&win));\n'): self.addDefine('HAVE_MPI_WIN_CREATE', 1) + # flag broken one-sided tests + if not 'HAVE_MSMPI' in self.defines and not (hasattr(self, 'mpich_numversion') and int(self.mpich_numversion) <= 30004300): + self.addDefine('HAVE_MPI_ONE_SIDED', 1) self.compilers.CPPFLAGS = oldFlags self.compilers.LIBS = oldLibs self.logWrite(self.framework.restoreLog()) @@ -327,6 +333,10 @@ def configureMPI3(self): openmpi_cuda_test = '#include\n #include \n #if defined(MPIX_CUDA_AWARE_SUPPORT) && MPIX_CUDA_AWARE_SUPPORT\n #else\n #error This OpenMPI is not CUDA-aware\n #endif\n' if self.checkCompile(openmpi_cuda_test): self.addDefine('HAVE_MPI_GPU_AWARE', 1) + if self.checkLink('#include \n', 'int ptr[1]; MPI_Win win; if (MPI_Get_accumulate(ptr,1,MPI_INT,ptr,1,MPI_INT,0,0,1,MPI_INT,MPI_SUM,win));\n'): + self.addDefine('HAVE_MPI_GET_ACCUMULATE', 1) + if self.checkLink('#include \n', 'int ptr[1]; MPI_Win win; MPI_Request req; if (MPI_Rget(ptr,1,MPI_INT,0,1,1,MPI_INT,win,&req));\n'): + self.addDefine('HAVE_MPI_RGET', 1) self.compilers.CPPFLAGS = oldFlags self.compilers.LIBS = oldLibs self.logWrite(self.framework.restoreLog()) @@ -349,7 +359,7 @@ def configureMPITypes(self): self.addDefine('HAVE_'+datatype, 1) elif not self.argDB['with-batch']: self.pushLanguage('C') - if self.checkRun(includes, body, defaultArg = 'known-mpi-'+name): + if self.checkRun(includes, body, defaultArg = 'known-mpi-'+name, executor = self.mpiexec): self.addDefine('HAVE_'+datatype, 1) self.popLanguage() else: @@ -446,7 +456,7 @@ def configureIO(self): - Define HAVE_MPIIO if they are present - Some older MPI 1 implementations are missing these''' # MSWIN has buggy MPI IO - if 'HAVE_LIBMSMPI' in self.defines: return + if 'HAVE_MSMPI' in self.defines: return oldFlags = self.compilers.CPPFLAGS oldLibs = self.compilers.LIBS self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include) @@ -465,8 +475,8 @@ def configureIO(self): self.compilers.CPPFLAGS = oldFlags return - def checkMPICHorOpenMPI(self): - '''Determine if MPICH_NUMVERSION or OMPI_MAJOR_VERSION exist in mpi.h + def checkMPIDistro(self): + '''Determine if MPICH_NUMVERSION, OMPI_MAJOR_VERSION or MSMPI_VER exist in mpi.h Used for consistency checking of MPI installation at compile time''' import re HASHLINESPACE = ' *(?:\n#.*\n *)*' @@ -502,7 +512,7 @@ def checkMPICHorOpenMPI(self): self.compilers.CPPFLAGS = oldFlags self.mpi_pkg_version = MPI_VER+'\n' return - + # IBM Spectrum MPI is derived from OpenMPI, we do not yet have specific tests for it # https://www.ibm.com/us-en/marketplace/spectrum-mpi openmpi_test = '#include \nint ompi_major = OMPI_MAJOR_VERSION;\nint ompi_minor = OMPI_MINOR_VERSION;\nint ompi_release = OMPI_RELEASE_VERSION;\n' @@ -517,10 +527,32 @@ def checkMPICHorOpenMPI(self): self.addDefine('HAVE_OMPI_MINOR_VERSION',ompi_minor_version) self.addDefine('HAVE_OMPI_RELEASE_VERSION',ompi_release_version) self.ompi_major_version = ompi_major_version + self.ompi_version = tuple([int(i) for i in [ompi_major_version,ompi_minor_version,ompi_release_version]]) self.mpi_pkg_version = ' OMPI_VERSION: '+ompi_major_version+'.'+ompi_minor_version+'.'+ompi_release_version+'\n' + MPI_VER = ' OMPI_VERSION: '+ompi_major_version+'.'+ompi_minor_version+'.'+ompi_release_version except: self.logPrint('Unable to parse OpenMPI version from header. 
Probably a buggy preprocessor') - self.compilers.CPPFLAGS = oldFlags + if MPI_VER: + self.compilers.CPPFLAGS = oldFlags + self.mpi_pkg_version = MPI_VER+'\n' + return + + msmpi_test = '#include \n#define xstr(s) str(s)\n#define str(s) #s\n#if defined(MSMPI_VER)\nchar msmpi_hex[] = xstr(MSMPI_VER);\n#else\n#error not MSMPI\n#endif\n' + if self.checkCompile(msmpi_test): + buf = self.outputPreprocess(msmpi_test) + msmpi_version = 'unknown' + self.addDefine('HAVE_MSMPI',1) # flag we have MSMPI since we need to disable broken components + try: + msmpi_version = re.compile('\nchar msmpi_hex\[\] = '+HASHLINESPACE+'\"([a-zA-Z0-9_]*)\"'+HASHLINESPACE+';').search(buf).group(1) + MPI_VER = ' MSMPI_VERSION: '+msmpi_version + self.addDefine('HAVE_MSMPI_VERSION',msmpi_version) + except: + self.logPrint('Unable to parse MSMPI version from header. Probably a buggy preprocessor') + if MPI_VER: + self.compilers.CPPFLAGS = oldFlags + self.mpi_pkg_version = MPI_VER+'\n' + return + return def findMPIInc(self): @@ -588,7 +620,10 @@ def configureLibrary(self): if 'with-'+self.package+'-shared' in self.argDB: self.argDB['with-'+self.package] = 1 config.package.Package.configureLibrary(self) - self.executeTest(self.checkMPICHorOpenMPI) + if self.setCompilers.usedMPICompilers: + if 'with-mpi-include' in self.argDB: raise RuntimeError('Do not use --with-mpi-include when using MPI compiler wrappers') + if 'with-mpi-lib' in self.argDB: raise RuntimeError('Do not use --with-mpi-lib when using MPI compiler wrappers') + self.executeTest(self.checkMPIDistro) if any(x in platform.processor() for x in ['i386','x86','i86pc']) and config.setCompilers.Configure.isSolaris(self.log) and hasattr(self, 'mpich_numversion') and int(self.mpich_numversion) >= 30301300: # this is only needed if MPICH/HWLOC were compiled with optimization self.logWrite('Setting environmental variable to work around buggy HWLOC\nhttps://github.com/open-mpi/hwloc/issues/290\n') @@ -596,16 +631,16 @@ def configureLibrary(self): self.addDefine('HAVE_HWLOC_SOLARIS_BUG',1) self.logPrintBox('***** WARNING: This MPI implementation may have a bug in it that causes programs to hang.\n\ You may need to set the environmental variable HWLOC_COMPONENTS to -x86 to prevent such hangs. 
warning message *****') - self.executeTest(self.configureMPI2) - self.executeTest(self.configureMPI3) #depends on checkMPICHorOpenMPI for self.mpich_numversion + self.executeTest(self.configureMPI2) #depends on checkMPIDistro + self.executeTest(self.configureMPI3) #depends on checkMPIDistro + self.executeTest(self.configureMPIEXEC) self.executeTest(self.configureMPITypes) self.executeTest(self.SGIMPICheck) self.executeTest(self.CxxMPICheck) self.executeTest(self.FortranMPICheck) - self.executeTest(self.configureIO) + self.executeTest(self.configureIO) #depends on checkMPIDistro self.executeTest(self.findMPIInc) self.executeTest(self.PetscArchMPICheck) - self.executeTest(self.configureMPIEXEC) funcs = '''MPI_Type_get_envelope MPI_Type_dup MPI_Init_thread MPI_Iallreduce MPI_Ibarrier MPI_Finalized MPI_Exscan MPI_Reduce_scatter MPI_Reduce_scatter_block'''.split() found, missing = self.libraries.checkClassify(self.dlib, funcs) for f in found: diff --git a/config/BuildSystem/config/packages/MPICH.py b/config/BuildSystem/config/packages/MPICH.py index fc26f4b3557..16c968da8ec 100644 --- a/config/BuildSystem/config/packages/MPICH.py +++ b/config/BuildSystem/config/packages/MPICH.py @@ -4,8 +4,8 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.download = ['https://www.mpich.org/static/downloads/3.3.1/mpich-3.3.1.tar.gz', - 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/mpich-3.3.1.tar.gz'] + self.download = ['https://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz', + 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/mpich-3.3.2.tar.gz'] self.download_31 = ['http://www.mpich.org/static/downloads/3.1/mpich-3.1.tar.gz', 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/mpich-3.1.tar.gz'] self.downloaddirnames = ['mpich'] diff --git a/config/BuildSystem/config/packages/MUMPS.py b/config/BuildSystem/config/packages/MUMPS.py index 216d62d6e90..1d6be6034be 100644 --- a/config/BuildSystem/config/packages/MUMPS.py +++ b/config/BuildSystem/config/packages/MUMPS.py @@ -16,6 +16,7 @@ def __init__(self, framework): self.functions = ['dmumps_c'] self.includes = ['dmumps_c.h'] # + self.fc = 1 # Mumps does NOT work with 64 bit integers without a huge number of hacks we ain't making self.precisions = ['single','double'] self.requires32bitint = 1; # 1 means that the package will not work with 64 bit integers @@ -66,8 +67,6 @@ def Install(self): self.usesopenmp = 'yes' # use OMP_NUM_THREADS to control the number of threads used - if not hasattr(self.compilers, 'FC'): - raise RuntimeError('Cannot install '+self.name+' without Fortran, make sure you do NOT have --with-fc=0') if not self.fortran.FortranDefineCompilerOption: raise RuntimeError('Fortran compiler cannot handle preprocessing directives from command line.') g = open(os.path.join(self.packageDir,'Makefile.inc'),'w') @@ -152,7 +151,7 @@ def Install(self): g.close() if self.installNeeded('Makefile.inc'): try: - output1,err1,ret1 = config.package.Package.executeShellCommand('make clean', cwd=self.packageDir, timeout=5, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand('make clean', cwd=self.packageDir, timeout=60, log = self.log) except RuntimeError as e: pass try: @@ -163,12 +162,12 @@ def Install(self): self.logPrintBox('Installing Mumps; this may take several minutes') self.installDirProvider.printSudoPasswordMessage() output,err,ret = config.package.Package.executeShellCommandSeq( - [[self.installSudo+'mkdir', '-p', libDir, 
includeDir], + [self.installSudo+'mkdir -p '+libDir+' '+includeDir, self.installSudo+'cp -f lib/*.* '+libDir+'/.', self.installSudo+'cp -f include/*.* '+includeDir+'/.' - ], cwd=self.packageDir, timeout=50, log = self.log) + ], cwd=self.packageDir, timeout=60, log = self.log) if self.argDB['with-mumps-serial']: - output,err,ret = config.package.Package.executeShellCommand([self.installSudo+'cp', '-f', 'libseq/libmpiseq.a', libDir+'/.'], cwd=self.packageDir, timeout=25, log = self.log) + output,err,ret = config.package.Package.executeShellCommand([self.installSudo+'cp', '-f', 'libseq/libmpiseq.a', libDir+'/.'], cwd=self.packageDir, timeout=60, log = self.log) except RuntimeError as e: self.logPrint('Error running make on MUMPS: '+str(e)) raise RuntimeError('Error running make on MUMPS') diff --git a/config/BuildSystem/config/packages/OpenMPI.py b/config/BuildSystem/config/packages/OpenMPI.py index eae11fb0494..9b1128a4853 100644 --- a/config/BuildSystem/config/packages/OpenMPI.py +++ b/config/BuildSystem/config/packages/OpenMPI.py @@ -28,6 +28,7 @@ def formGNUConfigureArgs(self): args.append('--disable-mpi-f90') args.append('F77=""') args.append('FC=""') + args.append('--enable-mpi-fortran=no') if not self.argDB['with-shared-libraries']: args.append('--enable-shared=no') args.append('--enable-static=yes') diff --git a/config/BuildSystem/config/packages/PTScotch.py b/config/BuildSystem/config/packages/PTScotch.py index 95691225907..3f857672d3f 100644 --- a/config/BuildSystem/config/packages/PTScotch.py +++ b/config/BuildSystem/config/packages/PTScotch.py @@ -3,10 +3,10 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.version = '6.0.8' + self.version = '6.0.9' self.versionname = 'SCOTCH_VERSION.SCOTCH_RELEASE.SCOTCH_PATCHLEVEL' self.gitcommit = 'v'+self.version - self.download = ['git:https://gitlab.inria.fr/scotch/scotch', + self.download = ['git://https://gitlab.inria.fr/scotch/scotch.git', 'https://gitlab.inria.fr/scotch/scotch/-/archive/'+self.gitcommit+'/scotch-'+self.gitcommit+'.tar.gz', 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/scotch-'+self.gitcommit+'.tar.gz'] self.downloaddirnames = ['scotch','petsc-pkg-scotch'] @@ -23,7 +23,8 @@ def setupDependencies(self, framework): self.mathlib = framework.require('config.packages.mathlib',self) self.pthread = framework.require('config.packages.pthread',self) self.zlib = framework.require('config.packages.zlib',self) - self.deps = [self.mpi, self.mathlib] + self.regex = framework.require('config.packages.regex',self) + self.deps = [self.mpi,self.mathlib,self.regex] self.odeps = [self.pthread,self.zlib] return @@ -55,12 +56,17 @@ def Install(self): # Building cflags/ldflags self.cflags = self.removeWarningFlags(self.setCompilers.getCompilerFlags())+' '+self.headers.toString(self.mpi.include) + functions = self.framework.require('config.functions', self) + if not functions.haveFunction('FORK') and not functions.haveFunction('_PIPE'): + raise RuntimeError('Error building PTScotch: no pipe function') ldflags = self.libraries.toString(self.dlib) if self.zlib.found: self.cflags = self.cflags + ' -DCOMMON_FILE_COMPRESS_GZ' # OSX does not have pthread_barrier_destroy if self.pthread.found and self.pthread.pthread_barrier: self.cflags = self.cflags + ' -DCOMMON_PTHREAD' + if functions.haveFunction('_PIPE'): + self.cflags = self.cflags + ' -D\'pipe(pfds)=_pipe(pfds,1024,0x8000)\'' if self.libraries.add('-lrt','timer_create'): ldflags += ' -lrt' self.cflags = 
self.cflags + ' -DCOMMON_RANDOM_FIXED_SEED' # do not use -DSCOTCH_PTHREAD because requires MPI built for threads. @@ -110,7 +116,7 @@ def Install(self): includeDir = os.path.join(self.installDir, self.includedir) self.logPrintBox('Installing PTScotch; this may take several minutes') self.installDirProvider.printSudoPasswordMessage() - output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,includeDir)+' && '+self.installSudo+'mkdir -p '+os.path.join(self.installDir,self.libdir)+' && cd '+self.packageDir+' && '+self.installSudo+'cp -f lib/*.a '+libDir+'/. && '+self.installSudo+' cp -f include/*.h '+includeDir+'/.', timeout=25, log = self.log) + output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,includeDir)+' && '+self.installSudo+'mkdir -p '+os.path.join(self.installDir,self.libdir)+' && cd '+self.packageDir+' && '+self.installSudo+'cp -f lib/*.a '+libDir+'/. && '+self.installSudo+' cp -f include/*.h '+includeDir+'/.', timeout=60, log = self.log) self.postInstall(output+err,os.path.join('src','Makefile.inc')) return self.installDir diff --git a/config/BuildSystem/config/packages/PaStiX.py b/config/BuildSystem/config/packages/PaStiX.py index 31f8fc4e843..a8f2fc35498 100644 --- a/config/BuildSystem/config/packages/PaStiX.py +++ b/config/BuildSystem/config/packages/PaStiX.py @@ -25,7 +25,12 @@ def setupDependencies(self, framework): self.indexTypes = framework.require('PETSc.options.indexTypes', self) self.scotch = framework.require('config.packages.PTScotch',self) self.mpi = framework.require('config.packages.MPI',self) - self.deps = [self.mpi,self.blasLapack, self.scotch] + self.pthread = framework.require('config.packages.pthread',self) + # PaStiX.py does not absolutely require hwloc, but it performs better with it and can fail (in ways not easily tested) without it + # https://gforge.inria.fr/forum/forum.php?thread_id=32824&forum_id=599&group_id=186 + # https://solverstack.gitlabpages.inria.fr/pastix/Bindings.html + self.hwloc = framework.require('config.packages.hwloc',self) + self.deps = [self.mpi, self.blasLapack, self.scotch, self.pthread, self.hwloc] return def Install(self): @@ -138,6 +143,8 @@ def Install(self): g.write('# Uncomment the following line if your MPI doesn\'t support MPI_Datatype correctly\n') g.write('#CCPASTIX := $(CCPASTIX) -DNO_MPI_TYPE\n') g.write('\n') + g.write('CCPASTIX := $(CCPASTIX) -DWITH_HWLOC '+self.headers.toString(self.hwloc.include)+'\n') + g.write('EXTRALIB := $(EXTRALIB) '+self.libraries.toString(self.hwloc.dlib)+'\n') g.write('###################################################################\n') g.write('# Options #\n') g.write('###################################################################\n') diff --git a/config/BuildSystem/config/packages/SuiteSparse.py b/config/BuildSystem/config/packages/SuiteSparse.py index 1b90d78582b..af8faf5dd30 100644 --- a/config/BuildSystem/config/packages/SuiteSparse.py +++ b/config/BuildSystem/config/packages/SuiteSparse.py @@ -3,36 +3,38 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self,framework) - self.version = '5.7.1' - self.versionname = 'UMFPACK_MAIN_VERSION.UMFPACK_SUB_VERSION.UMFPACK_SUBSUB_VERSION' - self.versioninclude = 'umfpack.h' - # Note that there is not SuitSparse version number in the code, the only version information is for UMFPACK - self.download = 
['http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.4.3.tar.gz', - 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/SuiteSparse-4.4.3.tar.gz'] + self.version = '5.6.0' + self.versioninclude = 'SuiteSparse_config.h' + self.versionname = 'SUITESPARSE_MAIN_VERSION.SUITESPARSE_SUB_VERSION.SUITESPARSE_SUBSUB_VERSION' + self.gitcommit = 'v'+self.version + self.download = ['git://https://github.com/DrTimothyAldenDavis/SuiteSparse','https://github.com/DrTimothyAldenDavis/SuiteSparse/archive/'+self.gitcommit+'.tar.gz'] self.liblist = [['libumfpack.a','libklu.a','libcholmod.a','libbtf.a','libccolamd.a','libcolamd.a','libcamd.a','libamd.a','libsuitesparseconfig.a'], ['libumfpack.a','libklu.a','libcholmod.a','libbtf.a','libccolamd.a','libcolamd.a','libcamd.a','libamd.a','libsuitesparseconfig.a','librt.a'], ['libumfpack.a','libklu.a','libcholmod.a','libbtf.a','libccolamd.a','libcolamd.a','libcamd.a','libamd.a','libmetis.a','libsuitesparseconfig.a'], ['libumfpack.a','libklu.a','libcholmod.a','libbtf.a','libccolamd.a','libcolamd.a','libcamd.a','libamd.a','libmetis.a','libsuitesparseconfig.a','librt.a']] - self.functions = ['umfpack_dl_wsolve','cholmod_l_solve','klu_l_solve'] - self.includes = ['umfpack.h','cholmod.h','klu.h'] - self.hastests = 1 - self.hastestsdatafiles= 1 - self.precisions = ['double'] + self.functions = ['umfpack_dl_wsolve','cholmod_l_solve','klu_l_solve'] + self.includes = ['umfpack.h','cholmod.h','klu.h'] + self.hastests = 1 + self.hastestsdatafiles = 1 + self.precisions = ['double'] return def setupHelp(self, help): import nargs config.package.Package.setupHelp(self, help) - help.addArgument('SUITESPARSE', '-download-suitesparse-gpu=', nargs.ArgBool(None, 0, 'Install SuiteSparse to use GPUs')) - + # This is set to 1 since CHOLMOD is broken with GPU support (does not even compile with icc on my workstation) + # see https://github.com/DrTimothyAldenDavis/SuiteSparse/issues/5 + help.addArgument('SUITESPARSE', '-download-suitesparse-disablegpu=', nargs.ArgBool(None, 1, 'Force disabling SuiteSparse/CHOLMOD use of GPUs')) + def setupDependencies(self, framework): config.package.Package.setupDependencies(self, framework) self.blasLapack = framework.require('config.packages.BlasLapack',self) self.mathlib = framework.require('config.packages.mathlib',self) self.deps = [self.blasLapack,self.mathlib] - if self.argDB['download-suitesparse-gpu']: - self.cuda = framework.require('config.packages.cuda',self) - self.deps.append(self.cuda) + self.cuda = framework.require('config.packages.cuda',self) + self.openmp = framework.require('config.packages.openmp',self) + self.metis = framework.require('config.packages.metis',self) + self.odeps = [self.openmp,self.cuda,self.metis] return def Install(self): @@ -41,36 +43,62 @@ def Install(self): if not self.make.haveGNUMake: raise RuntimeError('SuiteSparse buildtools require GNUMake. Use --with-make=gmake or --download-make') - mkfile = 'SuiteSparse_config/SuiteSparse_config.mk' - g = open(os.path.join(self.packageDir, mkfile), 'w') + # Use CHOLMOD_OMP_NUM_THREADS to control the number of threads + if self.openmp.found: + self.usesopenmp = 'yes' + + # From v4.5.0, SuiteSparse_config/SuiteSparse_config.mk is not modifiable anymore. 
Instead, we must override make variables + args=[] + self.setCompilers.pushLanguage('C') - g.write('CC = '+self.setCompilers.getCompiler()+'\n') - g.write('CF = '+self.removeWarningFlags(self.setCompilers.getCompilerFlags())+'\n') + args.append('CC="'+self.setCompilers.getCompiler()+'"') + cflags=self.removeWarningFlags(self.setCompilers.getCompilerFlags()) + ldflags=self.setCompilers.getDynamicLinkerFlags() + ldflags+=self.setCompilers.LDFLAGS + # SuiteSparse 5.6.0 makefile has a bug in how it treats LDFLAGS (not using the override directive) + ldflags+=" -L\$(INSTALL_LIB)" + self.setCompilers.popLanguage() + + # CHOLMOD may build the shared library with CXX + self.setCompilers.pushLanguage('Cxx') + args.append('CXX="'+self.setCompilers.getCompiler()+'"') self.setCompilers.popLanguage() - g.write('MAKE ='+self.make.make+'\n') - g.write('RANLIB = '+self.setCompilers.RANLIB+'\n') - g.write('ARCHIVE = '+self.setCompilers.AR+' '+self.setCompilers.AR_FLAGS+'\n') - g.write('RM = '+self.programs.RM+'\n') - g.write('MV = '+self.programs.mv+'\n') - g.write('CP = '+self.programs.cp+'\n') - g.write('CLEAN = *.o *.obj *.ln *.bb *.bbg *.da *.tcov *.gcov gmon.out *.bak *.d\n') - g.write('INSTALL_LIB = ' + self.libDir + '\n') - g.write('INSTALL_INCLUDE = ' + self.includeDir + '\n') + + args.append('MAKE="'+self.make.make+'"') + args.append('RANLIB="'+self.setCompilers.RANLIB+'"') + args.append('ARCHIVE="'+self.setCompilers.AR+' '+self.setCompilers.AR_FLAGS+'"') + args.append('RM="'+self.programs.RM+'"') + args.append('MV="'+self.programs.mv+'"') + args.append('CP="'+self.programs.cp+'"') + args.append('LDFLAGS="'+ldflags+'"') + args.append('INSTALL_LIB='+self.libDir) + args.append('INSTALL_INCLUDE='+self.includeDir) + args.append('INSTALL_DOC='+self.installDir+'/share/doc/suitesparse') + args.append('BLAS="'+self.libraries.toString(self.blasLapack.dlib)+'"') + args.append('LAPACK="'+self.libraries.toString(self.blasLapack.dlib)+'"') if self.blasLapack.mangling == 'underscore': flg = '' elif self.blasLapack.mangling == 'caps': flg = '-DBLAS_CAPS_DOES_NOT_WORK' else: flg = '-DBLAS_NO_UNDERSCORE' - g.write('UMFPACK_CONFIG = '+flg+'\n') - if self.argDB['download-suitesparse-gpu']: - if self.defaultIndexSize == 32: - raise RuntimeError('SuiteSparse only uses GPUs with --with-64-bit-indices') - if not hasattr(self.compilers, 'CUDAC'): - raise RuntimeError('Run with --with-cuda to use allow SuiteSparse to compile using CUDA') - # code taken from cuda.py + args.append('UMFPACK_CONFIG='+flg) + + if self.metis.found: + # '-I' is added automatically inside SuiteSparse_config.mk + metisinc = self.headers.toString(self.metis.include).replace('-I','',1) + args.append('MY_METIS_INC="'+metisinc+'"') + args.append('MY_METIS_LIB="'+self.libraries.toString(self.metis.dlib)+'"') + else: + flg+=' -DNPARTITION' + + # CUDA support for 64bit indices installations only + if self.cuda.found and self.defaultIndexSize == 64 and not self.argDB['download-suitesparse-disablegpu']: + self.logPrintBox('SuiteSparse: Enabling support for CHOLMOD on GPUs (it can be disabled with --download-suitesparse-disablegpu=1)') + args.append('CF="'+cflags+' -D_GNU_SOURCE"') # The GPU code branches use feenableexcept including fenv.h only self.pushLanguage('CUDA') petscNvcc = self.getCompiler() + cudaFlags = self.getCompilerFlags() self.popLanguage() self.getExecutable(petscNvcc,getFullPath=1,resultName='systemNvcc') if hasattr(self,'systemNvcc'): @@ -78,46 +106,50 @@ def Install(self): cudaDir = os.path.split(nvccDir)[0] else: raise 
RuntimeError('Unable to locate CUDA NVCC compiler') - g.write('CUDA_ROOT = '+cudaDir+'\n') - g.write('GPU_BLAS_PATH = $(CUDA_ROOT)\n') - g.write('GPU_CONFIG = -I$(CUDA_ROOT)/include -DGPU_BLAS\n') -# GPU_CONFIG = -I$(CUDA_ROOT)/include -DGPU_BLAS -DCHOLMOD_OMP_NUM_THREADS=10 - g.write('CUDA_PATH = $(CUDA_ROOT)\n') - g.write('CUDART_LIB = $(CUDA_ROOT)/lib64/libcudart.so\n') - g.write('CUBLAS_LIB = $(CUDA_ROOT)/lib64/libcublas.so\n') - g.write('CUDA_INC_PATH = $(CUDA_ROOT)/include/\n') - g.write('NV20 = -arch=sm_20 -Xcompiler -fPIC\n') - g.write('NV30 = -arch=sm_30 -Xcompiler -fPIC\n') - g.write('NV35 = -arch=sm_35 -Xcompiler -fPIC\n') - g.write('NVCC = $(CUDA_ROOT)/bin/nvcc\n') - g.write('NVCCFLAGS = -O3 -gencode=arch=compute_20,code=sm_20 -gencode=arch=compute_30,code=sm_30 -gencode=arch=compute_35,code=sm_35 -Xcompiler -fPIC\n') - g.write('CHOLMOD_CONFIG = '+flg+' -DNPARTITION $(GPU_CONFIG)\n') + args.append('CUDA_ROOT='+cudaDir) + args.append('GPU_BLAS_PATH='+cudaDir) + args.append('CUDA_PATH='+cudaDir) + args.append('CUDART_LIB='+cudaDir+'/lib64/libcudart.so') + args.append('CUBLAS_LIB='+cudaDir+'/lib64/libcublas.so') + args.append('CUDA_INC_PATH='+cudaDir+'/include') + args.append('NVCCFLAGS="'+cudaFlags+' -Xcompiler -fPIC"') + args.append('CHOLMOD_CONFIG="'+flg+' -DGPU_BLAS"') self.addDefine('USE_SUITESPARSE_GPU',1) else: - g.write('CHOLMOD_CONFIG = '+flg+' -DNPARTITION\n') - g.close() + if self.cuda.found and not self.argDB['download-suitesparse-disablegpu']: + self.logPrintBox('SuiteSparse: Cannot enable support for GPUs. SuiteSparse only uses GPUs with --with-64-bit-indices') + args.append('CF="'+cflags+'"') + args.append('CHOLMOD_CONFIG="'+flg+'"') + args.append('CUDA=no') + + args = ' '.join(args) + conffile = os.path.join(self.packageDir,self.package+'.petscconf') + fd = open(conffile, 'w') + fd.write(args) + fd.close() - if self.installNeeded(mkfile): + if self.installNeeded(conffile): try: self.logPrintBox('Compiling and installing SuiteSparse; this may take several minutes') self.installDirProvider.printSudoPasswordMessage() - # SuiteSparse install does not create missing directories, hence we need to create them first + makewithargs=self.make.make+' '+args + # SuiteSparse install may not create missing directories, hence we need to create them first output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,'lib'), timeout=2500, log=self.log) output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,'include'), timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/SuiteSparse_config && '+self.make.make+' && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/AMD && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/COLAMD && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/BTF && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = 
config.package.Package.executeShellCommand('cd '+self.packageDir+'/CAMD && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/CCOLAMD && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/CHOLMOD && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/UMFPACK && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/KLU && '+self.make.make+' library && '+self.installSudo+self.make.make+' install && '+self.make.make+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/SuiteSparse_config && '+makewithargs+' clean && '+makewithargs+' && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/AMD && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/COLAMD && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/BTF && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/CAMD && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/CCOLAMD && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/CHOLMOD && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/UMFPACK && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+'/KLU && '+makewithargs+' clean && '+makewithargs+' library && '+self.installSudo+makewithargs+' install && '+makewithargs+' clean', timeout=2500, log=self.log) self.addDefine('HAVE_SUITESPARSE',1) except RuntimeError as e: raise RuntimeError('Error running make on SuiteSparse: '+str(e)) - self.postInstall(output+err, mkfile) + self.postInstall(output+err, conffile) 
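Note on the SuiteSparse change in this hunk: instead of writing out SuiteSparse_config/SuiteSparse_config.mk, the configure options are now collected as NAME=value strings and appended to every make command, so they override the defaults in SuiteSparse_config.mk (possible from SuiteSparse 4.5.0 onward, where the file is no longer meant to be edited). A minimal, hypothetical sketch of that pattern outside the BuildSystem classes; the helper name, paths, and defaults below are illustrative only, not part of the patch:

    import subprocess

    def build_suitesparse_module(module_dir, make='make', cc='cc',
                                 install_lib='/tmp/suitesparse/lib',
                                 install_include='/tmp/suitesparse/include'):
        # NAME=value arguments on the make command line override the corresponding
        # variables that SuiteSparse_config.mk would otherwise define.
        overrides = ['CC=' + cc,
                     'INSTALL_LIB=' + install_lib,
                     'INSTALL_INCLUDE=' + install_include]
        # Same target sequence as the patched Install(): clean, build, install, clean.
        for target in ('clean', 'library', 'install', 'clean'):
            subprocess.check_call([make, target] + overrides, cwd=module_dir)
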
return self.installDir def consistencyChecks(self): diff --git a/config/BuildSystem/config/packages/SuperLU_DIST.py b/config/BuildSystem/config/packages/SuperLU_DIST.py index c4d74342718..f7508484b88 100644 --- a/config/BuildSystem/config/packages/SuperLU_DIST.py +++ b/config/BuildSystem/config/packages/SuperLU_DIST.py @@ -4,7 +4,7 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.version = '6.1.1' + self.version = '6.2.0' self.versionname = 'SUPERLU_DIST_MAJOR_VERSION.SUPERLU_DIST_MINOR_VERSION.SUPERLU_DIST_PATCH_VERSION' self.gitcommit = 'v'+self.version self.download = ['git://https://github.com/xiaoyeli/superlu_dist','https://github.com/xiaoyeli/superlu_dist/archive/'+self.gitcommit+'.tar.gz'] diff --git a/config/BuildSystem/config/packages/Triangle.py b/config/BuildSystem/config/packages/Triangle.py index c286d011d33..fa24bcbda33 100644 --- a/config/BuildSystem/config/packages/Triangle.py +++ b/config/BuildSystem/config/packages/Triangle.py @@ -116,8 +116,8 @@ def Install(self): self.installDirProvider.printSudoPasswordMessage() output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,'lib'), timeout=2500, log=self.log) output,err,ret = config.package.Package.executeShellCommand(self.installSudo+'mkdir -p '+os.path.join(self.installDir,'include'), timeout=2500, log=self.log) - output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir,'libtriangle.'+self.setCompilers.AR_LIB_SUFFIX)+' '+os.path.join(self.installDir,'lib'), timeout=5, log = self.log) - output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir, 'src', 'triangle.h')+' '+includeDir, timeout=5, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir,'libtriangle.'+self.setCompilers.AR_LIB_SUFFIX)+' '+os.path.join(self.installDir,'lib'), timeout=60, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir, 'src', 'triangle.h')+' '+includeDir, timeout=60, log = self.log) self.postInstall(output1+err1+output2+err2,'make.inc') return self.installDir diff --git a/config/BuildSystem/config/packages/Trilinos.py b/config/BuildSystem/config/packages/Trilinos.py index dbd796bb4a0..5caf51a1fb4 100644 --- a/config/BuildSystem/config/packages/Trilinos.py +++ b/config/BuildSystem/config/packages/Trilinos.py @@ -289,7 +289,7 @@ def generateLibList(self,dir): fd.write('listlibs:\n\t-@echo ${Trilinos_LIBRARIES}') fd.close() try: - output1,err1,ret1 = config.package.Package.executeShellCommand('make -f simplemake listlibs', timeout=25, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand('make -f simplemake listlibs', timeout=60, log = self.log) os.unlink('simplemake') except RuntimeError as e: raise RuntimeError('Unable to generate list of Trilinos Libraries') diff --git a/config/BuildSystem/config/packages/adblaslapack.py b/config/BuildSystem/config/packages/adblaslapack.py index 154f6129996..d6e2825e242 100644 --- a/config/BuildSystem/config/packages/adblaslapack.py +++ b/config/BuildSystem/config/packages/adblaslapack.py @@ -35,8 +35,8 @@ def Install(self): if self.installNeeded('Makefile.inc'): self.logPrintBox('Configuring, compiling and installing adblaslapack; this may take several 
seconds') self.installDirProvider.printSudoPasswordMessage() - output1,err1,ret1 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'src')+' && make clean all ',timeout=50, log = self.log) - output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'src')+' && '+self.installSudo+' cp -f libadblaslapack.a '+os.path.join(self.installDir,'lib'),timeout=20, log = self.log) - output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'include')+' && '+self.installSudo+' cp -f adblaslapack.hpp '+os.path.join(self.installDir,'include'),timeout=20, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'src')+' && make clean all ',timeout=60, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'src')+' && '+self.installSudo+' cp -f libadblaslapack.a '+os.path.join(self.installDir,'lib'),timeout=60, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+os.path.join(self.packageDir,'include')+' && '+self.installSudo+' cp -f adblaslapack.hpp '+os.path.join(self.installDir,'include'),timeout=60, log = self.log) self.postInstall(output1+err1+output2+err2,'Makefile.inc') return self.installDir diff --git a/config/BuildSystem/config/packages/alquimia.py b/config/BuildSystem/config/packages/alquimia.py index 138413f94e8..7c9499e6fb9 100644 --- a/config/BuildSystem/config/packages/alquimia.py +++ b/config/BuildSystem/config/packages/alquimia.py @@ -87,7 +87,7 @@ def postProcess(self): if not self.argDB['with-batch']: try: self.logPrintBox('Testing Alquimia; this may take several minutes') - output,err,ret = config.package.CMakePackage.executeShellCommand('cd '+os.path.join(self.packageDir,'petsc-build')+' && '+self.make.make+' test_install',timeout=50, log = self.log) + output,err,ret = config.package.CMakePackage.executeShellCommand('cd '+os.path.join(self.packageDir,'petsc-build')+' && '+self.make.make+' test_install',timeout=60, log = self.log) output = output+err self.log.write(output) if output.find('Failure') > -1: diff --git a/config/BuildSystem/config/packages/amanzi.py b/config/BuildSystem/config/packages/amanzi.py index 238fa9c6ed2..c5dc57c8b83 100644 --- a/config/BuildSystem/config/packages/amanzi.py +++ b/config/BuildSystem/config/packages/amanzi.py @@ -105,7 +105,7 @@ def generateLibList(self,dir): fd.write('listlibs:\n\t-@echo ${Trilinos_LIBRARIES}') fd.close() try: - output1,err1,ret1 = config.package.Package.executeShellCommand('make -f simplemake listlibs', timeout=25, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand('make -f simplemake listlibs', timeout=60, log = self.log) os.unlink('simplemake') except RuntimeError as e: raise RuntimeError('Unable to generate list of Trilinos Libraries') diff --git a/config/BuildSystem/config/packages/cgns.py b/config/BuildSystem/config/packages/cgns.py old mode 100755 new mode 100644 diff --git a/config/BuildSystem/config/packages/ctetgen.py b/config/BuildSystem/config/packages/ctetgen.py index e40bf230aee..2ca16a7fa59 100644 --- a/config/BuildSystem/config/packages/ctetgen.py +++ b/config/BuildSystem/config/packages/ctetgen.py @@ -57,8 +57,9 @@ def postProcess(self): output,err,ret = config.package.GNUPackage.executeShellCommand(self.make.make+' PETSC_DIR='+self.petscdir.dir+' clean lib PCC_FLAGS="'+cflags+'"',timeout=1000, log = self.log, 
cwd=self.packageDir) self.log.write(output+err) self.logPrintBox('Installing Ctetgen; this may take several minutes') + # TODO: This message should not be printed if ctetgen is install in PETSc arch directory; need self.printSudoPasswordMessage() defined in package.py self.installDirProvider.printSudoPasswordMessage(1) - output,err,ret = config.package.GNUPackage.executeShellCommand(self.installDirProvider.installSudo+self.make.make+' PETSC_DIR='+self.petscdir.dir+' prefix='+self.installDir+' install-ctetgen',timeout=1000, log = self.log, cwd=self.packageDir) + output,err,ret = config.package.GNUPackage.executeShellCommand(self.installSudo+self.make.make+' PETSC_DIR='+self.petscdir.dir+' prefix='+self.installDir+' install-ctetgen',timeout=1000, log = self.log, cwd=self.packageDir) self.log.write(output+err) except RuntimeError as e: raise RuntimeError('Error running make on Ctetgen: '+str(e)) diff --git a/config/BuildSystem/config/packages/cxxlibs.py b/config/BuildSystem/config/packages/cxxlibs.py index 4edc03547a9..21bc36c4bbb 100644 --- a/config/BuildSystem/config/packages/cxxlibs.py +++ b/config/BuildSystem/config/packages/cxxlibs.py @@ -3,6 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) + self.lookforbydefault = 1 return def __str__(self): diff --git a/config/BuildSystem/config/packages/egads.py b/config/BuildSystem/config/packages/egads.py new file mode 100644 index 00000000000..30eb684484a --- /dev/null +++ b/config/BuildSystem/config/packages/egads.py @@ -0,0 +1,129 @@ +import config.package +import os + +class Configure(config.package.GNUPackage): + def __init__(self, framework): + config.package.GNUPackage.__init__(self, framework) + self.gitcommit = '09fe8f0fe689bea60354eb3e9977fd8452c05573' + self.download = ['git://https://github.com/bldenton/EGADSlite.git'] + self.functions = [] + self.includes = [] + self.hastests = 1 + return + + def setupDependencies(self, framework): + config.package.GNUPackage.setupDependencies(self, framework) + return + + def createMakefile(self): + makeinc = os.path.join(self.packageDir, 'make.inc') + g = open(makeinc,'w') + g.write(''' +include $(PETSC_DIR)/lib/petsc/conf/variables + +CFLAGS = -DLITE -Iinclude + +INCDIR = include +SRCDIR = src +LIBBASE = libegadslite +LIBNAME = ${LIBBASE}.${AR_LIB_SUFFIX} +LIBSRC.h = $(INCDIR)/egads.h $(INCDIR)/egadsErrors.h $(INCDIR)/egadsInternals.h $(INCDIR)/egadsTris.h \ + $(INCDIR)/egadsTypes.h $(INCDIR)/emp.h $(INCDIR)/liteClasses.h +LIBSRC.c = $(SRCDIR)/liteAttrs.c $(SRCDIR)/liteBase.c $(SRCDIR)/liteGeom.c $(SRCDIR)/liteImport.c \ + $(SRCDIR)/liteMemory.c $(SRCDIR)/liteTopo.c $(SRCDIR)/egadsTess.c $(SRCDIR)/egadsTris.c \ + $(SRCDIR)/egadsQuads.c $(SRCDIR)/egadsTessInp.c $(SRCDIR)/egadsRobust.c \ + $(SRCDIR)/emp.c $(SRCDIR)/evaluate.c $(SRCDIR)/rational.c +LIBSRC.o = $(LIBSRC.c:%.c=%.o) + +lib : $(LIBNAME) ; + +$(LIBSRC.o) : $(LIBSRC.h) + +define ARCHIVE_RECIPE_WIN32FE_LIB + @$(RM) $@ $@.args + @cygpath -w $^ > $@.args + $(AR) $(AR_FLAGS) $@ @$@.args + @$(RM) $@.args +endef + +define ARCHIVE_RECIPE_DEFAULT + @$(RM) $@ + $(AR) $(AR_FLAGS) $@ $^ + $(RANLIB) $@ +endef + +$(LIBNAME) : $(LIBSRC.o) + $(if $(findstring win32fe lib,$(AR)),$(ARCHIVE_RECIPE_WIN32FE_LIB),$(ARCHIVE_RECIPE_DEFAULT)) + +COMPILE.c = $(CC) $(PCC_FLAGS) $(CFLAGS) $(CCPPFLAGS) $(TARGET_ARCH) -c + +# This is unusual; usually prefix would default to /usr/local +prefix ?= $(PETSC_DIR)/$(PETSC_ARCH) +libdir = $(prefix)/lib +includedir = $(prefix)/include +INSTALL = 
install +INSTALL_DATA = $(INSTALL) -m644 +MKDIR_P = mkdir -p + +install-egads: $(LIBNAME) + $(MKDIR_P) "$(DESTDIR)$(includedir)" "$(DESTDIR)$(libdir)" + $(INSTALL_DATA) $(LIBSRC.h) "$(DESTDIR)$(includedir)/" + $(INSTALL_DATA) $(LIBNAME) "$(DESTDIR)$(libdir)/" + +clean: + $(RM) $(LIBNAME) $(LIBSRC.o) + +.PHONY: lib clean install-egads + ''') + g.close() + return + + # the install is delayed until postProcess() since egads install requires PETSc to have created its build/makefiles before installing + # note that egads can (and is) built before PETSc is built. + def Install(self): + self.createMakefile() + return self.installDir + + def configureLibrary(self): + ''' Since egads cannot be built until after PETSc configure is complete we need to just assume the downloaded library will work''' + if 'with-egads' in self.framework.clArgDB: + raise RuntimeError('egads does not support --with-egads; only --download-egads') + if 'with-egads-dir' in self.framework.clArgDB: + self.egadsDir = self.framework.argDB['with-egads-dir'] + if 'with-egads-include' in self.framework.clArgDB: + raise RuntimeError('egads does not support --with-egads-include; only --download-egads') + if 'with-egads-lib' in self.framework.clArgDB: + raise RuntimeError('egads does not support --with-egads-lib; only --download-egads') + if 'with-egads-shared' in self.framework.clArgDB: + raise RuntimeError('egads does not support --with-egads-shared') + + if not hasattr(self,'egadsDir'): + self.checkDownload() + self.egadsDir = self.installDir + self.include = [os.path.join(self.egadsDir,'include')] + self.lib = [os.path.join(self.egadsDir,'lib','libegadslite.a')] + self.found = 1 + self.dlib = self.lib + if not hasattr(self.framework, 'packages'): + self.framework.packages = [] + self.framework.packages.append(self) + + def postProcess(self): + if not hasattr(self,'installDir'): + return + try: + self.logPrintBox('Compiling egads; this may take several minutes') + # uses the regular PETSc library builder and then moves result + # turn off any compiler optimizations as they may break egads + self.setCompilers.pushLanguage('C') + cflags = self.checkNoOptFlag()+' '+self.getSharedFlag(self.setCompilers.getCompilerFlags())+' '+self.getPointerSizeFlag(self.setCompilers.getCompilerFlags())+' '+self.getWindowsNonOptFlags(self.setCompilers.getCompilerFlags())+' '+self.getDebugFlags(self.setCompilers.getCompilerFlags()) + self.setCompilers.popLanguage() + output,err,ret = config.package.GNUPackage.executeShellCommand(self.make.make+' -f make.inc PETSC_DIR=' + self.petscdir.dir + ' clean lib PCC_FLAGS="' + cflags + '"', timeout=1000, log = self.log, cwd=self.packageDir) + self.log.write(output+err) + self.logPrintBox('Installing egads; this may take several minutes') + # TODO: This message should not be printed if egads is install in PETSc arch directory; need self.printSudoPasswordMessage() defined in package.py + self.installDirProvider.printSudoPasswordMessage(1) + output,err,ret = config.package.GNUPackage.executeShellCommand(self.installSudo+self.make.make+' -f make.inc PETSC_DIR='+self.petscdir.dir+' prefix='+self.installDir+' install-egads',timeout=1000, log = self.log, cwd=self.packageDir) + self.log.write(output+err) + except RuntimeError as e: + raise RuntimeError('Error running make on egads: '+str(e)) diff --git a/config/BuildSystem/config/packages/eigen.py b/config/BuildSystem/config/packages/eigen.py index 8bdea7f713a..fe9ff9ef866 100644 --- a/config/BuildSystem/config/packages/eigen.py +++ 
b/config/BuildSystem/config/packages/eigen.py @@ -4,14 +4,16 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): import os config.package.CMakePackage.__init__(self, framework) - self.download = ['hg://https://bitbucket.org/eigen/eigen/','https://bitbucket.org/eigen/eigen/get/3.3.7.tar.bz2'] - self.functions = [] - self.includes = ['Eigen/Core'] - self.liblist = [] - self.cxx = 1 - self.pkgname = 'eigen3' - self.includedir = os.path.join('include', 'eigen3') - self.useddirectly = 0 + self.version = '3.3.7' + self.gitcommit = self.version + self.download = ['git://https://gitlab.com/libeigen/eigen','https://gitlab.com/libeigen/eigen/-/archive/'+self.gitcommit+'/eigen-'+self.gitcommit+'.tar.gz'] + self.functions = [] + self.includes = ['Eigen/Core'] + self.liblist = [] + self.cxx = 1 + self.pkgname = 'eigen3' + self.includedir = os.path.join('include', 'eigen3') + self.useddirectly = 0 return def setupDependencies(self, framework): diff --git a/config/BuildSystem/config/packages/f2cblaslapack.py b/config/BuildSystem/config/packages/f2cblaslapack.py index 2f7e637d2cd..381fe840c73 100644 --- a/config/BuildSystem/config/packages/f2cblaslapack.py +++ b/config/BuildSystem/config/packages/f2cblaslapack.py @@ -90,7 +90,7 @@ def Install(self): try: self.logPrintBox('Installing F2CBLASLAPACK') self.installDirProvider.printSudoPasswordMessage() - output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.installSudo+'mkdir -p '+libdir+' && '+self.installSudo+'cp -f libf2cblas.'+self.setCompilers.AR_LIB_SUFFIX+' libf2clapack.'+self.setCompilers.AR_LIB_SUFFIX+' '+ libdir, timeout=30, log = self.log) + output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.installSudo+'mkdir -p '+libdir+' && '+self.installSudo+'cp -f libf2cblas.'+self.setCompilers.AR_LIB_SUFFIX+' libf2clapack.'+self.setCompilers.AR_LIB_SUFFIX+' '+ libdir, timeout=60, log = self.log) except RuntimeError as e: self.printLog('Error moving '+blasDir+' libraries: '+str(e)) raise RuntimeError('Error moving '+blasDir+' libraries') diff --git a/config/BuildSystem/config/packages/fblaslapack.py b/config/BuildSystem/config/packages/fblaslapack.py index 1f7391fc8a8..32d3b97b80f 100644 --- a/config/BuildSystem/config/packages/fblaslapack.py +++ b/config/BuildSystem/config/packages/fblaslapack.py @@ -3,8 +3,9 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = 'origin/barry/2019-08-22/fix-syntax-for-nag' + self.gitcommit = 'v3.4.2-p2' self.download = ['git://https://bitbucket.org/petsc/pkg-fblaslapack','https://bitbucket.org/petsc/pkg-fblaslapack/get/'+self.gitcommit+'.tar.gz'] + self.downloaddirnames = ['petsc-pkg-fblaslapack'] self.precisions = ['single','double'] self.downloadonWindows = 1 self.skippackagewithoptions = 1 diff --git a/config/BuildSystem/config/packages/flibs.py b/config/BuildSystem/config/packages/flibs.py index 85b1b92d2b7..c2c15c2473a 100644 --- a/config/BuildSystem/config/packages/flibs.py +++ b/config/BuildSystem/config/packages/flibs.py @@ -3,6 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) + self.lookforbydefault = 1 return def __str__(self): diff --git a/config/BuildSystem/config/packages/hpddm.py b/config/BuildSystem/config/packages/hpddm.py index 6b3f87963af..8cb9066100d 100644 --- a/config/BuildSystem/config/packages/hpddm.py +++ 
b/config/BuildSystem/config/packages/hpddm.py @@ -3,13 +3,13 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = '7d80961' #master sep-29-2019 + self.gitcommit = '7efbd32' #master feb-11-2020 self.download = ['git://https://github.com/hpddm/hpddm','https://github.com/hpddm/hpddm/archive/'+self.gitcommit+'.tar.gz'] - self.minversion = '2.0.0' + self.version = '2.0.3' + self.minversion = '2.0.3' # prior versions are not handling KSPHPDDM options properly self.versionname = 'HPDDM_VERSION' self.versioninclude = 'HPDDM_define.hpp' self.requirescxx11 = 1 - self.noMPIUni = 1 self.cxx = 1 self.functions = [] self.includes = ['HPDDM.hpp'] @@ -30,17 +30,15 @@ def setupDependencies(self, framework): self.mpi = framework.require('config.packages.MPI',self) self.blasLapack = framework.require('config.packages.BlasLapack',self) self.slepc = framework.require('config.packages.slepc',self) - self.odeps = [self.slepc] - self.deps = [self.mpi,self.blasLapack,self.cxxlibs,self.mathlib] + self.deps = [self.blasLapack,self.cxxlibs,self.mathlib] + self.odeps = [self.mpi,self.slepc] return def Install(self): import os - if not self.checkSharedLibrariesEnabled(): - raise RuntimeError('Shared libraries enabled needed to build HPDDM') - if self.framework.argDB['with-64-bit-blas-indices']: - raise RuntimeError('32-bit BLAS needed to build HPDDM') - buildDir = os.path.join(self.packageDir,'petsc-build') + if self.slepc.found and not self.checkSharedLibrariesEnabled(): + raise RuntimeError('Shared libraries enabled needed to build PCHPDDM') + buildDir = os.path.join(self.packageDir,'petsc-build') self.setCompilers.pushLanguage('Cxx') cxx = self.setCompilers.getCompiler() cxxflags = self.setCompilers.getCompilerFlags() @@ -56,18 +54,6 @@ def Install(self): incDir = os.path.join(prefix,'include') libDir = os.path.join(prefix,'lib') PETSC_OPT = self.headers.toStringNoDupes([os.path.join(PETSC_DIR,'include'),os.path.join(PETSC_DIR,PETSC_ARCH,'include')]) - # SLEPc dependency - ldflags = ' '.join(self.setCompilers.sharedLibraryFlags) - slepcbuilddep = '' - if self.slepc.found: - # how can we get the slepc lib? Eventually, we may want to use the variables from the framework - #cxxflags += self.headers.toStringNoDupes(self.slepc.dinclude) - #ldflags += self.libraries.toString(self.slepc.dlib) - dinclude = [incDir] - dlib = [os.path.join(libDir,'libslepc.'+self.setCompilers.sharedLibraryExt)] - cxxflags += ' '+self.headers.toStringNoDupes(dinclude) - ldflags += ' '+self.libraries.toString(dlib) - slepcbuilddep = 'slepc-install slepc-build' if self.installSudo: newuser = self.installSudo+' -u $${SUDO_USER} ' else: @@ -81,34 +67,54 @@ def Install(self): self.logPrintBox('Copying HPDDM; this may take several seconds') output,err,ret = config.package.Package.executeShellCommand(cpstr,timeout=100,log=self.log) self.log.write(output+err) - oldFlags = self.compilers.CXXPPFLAGS - self.compilers.CXXPPFLAGS += ' -I'+incDir - self.checkVersion() - self.compilers.CXXPPFLAGS = oldFlags - self.addMakeRule('hpddmcopy','',\ - ['@echo "*** Copying HPDDM ***"',\ - '@${RM} -f ${PETSC_ARCH}/lib/petsc/conf/hpddm.errorflg',\ - '@'+cpstr+' > ${PETSC_ARCH}/lib/petsc/conf/hpddm.log 2>&1 || \\\n\ - (echo "**************************ERROR*************************************" && \\\n\ - echo "Error copying HPDDM. 
Check ${PETSC_ARCH}/lib/petsc/conf/hpddm.log" && \\\n\ - echo "********************************************************************" && \\\n\ - touch '+os.path.join('${PETSC_ARCH}','lib','petsc','conf','hpddm.errorflg')+' && \\\n\ - exit 1)']) - self.addMakeRule('hpddmbuild',slepcbuilddep,\ - ['@echo "*** Building and installing HPDDM ***"',\ - '@${RM} -f ${PETSC_ARCH}/lib/petsc/conf/hpddm.errorflg',\ - '@'+newuser+cxx+' '+cxxflags+' '+self.headers.toStringNoDupes(self.dinclude)+' '+PETSC_OPT+' -I'+self.packageDir+'/include '+self.packageDir+'/interface/hpddm_petsc.cpp '+ldflags+' -o '+libDir+os.path.join('/libhpddm_petsc.'+self.setCompilers.sharedLibraryExt)+' > ${PETSC_ARCH}/lib/petsc/conf/hpddm.log 2>&1 || \\\n\ - (echo "**************************ERROR*************************************" && \\\n\ - echo "Error building HPDDM. Check ${PETSC_ARCH}/lib/petsc/conf/hpddm.log" && \\\n\ - echo "********************************************************************" && \\\n\ - touch '+os.path.join('${PETSC_ARCH}','lib','petsc','conf','hpddm.errorflg')+' && \\\n\ - exit 1)']) - if self.argDB['prefix'] and not 'package-prefix-hash' in self.argDB: - self.addMakeRule('hpddm-build','') - self.addMakeRule('hpddm-install','hpddmbuild') - else: - self.addMakeRule('hpddm-build','hpddmbuild') - self.addMakeRule('hpddm-install','') + # SLEPc dependency + if self.mpi.found: + if self.slepc.found: + slepcbuilddep = '' + ldflags = ' '.join(self.setCompilers.sharedLibraryFlags) + # how can we get the slepc lib? Eventually, we may want to use the variables from the framework + #cxxflags += self.headers.toStringNoDupes(self.slepc.dinclude) + #ldflags += self.libraries.toString(self.slepc.dlib) + dinclude = [incDir] + dlib = [os.path.join(libDir,'libslepc.'+self.setCompilers.sharedLibraryExt)] + cxxflags += ' '+self.headers.toStringNoDupes(dinclude) + ldflags += ' '+self.libraries.toString(dlib) + slepcbuilddep = 'slepc-install slepc-build' + oldFlags = self.compilers.CXXPPFLAGS + self.compilers.CXXPPFLAGS += ' -I'+incDir + self.checkVersion() + self.compilers.CXXPPFLAGS = oldFlags + self.addMakeRule('hpddmcopy','',\ + ['@echo "*** Copying HPDDM ***"',\ + '@${RM} -f ${PETSC_ARCH}/lib/petsc/conf/hpddm.errorflg',\ + '@'+cpstr+' > ${PETSC_ARCH}/lib/petsc/conf/hpddm.log 2>&1 || \\\n\ + (echo "**************************ERROR*************************************" && \\\n\ + echo "Error copying HPDDM. Check ${PETSC_ARCH}/lib/petsc/conf/hpddm.log" && \\\n\ + echo "********************************************************************" && \\\n\ + touch '+os.path.join('${PETSC_ARCH}','lib','petsc','conf','hpddm.errorflg')+' && \\\n\ + exit 1)']) + self.addMakeRule('hpddmbuild',slepcbuilddep,\ + ['@echo "*** Building and installing HPDDM ***"',\ + '@${RM} -f ${PETSC_ARCH}/lib/petsc/conf/hpddm.errorflg',\ + '@'+newuser+cxx+' '+cxxflags+' '+self.headers.toStringNoDupes(self.dinclude)+' '+PETSC_OPT+' -I'+self.packageDir+'/include '+self.packageDir+'/interface/hpddm_petsc.cpp '+ldflags+' -o '+libDir+os.path.join('/libhpddm_petsc.'+self.setCompilers.sharedLibraryExt)+' > ${PETSC_ARCH}/lib/petsc/conf/hpddm.log 2>&1 || \\\n\ + (echo "**************************ERROR*************************************" && \\\n\ + echo "Error building HPDDM. 
Check ${PETSC_ARCH}/lib/petsc/conf/hpddm.log" && \\\n\ + echo "********************************************************************" && \\\n\ + touch '+os.path.join('${PETSC_ARCH}','lib','petsc','conf','hpddm.errorflg')+' && \\\n\ + exit 1)']) + if self.argDB['prefix'] and not 'package-prefix-hash' in self.argDB: + self.addMakeRule('hpddm-build','') + self.addMakeRule('hpddm-install','hpddmbuild') + return self.installDir + else: + self.addMakeRule('hpddm-build','hpddmbuild') + self.addMakeRule('hpddm-install','') + return self.installDir + else: + self.logPrintBox('***** WARNING: Compiling HPDDM with MPI but no SLEPc,\n\ +PCHPDDM won\'t be available, unless reconfiguring with --download-slepc *****') + self.addMakeRule('hpddm-build','') + self.addMakeRule('hpddm-install','') return self.installDir def alternateConfigureLibrary(self): diff --git a/config/BuildSystem/config/packages/hwloc.py b/config/BuildSystem/config/packages/hwloc.py index b6ce4dff01b..010a4b04437 100644 --- a/config/BuildSystem/config/packages/hwloc.py +++ b/config/BuildSystem/config/packages/hwloc.py @@ -4,7 +4,8 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.download = ['http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.gz'] + self.download = ['http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-2.1.0.tar.gz', + 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/hwloc-2.1.0.tar.gz'] self.functions = ['hwloc_topology_init'] self.includes = ['hwloc.h'] self.liblist = [['libhwloc.a'],['libhwloc.a','libxml2.a']] diff --git a/config/BuildSystem/config/packages/hypre.py b/config/BuildSystem/config/packages/hypre.py index 869a1661389..70d79f6d652 100644 --- a/config/BuildSystem/config/packages/hypre.py +++ b/config/BuildSystem/config/packages/hypre.py @@ -4,12 +4,13 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '2.18.1' + #self.version = '2.18.2' self.minversion = '2.14' self.versionname = 'HYPRE_RELEASE_VERSION' self.versioninclude = 'HYPRE_config.h' self.requiresversion = 1 - self.gitcommit = 'v'+self.version + #self.gitcommit = 'v'+self.version + self.gitcommit = '93baaa8c9' # v2.18.2+valgrind-fix self.download = ['git://https://github.com/hypre-space/hypre','https://github.com/hypre-space/hypre/archive/'+self.gitcommit+'.tar.gz'] self.functions = ['HYPRE_IJMatrixCreate'] self.includes = ['HYPRE.h'] @@ -53,7 +54,9 @@ def formGNUConfigureArgs(self): libs = [] for l in self.mpi.lib: ll = os.path.basename(l) - libs.append(ll[3:-2]) + if ll.endswith('.a'): libs.append(ll[3:-2]) + if ll.endswith('.so'): libs.append(ll[3:-3]) + if ll.endswith('.dylib'): libs.append(ll[3:-6]) libs = ' '.join(libs) args.append('--with-MPI-libs="'+libs+'"') @@ -109,8 +112,9 @@ def consistencyChecks(self): def configureLibrary(self): config.package.Package.configureLibrary(self) - oldFlags = self.compilers.CPPFLAGS - self.compilers.CPPFLAGS += ' '+self.headers.toString(self.include) + flagsArg = self.getPreprocessorFlagsArg() + oldFlags = getattr(self.compilers, flagsArg) + setattr(self.compilers, flagsArg, oldFlags+' '+self.headers.toString(self.include)) # check integers if self.defaultIndexSize == 64: code = '#if !defined(HYPRE_BIGINT) && !defined(HYPRE_MIXEDINT)\n#error HYPRE_BIGINT or HYPRE_MIXEDINT not defined!\n#endif' @@ -120,5 +124,5 @@ def configureLibrary(self): msg = 'Hypre with --enable-bigint/--enable-mixedint 
appears to be specified for a default 32-bit-indices build of PETSc.\n' if not self.checkCompile('#include "HYPRE_config.h"',code): raise RuntimeError('Hypre specified is incompatible!\n'+msg+'Suggest using --download-hypre for a compatible hypre') - self.compilers.CPPFLAGS = oldFlags + setattr(self.compilers, flagsArg,oldFlags) return diff --git a/config/BuildSystem/config/packages/lgrind.py b/config/BuildSystem/config/packages/lgrind.py index 330ce831546..339f0e57cab 100644 --- a/config/BuildSystem/config/packages/lgrind.py +++ b/config/BuildSystem/config/packages/lgrind.py @@ -29,9 +29,9 @@ def Install(self): self.logPrintBox('Batch build that could not generate lgrind, you may not be able to build all documentation') return raise RuntimeError('Error running make on lgrind: '+str(e)) - output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'source','lgrind')+' '+os.path.join(self.confDir,'bin'), timeout=25, log = self.log) - output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'lgrind.sty')+' '+os.path.join(self.confDir,'share'), timeout=25, log = self.log) - output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'lgrindef')+' '+os.path.join(self.confDir,'share'), timeout=25, log = self.log) + output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'source','lgrind')+' '+os.path.join(self.confDir,'bin'), timeout=60, log = self.log) + output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'lgrind.sty')+' '+os.path.join(self.confDir,'share'), timeout=60, log = self.log) + output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'lgrindef')+' '+os.path.join(self.confDir,'share'), timeout=60, log = self.log) return self.confDir def configure(self): diff --git a/config/BuildSystem/config/packages/libceed.py b/config/BuildSystem/config/packages/libceed.py index bebb9e86029..8ffbde94d97 100644 --- a/config/BuildSystem/config/packages/libceed.py +++ b/config/BuildSystem/config/packages/libceed.py @@ -19,23 +19,22 @@ def setupHelp(self, help): def setupDependencies(self, framework): config.package.Package.setupDependencies(self, framework) self.setCompilers = framework.require('config.setCompilers',self) - self.installdir = framework.require('PETSc.options.installDir',self) + self.make = framework.require('config.packages.make',self) return def Install(self): import os - self.pushLanguage('C') - # TODO: maybe add support for various backends, OCCA, MAGMA? - cc = self.setCompilers.getCompiler() - self.popLanguage() + # TODO: maybe add support for various backends, CUDA, libXSMM, OCCA, MAGMA? 
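Background for the make_jnp_list / make_sudo_list helpers used in this libceed hunk: they hold the make invocation as an argument list (program, -j jobs, -l load) so it can be handed to executeShellCommand without shell quoting. A rough, hypothetical stand-in using plain subprocess; the function names, job count, and load limit here are assumptions for illustration, not the PETSc API:

    import multiprocessing
    import subprocess

    def make_jnp_list(make='make'):
        # Parallel make as an argument list: -j<jobs> plus a load limit,
        # analogous to the make_jnp_list assembled in packages/make.py.
        np = max(multiprocessing.cpu_count() // 2, 1)
        return [make, '-j' + str(np), '-l' + str(np + 1)]

    def build_libceed(pkg_dir, prefix, cc='cc'):
        # '-B' forces a rebuild; CC and prefix are passed as make variables,
        # mirroring the patched Install() above.
        subprocess.check_call(make_jnp_list() + ['CC=' + cc, 'prefix=' + prefix, '-B'],
                              cwd=pkg_dir)
        subprocess.check_call(['make', 'install', 'prefix=' + prefix], cwd=pkg_dir)
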
+ with self.Language('C'): + cc = self.setCompilers.getCompiler() try: self.logPrintBox('Compiling libceed; this may take several minutes') - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+' && make clean && CC='+cc+' prefix='+self.installDir+' make ', timeout=250, log = self.log) + output,err,ret = config.package.Package.executeShellCommand(self.make.make_jnp_list + ['CC='+cc, 'prefix='+self.installDir, '-B'], cwd=self.packageDir, timeout=250, log=self.log) except RuntimeError as e: raise RuntimeError('Error running make on libceed: '+str(e)) try: self.logPrintBox('Installing libceed; this may take several minutes') - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+' && '+self.installSudo+' prefix='+self.installDir+' make install', timeout=250, log = self.log) + output,err,ret = config.package.Package.executeShellCommand(self.make.make_sudo_list + ['install', 'prefix='+self.installDir], cwd=self.packageDir, timeout=250, log=self.log) except RuntimeError as e: raise RuntimeError('Error running install on libceed: '+str(e)) return self.installDir diff --git a/config/BuildSystem/config/packages/make.py b/config/BuildSystem/config/packages/make.py index 9d14230fdfe..7c8be275704 100644 --- a/config/BuildSystem/config/packages/make.py +++ b/config/BuildSystem/config/packages/make.py @@ -112,7 +112,7 @@ def checkGNUMake(self,make): self.logPrintBox('***** WARNING: You have an older version of Gnu make, it will work,\n\ but may not support all the parallel testing options. You can install the \n\ latest Gnu make with your package manager, such as brew or macports, or use\n\ -the --download-make option to get the latest Gnu make warning message *****') +the --download-make option to get the latest Gnu make *****') self.foundversion = ".".join([str(major),str(minor)]) except RuntimeError as e: self.log.write('GNUMake check failed: '+str(e)+'\n') @@ -203,7 +203,12 @@ def configureMakeNP(self): self.addMakeMacro('MAKE_TEST_NP',str(make_test_np)) self.addMakeMacro('MAKE_LOAD',str(make_load)) self.addMakeMacro('NPMAX',str(cores)) - self.make_jnp = self.make + ' -j' + str(self.make_np) +' -l'+str(self.make_load) + self.make_jnp_list = [self.make, '-j'+str(self.make_np), '-l'+str(self.make_load)] + self.make_jnp = ' '.join(self.make_jnp_list) + if self.installSudo: + self.make_sudo_list = [self.installSudo, self.make] + else: + self.make_sudo_list = [self.make] return def configure(self): diff --git a/config/BuildSystem/config/packages/mathlib.py b/config/BuildSystem/config/packages/mathlib.py index 768da0d3b79..85d0ec5309c 100644 --- a/config/BuildSystem/config/packages/mathlib.py +++ b/config/BuildSystem/config/packages/mathlib.py @@ -3,6 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) + self.lookforbydefault = 1 return def __str__(self): diff --git a/config/BuildSystem/config/packages/med.py b/config/BuildSystem/config/packages/med.py index 3c0922d4c72..7473637b2a9 100644 --- a/config/BuildSystem/config/packages/med.py +++ b/config/BuildSystem/config/packages/med.py @@ -3,8 +3,9 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = 'origin/maint-4.0.0' - self.download = ['git://https://bitbucket.org/petsc/pkg-med.git','https://bitbucket.org/petsc/pkg-med/get/'+self.gitcommit+'.tar.gz'] + self.gitbranch = 'maint-4.0.0' + self.gitcommit = 
'origin/'+self.gitbranch + self.download = ['git://https://bitbucket.org/petsc/pkg-med.git','https://bitbucket.org/petsc/pkg-med/get/'+self.gitbranch+'.tar.gz'] self.functions = ['MEDfileOpen'] self.includes = ['med.h'] self.liblist = [['libmedC.a','libmed.a']] diff --git a/config/BuildSystem/config/packages/metis.py b/config/BuildSystem/config/packages/metis.py index 73ae6ae2bf4..c60ca3f1386 100644 --- a/config/BuildSystem/config/packages/metis.py +++ b/config/BuildSystem/config/packages/metis.py @@ -3,7 +3,8 @@ class Configure(config.package.CMakePackage): def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) - self.gitcommit = 'v5.1.0-p6' + self.versionname = 'METIS_VER_MAJOR.METIS_VER_MINOR.METIS_VER_SUBMINOR' + self.gitcommit = 'v5.1.0-p8' self.download = ['git://https://bitbucket.org/petsc/pkg-metis.git','https://bitbucket.org/petsc/pkg-metis/get/'+self.gitcommit+'.tar.gz'] self.downloaddirnames = ['petsc-pkg-metis'] self.functions = ['METIS_PartGraphKway'] @@ -22,7 +23,7 @@ def setupHelp(self, help): def setupDependencies(self, framework): config.package.CMakePackage.setupDependencies(self, framework) self.compilerFlags = framework.require('config.compilerFlags', self) - self.mathlib = framework.require('config.packages.mathlib',self) + self.mathlib = framework.require('config.packages.mathlib', self) self.deps = [self.mathlib] return diff --git a/config/BuildSystem/config/packages/ml.py b/config/BuildSystem/config/packages/ml.py index d304a22a7ce..3fed49a6f2e 100644 --- a/config/BuildSystem/config/packages/ml.py +++ b/config/BuildSystem/config/packages/ml.py @@ -10,7 +10,6 @@ def __init__(self, framework): self.includes = ['ml_include.h'] self.liblist = [['libml.a']] self.license = 'http://trilinos.sandia.gov/' - self.fc = 0 self.cxx = 1 self.precisions = ['double'] self.complex = 0 diff --git a/config/BuildSystem/config/packages/openblas.py b/config/BuildSystem/config/packages/openblas.py index a3c28337e9e..97fd5da67e8 100644 --- a/config/BuildSystem/config/packages/openblas.py +++ b/config/BuildSystem/config/packages/openblas.py @@ -5,6 +5,8 @@ # fails on mac due to argument list too long https://github.com/xianyi/OpenBLAS/issues/977 # does not support 64 bit integers with INTERFACE64 +# OpenBLAS is not always valgrind clean +# dswap_k_SANDYBRIDGE (in /usr/lib/openblas-base/libblas.so.3) class Configure(config.package.Package): def __init__(self, framework): @@ -13,7 +15,7 @@ def __init__(self, framework): self.gitcommit = 'e7c4d6705a41910240dd19b9e7082a422563bf15' self.versionname = 'OPENBLAS_VERSION' self.download = ['git://https://github.com/xianyi/OpenBLAS.git','https://github.com/xianyi/OpenBLAS/archive/'+self.gitcommit+'.tar.gz'] - self.includes = ['openblas_config.h'] + self.optionalincludes = ['openblas_config.h'] self.functions = ['openblas_get_config'] self.liblist = [['libopenblas.a']] self.precisions = ['single','double'] @@ -38,11 +40,17 @@ def setupDependencies(self, framework): config.package.Package.setupDependencies(self, framework) self.make = framework.require('config.packages.make',self) self.openmp = framework.require('config.packages.openmp',self) + self.pthread = framework.require('config.packages.pthread',self) + + def getSearchDirectories(self): + import os + return [os.path.join('/usr','local')] def configureLibrary(self): import os config.package.Package.configureLibrary(self) - self.checkVersion() + if self.foundoptionalincludes: + self.checkVersion() if self.found: # TODO: Use openblas_get_config() or openblas_config.h 
to determine use of OpenMP and 64 bit indices for prebuilt OpenBLAS libraries if not hasattr(self,'usesopenmp'): self.usesopenmp = 'unknown' @@ -68,9 +76,9 @@ def Install(self): cmdline += 'FC='+self.compilers.FC+' ' if self.argDB['download-openblas-64-bit-blas-indices'] or self.argDB['with-64-bit-blas-indices']: cmdline += " INTERFACE64=1 " - self.known64 = 'yes' + self.known64 = '64' else: - self.known64 = 'no' + self.known64 = '32' if 'download-openblas-make-options' in self.argDB and self.argDB['download-openblas-make-options']: cmdline+=" "+self.argDB['download-openblas-make-options'] if not self.argDB['with-shared-libraries']: @@ -84,12 +92,14 @@ def Install(self): cmdline += " USE_OPENMP=0 " self.usesopenmp = 'no' if 'download-openblas-use-pthreads' in self.argDB and self.argDB['download-openblas-use-pthreads']: + if not self.pthread.found: raise RuntimeError("--download-openblas-use-pthreads option selected but pthreads is not available") self.usespthreads = 1 cmdline += " USE_THREAD=1 " # use the environmental variable OPENBLAS_NUM_THREADS to control the number of threads used else: cmdline += " USE_THREAD=0 " cmdline += " NO_EXPRECISION=1 " + cmdline += " libs netlib re_lapack shared " libdir = self.libDir blasDir = self.packageDir @@ -108,7 +118,7 @@ def Install(self): try: self.logPrintBox('Installing OpenBLAS') self.installDirProvider.printSudoPasswordMessage() - output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.installSudo+' make PREFIX='+self.installDir+' install', timeout=30, log = self.log) + output2,err2,ret = config.package.Package.executeShellCommand('cd '+blasDir+' && '+self.installSudo+' make PREFIX='+self.installDir+' '+cmdline+' install', timeout=60, log = self.log) except RuntimeError as e: self.logPrint('Error moving '+blasDir+' libraries: '+str(e)) raise RuntimeError('Error moving '+blasDir+' libraries') diff --git a/config/BuildSystem/config/packages/p4est.py b/config/BuildSystem/config/packages/p4est.py index 760f52f369f..e3e03dc2084 100644 --- a/config/BuildSystem/config/packages/p4est.py +++ b/config/BuildSystem/config/packages/p4est.py @@ -3,7 +3,7 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.gitcommit = 'bfdbf4b3771a6407e8261eb09066a26184a2cdbc' + self.gitcommit = '1727693b446ee0c987be701320db0cd5de617cfc' self.download = ['git://https://github.com/tisaac/p4est','https://github.com/p4est/p4est.github.io/raw/master/release/p4est-2.0.tar.gz'] self.functions = ['p4est_init'] self.includes = ['p4est_bits.h'] diff --git a/config/BuildSystem/config/packages/parmetis.py b/config/BuildSystem/config/packages/parmetis.py index 98dc9cc0af2..a48bb951e00 100644 --- a/config/BuildSystem/config/packages/parmetis.py +++ b/config/BuildSystem/config/packages/parmetis.py @@ -5,7 +5,7 @@ def __init__(self, framework): config.package.CMakePackage.__init__(self, framework) self.version = '4.0.3' self.versionname = 'PARMETIS_MAJOR_VERSION.PARMETIS_MINOR_VERSION.PARMETIS_SUBMINOR_VERSION' - self.gitcommit = 'v'+self.version+'-p5' + self.gitcommit = 'v'+self.version+'-p6' self.download = ['git://https://bitbucket.org/petsc/pkg-parmetis.git','https://bitbucket.org/petsc/pkg-parmetis/get/'+self.gitcommit+'.tar.gz'] self.functions = ['ParMETIS_V3_PartKway'] self.includes = ['parmetis.h'] @@ -28,7 +28,7 @@ def formCMakeConfigureArgs(self): args.append('-DGKLIB_PATH=../headers') args.append('-DMETIS_PATH='+self.metis.directory) if self.mpi.include: - 
args.append('-DMPI_INCLUDE_PATH='+self.mpi.include[0]) + args.append('-DMPI_INCLUDE_PATH="'+self.mpi.include[0]+'"') if self.checkSharedLibrariesEnabled(): args.append('-DSHARED=1') args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON') diff --git a/config/BuildSystem/config/packages/petsc4py.py b/config/BuildSystem/config/packages/petsc4py.py index bf3130bf334..ad1e473b178 100644 --- a/config/BuildSystem/config/packages/petsc4py.py +++ b/config/BuildSystem/config/packages/petsc4py.py @@ -3,7 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = '3.12.0' + self.gitcommit = '7222bbc' self.download = ['git://https://bitbucket.org/petsc/petsc4py','https://bitbucket.org/petsc/petsc4py/get/'+self.gitcommit+'.tar.gz'] self.functions = [] self.includes = [] diff --git a/config/BuildSystem/config/packages/pflotran.py b/config/BuildSystem/config/packages/pflotran.py index 58b3726f4e5..a17794dca64 100644 --- a/config/BuildSystem/config/packages/pflotran.py +++ b/config/BuildSystem/config/packages/pflotran.py @@ -53,6 +53,8 @@ def postProcess(self): try: self.logPrintBox('Configure Pflotran; this may take several minutes') + # TODO: remove this prefix code and use the mechanisms in package.py for selecting the destination directory; currently if postProcess is used + # TODO: package.py may not allow installing in prefix location if self.framework.argDB['prefix']: PDIR = 'PETSC_DIR='+self.framework.argDB['prefix'] PARCH = '' @@ -61,7 +63,7 @@ def postProcess(self): PDIR = 'PETSC_DIR='+self.petscdir.dir PARCH = 'PETSC_ARCH='+self.arch PREFIX = '--prefix='+os.path.join(self.petscdir.dir,self.arch) - output,err,ret = config.package.GNUPackage.executeShellCommand('cd '+self.packageDir+' && '+PARCH+' '+PDIR+' ./configure all '+PREFIX,timeout=10, log = self.log) + output,err,ret = config.package.GNUPackage.executeShellCommand('cd '+self.packageDir+' && '+PARCH+' '+PDIR+' ./configure all '+PREFIX,timeout=60, log = self.log) self.log.write(output+err) self.logPrintBox('Compiling Pflotran; this may take several minutes') @@ -70,6 +72,7 @@ def postProcess(self): self.logPrintBox('Installing Pflotran; this may take several minutes') self.installDirProvider.printSudoPasswordMessage(1) + # TODO: Once the prefix code above is handled correctly this should use self.installSudo output,err,ret = config.package.GNUPackage.executeShellCommand('cd '+self.packageDir+' && '+self.installDirProvider.installSudo+' make install',timeout=100, log = self.log) self.log.write(output+err) except RuntimeError as e: diff --git a/config/BuildSystem/config/packages/pnetcdf.py b/config/BuildSystem/config/packages/pnetcdf.py index 8099a203cb1..bce36a615a6 100644 --- a/config/BuildSystem/config/packages/pnetcdf.py +++ b/config/BuildSystem/config/packages/pnetcdf.py @@ -4,9 +4,11 @@ class Configure(config.package.GNUPackage): def __init__(self, framework): config.package.GNUPackage.__init__(self, framework) - self.version = '1.11.2' + self.version = '1.12.1' self.versionname = 'PNETCDF_VERSION' - self.download = ['https://parallel-netcdf.github.io/Release/pnetcdf-'+self.version+'.tar.gz', + self.gitcommit = 'checkpoint.1.12.1' # 1.12.1 is first to include MPI1 deprecated fix + self.download = ['git://https://github.com/parallel-netcdf/pnetcdf', + 'https://parallel-netcdf.github.io/Release/pnetcdf-'+self.version+'.tar.gz', 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/pnetcdf-'+self.version+'.tar.gz'] self.functions = ['ncmpi_create']
self.includes = ['pnetcdf.h'] @@ -23,6 +25,14 @@ def setupDependencies(self, framework): return def formGNUConfigureArgs(self): + # https://github.com/Parallel-NetCDF/PnetCDF/commit/38d210c006cabff70d78204d2db98a22ab87547c + if hasattr(self.mpi,'ompi_version') and self.mpi.ompi_version >= (4,0,0): + self.minversion = '1.12.1' + oldinclude = self.include + self.include.append(os.path.join(self.packageDir,'src','include')) + self.checkVersion() + self.include = oldinclude + args = config.package.GNUPackage.formGNUConfigureArgs(self) self.addToArgs(args,'LIBS',self.libraries.toStringNoDupes(self.flibs.lib)) return args diff --git a/config/BuildSystem/config/packages/pragmatic.py b/config/BuildSystem/config/packages/pragmatic.py index 20bfe182558..50c9393ec9e 100644 --- a/config/BuildSystem/config/packages/pragmatic.py +++ b/config/BuildSystem/config/packages/pragmatic.py @@ -18,10 +18,11 @@ def setupDependencies(self, framework): self.sharedLibraries = framework.require('PETSc.options.sharedLibraries', self) self.scalartypes = framework.require('PETSc.options.scalarTypes',self) self.indexTypes = framework.require('PETSc.options.indexTypes', self) + self.mpi = framework.require('config.packages.MPI',self) self.metis = framework.require('config.packages.metis', self) self.eigen = framework.require('config.packages.eigen', self) self.mathlib = framework.require('config.packages.mathlib',self) - self.deps = [self.metis, self.eigen, self.mathlib] + self.deps = [self.mpi, self.metis, self.eigen, self.mathlib] return def formCMakeConfigureArgs(self): @@ -33,6 +34,19 @@ def formCMakeConfigureArgs(self): args.append('-DENABLE_VTK=OFF') args.append('-DENABLE_OPENMP=OFF') args.append('-DEIGEN_INCLUDE_DIR='+self.eigen.include[0]) + + # prevent Pragmatic from linking to MPI it finds by itself + args.append('-DMPI_C_COMPILER:STRING="'+self.framework.getCompiler()+'"') + args.append('-DMPI_C_INCLUDE_PATH:STRING=""') + args.append('-DMPI_C_COMPILE_FLAGS:STRING=""') + args.append('-DMPI_C_LINK_FLAGS:STRING=""') + args.append('-DMPI_C_LIBRARIES:STRING=""') + args.append('-DMPI_CXX_COMPILER:STRING="'+self.framework.getCompiler('Cxx')+'"') + args.append('-DMPI_CXX_INCLUDE_PATH:STRING=""') + args.append('-DMPI_CXX_COMPILE_FLAGS:STRING=""') + args.append('-DMPI_CXX_LINK_FLAGS:STRING=""') + args.append('-DMPI_CXX_LIBRARIES:STRING=""') + if not self.compilerFlags.debugging: args.append('-DCMAKE_BUILD_TYPE=Release') if self.checkSharedLibrariesEnabled(): diff --git a/config/BuildSystem/config/packages/python.py b/config/BuildSystem/config/packages/python.py index 266ebead47d..a61f6af78e1 100644 --- a/config/BuildSystem/config/packages/python.py +++ b/config/BuildSystem/config/packages/python.py @@ -25,11 +25,11 @@ def configure(self): self.pyexe = sys.executable self.addDefine('PYTHON_EXE','"'+self.pyexe+'"') try: - output1,err1,ret1 = config.package.Package.executeShellCommand(self.pyexe + ' -c "import Cython"',timeout=50, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand(self.pyexe + ' -c "import Cython"',timeout=60, log = self.log) self.cython = 1 except: pass try: - output1,err1,ret1 = config.package.Package.executeShellCommand(self.pyexe + ' -c "import numpy"',timeout=50, log = self.log) + output1,err1,ret1 = config.package.Package.executeShellCommand(self.pyexe + ' -c "import numpy"',timeout=60, log = self.log) self.numpy = 1 except: pass return diff --git a/config/BuildSystem/config/packages/radau5.py b/config/BuildSystem/config/packages/radau5.py index a9dea2bb43d..e4d5938594e 
100644 --- a/config/BuildSystem/config/packages/radau5.py +++ b/config/BuildSystem/config/packages/radau5.py @@ -26,6 +26,6 @@ def Install(self): self.framework.popLanguage() except RuntimeError as e: raise RuntimeError('Error running make on radau5: '+str(e)) - output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'libradau5.a')+' '+os.path.join(self.confDir,'lib'), timeout=25, log = self.log) + output,err,ret = config.package.Package.executeShellCommand('cp -f '+os.path.join(self.packageDir,'libradau5.a')+' '+os.path.join(self.confDir,'lib'), timeout=60, log = self.log) return os.path.join(self.confDir,'lib') diff --git a/config/BuildSystem/config/packages/regex.py b/config/BuildSystem/config/packages/regex.py new file mode 100644 index 00000000000..1c13617c1ae --- /dev/null +++ b/config/BuildSystem/config/packages/regex.py @@ -0,0 +1,11 @@ +import config.package +import os + +class Configure(config.package.Package): + def __init__(self, framework): + config.package.Package.__init__(self, framework) + self.functions = ['regexec', 'regcomp', 'regfree'] + self.includes = ['regex.h'] + self.liblist = [['libregex.a']] + self.lookforbydefault = 1 + return diff --git a/config/BuildSystem/config/packages/scalapack.py b/config/BuildSystem/config/packages/scalapack.py index 9b3bf55b3e5..7c1aae2d71b 100644 --- a/config/BuildSystem/config/packages/scalapack.py +++ b/config/BuildSystem/config/packages/scalapack.py @@ -3,7 +3,7 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = 'v2.0.2-p2' + self.gitcommit = 'v2.1.0-p1' self.download = ['git://https://bitbucket.org/petsc/pkg-scalapack','https://bitbucket.org/petsc/pkg-scalapack/get/'+self.gitcommit+'.tar.gz'] self.downloaddirnames = ['petsc-pkg-scalapack','scalapack'] self.includes = [] @@ -65,7 +65,7 @@ def Install(self): if self.installNeeded('SLmake.inc'): try: - output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+' && '+self.make.make+' -f Makefile.parallel cleanlib', timeout=25, log = self.log) + output,err,ret = config.package.Package.executeShellCommand('cd '+self.packageDir+' && '+self.make.make+' -f Makefile.parallel cleanlib', timeout=60, log = self.log) except RuntimeError as e: pass try: diff --git a/config/BuildSystem/config/packages/slepc.py b/config/BuildSystem/config/packages/slepc.py index 563efab8afa..c718d325584 100644 --- a/config/BuildSystem/config/packages/slepc.py +++ b/config/BuildSystem/config/packages/slepc.py @@ -3,8 +3,8 @@ class Configure(config.package.Package): def __init__(self, framework): config.package.Package.__init__(self, framework) - self.gitcommit = '59ff81b' #master oct-21-2019 - self.download = ['git://https://bitbucket.com/slepc/slepc.git','https://bitbucket.org/slepc/slepc/get/'+self.gitcommit+'.tar.gz'] + self.gitcommit = 'bf89b9d' #master dec-13-2019 + self.download = ['git://https://gitlab.com/slepc/slepc.git','https://gitlab.com/slepc/slepc/-/archive/'+self.gitcommit+'/slepc-'+self.gitcommit+'.tar.gz'] self.functions = [] self.includes = [] self.skippackagewithoptions = 1 diff --git a/config/BuildSystem/config/packages/tchem.py b/config/BuildSystem/config/packages/tchem.py index 704791bd3b5..08f45a4cad9 100644 --- a/config/BuildSystem/config/packages/tchem.py +++ b/config/BuildSystem/config/packages/tchem.py @@ -50,7 +50,7 @@ def Install(self): try: self.logPrintBox('Compiling TChem; this may take several minutes') output2,err2,ret2 = 
config.package.Package.executeShellCommand('cd '+self.packageDir+' && make && cp include/TC_*.h %(includeDir)s && cp lib/libtchem* %(libDir)s' % dict(includeDir=includeDir,libDir=libDir), timeout=500, log = self.log) - output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+self.packageDir+' && cp data/periodictable.dat %(shareDir)s' % dict(shareDir=shareDir) , timeout=10, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand('cd '+self.packageDir+' && cp data/periodictable.dat %(shareDir)s' % dict(shareDir=shareDir) , timeout=60, log = self.log) except RuntimeError as e: raise RuntimeError('Error running make on TChem: '+str(e)) self.postInstall(output1+err1+output2+err2,'tchem') diff --git a/config/BuildSystem/config/packages/tetgen.py b/config/BuildSystem/config/packages/tetgen.py index bef10356e18..3f24d0a9b52 100644 --- a/config/BuildSystem/config/packages/tetgen.py +++ b/config/BuildSystem/config/packages/tetgen.py @@ -193,7 +193,7 @@ def Install(self): output1,err1,ret1 = config.package.Package.executeShellCommand('cd '+self.packageDir+' && make CXX="'+ self.setCompilers.getCompiler() + '" CXXFLAGS="' + cflags + '" PREDCXXFLAGS="' + predcflags + '" tetlib && mv tetgen_def.h tetgen.h && '+self.installSudo+'cp *.a ' + libDir + ' && rm *.a *.o', timeout=2500, log = self.log) except RuntimeError as e: raise RuntimeError('Error running make on TetGen: '+str(e)) - output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir, 'tetgen.h')+' '+includeDir, timeout=5, log = self.log) + output2,err2,ret2 = config.package.Package.executeShellCommand(self.installSudo+'cp -f '+os.path.join(self.packageDir, 'tetgen.h')+' '+includeDir, timeout=60, log = self.log) self.postInstall(output1+err1+output2+err2,'make.inc') return self.installDir diff --git a/config/BuildSystem/config/packages/xSDKTrilinos.py b/config/BuildSystem/config/packages/xSDKTrilinos.py index 08902305729..4f92453eb57 100644 --- a/config/BuildSystem/config/packages/xSDKTrilinos.py +++ b/config/BuildSystem/config/packages/xSDKTrilinos.py @@ -102,7 +102,7 @@ def postProcess(self): if not self.argDB['with-batch']: try: self.logPrintBox('Testing xSDKTrilinos; this may take several minutes') - output,err,ret = config.package.CMakePackage.executeShellCommand('cd '+os.path.join(self.packageDir,'petsc-build')+' && '+self.cmake.ctest,timeout=50, log = self.log) + output,err,ret = config.package.CMakePackage.executeShellCommand('cd '+os.path.join(self.packageDir,'petsc-build')+' && '+self.cmake.ctest,timeout=60, log = self.log) output = output+err self.log.write(output) if output.find('Failure') > -1: diff --git a/config/BuildSystem/config/packages/zstd.py b/config/BuildSystem/config/packages/zstd.py new file mode 100644 index 00000000000..f7234dd96ff --- /dev/null +++ b/config/BuildSystem/config/packages/zstd.py @@ -0,0 +1,35 @@ +import config.package + +class Configure(config.package.Package): + def __init__(self, framework): + config.package.Package.__init__(self, framework) + self.version = '1.4.4' + self.download = ['https://github.com/facebook/zstd/archive/v{}.tar.gz'.format(self.version)] + self.functions = ['ZSTD_compress'] + self.includes = ['zstd.h'] + self.liblist = [['libzstd.a']] + self.downloaddirnames = ['zstd'] + return + + def setupHelp(self, help): + config.package.Package.setupHelp(self,help) + import nargs + return + + def setupDependencies(self, framework): + config.package.Package.setupDependencies(self, framework) + 
self.setCompilers = framework.require('config.setCompilers',self) + self.make = framework.require('config.packages.make',self) + return + + def Install(self): + import os + with self.Language('C'): + cc = self.setCompilers.getCompiler() + cflags = self.setCompilers.getCompilerFlags() + try: + self.logPrintBox('Installing zstd; this may take several minutes') + output,err,ret = config.package.Package.executeShellCommand(self.make.make_jnp_list + ['CC='+cc, 'CFLAGS='+cflags, 'PREFIX='+self.installDir, 'install'], cwd=self.packageDir, timeout=250, log=self.log) + except RuntimeError as e: + raise RuntimeError('Error running make on zstd: '+str(e)) + return self.installDir diff --git a/config/BuildSystem/config/setCompilers.py b/config/BuildSystem/config/setCompilers.py index 0286a260ab3..9946b80f047 100644 --- a/config/BuildSystem/config/setCompilers.py +++ b/config/BuildSystem/config/setCompilers.py @@ -1,7 +1,6 @@ from __future__ import generators import config.base import config - import os from functools import reduce @@ -404,6 +403,15 @@ def isDarwin(log): return 0 isDarwin = staticmethod(isDarwin) + def isDarwinCatalina(log): + '''Returns true if system is Darwin/MacOSX Version Catalina or higher''' + import platform + if platform.system() != 'Darwin': return 0 + v = tuple([int(a) for a in platform.mac_ver()[0].split('.')]) + if v < (10,15,0): return 0 + return 1 + isDarwinCatalina = staticmethod(isDarwinCatalina) + def isFreeBSD(log): '''Returns true if system is FreeBSD''' (output, error, status) = config.base.Configure.executeShellCommand('uname -s', log = log) @@ -562,13 +570,13 @@ def generateCCompilerGuesses(self): yield 'win32fe '+self.argDB['with-cc'] else: yield self.argDB['with-cc'] - raise RuntimeError('C compiler you provided with -with-cc='+self.argDB['with-cc']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('C compiler you provided with -with-cc='+self.argDB['with-cc']+' cannot be found or does not work.'+'\n'+self.mesg) elif 'CC' in self.argDB: if self.isWindows(self.argDB['CC'], self.log): yield 'win32fe '+self.argDB['CC'] else: yield self.argDB['CC'] - raise RuntimeError('C compiler you provided with -CC='+self.argDB['CC']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('C compiler you provided with -CC='+self.argDB['CC']+' cannot be found or does not work.'+'\n'+self.mesg) elif self.useMPICompilers() and 'with-mpi-dir' in self.argDB and os.path.isdir(os.path.join(self.argDB['with-mpi-dir'], 'bin')): self.usedMPICompilers = 1 yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpiicc') @@ -577,7 +585,7 @@ def generateCCompilerGuesses(self): yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'hcc') yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpcc_r') self.usedMPICompilers = 0 - raise RuntimeError('MPI compiler wrappers in '+self.argDB['with-mpi-dir']+'/bin do not work. See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') + raise RuntimeError('MPI compiler wrappers in '+self.argDB['with-mpi-dir']+'/bin cannot be found or do not work. 
See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') else: if self.useMPICompilers() and 'with-mpi-dir' in self.argDB: # if it gets here these means that self.argDB['with-mpi-dir']/bin does not exist so we should not search for MPI compilers @@ -628,7 +636,6 @@ def checkCCompiler(self): self.checkCompiler('C') break except RuntimeError as e: - import os self.mesg = str(e) self.logPrint('Error testing C compiler: '+str(e)) if os.path.basename(self.CC) == 'mpicc': @@ -641,6 +648,9 @@ def checkCCompiler(self): self.executeShellCommand(self.CC+' --version', log = self.log) except: pass + if os.path.basename(self.CC).startswith('mpi'): + self.logPrint('Since MPI c compiler starts with mpi, force searches for other compilers to only look for MPI compilers\n') + self.argDB['with-mpi-compilers'] = 1 return def generateCPreprocessorGuesses(self): @@ -670,7 +680,6 @@ def checkCPreprocessor(self): return def generateCUDACompilerGuesses(self): - import os '''Determine the CUDA compiler using CUDAC, then --with-cudac - Any given category can be excluded''' if hasattr(self, 'CUDAC'): @@ -678,12 +687,11 @@ def generateCUDACompilerGuesses(self): raise RuntimeError('Error: '+self.mesg) elif 'with-cudac' in self.argDB: yield self.argDB['with-cudac'] - raise RuntimeError('CUDA compiler you provided with -with-cudac='+self.argDB['with-cudac']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('CUDA compiler you provided with -with-cudac='+self.argDB['with-cudac']+' cannot be found or does not work.'+'\n'+self.mesg) elif 'CUDAC' in self.argDB: yield self.argDB['CUDAC'] - raise RuntimeError('CUDA compiler you provided with -CUDAC='+self.argDB['CUDAC']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('CUDA compiler you provided with -CUDAC='+self.argDB['CUDAC']+' cannot be found or does not work.'+'\n'+self.mesg) elif 'with-cuda-dir' in self.argDB: - import os nvccPath = os.path.join(self.argDB['with-cuda-dir'], 'bin','nvcc') yield nvccPath else: @@ -748,7 +756,6 @@ def checkCUDAPreprocessor(self): def generateCxxCompilerGuesses(self): '''Determine the Cxx compiler''' - import os if hasattr(self, 'CXX'): yield self.CXX @@ -767,13 +774,13 @@ def generateCxxCompilerGuesses(self): yield 'win32fe '+self.argDB['with-cxx'] else: yield self.argDB['with-cxx'] - raise RuntimeError('C++ compiler you provided with -with-cxx='+self.argDB['with-cxx']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('C++ compiler you provided with -with-cxx='+self.argDB['with-cxx']+' cannot be found or does not work.'+'\n'+self.mesg) elif 'CXX' in self.argDB: if self.isWindows(self.argDB['CXX'], self.log): yield 'win32fe '+self.argDB['CXX'] else: yield self.argDB['CXX'] - raise RuntimeError('C++ compiler you provided with -CXX='+self.argDB['CXX']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('C++ compiler you provided with -CXX='+self.argDB['CXX']+' cannot be found or does not work.'+'\n'+self.mesg) elif self.useMPICompilers() and 'with-mpi-dir' in self.argDB and os.path.isdir(os.path.join(self.argDB['with-mpi-dir'], 'bin')): self.usedMPICompilers = 1 yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpiicpc') @@ -783,7 +790,7 @@ def generateCxxCompilerGuesses(self): yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpiCC') yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpCC_r') self.usedMPICompilers = 0 - raise RuntimeError('bin/ you provided with -with-mpi-dir='+self.argDB['with-mpi-dir']+' does not work. 
See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') + raise RuntimeError('bin/ you provided with -with-mpi-dir='+self.argDB['with-mpi-dir']+' cannot be found or does not work. See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') else: if self.useMPICompilers(): self.usedMPICompilers = 1 @@ -848,7 +855,6 @@ def checkCxxCompiler(self): self.checkCompiler('Cxx') break except RuntimeError as e: - import os self.mesg = str(e) self.logPrint('Error testing C++ compiler: '+str(e)) if os.path.basename(self.CXX) in ['mpicxx', 'mpiCC']: @@ -887,7 +893,6 @@ def checkCxxPreprocessor(self): self.popLanguage() break except RuntimeError as e: - import os if os.path.basename(self.CXXPP) in ['mpicxx', 'mpiCC']: self.logPrint('MPI installation '+self.getCompiler()+' is likely incorrect.\n Use --with-mpi-dir to indicate an alternate MPI') @@ -898,7 +903,6 @@ def checkCxxPreprocessor(self): def generateFortranCompilerGuesses(self): '''Determine the Fortran compiler''' - import os if hasattr(self, 'FC'): yield self.FC @@ -911,14 +915,14 @@ def generateFortranCompilerGuesses(self): yield 'win32fe '+self.argDB['with-fc'] else: yield self.argDB['with-fc'] - raise RuntimeError('Fortran compiler you provided with --with-fc='+self.argDB['with-fc']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('Fortran compiler you provided with --with-fc='+self.argDB['with-fc']+' cannot be found or does not work.'+'\n'+self.mesg) elif 'FC' in self.argDB: if self.isWindows(self.argDB['FC'], self.log): yield 'win32fe '+self.argDB['FC'] else: yield self.argDB['FC'] yield self.argDB['FC'] - raise RuntimeError('Fortran compiler you provided with -FC='+self.argDB['FC']+' does not work.'+'\n'+self.mesg) + raise RuntimeError('Fortran compiler you provided with -FC='+self.argDB['FC']+' cannot be found or does not work.'+'\n'+self.mesg) elif self.useMPICompilers() and 'with-mpi-dir' in self.argDB and os.path.isdir(os.path.join(self.argDB['with-mpi-dir'], 'bin')): self.usedMPICompilers = 1 yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpiifort') @@ -929,7 +933,7 @@ def generateFortranCompilerGuesses(self): yield os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpxlf_r') self.usedMPICompilers = 0 if os.path.isfile(os.path.join(self.argDB['with-mpi-dir'], 'bin', 'mpif90')): - raise RuntimeError('bin/mpif90 you provided with --with-mpi-dir='+self.argDB['with-mpi-dir']+' does not work.\nRun with --with-fc=0 if you wish to use this MPI and disable Fortran. See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') + raise RuntimeError('bin/mpif90 you provided with --with-mpi-dir='+self.argDB['with-mpi-dir']+' cannot be found or does not work.\nRun with --with-fc=0 if you wish to use this MPI and disable Fortran. See https://www.mcs.anl.gov/petsc/documentation/faq.html#mpi-compilers') else: if self.useMPICompilers(): self.usedMPICompilers = 1 @@ -1027,7 +1031,6 @@ def checkFortranPreprocessor(self): self.popLanguage() break except RuntimeError as e: - import os if os.path.basename(self.FPP) in ['mpif90']: self.logPrint('MPI installation '+self.getCompiler()+' is likely incorrect.\n Use --with-mpi-dir to indicate an alternate MPI') diff --git a/config/BuildSystem/config/types.py b/config/BuildSystem/config/types.py index 5735ca5d7a0..84653c42db5 100644 --- a/config/BuildSystem/config/types.py +++ b/config/BuildSystem/config/types.py @@ -101,28 +101,6 @@ def checkCxxComplex(self): self.popLanguage() return - def checkFortranStar(self): - '''Checks whether integer*4, etc. 
is handled in Fortran, and if not defines MISSING_FORTRANSTAR''' - self.pushLanguage('FC') - body = ' integer*4 i\n real*8 d\n' - if not self.checkCompile('', body): - self.addDefine('MISSING_FORTRANSTAR', 1) - self.popLanguage() - return - -# reverse of the above - but more standard thing to do for F90 compilers - def checkFortranKind(self): - '''Checks whether selected_int_kind etc work USE_FORTRANKIND''' - self.pushLanguage('FC') - body = ''' - integer(kind=selected_int_kind(10)) i - real(kind=selected_real_kind(10)) d -''' - if self.checkCompile('', body): - self.addDefine('USE_FORTRANKIND', 1) - self.popLanguage() - return - def checkConst(self): '''Checks for working const, and if not found defines it to empty string''' body = ''' @@ -197,9 +175,12 @@ def checkSizeof(self, typeName, typeSizes, otherInclude = None, lang='C', save=T includes += mpiFix includes += '#include <' + otherInclude + '>\n' size = None + checkName = typeName + if typeName == 'enum': + checkName = 'enum{ENUM_DUMMY}' with self.Language(lang): for s in typeSizes: - body = 'char assert_sizeof[(sizeof({0})=={1})*2-1];'.format(typeName, s) + body = 'char assert_sizeof[(sizeof({0})=={1})*2-1];'.format(checkName, s) if self.checkCompile(includes, body, codeBegin=codeBegin, codeEnd='\n'): size = s break @@ -244,15 +225,13 @@ def configure(self): self.executeTest(self.checkC99Complex) if hasattr(self.compilers, 'CXX'): self.executeTest(self.checkCxxComplex) - if hasattr(self.compilers, 'FC'): - #self.executeTest(self.checkFortranStar) - self.executeTest(self.checkFortranKind) self.executeTest(self.checkConst) for t, sizes in {'void *': (8, 4), 'short': (2, 4, 8), 'int': (4, 8, 2), 'long': (8, 4), 'long long': (8,), + 'enum': (4, 8), 'size_t': (8, 4)}.items(): self.executeTest(self.checkSizeof, args=[t, sizes]) self.executeTest(self.checkVisibility) diff --git a/config/BuildSystem/script.py b/config/BuildSystem/script.py index 20f2736eaaa..a3bf82a5776 100644 --- a/config/BuildSystem/script.py +++ b/config/BuildSystem/script.py @@ -173,7 +173,10 @@ def runShellCommandSeq(commandseq, log=None, cwd=None): err = err.decode(encoding='UTF-8',errors='replace') ret = pipe.returncode except Exception as e: - return ('', e.message, e.errno) + if hasattr(e,'message') and hasattr(e,'errno'): + return ('', e.message, e.errno) + else: + return ('', str(e),1) output += out error += err if ret: @@ -193,6 +196,7 @@ def passCheckCommand(command, status, output, error): def executeShellCommand(command, checkCommand = None, timeout = 600.0, log = None, lineLimit = 0, cwd=None, logOutputflg = True, threads = 0): '''Execute a shell command returning the output, and optionally provide a custom error checker - This returns a tuple of the (output, error, statuscode)''' + '''The timeout is ignored unless the threads value is nonzero''' return Script.executeShellCommandSeq([command], checkCommand=checkCommand, timeout=timeout, log=log, lineLimit=lineLimit, cwd=cwd,logOutputflg = logOutputflg, threads = threads) @staticmethod @@ -217,8 +221,10 @@ def logOutput(log, output, logOutputflg): log.write('stdout: '+output+'\n') return output def runInShell(commandseq, log, cwd): + if not useThreads: log.write('UseThreads is off\n') if useThreads and threads: import threading + log.write('Running Executable with threads to time it out at '+str(timeout)+'\n') class InShell(threading.Thread): def __init__(self): threading.Thread.__init__(self) @@ -231,12 +237,13 @@ def run(self): thread.start() thread.join(timeout) if thread.isAlive(): - error = 'Runaway
process exceeded time limit of '+str(timeout)+'s\n' + error = 'Runaway process exceeded time limit of '+str(timeout)+'\n' log.write(error) return ('', error, -1) else: return (thread.output, thread.error, thread.status) else: + log.write('Running Executable WITHOUT threads to time it out\n') return Script.runShellCommandSeq(commandseq, log, cwd) (output, error, status) = runInShell(commandseq, log, cwd) diff --git a/config/PETSc/Configure.py b/config/PETSc/Configure.py index 491f243f1da..be35c3df6f9 100644 --- a/config/PETSc/Configure.py +++ b/config/PETSc/Configure.py @@ -122,19 +122,19 @@ def setupDependencies(self, framework): self.registerPythonFile(fname,'') # test for a variety of basic headers and functions - headersC = map(lambda name: name+'.h', ['setjmp','dos', 'fcntl', 'float', 'io', 'malloc', 'pwd', 'strings', - 'unistd', 'sys/sysinfo', 'machine/endian', 'sys/param', 'sys/procfs', 'sys/resource', - 'sys/systeminfo', 'sys/times', 'sys/utsname', + headersC = map(lambda name: name+'.h',['setjmp','dos','fcntl','float','io','malloc','pwd','strings', + 'unistd','sys/sysinfo','machine/endian','sys/param','sys/procfs','sys/resource', + 'sys/systeminfo','sys/times','sys/utsname', 'sys/socket','sys/wait','netinet/in','netdb','Direct','time','Ws2tcpip','sys/types', - 'WindowsX', 'float','ieeefp','stdint','pthread','inttypes','immintrin','zmmintrin']) - functions = ['access', '_access', 'clock', 'drand48', 'getcwd', '_getcwd', 'getdomainname', 'gethostname', - 'getwd', 'memalign', 'popen', 'PXFGETARG', 'rand', 'getpagesize', - 'readlink', 'realpath', 'usleep', 'sleep', '_sleep', + 'WindowsX','float','ieeefp','stdint','pthread','inttypes','immintrin','zmmintrin']) + functions = ['access','_access','clock','drand48','getcwd','_getcwd','getdomainname','gethostname', + 'getwd','memalign','popen','PXFGETARG','rand','getpagesize', + 'readlink','realpath','usleep','sleep','_sleep', 'uname','snprintf','_snprintf','lseek','_lseek','time','fork','stricmp', - 'strcasecmp', 'bzero', 'dlopen', 'dlsym', 'dlclose', 'dlerror', - '_set_output_format','_mkdir','socket','gethostbyname'] - libraries = [(['fpe'], 'handle_sigfpes')] - librariessock = [(['socket', 'nsl'], 'socket')] + 'strcasecmp','bzero','dlopen','dlsym','dlclose','dlerror', + '_set_output_format','_mkdir','socket','gethostbyname','_pipe'] + libraries = [(['fpe'],'handle_sigfpes')] + librariessock = [(['socket','nsl'],'socket')] self.headers.headers.extend(headersC) self.functions.functions.extend(functions) self.libraries.libraries.extend(libraries) @@ -253,6 +253,7 @@ def Dump(self): if hasattr(self.compilers, 'CXX'): self.setCompilers.pushLanguage('Cxx') self.addDefine('HAVE_CXX','1') + self.addMakeMacro('CXXPP_FLAGS',self.setCompilers.CXXPPFLAGS) self.addMakeMacro('CXX_FLAGS',self.setCompilers.getCompilerFlags()) cxx_linker = self.setCompilers.getLinker() self.addMakeMacro('CXX_LINKER',cxx_linker) @@ -266,6 +267,12 @@ def Dump(self): self.setCompilers.pushLanguage(self.languages.clanguage) self.addMakeMacro('PCC',self.setCompilers.getCompiler()) self.addMakeMacro('PCC_FLAGS',self.setCompilers.getCompilerFlags()) + self.addMakeMacro('PCPP_FLAGS',getattr(self.setCompilers,self.languages.clanguage.upper()+'PPFLAGS')) + self.addMakeMacro('PFLAGS','${'+self.languages.clanguage.upper()+'FLAGS}') + self.addMakeMacro('PPPFLAGS','${'+self.languages.clanguage.upper()+'PPFLAGS}') + # ugly work-around for python3 distutils parse_makefile() issue with the above 2 lines + self.addMakeMacro('PY_'+self.languages.clanguage.upper()+'FLAGS','') + 
self.addMakeMacro('PY_'+self.languages.clanguage.upper()+'PPFLAGS','') self.setCompilers.popLanguage() # .o or .obj self.addMakeMacro('CC_SUFFIX','o') @@ -281,10 +288,12 @@ def Dump(self): if hasattr(self.compilers, 'FC'): if self.framework.argDB['with-fortran-bindings']: + if not self.fortran.fortranIsF90: + raise RuntimeError('Error! Fortran compiler "'+self.compilers.FC+'" does not support F90! PETSc fortran bindings require an F90 compiler') self.addDefine('HAVE_FORTRAN','1') self.setCompilers.pushLanguage('FC') # need FPPFLAGS in config/setCompilers - self.addMakeMacro('FPP_FLAGS',self.setCompilers.CPPFLAGS) + self.addMakeMacro('FPP_FLAGS',self.setCompilers.FPPFLAGS) # compiler values self.addMakeMacro('FC_FLAGS',self.setCompilers.getCompilerFlags()) @@ -601,7 +610,7 @@ def configureDeprecated(self): self.addDefine('DEPRECATED_ENUM(why)', '__attribute((deprecated))') else: self.addDefine('DEPRECATED_ENUM(why)', ' ') - # I was unable to make a CPP macro that takes the old and new values as seperate arguments and builds the message needed by _Pragma + # I was unable to make a CPP macro that takes the old and new values as separate arguments and builds the message needed by _Pragma # hence the deprecation message is handled as it is if self.checkCompile('#define TEST _Pragma("GCC warning \"Testing _Pragma\"") value'): self.addDefine('DEPRECATED_MACRO(why)', '_Pragma(why)') diff --git a/config/PETSc/options/arch.py b/config/PETSc/options/arch.py index 43400cda100..0b026006374 100644 --- a/config/PETSc/options/arch.py +++ b/config/PETSc/options/arch.py @@ -105,9 +105,14 @@ def checkDependency(self): import os import sys import hashlib - args = sorted(set(filter(lambda x: not (x.startswith('PETSC_ARCH') or x == '--force'),sys.argv[1:]))) - hash = 'args:\n' + '\n'.join(' '+a for a in args) + '\n' + import platform + if sys.version_info < (3,): + hash = 'Uname: '+platform.uname()[0]+' '+platform.uname()[4]+'\n' + else: + hash = 'Uname: '+platform.uname().system+' '+platform.uname().processor+'\n' hash += 'PATH=' + os.environ.get('PATH', '') + '\n' + args = sorted(set(filter(lambda x: not (x.startswith('PETSC_ARCH') or x == '--force'),sys.argv[1:]))) + hash += 'args:\n' + '\n'.join(' '+a for a in args) + '\n' chash='' try: for root, dirs, files in os.walk('config'): @@ -144,21 +149,33 @@ def checkDependency(self): self.arch = 'arch-'+hprefix[0:6] else: if not os.path.isdir(self.argDB['package-prefix-hash']): - raise RuntimeError('--package-prefix-hash '+self.argDB['package-prefix-hash']+' directory does not exist\n') - self.argDB['prefix'] = os.path.join(self.argDB['package-prefix-hash'],hprefix[0:6]) - if not os.path.isdir(self.argDB['prefix']): - os.mkdir(self.argDB['prefix']) - hashfilepackages = os.path.join(self.argDB['prefix'],'configure-hash') - else: + self.logPrintBox('Specified package-prefix-hash location %s not found! Attempting to create this dir!'
% self.argDB['package-prefix-hash']) try: - with open(os.path.join(self.argDB['prefix'],'configure-hash'), 'r') as f: - a = f.read() - except: - self.logPrint('No previous hashfilepackages found') - a = '' - if a == hash: - self.logPrint('Reusing download packages in '+self.argDB['prefix']) - self.argDB['package-prefix-hash'] = 'reuse' # indicates prefix libraries already built, no need to rebuild + os.makedirs(self.argDB['package-prefix-hash']) + except Exception as e: + self.logPrint('Error creating package-prefix-hash directory '+self.argDB['package-prefix-hash']+': '+str(e)) + raise RuntimeError('You must have write permission to create this directory!') + status = False + for idx in range(6,len(hprefix)): + hashdirpackages = os.path.join(self.argDB['package-prefix-hash'],hprefix[0:idx]) + hashfilepackages = os.path.join(hashdirpackages,'configure-hash') + if os.path.isdir(hashdirpackages): + if os.path.exists(hashfilepackages): + self.argDB['package-prefix-hash'] = 'reuse' # indicates prefix libraries already built, no need to rebuild + status = True + break + else: continue # perhaps an incomplete build? use a longer hash + else: + try: + os.mkdir(hashdirpackages) + except Exception as e: + self.logPrint('Error creating package-prefix-hash directory '+hashdirpackages+': '+str(e)) + raise RuntimeError('You must have write permission on --package-prefix-hash='+self.argDB['package-prefix-hash']+' directory') + status = True + break + if not status: + raise RuntimeError('Unable to create package-prefix-hash dir! Suggest cleaning up %s* !' % os.path.join(self.argDB['package-prefix-hash'],hprefix[0:6]) ) + self.argDB['prefix'] = hashdirpackages hashfile = os.path.join(self.arch,'lib','petsc','conf','configure-hash') diff --git a/config/PETSc/options/indexTypes.py b/config/PETSc/options/indexTypes.py index 3fc67194fe9..a5bf59bbd18 100755 --- a/config/PETSc/options/indexTypes.py +++ b/config/PETSc/options/indexTypes.py @@ -27,6 +27,15 @@ def setupDependencies(self, framework): self.compilers = framework.require('config.compilers', None) return + def fortranPromoteInteger(self): + self.pushLanguage('FC') + flags = self.getCompilerFlags() + self.popLanguage() + # ifort compiler flag gfortran compiler flag + if '-integer-size 64' in flags or '-fdefault-integer-8' in flags: + return 1 + return 0 + def configureIndexSize(self): if self.framework.argDB['with-64-bit-indices']: self.integerSize = 64 @@ -34,9 +43,14 @@ def configureIndexSize(self): if self.libraries.check('-lgcc_s.1', '__floatdidf'): self.compilers.LIBS += ' '+self.libraries.getLibArgument('-lgcc_s.1') self.addMakeMacro('PETSC_INDEX_SIZE', '64') + if self.fortranPromoteInteger(): + self.addDefine('PROMOTE_FORTRAN_INTEGER', 1) + self.logPrintBox('Warning: you have a Fortran compiler option to promote integer to 8 bytes.\nThis is fragile and not supported by the MPI standard.\nYou must ensure in your code that all calls to MPI routines pass 4-byte integers.') else: self.integerSize = 32 self.addMakeMacro('PETSC_INDEX_SIZE', '32') + if self.fortranPromoteInteger(): + raise RuntimeError('Fortran compiler flag to promote integers to 8 bytes has been set, but PETSc is being built with 4-byte integers.') return def configure(self): diff --git a/config/PETSc/options/installDir.py b/config/PETSc/options/installDir.py index ac98641610c..860faf0fda4 100755 --- a/config/PETSc/options/installDir.py +++ b/config/PETSc/options/installDir.py @@ -43,7 +43,8 @@ def setInstallDir(self): try: os.makedirs(os.path.join(self.dir,'PETScTestDirectory')) 
os.rmdir(os.path.join(self.dir,'PETScTestDirectory')) - except: + except Exception as e: + self.logPrint('Error trying to test write permissions on directory '+str(e)) self.installSudoMessage = 'You do not have write permissions to the --prefix directory '+self.dir+'\nYou will be prompted for the sudo password for any external package installs' self.installSudo = 'sudo ' else: diff --git a/config/PETSc/options/languages.py b/config/PETSc/options/languages.py index 2507556ffb5..0279abf8849 100755 --- a/config/PETSc/options/languages.py +++ b/config/PETSc/options/languages.py @@ -33,6 +33,7 @@ def configureCLanguage(self): self.logPrintBox('WARNING -with-clanguage=C++ is a developer feature and is *not* required for regular usage of PETSc either from C or C++') self.logPrint('C language is '+str(self.clanguage)) self.addDefine('CLANGUAGE_'+self.clanguage.upper(),'1') + self.addMakeMacro('CLANGUAGE',self.clanguage.upper()) def configure(self): self.executeTest(self.configureCLanguage) diff --git a/config/configure.py b/config/configure.py index 60b9dd4b7b6..4d868b24e73 100755 --- a/config/configure.py +++ b/config/configure.py @@ -19,6 +19,8 @@ def check_for_option_mistakes(opts): for opt in opts[1:]: name = opt.split('=')[0] + if name.find(' ') >= 0: + raise ValueError('The option "'+name+'" has a space character in the name - this is likely incorrect usage.'); if name.find('_') >= 0: exception = False for exc in ['mkl_sparse', 'mkl_sparse_optimize', 'mkl_cpardiso', 'mkl_pardiso', 'superlu_dist', 'PETSC_ARCH', 'PETSC_DIR', 'CXX_CXXFLAGS', 'LD_SHARED', 'CC_LINKER_FLAGS', 'CXX_LINKER_FLAGS', 'FC_LINKER_FLAGS', 'AR_FLAGS', 'C_VERSION', 'CXX_VERSION', 'FC_VERSION', 'size_t', 'MPI_Comm','MPI_Fint','int64_t']: @@ -177,9 +179,9 @@ def chksynonyms(): elif name.find('with-'+i.lower()+'=') >= 0: sys.argv[l] = name.replace(i.lower()+'=',j.lower()+'=') -def chkwinf90(): +def chkwincompilerusinglink(): for arg in sys.argv: - if (arg.find('win32fe') >= 0 and (arg.find('f90') >=0 or arg.find('ifort') >=0)): + if (arg.find('win32fe') >= 0 and (arg.find('f90') >=0 or arg.find('ifort') >=0 or arg.find('icl') >=0)): return 1 return 0 @@ -194,12 +196,12 @@ def chkdosfiles(): return def chkcygwinlink(): - if os.path.exists('/usr/bin/cygcheck.exe') and os.path.exists('/usr/bin/link.exe') and chkwinf90(): + if os.path.exists('/usr/bin/cygcheck.exe') and os.path.exists('/usr/bin/link.exe') and chkwincompilerusinglink(): if '--ignore-cygwin-link' in sys.argv: return 0 print('===============================================================================') - print(' *** Cygwin /usr/bin/link detected! Compiles with CVF/Intel f90 can break! **') + print(' *** Cygwin /usr/bin/link detected! Compiles with Intel icl/ifort can break! **') print(' *** To workarround do: "mv /usr/bin/link.exe /usr/bin/link-cygwin.exe" **') - print(' *** Or to ignore this check, use configure option: --ignore-cygwin-link **') + print(' *** Or to ignore this check, use configure option: --ignore-cygwin-link. But compiles can fail.
**') print('===============================================================================') sys.exit(3) return 0 @@ -330,9 +332,11 @@ def move_configure_log(framework): # Just in case - confdir is not created lib_dir = os.path.join(petsc_arch,'lib') + petsc_dir = os.path.join(petsc_arch,'lib','petsc') conf_dir = os.path.join(petsc_arch,'lib','petsc','conf') if not os.path.isdir(petsc_arch): os.mkdir(petsc_arch) if not os.path.isdir(lib_dir): os.mkdir(lib_dir) + if not os.path.isdir(petsc_dir): os.mkdir(petsc_dir) if not os.path.isdir(conf_dir): os.mkdir(conf_dir) curr_bkp = curr_file + '.bkp' @@ -455,6 +459,7 @@ def petsc_configure(configure_options): pass return 0 except (RuntimeError, config.base.ConfigureSetupError) as e: + tbo = sys.exc_info()[2] emsg = str(e) if not emsg.endswith('\n'): emsg = emsg+'\n' msg ='*******************************************************************************\n'\ @@ -483,6 +488,7 @@ def petsc_configure(configure_options): +emsg+'*******************************************************************************\n' se = '' except OSError as e : + tbo = sys.exc_info()[2] emsg = str(e) if not emsg.endswith('\n'): emsg = emsg+'\n' msg ='*******************************************************************************\n'\ @@ -491,20 +497,23 @@ def petsc_configure(configure_options): +emsg+'*******************************************************************************\n' se = '' except SystemExit as e: + tbo = sys.exc_info()[2] if e.code is None or e.code == 0: return - if e.code is 10: + if e.code == 10: sys.exit(10) msg ='*******************************************************************************\n'\ +' CONFIGURATION FAILURE (Please send configure.log to petsc-maint@mcs.anl.gov)\n' \ +'*******************************************************************************\n' se = str(e) except Exception as e: + tbo = sys.exc_info()[2] msg ='*******************************************************************************\n'\ +' CONFIGURATION CRASH (Please send configure.log to petsc-maint@mcs.anl.gov)\n' \ +'*******************************************************************************\n' se = str(e) + framework.logClear() print(msg) if not framework is None: framework.logClear() @@ -518,7 +527,6 @@ def petsc_configure(configure_options): framework.outputCHeader(framework.log) except Exception as e: framework.log.write('Problem writing headers to log: '+str(e)) - if sys.exc_info()[2]: tbo = sys.exc_info()[2] try: framework.log.write(msg+se) traceback.print_tb(tbo, file = framework.log) @@ -527,14 +535,14 @@ def petsc_configure(configure_options): move_configure_log(framework) except Exception as e: print('Error printing error message from exception or printing the traceback:'+str(e)) - traceback.print_tb(tbo) + traceback.print_tb(sys.exc_info()[2]) sys.exit(1) else: print(se) - traceback.print_tb(sys.exc_info()[2]) + traceback.print_tb(tbo) else: print(se) - traceback.print_tb(sys.exc_info()[2]) + traceback.print_tb(tbo) if hasattr(framework,'log'): framework.log.close() if __name__ == '__main__': diff --git a/config/example_template.py b/config/example_template.py index d4b51ce429c..24a65f96130 100755 --- a/config/example_template.py +++ b/config/example_template.py @@ -17,6 +17,8 @@ testlogtapfile=@TESTROOT@/test_${@PKG_NAME@_arch}_tap.log testlogerrfile=@TESTROOT@/test_${@PKG_NAME@_arch}_err.log config_dir='@CONFIG_DIR@' +filter='@FILTER@' +filter_output='@FILTER_OUTPUT@' petsc_bindir='@PETSC_BINDIR@' @DATAFILESPATH_LINE@ args='@ARGS@' @@ -36,7 +38,6 @@ 
todoline='petsc_report_tapoutput "" "${label}" "TODO @TODOCOMMENT@"' skipline='petsc_report_tapoutput "" "${label}" "SKIP @SKIPCOMMENT@"' -mpitest='petsc_testrun "${mpiexec} -n ${nsize} ${exec} ${args} @SUBARGS@" @REDIRECT_FILE@ ${testname}.err "${label}@LABEL_SUFFIX@" @FILTER@' +mpitest='petsc_testrun "${mpiexec} -n ${nsize} ${exec} ${args} @SUBARGS@" @REDIRECT_FILE@ ${testname}.err "${label}@LABEL_SUFFIX@" @ERROR@' difftest='petsc_testrun "${diff_exe} @OUTPUT_FILE@ @REDIRECT_FILE@" diff-${testname}.out diff-${testname}.out diff-${label}@LABEL_SUFFIX@ ""' -filterdifftest='petsc_testrun "@FILTER_OUTPUT@ @OUTPUT_FILE@ | ${diff_exe} - @REDIRECT_FILE@" diff-${testname}.out diff-${testname}.out diff-${label}@LABEL_SUFFIX@ ""' -commandtest='petsc_testrun "@COMMAND@" @REDIRECT_FILE@ ${testname}.err cmd-${label}@LABEL_SUFFIX@ @FILTER@' +commandtest='petsc_testrun "@COMMAND@" @REDIRECT_FILE@ ${testname}.err cmd-${label}@LABEL_SUFFIX@ @ERROR@' diff --git a/config/examples/arch-ci-freebsd-c-single-opt.py b/config/examples/arch-ci-freebsd-c-single-opt.py index 03bec5a849e..6aff750a9ee 100755 --- a/config/examples/arch-ci-freebsd-c-single-opt.py +++ b/config/examples/arch-ci-freebsd-c-single-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-freebsd-cxx-cmplx-64idx-dbg.py b/config/examples/arch-ci-freebsd-cxx-cmplx-64idx-dbg.py index 3da23bb5e13..5e561875a98 100755 --- a/config/examples/arch-ci-freebsd-cxx-cmplx-64idx-dbg.py +++ b/config/examples/arch-ci-freebsd-cxx-cmplx-64idx-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-freebsd-cxx-cmplx-pkgs-dbg.py b/config/examples/arch-ci-freebsd-cxx-cmplx-pkgs-dbg.py index 4ead1ee279c..f73f1f43d75 100755 --- a/config/examples/arch-ci-freebsd-cxx-cmplx-pkgs-dbg.py +++ b/config/examples/arch-ci-freebsd-cxx-cmplx-pkgs-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py b/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py index 63555968c7a..86e01172171 100755 --- a/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py +++ b/config/examples/arch-ci-freebsd-cxx-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -34,6 +33,7 @@ '--download-saws', '--download-codipack=1', '--download-adblaslapack=1', + '--download-hpddm=1' ] if __name__ == '__main__': diff --git a/config/examples/arch-ci-freebsd-pkgs-opt.py b/config/examples/arch-ci-freebsd-pkgs-opt.py index bc925d52504..aab27501211 100755 --- a/config/examples/arch-ci-freebsd-pkgs-opt.py +++ b/config/examples/arch-ci-freebsd-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git 
a/config/examples/arch-ci-linux-64idx-i8-uni.py b/config/examples/arch-ci-linux-64idx-i8-uni.py new file mode 100755 index 00000000000..b4b3b576abb --- /dev/null +++ b/config/examples/arch-ci-linux-64idx-i8-uni.py @@ -0,0 +1,18 @@ +#!/usr/bin/python + +import os +petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') + +if __name__ == '__main__': + import sys + import os + sys.path.insert(0, os.path.abspath('config')) + import configure + configure_options = [ + '--package-prefix-hash='+petsc_hash_pkgs, + '--with-64-bit-indices', + 'FFLAGS=-Wall -ffree-line-length-0 -Wno-unused-dummy-argument -fdefault-integer-8', + '--with-mpi=0' + ] + configure.petsc_configure(configure_options) + diff --git a/config/examples/arch-ci-linux-64idx-i8.py b/config/examples/arch-ci-linux-64idx-i8.py new file mode 100755 index 00000000000..b471d76e00c --- /dev/null +++ b/config/examples/arch-ci-linux-64idx-i8.py @@ -0,0 +1,18 @@ +#!/usr/bin/python3 + +import os +petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') + +if __name__ == '__main__': + import sys + import os + sys.path.insert(0, os.path.abspath('config')) + import configure + configure_options = [ + '--package-prefix-hash='+petsc_hash_pkgs, + '--with-64-bit-indices', + 'FFLAGS=-Wall -ffree-line-length-0 -Wno-unused-dummy-argument -fdefault-integer-8', + '--with-mpi-dir=/nfs/gce/projects/petsc/soft/gcc-7.4.0/mpich-3.3.2', + ] + configure.petsc_configure(configure_options) + diff --git a/config/examples/arch-ci-linux-ILP64.py b/config/examples/arch-ci-linux-ILP64.py index 86d64d31aab..035dcc57069 100755 --- a/config/examples/arch-ci-linux-ILP64.py +++ b/config/examples/arch-ci-linux-ILP64.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-analyzer.py b/config/examples/arch-ci-linux-analyzer.py index e71709b6319..fb7d4be32a9 100755 --- a/config/examples/arch-ci-linux-analyzer.py +++ b/config/examples/arch-ci-linux-analyzer.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -25,6 +24,7 @@ '--download-metis=1', '--download-parmetis=1', '--download-pastix=1', + '--download-hwloc=1', '--download-ptscotch=1', '--download-superlu_dist=1' ] diff --git a/config/examples/arch-ci-linux-c-exodus-dbg.py b/config/examples/arch-ci-linux-c-exodus-dbg.py index eedfdb20965..84388ab357c 100755 --- a/config/examples/arch-ci-linux-c-exodus-dbg.py +++ b/config/examples/arch-ci-linux-c-exodus-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys @@ -11,6 +10,7 @@ import configure configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, + '--with-make-test-np=3', 'COPTFLAGS=-g -O', 'FOPTFLAGS=-g -O', 'CXXOPTFLAGS=-g -O', @@ -34,4 +34,5 @@ '--with-cuda', '--with-shared-libraries', ] + configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-linux-clang-avx.py b/config/examples/arch-ci-linux-clang-avx.py index 56bf68036ab..5a950ad940e 100755 --- a/config/examples/arch-ci-linux-clang-avx.py +++ b/config/examples/arch-ci-linux-clang-avx.py @@ -2,7 +2,6 @@ import os 
petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-linux-cmplx-gcov.py b/config/examples/arch-ci-linux-cmplx-gcov.py index 809fba303ed..b2327d0810c 100755 --- a/config/examples/arch-ci-linux-cmplx-gcov.py +++ b/config/examples/arch-ci-linux-cmplx-gcov.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-cmplx-single.py b/config/examples/arch-ci-linux-cmplx-single.py index 548f6f5f2a9..7c7fdb6eb2c 100755 --- a/config/examples/arch-ci-linux-cmplx-single.py +++ b/config/examples/arch-ci-linux-cmplx-single.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-cuda-double.py b/config/examples/arch-ci-linux-cuda-double.py index a4e53bb94bb..b7720c7357b 100755 --- a/config/examples/arch-ci-linux-cuda-double.py +++ b/config/examples/arch-ci-linux-cuda-double.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys @@ -11,7 +10,8 @@ import configure configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, - '--with-mpi-dir=/home/petsc/soft/openmpi-4.0.1-cuda', + '--with-make-test-np=2', + '--with-mpi-dir=/home/petsc/soft/openmpi-4.0.2-cuda', 'COPTFLAGS=-g -O', 'FOPTFLAGS=-g -O', 'CXXOPTFLAGS=-g -O', @@ -22,4 +22,5 @@ # on Mac OS X, MSVC on Windows), you must set -ccbin appropriately in CUDAFLAGS, as in the example for PGI below: # 'CUDAFLAGS=-ccbin pgc++', ] + configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-linux-cuda-single.py b/config/examples/arch-ci-linux-cuda-single.py index cad5c25eb47..63b7612f11f 100755 --- a/config/examples/arch-ci-linux-cuda-single.py +++ b/config/examples/arch-ci-linux-cuda-single.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys @@ -11,6 +10,7 @@ import configure configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, + '--with-make-test-np=4', 'COPTFLAGS=-g -O', 'FOPTFLAGS=-g -O', 'CXXOPTFLAGS=-g -O', @@ -19,8 +19,11 @@ '--download-openblas', # default ATLAS blas on Ubuntu 14.04 breaks runex76 in src/mat/examples/tests '--download-openblas-make-options=TARGET=GENERIC', '--with-clanguage=c', + '--with-single-library=0', + '--with-visibility=1', # Note: If using nvcc with a host compiler other than the CUDA SDK default for your platform (GCC on Linux, clang # on Mac OS X, MSVC on Windows), you must set -ccbin appropriately in CUDAFLAGS, as in the example for PGI below: # 'CUDAFLAGS=-ccbin pgc++', ] + configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-linux-cxx-cmplx-pkgs-64idx.py b/config/examples/arch-ci-linux-cxx-cmplx-pkgs-64idx.py index 3cc00c8d2be..d9d8792e34e 100755 --- a/config/examples/arch-ci-linux-cxx-cmplx-pkgs-64idx.py +++ b/config/examples/arch-ci-linux-cxx-cmplx-pkgs-64idx.py @@ -2,7 +2,6 @@ import os 
petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -26,6 +25,7 @@ '--download-metis=1', '--download-parmetis=1', '--download-pastix=1', + '--download-hwloc', '--download-ptscotch=1', '--download-superlu_dist=1', '--download-elemental=1', diff --git a/config/examples/arch-ci-linux-gcc-complex-opt.py b/config/examples/arch-ci-linux-gcc-complex-opt.py index 099e192b980..ad3cf0b40a1 100755 --- a/config/examples/arch-ci-linux-gcc-complex-opt.py +++ b/config/examples/arch-ci-linux-gcc-complex-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-gcc-cxx-avx2.py b/config/examples/arch-ci-linux-gcc-cxx-avx2.py index 3fb2857a7e4..ad9659eec9e 100755 --- a/config/examples/arch-ci-linux-gcc-cxx-avx2.py +++ b/config/examples/arch-ci-linux-gcc-cxx-avx2.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-linux-gcc-ifc-cmplx.py b/config/examples/arch-ci-linux-gcc-ifc-cmplx.py index ff804330469..19686e6805c 100755 --- a/config/examples/arch-ci-linux-gcc-ifc-cmplx.py +++ b/config/examples/arch-ci-linux-gcc-ifc-cmplx.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) # find the ifort libs location import os diff --git a/config/examples/arch-ci-linux-gcc-pkgs-opt.py b/config/examples/arch-ci-linux-gcc-pkgs-opt.py index cd1e5773673..9112b723aaa 100755 --- a/config/examples/arch-ci-linux-gcc-pkgs-opt.py +++ b/config/examples/arch-ci-linux-gcc-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -24,6 +23,7 @@ '--download-triangle', '--download-chaco', '--download-ctetgen', + '--download-egads', ] if __name__ == '__main__': diff --git a/config/examples/arch-ci-linux-gcc-quad-64idx-dbg.py b/config/examples/arch-ci-linux-gcc-quad-64idx-dbg.py index 25927c1fb60..e13b3e2f2a6 100755 --- a/config/examples/arch-ci-linux-gcc-quad-64idx-dbg.py +++ b/config/examples/arch-ci-linux-gcc-quad-64idx-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-gcov.py b/config/examples/arch-ci-linux-gcov.py index 079f4714cbe..f46703e3a26 100755 --- a/config/examples/arch-ci-linux-gcov.py +++ b/config/examples/arch-ci-linux-gcov.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-intel-cmplx.py b/config/examples/arch-ci-linux-intel-cmplx.py new file mode 100755 index 00000000000..58b33261790 --- /dev/null +++ b/config/examples/arch-ci-linux-intel-cmplx.py @@ -0,0 +1,44 @@ 
+#!/usr/bin/python + +import os +petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') + +if __name__ == '__main__': + import sys + import os + sys.path.insert(0, os.path.abspath('config')) + import configure + configure_options = [ + '--package-prefix-hash='+petsc_hash_pkgs, + 'CC=icc', + 'CXX=icpc', + 'FC=ifort', + 'COPTFLAGS=-g -O', + 'FOPTFLAGS=-g -O', + 'CXXOPTFLAGS=-g -O', + '--with-scalar-type=complex', + '--with-blaslapack-dir='+os.environ['MKLROOT'], + '--with-mkl_pardiso-dir='+os.environ['MKLROOT'], + '--with-mkl_sparse_optimize=0', + '--download-mpich', + '--download-chaco', + '--download-codipack', + '--download-ctetgen', + '--download-hdf5', + '--download-hypre', + '--download-metis', + '--download-mpi4py', + # '--download-mumps', + '--download-p4est', + '--download-parmetis', + '--download-petsc4py', + '--download-scalapack', + '--download-strumpack', + '--download-suitesparse', + '--download-superlu', + '--download-superlu_dist', + '--download-tetgen', + '--download-triangle', + '--download-zlib', + ] + configure.petsc_configure(configure_options) diff --git a/config/examples/arch-ci-linux-pardiso.py b/config/examples/arch-ci-linux-intel.py similarity index 79% rename from config/examples/arch-ci-linux-pardiso.py rename to config/examples/arch-ci-linux-intel.py index c5f4ad02aa6..94c5aa67a23 100755 --- a/config/examples/arch-ci-linux-pardiso.py +++ b/config/examples/arch-ci-linux-intel.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys @@ -17,12 +16,13 @@ 'COPTFLAGS=-g -O', 'FOPTFLAGS=-g -O', 'CXXOPTFLAGS=-g -O', - '--with-blaslapack-dir='+os.environ['MKL_HOME'], - '--with-mkl_pardiso-dir='+os.environ['MKL_HOME'], + '--with-blaslapack-dir='+os.environ['MKLROOT'], + '--with-mkl_pardiso-dir='+os.environ['MKLROOT'], '--with-mkl_sparse_optimize=0', '--download-mpich=1', '--download-triangle=1', '--download-ctetgen=1', + '--download-tetgen=1', '--download-p4est=1', '--download-zlib=1', '--download-codipack=1', diff --git a/config/examples/arch-ci-linux-knl.py b/config/examples/arch-ci-linux-knl.py index 43bf81901dd..dea5125858e 100755 --- a/config/examples/arch-ci-linux-knl.py +++ b/config/examples/arch-ci-linux-knl.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-linux-matlab-ilp64-gcov.py b/config/examples/arch-ci-linux-matlab-ilp64-gcov.py index 8d0e6fc2f36..82bf87b508e 100755 --- a/config/examples/arch-ci-linux-matlab-ilp64-gcov.py +++ b/config/examples/arch-ci-linux-matlab-ilp64-gcov.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) # This test is done on grind.mcs.anl.gov. It uses ILP64 MKL/BLAS packaged # with MATLAB. 
diff --git a/config/examples/arch-ci-linux-nagfor.py b/config/examples/arch-ci-linux-nagfor.py index 18bb34681b3..20f1191f3ff 100755 --- a/config/examples/arch-ci-linux-nagfor.py +++ b/config/examples/arch-ci-linux-nagfor.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-opt-cxx-quad.py b/config/examples/arch-ci-linux-opt-cxx-quad.py index 503a9d26a9b..db3a2fe4314 100755 --- a/config/examples/arch-ci-linux-opt-cxx-quad.py +++ b/config/examples/arch-ci-linux-opt-cxx-quad.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-opt-misc.py b/config/examples/arch-ci-linux-opt-misc.py index 2231dd0e84c..8cae17670ec 100755 --- a/config/examples/arch-ci-linux-opt-misc.py +++ b/config/examples/arch-ci-linux-opt-misc.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-pgi.py b/config/examples/arch-ci-linux-pgi.py index 52280d405c1..b8a5deface4 100755 --- a/config/examples/arch-ci-linux-pgi.py +++ b/config/examples/arch-ci-linux-pgi.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-pkgs-64idx.py b/config/examples/arch-ci-linux-pkgs-64idx.py index f5d8f1b7af4..d226d611fda 100755 --- a/config/examples/arch-ci-linux-pkgs-64idx.py +++ b/config/examples/arch-ci-linux-pkgs-64idx.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -13,6 +12,7 @@ '--download-openmpi=1', #download-mpich works - but system mpich gives wierd errors with superlu_dist+parmeits [with shared/64-bit-indices]? 
'--download-metis=1', '--download-parmetis=1', + '--download-hwloc=1', '--download-pastix=1', '--download-ptscotch=1', '--download-hypre=1', diff --git a/config/examples/arch-ci-linux-pkgs-cxx-mlib.py b/config/examples/arch-ci-linux-pkgs-cxx-mlib.py index 85fa1a73f73..69abb04fe46 100755 --- a/config/examples/arch-ci-linux-pkgs-cxx-mlib.py +++ b/config/examples/arch-ci-linux-pkgs-cxx-mlib.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -34,10 +33,12 @@ '--download-adios=1', '--with-zlib=1', '--download-szlib=1', + '--download-zstd=1', '--download-moab=1', '--download-petsc4py=1', '--download-mpi4py=1', '--download-saws', + '--download-egads', '--package-prefix-hash='+petsc_hash_pkgs, ] diff --git a/config/examples/arch-ci-linux-pkgs-dbg-ftn-interfaces.py b/config/examples/arch-ci-linux-pkgs-dbg-ftn-interfaces.py index 4a8ac8b9b8c..74c0222bcb5 100755 --- a/config/examples/arch-ci-linux-pkgs-dbg-ftn-interfaces.py +++ b/config/examples/arch-ci-linux-pkgs-dbg-ftn-interfaces.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-pkgs-gcov.py b/config/examples/arch-ci-linux-pkgs-gcov.py index a15a9890da6..b9a555b312f 100755 --- a/config/examples/arch-ci-linux-pkgs-gcov.py +++ b/config/examples/arch-ci-linux-pkgs-gcov.py @@ -2,8 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) - configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -25,7 +23,8 @@ '--download-spai=1', '--download-parms=1', '--download-chaco=1', - '--download-pastix', + '--download-pastix=1', + '--download-hwloc=1', '--download-ctetgen', '--download-netcdf', '--download-hdf5', @@ -38,6 +37,8 @@ '--download-ml', '--download-sundials', '--download-p4est=1', + '--download-eigen', + '--download-pragmatic', ] if __name__ == '__main__': diff --git a/config/examples/arch-ci-linux-pkgs-opt.py b/config/examples/arch-ci-linux-pkgs-opt.py index 8f37dd94649..36d4eb85d60 100755 --- a/config/examples/arch-ci-linux-pkgs-opt.py +++ b/config/examples/arch-ci-linux-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-pkgs-valgrind.py b/config/examples/arch-ci-linux-pkgs-valgrind.py index bb78f1092d7..52fc22d93cb 100755 --- a/config/examples/arch-ci-linux-pkgs-valgrind.py +++ b/config/examples/arch-ci-linux-pkgs-valgrind.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, @@ -26,7 +25,7 @@ '--download-elemental=1', #'--download-spai=1', valgrind leaks here will probably not get fixed in the near future '--download-parms=1', - '--download-moab=1', + #'--download-moab=1', '--download-chaco=1', '--download-revolve=1', '--download-codipack=1', diff --git a/config/examples/arch-ci-linux-viennacl.py b/config/examples/arch-ci-linux-viennacl.py index 
6e03dd70b60..3aa5c4780e8 100755 --- a/config/examples/arch-ci-linux-viennacl.py +++ b/config/examples/arch-ci-linux-viennacl.py @@ -11,6 +11,7 @@ '--download-viennacl', '--with-opencl-include=/usr/local/cuda/include', '--with-opencl-lib=-L/usr/local/cuda/lib64 -lOpenCL', + '--with-cuda=1', '--download-codipack=1', '--download-adblaslapack=1', ] diff --git a/config/examples/arch-ci-linux-without-fc.py b/config/examples/arch-ci-linux-without-fc.py index 28cb0ecaf32..18a2f4d4b12 100755 --- a/config/examples/arch-ci-linux-without-fc.py +++ b/config/examples/arch-ci-linux-without-fc.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-linux-xsdk-dbg.py b/config/examples/arch-ci-linux-xsdk-dbg.py index 29805545693..e7dd40c2a8e 100755 --- a/config/examples/arch-ci-linux-xsdk-dbg.py +++ b/config/examples/arch-ci-linux-xsdk-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/tmp/arch-mswin-gnu.py b/config/examples/arch-ci-mswin-gnu.py similarity index 69% rename from config/examples/tmp/arch-mswin-gnu.py rename to config/examples/arch-ci-mswin-gnu.py index a71fd2e6787..b5971567854 100755 --- a/config/examples/tmp/arch-mswin-gnu.py +++ b/config/examples/arch-ci-mswin-gnu.py @@ -1,9 +1,7 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 configure_options = [ - # Blas autodetec with cygwin blas at /usr/lib/liblapack,a,libblas.a - '--with-mpi-dir=/home/petsc/soft/mpich-3.1', - '--with-shared-libraries=0', + # Autodetect cygwin blas/lapack, OpenMPI '--with-debugging=0', # not using -g so that the binaries are smaller 'COPTFLAGS=-O', diff --git a/config/examples/arch-ci-mswin-intel-cxx-cmplx.py b/config/examples/arch-ci-mswin-intel-cxx-cmplx.py index 91f04c247b6..b70a55bb223 100755 --- a/config/examples/arch-ci-mswin-intel-cxx-cmplx.py +++ b/config/examples/arch-ci-mswin-intel-cxx-cmplx.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-mswin-intel.py b/config/examples/arch-ci-mswin-intel.py index 8e7afd29cc7..37c23581073 100755 --- a/config/examples/arch-ci-mswin-intel.py +++ b/config/examples/arch-ci-mswin-intel.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-mswin-opt-impi.py b/config/examples/arch-ci-mswin-opt-impi.py index e39aa9c5176..705c16097fa 100755 --- a/config/examples/arch-ci-mswin-opt-impi.py +++ b/config/examples/arch-ci-mswin-opt-impi.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-mswin-uni.py b/config/examples/arch-ci-mswin-uni.py index 34913a42413..646cd5fb2f7 100755 --- a/config/examples/arch-ci-mswin-uni.py +++ b/config/examples/arch-ci-mswin-uni.py @@ -2,7 +2,6 @@ import os 
petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) if __name__ == '__main__': import sys diff --git a/config/examples/arch-ci-opensolaris-cmplx-pkgs-dbg.py b/config/examples/arch-ci-opensolaris-cmplx-pkgs-dbg.py index 19678b7ca11..7d24f4aad92 100755 --- a/config/examples/arch-ci-opensolaris-cmplx-pkgs-dbg.py +++ b/config/examples/arch-ci-opensolaris-cmplx-pkgs-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-opensolaris-misc.py b/config/examples/arch-ci-opensolaris-misc.py index fc45d39ee6c..8a11461d04c 100755 --- a/config/examples/arch-ci-opensolaris-misc.py +++ b/config/examples/arch-ci-opensolaris-misc.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-opensolaris-pkgs-opt.py b/config/examples/arch-ci-opensolaris-pkgs-opt.py index 3a926522681..f2fe4cae16d 100755 --- a/config/examples/arch-ci-opensolaris-pkgs-opt.py +++ b/config/examples/arch-ci-opensolaris-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-osx-cxx-cmplx-pkgs-dbg.py b/config/examples/arch-ci-osx-cxx-cmplx-pkgs-dbg.py index 0d08d4d00bb..febb72cee10 100755 --- a/config/examples/arch-ci-osx-cxx-cmplx-pkgs-dbg.py +++ b/config/examples/arch-ci-osx-cxx-cmplx-pkgs-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-osx-cxx-pkgs-opt.py b/config/examples/arch-ci-osx-cxx-pkgs-opt.py index 0b38b14b112..65f181bf129 100755 --- a/config/examples/arch-ci-osx-cxx-pkgs-opt.py +++ b/config/examples/arch-ci-osx-cxx-pkgs-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) # moab appears to break with -with-visibility=1 - so disable it @@ -12,7 +11,7 @@ '--with-cxx=clang++', '--with-fc=gfortran', # https://brew.sh/ - 'CXXFLAGS=-Wall -Wwrite-strings -Wno-strict-aliasing -Wno-unknown-pragmas -fstack-protector -Wno-deprecated', + 'CXXFLAGS=-Wall -Wwrite-strings -Wno-strict-aliasing -Wno-unknown-pragmas -fstack-protector -Wno-deprecated -fno-stack-check', '--with-clanguage=cxx', '--with-debugging=0', '--with-visibility=0', # CXXFLAGS disables this option diff --git a/config/examples/arch-ci-osx-dbg.py b/config/examples/arch-ci-osx-dbg.py index 00fd31534ab..34190ac70fa 100755 --- a/config/examples/arch-ci-osx-dbg.py +++ b/config/examples/arch-ci-osx-dbg.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/examples/arch-ci-osx-xsdk-opt.py b/config/examples/arch-ci-osx-xsdk-opt.py index 340e1453fbd..657af590968 100755 --- 
a/config/examples/arch-ci-osx-xsdk-opt.py +++ b/config/examples/arch-ci-osx-xsdk-opt.py @@ -2,7 +2,6 @@ import os petsc_hash_pkgs=os.path.join(os.getenv('HOME'),'petsc-hash-pkgs') -if not os.path.isdir(petsc_hash_pkgs): os.mkdir(petsc_hash_pkgs) configure_options = [ '--package-prefix-hash='+petsc_hash_pkgs, diff --git a/config/gmakegen.py b/config/gmakegen.py index 139094fd4aa..257f8702faa 100755 --- a/config/gmakegen.py +++ b/config/gmakegen.py @@ -46,7 +46,7 @@ def compareSourceLists(self, root, msources, files): if NOWARNDIRS.intersection(pathsplit(root)): return smsources = set(msources) - ssources = set(f for f in files if os.path.splitext(f)[1] in ['.c', '.cxx', '.cc', '.cu', '.cpp', '.F']) + ssources = set(f for f in files if os.path.splitext(f)[1] in ['.c', '.cxx', '.cc', '.cu', '.cpp', '.F', '.F90']) if not smsources.issubset(ssources): self.mistakes.append('Makefile contains file not on filesystem: %s: %r' % (root, sorted(smsources - ssources))) if not self.verbose: return diff --git a/config/gmakegentest.py b/config/gmakegentest.py index 85c46752a41..755439bc8d1 100755 --- a/config/gmakegentest.py +++ b/config/gmakegentest.py @@ -1,6 +1,7 @@ #!/usr/bin/env python from __future__ import print_function +import pickle import os,shutil, string, re import sys import logging, time @@ -73,6 +74,16 @@ def install_files(source, destdir): else: shutil.copyfile(source, os.path.join(destdir, os.path.basename(source))) +def nameSpace(srcfile,srcdir): + """ + Because the scripts have a non-unique naming, the pretty-printing + needs to convey the srcdir and srcfile. There are two ways of doing this. + """ + if srcfile.startswith('run'): srcfile=re.sub('^run','',srcfile) + prefix=srcdir.replace('/examples/','_').replace("/","_")+"-" + nameString=prefix+srcfile + return nameString + class generateExamples(Petsc): """ gmakegen.py has basic structure for finding the files, writing out @@ -106,7 +117,6 @@ def __init__(self,petsc_dir=None, petsc_arch=None, pkg_dir=None, pkg_arch=None, self.testroot_dir=os.path.abspath(testdir) - self.ptNaming=True self.verbose=verbose # Whether to write out a useful debugging self.summarize=True if verbose else False @@ -163,21 +173,6 @@ def getInInstallDir(self,thisscriptdir): else: return False - def nameSpace(self,srcfile,srcdir): - """ - Because the scripts have a non-unique naming, the pretty-printing - needs to convey the srcdir and srcfile. There are two ways of doing this. 
- """ - if self.ptNaming: - if srcfile.startswith('run'): srcfile=re.sub('^run','',srcfile) - cdir=srcdir - prefix=cdir.replace('/examples/','_').replace("/","_")+"-" - nameString=prefix+srcfile - else: - #nameString=srcdir+": "+srcfile - nameString=srcfile - return nameString - def getLanguage(self,srcfile): """ Based on the source, determine associated language as found in gmakegen.LANGS @@ -206,7 +201,7 @@ def _getLoopVars(self,inDict,testname, isSubtest=False): subst should be passed in instead of inDict """ loopVars={}; newargs=[] - lsuffix='_' + lsuffix='+' argregex = re.compile(' (?=-[a-zA-Z])') from testparse import parseLoopArgs for key in inDict: @@ -243,13 +238,13 @@ def _getLoopVars(self,inDict,testname, isSubtest=False): inDict['subargs'] += " "+" ".join(newargs) inDict['args']='' if 'label_suffix' in inDict: - inDict['label_suffix']+=lsuffix.rstrip('_') + inDict['label_suffix']+=lsuffix.rstrip('+').rstrip('_') else: - inDict['label_suffix']=lsuffix.rstrip('_') + inDict['label_suffix']=lsuffix.rstrip('+').rstrip('_') else: if loopVars: inDict['args'] = ' '.join(newargs) - inDict['label_suffix']=lsuffix.rstrip('_') + inDict['label_suffix']=lsuffix.rstrip('+').rstrip('_') return loopVars def getArgLabel(self,testDict): @@ -343,8 +338,13 @@ def getSubstVars(self,testDict,rpath,testname): # Now do other variables subst['execname']=testDict['execname'] + subst['error']='' if 'filter' in testDict: - subst['filter']="'"+testDict['filter']+"'" # Quotes are tricky - overwrite + if testDict['filter'].startswith("Error:"): + subst['error']="Error" + subst['filter']=testDict['filter'].lstrip("Error:") + else: + subst['filter']=testDict['filter'] # Others subst['subargs']='' # Default. For variables override @@ -383,7 +383,7 @@ def getSubstVars(self,testDict,rpath,testname): if 'output_file' not in testDict: subst['output_file']="output/"+defroot+".out" subst['redirect_file']=defroot+".tmp" - subst['label']=self.nameSpace(defroot,self.srcrelpath(subst['srcdir'])) + subst['label']=nameSpace(defroot,self.srcrelpath(subst['srcdir'])) # Add in the full path here. 
subst['output_file']=os.path.join(subst['srcdir'],subst['output_file']) @@ -422,7 +422,7 @@ def _substVars(self,subst,origStr): if subkey=='regexes': continue if not isinstance(subst[subkey],str): continue if subkey.upper() not in Str: continue - Str=subst['regexes'][subkey].sub(subst[subkey],Str) + Str=subst['regexes'][subkey].sub(lambda x: subst[subkey],Str) return Str def getCmds(self,subst,i): @@ -443,24 +443,21 @@ def getCmds(self,subst,i): cmdLines+=cmdindnt+'if test $res = 0; then\n' diffindnt=self.indent*(nindnt+1) - if not subst['filter_output']: - if 'altfiles' not in subst: - cmd=diffindnt+self._substVars(subst,example_template.difftest) - else: - # Have to do it by hand a bit because of variable number of alt files - rf=subst['redirect_file'] - cmd=diffindnt+example_template.difftest.split('@')[0] - for i in range(len(subst['altfiles'])): - af=subst['altfiles'][i] - cmd+=af+' '+rf - if i!=len(subst['altfiles'])-1: - cmd+=' > diff-${testname}-'+str(i)+'.out 2> diff-${testname}-'+str(i)+'.out' - cmd+=' || ${diff_exe} ' - else: - cmd+='" diff-${testname}.out diff-${testname}.out diff-${label}' - cmd+=subst['label_suffix']+' ""' # Quotes are painful + if 'altfiles' not in subst: + cmd=diffindnt+self._substVars(subst,example_template.difftest) else: - cmd=diffindnt+self._substVars(subst,example_template.filterdifftest) + # Have to do it by hand a bit because of variable number of alt files + rf=subst['redirect_file'] + cmd=diffindnt+example_template.difftest.split('@')[0] + for i in range(len(subst['altfiles'])): + af=subst['altfiles'][i] + cmd+=af+' '+rf + if i!=len(subst['altfiles'])-1: + cmd+=' > diff-${testname}-'+str(i)+'.out 2> diff-${testname}-'+str(i)+'.out' + cmd+=' || ${diff_exe} ' + else: + cmd+='" diff-${testname}.out diff-${testname}.out diff-${label}' + cmd+=subst['label_suffix']+' ""' # Quotes are painful cmdLines+=cmd+"\n" cmdLines+=cmdindnt+'else\n' cmdLines+=diffindnt+'petsc_report_tapoutput "" ${label} "SKIP Command failed so no diff"\n' @@ -584,7 +581,7 @@ def genRunScript(self,testname,root,isRun,srcDict): for stest in testDict["subtests"]: subst=substP.copy() subst.update(testDict[stest]) - subst['label_suffix']='-'+string.ascii_letters[k]; k+=1 + subst['label_suffix']='+'+string.ascii_letters[k]; k+=1 sLoopVars = self._getLoopVars(subst,testname,isSubtest=True) if sLoopVars: (sLoopHead,j) = self.getLoopVarsHead(sLoopVars,j,allLoopVars) @@ -623,7 +620,7 @@ def genScriptsAndInfo(self,exfile,root,srcDict): isBuilt=self._isBuilt(exfile,srcDict) for test in srcDict: if test in self.buildkeys: continue - if debug: print(self.nameSpace(exfile,root), test) + if debug: print(nameSpace(exfile,root), test) srcDict[test]['execname']=execname # Convenience in generating scripts isRun=self._isRun(srcDict[test]) self.genRunScript(test,root,isRun,srcDict) @@ -974,7 +971,7 @@ def write_gnumake(self, dataDict, output=None): for ftest in self.tests[pkg][lang]: test=os.path.basename(ftest) basedir=os.path.dirname(ftest) - testdeps.append(self.nameSpace(test,basedir)) + testdeps.append(nameSpace(test,basedir)) fd.write("test-"+pkg+"."+lang+" := "+' '.join(testdeps)+"\n") fd.write('test-%s.%s : $(test-%s.%s)\n' % (pkg, lang, pkg, lang)) @@ -983,7 +980,7 @@ def write_gnumake(self, dataDict, output=None): test=os.path.basename(ftest) basedir=os.path.dirname(ftest) testdir="${TESTDIR}/"+basedir+"/" - nmtest=self.nameSpace(test,basedir) + nmtest=nameSpace(test,basedir) rundir=os.path.join(testdir,test) script=test+".sh" @@ -1013,6 +1010,15 @@ def write_gnumake(self, dataDict, 
output=None): fd.close() return + def write_db(self, dataDict, testdir): + """ + Write out the dataDict into a pickle file + """ + fd = open(os.path.join(testdir,'datatest.pkl'), 'wb') + pickle.dump(dataDict,fd) + fd.close() + return + def main(petsc_dir=None, petsc_arch=None, pkg_dir=None, pkg_arch=None, pkg_name=None, pkg_pkgs=None, verbose=False, single_ex=False, srcdir=None, testdir=None, check=False): @@ -1031,6 +1037,7 @@ def main(petsc_dir=None, petsc_arch=None, pkg_dir=None, pkg_arch=None, dataDict=pEx.walktree(os.path.join(pEx.srcdir)) if not pEx.check_output: pEx.write_gnumake(dataDict, output) + pEx.write_db(dataDict, testdir) if __name__ == '__main__': import optparse diff --git a/config/petsc_harness.sh b/config/petsc_harness.sh index 75256b374a8..dec7d84095b 100644 --- a/config/petsc_harness.sh +++ b/config/petsc_harness.sh @@ -68,10 +68,10 @@ do f ) force=true ;; h ) print_usage; exit ;; n ) nsize="$OPTARG" ;; - j ) diff_flags="-j" ;; - J ) diff_flags="-J $OPTARG" ;; - m ) diff_flags="-m" ;; - M ) diff_flags="-M" ;; + j ) diff_flags=$diff_flags" -j" ;; + J ) diff_flags=$diff_flags" -J $OPTARG" ;; + m ) diff_flags=$diff_flags" -m" ;; + M ) diff_flags=$diff_flags" -M" ;; o ) output_fmt=$OPTARG ;; t ) TIMEOUT=$OPTARG ;; V ) mpiexec="petsc_mpiexec_valgrind $mpiexec" ;; @@ -97,6 +97,12 @@ fi if $debugger; then args="-start_in_debugger $args" fi +if test -n "$filter"; then + diff_flags=$diff_flags" -F \$'$filter'" +fi +if test -n "$filter_output"; then + diff_flags=$diff_flags" -f \$'$filter_output'" +fi # Init @@ -132,16 +138,12 @@ function petsc_testrun() { # Second arg = stdout file # Third arg = stderr file # Fourth arg = label for reporting - # Fifth arg = Filter rmfiles="${rmfiles} $2 $3" tlabel=$4 - filter=$5 + error=$5 cmd="$1 > $2 2> $3" - if test -n "$filter"; then - if test "${filter:0:6}"=="Error:"; then - filter=${filter##Error:} - cmd="$1 2>&1 | cat > $2" - fi + if test -n "$error"; then + cmd="$1 2>&1 | cat > $2" fi echo "$cmd" > ${tlabel}.sh; chmod 755 ${tlabel}.sh @@ -162,13 +164,6 @@ function petsc_testrun() { fi fi - # Handle filters separately and assume no timeout check needed - if test -n "$filter"; then - cmd="cat $2 | $filter > $2.tmp 2>> $3 ; mv $2.tmp $2" - echo "$cmd" >> ${tlabel}.sh - eval "$cmd" - fi - # Report errors comment="" if test $cmd_res == 0; then @@ -187,11 +182,13 @@ function petsc_testrun() { # Report errors in detail if [ -z "$timed_out" ]; then - # We've had tests fail but stderr->stdout. Fix with this test. - if test -s $3; then + # We've had tests fail but stderr->stdout, as well as having + # mpi_abort go to stderr which throws this test off. 
Show both + # with stdout first + awk '{print "#\t" $0}' < $2 | tee -a ${testlogerrfile} + # if statement is for diff tests + if test "$2" != "$3"; then awk '{print "#\t" $0}' < $3 | tee -a ${testlogerrfile} - else - awk '{print "#\t" $0}' < $2 | tee -a ${testlogerrfile} fi fi let failed=$failed+1 @@ -234,7 +231,7 @@ function petsc_mpiexec_valgrind() { npopt=$1;shift np=$1;shift - valgrind="valgrind -q --tool=memcheck --leak-check=yes --num-callers=20 --track-origins=yes --suppressions=$petsc_bindir/maint/petsc-val.supp" + valgrind="valgrind -q --tool=memcheck --leak-check=yes --num-callers=20 --track-origins=yes --suppressions=$petsc_bindir/maint/petsc-val.supp --error-exitcode=10" $mpiexec $npopt $np $valgrind $* } diff --git a/config/query_tests.py b/config/query_tests.py new file mode 100755 index 00000000000..65200f976e3 --- /dev/null +++ b/config/query_tests.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python +import fnmatch +import glob +import inspect +import os +import optparse +import pickle +import re +import sys + +thisfile = os.path.abspath(inspect.getfile(inspect.currentframe())) +pdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(thisfile))))) +sys.path.insert(0, os.path.join(pdir, 'config')) + +import testparse +from gmakegentest import nameSpace + + +""" + Tool for querying the tests. + + Which tests to query? Two options: + 1. Query only the tests that are run for a given configuration. + 2. Query all of the test files in the source directory + For #1: + Use dataDict as written out by gmakegentest.py in $PETSC_ARCH/$TESTBASE + For #2: + Walk the entire tree parsing the files as we go along using testparse. + The tree walker is simpler than what is in gmakegentest.py + + The dataDict follows that generated by testparse. gmakegentest.py does + further manipulations of the dataDict to handle things like for loops + so if using #2, those modifications are not included. + + Querying: + The dataDict dictionary is then "inverted" to create a dictionary with the + range of field values as keys and list test names as the values. This + allows fast searching + +""" + +def query(invDict,label): + """ + Search the keys using fnmatch to find matching names and return list with + the results + """ + results=[] + for key in invDict: + if fnmatch.filter([key],label): + # Do not return values with not unless label itself has not + if label.startswith('!') and not key.startswith('!'): continue + if not label.startswith('!') and key.startswith('!'): continue + results += invDict[key] + + return results + +def get_inverse_dictionary(dataDict,field,srcdir): + """ + Create a dictionary with the values of field as the keys, and the name of + the tests as the results. 
+ """ + invDict={} + for root in dataDict: + for exfile in dataDict[root]: + for test in dataDict[root][exfile]: + if field not in dataDict[root][exfile][test]: continue + defroot = testparse.getDefaultOutputFileRoot(test) + name=nameSpace(defroot,os.path.relpath(root,srcdir)) + values=dataDict[root][exfile][test][field] + + for val in values.split(): + if val in invDict: + invDict[val].append(name) + else: + invDict[val] = [name] + return invDict + +def get_gmakegentest_data(testdir): + """ + Write out the dataDict into a pickle file + """ + # This needs to be consistent with gmakegentest.py of course + fd = open(os.path.join(testdir,'datatest.pkl'), 'rb') + dataDict=pickle.load(fd) + fd.close() + return dataDict + +def walktree(top): + """ + Walk a directory tree, starting from 'top' + """ + verbose = False + dataDict = {} + alldatafiles = [] + for root, dirs, files in os.walk(top, topdown=False): + if "examples" not in root: continue + if root == 'output': continue + if '.dSYM' in root: continue + if verbose: print(root) + + dataDict[root] = {} + + for exfile in files: + # Ignore emacs files + if exfile.startswith("#") or exfile.startswith(".#"): continue + ext=os.path.splitext(exfile)[1] + if ext[1:] not in ['c','cxx','cpp','cu','F90','F']: continue + + # Convenience + fullex = os.path.join(root, exfile) + if verbose: print(' --> '+fullex) + dataDict[root].update(testparse.parseTestFile(fullex, 0)) + + return dataDict + +def do_query(use_source, startdir, srcdir, testdir, field, label): + """ + Do the actual query + This part of the code is placed here instead of main() + to show how one could translate this into ipython/jupyer notebook + commands for more advanced queries + """ + # Get dictionary + if use_source: + dataDict=walktree(startdir) + else: + dataDict=get_gmakegentest_data(testdir) + + # Get inverse dictionary for searching + invDict=get_inverse_dictionary(dataDict, field, srcdir) + + # Now do query + resList=query(invDict, label) + + # Print in flat list suitable for use by gmakefile.test + print(' '.join(resList)) + + return + +def main(): + parser = optparse.OptionParser(usage="%prog [options] field match_pattern") + parser.add_option('-s', '--startdir', dest='startdir', + help='Where to start the recursion if not srcdir', + default='') + parser.add_option('-p', '--petsc_dir', dest='petsc_dir', + help='Set PETSC_ARCH different from environment', + default=os.environ.get('PETSC_DIR')) + parser.add_option('-a', '--petsc-arch', dest='petsc_arch', + help='Set PETSC_ARCH different from environment', + default=os.environ.get('PETSC_ARCH')) + parser.add_option('--srcdir', dest='srcdir', + help='Set location of sources different from PETSC_DIR/src. Must be full path.', + default='src') + parser.add_option('-t', '--testdir', dest='testdir', + help='Test directory if not PETSC_ARCH/tests. 
Must be full path', + default='tests') + parser.add_option('-u', '--use-source', action="store_false", + dest='use_source', + help='Query all sources rather than those configured in PETSC_ARCH') + + opts, args = parser.parse_args() + + # Argument Sanity checks + if len(args) != 2: + parser.print_usage() + print('Arguments: ') + print(' field: Field to search for; e.g., requires') + print(' match_pattern: Matching pattern for field; e.g., cuda') + return + + # Process arguments and options -- mostly just paths here + field=args[0] + match=args[1] + + petsc_dir = opts.petsc_dir + petsc_arch = opts.petsc_arch + petsc_full_arch = os.path.join(petsc_dir, petsc_arch) + + if opts.srcdir == 'src': + petsc_full_src = os.path.join(petsc_dir, 'src') + else: + petsc_full_src = opts.srcdir + if opts.testdir == 'tests': + petsc_full_test = os.path.join(petsc_full_arch, 'tests') + else: + petsc_full_test = opts.testdir + if opts.startdir: + startdir=opts.startdir=petsc_full_src + else: + startdir=petsc_full_src + + # Options Sanity checks + if not os.path.isdir(petsc_dir): + print("PETSC_DIR must be a directory") + return + + if not opts.use_source: + if not os.path.isdir(petsc_full_arch): + print("PETSC_DIR/PETSC_ARCH must be a directory") + return + elif not os.path.isdir(petsc_full_test): + print("Testdir must be a directory"+petsc_full_test) + return + else: + if not os.path.isdir(petsc_full_src): + print("Source directory must be a directory"+petsc_full_src) + return + + # Do the actual query + do_query(opts.use_source, startdir, petsc_full_src, petsc_full_test, field, match) + + return + + +if __name__ == "__main__": + main() diff --git a/config/report_tests.py b/config/report_tests.py index 6951f2177b5..ad1287c2388 100755 --- a/config/report_tests.py +++ b/config/report_tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python from __future__ import print_function -import glob, os, re +import glob, os, re, stat import optparse import inspect @@ -78,26 +78,25 @@ def summarize_results(directory,make,ntime,etime): re.sub('cmd-','', re.sub('diff-','',failstr+' ')) ) - print(fail_targets) # Strip off characters from subtests fail_list=[] for failure in fail_targets.split(): - if failure.split('-')[1].count('_')>1: - froot=failure.split('-')[0] - flabel='_'.join(failure.split('-')[1].split('_')[0:1]) - fail_list.append(froot+'-'+flabel+'_*') - elif failure.count('-')>1: - fail_list.append('-'.join(failure.split('-')[:-1])) - else: - fail_list.append(failure) + fail_list.append(failure.split('+')[0]) fail_list=list(set(fail_list)) fail_targets=' '.join(fail_list) + # create simple little script + sfile=os.path.join(os.path.dirname(os.path.abspath(os.curdir)),'echofailures.sh') + with open(sfile,'w') as f: + f.write('echo '+fail_targets.strip()) + st = os.stat(sfile) + os.chmod(sfile, st.st_mode | stat.S_IEXEC) + #Make the message nice makefile="gmakefile.test" if inInstallDir() else "gmakefile" print("#\n# To rerun failed tests: ") - print("# "+make+" -f "+makefile+" test globsearch='" + fail_targets.strip()+"'") + print("# "+make+" -f "+makefile+" test test-fail=1") if ntime>0: print("#\n# Timing summary (actual test time / total CPU time): ") @@ -192,25 +191,28 @@ def get_test_data(directory): 'fullname':fname } # process the *.counts file and increment problem status trackers - if len(testdata[pkgname]['problems'][probname]['stderr'])>0: - testdata[pkgname]['errors'] += 1 with open(cfile, 'r') as f: for line in f: l = line.split() - if l[0] == 'failed': - testdata[pkgname]['problems'][probname][l[0]] = True - 
testdata[pkgname][l[0]] += 1 - elif l[0] == 'time': + if l[0] == 'time': if len(l)==1: continue testdata[pkgname]['problems'][probname][l[0]] = float(l[1]) testdata[pkgname][l[0]] += float(l[1]) - elif l[0] == 'skip': - testdata[pkgname]['problems'][probname][l[0]] = True - testdata[pkgname][l[0]] += 1 - elif l[0] not in testdata[pkgname].keys(): - continue + elif l[0] in testdata[pkgname].keys(): + # This block includes total, success, failed, skip, todo + num_int=int(l[1]) + testdata[pkgname][l[0]] += num_int + if l[0] in ['failed']: + # If non-zero error code and non-zero stderr, something wrong + if len(testdata[pkgname]['problems'][probname]['stderr'])>0: + if not num_int: num_int=1 + if num_int: + testdata[pkgname]['errors'] += 1 + testdata[pkgname]['problems'][probname][l[0]] = True + if l[0] in ['skip'] and num_int: + testdata[pkgname]['problems'][probname][l[0]] = True else: - testdata[pkgname][l[0]] += 1 + continue os.chdir(startdir) # Keep function in good state return testdata @@ -271,11 +273,17 @@ def generate_xml(testdata,directory): if p['skipped']: # if we got here, the TAP output shows a skipped test junit.write(' \n') - elif len(p['stderr'])>0: + elif p['failed']: # if we got here, the test crashed with an error # we show the stderr output under junit.write(' \n') junit.write("0: + for line in p['stdout']: + junit.write("%s\n"%line.rstrip()) + junit.write("\nstderr:\n") for line in p['stderr']: junit.write("%s\n"%line.rstrip()) junit.write("]]>") @@ -289,19 +297,6 @@ def generate_xml(testdata,directory): junit.write("%s\n"%line.rstrip()) junit.write("]]>") junit.write(' \n') - elif len(p['stdout'])>0: - # if we got here, the test succeeded so we just show the stdout - # for manual sanity-checks - junit.write(' \n') - junit.write("= 1024: - break - junit.write("]]>") - junit.write(' \n') junit.write(' \n') junit.write(' \n') junit.write('') diff --git a/gmakefile b/gmakefile index 2a52c5144d1..da379b2c73c 100644 --- a/gmakefile +++ b/gmakefile @@ -14,10 +14,10 @@ SONAME_FUNCTION ?= $(1).$(SL_LINKER_SUFFIX).$(2) # $(call SL_LINKER_FUNCTION,libfoo,abiversion,libversion) SL_LINKER_FUNCTION ?= -shared -Wl,-soname,$(call SONAME_FUNCTION,$(notdir $(1)),$(2)) -PETSC_VERSION_MAJOR := $(shell awk '/\#define PETSC_VERSION_MAJOR/{print $$3;}' ./include/petscversion.h) -PETSC_VERSION_MINOR := $(shell awk '/\#define PETSC_VERSION_MINOR/{print $$3;}' ./include/petscversion.h) -PETSC_VERSION_SUBMINOR := $(shell awk '/\#define PETSC_VERSION_SUBMINOR/{print $$3;}' ./include/petscversion.h) -PETSC_VERSION_RELEASE := $(shell awk '/\#define PETSC_VERSION_RELEASE/{print $$3;}' ./include/petscversion.h) +PETSC_VERSION_MAJOR := $(shell awk '/define PETSC_VERSION_MAJOR/{print $$3;}' ./include/petscversion.h) +PETSC_VERSION_MINOR := $(shell awk '/define PETSC_VERSION_MINOR/{print $$3;}' ./include/petscversion.h) +PETSC_VERSION_SUBMINOR := $(shell awk '/define PETSC_VERSION_SUBMINOR/{print $$3;}' ./include/petscversion.h) +PETSC_VERSION_RELEASE := $(shell awk '/define PETSC_VERSION_RELEASE/{print $$3;}' ./include/petscversion.h) libpetsc_abi_version := $(PETSC_VERSION_MAJOR).$(if $(filter $(PETSC_VERSION_RELEASE), 0 -2 -3 -4 -5),0)$(PETSC_VERSION_MINOR) libpetsc_lib_version := $(libpetsc_abi_version).$(PETSC_VERSION_SUBMINOR) @@ -62,7 +62,7 @@ $(generated) : $(petscconf) $(petscvariables) config/gmakegen.py # Skip including generated files (which triggers rebuilding them) when we're just going to clean anyway. 
ifneq ($(MAKECMDGOALS:clean%=clean),clean) --include $(generated) +include $(generated) endif # implies shared libraries with MS compilers @@ -70,7 +70,7 @@ ifeq ($(SL_LINKER_FUNCTION),-LD) $(OBJDIR)/%.o : CCPPFLAGS+=-Dpetsc_EXPORTS endif -langs := F cu cxx c +langs := F F90 cu cxx c concatlang = $(foreach lang, $(langs), $(srcs-$(1).$(lang):src/%.$(lang)=$(OBJDIR)/%.o)) srcs.o := $(foreach pkg, $(pkgs), $(call concatlang,$(pkg))) diff --git a/gmakefile.test b/gmakefile.test index d02426b2e1f..41c81ac1ff8 100644 --- a/gmakefile.test +++ b/gmakefile.test @@ -12,8 +12,8 @@ include $(PETSC_DIR)/lib/petsc/conf/variables TESTDIR ?= ./$(PETSC_ARCH)/tests MODDIR := $(PETSC_DIR)/$(PETSC_ARCH)/include -TESTLOGTAPFILE ?= $(TESTDIR)/test_tap.log -TESTLOGERRFILE ?= $(TESTDIR)/test_err.log +TESTLOGTAPFILE ?= $(TESTDIR)/test_$(PETSC_ARCH)_tap.log +TESTLOGERRFILE ?= $(TESTDIR)/test_$(PETSC_ARCH)_err.log EXAMPLESDIR := $(TESTSRCDIR) pkgs := sys vec mat dm ksp snes ts tao @@ -84,7 +84,7 @@ $(generatedtest) : $(petscconf) $(petscvariables) $(CONFIGDIR)/gmakegentest.py $ $(PYTHON) $(CONFIGDIR)/gmakegentest.py --petsc-dir=$(PETSC_DIR) --petsc-arch=$(PETSC_ARCH) --testdir=$(TESTDIR) ifneq ($(MAKECMDGOALS:clean%=clean),clean) --include $(generatedtest) +include $(generatedtest) endif ifeq ($(PETSC_LANGUAGE),CXXONLY) @@ -93,8 +93,8 @@ else cc_name := CC endif -PETSC_COMPILE.c = $(call quiet,$(cc_name)) -c $(PCC_FLAGS) $(CFLAGS) $(CCPPFLAGS) $(C_DEPFLAGS) -PETSC_COMPILE.cxx = $(call quiet,CXX) -c $(CXX_FLAGS) $(CFLAGS) $(CCPPFLAGS) $(CXX_DEPFLAGS) +PETSC_COMPILE.c = $(call quiet,$(cc_name)) -c $(PCC_FLAGS) $(PFLAGS) $(CCPPFLAGS) $(C_DEPFLAGS) +PETSC_COMPILE.cxx = $(call quiet,CXX) -c $(CXX_FLAGS) $(CXXFLAGS) $(CXXCPPFLAGS) $(CXX_DEPFLAGS) PETSC_COMPILE.cu = $(call quiet,CUDAC) -c $(CUDAC_FLAGS) --compiler-options="$(PCC_FLAGS) $(CXXFLAGS) $(CCPPFLAGS)" PETSC_GENDEPS.cu = $(call quiet,CUDAC,.dep) --generate-dependencies --output-directory=$(@D) $(CUDAC_FLAGS) --compiler-options="$(PCC_FLAGS) $(CXXFLAGS) $(CCPPFLAGS)" PETSC_COMPILE.F = $(call quiet,FC) -c $(FC_FLAGS) $(FFLAGS) $(FCPPFLAGS) $(FC_DEPFLAGS) @@ -182,7 +182,7 @@ $(foreach pkg, $(pkgs), $(call concattestlang,$(pkg),F F90)) : $(libpetscall) # Testing convenience targets .PHONY: test pre-clean -test: report_tests check-test-errors +test: report_tests pre-clean: @$(RM) -rf $(TESTDIR)/counts $(TESTLOGTAPFILE) $(TESTLOGERRFILE) @@ -273,6 +273,10 @@ else ifdef argsearch TESTTARGETS := $(foreach v,$(alltesttargets),$(if $(findstring $(argsearch),$($(v)_ARGS)),$(v))) else ifdef globsearch TESTTARGETS := $(shell $(PYTHON) -c"import sys,fnmatch,itertools; m=[fnmatch.filter(sys.argv[2].split(),p) for p in sys.argv[1].split()]; print(' '.join(list(itertools.chain.from_iterable(m))))" '$(globsearch)' '$(alltesttargets)') +else ifdef test-fail + TESTTARGETS := $(shell $(PETSC_ARCH)/tests/echofailures.sh) +else ifdef query + TESTTARGETS := $(shell $(PYTHON) config/query_tests.py '$(query)' '$(queryval)') else # No filter - run them all, but delete the executables as we go TESTTARGETS := $(testpkgs) endif @@ -372,6 +376,12 @@ help-test: -@echo " NOTE: uses shell which is possibly slower and more brittle" -@echo " make -f ${makefile} test globsearch='sys*ex2*'" -@echo + -@echo " To re-run the last tests which failed:" + -@echo " make -f ${makefile} test test-fail='1'" + -@echo + -@echo " To search for fields from the original test definitions:" + -@echo " make -f ${makefile} test query='requires' queryval='*MPI_PROCESS_SHARED_MEMORY*'" + -@echo -@echo " To see which 
targets match a given pattern (useful for doing a specific target):" -@echo " make -f ${makefile} print-test search=sys%" -@echo " which is equivalent to:" diff --git a/include/petsc/finclude/makefile b/include/petsc/finclude/makefile index c7973653912..49501a00c2f 100644 --- a/include/petsc/finclude/makefile +++ b/include/petsc/finclude/makefile @@ -1,5 +1,4 @@ #requiresdefine 'PETSC_HAVE_FORTRAN' -#requiresdefine 'PETSC_USING_F90' CFLAGS = FFLAGS = diff --git a/include/petsc/finclude/petscdm.h b/include/petsc/finclude/petscdm.h index b77dba2842f..10f66e5f364 100644 --- a/include/petsc/finclude/petscdm.h +++ b/include/petsc/finclude/petscdm.h @@ -10,13 +10,15 @@ #include "petsc/finclude/petscmat.h" #define DMType character*(80) -#define DMBoundaryType PetscEnum -#define DMPointLocationType PetscEnum -#define DMAdaptationType PetscEnum -#define DMAdaptFlag PetscEnum -#define PetscUnit PetscEnum +#define DMBoundaryType PetscEnum +#define DMPointLocationType PetscEnum +#define DMAdaptationType PetscEnum +#define DMAdaptFlag PetscEnum +#define PetscUnit PetscEnum #define DMAdaptationStrategy PetscEnum -#define DMDirection PetscEnum +#define DMDirection PetscEnum +#define DMEnclosureType PetscEnum +#define DMPolytopeType PetscEnum #define DM type(tDM) diff --git a/include/petsc/finclude/petscerror.h b/include/petsc/finclude/petscerror.h deleted file mode 100644 index 6339187ef37..00000000000 --- a/include/petsc/finclude/petscerror.h +++ /dev/null @@ -1,56 +0,0 @@ - -! -! Include file for Fortran error codes -! These are also in include/petscerror.h -! -#if !defined (PETSCERRORDEF_H) -#define PETSCERRORDEF_H - -#define PETSC_ERR_MEM 55 -#define PETSC_ERR_SUP 56 -#define PETSC_ERR_SUP_SYS 57 -#define PETSC_ERR_ORDER 58 -#define PETSC_ERR_SIG 59 -#define PETSC_ERR_FP 72 -#define PETSC_ERR_COR 74 -#define PETSC_ERR_LIB 76 -#define PETSC_ERR_PLIB 77 -#define PETSC_ERR_MEMC 78 -#define PETSC_ERR_CONV_FAILED 82 -#define PETSC_ERR_USER 83 -#define PETSC_ERR_SYS 88 -#define PETSC_ERR_POINTER 70 -#define PETSC_ERR_MPI_LIB_INCOMP 87 - -#define PETSC_ERR_ARG_SIZ 60 -#define PETSC_ERR_ARG_IDN 61 -#define PETSC_ERR_ARG_WRONG 62 -#define PETSC_ERR_ARG_CORRUPT 64 -#define PETSC_ERR_ARG_OUTOFRANGE 63 -#define PETSC_ERR_ARG_BADPTR 68 -#define PETSC_ERR_ARG_NOTSAMETYPE 69 -#define PETSC_ERR_ARG_NOTSAMECOMM 80 -#define PETSC_ERR_ARG_WRONGSTATE 73 -#define PETSC_ERR_ARG_TYPENOTSET 89 -#define PETSC_ERR_ARG_INCOMP 75 -#define PETSC_ERR_ARG_NULL 85 -#define PETSC_ERR_ARG_UNKNOWN_TYPE 86 - -#define PETSC_ERR_FILE_OPEN 65 -#define PETSC_ERR_FILE_READ 66 -#define PETSC_ERR_FILE_WRITE 67 -#define PETSC_ERR_FILE_UNEXPECTED 79 - -#define PETSC_ERR_MAT_LU_ZRPVT 71 -#define PETSC_ERR_MAT_CH_ZRPVT 81 - -#define PETSC_ERR_INT_OVERFLOW 84 - -#define PETSC_ERR_FLOP_COUNT 90 -#define PETSC_ERR_NOT_CONVERGED 91 -#define PETSC_ERR_MISSING_FACTOR 92 -#define PETSC_ERR_OPT_OVERWRITE 93 -#define PETSC_ERR_WRONG_MPI_SIZE 94 -#define PETSC_ERR_USER_INPUT 95 - -#endif diff --git a/include/petsc/finclude/petscis.h b/include/petsc/finclude/petscis.h index 4a70c5b97ba..d7881a132cf 100644 --- a/include/petsc/finclude/petscis.h +++ b/include/petsc/finclude/petscis.h @@ -17,6 +17,8 @@ #define PetscLayout PetscFortranAddr #define ISType character*(80) +#define ISInfo PetscEnum +#define ISInfoType PetscEnum #define ISLocalToGlobalMapping PetscFortranAddr #define ISGlobalToLocalType character*(80) #define ISGlobalToLocalMappingMode PetscEnum diff --git a/include/petsc/finclude/petscksp.h b/include/petsc/finclude/petscksp.h index 
a68a4facca8..1c3b53e8e28 100644 --- a/include/petsc/finclude/petscksp.h +++ b/include/petsc/finclude/petscksp.h @@ -24,12 +24,18 @@ #define KSPRICHARDSON 'richardson' #define KSPCHEBYSHEV 'chebyshev' #define KSPCG 'cg' +#define KSPGROPPCG 'groppcg' +#define KSPPIPECG 'pipecg' +#define KSPPIPECGRR 'pipecgrr' +#define KSPPIPELCG 'pipelcg' #define KSPCGNE 'cgne' #define KSPNASH 'nash' #define KSPSTCG 'stcg' #define KSPGLTR 'gltr' #define KSPFCG 'fcg' +#define KSPPIPEFCG 'pipefcg' #define KSPGMRES 'gmres' +#define KSPPIPEFGMRES 'pipefgmres' #define KSPFGMRES 'fgmres' #define KSPLGMRES 'lgmres' #define KSPDGMRES 'dgmres' @@ -40,9 +46,11 @@ #define KSPFBCGS 'fbcgs' #define KSPFBCGSR 'fbcgsr' #define KSPBCGSL 'bcgsl' +#define KSPPIPEBCGS 'pipebcgs' #define KSPCGS 'cgs' #define KSPTFQMR 'tfqmr' #define KSPCR 'cr' +#define KSPPIPECR 'pipecr' #define KSPLSQR 'lsqr' #define KSPPREONLY 'preonly' #define KSPQCG 'qcg' @@ -52,9 +60,11 @@ #define KSPLCD 'lcd' #define KSPPYTHON 'python' #define KSPGCR 'gcr' +#define KSPPIPEGCR 'pipegcr' #define KSPTSIRM 'tsirm' #define KSPCGLS 'cgls' #define KSPFETIDP 'fetidp' +#define KSPHPDDM 'hpddm' ! ! Various Initial guesses for Krylov subspace methods ! diff --git a/include/petsc/finclude/petscmat.h b/include/petsc/finclude/petscmat.h index a7493bd740b..438707913ac 100644 --- a/include/petsc/finclude/petscmat.h +++ b/include/petsc/finclude/petscmat.h @@ -61,6 +61,7 @@ #define MATCOLORINGJP 'jp' #define MATORDERINGNATURAL 'natural' +#define MATORDERINGNATURAL_OR_ND 'natural_or_nd' #define MATORDERINGND 'nd' #define MATORDERING1WD '1wd' #define MATORDERINGRCM 'rcm' diff --git a/include/petsc/finclude/petscpc.h b/include/petsc/finclude/petscpc.h index a41ab1e65f0..52045e10265 100644 --- a/include/petsc/finclude/petscpc.h +++ b/include/petsc/finclude/petscpc.h @@ -29,7 +29,7 @@ ! GAMG types ! #define PCGAMGAGG 'agg' -#define PCGAMGGEO 'geo' +#define PCGAMGGEO 'geo' #define PCGAMGCLASSICAL 'classical' ! ! GAMG classical types @@ -38,7 +38,7 @@ #define PCGAMGCLASSICALSTANDARD 'standard' ! -! Various preconditioners +! Various preconditioners ! #define PCNONE 'none' #define PCJACOBI 'jacobi' @@ -68,7 +68,6 @@ #define PCML 'ml' #define PCGALERKIN 'galerkin' #define PCEXOTIC 'exotic' -#define PCSUPPORTGRAPH 'supportgraph' #define PCCP 'cp' #define PCBFBT 'bfbt' #define PCLSC 'lsc' @@ -78,9 +77,17 @@ #define PCREDISTRIBUTE 'redistribute' #define PCSVD 'svd' #define PCGAMG 'gamg' +#define PCCHOWILUVIENNACL 'chowiluviennacl' +#define PCROWSCALINGVIENNACL 'rowscalingviennacl' +#define PCSAVIENNACL 'saviennacl' #define PCBDDC 'bddc' +#define PCKACZMARZ 'kaczmarz' +#define PCTELESCOPE 'telescope' #define PCPATCH 'patch' +#define PCLMVM 'lmvm' +#define PCHMG 'hmg' #define PCDEFLATION 'deflation' +#define PCHPDDM 'hpddm' #define PCMGType PetscEnum #define PCMGCycleType PetscEnum @@ -88,5 +95,6 @@ #define PCExoticType PetscEnum #define PCDeflationSpaceType PetscEnum #define PCBDDCInterfaceExtType PetscEnum +#define PCHPDDMCoarseCorrectionType PetscEnum #define PCFailedReason PetscEnum #endif diff --git a/include/petsc/finclude/petscsys.h b/include/petsc/finclude/petscsys.h index 50d61fefecf..ca0b86d2f92 100644 --- a/include/petsc/finclude/petscsys.h +++ b/include/petsc/finclude/petscsys.h @@ -15,7 +15,6 @@ #endif #include "petscversion.h" #include "petsc/finclude/petscviewer.h" -#include "petsc/finclude/petscerror.h" #include "petsc/finclude/petsclog.h" #include "petsc/finclude/petscbag.h" @@ -25,19 +24,11 @@ ! compiler options like -r4,-r8, sometimes invoked ! by the user. 
NAG compiler does not like integer*4,real*8 -#if defined(PETSC_USE_FORTRANKIND) #define integer8 integer(kind=selected_int_kind(10)) #define integer4 integer(kind=selected_int_kind(5)) #define integer2 integer(kind=selected_int_kind(3)) #define integer1 integer(kind=selected_int_kind(1)) #define PetscBool logical(kind=4) -#else -#define integer8 integer*8 -#define integer4 integer*4 -#define integer2 integer*2 -#define integer1 integer*1 -#define PetscBool logical*4 -#endif #if (PETSC_SIZEOF_VOID_P == 8) #define PetscOffset integer8 @@ -53,13 +44,8 @@ #define PetscInt integer4 #endif #define PetscInt64 integer8 -#if defined(PETSC_USING_F90) && !defined(PETSC_USE_FORTRANKIND) -#define PetscObjectState integer4 -#define PetscObjectId integer4 -#else #define PetscObjectState PetscInt64 #define PetscObjectId PetscInt64 -#endif #if (PETSC_SIZEOF_INT == 4) #define PetscFortranInt integer4 @@ -74,13 +60,13 @@ #endif ! #if defined(PETSC_HAVE_MPIUNI) -#define MPI_Comm PetscFortranInt -#define MPI_Group PetscFortranInt -#define PetscMPIInt PetscFortranInt +#define MPI_Comm MPIUNI_FInt +#define MPI_Group MPIUNI_FInt +#define PetscMPIInt MPIUNI_FInt #else -#define MPI_Comm integer -#define MPI_Group integer -#define PetscMPIInt integer +#define MPI_Comm integer4 +#define MPI_Group integer4 +#define PetscMPIInt integer4 #endif ! #define PetscEnum PetscFortranInt @@ -95,7 +81,6 @@ #define PetscDataType PetscEnum #define PetscFPTrap PetscEnum ! -#if defined (PETSC_USE_FORTRANKIND) #define PetscFortranFloat real(kind=selected_real_kind(5)) #define PetscFortranDouble real(kind=selected_real_kind(10)) #define PetscFortranLongDouble real(kind=selected_real_kind(19)) @@ -107,19 +92,6 @@ #define PetscFortranComplex complex(kind=selected_real_kind(20)) #endif #define PetscChar(a) character(len = a) :: -#else -#define PetscFortranFloat real*4 -#define PetscFortranDouble real*8 -#define PetscFortranLongDouble real*16 -#if defined(PETSC_USE_REAL_SINGLE) -#define PetscFortranComplex complex*8 -#elif defined(PETSC_USE_REAL_DOUBLE) -#define PetscFortranComplex complex*16 -#elif defined(PETSC_USE_REAL___FLOAT128) -#define PetscFortranComplex complex*32 -#endif -#define PetscChar(a) character*(a) -#endif #if defined(PETSC_USE_COMPLEX) #define PETSC_SCALAR PETSC_COMPLEX diff --git a/include/petsc/finclude/petsctao.h b/include/petsc/finclude/petsctao.h index 2c28f6b7749..317ea90a5b2 100644 --- a/include/petsc/finclude/petsctao.h +++ b/include/petsc/finclude/petsctao.h @@ -5,9 +5,11 @@ #define Tao PetscFortranAddr #define TaoLineSearch PetscFortranAddr -#define TaoConvergedReason integer +#define TaoConvergedReason PetscEnum #define TaoType character*(80) #define TaoLineSearchType character*(80) +#define TaoADMMUpdateType PetscEnum +#define TaoADMMRegularizerType PetscEnum #define TAOLMVM "lmvm" #define TAONLS "nls" @@ -37,6 +39,7 @@ #define TAOASILS "asils" #define TAOASFLS "asfls" #define TAOIPM "ipm" +#define TAOADMM "admm" #define TAOFDTEST "test" #endif diff --git a/include/petsc/mpiuni/mpi.h b/include/petsc/mpiuni/mpi.h index 86ebc58d899..39943b18423 100644 --- a/include/petsc/mpiuni/mpi.h +++ b/include/petsc/mpiuni/mpi.h @@ -517,6 +517,12 @@ typedef int MPI_Fint; (MPIUNI_ARG(comm),\ *group = 1,\ MPI_SUCCESS) +#define MPI_Group_excl(group,n,ranks,newgroup) \ + (MPIUNI_ARG(group),\ + MPIUNI_ARG(n),\ + MPIUNI_ARG(ranks),\ + MPIUNI_ARG(newgroup),\ + MPI_SUCCESS) #define MPI_Group_incl(group,n,ranks,newgroup) \ (MPIUNI_ARG(group),\ MPIUNI_ARG(n),\ @@ -863,7 +869,6 @@ typedef int MPI_Fint; #define 
MPI_Group_union(group1,group2,newgroup) MPI_SUCCESS #define MPI_Group_intersection(group1,group2,newgroup) MPI_SUCCESS #define MPI_Group_difference(group1,group2,newgroup) MPI_SUCCESS -#define MPI_Group_excl(group,n,ranks,newgroup) MPI_SUCCESS #define MPI_Group_range_incl(group,n,ranges,newgroup) MPI_SUCCESS #define MPI_Group_range_excl(group,n,ranges,newgroup) MPI_SUCCESS #define MPI_Group_free(group) \ diff --git a/include/petsc/mpiuni/mpif.h b/include/petsc/mpiuni/mpif.h index 62a8c5d946e..8338dffed34 100644 --- a/include/petsc/mpiuni/mpif.h +++ b/include/petsc/mpiuni/mpif.h @@ -4,61 +4,61 @@ #include "petsc/mpiuni/mpiunifdef.h" ! ! External objects outside of MPI calls - integer MPI_COMM_WORLD + MPIUNI_FInt MPI_COMM_WORLD parameter (MPI_COMM_WORLD = 2) - integer MPI_COMM_SELF + MPIUNI_FInt MPI_COMM_SELF parameter (MPI_COMM_SELF = 1) - integer MPI_COMM_NULL + MPIUNI_FInt MPI_COMM_NULL parameter (MPI_COMM_NULL = 0) - integer MPI_IDENT + MPIUNI_FInt MPI_IDENT parameter (MPI_IDENT = 0) - integer MPI_UNEQUAL + MPIUNI_FInt MPI_UNEQUAL parameter (MPI_UNEQUAL = 3) - integer MPI_KEYVAL_INVALID + MPIUNI_FInt MPI_KEYVAL_INVALID parameter (MPI_KEYVAL_INVALID = 0) - integer MPI_SUCCESS + MPIUNI_FInt MPI_SUCCESS parameter (MPI_SUCCESS = 0) - integer MPI_ERR_OTHER + MPIUNI_FInt MPI_ERR_OTHER parameter (MPI_ERR_OTHER = 17) - integer MPI_ERR_UNKNOWN + MPIUNI_FInt MPI_ERR_UNKNOWN parameter (MPI_ERR_UNKNOWN = 18) - integer MPI_ERR_INTERN + MPIUNI_FInt MPI_ERR_INTERN parameter (MPI_ERR_INTERN = 21) - integer MPI_PACKED + MPIUNI_FInt MPI_PACKED parameter (MPI_PACKED=0) - integer MPI_ANY_SOURCE + MPIUNI_FInt MPI_ANY_SOURCE parameter (MPI_ANY_SOURCE=2) - integer MPI_ANY_TAG + MPIUNI_FInt MPI_ANY_TAG parameter (MPI_ANY_TAG=-1) - integer MPI_UNDEFINED + MPIUNI_FInt MPI_UNDEFINED parameter (MPI_UNDEFINED=-32766) - INTEGER MPI_INFO_NULL + MPIUNI_FInt MPI_INFO_NULL PARAMETER (MPI_INFO_NULL=0) - integer MPI_REQUEST_NULL + MPIUNI_FInt MPI_REQUEST_NULL parameter (MPI_REQUEST_NULL=0) - integer MPI_STATUS_SIZE + MPIUNI_FInt MPI_STATUS_SIZE parameter (MPI_STATUS_SIZE=3) - INTEGER MPI_SOURCE,MPI_TAG,MPI_ERROR + MPIUNI_FInt MPI_SOURCE,MPI_TAG,MPI_ERROR PARAMETER(MPI_SOURCE=1,MPI_TAG=2,MPI_ERROR=3) - integer MPI_STATUS_IGNORE + MPIUNI_FInt MPI_STATUS_IGNORE parameter (MPI_STATUS_IGNORE=0) ! Data Types. Same Values used in mpi.c - integer MPI_INTEGER,MPI_LOGICAL - integer MPI_REAL,MPI_DOUBLE_PRECISION - integer MPI_COMPLEX, MPI_CHARACTER - integer MPI_COMPLEX16 - integer MPI_2INTEGER - integer MPI_DOUBLE_COMPLEX - integer MPI_INTEGER4 - integer MPI_INTEGER8 - integer MPI_2DOUBLE_PRECISION - integer MPI_REAL4,MPI_REAL8 + MPIUNI_FInt MPI_INTEGER,MPI_LOGICAL + MPIUNI_FInt MPI_REAL,MPI_DOUBLE_PRECISION + MPIUNI_FInt MPI_COMPLEX, MPI_CHARACTER + MPIUNI_FInt MPI_COMPLEX16 + MPIUNI_FInt MPI_2INTEGER + MPIUNI_FInt MPI_DOUBLE_COMPLEX + MPIUNI_FInt MPI_INTEGER4 + MPIUNI_FInt MPI_INTEGER8 + MPIUNI_FInt MPI_2DOUBLE_PRECISION + MPIUNI_FInt MPI_REAL4,MPI_REAL8 ! ! 
These should match the values in mpi.h many below are wrong @@ -78,18 +78,18 @@ parameter (MPI_INTEGER8=INT(Z'400108')) parameter (MPI_2DOUBLE_PRECISION=INT(Z'100208')) - integer MPI_SUM + MPIUNI_FInt MPI_SUM parameter (MPI_SUM=1) - integer MPI_MAX + MPIUNI_FInt MPI_MAX parameter (MPI_MAX=2) - integer MPI_MIN + MPIUNI_FInt MPI_MIN parameter (MPI_MIN=3) - integer MPI_MAXLOC + MPIUNI_FInt MPI_MAXLOC parameter (MPI_MAXLOC=12) - integer MPI_MINLOC + MPIUNI_FInt MPI_MINLOC parameter (MPI_MINLOC=13) - integer MPI_MAX_PROCESSOR_NAME + MPIUNI_FInt MPI_MAX_PROCESSOR_NAME parameter (MPI_MAX_PROCESSOR_NAME=128-1) diff --git a/include/petsc/mpiuni/mpiunifdef.h b/include/petsc/mpiuni/mpiunifdef.h index 46d665f5be8..cc5b3512a22 100644 --- a/include/petsc/mpiuni/mpiunifdef.h +++ b/include/petsc/mpiuni/mpiunifdef.h @@ -1,6 +1,13 @@ #if !defined (MPIUNIFDEF_H) #define MPIUNIFDEF_H +#include "petscconf.h" +#if (PETSC_SIZEOF_INT == 4) +#define MPIUNI_FInt integer(kind=selected_int_kind(5)) +#elif (PETSC_SIZEOF_INT == 8) +#define MPIUNI_FInt integer(kind=selected_int_kind(10)) +#endif + #define MPI_Init PETSC_MPI_INIT #define MPI_Finalize PETSC_MPI_FINALIZE #define MPI_Comm_size PETSC_MPI_COMM_SIZE diff --git a/include/petsc/private/dmimpl.h b/include/petsc/private/dmimpl.h index 20f60cd084e..ebbf0d98e60 100644 --- a/include/petsc/private/dmimpl.h +++ b/include/petsc/private/dmimpl.h @@ -169,6 +169,8 @@ typedef struct _n_Space { PETSC_INTERN PetscErrorCode DMDestroyLabelLinkList_Internal(DM); +#define MAXDMMONITORS 5 + struct _p_DM { PETSCHEADER(struct _DMOps); Vec localin[DM_MAX_WORK_VECTORS],localout[DM_MAX_WORK_VECTORS]; @@ -178,6 +180,7 @@ struct _p_DM { DMWorkLink workin,workout; DMLabelLink labels; /* Linked list of labels */ DMLabel depthLabel; /* Optimized access to depth label */ + DMLabel celltypeLabel; /* Optimized access to celltype label */ void *ctx; /* a user context */ PetscErrorCode (*ctxdestroy)(void**); Vec x; /* location at which the functions/Jacobian are computed */ @@ -247,6 +250,10 @@ struct _p_DM { DM dmBC; /* The DM with boundary conditions in the global DM */ PetscInt outputSequenceNum; /* The current sequence number for output */ PetscReal outputSequenceVal; /* The current sequence value for output */ + PetscErrorCode (*monitor[MAXDMMONITORS])(DM, void *); + PetscErrorCode (*monitordestroy[MAXDMMONITORS])(void **); + void *monitorcontext[MAXDMMONITORS]; + PetscInt numbermonitors; PetscObject dmksp,dmsnes,dmts; }; @@ -261,6 +268,7 @@ PETSC_EXTERN PetscLogEvent DM_CreateInterpolation; PETSC_EXTERN PetscLogEvent DM_CreateRestriction; PETSC_EXTERN PetscLogEvent DM_CreateInjection; PETSC_EXTERN PetscLogEvent DM_CreateMatrix; +PETSC_EXTERN PetscLogEvent DM_Load; PETSC_EXTERN PetscErrorCode DMCreateGlobalVector_Section_Private(DM,Vec*); PETSC_EXTERN PetscErrorCode DMCreateLocalVector_Section_Private(DM,Vec*); diff --git a/include/petsc/private/dmnetworkimpl.h b/include/petsc/private/dmnetworkimpl.h index b2ceca95b3b..b59437fe3c4 100644 --- a/include/petsc/private/dmnetworkimpl.h +++ b/include/petsc/private/dmnetworkimpl.h @@ -17,6 +17,8 @@ struct _p_DMNetworkComponentHeader { PetscInt size[MAX_DATA_AT_POINT]; PetscInt key[MAX_DATA_AT_POINT]; PetscInt offset[MAX_DATA_AT_POINT]; + PetscInt nvar[MAX_DATA_AT_POINT]; /* Number of variables */ + PetscInt offsetvarrel[MAX_DATA_AT_POINT]; /* offset from the first variable of the network point */ } PETSC_ATTRIBUTEALIGNED(sizeof(PetscScalar)); typedef struct _p_DMNetworkComponentValue *DMNetworkComponentValue; diff --git 
a/include/petsc/private/dmpleximpl.h b/include/petsc/private/dmpleximpl.h index 6c64d29a155..074e966abdd 100644 --- a/include/petsc/private/dmpleximpl.h +++ b/include/petsc/private/dmpleximpl.h @@ -37,6 +37,9 @@ PETSC_EXTERN PetscLogEvent DMPLEX_InjectorFEM; PETSC_EXTERN PetscLogEvent DMPLEX_IntegralFEM; PETSC_EXTERN PetscLogEvent DMPLEX_CreateGmsh; PETSC_EXTERN PetscLogEvent DMPLEX_RebalanceSharedPoints; +PETSC_EXTERN PetscLogEvent DMPLEX_CreateFromFile; +PETSC_EXTERN PetscLogEvent DMPLEX_CreateFromCellList; +PETSC_EXTERN PetscLogEvent DMPLEX_CreateFromCellList_Coordinates; PETSC_EXTERN PetscBool PetscPartitionerRegisterAllCalled; PETSC_EXTERN PetscErrorCode PetscPartitionerRegisterAll(void); @@ -63,19 +66,20 @@ struct _PetscPartitionerOps { PetscErrorCode (*setup)(PetscPartitioner); PetscErrorCode (*view)(PetscPartitioner,PetscViewer); PetscErrorCode (*destroy)(PetscPartitioner); - PetscErrorCode (*partition)(PetscPartitioner, DM, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, IS *); + PetscErrorCode (*partition)(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, IS *); }; struct _p_PetscPartitioner { PETSCHEADER(struct _PetscPartitionerOps); - void *data; /* Implementation object */ - PetscInt height; /* Height of points to partition into non-overlapping subsets */ - PetscInt edgeCut; /* The number of edge cut by the partition */ - PetscReal balance; /* The maximum partition size divided by the minimum size */ - PetscViewer viewerGraph; - PetscViewerFormat formatGraph; - PetscBool viewGraph; - PetscBool noGraph; /* if true, the partitioner does not need the connectivity graph, only the number of local vertices */ + void *data; /* Implementation object */ + PetscInt height; /* Height of points to partition into non-overlapping subsets */ + PetscInt edgeCut; /* The number of edge cut by the partition */ + PetscReal balance; /* The maximum partition size divided by the minimum size */ + PetscViewer viewer; + PetscViewer viewerGraph; + PetscBool viewGraph; + PetscBool noGraph; /* if true, the partitioner does not need the connectivity graph, only the number of local vertices */ + PetscBool usevwgt; /* if true, the partitioner looks at the local section vertSection to weight the vertices of the graph */ }; typedef struct { @@ -83,6 +87,7 @@ typedef struct { } PetscPartitioner_Chaco; typedef struct { + MPI_Comm pcomm; PetscInt ptype; PetscReal imbalanceRatio; PetscInt debugFlag; @@ -90,6 +95,7 @@ typedef struct { } PetscPartitioner_ParMetis; typedef struct { + MPI_Comm pcomm; PetscInt strategy; PetscReal imbalance; } PetscPartitioner_PTScotch; @@ -194,6 +200,7 @@ typedef struct { /* Labels and numbering */ PetscObjectState depthState; /* State of depth label, so that we can determine if a user changes it */ + PetscObjectState celltypeState; /* State of celltype label, so that we can determine if a user changes it */ IS globalVertexNumbers; IS globalCellNumbers; @@ -298,9 +305,9 @@ PETSC_EXTERN PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec, int, int); PETSC_EXTERN PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec, int, int); PETSC_INTERN PetscErrorCode DMPlexVTKGetCellType_Internal(DM,PetscInt,PetscInt,PetscInt*); PETSC_INTERN PetscErrorCode DMPlexGetAdjacency_Internal(DM,PetscInt,PetscBool,PetscBool,PetscBool,PetscInt*,PetscInt*[]); -PETSC_INTERN PetscErrorCode DMPlexGetFaces_Internal(DM,PetscInt,PetscInt,PetscInt*,PetscInt*,const PetscInt*[]); -PETSC_INTERN PetscErrorCode 
DMPlexGetRawFaces_Internal(DM,PetscInt,PetscInt,const PetscInt[], PetscInt*,PetscInt*,const PetscInt*[]); -PETSC_INTERN PetscErrorCode DMPlexRestoreFaces_Internal(DM,PetscInt,PetscInt,PetscInt*,PetscInt*,const PetscInt*[]); +PETSC_INTERN PetscErrorCode DMPlexGetFaces_Internal(DM,PetscInt,PetscInt*,PetscInt*,const PetscInt*[]); +PETSC_INTERN PetscErrorCode DMPlexGetRawFaces_Internal(DM,DMPolytopeType,const PetscInt[], PetscInt*,PetscInt*,const PetscInt*[]); +PETSC_INTERN PetscErrorCode DMPlexRestoreFaces_Internal(DM,PetscInt,PetscInt*,PetscInt*,const PetscInt*[]); PETSC_INTERN PetscErrorCode DMPlexRefineUniform_Internal(DM,CellRefiner,DM*); PETSC_INTERN PetscErrorCode DMPlexGetCellRefiner_Internal(DM,CellRefiner*); PETSC_INTERN PetscErrorCode CellRefinerGetAffineTransforms_Internal(CellRefiner, PetscInt *, PetscReal *[], PetscReal *[], PetscReal *[]); @@ -322,9 +329,11 @@ PETSC_INTERN PetscErrorCode DMPlexLocatePoint_Internal(DM,PetscInt,const PetscSc PETSC_EXTERN PetscErrorCode DMPlexOrientCell_Internal(DM,PetscInt,PetscInt,PetscBool); PETSC_EXTERN PetscErrorCode DMPlexOrientInterface_Internal(DM); +/* Applications may use this function */ +PETSC_EXTERN PetscErrorCode DMPlexCreateNumbering_Plex(DM, PetscInt, PetscInt, PetscInt, PetscInt *, PetscSF, IS *); + PETSC_INTERN PetscErrorCode DMPlexCreateCellNumbering_Internal(DM, PetscBool, IS *); PETSC_INTERN PetscErrorCode DMPlexCreateVertexNumbering_Internal(DM, PetscBool, IS *); -PETSC_INTERN PetscErrorCode DMPlexCreateNumbering_Internal(DM, PetscInt, PetscInt, PetscInt, PetscInt *, PetscSF, IS *); PETSC_INTERN PetscErrorCode DMPlexRefine_Internal(DM, DMLabel, DM *); PETSC_INTERN PetscErrorCode DMPlexCoarsen_Internal(DM, DMLabel, DM *); PETSC_INTERN PetscErrorCode DMCreateMatrix_Plex(DM, Mat*); @@ -626,8 +635,8 @@ PETSC_STATIC_INLINE PetscErrorCode DMPlexFixFaceOrientations_Permute_Private(Pet } PETSC_INTERN PetscErrorCode DMPlexGetPointDualSpaceFEM(DM,PetscInt,PetscInt,PetscDualSpace *); -PETSC_INTERN PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection,PetscInt,PetscInt,PetscInt *,PetscBool,const PetscInt[],const PetscInt[],PetscInt[]); -PETSC_INTERN PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection,PetscInt,PetscInt,PetscInt[],PetscBool,const PetscInt***,PetscInt,const PetscInt[],PetscInt[]); +PETSC_INTERN PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection,PetscBool,PetscInt,PetscInt,PetscInt *,PetscBool,const PetscInt[],const PetscInt[],PetscInt[]); +PETSC_INTERN PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection,PetscBool,PetscInt,PetscInt,PetscInt[],PetscBool,const PetscInt***,PetscInt,const PetscInt[],PetscInt[]); PETSC_INTERN PetscErrorCode DMPlexGetCompressedClosure(DM, PetscSection, PetscInt, PetscInt *, PetscInt **, PetscSection *, IS *, const PetscInt **); PETSC_INTERN PetscErrorCode DMPlexRestoreCompressedClosure(DM, PetscSection, PetscInt, PetscInt *, PetscInt **, PetscSection *, IS *, const PetscInt **); diff --git a/include/petsc/private/dmstagimpl.h b/include/petsc/private/dmstagimpl.h index 1d026ee6c03..2144f8ecb19 100644 --- a/include/petsc/private/dmstagimpl.h +++ b/include/petsc/private/dmstagimpl.h @@ -7,41 +7,47 @@ #define DMSTAG_MAX_DIM 3 #define DMSTAG_MAX_STRATA DMSTAG_MAX_DIM + 1 -/* This value is 1 + 3^DMSTAG_MAX DIM */ +/* This value is 1 + 3^DMSTAG_MAX_DIM */ #define DMSTAG_NUMBER_LOCATIONS 28 typedef struct { - /* Fields which may require being set before DMSetUp() is called */ + /* Fields which may require being set before DMSetUp() is called, set by 
DMStagInitialize(). + Some may be adjusted by DMSetUp() */ PetscInt N[DMSTAG_MAX_DIM]; /* Global dimensions (elements) */ PetscInt n[DMSTAG_MAX_DIM]; /* Local dimensions (elements) */ + PetscInt *l[DMSTAG_MAX_DIM]; /* Elements/rank in each direction */ PetscInt dof[DMSTAG_MAX_STRATA]; /* Dof per point for each stratum */ DMStagStencilType stencilType; /* Elementwise stencil type */ PetscInt stencilWidth; /* Elementwise ghost width */ DMBoundaryType boundaryType[DMSTAG_MAX_DIM]; /* Physical domain ghosting type */ PetscInt nRanks[DMSTAG_MAX_DIM]; /* Ranks in each direction */ - /* Additional fields populated by DMSetUp() */ + /* Fields unrelated to setup */ + DMType coordinateDMType; /* DM type to create for coordinates */ + + /* Data above is copied by DMStagDuplicateWithoutSetup(), while data below is not */ + + /* Fields populated by DMSetUp() */ PetscInt nGhost[DMSTAG_MAX_DIM]; /* Local dimensions (w/ ghosts) */ PetscInt start[DMSTAG_MAX_DIM]; /* First element number */ PetscInt startGhost[DMSTAG_MAX_DIM]; /* First element number (w/ ghosts) */ PetscMPIInt rank[DMSTAG_MAX_DIM]; /* Location in grid of ranks */ PetscMPIInt *neighbors; /* dim^3 local ranks */ - PetscInt *l[DMSTAG_MAX_DIM]; /* Elements/rank in each direction */ VecScatter gtol; /* Global --> Local */ VecScatter ltog_injective; /* Local --> Global, injective */ PetscInt *locationOffsets; /* Offsets for points in loc. rep. */ - /* Coordinates */ - DMType coordinateDMType; /* DM type to create for coordinates */ - - /* Convenience (easily computed from the above) */ + /* Additional convenience fields populated by DMSetUp() (easily computed from the above) */ PetscInt entriesPerElement; /* Entries stored with each element */ PetscInt entries; /* Local number of entries */ PetscInt entriesGhost; /* Local numbers of entries w/ ghosts */ PetscBool firstRank[DMSTAG_MAX_DIM]; /* First rank in this dim? */ PetscBool lastRank[DMSTAG_MAX_DIM]; /* Last rank in this dim? */ + } DM_Stag; +PETSC_INTERN PetscErrorCode DMStagDuplicateWithoutSetup(DM,MPI_Comm,DM*); +PETSC_INTERN PetscErrorCode DMStagInitialize(DMBoundaryType,DMBoundaryType,DMBoundaryType,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,DMStagStencilType,PetscInt,const PetscInt[],const PetscInt[],const PetscInt[],DM); PETSC_INTERN PetscErrorCode DMSetUp_Stag_1d(DM); PETSC_INTERN PetscErrorCode DMSetUp_Stag_2d(DM); PETSC_INTERN PetscErrorCode DMSetUp_Stag_3d(DM); diff --git a/include/petsc/private/f90impl.h b/include/petsc/private/f90impl.h index e831994ed80..0504e05ca53 100644 --- a/include/petsc/private/f90impl.h +++ b/include/petsc/private/f90impl.h @@ -14,7 +14,6 @@ #define PETSC_F90_2PTR_PARAM(ptr) #endif -#if defined(PETSC_USING_F90) typedef struct { char dummy; } F90Array1d; typedef struct { char dummy; } F90Array2d; typedef struct { char dummy; } F90Array3d; @@ -56,5 +55,4 @@ PETSC_EXTERN PetscErrorCode F90Array4dGetNextRecord(F90Array4d*,void** PETSC_F90 . 
ptr - Fortran 90 pointer */ -#endif /* PETSC_USING_F90 */ #endif diff --git a/include/petsc/private/fortranimpl.h b/include/petsc/private/fortranimpl.h index 2abb651a0e9..8b3206068bd 100644 --- a/include/petsc/private/fortranimpl.h +++ b/include/petsc/private/fortranimpl.h @@ -62,7 +62,7 @@ PETSC_EXTERN void (*PETSC_NULL_FUNCTION_Fortran)(void); #define FIXCHAR(a,n,b) \ {\ if (a == PETSC_NULL_CHARACTER_Fortran) { \ - b = a = 0; \ + b = a = NULL; \ } else { \ while((n > 0) && (a[n-1] == ' ')) n--; \ *ierr = PetscMalloc1(n+1,&b); \ diff --git a/include/petsc/private/isimpl.h b/include/petsc/private/isimpl.h index 9e0da0129f9..b62cd31c063 100644 --- a/include/petsc/private/isimpl.h +++ b/include/petsc/private/isimpl.h @@ -14,6 +14,9 @@ PETSC_EXTERN PetscBool ISRegisterAllCalled; PETSC_EXTERN PetscBool ISLocalToGlobalMappingRegisterAllCalled; PETSC_EXTERN PetscErrorCode ISRegisterAll(void); +/* events */ +PETSC_EXTERN PetscLogEvent IS_Load; + struct _ISOps { PetscErrorCode (*getindices)(IS,const PetscInt*[]); PetscErrorCode (*restoreindices)(IS,const PetscInt*[]); @@ -25,25 +28,34 @@ struct _ISOps { PetscErrorCode (*destroy)(IS); PetscErrorCode (*view)(IS,PetscViewer); PetscErrorCode (*load)(IS,PetscViewer); - PetscErrorCode (*identity)(IS,PetscBool*); PetscErrorCode (*copy)(IS,IS); PetscErrorCode (*togeneral)(IS); PetscErrorCode (*oncomm)(IS,MPI_Comm,PetscCopyMode,IS*); PetscErrorCode (*setblocksize)(IS,PetscInt); PetscErrorCode (*contiguous)(IS,PetscInt,PetscInt,PetscInt*,PetscBool*); PetscErrorCode (*locate)(IS,PetscInt,PetscInt *); + PetscErrorCode (*sortedlocal)(IS,PetscBool*); + PetscErrorCode (*sortedglobal)(IS,PetscBool*); + PetscErrorCode (*uniquelocal)(IS,PetscBool*); + PetscErrorCode (*uniqueglobal)(IS,PetscBool*); + PetscErrorCode (*permlocal)(IS,PetscBool*); + PetscErrorCode (*permglobal)(IS,PetscBool*); + PetscErrorCode (*intervallocal)(IS,PetscBool*); + PetscErrorCode (*intervalglobal)(IS,PetscBool*); }; +typedef enum {IS_INFO_UNKNOWN=0, IS_INFO_FALSE=1, IS_INFO_TRUE=2} ISInfoBool; + struct _p_IS { PETSCHEADER(struct _ISOps); PetscLayout map; - PetscBool isperm; /* if is a permutation */ PetscInt max,min; /* range of possible values */ void *data; - PetscBool isidentity; PetscInt *total, *nonlocal; /* local representation of ALL indices across the comm as well as the nonlocal part. */ PetscInt local_offset; /* offset to the local part within the total index set */ IS complement; /* IS wrapping nonlocal indices. */ + PetscBool info_permanent[2][IS_INFO_MAX]; /* whether local / global properties are permanent */ + ISInfoBool info[2][IS_INFO_MAX]; /* local / global properties */ }; extern PetscErrorCode ISLoad_Default(IS, PetscViewer); diff --git a/include/petsc/private/kernels/khash.h b/include/petsc/private/kernels/khash.h index f87e7bc7986..cd04258c465 100644 --- a/include/petsc/private/kernels/khash.h +++ b/include/petsc/private/kernels/khash.h @@ -247,7 +247,7 @@ static const double __ac_HASH_UPPER = 0.77; } \ SCOPE int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets) \ { /* This function uses 0.25*n_buckets bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets. 
*/ \ - khint32_t *new_flags = 0; \ + khint32_t *new_flags = NULL; \ khint_t j = 1; \ { \ kroundup32(new_n_buckets); \ diff --git a/include/petsc/private/matimpl.h b/include/petsc/private/matimpl.h index a5bd3a9d4f8..96802a27dbc 100644 --- a/include/petsc/private/matimpl.h +++ b/include/petsc/private/matimpl.h @@ -26,7 +26,7 @@ PETSC_EXTERN PetscErrorCode MatSeqAIJRegisterAll(void); /* If you add entries here also add them to the MATOP enum - in include/petscmat.h and include/petsc/finclude/petscmat.h + in include/petscmat.h and src/mat/f90-mod/petscmat.h */ typedef struct _MatOps *MatOps; struct _MatOps { @@ -207,10 +207,11 @@ struct _MatOps { PetscErrorCode (*creatempimatconcatenateseqmat)(MPI_Comm,Mat,PetscInt,MatReuse,Mat*); PetscErrorCode (*destroysubmatrices)(PetscInt,Mat*[]); PetscErrorCode (*mattransposesolve)(Mat,Mat,Mat); + PetscErrorCode (*getvalueslocal)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); }; /* If you add MatOps entries above also add them to the MATOP enum - in include/petscmat.h and include/petsc/finclude/petscmat.h + in include/petscmat.h and src/mat/f90-mod/petscmat.h */ #include @@ -400,12 +401,12 @@ struct _p_Mat { PetscBool symmetric_eternal; PetscBool nooffprocentries,nooffproczerorows; PetscBool assembly_subset; /* set by MAT_SUBSET_OFF_PROC_ENTRIES */ - PetscBool submat_singleis; /* for efficient PCSetUP_ASM() */ + PetscBool submat_singleis; /* for efficient PCSetUp_ASM() */ PetscBool structure_only; - PetscBool sortedfull; /* full, sorted rows are inserted */ + PetscBool sortedfull; /* full, sorted rows are inserted */ #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) PetscOffloadMask offloadmask; /* a mask which indicates where the valid matrix data is (GPU, CPU or both) */ - PetscBool pinnedtocpu; + PetscBool boundtocpu; #endif void *spptr; /* pointer for special library like SuperLU */ char *solvertype; @@ -462,6 +463,7 @@ struct _p_MatPartitioning { PetscInt n; /* number of partitions */ void *data; PetscInt setupcalled; + PetscBool use_edge_weights; /* A flag indicates whether or not to use edge weights */ }; /* needed for parallel nested dissection by ParMetis and PTSCOTCH */ diff --git a/include/petsc/private/pcgamgimpl.h b/include/petsc/private/pcgamgimpl.h index 7e6eaf2da49..9f37d27caea 100644 --- a/include/petsc/private/pcgamgimpl.h +++ b/include/petsc/private/pcgamgimpl.h @@ -1,5 +1,6 @@ #if !defined(__GAMG_IMPL) #define __GAMG_IMPL +#include #include #include /*I "petscksp.h" I*/ #include /*I "petscmatcoarsen.h" I*/ @@ -15,7 +16,6 @@ struct _PCGAMGOps { PetscErrorCode (*destroy)(PC); PetscErrorCode (*view)(PC,PetscViewer); }; -#define PETSC_GAMG_MAXLEVELS 30 /* Private context for the GAMG preconditioner */ typedef struct gamg_TAG { PCGAMGType type; @@ -31,7 +31,7 @@ typedef struct gamg_TAG { PetscInt coarse_eq_limit; PetscReal threshold_scale; PetscInt current_level; /* stash construction state */ - PetscReal threshold[PETSC_GAMG_MAXLEVELS]; /* common quatity to many AMG methods so keep it up here */ + PetscReal threshold[PETSC_MG_MAXLEVELS]; /* common quatity to many AMG methods so keep it up here */ /* these 4 are all related to the method data and should be in the subctx */ PetscInt data_sz; /* nloc*data_rows*data_cols */ @@ -43,17 +43,22 @@ typedef struct gamg_TAG { PetscReal *orig_data; /* cache data */ struct _PCGAMGOps *ops; - char *gamg_type_name; + char *gamg_type_name; - void *subctx; + void *subctx; + + char esteig_type[32]; + PetscInt esteig_max_it; + PetscInt use_sa_esteig; + PetscReal 
emin,emax; } PC_GAMG; PetscErrorCode PCReset_MG(PC); /* hooks create derivied classes */ -PetscErrorCode PCCreateGAMG_GEO(PC); -PetscErrorCode PCCreateGAMG_AGG(PC); -PetscErrorCode PCCreateGAMG_Classical(PC); +PetscErrorCode PCCreateGAMG_GEO(PC); +PetscErrorCode PCCreateGAMG_AGG(PC); +PetscErrorCode PCCreateGAMG_Classical(PC); PetscErrorCode PCDestroy_GAMG(PC); diff --git a/include/petsc/private/pcmgimpl.h b/include/petsc/private/pcmgimpl.h index dd0139e2bd8..d414f8b3898 100644 --- a/include/petsc/private/pcmgimpl.h +++ b/include/petsc/private/pcmgimpl.h @@ -5,7 +5,7 @@ #define __MG_IMPL #include #include - +#define PETSC_MG_MAXLEVELS 10 /* Each level has its own copy of this data. Level (0) is always the coarsest level and Level (levels-1) is the finest. @@ -39,7 +39,6 @@ typedef struct { typedef struct { PCMGType am; /* Multiplicative, additive or full */ PetscInt cyclesperpcapply; /* Number of cycles to use in each PCApply(), multiplicative only*/ - PetscInt maxlevels; /* total number of levels allocated */ PCMGGalerkinType galerkin; /* use Galerkin process to compute coarser matrices */ PetscBool usedmfornumberoflevels; /* sets the number of levels by getting this information out of the DM */ @@ -52,6 +51,8 @@ typedef struct { void *innerctx; /* optional data for preconditioner, like PCEXOTIC that inherits off of PCMG */ PetscLogStage stageApply; PetscErrorCode (*view)(PC,PetscViewer); /* GAMG and other objects that use PCMG can set their own viewer here */ + PetscReal min_eigen_DinvA[PETSC_MG_MAXLEVELS]; + PetscReal max_eigen_DinvA[PETSC_MG_MAXLEVELS]; } PC_MG; PETSC_INTERN PetscErrorCode PCSetUp_MG(PC); diff --git a/include/petsc/private/petscconvestimpl.h b/include/petsc/private/petscconvestimpl.h index 9d9519808ea..d4c7b347e67 100644 --- a/include/petsc/private/petscconvestimpl.h +++ b/include/petsc/private/petscconvestimpl.h @@ -10,22 +10,28 @@ struct _PetscConvEstOps { PetscErrorCode (*setup)(PetscConvEst); PetscErrorCode (*view)(PetscConvEst,PetscViewer); PetscErrorCode (*destroy)(PetscConvEst); + PetscErrorCode (*setsolver)(PetscConvEst, PetscObject); + PetscErrorCode (*initguess)(PetscConvEst, PetscInt, DM, Vec); + PetscErrorCode (*computeerror)(PetscConvEst, PetscInt, DM, Vec, PetscReal[]); + PetscErrorCode (*getconvrate)(PetscConvEst, PetscReal[]); }; struct _p_PetscConvEst { PETSCHEADER(struct _PetscConvEstOps); /* Inputs */ - DM idm; /* Initial grid */ - SNES snes; /* Solver */ - PetscInt Nr; /* The number of refinements */ - PetscInt Nf; /* The number of fields in the DM */ + DM idm; /* Initial grid */ + PetscObject solver; /* Solver */ + PetscReal r; /* The refinement factor (spatial check requires r = 2) */ + PetscInt Nr; /* The number of refinements */ + PetscInt Nf; /* The number of fields in the DM */ PetscErrorCode (**initGuess)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *); PetscErrorCode (**exactSol)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *); void **ctxs; /* Outputs */ - PetscBool monitor; - PetscReal *errors; + PetscLogEvent event; + PetscBool monitor; + PetscReal *errors; }; #endif diff --git a/include/petsc/private/petscdsimpl.h b/include/petsc/private/petscdsimpl.h index babb7916659..31d23aa7ac8 100644 --- a/include/petsc/private/petscdsimpl.h +++ b/include/petsc/private/petscdsimpl.h @@ -59,31 +59,31 @@ struct _p_PetscDS { PetscScalar *constants; /* Array of constants passed to point functions */ void **ctx; /* User contexts for each field */ /* Computed sizes */ - PetscInt totDim; /* Total system 
dimension */ - PetscInt totComp; /* Total field components */ - PetscInt *Nc; /* Number of components for each field */ - PetscInt *Nb; /* Number of basis functions for each field */ - PetscInt *off; /* Offsets for each field */ - PetscInt *offDer; /* Derivative offsets for each field */ - PetscReal **basis; /* Default basis tabulation for each field */ - PetscReal **basisDer; /* Default basis derivative tabulation for each field */ - PetscReal **basisFace; /* Basis tabulation for each local face and field */ - PetscReal **basisDerFace; /* Basis derivative tabulation for each local face and field */ + PetscInt totDim; /* Total system dimension */ + PetscInt totComp; /* Total field components */ + PetscInt *Nc; /* Number of components for each field */ + PetscInt *Nb; /* Number of basis functions for each field */ + PetscInt *off; /* Offsets for each field */ + PetscInt *offDer; /* Derivative offsets for each field */ + PetscTabulation *T; /* Basis function and derivative tabulation for each field */ + PetscTabulation *Tf; /* Basis function and derivative tabulation for each local face and field */ /* Work space */ - PetscScalar *u; /* Field evaluation */ - PetscScalar *u_t; /* Field time derivative evaluation */ - PetscScalar *u_x; /* Field gradient evaluation */ - PetscScalar *basisReal; /* Workspace for pushforward */ - PetscScalar *basisDerReal; /* Workspace for derivative pushforward */ - PetscScalar *testReal; /* Workspace for pushforward */ - PetscScalar *testDerReal; /* Workspace for derivative pushforward */ - PetscReal *x; /* Workspace for computing real coordinates */ - PetscScalar *f0, *f1; /* Point evaluations of weak form residual integrands */ - PetscScalar *g0, *g1, *g2, *g3; /* Point evaluations of weak form Jacobian integrands */ + PetscScalar *u; /* Field evaluation */ + PetscScalar *u_t; /* Field time derivative evaluation */ + PetscScalar *u_x; /* Field gradient evaluation */ + PetscScalar *basisReal; /* Workspace for pushforward */ + PetscScalar *basisDerReal; /* Workspace for derivative pushforward */ + PetscScalar *testReal; /* Workspace for pushforward */ + PetscScalar *testDerReal; /* Workspace for derivative pushforward */ + PetscReal *x; /* Workspace for computing real coordinates */ + PetscScalar *f0, *f1; /* Point evaluations of weak form residual integrands */ + PetscScalar *g0, *g1, *g2, *g3; /* Point evaluations of weak form Jacobian integrands */ }; typedef struct { PetscInt dummy; /* */ } PetscDS_Basic; +PETSC_INTERN PetscErrorCode PetscDSIsFE_Internal(PetscDS, PetscInt, PetscBool *); + #endif diff --git a/include/petsc/private/petscfeimpl.h b/include/petsc/private/petscfeimpl.h index b066cc1edc1..7a4d799aa55 100644 --- a/include/petsc/private/petscfeimpl.h +++ b/include/petsc/private/petscfeimpl.h @@ -128,7 +128,7 @@ struct _PetscFEOps { PetscErrorCode (*view)(PetscFE,PetscViewer); PetscErrorCode (*destroy)(PetscFE); PetscErrorCode (*getdimension)(PetscFE,PetscInt*); - PetscErrorCode (*gettabulation)(PetscFE,PetscInt,const PetscReal*,PetscReal*,PetscReal*,PetscReal*); + PetscErrorCode (*createtabulation)(PetscFE,PetscInt,const PetscReal*,PetscInt,PetscTabulation); /* Element integration */ PetscErrorCode (*integrate)(PetscDS, PetscInt, PetscInt, PetscFEGeom *, const PetscScalar[], PetscDS, const PetscScalar[], PetscScalar[]); PetscErrorCode (*integratebd)(PetscDS, PetscInt, PetscBdPointFunc, PetscInt, PetscFEGeom *, const PetscScalar[], PetscDS, const PetscScalar[], PetscScalar[]); @@ -149,9 +149,9 @@ struct _p_PetscFE { PetscQuadrature faceQuadrature; /* 
Suitable face quadrature on \partial K */ PetscFE *subspaces; /* Subspaces for each dimension */ PetscReal *invV; /* Change of basis matrix, from prime to nodal basis set */ - PetscReal *B, *D, *H; /* Tabulation of basis and derivatives at quadrature points */ - PetscReal *Bf, *Df, *Hf; /* Tabulation of basis and derivatives at quadrature points on each face */ - PetscReal *F; /* Tabulation of basis at face centroids */ + PetscTabulation T; /* Tabulation of basis and derivatives at quadrature points */ + PetscTabulation Tf; /* Tabulation of basis and derivatives at quadrature points on each face */ + PetscTabulation Tc; /* Tabulation of basis at face centroids */ PetscInt blockSize, numBlocks; /* Blocks are processed concurrently */ PetscInt batchSize, numBatches; /* A batch is made up of blocks, Batches are processed in serial */ PetscBool setupcalled; @@ -215,18 +215,21 @@ PETSC_STATIC_INLINE void CoordinatesRealToRef(PetscInt dimReal, PetscInt dimRef, PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolate_Static(PetscFE fe, const PetscScalar x[], PetscFEGeom *fegeom, PetscInt q, PetscScalar interpolant[]) { - PetscReal *basis; - PetscInt Nb, Nc, fc, f; - PetscErrorCode ierr; + PetscTabulation T; + PetscInt fc, f; + PetscErrorCode ierr; PetscFunctionBeginHot; - ierr = PetscFEGetDimension(fe, &Nb);CHKERRQ(ierr); - ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr); - ierr = PetscFEGetDefaultTabulation(fe, &basis, NULL, NULL);CHKERRQ(ierr); - for (fc = 0; fc < Nc; ++fc) { - interpolant[fc] = 0.0; - for (f = 0; f < Nb; ++f) { - interpolant[fc] += x[f]*basis[(q*Nb + f)*Nc + fc]; + ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr); + { + const PetscReal *basis = T->T[0]; + const PetscInt Nb = T->Nb; + const PetscInt Nc = T->Nc; + for (fc = 0; fc < Nc; ++fc) { + interpolant[fc] = 0.0; + for (f = 0; f < Nb; ++f) { + interpolant[fc] += x[f]*basis[(q*Nb + f)*Nc + fc]; + } } } ierr = PetscFEPushforward(fe, fegeom, 1, interpolant);CHKERRQ(ierr); @@ -235,20 +238,25 @@ PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolate_Static(PetscFE fe, const P PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolateGradient_Static(PetscFE fe, const PetscScalar x[], PetscFEGeom *fegeom, PetscInt q, PetscScalar interpolant[]) { - PetscReal *basisDer; - PetscInt Nb, Nc, fc, f, d; - const PetscInt dim = fegeom->dimEmbed; - PetscErrorCode ierr; + PetscTabulation T; + PetscInt fc, f, d; + PetscErrorCode ierr; PetscFunctionBeginHot; - ierr = PetscFEGetDimension(fe, &Nb);CHKERRQ(ierr); - ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr); - ierr = PetscFEGetDefaultTabulation(fe, NULL, &basisDer, NULL);CHKERRQ(ierr); - for (fc = 0; fc < Nc; ++fc) { - for (d = 0; d < dim; ++d) interpolant[fc*dim+d] = 0.0; - for (f = 0; f < Nb; ++f) { - for (d = 0; d < dim; ++d) { - interpolant[fc*dim+d] += x[f]*basisDer[((q*Nb + f)*Nc + fc)*dim + d]; + ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr); + { + const PetscReal *basisDer = T->T[1]; + const PetscInt Nb = T->Nb; + const PetscInt Nc = T->Nc; + const PetscInt cdim = T->cdim; + + if (cdim != fegeom->dimEmbed) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Geometry dim %D must match tabulation dim %D", fegeom->dimEmbed, cdim); + for (fc = 0; fc < Nc; ++fc) { + for (d = 0; d < cdim; ++d) interpolant[fc*cdim+d] = 0.0; + for (f = 0; f < Nb; ++f) { + for (d = 0; d < cdim; ++d) { + interpolant[fc*cdim+d] += x[f]*basisDer[((q*Nb + f)*Nc + fc)*cdim + d]; + } } } } @@ -258,21 +266,27 @@ PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolateGradient_Static(PetscFE fe, 
PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolateFieldAndGradient_Static(PetscFE fe, const PetscScalar x[], PetscFEGeom *fegeom, PetscInt q, PetscScalar interpolant[], PetscScalar interpolantGrad[]) { - PetscReal *basis, *basisDer; - PetscInt Nb, Nc, fc, f, d; - const PetscInt dim = fegeom->dimEmbed; - PetscErrorCode ierr; + PetscTabulation T; + PetscInt fc, f, d; + PetscErrorCode ierr; PetscFunctionBeginHot; - ierr = PetscFEGetDimension(fe, &Nb);CHKERRQ(ierr); - ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr); - ierr = PetscFEGetDefaultTabulation(fe, &basis, &basisDer, NULL);CHKERRQ(ierr); - for (fc = 0; fc < Nc; ++fc) { - interpolant[fc] = 0.0; - for (d = 0; d < dim; ++d) interpolantGrad[fc*dim+d] = 0.0; - for (f = 0; f < Nb; ++f) { - interpolant[fc] += x[f]*basis[(q*Nb + f)*Nc + fc]; - for (d = 0; d < dim; ++d) interpolantGrad[fc*dim+d] += x[f]*basisDer[((q*Nb + f)*Nc + fc)*dim + d]; + ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr); + { + const PetscReal *basis = T->T[0]; + const PetscReal *basisDer = T->T[1]; + const PetscInt Nb = T->Nb; + const PetscInt Nc = T->Nc; + const PetscInt cdim = T->cdim; + + if (cdim != fegeom->dimEmbed) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Geometry dim %D must match tabulation dim %D", fegeom->dimEmbed, cdim); + for (fc = 0; fc < Nc; ++fc) { + interpolant[fc] = 0.0; + for (d = 0; d < cdim; ++d) interpolantGrad[fc*cdim+d] = 0.0; + for (f = 0; f < Nb; ++f) { + interpolant[fc] += x[f]*basis[(q*Nb + f)*Nc + fc]; + for (d = 0; d < cdim; ++d) interpolantGrad[fc*cdim+d] += x[f]*basisDer[((q*Nb + f)*Nc + fc)*cdim + d]; + } } } ierr = PetscFEPushforward(fe, fegeom, 1, interpolant);CHKERRQ(ierr); @@ -283,10 +297,10 @@ PETSC_STATIC_INLINE PetscErrorCode PetscFEInterpolateFieldAndGradient_Static(Pet PETSC_INTERN PetscErrorCode PetscDualSpaceLatticePointLexicographic_Internal(PetscInt, PetscInt, PetscInt[]); PETSC_INTERN PetscErrorCode PetscDualSpaceTensorPointLexicographic_Internal(PetscInt, PetscInt, PetscInt[]); -PETSC_INTERN PetscErrorCode PetscFEEvaluateFieldJets_Internal(PetscDS, PetscInt, PetscInt, const PetscInt[], const PetscInt[], PetscInt, PetscReal *[], PetscReal *[], PetscFEGeom *, const PetscScalar[], const PetscScalar[], PetscScalar[], PetscScalar[], PetscScalar[]); +PETSC_INTERN PetscErrorCode PetscFEEvaluateFieldJets_Internal(PetscDS, PetscInt, PetscInt, PetscInt, PetscTabulation[], PetscFEGeom *, const PetscScalar[], const PetscScalar[], PetscScalar[], PetscScalar[], PetscScalar[]); PETSC_INTERN PetscErrorCode PetscFEEvaluateFaceFields_Internal(PetscDS, PetscInt, PetscInt, const PetscScalar[], PetscScalar[]); -PETSC_INTERN PetscErrorCode PetscFEUpdateElementVec_Internal(PetscFE, PetscInt, PetscInt, PetscInt, PetscInt, PetscReal[], PetscReal[], PetscScalar[], PetscScalar[], PetscFEGeom *, PetscScalar[], PetscScalar[], PetscScalar[]); -PETSC_INTERN PetscErrorCode PetscFEUpdateElementMat_Internal(PetscFE, PetscFE, PetscInt, PetscInt, PetscInt, const PetscReal[], const PetscReal[], PetscScalar[], PetscScalar[], PetscInt, PetscInt, const PetscReal[], const PetscReal[], PetscScalar[], PetscScalar[], PetscFEGeom *, const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscInt, PetscInt, PetscInt, PetscInt, PetscScalar[]); +PETSC_INTERN PetscErrorCode PetscFEUpdateElementVec_Internal(PetscFE, PetscTabulation, PetscInt, PetscScalar[], PetscScalar[], PetscFEGeom *, PetscScalar[], PetscScalar[], PetscScalar[]); +PETSC_INTERN PetscErrorCode PetscFEUpdateElementMat_Internal(PetscFE, PetscFE, PetscInt, 
PetscInt, PetscTabulation, PetscScalar[], PetscScalar[], PetscTabulation, PetscScalar[], PetscScalar[], PetscFEGeom *, const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscInt, PetscInt, PetscInt, PetscInt, PetscScalar[]); PETSC_EXTERN PetscErrorCode PetscFEGetDimension_Basic(PetscFE, PetscInt *); PETSC_EXTERN PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS, PetscInt, PetscInt, PetscFEGeom *, const PetscScalar [], const PetscScalar [], PetscDS, const PetscScalar [], PetscReal, PetscScalar []); diff --git a/include/petsc/private/petscfptimpl.h b/include/petsc/private/petscfptimpl.h index 98903b1eaa1..c376b02a537 100644 --- a/include/petsc/private/petscfptimpl.h +++ b/include/petsc/private/petscfptimpl.h @@ -110,7 +110,7 @@ PETSC_STATIC_INLINE PetscErrorCode PetscFPTFind(void* key,char const **data) { PetscInt hash,ii = 0; - *data = 0; + *data = NULL; if (!PetscFPTData) return(0); hash = PetscHashPointer(key); while (ii++ < PetscFPTData->tablesize) { diff --git a/include/petsc/private/petscfvimpl.h b/include/petsc/private/petscfvimpl.h index 1dc3cf96072..b1f566da439 100644 --- a/include/petsc/private/petscfvimpl.h +++ b/include/petsc/private/petscfvimpl.h @@ -67,16 +67,16 @@ struct _PetscFVOps { struct _p_PetscFV { PETSCHEADER(struct _PetscFVOps); - void *data; /* Implementation object */ - PetscLimiter limiter; /* The slope limiter */ - PetscDualSpace dualSpace; /* The dual space P', usually simple */ - PetscInt numComponents; /* The number of field components */ - PetscInt dim; /* The spatial dimension */ - PetscBool computeGradients; /* Flag for gradient computation */ - PetscScalar *fluxWork; /* The work array for flux calculation */ - PetscQuadrature quadrature; /* Suitable quadrature on the volume */ - PetscReal *B, *D, *H; /* Tabulation of pseudo-basis and derivatives at quadrature points */ - char **componentNames; /* Names of the component fields */ + void *data; /* Implementation object */ + PetscLimiter limiter; /* The slope limiter */ + PetscDualSpace dualSpace; /* The dual space P', usually simple */ + PetscInt numComponents; /* The number of field components */ + PetscInt dim; /* The spatial dimension */ + PetscBool computeGradients; /* Flag for gradient computation */ + PetscScalar *fluxWork; /* The work array for flux calculation */ + PetscQuadrature quadrature; /* Suitable quadrature on the volume */ + PetscTabulation T; /* Tabulation of pseudo-basis and derivatives at quadrature points */ + char **componentNames; /* Names of the component fields */ }; typedef struct { diff --git a/include/petsc/private/petschpddm.h b/include/petsc/private/petschpddm.h index 26f79565300..cc533321374 100644 --- a/include/petsc/private/petschpddm.h +++ b/include/petsc/private/petschpddm.h @@ -3,8 +3,14 @@ #include +PETSC_EXTERN PetscLogEvent PC_HPDDM_Strc; +PETSC_EXTERN PetscLogEvent PC_HPDDM_PtAP; +PETSC_EXTERN PetscLogEvent PC_HPDDM_PtBP; +PETSC_EXTERN PetscLogEvent PC_HPDDM_Next; + namespace HPDDM { - template class Schwarz; /* forward definition of the HPDDM class */ + template class Schwarz; /* forward definitions of two needed HPDDM classes */ + struct PETScOperator; } struct PC_HPDDM_Level { @@ -22,13 +28,23 @@ struct PC_HPDDM_Level { struct PC_HPDDM { PC_HPDDM_Level **levels; /* array of shells */ Mat aux; /* local auxiliary matrix defined at the finest level on PETSC_COMM_SELF */ + Mat B; /* right-hand side matrix defined at the finest level on PETSC_COMM_SELF */ IS is; /* global numbering of the auxiliary matrix */ PetscInt N; /* number of 
levels */ PCHPDDMCoarseCorrectionType correction; /* type of coarse correction */ + PetscBool Neumann; /* aux is the local Neumann matrix? */ PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void*); /* setup function for the auxiliary matrix */ void* setup_ctx; /* context for setup */ }; +struct KSP_HPDDM { + HPDDM::PETScOperator *op; + PetscReal rcntl[2]; + int icntl[1]; + unsigned short scntl[3]; + char cntl [6]; +}; + #define PETSC_HPDDM_MAXLEVELS 10 #include diff --git a/include/petsc/private/petscimpl.h b/include/petsc/private/petscimpl.h index 709d953026c..494e226257c 100644 --- a/include/petsc/private/petscimpl.h +++ b/include/petsc/private/petscimpl.h @@ -158,9 +158,9 @@ PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*PetscObjectViewFunction)(PetscObje @*/ #define PetscHeaderCreate(h,classid,class_name,descr,mansec,comm,destroy,view) \ (PetscNew(&(h)) || \ - PetscHeaderCreate_Private((PetscObject)h,classid,class_name,descr,mansec,comm,(PetscObjectDestroyFunction)destroy,(PetscObjectViewFunction)view) || \ + PetscHeaderCreate_Private((PetscObject)(h),classid,class_name,descr,mansec,comm,(PetscObjectDestroyFunction)(destroy),(PetscObjectViewFunction)(view)) || \ PetscLogObjectCreate(h) || \ - PetscLogObjectMemory((PetscObject)h,sizeof(*(h)))) + PetscLogObjectMemory((PetscObject)(h),sizeof(*(h)))) PETSC_EXTERN PetscErrorCode PetscComposedQuantitiesDestroy(PetscObject obj); PETSC_EXTERN PetscErrorCode PetscHeaderCreate_Private(PetscObject,PetscClassId,const char[],const char[],const char[],MPI_Comm,PetscObjectDestroyFunction,PetscObjectViewFunction); @@ -175,7 +175,7 @@ PETSC_EXTERN PetscErrorCode PetscHeaderCreate_Private(PetscObject,PetscClassId,c .seealso: PetscHeaderCreate() @*/ -#define PetscHeaderDestroy(h) (PetscHeaderDestroy_Private((PetscObject)(*h)) || PetscFree(*h)) +#define PetscHeaderDestroy(h) (PetscHeaderDestroy_Private((PetscObject)(*(h))) || PetscFree(*(h))) PETSC_EXTERN PetscErrorCode PetscHeaderDestroy_Private(PetscObject); PETSC_EXTERN PetscErrorCode PetscObjectCopyFortranFunctionPointers(PetscObject,PetscObject); @@ -213,13 +213,13 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); PetscErrorCode _7_ierr; \ PetscBool _7_same; \ PetscValidHeaderSpecific(h,ck,arg); \ - _7_ierr = PetscObjectTypeCompare((PetscObject)h,t,&_7_same);CHKERRQ(_7_ierr); \ - if (!_7_same) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Wrong subtype object:Parameter # %d must have implementation %s it is %s",arg,t,((PetscObject)h)->type_name); \ + _7_ierr = PetscObjectTypeCompare((PetscObject)(h),t,&_7_same);CHKERRQ(_7_ierr); \ + if (!_7_same) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Wrong subtype object:Parameter # %d must have implementation %s it is %s",arg,t,((PetscObject)(h))->type_name); \ } while (0) #define PetscValidHeaderSpecific(h,ck,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Object: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Object: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_OBJECT)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Invalid Pointer to Object: Parameter # %d",arg); \ if (((PetscObject)(h))->classid != ck) { \ if (((PetscObject)(h))->classid == PETSCFREEDHEADER) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Object already free: Parameter # %d",arg); \ @@ -229,7 +229,7 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); #define PetscValidHeader(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null 
Object: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Object: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_OBJECT)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Invalid Pointer to Object: Parameter # %d",arg); \ if (((PetscObject)(h))->classid == PETSCFREEDHEADER) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Object already free: Parameter # %d",arg); \ else if (((PetscObject)(h))->classid < PETSC_SMALLEST_CLASSID || ((PetscObject)(h))->classid > PETSC_LARGEST_CLASSID) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Invalid type of object: Parameter # %d",arg); \ @@ -237,47 +237,56 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); #define PetscValidPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_CHAR)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer: Parameter # %d",arg); \ } while (0) #define PetscValidCharPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg);\ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg);\ if (!PetscCheckPointer(h,PETSC_CHAR)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer to char: Parameter # %d",arg); \ } while (0) #define PetscValidIntPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Null Pointer: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Null Pointer: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_INT)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer to PetscInt: Parameter # %d",arg); \ } while (0) #define PetscValidBoolPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Null Pointer: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Null Pointer: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_BOOL)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer to PetscBool: Parameter # %d",arg); \ } while (0) #define PetscValidScalarPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_SCALAR)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer to PetscScalar: Parameter # %d",arg); \ } while (0) -#define PetscValidRealPointer(h,arg) \ +#define PetscValidRealPointer(h,arg) \ do { \ - if (!h) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ + if (!(h)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Pointer: Parameter # %d",arg); \ if (!PetscCheckPointer(h,PETSC_REAL)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Invalid Pointer to PetscReal: Parameter # %d",arg); \ } while (0) #define PetscValidFunction(f,arg) \ do { \ - if (!f) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Function Pointer: Parameter # %d",arg); \ + if (!(f)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Null Function Pointer: Parameter # %d",arg); \ } while (0) #endif +#define PetscSorted(n,idx,sorted) \ + do { \ + PetscInt _i_; \ + (sorted) = PETSC_TRUE; \ + for (_i_ = 1; _i_ < (n); _i_++) \ + if ((idx)[_i_] < (idx)[_i_ - 1]) \ + { (sorted) = PETSC_FALSE; break; } \ + } while(0) + #if 
!defined(PETSC_USE_DEBUG) #define PetscCheckSameType(a,arga,b,argb) do {(void)(a);(void)(b);} while (0) @@ -286,12 +295,13 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); #define PetscValidType(a,arg) do {(void)(a);} while (0) #define PetscCheckSameComm(a,arga,b,argb) do {(void)(a);(void)(b);} while (0) #define PetscCheckSameTypeAndComm(a,arga,b,argb) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveScalar(a,b,c) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveReal(a,b,c) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveInt(a,b,c) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveMPIInt(a,b,c) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveBool(a,b,c) do {(void)(a);(void)(b);} while (0) -#define PetscValidLogicalCollectiveEnum(a,b,c) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveScalar(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveReal(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveInt(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveMPIInt(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveBool(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscValidLogicalCollectiveEnum(a,b,arg) do {(void)(a);(void)(b);} while (0) +#define PetscCheckSorted(n,idx) do {(void)(n);(void)(idx);} while (0) #else @@ -301,32 +311,32 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); */ #define PetscCheckSameType(a,arga,b,argb) \ do { \ - if (((PetscObject)a)->type != ((PetscObject)b)->type) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"Objects not of same type: Argument # %d and %d",arga,argb); \ + if (((PetscObject)(a))->type != ((PetscObject)(b))->type) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"Objects not of same type: Argument # %d and %d",arga,argb); \ } while (0) /* Check type_name */ #define PetscCheckTypeName(a,type) \ do { \ - PetscBool _7_match; \ + PetscBool _7_match; \ PetscErrorCode _7_ierr; \ - _7_ierr = PetscObjectTypeCompare(((PetscObject)a),(type),&_7_match);CHKERRQ(_7_ierr); \ - if (!_7_match) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Object (%s) is not %s",(char*)(((PetscObject)a)->type_name),type); \ + _7_ierr = PetscObjectTypeCompare(((PetscObject)(a)),(type),&_7_match);CHKERRQ(_7_ierr); \ + if (!_7_match) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Object (%s) is not %s",(char*)(((PetscObject)(a))->type_name),type); \ } while (0) #define PetscCheckTypeNames(a,type1,type2) \ do { \ PetscBool _7_match; \ PetscErrorCode _7_ierr; \ - _7_ierr = PetscObjectTypeCompareAny(((PetscObject)a),&_7_match,(type1),(type2),"");CHKERRQ(_7_ierr); \ - if (!_7_match) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Object (%s) is not %s or %s",(char*)(((PetscObject)a)->type_name),type1,type2); \ + _7_ierr = PetscObjectTypeCompareAny(((PetscObject)(a)),&_7_match,(type1),(type2),"");CHKERRQ(_7_ierr); \ + if (!_7_match) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Object (%s) is not %s or %s",(char*)(((PetscObject)(a))->type_name),type1,type2); \ } while (0) /* Use this macro to check if the type is set */ #define PetscValidType(a,arg) \ do { \ - if (!((PetscObject)a)->type_name) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"%s object's type is not set: Argument # %d",((PetscObject)a)->class_name,arg); \ + if (!((PetscObject)(a))->type_name) 
SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"%s object's type is not set: Argument # %d",((PetscObject)(a))->class_name,arg); \ } while (0) /* Sometimes object must live on same communicator to inter-operate @@ -335,7 +345,7 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); do { \ PetscErrorCode _7_ierr; \ PetscMPIInt _7_flag; \ - _7_ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)a),PetscObjectComm((PetscObject)b),&_7_flag);CHKERRQ(_7_ierr); \ + _7_ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)(a)),PetscObjectComm((PetscObject)(b)),&_7_flag);CHKERRQ(_7_ierr); \ if (_7_flag != MPI_CONGRUENT && _7_flag != MPI_IDENT) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMECOMM,"Different communicators in the two objects: Argument # %d and %d flag %d",arga,argb,_7_flag); \ } while (0) @@ -345,53 +355,68 @@ PETSC_EXTERN PetscBool PetscCheckPointer(const void*,PetscDataType); PetscCheckSameComm(a,arga,b,argb); \ } while (0) -#define PetscValidLogicalCollectiveScalar(a,b,c) \ +#define PetscValidLogicalCollectiveScalar(a,b,arg) \ do { \ PetscErrorCode _7_ierr; \ + PetscScalar b0=(b); \ PetscReal b1[5],b2[5]; \ - if (PetscIsNanScalar(b)) {b1[4] = 1;} else {b1[4] = 0;}; \ - b1[0] = -PetscRealPart(b); b1[1] = PetscRealPart(b); b1[2] = -PetscImaginaryPart(b); b1[3] = PetscImaginaryPart(b); \ - _7_ierr = MPI_Allreduce(b1,b2,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)a));CHKERRQ(_7_ierr); \ - if (!(b2[4] > 0) && !(PetscEqualReal(-b2[0],b2[1]) && PetscEqualReal(-b2[2],b2[3]))) SETERRQ1(PetscObjectComm((PetscObject)a),PETSC_ERR_ARG_WRONG,"Scalar value must be same on all processes, argument # %d",c); \ + if (PetscIsNanScalar(b0)) {b1[4] = 1;} else {b1[4] = 0;}; \ + b1[0] = -PetscRealPart(b0); b1[1] = PetscRealPart(b0); b1[2] = -PetscImaginaryPart(b0); b1[3] = PetscImaginaryPart(b0); \ + _7_ierr = MPI_Allreduce(b1,b2,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (!(b2[4] > 0) && !(PetscEqualReal(-b2[0],b2[1]) && PetscEqualReal(-b2[2],b2[3]))) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"Scalar value must be same on all processes, argument # %d",arg); \ } while (0) -#define PetscValidLogicalCollectiveReal(a,b,c) \ +#define PetscValidLogicalCollectiveReal(a,b,arg) \ do { \ PetscErrorCode _7_ierr; \ - PetscReal b1[3],b2[3]; \ - if (PetscIsNanReal(b)) {b1[2] = 1;} else {b1[2] = 0;}; \ - b1[0] = -b; b1[1] = b; \ - _7_ierr = MPI_Allreduce(b1,b2,3,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)a));CHKERRQ(_7_ierr); \ - if (!(b2[2] > 0) && !PetscEqualReal(-b2[0],b2[1])) SETERRQ1(PetscObjectComm((PetscObject)a),PETSC_ERR_ARG_WRONG,"Real value must be same on all processes, argument # %d",c); \ + PetscReal b0=(b),b1[3],b2[3]; \ + if (PetscIsNanReal(b0)) {b1[2] = 1;} else {b1[2] = 0;}; \ + b1[0] = -b0; b1[1] = b0; \ + _7_ierr = MPI_Allreduce(b1,b2,3,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (!(b2[2] > 0) && !PetscEqualReal(-b2[0],b2[1])) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"Real value must be same on all processes, argument # %d",arg); \ } while (0) -#define PetscValidLogicalCollectiveInt(a,b,c) \ +#define PetscValidLogicalCollectiveInt(a,b,arg) \ do { \ PetscErrorCode _7_ierr; \ - PetscInt b1[2],b2[2]; \ - b1[0] = -b; b1[1] = b; \ - _7_ierr = MPIU_Allreduce(b1,b2,2,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)a));CHKERRQ(_7_ierr); \ - if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)a),PETSC_ERR_ARG_WRONG,"Int value must be same on all processes, 
argument # %d",c); \ + PetscInt b0=(b),b1[2],b2[2]; \ + b1[0] = -b0; b1[1] = b0; \ + _7_ierr = MPIU_Allreduce(b1,b2,2,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"Int value must be same on all processes, argument # %d",arg); \ } while (0) -#define PetscValidLogicalCollectiveMPIInt(a,b,c) do {} while (0) +#define PetscValidLogicalCollectiveMPIInt(a,b,arg) \ + do { \ + PetscErrorCode _7_ierr; \ + PetscMPIInt b0=(b),b1[2],b2[2]; \ + b1[0] = -b0; b1[1] = b0; \ + _7_ierr = MPIU_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"PetscMPIInt value must be same on all processes, argument # %d",arg); \ + } while (0) -#define PetscValidLogicalCollectiveBool(a,b,c) \ +#define PetscValidLogicalCollectiveBool(a,b,arg) \ do { \ PetscErrorCode _7_ierr; \ - PetscMPIInt b1[2],b2[2]; \ - b1[0] = -(PetscMPIInt)b; b1[1] = (PetscMPIInt)b; \ - _7_ierr = MPIU_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)a));CHKERRQ(_7_ierr); \ - if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)a),PETSC_ERR_ARG_WRONG,"Bool value must be same on all processes, argument # %d",c); \ + PetscMPIInt b0=(PetscMPIInt)(b),b1[2],b2[2]; \ + b1[0] = -b0; b1[1] = b0; \ + _7_ierr = MPIU_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"Bool value must be same on all processes, argument # %d",arg); \ } while (0) -#define PetscValidLogicalCollectiveEnum(a,b,c) \ +#define PetscValidLogicalCollectiveEnum(a,b,arg) \ do { \ PetscErrorCode _7_ierr; \ - PetscMPIInt b1[2],b2[2]; \ - b1[0] = -(PetscMPIInt)b; b1[1] = (PetscMPIInt)b; \ - _7_ierr = MPIU_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)a));CHKERRQ(_7_ierr); \ - if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)a),PETSC_ERR_ARG_WRONG,"Enum value must be same on all processes, argument # %d",c); \ + PetscMPIInt b0=(PetscMPIInt)(b),b1[2],b2[2]; \ + b1[0] = -b0; b1[1] = b0; \ + _7_ierr = MPIU_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)(a)));CHKERRQ(_7_ierr); \ + if (-b2[0] != b2[1]) SETERRQ1(PetscObjectComm((PetscObject)(a)),PETSC_ERR_ARG_WRONG,"Enum value must be same on all processes, argument # %d",arg); \ + } while (0) + +#define PetscCheckSorted(n,idx) \ + do { \ + PetscBool _1_flg; \ + PetscSorted(n,idx,_1_flg); \ + if (!_1_flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Input array needs to be sorted"); \ } while (0) #endif @@ -783,11 +808,6 @@ M*/ PetscObjectComposedDataGetRealstar(obj,id,data,flag) #endif -PETSC_EXTERN PetscErrorCode PetscObjectGetId(PetscObject,PetscObjectId*); -PETSC_EXTERN PetscErrorCode PetscObjectCompareId(PetscObject,PetscObjectId,PetscBool*); - -PETSC_EXTERN PetscErrorCode PetscMonitorCompare(PetscErrorCode (*)(void),void *,PetscErrorCode (*)(void**),PetscErrorCode (*)(void),void *,PetscErrorCode (*)(void**),PetscBool *); - PETSC_EXTERN PetscMPIInt Petsc_Counter_keyval; PETSC_EXTERN PetscMPIInt Petsc_InnerComm_keyval; PETSC_EXTERN PetscMPIInt Petsc_OuterComm_keyval; diff --git a/include/petsc/private/sfimpl.h b/include/petsc/private/sfimpl.h index 4a6b9b37f63..3b0f0172f47 100644 --- a/include/petsc/private/sfimpl.h +++ b/include/petsc/private/sfimpl.h @@ -6,7 +6,7 @@ #include #if defined(PETSC_HAVE_CUDA) -#include +#include 
<../src/vec/vec/impls/seq/seqcuda/cudavecimpl.h> #endif PETSC_EXTERN PetscLogEvent PETSCSF_SetGraph; @@ -24,7 +24,8 @@ PETSC_EXTERN PetscLogEvent PETSCSF_DistSect; PETSC_EXTERN PetscLogEvent PETSCSF_SectSF; PETSC_EXTERN PetscLogEvent PETSCSF_RemoteOff; -typedef enum {PETSC_MEMTYPE_HOST=0, PETSC_MEMTYPE_DEVICE} PetscMemType; +typedef enum {PETSCSF_LEAF2ROOT_REDUCE=0, PETSCSF_ROOT2LEAF_BCAST=1} PetscSFDirection; +typedef enum {PETSC_MEMTYPE_HOST=0, PETSC_MEMTYPE_DEVICE=1} PetscMemType; struct _PetscSFOps { PetscErrorCode (*Reset)(PetscSF); @@ -82,9 +83,10 @@ struct _p_PetscSF { PetscSFPattern pattern; /* Pattern of the graph */ PetscLayout map; /* Layout of leaves over all processes when building a patterned graph */ + PetscBool use_pinned_buf; /* Whether use pinned (i.e., non-pagable) host memory for send/recv buffers */ #if defined(PETSC_HAVE_CUDA) PetscInt *rmine_d; /* A copy of rmine in device memory */ - PetscInt MAX_CORESIDENT_THREADS; + PetscInt maxResidentThreadsPerGPU; #endif void *data; /* Pointer to implementation */ }; @@ -126,7 +128,7 @@ PETSC_STATIC_INLINE PetscErrorCode PetscGetMemType(const void *data,PetscMemType PetscValidPointer(mtype,2); *mtype = PETSC_MEMTYPE_HOST; #if defined(PETSC_HAVE_CUDA) - if (use_gpu_aware_mpi) { + { struct cudaPointerAttributes attr; if (data) { #if (CUDART_VERSION < 10000) @@ -146,6 +148,25 @@ PETSC_STATIC_INLINE PetscErrorCode PetscGetMemType(const void *data,PetscMemType PetscFunctionReturn(0); } +#if defined(PETSC_HAVE_CUDA) +PETSC_STATIC_INLINE PetscErrorCode PetscMallocPinnedMemory(size_t size,void** ptr) +{ + cudaError_t cerr; + PetscFunctionBegin; + cerr = cudaMallocHost(ptr,size);CHKERRCUDA(cerr); + PetscFunctionReturn(0); +} + +PETSC_STATIC_INLINE PetscErrorCode PetscFreePinnedMemory_Private(void* ptr) +{ + cudaError_t cerr; + PetscFunctionBegin; + cerr = cudaFreeHost(ptr);CHKERRCUDA(cerr); + PetscFunctionReturn(0); +} +#define PetscFreePinnedMemory(p) ((p) && (PetscFreePinnedMemory_Private(p) || ((p)=NULL,0))) +#endif + PETSC_STATIC_INLINE PetscErrorCode PetscMallocWithMemType(PetscMemType mtype,size_t size,void** ptr) { PetscFunctionBegin; @@ -169,7 +190,7 @@ PETSC_STATIC_INLINE PetscErrorCode PetscFreeWithMemType_Private(PetscMemType mty } /* Free memory and set ptr to NULL when succeeded */ -#define PetscFreeWithMemType(t,p) ((p) && (PetscFreeWithMemType_Private((t),(p)) || ((p)=0,0))) +#define PetscFreeWithMemType(t,p) ((p) && (PetscFreeWithMemType_Private((t),(p)) || ((p)=NULL,0))) PETSC_STATIC_INLINE PetscErrorCode PetscMemcpyWithMemType(PetscMemType dstmtype,PetscMemType srcmtype,void* dst,const void*src,size_t n) { @@ -177,9 +198,13 @@ PETSC_STATIC_INLINE PetscErrorCode PetscMemcpyWithMemType(PetscMemType dstmtype, if (n) { if (dstmtype == PETSC_MEMTYPE_HOST && srcmtype == PETSC_MEMTYPE_HOST) {PetscErrorCode ierr = PetscMemcpy(dst,src,n);CHKERRQ(ierr);} #if defined(PETSC_HAVE_CUDA) - else if (dstmtype == PETSC_MEMTYPE_DEVICE && srcmtype == PETSC_MEMTYPE_HOST) {cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyHostToDevice);CHKERRCUDA(err);} - else if (dstmtype == PETSC_MEMTYPE_HOST && srcmtype == PETSC_MEMTYPE_DEVICE) {cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyDeviceToHost);CHKERRCUDA(err);} - else if (dstmtype == PETSC_MEMTYPE_DEVICE && srcmtype == PETSC_MEMTYPE_DEVICE) {cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyDeviceToDevice);CHKERRCUDA(err);} + else if (dstmtype == PETSC_MEMTYPE_DEVICE && srcmtype == PETSC_MEMTYPE_HOST) { + cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyHostToDevice);CHKERRCUDA(err); + 
PetscErrorCode ierr = PetscLogCpuToGpu(n);CHKERRQ(ierr); + } else if (dstmtype == PETSC_MEMTYPE_HOST && srcmtype == PETSC_MEMTYPE_DEVICE) { + cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyDeviceToHost);CHKERRCUDA(err); + PetscErrorCode ierr = PetscLogGpuToCpu(n);CHKERRQ(ierr); + } else if (dstmtype == PETSC_MEMTYPE_DEVICE && srcmtype == PETSC_MEMTYPE_DEVICE) {cudaError_t err = cudaMemcpy(dst,src,n,cudaMemcpyDeviceToDevice);CHKERRCUDA(err);} #endif else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Wrong PetscMemType for dst %d and src %d",(int)dstmtype,(int)srcmtype); } diff --git a/include/petsc/private/tsimpl.h b/include/petsc/private/tsimpl.h index c5f3361d7c2..109681a7faf 100644 --- a/include/petsc/private/tsimpl.h +++ b/include/petsc/private/tsimpl.h @@ -61,6 +61,8 @@ struct _TSOps { PetscErrorCode (*gettimeerror)(TS,PetscInt,Vec*); PetscErrorCode (*settimeerror)(TS,Vec); PetscErrorCode (*startingmethod) (TS); + PetscErrorCode (*initcondition)(TS,Vec); + PetscErrorCode (*exacterror)(TS,Vec,Vec); }; /* diff --git a/include/petsc/private/vecimpl.h b/include/petsc/private/vecimpl.h index 86a835a0bb2..72e6230500e 100644 --- a/include/petsc/private/vecimpl.h +++ b/include/petsc/private/vecimpl.h @@ -142,7 +142,7 @@ struct _p_Vec { PetscInt lock; /* lock state. vector can be free (=0), locked for read (>0) or locked for write(<0) */ #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) PetscOffloadMask offloadmask; /* a mask which indicates where the valid vector data is (GPU, CPU or both) */ - PetscBool pinnedtocpu; + PetscBool boundtocpu; void *spptr; /* this is the special pointer to the array on the GPU */ #endif }; diff --git a/include/petsc/private/vecscatterimpl.h b/include/petsc/private/vecscatterimpl.h index 0e53e335688..1f1cce27e26 100644 --- a/include/petsc/private/vecscatterimpl.h +++ b/include/petsc/private/vecscatterimpl.h @@ -312,6 +312,7 @@ struct _p_VecScatter { PetscInt to_n,from_n; PetscBool inuse; /* prevents corruption from mixing two scatters */ PetscBool beginandendtogether; /* indicates that the scatter begin and end function are called together, VecScatterEnd() is then treated as a nop */ + PetscBool packongpu; /* For GPU vectors, pack needed entries on GPU, then copy packed data to CPU, then do MPI. 
Otherwise, we might copy a segment encompassing needed entries */ void *fromdata,*todata; void *spptr; PetscBool is_duplicate; /* IS has duplicate indices, would cause writing error in the case StoP of VecScatterEndMPI3Node */ diff --git a/include/petsc/private/viewerexodusiiimpl.h b/include/petsc/private/viewerexodusiiimpl.h new file mode 100644 index 00000000000..03c6b98e6ee --- /dev/null +++ b/include/petsc/private/viewerexodusiiimpl.h @@ -0,0 +1,15 @@ +#ifndef __VIEWEREXODUSIIIMPL_H +#define __VIEWEREXODUSIIIMPL_H + +#include + +#if defined(PETSC_HAVE_EXODUSII) + +typedef struct { + char *filename; + PetscFileMode btype; + int exoid; +} PetscViewer_ExodusII; + +#endif +#endif diff --git a/include/petscao.h b/include/petscao.h index 0f5ff8fb9df..d13918ba422 100644 --- a/include/petscao.h +++ b/include/petscao.h @@ -47,7 +47,7 @@ PETSC_EXTERN PetscErrorCode AOCreateMapping(MPI_Comm,PetscInt,const PetscInt[],c PETSC_EXTERN PetscErrorCode AOCreateMappingIS(IS,IS,AO*); PETSC_EXTERN PetscErrorCode AOView(AO,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode AOViewFromOptions(AO A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode AOViewFromOptions(AO,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode AODestroy(AO*); /* Dynamic creation and loading functions */ diff --git a/include/petscconvest.h b/include/petscconvest.h index bfa1e95ede5..60af9abf259 100644 --- a/include/petscconvest.h +++ b/include/petscconvest.h @@ -18,9 +18,11 @@ PETSC_EXTERN PetscErrorCode PetscConvEstCreate(MPI_Comm, PetscConvEst *); PETSC_EXTERN PetscErrorCode PetscConvEstDestroy(PetscConvEst *); PETSC_EXTERN PetscErrorCode PetscConvEstView(PetscConvEst, PetscViewer); PETSC_EXTERN PetscErrorCode PetscConvEstSetFromOptions(PetscConvEst); -PETSC_EXTERN PetscErrorCode PetscConvEstGetSolver(PetscConvEst, SNES *); -PETSC_EXTERN PetscErrorCode PetscConvEstSetSolver(PetscConvEst, SNES); +PETSC_EXTERN PetscErrorCode PetscConvEstGetSolver(PetscConvEst, PetscObject *); +PETSC_EXTERN PetscErrorCode PetscConvEstSetSolver(PetscConvEst, PetscObject); PETSC_EXTERN PetscErrorCode PetscConvEstSetUp(PetscConvEst); +PETSC_EXTERN PetscErrorCode PetscConvEstComputeInitialGuess(PetscConvEst, PetscInt, DM, Vec); +PETSC_EXTERN PetscErrorCode PetscConvEstComputeError(PetscConvEst, PetscInt, DM, Vec, PetscReal[]); PETSC_EXTERN PetscErrorCode PetscConvEstGetConvRate(PetscConvEst, PetscReal[]); PETSC_EXTERN PetscErrorCode PetscConvEstRateView(PetscConvEst, const PetscReal[], PetscViewer); diff --git a/include/petsccublas.h b/include/petsccublas.h index 92c169efcd4..dacc63c2b66 100644 --- a/include/petsccublas.h +++ b/include/petsccublas.h @@ -1,10 +1,42 @@ #if !defined(PETSCCUBLAS_H) #define PETSCCUBLAS_H +#include #include #include +#include +#define WaitForGPU() PetscCUDASynchronize ? cudaDeviceSynchronize() : cudaSuccess; + +/* CUDART_VERSION = 1000 x major + 10 x minor version */ + +/* Could not find exactly which CUDART_VERSION introduced cudaGetErrorName. At least it was in CUDA 8.0 (Sep. 
2016) */ +#if (CUDART_VERSION >= 8000) /* CUDA 8.0 */ +#define CHKERRCUDA(cerr) \ +do { \ + if (PetscUnlikely(cerr)) { \ + const char *name = cudaGetErrorName(cerr); \ + const char *descr = cudaGetErrorString(cerr); \ + SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_LIB,"cuda error %d (%s) : %s",(int)cerr,name,descr); \ + } \ +} while(0) +#else +#define CHKERRCUDA(cerr) do {if (PetscUnlikely(cerr)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"cuda error %d",(int)cerr);} while(0) +#endif + +#define CHKERRCUBLAS(stat) \ +do { \ + if (PetscUnlikely(stat)) { \ + const char *name = PetscCUBLASGetErrorName(stat); \ + SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"cuBLAS error %d (%s)",(int)stat,name); \ + } \ +} while(0) + +PETSC_INTERN PetscErrorCode PetscCUBLASInitializeHandle(void); +PETSC_INTERN PetscErrorCode PetscCUSOLVERDnInitializeHandle(void); + +/* cuBLAS does not have cublasGetErrorName(). We create one on our own. */ +PETSC_EXTERN const char* PetscCUBLASGetErrorName(cublasStatus_t); /* PETSC_EXTERN since it is exposed by the CHKERRCUBLAS macro */ PETSC_EXTERN PetscErrorCode PetscCUBLASGetHandle(cublasHandle_t*); PETSC_EXTERN PetscErrorCode PetscCUSOLVERDnGetHandle(cusolverDnHandle_t*); - #endif diff --git a/include/petsccxxcomplexfix.h b/include/petsccxxcomplexfix.h index d3614e8a62b..9041ad0669f 100644 --- a/include/petsccxxcomplexfix.h +++ b/include/petsccxxcomplexfix.h @@ -2,6 +2,15 @@ #define PETSCCXXCOMPLEXFIX_H #if defined(__cplusplus) && defined(PETSC_HAVE_COMPLEX) && defined(PETSC_HAVE_CXX_COMPLEX) +/* + The pragma below silences all compiler warnings coming from code in this header file. + In particular, it silences `-Wfloat-equal` warnings in `operator==()` and `operator!=()` below. + Compilers other than GCC also support this pragma. +*/ +#if defined(__GNUC__) && (__GNUC__ >= 4) +#pragma GCC system_header +#endif + /* Defines additional operator overloading for the C++ complex class that are "missing" in the standard include files. For example, the code fragment @@ -35,6 +44,10 @@ before including any PETSc include files to prevent these methods from being provided.
*/ +#if defined(__GNUC__) && (__GNUC__ >= 4) +#pragma GCC system_header +#endif + #define PETSC_CXX_COMPLEX_FIX(Type) \ static inline PetscComplex operator+(const PetscComplex& lhs, const Type& rhs) { return const_cast(lhs) + PetscReal(rhs); } \ static inline PetscComplex operator+(const Type& lhs, const PetscComplex& rhs) { return PetscReal(lhs) + const_cast(rhs); } \ diff --git a/include/petscdm.h b/include/petscdm.h index 06694492f05..2266702b9dd 100644 --- a/include/petscdm.h +++ b/include/petscdm.h @@ -95,7 +95,7 @@ PETSC_EXTERN PetscErrorCode DMRefineHookRemove(DM,PetscErrorCode (*)(DM,DM,void* PETSC_EXTERN PetscErrorCode DMRestrict(DM,Mat,Vec,Mat,DM); PETSC_EXTERN PetscErrorCode DMInterpolate(DM,Mat,DM); PETSC_EXTERN PetscErrorCode DMSetFromOptions(DM); -PETSC_STATIC_INLINE PetscErrorCode DMViewFromOptions(DM A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode DMViewFromOptions(DM,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode DMAdaptLabel(DM,DMLabel,DM*); PETSC_EXTERN PetscErrorCode DMAdaptMetric(DM, Vec, DMLabel, DM *); @@ -346,4 +346,10 @@ PETSC_EXTERN PetscErrorCode DMGetNullSpaceConstructor(DM, PetscInt, PetscErrorCo PETSC_EXTERN PetscErrorCode DMSetNullSpaceConstructor(DM, PetscInt, PetscErrorCode (*)(DM, PetscInt, MatNullSpace *)); PETSC_EXTERN PetscErrorCode DMGetCompatibility(DM,DM,PetscBool*,PetscBool*); + +PETSC_EXTERN PetscErrorCode DMMonitorSet(DM, PetscErrorCode (*)(DM, void *), void *, PetscErrorCode (*)(void**)); +PETSC_EXTERN PetscErrorCode DMMonitorCancel(DM); +PETSC_EXTERN PetscErrorCode DMMonitorSetFromOptions(DM, const char[], const char[], const char[], PetscErrorCode (*)(DM, void *), PetscErrorCode (*)(DM, PetscViewerAndFormat *), PetscBool *); +PETSC_EXTERN PetscErrorCode DMMonitor(DM); + #endif diff --git a/include/petscdmnetwork.h b/include/petscdmnetwork.h index a20e5503bc2..a2710243ecb 100644 --- a/include/petscdmnetwork.h +++ b/include/petscdmnetwork.h @@ -16,6 +16,7 @@ typedef PetscInt DMNetworkComponentGenericDataType; PETSC_EXTERN PetscErrorCode DMNetworkCreate(MPI_Comm,DM*); PETSC_EXTERN PetscErrorCode DMNetworkSetSizes(DM,PetscInt,PetscInt[],PetscInt[],PetscInt,PetscInt[]); +PETSC_EXTERN PetscErrorCode DMNetworkGetSizes(DM,PetscInt*,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkSetEdgeList(DM,PetscInt*[],PetscInt*[]); PETSC_EXTERN PetscErrorCode DMNetworkLayoutSetUp(DM); PETSC_EXTERN PetscErrorCode DMNetworkRegisterComponent(DM,const char*,size_t,PetscInt*); @@ -23,9 +24,12 @@ PETSC_EXTERN PetscErrorCode DMNetworkGetVertexRange(DM,PetscInt*,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkGetEdgeRange(DM,PetscInt*,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkAddComponent(DM,PetscInt,PetscInt,void*); PETSC_EXTERN PetscErrorCode DMNetworkGetComponent(DM,PetscInt,PetscInt,PetscInt*,void**); +PETSC_EXTERN PetscErrorCode DMNetworkSetComponentNumVariables(DM,PetscInt,PetscInt,PetscInt); PETSC_EXTERN PetscErrorCode DMNetworkGetNumComponents(DM,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkGetVariableOffset(DM,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkGetVariableGlobalOffset(DM,PetscInt,PetscInt*); +PETSC_EXTERN PetscErrorCode DMNetworkGetComponentVariableOffset(DM,PetscInt,PetscInt,PetscInt*); +PETSC_EXTERN PetscErrorCode DMNetworkGetComponentVariableGlobalOffset(DM,PetscInt,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkGetEdgeOffset(DM,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode 
DMNetworkGetVertexOffset(DM,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode DMNetworkAddNumVariables(DM,PetscInt,PetscInt); diff --git a/include/petscdmplex.h b/include/petscdmplex.h index 7dfe4806158..a70d3518f24 100644 --- a/include/petscdmplex.h +++ b/include/petscdmplex.h @@ -38,12 +38,13 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerSetType(PetscPartitioner, PetscParti PETSC_EXTERN PetscErrorCode PetscPartitionerGetType(PetscPartitioner, PetscPartitionerType *); PETSC_EXTERN PetscErrorCode PetscPartitionerSetUp(PetscPartitioner); PETSC_EXTERN PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner); -PETSC_STATIC_INLINE PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscPartitionerView(PetscPartitioner, PetscViewer); PETSC_EXTERN PetscErrorCode PetscPartitionerRegister(const char [], PetscErrorCode (*)(PetscPartitioner)); PETSC_EXTERN PetscErrorCode PetscPartitionerRegisterDestroy(void); +PETSC_EXTERN PetscErrorCode PetscPartitionerPartition(PetscPartitioner, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscSection, PetscSection, PetscSection, IS*); -PETSC_EXTERN PetscErrorCode PetscPartitionerPartition(PetscPartitioner, DM, PetscSection, IS *); +PETSC_EXTERN PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner, DM, PetscSection, PetscSection, IS *); PETSC_EXTERN PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner, PetscInt, const PetscInt[], const PetscInt[]); PETSC_EXTERN PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner, PetscBool); @@ -106,6 +107,23 @@ PETSC_EXTERN PetscErrorCode DMPlexPointGlobalFieldRead(DM,PetscInt,PetscInt,cons /* Topological interpolation */ PETSC_EXTERN const char * const DMPlexInterpolatedFlags[]; +/*E + DMPlexInterpolatedFlag - Describes level of topological interpolatedness. + It is a local or collective property depending on whether it is returned by DMPlexIsInterpolated() or DMPlexIsInterpolatedCollective(). + +$ DMPLEX_INTERPOLATED_INVALID - Uninitialized value (internal use only; never returned by DMPlexIsInterpolated() or DMPlexIsInterpolatedCollective()) +$ DMPLEX_INTERPOLATED_NONE - Mesh is not interpolated +$ DMPLEX_INTERPOLATED_PARTIAL - Mesh is partially interpolated. This can e.g. 
mean DMPlex with cells, faces and vertices but no edges represented, or a mesh with mixed cones (see DMPlexStratify() for an example) +$ DMPLEX_INTERPOLATED_MIXED - Can be returned only by DMPlexIsInterpolatedCollective(), meaning that DMPlexIsInterpolated() returns different interpolatedness on different ranks +$ DMPLEX_INTERPOLATED_FULL - Mesh is fully interpolated + + Level: intermediate + + Developer Note: + Any additions/changes here MUST also be made in include/petsc/finclude/petscdmplex.h and src/dm/f90-mod/petscdmplex.h + +.seealso: DMPlexIsInterpolated(), DMPlexIsInterpolatedCollective(), DMPlexInterpolate(), DMPlexUninterpolate() +E*/ typedef enum { DMPLEX_INTERPOLATED_INVALID = -1, DMPLEX_INTERPOLATED_NONE = 0, @@ -130,6 +148,11 @@ PETSC_EXTERN PetscErrorCode DMPlexGetDepth(DM, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexGetDepthLabel(DM, DMLabel *); PETSC_EXTERN PetscErrorCode DMPlexGetDepthStratum(DM, PetscInt, PetscInt *, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexGetHeightStratum(DM, PetscInt, PetscInt *, PetscInt *); +PETSC_EXTERN PetscErrorCode DMPlexGetPointDepth(DM, PetscInt, PetscInt *); +PETSC_EXTERN PetscErrorCode DMPlexGetPointHeight(DM, PetscInt, PetscInt *); +PETSC_EXTERN PetscErrorCode DMPlexGetCellTypeLabel(DM, DMLabel *); +PETSC_EXTERN PetscErrorCode DMPlexGetCellType(DM, PetscInt, DMPolytopeType *); +PETSC_EXTERN PetscErrorCode DMPlexComputeCellTypes(DM); /* Topological Operations */ PETSC_EXTERN PetscErrorCode DMPlexGetMeet(DM, PetscInt, const PetscInt [], PetscInt *, const PetscInt **); @@ -164,7 +187,7 @@ PETSC_EXTERN PetscErrorCode DMPlexCheckSkeleton(DM, PetscInt); PETSC_EXTERN PetscErrorCode DMPlexCheckFaces(DM, PetscInt); PETSC_EXTERN PetscErrorCode DMPlexCheckGeometry(DM); PETSC_EXTERN PetscErrorCode DMPlexCheckPointSF(DM); -PETSC_EXTERN PetscErrorCode DMPlexCheckConesConformOnInterfaces(DM); +PETSC_EXTERN PetscErrorCode DMPlexCheckInterfaceCones(DM); PETSC_EXTERN PetscErrorCode DMPlexCheckCellShape(DM, PetscBool, PetscReal); PETSC_EXTERN PetscErrorCode DMPlexTriangleSetOptions(DM, const char *); @@ -182,6 +205,8 @@ PETSC_EXTERN PetscErrorCode DMPlexCreateFluentFromFile(MPI_Comm, const char [], PETSC_EXTERN PetscErrorCode DMPlexCreateMedFromFile(MPI_Comm, const char [], PetscBool, DM *); PETSC_EXTERN PetscErrorCode DMPlexCreatePLYFromFile(MPI_Comm, const char [], PetscBool, DM *); +PETSC_EXTERN PetscErrorCode PetscViewerExodusIIGetId(PetscViewer, int *); + /* Mesh Partitioning and Distribution */ PETSC_EXTERN PetscErrorCode DMPlexCreateNeighborCSR(DM, PetscInt, PetscInt *, PetscInt **, PetscInt **); PETSC_EXTERN PetscErrorCode DMPlexGetPartitioner(DM, PetscPartitioner *); @@ -196,6 +221,7 @@ PETSC_EXTERN PetscErrorCode DMPlexPartitionLabelPropagate(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexPartitionLabelCreateSF(DM, DMLabel, PetscSF *); PETSC_EXTERN PetscErrorCode DMPlexSetPartitionBalance(DM, PetscBool); PETSC_EXTERN PetscErrorCode DMPlexGetPartitionBalance(DM, PetscBool *); +PETSC_EXTERN PetscErrorCode DMPlexIsDistributed(DM, PetscBool *); PETSC_EXTERN PetscErrorCode DMPlexDistribute(DM, PetscInt, PetscSF*, DM*); PETSC_EXTERN PetscErrorCode DMPlexDistributeOverlap(DM, PetscInt, PetscSF *, DM *); PETSC_EXTERN PetscErrorCode DMPlexGetOverlap(DM, PetscInt *); @@ -234,12 +260,14 @@ PETSC_EXTERN PetscErrorCode DMPlexCreateHybridMesh(DM, DMLabel, DMLabel, DMLabel PETSC_EXTERN PetscErrorCode DMPlexGetSubpointMap(DM, DMLabel*); PETSC_EXTERN PetscErrorCode DMPlexSetSubpointMap(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexCreateSubpointIS(DM, IS 
*); -PETSC_EXTERN PetscErrorCode DMPlexGetSubpoint(DM, PetscInt, PetscInt *); -PETSC_EXTERN PetscErrorCode DMPlexGetAuxiliaryPoint(DM, DM, PetscInt, PetscInt *); + +PETSC_EXTERN PetscErrorCode DMGetEnclosureRelation(DM, DM, DMEnclosureType *); +PETSC_EXTERN PetscErrorCode DMGetEnclosurePoint(DM, DM, DMEnclosureType, PetscInt, PetscInt *); PETSC_EXTERN PetscErrorCode DMPlexLabelComplete(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexLabelCohesiveComplete(DM, DMLabel, DMLabel, PetscBool, DM); PETSC_EXTERN PetscErrorCode DMPlexLabelAddCells(DM, DMLabel); +PETSC_EXTERN PetscErrorCode DMPlexLabelAddFaceCells(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexLabelClearCells(DM, DMLabel); PETSC_EXTERN PetscErrorCode DMPlexGetRefinementLimit(DM, PetscReal *); @@ -410,6 +438,8 @@ PETSC_EXTERN PetscErrorCode DMPlexTreeRefineCell(DM, PetscInt, DM *); PETSC_EXTERN PetscErrorCode DMPlexComputeInjectorReferenceTree(DM, Mat *); PETSC_EXTERN PetscErrorCode DMPlexTransferVecTree(DM,Vec,DM,Vec,PetscSF,PetscSF,PetscInt *,PetscInt *,PetscBool,PetscReal); +PETSC_EXTERN PetscErrorCode DMPlexMonitorThroughput(DM, void *); + /* natural order */ PETSC_EXTERN PetscErrorCode DMPlexCreateGlobalToNaturalSF(DM, PetscSection, PetscSF, PetscSF *); PETSC_EXTERN PetscErrorCode DMPlexSetGlobalToNaturalSF(DM, PetscSF); @@ -421,6 +451,7 @@ PETSC_EXTERN PetscErrorCode DMPlexNaturalToGlobalEnd(DM, Vec, Vec); /* mesh adaptation */ PETSC_EXTERN PetscErrorCode DMPlexAdapt(DM, Vec, const char [], DM *); +PETSC_EXTERN PetscErrorCode DMPlexSnapToGeomModel(DM, PetscInt, const PetscScalar[], PetscScalar[]); PETSC_EXTERN PetscErrorCode DMPlexGlobalToLocalBasis(DM, Vec); PETSC_EXTERN PetscErrorCode DMPlexLocalToGlobalBasis(DM, Vec); diff --git a/include/petscdmstag.h b/include/petscdmstag.h index 82fb5e119b0..3e64d051f46 100644 --- a/include/petscdmstag.h +++ b/include/petscdmstag.h @@ -80,8 +80,6 @@ PETSC_EXTERN PetscErrorCode DMStagCreate1d(MPI_Comm,DMBoundaryType,PetscInt,Pets PETSC_EXTERN PetscErrorCode DMStagCreate2d(MPI_Comm,DMBoundaryType,DMBoundaryType,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,DMStagStencilType,PetscInt,const PetscInt[],const PetscInt[],DM*); PETSC_EXTERN PetscErrorCode DMStagCreate3d(MPI_Comm,DMBoundaryType,DMBoundaryType,DMBoundaryType,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,DMStagStencilType,PetscInt,const PetscInt[],const PetscInt[],const PetscInt[],DM*); PETSC_EXTERN PetscErrorCode DMStagCreateCompatibleDMStag(DM,PetscInt,PetscInt,PetscInt,PetscInt,DM*); -PETSC_EXTERN PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM,void*,void*,void*); -PETSC_EXTERN PetscErrorCode DMStagGet1dCoordinateLocationSlot(DM,DMStagStencilLocation,PetscInt*); PETSC_EXTERN PetscErrorCode DMStagGetBoundaryTypes(DM,DMBoundaryType*,DMBoundaryType*,DMBoundaryType*); PETSC_EXTERN PetscErrorCode DMStagGetCorners(DM,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*); PETSC_EXTERN PetscErrorCode DMStagGetDOF(DM,PetscInt*,PetscInt*,PetscInt*,PetscInt*); @@ -95,12 +93,17 @@ PETSC_EXTERN PetscErrorCode DMStagGetLocationDOF(DM,DMStagStencilLocation,PetscI PETSC_EXTERN PetscErrorCode DMStagGetLocationSlot(DM,DMStagStencilLocation,PetscInt,PetscInt*); PETSC_EXTERN PetscErrorCode DMStagGetNumRanks(DM,PetscInt*,PetscInt*,PetscInt*); PETSC_EXTERN PetscErrorCode DMStagGetOwnershipRanges(DM,const PetscInt**,const PetscInt**,const PetscInt**); +PETSC_EXTERN PetscErrorCode DMStagGetProductCoordinateArrays(DM,void*,void*,void*); +PETSC_EXTERN 
PetscErrorCode DMStagGetProductCoordinateArraysRead(DM,void*,void*,void*); +PETSC_EXTERN PetscErrorCode DMStagGetProductCoordinateLocationSlot(DM,DMStagStencilLocation,PetscInt*); PETSC_EXTERN PetscErrorCode DMStagGetStencilType(DM,DMStagStencilType*); PETSC_EXTERN PetscErrorCode DMStagGetStencilWidth(DM,PetscInt*); +PETSC_EXTERN PetscErrorCode DMStagMatGetValuesStencil(DM,Mat,PetscInt,const DMStagStencil*,PetscInt,const DMStagStencil*,PetscScalar*); PETSC_EXTERN PetscErrorCode DMStagMatSetValuesStencil(DM,Mat,PetscInt,const DMStagStencil*,PetscInt,const DMStagStencil*,const PetscScalar*,InsertMode); PETSC_EXTERN PetscErrorCode DMStagMigrateVec(DM,Vec,DM,Vec); PETSC_EXTERN PetscErrorCode DMStagPopulateLocalToGlobalInjective(DM); -PETSC_EXTERN PetscErrorCode DMStagRestore1dCoordinateArraysDOFRead(DM,void*,void*,void*); +PETSC_EXTERN PetscErrorCode DMStagRestoreProductCoordinateArrays(DM,void*,void*,void*); +PETSC_EXTERN PetscErrorCode DMStagRestoreProductCoordinateArraysRead(DM,void*,void*,void*); PETSC_EXTERN PetscErrorCode DMStagSetBoundaryTypes(DM,DMBoundaryType,DMBoundaryType,DMBoundaryType); PETSC_EXTERN PetscErrorCode DMStagSetCoordinateDMType(DM,DMType); PETSC_EXTERN PetscErrorCode DMStagSetDOF(DM,PetscInt,PetscInt,PetscInt,PetscInt); @@ -112,15 +115,22 @@ PETSC_EXTERN PetscErrorCode DMStagSetStencilWidth(DM,PetscInt); PETSC_EXTERN PetscErrorCode DMStagSetUniformCoordinates(DM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal); PETSC_EXTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit(DM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal); PETSC_EXTERN PetscErrorCode DMStagSetUniformCoordinatesProduct(DM,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal,PetscReal); -PETSC_EXTERN PetscErrorCode DMStagVecGetArrayDOF(DM,Vec,void*); -PETSC_EXTERN PetscErrorCode DMStagVecGetArrayDOFRead(DM,Vec,void*); +PETSC_EXTERN PetscErrorCode DMStagVecGetArray(DM,Vec,void*); +PETSC_EXTERN PetscErrorCode DMStagVecGetArrayRead(DM,Vec,void*); PETSC_EXTERN PetscErrorCode DMStagVecGetValuesStencil(DM,Vec,PetscInt,const DMStagStencil*,PetscScalar*); -PETSC_EXTERN PetscErrorCode DMStagVecRestoreArrayDOF(DM,Vec,void*); -PETSC_EXTERN PetscErrorCode DMStagVecRestoreArrayDOFRead(DM,Vec,void*); +PETSC_EXTERN PetscErrorCode DMStagVecRestoreArray(DM,Vec,void*); +PETSC_EXTERN PetscErrorCode DMStagVecRestoreArrayRead(DM,Vec,void*); PETSC_EXTERN PetscErrorCode DMStagVecSetValuesStencil(DM,Vec,PetscInt,const DMStagStencil*,const PetscScalar*,InsertMode); PETSC_EXTERN PetscErrorCode DMStagVecSplitToDMDA(DM,Vec,DMStagStencilLocation,PetscInt,DM*,Vec*); +PETSC_DEPRECATED_FUNCTION("Use DMStagGetProductCoordinateArraysRead() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM dm,void *ax,void *ay,void *az) {return DMStagGetProductCoordinateArraysRead(dm,ax,ay,az);} +PETSC_DEPRECATED_FUNCTION("Use DMStagGetProductCoordinateLocationSlot() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagGet1dCoordinateLocationSlot(DM dm,DMStagStencilLocation loc,PetscInt *s) {return DMStagGetProductCoordinateLocationSlot(dm,loc,s);} PETSC_DEPRECATED_FUNCTION("Use DMStagGetStencilType() (since version 3.11)") PETSC_STATIC_INLINE PetscErrorCode DMStagGetGhostType(DM dm,DMStagStencilType *s) {return DMStagGetStencilType(dm,s);} +PETSC_DEPRECATED_FUNCTION("Use DMStagRestoreProductCoordinateArraysRead() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagRestore1dCoordinateArraysDOFRead(DM dm,void *ax,void *ay,void *az) {return 
DMStagRestoreProductCoordinateArraysRead(dm,ax,ay,az);} PETSC_DEPRECATED_FUNCTION("Use DMStagSetStencilType() (since version 3.11)") PETSC_STATIC_INLINE PetscErrorCode DMStagSetGhostType(DM dm,DMStagStencilType *s) {return DMStagGetStencilType(dm,s);} +PETSC_DEPRECATED_FUNCTION("Use DMStagVecGetArray() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagVecGetArrayDOF(DM dm,Vec v,void *a) {return DMStagVecGetArray(dm,v,a);} +PETSC_DEPRECATED_FUNCTION("Use DMStagVecGetArrayRead() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagVecGetArrayDOFRead(DM dm,Vec v,void *a) {return DMStagVecGetArrayRead(dm,v,a);} +PETSC_DEPRECATED_FUNCTION("Use DMStagVecRestoreArray() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagVecRestoreArrayDOF(DM dm,Vec v,void *a) {return DMStagVecRestoreArray(dm,v,a);} +PETSC_DEPRECATED_FUNCTION("Use DMStagVecRestoreArrayRead() (since version 3.13") PETSC_STATIC_INLINE PetscErrorCode DMStagVecRestoreArrayDOFRead(DM dm,Vec v,void *a) {return DMStagVecRestoreArrayRead(dm,v,a);} #endif diff --git a/include/petscdmtypes.h b/include/petscdmtypes.h index 36e5e9a7ef4..410c26abdc4 100644 --- a/include/petscdmtypes.h +++ b/include/petscdmtypes.h @@ -118,6 +118,36 @@ typedef enum {DM_ADAPT_DETERMINE = PETSC_DETERMINE, DM_ADAPT_KEEP = 0, DM_ADAPT_ E*/ typedef enum {DM_X, DM_Y, DM_Z} DMDirection; +/*E +DMEnclosureType - The type of enclosure relation between one DM and another + +Level: beginner + +For example, one DM dmA may be the boundary of another dmB, in which case it would be labeled DM_ENC_SUBMESH. If +the situation is reversed, and dmA has boundary dmB, it would be labeled DM_ENC_SUPERMESH. Likewise, if dmA was +a subregion of dmB, it would be labeled DM_ENC_SUBMESH. If no relation can be determined, DM_ENC_NONE is used. +If a relation is not yet known, then DM_ENC_UNKNOWN is used. + +.seealso: DMGetEnclosureRelation() +E*/ +typedef enum {DM_ENC_EQUALITY, DM_ENC_SUPERMESH, DM_ENC_SUBMESH, DM_ENC_NONE, DM_ENC_UNKNOWN} DMEnclosureType; + +/*E + DMPolytopeType - This describes the polytope represented by each cell. + + Level: beginner + + While most operations only need the topology information in the Plex, we must sometimes have the + user specify a polytope. For instance, when interpolating from a cell-vertex mesh, the type of + polytope can be ambiguous. Also, Plex allows different symmetries of a prism cell with the same + constituent points. Normally these types are automatically inferred and the user does not specify + them.
+ +.seealso: DMPlexComputeCellTypes() +E*/ +typedef enum {DM_POLYTOPE_POINT, DM_POLYTOPE_SEGMENT, DM_POLYTOPE_TRIANGLE, DM_POLYTOPE_QUADRILATERAL, DM_POLYTOPE_SEG_PRISM_TENSOR, DM_POLYTOPE_TETRAHEDRON, DM_POLYTOPE_HEXAHEDRON, DM_POLYTOPE_TRI_PRISM, DM_POLYTOPE_TRI_PRISM_TENSOR, DM_POLYTOPE_QUAD_PRISM_TENSOR, DM_POLYTOPE_FV_GHOST, DM_POLYTOPE_UNKNOWN, DM_NUM_POLYTOPES} DMPolytopeType; +PETSC_EXTERN const char *const DMPolytopeTypes[]; + /*S PetscPartitioner - PETSc object that manages a graph partitioner diff --git a/include/petscdraw.h b/include/petscdraw.h index 399dc4127c4..51ebd948c3b 100644 --- a/include/petscdraw.h +++ b/include/petscdraw.h @@ -22,7 +22,7 @@ PETSC_EXTERN PetscErrorCode PetscDrawSetSave(PetscDraw,const char[]); PETSC_EXTERN PetscErrorCode PetscDrawSetSaveMovie(PetscDraw,const char[]); PETSC_EXTERN PetscErrorCode PetscDrawSetSaveFinalImage(PetscDraw,const char[]); PETSC_EXTERN PetscErrorCode PetscDrawView(PetscDraw,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PetscDrawViewFromOptions(PetscDraw A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode PetscDrawViewFromOptions(PetscDraw,PetscObject,const char[]); /* Number of basic colors in the draw routines, the others are used diff --git a/include/petscds.h b/include/petscds.h index f1bc0b4baa7..3e60f6adc44 100644 --- a/include/petscds.h +++ b/include/petscds.h @@ -47,7 +47,7 @@ PETSC_EXTERN PetscErrorCode PetscDSSetType(PetscDS, PetscDSType); PETSC_EXTERN PetscErrorCode PetscDSGetType(PetscDS, PetscDSType *); PETSC_EXTERN PetscErrorCode PetscDSSetUp(PetscDS); PETSC_EXTERN PetscErrorCode PetscDSSetFromOptions(PetscDS); -PETSC_STATIC_INLINE PetscErrorCode PetscDSViewFromOptions(PetscDS A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscDSViewFromOptions(PetscDS,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscDSView(PetscDS,PetscViewer); PETSC_EXTERN PetscErrorCode PetscDSRegister(const char [], PetscErrorCode (*)(PetscDS)); @@ -284,8 +284,8 @@ PETSC_EXTERN PetscErrorCode PetscDSSetBdJacobian(PetscDS, PetscInt, PetscInt, PetscReal, PetscReal, const PetscReal[], const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[])); PETSC_EXTERN PetscErrorCode PetscDSGetExactSolution(PetscDS, PetscInt, PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *), void **); PETSC_EXTERN PetscErrorCode PetscDSSetExactSolution(PetscDS, PetscInt, PetscErrorCode (*)(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *), void *); -PETSC_EXTERN PetscErrorCode PetscDSGetTabulation(PetscDS, PetscReal ***, PetscReal ***); -PETSC_EXTERN PetscErrorCode PetscDSGetFaceTabulation(PetscDS, PetscReal ***, PetscReal ***); +PETSC_EXTERN PetscErrorCode PetscDSGetTabulation(PetscDS, PetscTabulation *[]); +PETSC_EXTERN PetscErrorCode PetscDSGetFaceTabulation(PetscDS, PetscTabulation *[]); PETSC_EXTERN PetscErrorCode PetscDSGetEvaluationArrays(PetscDS, PetscScalar **, PetscScalar **, PetscScalar **); PETSC_EXTERN PetscErrorCode PetscDSGetWeakFormArrays(PetscDS, PetscScalar **, PetscScalar **, PetscScalar **, PetscScalar **, PetscScalar **, PetscScalar **); PETSC_EXTERN PetscErrorCode PetscDSGetWorkspace(PetscDS, PetscReal **, PetscScalar **, PetscScalar **, PetscScalar **, PetscScalar **); diff --git a/include/petscdt.h b/include/petscdt.h index 6f613bef94a..7383fef8f27 100644 --- a/include/petscdt.h +++ b/include/petscdt.h @@ -6,6 +6,8 
@@ #include +PETSC_EXTERN PetscClassId PETSCQUADRATURE_CLASSID; + /*S PetscQuadrature - Quadrature rule for integration. @@ -39,6 +41,8 @@ PETSC_EXTERN PetscErrorCode PetscQuadratureDestroy(PetscQuadrature *); PETSC_EXTERN PetscErrorCode PetscQuadratureExpandComposite(PetscQuadrature, PetscInt, const PetscReal[], const PetscReal[], PetscQuadrature *); +PETSC_EXTERN PetscErrorCode PetscQuadraturePushForward(PetscQuadrature, PetscInt, const PetscReal[], const PetscReal[], const PetscReal[], PetscInt, PetscQuadrature *); + PETSC_EXTERN PetscErrorCode PetscDTLegendreEval(PetscInt,const PetscReal*,PetscInt,const PetscInt*,PetscReal*,PetscReal*,PetscReal*); PETSC_EXTERN PetscErrorCode PetscDTGaussQuadrature(PetscInt,PetscReal,PetscReal,PetscReal*,PetscReal*); PETSC_EXTERN PetscErrorCode PetscDTGaussLobattoLegendreQuadrature(PetscInt,PetscGaussLobattoLegendreCreateType,PetscReal*,PetscReal*); @@ -60,4 +64,389 @@ PETSC_EXTERN PetscErrorCode PetscGaussLobattoLegendreElementAdvectionDestroy(Pet PETSC_EXTERN PetscErrorCode PetscGaussLobattoLegendreElementMassCreate(PetscInt, PetscReal *, PetscReal *, PetscReal ***); PETSC_EXTERN PetscErrorCode PetscGaussLobattoLegendreElementMassDestroy(PetscInt, PetscReal *, PetscReal *, PetscReal ***); +PETSC_EXTERN PetscErrorCode PetscDTAltVApply(PetscInt, PetscInt, const PetscReal *, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVWedge(PetscInt, PetscInt, PetscInt, const PetscReal *, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVWedgeMatrix(PetscInt, PetscInt, PetscInt, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVPullback(PetscInt, PetscInt, const PetscReal *, PetscInt, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVPullbackMatrix(PetscInt, PetscInt, const PetscReal *, PetscInt, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVInterior(PetscInt, PetscInt, const PetscReal *, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVInteriorMatrix(PetscInt, PetscInt, const PetscReal *, PetscReal *); +PETSC_EXTERN PetscErrorCode PetscDTAltVInteriorPattern(PetscInt, PetscInt, PetscInt (*)[3]); +PETSC_EXTERN PetscErrorCode PetscDTAltVStar(PetscInt, PetscInt, PetscInt, const PetscReal *, PetscReal *); + +#if defined(PETSC_USE_64BIT_INDICES) +#define PETSC_FACTORIAL_MAX 20 +#define PETSC_BINOMIAL_MAX 61 +#else +#define PETSC_FACTORIAL_MAX 12 +#define PETSC_BINOMIAL_MAX 29 +#endif + +/*MC + PetscDTFactorial - Approximate n! as a real number + + Input Arguments: +. n - a non-negative integer + + Output Arguments: +. factorial - n! + + Level: beginner +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTFactorial(PetscInt n, PetscReal *factorial) +{ + PetscReal f = 1.0; + PetscInt i; + + PetscFunctionBegin; + *factorial = -1.0; + if (n < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Factorial called with negative number %D\n", n); + for (i = 1; i < n+1; ++i) f *= (PetscReal)i; + *factorial = f; + PetscFunctionReturn(0); +} + +/*MC + PetscDTFactorialInt - Compute n! as an integer + + Input Arguments: +. n - a non-negative integer + + Output Arguments: +. factorial - n! + + Level: beginner + + Note: this is limited to n such that n! can be represented by PetscInt, which is 12 if PetscInt is a signed 32-bit integer and 20 if PetscInt is a signed 64-bit integer. 
+M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTFactorialInt(PetscInt n, PetscInt *factorial) +{ + PetscInt facLookup[13] = {1, 1, 2, 6, 24, 120, 720, 5040, 40320, 362880, 3628800, 39916800, 479001600}; + + PetscFunctionBegin; + *factorial = -1; + if (n < 0 || n > PETSC_FACTORIAL_MAX) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of elements %D is not in supported range [0,%D]\n",n,PETSC_FACTORIAL_MAX); + if (n <= 12) { + *factorial = facLookup[n]; + } else { + PetscInt f = facLookup[12]; + PetscInt i; + + for (i = 13; i < n+1; ++i) f *= i; + *factorial = f; + } + PetscFunctionReturn(0); +} + +/*MC + PetscDTBinomial - Approximate the binomial coefficient "n choose k" + + Input Arguments: ++ n - a non-negative integer +- k - an integer between 0 and n, inclusive + + Output Arguments: +. binomial - approximation of the binomial coefficient n choose k + + Level: beginner +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTBinomial(PetscInt n, PetscInt k, PetscReal *binomial) +{ + PetscFunctionBeginHot; + *binomial = -1.0; + if (n < 0 || k < 0 || k > n) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Binomial arguments (%D %D) must be non-negative, k <= n\n", n, k); + if (n <= 3) { + PetscInt binomLookup[4][4] = {{1, 0, 0, 0}, {1, 1, 0, 0}, {1, 2, 1, 0}, {1, 3, 3, 1}}; + + *binomial = (PetscReal)binomLookup[n][k]; + } else { + PetscReal binom = 1.0; + PetscInt i; + + k = PetscMin(k, n - k); + for (i = 0; i < k; i++) binom = (binom * (PetscReal)(n - i)) / (PetscReal)(i + 1); + *binomial = binom; + } + PetscFunctionReturn(0); +} + +/*MC + PetscDTBinomialInt - Compute the binomial coefficient "n choose k" + + Input Arguments: ++ n - a non-negative integer +- k - an integer between 0 and n, inclusive + + Output Arguments: +. binomial - the binomial coefficient n choose k + + Note: this is limited by integers that can be represented by PetscInt + + Level: beginner +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTBinomialInt(PetscInt n, PetscInt k, PetscInt *binomial) +{ + PetscInt bin; + + PetscFunctionBegin; + *binomial = -1; + if (n < 0 || k < 0 || k > n) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Binomial arguments (%D %D) must be non-negative, k <= n\n", n, k); + if (n > PETSC_BINOMIAL_MAX) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Binomial elements %D is larger than max for PetscInt, %D\n", n, PETSC_BINOMIAL_MAX); + if (n <= 3) { + PetscInt binomLookup[4][4] = {{1, 0, 0, 0}, {1, 1, 0, 0}, {1, 2, 1, 0}, {1, 3, 3, 1}}; + + bin = binomLookup[n][k]; + } else { + PetscInt binom = 1; + PetscInt i; + + k = PetscMin(k, n - k); + for (i = 0; i < k; i++) binom = (binom * (n - i)) / (i + 1); + bin = binom; + } + *binomial = bin; + PetscFunctionReturn(0); +} + +/*MC + PetscDTEnumPerm - Get a permutation of n integers from its encoding into the integers [0, n!) as a sequence of swaps. + + A permutation can be described by the operations that convert the lists [0, 1, ..., n-1] into the permutation, + by a sequence of swaps, where the ith step swaps whatever number is in ith position with a number that is in + some position j >= i. This swap is encoded as the difference (j - i). The difference d_i at step i is less than + (n - i). This sequence of n-1 differences [d_0, ..., d_{n-2}] is encoded as the number + (n-1)! * d_0 + (n-2)! * d_1 + ... + 1! * d_{n-2}. + + Input Arguments: ++ n - a non-negative integer (see note about limits below) +- k - an integer in [0, n!) 
+ + Output Arguments: ++ perm - the permuted list of the integers [0, ..., n-1] +- isOdd - if not NULL, returns whether the permutation used an even or odd number of swaps. + + Note: this is limited to n such that n! can be represented by PetscInt, which is 12 if PetscInt is a signed 32-bit integer and 20 if PetscInt is a signed 64-bit integer. + + Level: beginner +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTEnumPerm(PetscInt n, PetscInt k, PetscInt *perm, PetscBool *isOdd) +{ + PetscInt odd = 0; + PetscInt i; + PetscInt work[PETSC_FACTORIAL_MAX]; + PetscInt *w; + + PetscFunctionBegin; + if (isOdd) *isOdd = PETSC_FALSE; + if (n < 0 || n > PETSC_FACTORIAL_MAX) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of elements %D is not in supported range [0,%D]\n",n,PETSC_FACTORIAL_MAX); + w = &work[n - 2]; + for (i = 2; i <= n; i++) { + *(w--) = k % i; + k /= i; + } + for (i = 0; i < n; i++) perm[i] = i; + for (i = 0; i < n - 1; i++) { + PetscInt s = work[i]; + PetscInt swap = perm[i]; + + perm[i] = perm[i + s]; + perm[i + s] = swap; + odd ^= (!!s); + } + if (isOdd) *isOdd = odd ? PETSC_TRUE : PETSC_FALSE; + PetscFunctionReturn(0); +} + +/*MC + PetscDTPermIndex - Encode a permutation of n into an integer in [0, n!). This inverts PetscDTEnumPerm. + + Input Arguments: ++ n - a non-negative integer (see note about limits below) +- perm - the permuted list of the integers [0, ..., n-1] + + Output Arguments: ++ k - an integer in [0, n!) +. isOdd - if not NULL, returns whether the permutation used an even or odd number of swaps. + + Note: this is limited to n such that n! can be represented by PetscInt, which is 12 if PetscInt is a signed 32-bit integer and 20 if PetscInt is a signed 64-bit integer. + + Level: beginner +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTPermIndex(PetscInt n, const PetscInt *perm, PetscInt *k, PetscBool *isOdd) +{ + PetscInt odd = 0; + PetscInt i, idx; + PetscInt work[PETSC_FACTORIAL_MAX]; + PetscInt iwork[PETSC_FACTORIAL_MAX]; + + PetscFunctionBeginHot; + *k = -1; + if (isOdd) *isOdd = PETSC_FALSE; + if (n < 0 || n > PETSC_FACTORIAL_MAX) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of elements %D is not in supported range [0,%D]\n",n,PETSC_FACTORIAL_MAX); + for (i = 0; i < n; i++) work[i] = i; /* partial permutation */ + for (i = 0; i < n; i++) iwork[i] = i; /* partial permutation inverse */ + for (idx = 0, i = 0; i < n - 1; i++) { + PetscInt j = perm[i]; + PetscInt icur = work[i]; + PetscInt jloc = iwork[j]; + PetscInt diff = jloc - i; + + idx = idx * (n - i) + diff; + /* swap (i, jloc) */ + work[i] = j; + work[jloc] = icur; + iwork[j] = i; + iwork[icur] = jloc; + odd ^= (!!diff); + } + *k = idx; + if (isOdd) *isOdd = odd ? PETSC_TRUE : PETSC_FALSE; + PetscFunctionReturn(0); +} + +/*MC + PetscDTEnumSubset - Get an ordered subset of the integers [0, ..., n - 1] from its encoding as an integer in [0, n choose k). + The encoding is in lexicographic order. + + Input Arguments: ++ n - a non-negative integer (see note about limits below) +. k - an integer in [0, n] +- j - an index in [0, n choose k) + + Output Arguments: +.
subset - the jth subset of size k of the integers [0, ..., n - 1] + + Note: this is limited by arguments such that n choose k can be represented by PetscInt + + Level: beginner + +.seealso: PetscDTSubsetIndex() +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTEnumSubset(PetscInt n, PetscInt k, PetscInt j, PetscInt *subset) +{ + PetscInt Nk, i, l; + PetscErrorCode ierr; + + PetscFunctionBeginHot; + ierr = PetscDTBinomialInt(n, k, &Nk);CHKERRQ(ierr); + for (i = 0, l = 0; i < n && l < k; i++) { + PetscInt Nminuskminus = (Nk * (k - l)) / (n - i); + PetscInt Nminusk = Nk - Nminuskminus; + + if (j < Nminuskminus) { + subset[l++] = i; + Nk = Nminuskminus; + } else { + j -= Nminuskminus; + Nk = Nminusk; + } + } + PetscFunctionReturn(0); +} + +/*MC + PetscDTSubsetIndex - Convert an ordered subset of k integers from the set [0, ..., n - 1] to its encoding as an integer in [0, n choose k) in lexicographic order. This is the inverse of PetscDTEnumSubset. + + Input Arguments: ++ n - a non-negative integer (see note about limits below) +. k - an integer in [0, n] +- subset - an ordered subset of the integers [0, ..., n - 1] + + Output Arguments: +. index - the rank of the subset in lexicographic order + + Note: this is limited by arguments such that n choose k can be represented by PetscInt + + Level: beginner + +.seealso: PetscDTEnumSubset() +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTSubsetIndex(PetscInt n, PetscInt k, const PetscInt *subset, PetscInt *index) +{ + PetscInt i, j = 0, l, Nk; + PetscErrorCode ierr; + + PetscFunctionBegin; + *index = -1; + ierr = PetscDTBinomialInt(n, k, &Nk);CHKERRQ(ierr); + for (i = 0, l = 0; i < n && l < k; i++) { + PetscInt Nminuskminus = (Nk * (k - l)) / (n - i); + PetscInt Nminusk = Nk - Nminuskminus; + + if (subset[l] == i) { + l++; + Nk = Nminuskminus; + } else { + j += Nminuskminus; + Nk = Nminusk; + } + } + *index = j; + PetscFunctionReturn(0); +} + +/*MC + PetscDTEnumSplit - Split the integers [0, ..., n - 1] into two complementary ordered subsets, the first subset of size k and being the jth subset of that size in lexicographic order. + + Input Arguments: ++ n - a non-negative integer (see note about limits below) +. k - an integer in [0, n] +- j - an index in [0, n choose k) + + Output Arguments: ++ perm - the jth subset of size k of the integers [0, ..., n - 1], followed by its complementary set. +- isOdd - if not NULL, return whether perm is an even or odd permutation. + + Note: this is limited by arguments such that n choose k can be represented by PetscInt + + Level: beginner + +.seealso: PetscDTEnumSubset(), PetscDTSubsetIndex() +M*/ +PETSC_STATIC_INLINE PetscErrorCode PetscDTEnumSplit(PetscInt n, PetscInt k, PetscInt j, PetscInt *perm, PetscBool *isOdd) +{ + PetscInt i, l, m, *subcomp, Nk; + PetscInt odd; + PetscErrorCode ierr; + + PetscFunctionBegin; + if (isOdd) *isOdd = PETSC_FALSE; + ierr = PetscDTBinomialInt(n, k, &Nk);CHKERRQ(ierr); + odd = 0; + subcomp = &perm[k]; + for (i = 0, l = 0, m = 0; i < n && l < k; i++) { + PetscInt Nminuskminus = (Nk * (k - l)) / (n - i); + PetscInt Nminusk = Nk - Nminuskminus; + + if (j < Nminuskminus) { + perm[l++] = i; + Nk = Nminuskminus; + } else { + subcomp[m++] = i; + j -= Nminuskminus; + odd ^= ((k - l) & 1); + Nk = Nminusk; + } + } + for (; i < n; i++) { + subcomp[m++] = i; + } + if (isOdd) *isOdd = odd ?
PETSC_TRUE : PETSC_FALSE; + PetscFunctionReturn(0); +} + +struct _p_PetscTabulation { + PetscInt K; /* Indicates a k-jet, namely tabulated derivatives up to order k */ + PetscInt Nr; /* The number of tabulation replicas (often 1) */ + PetscInt Np; /* The number of tabulation points in a replica */ + PetscInt Nb; /* The number of functions tabulated */ + PetscInt Nc; /* The number of function components */ + PetscInt cdim; /* The coordinate dimension */ + PetscReal **T; /* The tabulation T[K] of functions and their derivatives + T[0] = B[Nr*Np][Nb][Nc]: The basis function values at quadrature points + T[1] = D[Nr*Np][Nb][Nc][cdim]: The basis function derivatives at quadrature points + T[2] = H[Nr*Np][Nb][Nc][cdim][cdim]: The basis function second derivatives at quadrature points */ +}; +typedef struct _p_PetscTabulation *PetscTabulation; + #endif diff --git a/include/petscerror.h b/include/petscerror.h index 7ec887fbebe..6a7d2526ec8 100644 --- a/include/petscerror.h +++ b/include/petscerror.h @@ -8,7 +8,7 @@ These are the generic error codes. These error codes are used many different places in the PETSc source code. The string versions are at src/sys/error/err.c any changes here must also be made there - These are also define in include/petsc/finclude/petscerror.h any CHANGES here + These are also defined in src/sys/f90-mod/petscerror.h any CHANGES here must be also made there. */ @@ -89,6 +89,10 @@ Experienced users can set the error handler with PetscPushErrorHandler(). + Fortran Notes: + SETERRQ() may be called from Fortran subroutines but SETERRA() must be called from the + Fortran main program. + .seealso: PetscTraceBackErrorHandler(), PetscPushErrorHandler(), PetscError(), CHKERRQ(), CHKMEMQ, SETERRQ1(), SETERRQ2(), SETERRQ3() M*/ #define SETERRQ(comm,ierr,s) return PetscError(comm,__LINE__,PETSC_FUNCTION_NAME,__FILE__,ierr,PETSC_ERROR_INITIAL,s) @@ -546,9 +550,9 @@ M*/ #endif -#define CHKERRCUDA(err) do {if (PetscUnlikely(err)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"CUDA error %d",err);} while(0) -#define CHKERRCUBLAS(err) do {if (PetscUnlikely(err)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"CUBLAS error %d",err);} while(0) - +#if defined(PETSC_HAVE_CUDA) +#define CHKERRCUSOLVER(err) do {if (PetscUnlikely(err)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"CUSOLVER error %d",err);} while(0) +#endif /*MC CHKMEMQ - Checks the memory for corruption, calls error handler if any is detected @@ -727,8 +731,8 @@ PETSC_STATIC_INLINE PetscBool PetscStackActive(void) PetscStackSAWsTakeAccess(); \ if (petscstack && petscstack->currentsize > 0) { \ petscstack->currentsize--; \ - petscstack->function[petscstack->currentsize] = 0; \ - petscstack->file[petscstack->currentsize] = 0; \ + petscstack->function[petscstack->currentsize] = NULL; \ + petscstack->file[petscstack->currentsize] = NULL; \ petscstack->line[petscstack->currentsize] = 0; \ petscstack->petscroutine[petscstack->currentsize] = PETSC_FALSE;\ } \ diff --git a/include/petscfe.h b/include/petscfe.h index 67bcf85b56b..d9c0e3eb9a2 100644 --- a/include/petscfe.h +++ b/include/petscfe.h @@ -50,7 +50,7 @@ PETSC_EXTERN PetscErrorCode PetscSpaceSetType(PetscSpace, PetscSpaceType); PETSC_EXTERN PetscErrorCode PetscSpaceGetType(PetscSpace, PetscSpaceType *); PETSC_EXTERN PetscErrorCode PetscSpaceSetUp(PetscSpace); PETSC_EXTERN PetscErrorCode PetscSpaceSetFromOptions(PetscSpace); -PETSC_STATIC_INLINE PetscErrorCode PetscSpaceViewFromOptions(PetscSpace A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);}
+PETSC_EXTERN PetscErrorCode PetscSpaceViewFromOptions(PetscSpace,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscSpaceView(PetscSpace,PetscViewer); PETSC_EXTERN PetscErrorCode PetscSpaceRegister(const char [], PetscErrorCode (*)(PetscSpace)); @@ -105,7 +105,7 @@ PETSC_EXTERN PetscErrorCode PetscDualSpaceGetNumDof(PetscDualSpace, const PetscI PETSC_EXTERN PetscErrorCode PetscDualSpaceCreateSection(PetscDualSpace, PetscSection *); PETSC_EXTERN PetscErrorCode PetscDualSpaceSetUp(PetscDualSpace); PETSC_EXTERN PetscErrorCode PetscDualSpaceSetFromOptions(PetscDualSpace); -PETSC_STATIC_INLINE PetscErrorCode PetscDualSpaceViewFromOptions(PetscDualSpace A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscDualSpaceViewFromOptions(PetscDualSpace,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscDualSpaceView(PetscDualSpace,PetscViewer); PETSC_EXTERN PetscErrorCode PetscDualSpaceRegister(const char [], PetscErrorCode (*)(PetscDualSpace)); @@ -176,13 +176,14 @@ PETSC_EXTERN PetscErrorCode PetscFESetType(PetscFE, PetscFEType); PETSC_EXTERN PetscErrorCode PetscFEGetType(PetscFE, PetscFEType *); PETSC_EXTERN PetscErrorCode PetscFESetUp(PetscFE); PETSC_EXTERN PetscErrorCode PetscFESetFromOptions(PetscFE); -PETSC_STATIC_INLINE PetscErrorCode PetscFEViewFromOptions(PetscFE A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscFEViewFromOptions(PetscFE,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscFESetName(PetscFE, const char []); PETSC_EXTERN PetscErrorCode PetscFEView(PetscFE,PetscViewer); PETSC_EXTERN PetscErrorCode PetscFERegister(const char [], PetscErrorCode (*)(PetscFE)); PETSC_EXTERN PetscErrorCode PetscFERegisterDestroy(void); PETSC_EXTERN PetscErrorCode PetscFECreateDefault(MPI_Comm, PetscInt, PetscInt, PetscBool, const char [], PetscInt, PetscFE *); +PETSC_EXTERN PetscErrorCode PetscFECreateLagrange(MPI_Comm, PetscInt, PetscInt, PetscBool, PetscInt, PetscInt, PetscFE *); PETSC_EXTERN PetscErrorCode PetscFEGetDimension(PetscFE, PetscInt *); PETSC_EXTERN PetscErrorCode PetscFEGetSpatialDimension(PetscFE, PetscInt *); @@ -200,11 +201,15 @@ PETSC_EXTERN PetscErrorCode PetscFESetFaceQuadrature(PetscFE, PetscQuadrature); PETSC_EXTERN PetscErrorCode PetscFEGetFaceQuadrature(PetscFE, PetscQuadrature *); PETSC_EXTERN PetscErrorCode PetscFECopyQuadrature(PetscFE, PetscFE); PETSC_EXTERN PetscErrorCode PetscFEGetNumDof(PetscFE, const PetscInt **); -PETSC_EXTERN PetscErrorCode PetscFEGetDefaultTabulation(PetscFE, PetscReal **, PetscReal **, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFEGetFaceTabulation(PetscFE, PetscReal **, PetscReal **, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFEGetFaceCentroidTabulation(PetscFE, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFEGetTabulation(PetscFE, PetscInt, const PetscReal[], PetscReal **, PetscReal **, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFERestoreTabulation(PetscFE, PetscInt, const PetscReal[], PetscReal **, PetscReal **, PetscReal **); + +/* TODO: Need a function to reuse the memory when retabulating the same FE at different points */ +PETSC_EXTERN PetscErrorCode PetscFEGetCellTabulation(PetscFE, PetscTabulation *); +PETSC_EXTERN PetscErrorCode PetscFEGetFaceTabulation(PetscFE, PetscTabulation *); +PETSC_EXTERN PetscErrorCode PetscFEGetFaceCentroidTabulation(PetscFE, PetscTabulation *); +PETSC_EXTERN PetscErrorCode PetscFECreateTabulation(PetscFE, 
PetscInt, PetscInt, const PetscReal[], PetscInt, PetscTabulation *); +PETSC_EXTERN PetscErrorCode PetscFEComputeTabulation(PetscFE, PetscInt, const PetscReal[], PetscInt, PetscTabulation); +PETSC_EXTERN PetscErrorCode PetscTabulationDestroy(PetscTabulation *); + PETSC_EXTERN PetscErrorCode PetscFERefine(PetscFE, PetscFE *); PETSC_EXTERN PetscErrorCode PetscFEGetHeightSubspace(PetscFE, PetscInt, PetscFE *); diff --git a/include/petscfv.h b/include/petscfv.h index 9f30eb12f8b..783ccac6f57 100644 --- a/include/petscfv.h +++ b/include/petscfv.h @@ -34,7 +34,7 @@ PETSC_EXTERN PetscErrorCode PetscLimiterSetType(PetscLimiter, PetscLimiterType); PETSC_EXTERN PetscErrorCode PetscLimiterGetType(PetscLimiter, PetscLimiterType *); PETSC_EXTERN PetscErrorCode PetscLimiterSetUp(PetscLimiter); PETSC_EXTERN PetscErrorCode PetscLimiterSetFromOptions(PetscLimiter); -PETSC_STATIC_INLINE PetscErrorCode PetscLimiterViewFromOptions(PetscLimiter A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscLimiterViewFromOptions(PetscLimiter,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscLimiterView(PetscLimiter, PetscViewer); PETSC_EXTERN PetscErrorCode PetscLimiterRegister(const char [], PetscErrorCode (*)(PetscLimiter)); PETSC_EXTERN PetscErrorCode PetscLimiterRegisterDestroy(void); @@ -64,7 +64,7 @@ PETSC_EXTERN PetscErrorCode PetscFVSetType(PetscFV, PetscFVType); PETSC_EXTERN PetscErrorCode PetscFVGetType(PetscFV, PetscFVType *); PETSC_EXTERN PetscErrorCode PetscFVSetUp(PetscFV); PETSC_EXTERN PetscErrorCode PetscFVSetFromOptions(PetscFV); -PETSC_STATIC_INLINE PetscErrorCode PetscFVViewFromOptions(PetscFV A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode PetscFVViewFromOptions(PetscFV,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscFVView(PetscFV, PetscViewer); PETSC_EXTERN PetscErrorCode PetscFVRegister(const char [], PetscErrorCode (*)(PetscFV)); PETSC_EXTERN PetscErrorCode PetscFVRegisterDestroy(void); @@ -86,9 +86,8 @@ PETSC_EXTERN PetscErrorCode PetscFVGetDualSpace(PetscFV, PetscDualSpace *); PETSC_EXTERN PetscErrorCode PetscFVRefine(PetscFV, PetscFV *); -PETSC_EXTERN PetscErrorCode PetscFVGetDefaultTabulation(PetscFV, PetscReal **, PetscReal **, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFVGetTabulation(PetscFV, PetscInt, const PetscReal[], PetscReal **, PetscReal **, PetscReal **); -PETSC_EXTERN PetscErrorCode PetscFVRestoreTabulation(PetscFV, PetscInt, const PetscReal[], PetscReal **, PetscReal **, PetscReal **); +PETSC_EXTERN PetscErrorCode PetscFVGetCellTabulation(PetscFV, PetscTabulation *); +PETSC_EXTERN PetscErrorCode PetscFVCreateTabulation(PetscFV, PetscInt, PetscInt, const PetscReal[], PetscInt, PetscTabulation *); PETSC_EXTERN PetscErrorCode PetscFVComputeGradient(PetscFV, PetscInt, PetscScalar[], PetscScalar[]); PETSC_EXTERN PetscErrorCode PetscFVIntegrateRHSFunction(PetscFV, PetscDS, PetscInt, PetscInt, PetscFVFaceGeom *, PetscReal *, PetscScalar[], PetscScalar[], PetscScalar[], PetscScalar[]); diff --git a/include/petscis.h b/include/petscis.h index da8a10716a3..7b2835cf583 100644 --- a/include/petscis.h +++ b/include/petscis.h @@ -50,6 +50,32 @@ PETSC_EXTERN PetscErrorCode ISSetIdentity(IS); PETSC_EXTERN PetscErrorCode ISIdentity(IS,PetscBool *); PETSC_EXTERN PetscErrorCode ISContiguousLocal(IS,PetscInt,PetscInt,PetscInt*,PetscBool*); +/*E + ISInfo - Info that may either be computed or set as known for an index set + + 
Level: beginner + + Any additions/changes here MUST also be made in include/petsc/finclude/petscis.h + Any additions/changes here must also be made in src/vec/vec/interface/dlregisvec.c in ISInfos[] + + Developer Notes: + Entries that are negative need not be called collectively by all processes. + +.seealso: ISSetInfo() +E*/ +typedef enum {IS_INFO_MIN = -1, + IS_SORTED = 0, + IS_UNIQUE = 1, + IS_PERMUTATION = 2, + IS_INTERVAL = 3, + IS_IDENTITY = 4, + IS_INFO_MAX = 5} ISInfo; + +typedef enum {IS_LOCAL, IS_GLOBAL} ISInfoType; + +PETSC_EXTERN PetscErrorCode ISSetInfo(IS,ISInfo,ISInfoType,PetscBool,PetscBool); +PETSC_EXTERN PetscErrorCode ISGetInfo(IS,ISInfo,ISInfoType,PetscBool,PetscBool*); +PETSC_EXTERN PetscErrorCode ISClearInfoCache(IS,PetscBool); PETSC_EXTERN PetscErrorCode ISGetIndices(IS,const PetscInt *[]); PETSC_EXTERN PetscErrorCode ISRestoreIndices(IS,const PetscInt *[]); PETSC_EXTERN PetscErrorCode ISGetTotalIndices(IS,const PetscInt *[]); @@ -62,7 +88,7 @@ PETSC_EXTERN PetscErrorCode ISGetSize(IS,PetscInt *); PETSC_EXTERN PetscErrorCode ISGetLocalSize(IS,PetscInt *); PETSC_EXTERN PetscErrorCode ISInvertPermutation(IS,PetscInt,IS*); PETSC_EXTERN PetscErrorCode ISView(IS,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode ISViewFromOptions(IS A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode ISViewFromOptions(IS,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode ISLoad(IS,PetscViewer); PETSC_EXTERN PetscErrorCode ISEqual(IS,IS,PetscBool *); PETSC_EXTERN PetscErrorCode ISEqualUnsorted(IS,IS,PetscBool *); @@ -142,7 +168,7 @@ PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF,PetscInt,ISLo PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingSetUp(ISLocalToGlobalMapping); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping*); PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping,PetscInt,const PetscInt[],PetscInt[]); @@ -246,7 +272,7 @@ struct _n_PetscLayout{ Not available from Fortran @*/ -PETSC_STATIC_INLINE PetscErrorCode PetscLayoutFindOwner(PetscLayout map,PetscInt idx,PetscInt *owner) +PETSC_STATIC_INLINE PetscErrorCode PetscLayoutFindOwner(PetscLayout map,PetscInt idx,PetscMPIInt *owner) { PetscErrorCode ierr; PetscMPIInt lo = 0,hi,t; @@ -284,7 +310,7 @@ PETSC_STATIC_INLINE PetscErrorCode PetscLayoutFindOwner(PetscLayout map,PetscInt Not available from Fortran @*/ -PETSC_STATIC_INLINE PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout map,PetscInt idx,PetscInt *owner, PetscInt *lidx) +PETSC_STATIC_INLINE PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout map,PetscInt idx,PetscMPIInt *owner,PetscInt *lidx) { PetscErrorCode ierr; PetscMPIInt lo = 0,hi,t; @@ -323,6 +349,10 @@ PETSC_EXTERN PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout,ISL PETSC_EXTERN PetscErrorCode PetscLayoutMapLocal(PetscLayout,PetscInt,const PetscInt[],PetscInt*,PetscInt**,PetscInt**); PETSC_EXTERN PetscErrorCode 
PetscSFSetGraphLayout(PetscSF,PetscLayout,PetscInt,const PetscInt*,PetscCopyMode,const PetscInt*); +PETSC_EXTERN PetscErrorCode PetscParallelSortInt(PetscLayout, PetscLayout, PetscInt*, PetscInt*); + +PETSC_EXTERN PetscErrorCode ISGetLayout(IS, PetscLayout *); + /* PetscSF support */ PETSC_EXTERN PetscErrorCode PetscSFConvertPartition(PetscSF, PetscSection, IS, ISLocalToGlobalMapping *, PetscSF *); PETSC_EXTERN PetscErrorCode PetscSFCreateRemoteOffsets(PetscSF, PetscSection, PetscSection, PetscInt **); diff --git a/include/petscksp.h b/include/petscksp.h index b75810337b9..107d682360e 100644 --- a/include/petscksp.h +++ b/include/petscksp.h @@ -36,6 +36,7 @@ typedef const char* KSPType; #define KSPPIPECG "pipecg" #define KSPPIPECGRR "pipecgrr" #define KSPPIPELCG "pipelcg" +#define KSPPIPEPRCG "pipeprcg" #define KSPCGNE "cgne" #define KSPNASH "nash" #define KSPSTCG "stcg" @@ -227,6 +228,10 @@ PETSC_EXTERN PetscErrorCode KSPFETIDPGetInnerBDDC(KSP,PC*); PETSC_EXTERN PetscErrorCode KSPFETIDPSetInnerBDDC(KSP,PC); PETSC_EXTERN PetscErrorCode KSPFETIDPGetInnerKSP(KSP,KSP*); PETSC_EXTERN PetscErrorCode KSPFETIDPSetPressureOperator(KSP,Mat); + +PETSC_EXTERN PetscErrorCode KSPHPDDMSetDeflationSpace(KSP,Mat); +PETSC_EXTERN PetscErrorCode KSPHPDDMGetDeflationSpace(KSP,Mat*); + /*E KSPGMRESCGSRefinementType - How the classical (unmodified) Gram-Schmidt is performed. @@ -333,7 +338,7 @@ PETSC_EXTERN PetscErrorCode KSPGetDiagonalScaleFix(KSP,PetscBool*); PETSC_EXTERN PetscErrorCode KSPView(KSP,PetscViewer); PETSC_EXTERN PetscErrorCode KSPLoad(KSP,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode KSPViewFromOptions(KSP A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode KSPViewFromOptions(KSP,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode KSPReasonView(KSP,PetscViewer); PETSC_EXTERN PetscErrorCode KSPReasonViewFromOptions(KSP); diff --git a/include/petsclog.h b/include/petsclog.h index 88b1093c7d3..5f9f000cbb0 100644 --- a/include/petsclog.h +++ b/include/petsclog.h @@ -59,7 +59,7 @@ PETSC_EXTERN PetscLogDouble petsc_TotalFlops; PETSC_EXTERN PetscLogDouble petsc_tmp_flops; /* Global GPU counters */ -#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) +#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) PETSC_EXTERN PetscLogDouble petsc_ctog_ct; PETSC_EXTERN PetscLogDouble petsc_gtoc_ct; PETSC_EXTERN PetscLogDouble petsc_ctog_sz; @@ -236,19 +236,24 @@ PETSC_STATIC_INLINE PetscErrorCode PetscLogFlops(PetscLogDouble n) } #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) -PETSC_STATIC_INLINE PetscErrorCode PetscLogCpuToGpu(PetscLogDouble size){ +PETSC_STATIC_INLINE PetscErrorCode PetscLogCpuToGpu(PetscLogDouble size) +{ PetscFunctionBegin; petsc_ctog_ct += 1; petsc_ctog_sz += size; PetscFunctionReturn(0); } -PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuToCpu(PetscLogDouble size){ + +PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuToCpu(PetscLogDouble size) +{ PetscFunctionBegin; petsc_gtoc_ct += 1; petsc_gtoc_sz += size; PetscFunctionReturn(0); } -PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuFlops(PetscLogDouble n){ + +PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuFlops(PetscLogDouble n) +{ PetscFunctionBegin; #if defined(PETSC_USE_DEBUG) if (n < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Cannot log negative flops"); @@ -257,18 +262,29 @@ PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuFlops(PetscLogDouble n){ petsc_gflops += PETSC_FLOPS_PER_OP*n; PetscFunctionReturn(0); } 
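The petsclog.h hunk above and below reformats the inline GPU logging helpers (PetscLogCpuToGpu(), PetscLogGpuToCpu(), PetscLogGpuFlops(), PetscLogGpuTimeBegin()/PetscLogGpuTimeEnd()) that feed the GPU columns of -log_view. A minimal usage sketch, not part of the patch: MyLoggedAxpy is a hypothetical caller, and the synchronization point and 2n flop count are assumptions about the device work being timed.

   PETSC_STATIC_INLINE PetscErrorCode MyLoggedAxpy(PetscInt n)
   {
     PetscErrorCode ierr;
     cudaError_t    cerr;

     PetscFunctionBegin;
     ierr = PetscLogGpuTimeBegin();CHKERRQ(ierr);
     /* ... queue device work here, e.g. a cuBLAS axpy or a custom kernel ... */
     cerr = cudaDeviceSynchronize();CHKERRCUDA(cerr);   /* wait so the elapsed time covers the work */
     ierr = PetscLogGpuTimeEnd();CHKERRQ(ierr);
     ierr = PetscLogGpuFlops(2.0*n);CHKERRQ(ierr);      /* an axpy of length n performs 2n flops */
     PetscFunctionReturn(0);
   }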
-PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuTimeBegin(){ + +PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuTimeBegin() +{ PetscErrorCode ierr; PetscFunctionBegin; ierr = PetscTimeSubtract(&petsc_gtime);CHKERRQ(ierr); PetscFunctionReturn(0); } -PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuTimeEnd(){ + +PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuTimeEnd() +{ PetscErrorCode ierr; PetscFunctionBegin; ierr = PetscTimeAdd(&petsc_gtime);CHKERRQ(ierr); PetscFunctionReturn(0); } + +PETSC_STATIC_INLINE PetscErrorCode PetscLogGpuTimeAdd(PetscLogDouble t) +{ + PetscFunctionBegin; + petsc_gtime += t; + PetscFunctionReturn(0); +} #endif PETSC_EXTERN PetscErrorCode PetscGetFlops(PetscLogDouble *); diff --git a/include/petscmat.h b/include/petscmat.h index b3c5f23d978..902ca09d0f1 100644 --- a/include/petscmat.h +++ b/include/petscmat.h @@ -205,7 +205,7 @@ PETSC_EXTERN PetscErrorCode MatCreate(MPI_Comm,Mat*); PETSC_EXTERN PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); PETSC_EXTERN PetscErrorCode MatSetType(Mat,MatType); PETSC_EXTERN PetscErrorCode MatSetFromOptions(Mat); -PETSC_STATIC_INLINE PetscErrorCode MatViewFromOptions(Mat A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode MatViewFromOptions(Mat,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode MatRegister(const char[],PetscErrorCode(*)(Mat)); PETSC_EXTERN PetscErrorCode MatRegisterRootName(const char[],const char[],const char[]); PETSC_EXTERN PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); @@ -411,6 +411,7 @@ typedef enum {MAT_OPTION_MIN = -3, PETSC_EXTERN const char *const *MatOptions; PETSC_EXTERN PetscErrorCode MatSetOption(Mat,MatOption,PetscBool); PETSC_EXTERN PetscErrorCode MatGetOption(Mat,MatOption,PetscBool*); +PETSC_EXTERN PetscErrorCode MatPropagateSymmetryOptions(Mat,Mat); PETSC_EXTERN PetscErrorCode MatGetType(Mat,MatType*); PETSC_EXTERN PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); @@ -428,6 +429,8 @@ PETSC_EXTERN PetscErrorCode MatSeqAIJSetValuesLocalFast(Mat,PetscInt,const Petsc PETSC_EXTERN PetscErrorCode MatSeqAIJSetType(Mat,MatType); PETSC_EXTERN PetscErrorCode MatSeqAIJRegister(const char[],PetscErrorCode (*)(Mat,MatType,MatReuse,Mat *)); PETSC_EXTERN PetscFunctionList MatSeqAIJList; +PETSC_EXTERN PetscErrorCode MatSeqBAIJGetArray(Mat,PetscScalar *[]); +PETSC_EXTERN PetscErrorCode MatSeqBAIJRestoreArray(Mat,PetscScalar *[]); PETSC_EXTERN PetscErrorCode MatSeqSBAIJGetArray(Mat,PetscScalar *[]); PETSC_EXTERN PetscErrorCode MatSeqSBAIJRestoreArray(Mat,PetscScalar *[]); PETSC_EXTERN PetscErrorCode MatDenseGetArray(Mat,PetscScalar *[]); @@ -637,6 +640,7 @@ PETSC_EXTERN PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],Pets PETSC_EXTERN PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec); PETSC_EXTERN PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); PETSC_EXTERN PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec); +PETSC_EXTERN PetscErrorCode MatGetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); PETSC_EXTERN PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); PETSC_EXTERN PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); @@ -1083,15 +1087,16 @@ PETSC_EXTERN PetscErrorCode MatFindZeroRows(Mat,IS*); 
.seealso: MatGetOrdering() J*/ typedef const char* MatOrderingType; -#define MATORDERINGNATURAL "natural" -#define MATORDERINGND "nd" -#define MATORDERING1WD "1wd" -#define MATORDERINGRCM "rcm" -#define MATORDERINGQMD "qmd" -#define MATORDERINGROWLENGTH "rowlength" -#define MATORDERINGWBM "wbm" -#define MATORDERINGSPECTRAL "spectral" -#define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ +#define MATORDERINGNATURAL "natural" +#define MATORDERINGND "nd" +#define MATORDERING1WD "1wd" +#define MATORDERINGRCM "rcm" +#define MATORDERINGQMD "qmd" +#define MATORDERINGROWLENGTH "rowlength" +#define MATORDERINGWBM "wbm" +#define MATORDERINGSPECTRAL "spectral" +#define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ +#define MATORDERINGNATURAL_OR_ND "natural_or_nd" /* special case used for Cholesky and ICC, allows ND when an AIJ matrix is used but Natural when SBAIJ is used */ PETSC_EXTERN PetscErrorCode MatGetOrdering(Mat,MatOrderingType,IS*,IS*); PETSC_EXTERN PetscErrorCode MatGetOrderingList(PetscFunctionList*); @@ -1102,17 +1107,18 @@ PETSC_EXTERN PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS); PETSC_EXTERN PetscErrorCode MatCreateLaplacian(Mat,PetscReal,PetscBool,Mat*); /*S - MatFactorShiftType - Numeric Shift. + MatFactorShiftType - Numeric Shift for factorizations Level: beginner +.seealso: MatGetFactor() S*/ typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; PETSC_EXTERN const char *const MatFactorShiftTypes[]; PETSC_EXTERN const char *const MatFactorShiftTypesDetail[]; /*S - MatFactorError - indicates what type of error in matrix factor + MatFactorError - indicates what type of error was generated in a matrix factorization Level: beginner @@ -1218,9 +1224,17 @@ PETSC_EXTERN PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscI Level: beginner -.seealso: MatFDColoringCreate() ISColoring MatFDColoring + Notes: + Coloring of matrices can be computed directly from the sparse matrix nonzero structure via the MatColoring object or from the mesh from which the + matrix comes via DMCreateColoring(). In general, using the mesh produces a better coloring (fewer colors). + + Once a coloring is available, MatFDColoringCreate() creates an object that can be used to efficiently compute Jacobians using that coloring. This + same object can also be used to efficiently convert data created by Automatic Differentiation tools to PETSc sparse matrices.
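The workflow described in this note can be sketched as follows (illustrative only, not part of the diff; J is assumed to be a matrix whose nonzero pattern is already assembled, and FormFunction/ctx are assumed user-provided callbacks):

static PetscErrorCode SetUpJacobianColoring(Mat J,PetscErrorCode (*FormFunction)(void),void *ctx,MatFDColoring *fdcoloring)
{
  MatColoring    mc;
  ISColoring     iscoloring;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatColoringCreate(J,&mc);CHKERRQ(ierr);
  ierr = MatColoringSetType(mc,MATCOLORINGSL);CHKERRQ(ierr);    /* DMCreateColoring() usually gives fewer colors when a DM is available */
  ierr = MatColoringSetFromOptions(mc);CHKERRQ(ierr);
  ierr = MatColoringApply(mc,&iscoloring);CHKERRQ(ierr);
  ierr = MatColoringDestroy(&mc);CHKERRQ(ierr);
  ierr = MatFDColoringCreate(J,iscoloring,fdcoloring);CHKERRQ(ierr);
  ierr = MatFDColoringSetFunction(*fdcoloring,FormFunction,ctx);CHKERRQ(ierr);
  ierr = MatFDColoringSetFromOptions(*fdcoloring);CHKERRQ(ierr);
  ierr = MatFDColoringSetUp(J,iscoloring,*fdcoloring);CHKERRQ(ierr);
  ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}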
+ +.seealso: MatFDColoringCreate(), MatColoringWeightType, ISColoring, MatFDColoring, DMCreateColoring(), MatColoringCreate(), MatOrdering, MatPartitioning S*/ typedef struct _p_MatColoring* MatColoring; + /*J MatColoringType - String with the name of a PETSc matrix coloring @@ -1228,7 +1242,6 @@ typedef struct _p_MatColoring* MatColoring; .seealso: MatColoringSetType(), MatColoring J*/ - typedef const char* MatColoringType; #define MATCOLORINGJP "jp" #define MATCOLORINGPOWER "power" @@ -1250,6 +1263,8 @@ typedef const char* MatColoringType; Level: intermediate Any additions/changes here MUST also be made in include/petsc/finclude/petscmat.h + +.seealso: MatColoring, MatColoringCreate() E*/ typedef enum {MAT_COLORING_WEIGHT_RANDOM,MAT_COLORING_WEIGHT_LEXICAL,MAT_COLORING_WEIGHT_LF,MAT_COLORING_WEIGHT_SL} MatColoringWeightType; @@ -1279,7 +1294,10 @@ PETSC_EXTERN PetscErrorCode MatISColoringTest(Mat,ISColoring); Level: beginner -.seealso: MatFDColoringCreate() + Notes: + This object is created using a coloring provided by the MatColoring object or DMCreateColoring() + +.seealso: MatFDColoringCreate(), MatColoring, DMCreateColoring() S*/ typedef struct _p_MatFDColoring* MatFDColoring; @@ -1321,7 +1339,14 @@ PETSC_EXTERN PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*); Level: beginner -.seealso: MatPartitioningCreate(), MatPartitioningType + Notes: + There is also a PetscPartitioner object that provides the same functionality. It can utilize the MatPartitioning operations + via PetscPartitionerSetType(p,PETSCPARTITIONERMATPARTITIONING) + + Developer Notes: + It is an extra maintenance and documentation cost to have two objects with the same functionality. + +.seealso: MatPartitioningCreate(), MatPartitioningType, MatColoring, MatGetOrdering() S*/ typedef struct _p_MatPartitioning* MatPartitioning; @@ -1342,26 +1367,22 @@ typedef const char* MatPartitioningType; #define MATPARTITIONINGPTSCOTCH "ptscotch" #define MATPARTITIONINGHIERARCH "hierarch" - PETSC_EXTERN PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*); PETSC_EXTERN PetscErrorCode MatPartitioningSetType(MatPartitioning,MatPartitioningType); PETSC_EXTERN PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt); PETSC_EXTERN PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat); PETSC_EXTERN PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]); PETSC_EXTERN PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []); +PETSC_EXTERN PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning,PetscBool); +PETSC_EXTERN PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning,PetscBool*); PETSC_EXTERN PetscErrorCode MatPartitioningApply(MatPartitioning,IS*); PETSC_EXTERN PetscErrorCode MatPartitioningImprove(MatPartitioning,IS*); PETSC_EXTERN PetscErrorCode MatPartitioningViewImbalance(MatPartitioning,IS); PETSC_EXTERN PetscErrorCode MatPartitioningApplyND(MatPartitioning,IS*); PETSC_EXTERN PetscErrorCode MatPartitioningDestroy(MatPartitioning*); - PETSC_EXTERN PetscErrorCode MatPartitioningRegister(const char[],PetscErrorCode (*)(MatPartitioning)); - - - PETSC_EXTERN PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} - +PETSC_EXTERN PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning,PetscObject,const char[]); PETSC_EXTERN
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning); PETSC_EXTERN PetscErrorCode MatPartitioningGetType(MatPartitioning,MatPartitioningType*); @@ -1425,7 +1446,8 @@ PETSC_EXTERN PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*); PETSC_EXTERN PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*); /* - If you add entries here you must also add them to petsc/finclude/petscmat.h + If you add entries here you must also add them to include/petscmat.h + and src/mat/f90-mod/petscmat.h */ typedef enum { MATOP_SET_VALUES=0, MATOP_GET_ROW=1, @@ -1571,7 +1593,9 @@ typedef enum { MATOP_SET_VALUES=0, MATOP_RESIDUAL=141, MATOP_FDCOLORING_SETUP=142, MATOP_MPICONCATENATESEQ=144, - MATOP_DESTROYSUBMATRICES=145 + MATOP_DESTROYSUBMATRICES=145, + MATOP_TRANSPOSE_SOLVE=146, + MATOP_GET_VALUES_LOCAL=147 } MatOperation; PETSC_EXTERN PetscErrorCode MatSetOperation(Mat,MatOperation,void(*)(void)); PETSC_EXTERN PetscErrorCode MatGetOperation(Mat,MatOperation,void(**)(void)); @@ -1582,6 +1606,7 @@ PETSC_EXTERN PetscErrorCode MatFreeIntermediateDataStructures(Mat); PETSC_EXTERN PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void)); PETSC_EXTERN PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void)); PETSC_EXTERN PetscErrorCode MatShellSetContext(Mat,void*); +PETSC_EXTERN PetscErrorCode MatShellSetVecType(Mat,VecType); PETSC_EXTERN PetscErrorCode MatShellTestMult(Mat,PetscErrorCode (*)(void*,Vec,Vec),Vec,void*,PetscBool*); PETSC_EXTERN PetscErrorCode MatShellTestMultTranspose(Mat,PetscErrorCode (*)(void*,Vec,Vec),Vec,void*,PetscBool*); PETSC_EXTERN PetscErrorCode MatShellSetManageScalingShifts(Mat); @@ -1789,7 +1814,8 @@ PETSC_EXTERN PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat,PetscInt); #endif -PETSC_EXTERN PetscErrorCode MatPinToCPU(Mat,PetscBool); +PETSC_EXTERN PetscErrorCode MatBindToCPU(Mat,PetscBool); +PETSC_DEPRECATED_FUNCTION("Use MatBindToCPU (since v3.13)") PETSC_STATIC_INLINE PetscErrorCode MatPinToCPU(Mat A,PetscBool flg) {return MatBindToCPU(A,flg);} #ifdef PETSC_HAVE_CUDA /*E diff --git a/include/petscmatcoarsen.h b/include/petscmatcoarsen.h index 12f22dab65a..36b98dbb65d 100644 --- a/include/petscmatcoarsen.h +++ b/include/petscmatcoarsen.h @@ -63,7 +63,7 @@ PETSC_EXTERN PetscErrorCode MatCoarsenRegister(const char[],PetscErrorCode (*)(M PETSC_EXTERN PetscErrorCode MatCoarsenView(MatCoarsen,PetscViewer); PETSC_EXTERN PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen); PETSC_EXTERN PetscErrorCode MatCoarsenGetType(MatCoarsen,MatCoarsenType*); -PETSC_STATIC_INLINE PetscErrorCode MatCoarsenViewFromOptions(MatCoarsen A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode MatCoarsenViewFromOptions(MatCoarsen,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscCDCreate(PetscInt,PetscCoarsenData**); PETSC_EXTERN PetscErrorCode PetscCDDestroy(PetscCoarsenData*); diff --git a/include/petscoptions.h b/include/petscoptions.h index 14008cfdd1f..e8577b7a49e 100644 --- a/include/petscoptions.h +++ b/include/petscoptions.h @@ -282,6 +282,7 @@ M*/ #define PetscOptionsBoolArray(a,b,c,d,e,f) PetscOptionsBoolArray_Private(PetscOptionsObject,a,b,c,d,e,f) #define PetscOptionsEnumArray(a,b,c,d,e,f,g) PetscOptionsEnumArray_Private(PetscOptionsObject,a,b,c,d,e,f,g) #define PetscOptionsDeprecated(a,b,c,d) PetscOptionsDeprecated_Private(PetscOptionsObject,a,b,c,d) +#define PetscOptionsDeprecatedNoObject(a,b,c,d) PetscOptionsDeprecated_Private(NULL,a,b,c,d) PETSC_EXTERN PetscErrorCode 
PetscOptionsEnum_Private(PetscOptionItems*,const char[],const char[],const char[],const char *const*,PetscEnum,PetscEnum*,PetscBool*); diff --git a/include/petscpc.h b/include/petscpc.h index 82b4510f7a0..b97b5e05614 100644 --- a/include/petscpc.h +++ b/include/petscpc.h @@ -61,7 +61,6 @@ PETSC_EXTERN PetscErrorCode PCApplyRichardsonExists(PC,PetscBool *); PETSC_EXTERN PetscErrorCode PCSetUseAmat(PC,PetscBool); PETSC_EXTERN PetscErrorCode PCGetUseAmat(PC,PetscBool*); - PETSC_EXTERN PetscErrorCode PCRegister(const char[],PetscErrorCode(*)(PC)); PETSC_EXTERN PetscErrorCode PCReset(PC); @@ -78,7 +77,7 @@ PETSC_EXTERN PetscErrorCode PCGetOperatorsSet(PC,PetscBool *,PetscBool *); PETSC_EXTERN PetscErrorCode PCView(PC,PetscViewer); PETSC_EXTERN PetscErrorCode PCLoad(PC,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode PCViewFromOptions(PC,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PCSetOptionsPrefix(PC,const char[]); PETSC_EXTERN PetscErrorCode PCAppendOptionsPrefix(PC,const char[]); @@ -293,12 +292,16 @@ PETSC_EXTERN PetscErrorCode PCPARMSSetFill(PC,PetscInt,PetscInt,PetscInt); PETSC_EXTERN PetscErrorCode PCGAMGSetType( PC,PCGAMGType); PETSC_EXTERN PetscErrorCode PCGAMGGetType( PC,PCGAMGType*); PETSC_EXTERN PetscErrorCode PCGAMGSetProcEqLim(PC,PetscInt); + PETSC_EXTERN PetscErrorCode PCGAMGSetRepartition(PC,PetscBool); +PETSC_EXTERN PetscErrorCode PCGAMGSetUseSAEstEig(PC,PetscBool); +PETSC_EXTERN PetscErrorCode PCGAMGSetEstEigKSPMaxIt(PC,PetscInt); +PETSC_EXTERN PetscErrorCode PCGAMGSetEstEigKSPType(PC,char[]); +PETSC_EXTERN PetscErrorCode PCGAMGSetEigenvalues(PC,PetscReal,PetscReal); PETSC_EXTERN PetscErrorCode PCGAMGASMSetUseAggs(PC,PetscBool); PETSC_EXTERN PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC,PetscBool); PETSC_EXTERN PetscErrorCode PCGAMGSetCpuPinCoarseGrids(PC,PetscBool); PETSC_EXTERN PetscErrorCode PCGAMGSetCoarseGridLayoutType(PC,PCGAMGLayoutType); -PETSC_EXTERN PetscErrorCode PCGAMGSetSolverType(PC,char[],PetscInt); PETSC_EXTERN PetscErrorCode PCGAMGSetThreshold(PC,PetscReal[],PetscInt); PETSC_EXTERN PetscErrorCode PCGAMGSetThresholdScale(PC,PetscReal); PETSC_EXTERN PetscErrorCode PCGAMGSetCoarseEqLim(PC,PetscInt); @@ -432,7 +435,11 @@ PETSC_EXTERN PetscErrorCode PCDeflationSetCoarseMat(PC,Mat); PETSC_EXTERN PetscErrorCode PCDeflationGetPC(PC,PC*); PETSC_EXTERN PetscErrorCode PCHPDDMSetAuxiliaryMat(PC,IS,Mat,PetscErrorCode (*)(Mat,PetscReal,Vec,Vec,PetscReal,IS,void*),void*); +PETSC_EXTERN PetscErrorCode PCHPDDMSetRHSMat(PC,Mat); +PETSC_EXTERN PetscErrorCode PCHPDDMHasNeumannMat(PC,PetscBool); PETSC_EXTERN PetscErrorCode PCHPDDMSetCoarseCorrectionType(PC,PCHPDDMCoarseCorrectionType); PETSC_EXTERN PetscErrorCode PCHPDDMGetCoarseCorrectionType(PC,PCHPDDMCoarseCorrectionType*); +PETSC_EXTERN PetscErrorCode PCHPDDMFinalizePackage(void); +PETSC_EXTERN PetscErrorCode PCHPDDMInitializePackage(void); #endif /* PETSCPC_H */ diff --git a/include/petscpf.h b/include/petscpf.h index 8019c70da2b..485d7051f2f 100644 --- a/include/petscpf.h +++ b/include/petscpf.h @@ -52,7 +52,7 @@ PETSC_EXTERN PetscErrorCode PFSetFromOptions(PF); PETSC_EXTERN PetscErrorCode PFGetType(PF,PFType*); PETSC_EXTERN PetscErrorCode PFView(PF,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PFViewFromOptions(PF A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode 
PFViewFromOptions(PF,PetscObject,const char[]); #define PFSetOptionsPrefix(a,s) PetscObjectSetOptionsPrefix((PetscObject)(a),s) diff --git a/include/petscsection.h b/include/petscsection.h index 6e7c50d9a85..be2f4af4d71 100644 --- a/include/petscsection.h +++ b/include/petscsection.h @@ -52,7 +52,7 @@ PETSC_EXTERN PetscErrorCode PetscSectionSetFieldOffset(PetscSection, PetscInt, P PETSC_EXTERN PetscErrorCode PetscSectionGetFieldPointOffset(PetscSection, PetscInt, PetscInt, PetscInt*); PETSC_EXTERN PetscErrorCode PetscSectionGetOffsetRange(PetscSection, PetscInt *, PetscInt *); PETSC_EXTERN PetscErrorCode PetscSectionView(PetscSection, PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PetscSectionViewFromOptions(PetscSection A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode PetscSectionViewFromOptions(PetscSection,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscSectionReset(PetscSection); PETSC_EXTERN PetscErrorCode PetscSectionDestroy(PetscSection*); PETSC_EXTERN PetscErrorCode PetscSectionCreateGlobalSection(PetscSection, PetscSF, PetscBool, PetscBool, PetscSection *); diff --git a/include/petscsf.h b/include/petscsf.h index 965414706cb..4bd91518561 100644 --- a/include/petscsf.h +++ b/include/petscsf.h @@ -56,6 +56,21 @@ E*/ typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType; PETSC_EXTERN const char *const PetscSFWindowSyncTypes[]; +/*E + PetscSFWindowFlavorType - Flavor for the creation of MPI windows for PETSCSFWINDOW + +$ PETSCSF_WINDOW_FLAVOR_CREATE - Use MPI_Win_create, no reusage +$ PETSCSF_WINDOW_FLAVOR_DYNAMIC - Use MPI_Win_create_dynamic and dynamically attach pointers +$ PETSCSF_WINDOW_FLAVOR_ALLOCATE - Use MPI_Win_allocate +$ PETSCSF_WINDOW_FLAVOR_SHARED - Use MPI_Win_allocate_shared + + Level: advanced + +.seealso: PetscSFWindowSetFlavorType(), PetscSFWindowGetFlavorType() +E*/ +typedef enum {PETSCSF_WINDOW_FLAVOR_CREATE,PETSCSF_WINDOW_FLAVOR_DYNAMIC,PETSCSF_WINDOW_FLAVOR_ALLOCATE,PETSCSF_WINDOW_FLAVOR_SHARED} PetscSFWindowFlavorType; +PETSC_EXTERN const char *const PetscSFWindowFlavorTypes[]; + /*E PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF @@ -80,12 +95,16 @@ PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*); PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType); PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*); PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PetscSFViewFromOptions(PetscSF A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode PetscSFViewFromOptions(PetscSF,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF); PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF); PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*); PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType); PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*); +PETSC_EXTERN PetscErrorCode PetscSFWindowSetFlavorType(PetscSF,PetscSFWindowFlavorType); +PETSC_EXTERN PetscErrorCode PetscSFWindowGetFlavorType(PetscSF,PetscSFWindowFlavorType*); +PETSC_EXTERN PetscErrorCode MPIAPI PetscSFWindowSetInfo(PetscSF,MPI_Info); +PETSC_EXTERN PetscErrorCode MPIAPI PetscSFWindowGetInfo(PetscSF,MPI_Info*); PETSC_EXTERN PetscErrorCode 
PetscSFSetRankOrder(PetscSF,PetscBool); PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode); PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern); @@ -149,9 +168,44 @@ PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,c return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote); } +/*@C + PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd() + + Collective on PetscSF + + Input Arguments: ++ sf - star forest on which to communicate +. unit - data type associated with each node +- rootdata - buffer to broadcast + + Output Arguments: +. leafdata - buffer to update with values from each leaf's respective root + + Level: intermediate + +.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin() +@*/ PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) { return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE); } + +/*@C + PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin() + + Collective + + Input Arguments: ++ sf - star forest +. unit - data type +- rootdata - buffer to broadcast + + Output Arguments: +. leafdata - buffer to update with values from each leaf's respective root + + Level: intermediate + +.seealso: PetscSFSetGraph(), PetscSFReduceEnd() +@*/ PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) { return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE); } diff --git a/include/petscsnes.h b/include/petscsnes.h index a175418b398..8d5e375a054 100644 --- a/include/petscsnes.h +++ b/include/petscsnes.h @@ -87,7 +87,7 @@ PETSC_EXTERN PetscErrorCode SNESGetSolutionUpdate(SNES,Vec*); PETSC_EXTERN PetscErrorCode SNESGetRhs(SNES,Vec*); PETSC_EXTERN PetscErrorCode SNESView(SNES,PetscViewer); PETSC_EXTERN PetscErrorCode SNESLoad(SNES,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode SNESViewFromOptions(SNES A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode SNESViewFromOptions(SNES,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode SNESReasonView(SNES,PetscViewer); PETSC_EXTERN PetscErrorCode SNESReasonViewFromOptions(SNES); @@ -235,6 +235,7 @@ PETSC_EXTERN PetscErrorCode SNESGetCheckJacobianDomainError(SNES,PetscBool*); .seealso: SNESSolve(), SNESGetConvergedReason(), KSPConvergedReason, SNESSetConvergenceTest() E*/ +#define SNES_CONVERGED_TR_DELTA_DEPRECATED SNES_CONVERGED_TR_DELTA PETSC_DEPRECATED_ENUM("Use SNES_DIVERGED_TR_DELTA (since version 3.12)") typedef enum {/* converged */ SNES_CONVERGED_FNORM_ABS = 2, /* ||F|| < atol */ SNES_CONVERGED_FNORM_RELATIVE = 3, /* ||F|| < rtol*||F_initial|| */ @@ -252,6 +253,7 @@ typedef enum {/* converged */ SNES_DIVERGED_DTOL = -9, /* || F || > divtol*||F_initial|| */ SNES_DIVERGED_JACOBIAN_DOMAIN = -10, /* Jacobian calculation does not make sense */ SNES_DIVERGED_TR_DELTA = -11, + SNES_CONVERGED_TR_DELTA_DEPRECATED = -11, SNES_CONVERGED_ITERATING = 0} SNESConvergedReason; PETSC_EXTERN const char *const*SNESConvergedReasons; diff --git a/include/petscsys.h b/include/petscsys.h index 25ca0bbe2b1..b5d6306d0c9 100644 --- a/include/petscsys.h +++ b/include/petscsys.h @@ -168,8 +168,14 @@ void 
assert_never_put_petsc_headers_inside_an_extern_c(int); void assert_never_p # elif (OMPI_MAJOR_VERSION != PETSC_HAVE_OMPI_MAJOR_VERSION) || (OMPI_MINOR_VERSION != PETSC_HAVE_OMPI_MINOR_VERSION) || (OMPI_RELEASE_VERSION < PETSC_HAVE_OMPI_RELEASE_VERSION) # error "PETSc was configured with one OpenMPI mpi.h version but now appears to be compiling using a different OpenMPI mpi.h version" # endif -#elif defined(OMPI_MAJOR_VERSION) || defined(MPICH_NUMVERSION) -# error "PETSc was configured with undetermined MPI - but now appears to be compiling using either of OpenMPI or a MPICH variant" +#elif defined(PETSC_HAVE_MSMPI_VERSION) +# if !defined(MSMPI_VER) +# error "PETSc was configured with MSMPI but now appears to be compiling using a non-MSMPI mpi.h" +# elif (MSMPI_VER != PETSC_HAVE_MSMPI_VERSION) +# error "PETSc was configured with one MSMPI mpi.h version but now appears to be compiling using a different MSMPI mpi.h version" +# endif +#elif defined(OMPI_MAJOR_VERSION) || defined(MPICH_NUMVERSION) || defined(MSMPI_VER) +# error "PETSc was configured with undetermined MPI - but now appears to be compiling using any of OpenMPI, MS-MPI or a MPICH variant" #endif /* @@ -510,7 +516,7 @@ M*/ . m1 - number of elements to allocate (may be zero) Output Parameter: -. r1 - memory allocated in first chunk +. r1 - memory allocated Note: This uses the sizeof() of the memory type requested to determine the total memory to be allocated, therefore you should not @@ -521,7 +527,7 @@ M*/ $ PetscInt *id; $ PetscMalloc1(10*sizeof(PetscInt),&id); - Does not zero the memory allocatd, used PetscCalloc1() to obtain memory that has been zeroed. + Does not zero the memory allocated, use PetscCalloc1() to obtain memory that has been zeroed. Level: beginner @@ -543,7 +549,7 @@ M*/ . m1 - number of elements to allocate in 1st chunk (may be zero) Output Parameter: -. r1 - memory allocated in first chunk +. r1 - memory allocated Notes: See PetsMalloc1() for more details on usage. @@ -967,7 +973,7 @@ M*/ .seealso: PetscNew(), PetscMalloc(), PetscNewLog(), PetscMalloc1(), PetscCalloc1() M*/ -#define PetscFree(a) ((*PetscTrFree)((void*)(a),__LINE__,PETSC_FUNCTION_NAME,__FILE__) || ((a) = 0,0)) +#define PetscFree(a) ((*PetscTrFree)((void*)(a),__LINE__,PETSC_FUNCTION_NAME,__FILE__) || ((a) = NULL,0)) /*MC PetscFree2 - Frees 2 chunks of memory obtained with PetscMalloc2() @@ -1240,6 +1246,9 @@ PETSC_EXTERN const char *const PetscFileModes[]; PETSC_EXTERN PetscClassId PETSC_LARGEST_CLASSID; PETSC_EXTERN PetscClassId PETSC_OBJECT_CLASSID; PETSC_EXTERN PetscErrorCode PetscClassIdRegister(const char[],PetscClassId *); +PETSC_EXTERN PetscErrorCode PetscObjectGetId(PetscObject,PetscObjectId*); +PETSC_EXTERN PetscErrorCode PetscObjectCompareId(PetscObject,PetscObjectId,PetscBool*); + /* Routines that get memory usage information from the OS @@ -1274,6 +1283,9 @@ PETSC_EXTERN PetscErrorCode PetscPythonFinalize(void); PETSC_EXTERN PetscErrorCode PetscPythonPrintError(void); PETSC_EXTERN PetscErrorCode PetscPythonMonitorSet(PetscObject,const char[]); +PETSC_EXTERN PetscErrorCode PetscMonitorCompare(PetscErrorCode (*)(void),void *,PetscErrorCode (*)(void**),PetscErrorCode (*)(void),void *,PetscErrorCode (*)(void**),PetscBool *); + + /* These are so that in extern C code we can caste function pointers to non-extern C function pointers. 
Since the regular C++ code expects its function pointers to be C++ @@ -2304,7 +2316,11 @@ PETSC_EXTERN PetscErrorCode PetscGetDate(char[],size_t); PETSC_EXTERN PetscErrorCode PetscGetVersion(char[], size_t); PETSC_EXTERN PetscErrorCode PetscGetVersionNumber(PetscInt*,PetscInt*,PetscInt*,PetscInt*); +PETSC_EXTERN PetscErrorCode PetscSortedInt(PetscInt,const PetscInt[],PetscBool*); +PETSC_EXTERN PetscErrorCode PetscSortedMPIInt(PetscInt,const PetscMPIInt[],PetscBool*); +PETSC_EXTERN PetscErrorCode PetscSortedReal(PetscInt,const PetscReal[],PetscBool*); PETSC_EXTERN PetscErrorCode PetscSortInt(PetscInt,PetscInt[]); +PETSC_EXTERN PetscErrorCode PetscSortReverseInt(PetscInt,PetscInt[]); PETSC_EXTERN PetscErrorCode PetscSortedRemoveDupsInt(PetscInt*,PetscInt[]); PETSC_EXTERN PetscErrorCode PetscSortRemoveDupsInt(PetscInt*,PetscInt[]); PETSC_EXTERN PetscErrorCode PetscCheckDupsInt(PetscInt,const PetscInt[],PetscBool*); @@ -2332,6 +2348,8 @@ PETSC_EXTERN PetscErrorCode PetscMergeIntArrayPair(PetscInt,const PetscInt[],con PETSC_EXTERN PetscErrorCode PetscMergeIntArray(PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscInt*,PetscInt**); PETSC_EXTERN PetscErrorCode PetscMergeMPIIntArray(PetscInt,const PetscMPIInt[],PetscInt,const PetscMPIInt[],PetscInt*,PetscMPIInt**); +PETSC_EXTERN PetscErrorCode PetscParallelSortedInt(MPI_Comm, PetscInt, const PetscInt[], PetscBool *); + PETSC_EXTERN PetscErrorCode PetscSetDisplay(void); PETSC_EXTERN PetscErrorCode PetscGetDisplay(char[],size_t); @@ -2365,7 +2383,7 @@ PETSC_EXTERN PetscErrorCode PetscRandomRegister(const char[],PetscErrorCode (*)( PETSC_EXTERN PetscErrorCode PetscRandomSetType(PetscRandom, PetscRandomType); PETSC_EXTERN PetscErrorCode PetscRandomSetFromOptions(PetscRandom); PETSC_EXTERN PetscErrorCode PetscRandomGetType(PetscRandom, PetscRandomType*); -PETSC_STATIC_INLINE PetscErrorCode PetscRandomViewFromOptions(PetscRandom A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode PetscRandomViewFromOptions(PetscRandom,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscRandomView(PetscRandom,PetscViewer); PETSC_EXTERN PetscErrorCode PetscRandomCreate(MPI_Comm,PetscRandom*); diff --git a/include/petscsystypes.h b/include/petscsystypes.h index ff850a19798..fd5a45c7f75 100644 --- a/include/petscsystypes.h +++ b/include/petscsystypes.h @@ -397,11 +397,7 @@ typedef struct _p_PetscObject* PetscObject; .seealso: PetscObjectState, PetscObjectGetId() M*/ -#if defined(PETSC_USING_F90) && !defined(PETSC_USE_FORTRANKIND) /* compaq F90 */ - typedef int PetscObjectId; -#else - typedef PetscInt64 PetscObjectId; -#endif +typedef PetscInt64 PetscObjectId; /*MC PetscObjectState - integer state for a PetscObject @@ -414,11 +410,7 @@ M*/ .seealso: PetscObjectId, PetscObjectStateGet(), PetscObjectStateIncrease(), PetscObjectStateSet() M*/ -#if defined(PETSC_USING_F90) && !defined(PETSC_USE_FORTRANKIND) /* compaq F90 */ - typedef int PetscObjectState; -#else - typedef PetscInt64 PetscObjectState; -#endif +typedef PetscInt64 PetscObjectState; /*S PetscFunctionList - Linked list of functions, possibly stored in dynamic libraries, accessed @@ -426,7 +418,7 @@ M*/ Level: advanced -.seealso: PetscFunctionListAdd(), PetscFunctionListDestroy(), PetscOpFlist +.seealso: PetscFunctionListAdd(), PetscFunctionListDestroy() S*/ typedef struct _n_PetscFunctionList *PetscFunctionList; @@ -449,7 +441,7 @@ typedef void* PetscDLHandle; typedef enum {PETSC_DL_DECIDE=0,PETSC_DL_NOW=1,PETSC_DL_LOCAL=2} 
PetscDLMode; /*S - PetscObjectList - Linked list of PETSc objects, each accessable by string name + PetscObjectList - Linked list of PETSc objects, each accessible by string name Level: developer @@ -532,6 +524,7 @@ typedef enum { /* Updates here must be accompanied by updates in finclude/petscsys.h and the string array in mpits.c */ } PetscBuildTwoSidedType; +/* NOTE: If you change this, you must also change the values in src/vec/f90-mod/petscvec.h */ /*E InsertMode - Whether entries are inserted or added into vectors or matrices diff --git a/include/petsctao.h b/include/petsctao.h index d72ff1a8fdb..0095f8c2ffe 100644 --- a/include/petsctao.h +++ b/include/petsctao.h @@ -34,6 +34,76 @@ PETSC_EXTERN const char *const TaoSubsetTypes[]; .seealso TaoCreate(), TaoDestroy(), TaoSetType(), TaoType S*/ +/*E + TaoADMMUpdateType - Determines the spectral penalty update routine for the augmented Lagrangian term in ADMM. + + Level: advanced + +.seealso TaoADMMSetUpdateType() +E*/ +typedef enum {TAO_ADMM_UPDATE_BASIC,TAO_ADMM_UPDATE_ADAPTIVE,TAO_ADMM_UPDATE_ADAPTIVE_RELAXED} TaoADMMUpdateType; +PETSC_EXTERN const char *const TaoADMMUpdateTypes[]; +/*MC + TAO_ADMM_UPDATE_BASIC - Use the same spectral penalty set at the beginning; no update is performed + + Level: advanced + + Note: The most basic implementation. Generally slower than the adaptive or adaptive relaxed versions. + +.seealso: TaoADMMSetUpdateType(), TAO_ADMM_UPDATE_ADAPTIVE, TAO_ADMM_UPDATE_ADAPTIVE_RELAXED +M*/ + +/*MC + TAO_ADMM_UPDATE_ADAPTIVE - Adaptively update the spectral penalty + + Level: advanced + + Note: Adaptively updates the spectral penalty, using both steepest descent and minimum gradient. + +.seealso: TaoADMMSetUpdateType(), TAO_ADMM_UPDATE_BASIC, TAO_ADMM_UPDATE_ADAPTIVE_RELAXED +M*/ + +/*MC + TAO_ADMM_UPDATE_ADAPTIVE_RELAXED - Adaptively update the spectral penalty and relax the parameter update + + Level: advanced + + Note: In addition to the adaptive spectral penalty update, the x vector update is relaxed by a factor.
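To make the update types concrete, a minimal solver setup might look like this (illustrative only, not part of the diff; the solution vector and the misfit/regularizer callbacks that a real application must supply are omitted):

static PetscErrorCode CreateADMMSolver(MPI_Comm comm,Tao *tao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = TaoCreate(comm,tao);CHKERRQ(ierr);
  ierr = TaoSetType(*tao,TAOADMM);CHKERRQ(ierr);
  ierr = TaoADMMSetUpdateType(*tao,TAO_ADMM_UPDATE_ADAPTIVE_RELAXED);CHKERRQ(ierr);
  ierr = TaoADMMSetRegularizerType(*tao,TAO_ADMM_REGULARIZER_SOFT_THRESH);CHKERRQ(ierr);
  ierr = TaoSetFromOptions(*tao);CHKERRQ(ierr);   /* command-line options may still override these choices */
  PetscFunctionReturn(0);
}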
+ +.seealso: TaoADMMSetUpdateType(), TAO_ADMM_UPDATE_BASIC, TAO_ADMM_UPDATE_ADAPTIVE +M*/ + + +/*E + TaoADMMRegularizerType - Determines the regularizer routine - either user-provided or soft threshold + + Level: advanced + +.seealso TaoADMMSetRegularizerType() +E*/ +typedef enum {TAO_ADMM_REGULARIZER_USER,TAO_ADMM_REGULARIZER_SOFT_THRESH} TaoADMMRegularizerType; +PETSC_EXTERN const char *const TaoADMMRegularizerTypes[]; +/*MC + TAO_ADMM_REGULARIZER_USER - User-provided routines for the regularizer part of ADMM + + Level: advanced + + Note: The user needs to provide appropriate routines and a type for the regularizer solver + +.seealso: TaoADMMSetRegularizerType(), TAO_ADMM_REGULARIZER_SOFT_THRESH +M*/ + +/*MC + TAO_ADMM_REGULARIZER_SOFT_THRESH - Soft thresholding to solve the regularizer part of ADMM + + Level: advanced + + Note: Utilizes the built-in SoftThreshold routines + +.seealso: TaoSoftThreshold(), TaoADMMSetRegularizerObjectiveAndGradientRoutine(), + TaoADMMSetRegularizerHessianRoutine(), TaoADMMSetRegularizerType(), TAO_ADMM_REGULARIZER_USER +M*/ + typedef struct _p_Tao* Tao; /*J @@ -72,6 +142,7 @@ typedef const char *TaoType; #define TAOASFLS "asfls" #define TAOIPM "ipm" #define TAOSHELL "shell" +#define TAOADMM "admm" PETSC_EXTERN PetscClassId TAO_CLASSID; PETSC_EXTERN PetscFunctionList TaoList; @@ -126,7 +197,7 @@ PETSC_EXTERN PetscErrorCode TaoDestroy(Tao*); PETSC_EXTERN PetscErrorCode TaoSetOptionsPrefix(Tao,const char []); PETSC_EXTERN PetscErrorCode TaoView(Tao, PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode TaoViewFromOptions(Tao A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode TaoViewFromOptions(Tao,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode TaoSolve(Tao); @@ -274,4 +345,26 @@ PETSC_EXTERN PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(Tao,Mat,PetscErr PETSC_EXTERN PetscErrorCode TaoBRGNSetRegularizerWeight(Tao,PetscReal); PETSC_EXTERN PetscErrorCode TaoBRGNSetL1SmoothEpsilon(Tao,PetscReal); PETSC_EXTERN PetscErrorCode TaoBRGNSetDictionaryMatrix(Tao,Mat); + +PETSC_EXTERN PetscErrorCode TaoADMMGetMisfitSubsolver(Tao,Tao *); +PETSC_EXTERN PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao,Tao *); +PETSC_EXTERN PetscErrorCode TaoADMMGetDualVector(Tao,Vec*); +PETSC_EXTERN PetscErrorCode TaoADMMGetSpectralPenalty(Tao,PetscReal*); +PETSC_EXTERN PetscErrorCode TaoADMMSetSpectralPenalty(Tao,PetscReal); +PETSC_EXTERN PetscErrorCode TaoGetADMMParentTao(Tao, Tao *); +PETSC_EXTERN PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao,Vec); +PETSC_EXTERN PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao,PetscReal); +PETSC_EXTERN PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao,Mat, Mat,PetscErrorCode (*)(Tao,Vec,Mat,Mat,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao,Mat, Mat,PetscErrorCode (*)(Tao,Vec,Mat,Mat,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao,Mat,Mat,PetscErrorCode (*)(Tao,Vec,Mat,Mat,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao,PetscErrorCode (*)(Tao,Vec,PetscReal *,Vec,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao,Mat,Mat,PetscErrorCode (*)(Tao,Vec,Mat,Mat,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao,PetscErrorCode (*)(Tao,Vec,PetscReal *,Vec,void*),void*); +PETSC_EXTERN PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao, PetscBool); +PETSC_EXTERN PetscErrorCode
TaoADMMSetRegHessianChangeStatus(Tao, PetscBool); +PETSC_EXTERN PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao, PetscReal); +PETSC_EXTERN PetscErrorCode TaoADMMSetRegularizerType(Tao, TaoADMMRegularizerType); +PETSC_EXTERN PetscErrorCode TaoADMMGetRegularizerType(Tao, TaoADMMRegularizerType*); +PETSC_EXTERN PetscErrorCode TaoADMMSetUpdateType(Tao, TaoADMMUpdateType); +PETSC_EXTERN PetscErrorCode TaoADMMGetUpdateType(Tao, TaoADMMUpdateType*); #endif diff --git a/include/petsctaolinesearch.h b/include/petsctaolinesearch.h index 88fbe62f0b9..a7e7f7ae849 100644 --- a/include/petsctaolinesearch.h +++ b/include/petsctaolinesearch.h @@ -37,7 +37,7 @@ PETSC_EXTERN PetscErrorCode TaoLineSearchSetUp(TaoLineSearch); PETSC_EXTERN PetscErrorCode TaoLineSearchDestroy(TaoLineSearch*); PETSC_EXTERN PetscErrorCode TaoLineSearchMonitor(TaoLineSearch,PetscInt,PetscReal,PetscReal); PETSC_EXTERN PetscErrorCode TaoLineSearchView(TaoLineSearch,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode TaoLineSearchViewFromOptions(TaoLineSearch A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode TaoLineSearchViewFromOptions(TaoLineSearch,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode TaoLineSearchSetOptionsPrefix(TaoLineSearch,const char prefix[]); PETSC_EXTERN PetscErrorCode TaoLineSearchReset(TaoLineSearch); diff --git a/include/petsctime.h b/include/petsctime.h index ce9df5ebf37..7588146a621 100644 --- a/include/petsctime.h +++ b/include/petsctime.h @@ -17,7 +17,7 @@ PETSC_EXTERN PetscLogDouble petsc_BaseTime; Synopsis: #include - PetscTime(PetscLogDouble *v) + PetscErrorCode PetscTime(PetscLogDouble *v) Not Collective @@ -49,7 +49,7 @@ M*/ Synopsis: #include - PetscTimeSubtract(&PetscLogDouble *v) + PetscErrorCode PetscTimeSubtract(PetscLogDouble *v) Not Collective @@ -78,7 +78,7 @@ M*/ Synopsis: #include - PetscTimeAdd(PetscLogDouble *v) + PetscErrorCode PetscTimeAdd(PetscLogDouble *v) Not Collective diff --git a/include/petscts.h b/include/petscts.h index 3187b7c763a..c4d1f548239 100644 --- a/include/petscts.h +++ b/include/petscts.h @@ -5,6 +5,7 @@ #if !defined(PETSCTS_H) #define PETSCTS_H #include +#include /*S TS - Abstract PETSc object that manages all time-steppers (ODE integrators) @@ -369,8 +370,6 @@ PETSC_EXTERN PetscErrorCode TSForwardSetSensitivities(TS,PetscInt,Mat); PETSC_EXTERN PetscErrorCode TSForwardGetSensitivities(TS,PetscInt*,Mat*); PETSC_EXTERN PETSC_DEPRECATED_FUNCTION("Use TSCreateQuadratureTS() and TSForwardSetSensitivities() (since version 3.12)") PetscErrorCode TSForwardSetIntegralGradients(TS,PetscInt,Vec *); PETSC_EXTERN PETSC_DEPRECATED_FUNCTION("Use TSForwardGetSensitivities()") PetscErrorCode TSForwardGetIntegralGradients(TS,PetscInt*,Vec **); -PETSC_EXTERN PetscErrorCode TSForwardSetRHSJacobianP(TS,Vec*,PetscErrorCode(*)(TS,PetscReal,Vec,Vec*,void*),void*); -PETSC_EXTERN PetscErrorCode TSForwardComputeRHSJacobianP(TS,PetscReal,Vec,Vec*); PETSC_EXTERN PetscErrorCode TSForwardSetUp(TS); PETSC_EXTERN PetscErrorCode TSForwardReset(TS); PETSC_EXTERN PetscErrorCode TSForwardCostIntegral(TS); @@ -593,8 +592,8 @@ PETSC_EXTERN PetscErrorCode TSGetKSP(TS,KSP*); PETSC_EXTERN PetscErrorCode TSView(TS,PetscViewer); PETSC_EXTERN PetscErrorCode TSLoad(TS,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode TSViewFromOptions(TS A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} -PETSC_STATIC_INLINE PetscErrorCode TSTrajectoryViewFromOptions(TSTrajectory A,PetscObject 
obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode TSViewFromOptions(TS,PetscObject,const char[]); +PETSC_EXTERN PetscErrorCode TSTrajectoryViewFromOptions(TSTrajectory,PetscObject,const char[]); #define TS_FILE_CLASSID 1211225 @@ -814,11 +813,12 @@ typedef const char* TSRKType; #define TSRK7VR "7vr" #define TSRK8VR "8vr" -PETSC_EXTERN PetscErrorCode TSRKGetType(TS ts,TSRKType*); -PETSC_EXTERN PetscErrorCode TSRKSetType(TS ts,TSRKType); +PETSC_EXTERN PetscErrorCode TSRKGetOrder(TS,PetscInt*); +PETSC_EXTERN PetscErrorCode TSRKGetType(TS,TSRKType*); +PETSC_EXTERN PetscErrorCode TSRKSetType(TS,TSRKType); +PETSC_EXTERN PetscErrorCode TSRKGetTableau(TS,PetscInt*,const PetscReal**,const PetscReal**,const PetscReal**,const PetscReal**,PetscInt*,const PetscReal**,PetscBool*); PETSC_EXTERN PetscErrorCode TSRKSetMultirate(TS,PetscBool); PETSC_EXTERN PetscErrorCode TSRKGetMultirate(TS,PetscBool*); -PETSC_EXTERN PetscErrorCode TSRKSetFullyImplicit(TS,PetscBool); PETSC_EXTERN PetscErrorCode TSRKRegister(TSRKType,PetscInt,PetscInt,const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],PetscInt,const PetscReal[]); PETSC_EXTERN PetscErrorCode TSRKInitializePackage(void); PETSC_EXTERN PetscErrorCode TSRKFinalizePackage(void); @@ -832,10 +832,10 @@ PETSC_EXTERN PetscErrorCode TSRKRegisterDestroy(void); .seealso: TSMPRKSetType(), TS, TSMPRK, TSMPRKRegister() J*/ typedef const char* TSMPRKType; -#define TSMPRK2A22 "2a22" -#define TSMPRK2A23 "2a23" -#define TSMPRK2A32 "2a32" -#define TSMPRK2A33 "2a33" +#define TSMPRK2A22 "2a22" +#define TSMPRK2A23 "2a23" +#define TSMPRK2A32 "2a32" +#define TSMPRK2A33 "2a33" #define TSMPRKP2 "p2" #define TSMPRKP3 "p3" @@ -900,6 +900,7 @@ typedef const char* TSARKIMEXType; PETSC_EXTERN PetscErrorCode TSARKIMEXGetType(TS ts,TSARKIMEXType*); PETSC_EXTERN PetscErrorCode TSARKIMEXSetType(TS ts,TSARKIMEXType); PETSC_EXTERN PetscErrorCode TSARKIMEXSetFullyImplicit(TS,PetscBool); +PETSC_EXTERN PetscErrorCode TSARKIMEXGetFullyImplicit(TS,PetscBool*); PETSC_EXTERN PetscErrorCode TSARKIMEXRegister(TSARKIMEXType,PetscInt,PetscInt,const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],PetscInt,const PetscReal[],const PetscReal[]); PETSC_EXTERN PetscErrorCode TSARKIMEXInitializePackage(void); PETSC_EXTERN PetscErrorCode TSARKIMEXFinalizePackage(void); @@ -981,6 +982,7 @@ PETSC_EXTERN PetscErrorCode TSSundialsSetLinearTolerance(TS,PetscReal); PETSC_EXTERN PetscErrorCode TSSundialsMonitorInternalSteps(TS,PetscBool ); PETSC_EXTERN PetscErrorCode TSSundialsGetParameters(TS,PetscInt *,long*[],double*[]); PETSC_EXTERN PetscErrorCode TSSundialsSetMaxl(TS,PetscInt); +PETSC_EXTERN PetscErrorCode TSSundialsSetMaxord(TS,PetscInt); #endif PETSC_EXTERN PetscErrorCode TSThetaSetTheta(TS,PetscReal); @@ -1004,4 +1006,12 @@ PETSC_EXTERN PetscErrorCode SNESTSFormJacobian(SNES,Vec,Mat,Mat,void*); PETSC_EXTERN PetscErrorCode TSRHSJacobianTest(TS,PetscBool*); PETSC_EXTERN PetscErrorCode TSRHSJacobianTestTranspose(TS,PetscBool*); + +PETSC_EXTERN PetscErrorCode TSGetComputeInitialCondition(TS, PetscErrorCode (**)(TS, Vec)); +PETSC_EXTERN PetscErrorCode TSSetComputeInitialCondition(TS, PetscErrorCode (*)(TS, Vec)); +PETSC_EXTERN PetscErrorCode TSComputeInitialCondition(TS, Vec); +PETSC_EXTERN PetscErrorCode TSGetComputeExactError(TS, PetscErrorCode (**)(TS, Vec, Vec)); +PETSC_EXTERN PetscErrorCode TSSetComputeExactError(TS, PetscErrorCode (*)(TS, 
Vec, Vec)); +PETSC_EXTERN PetscErrorCode TSComputeExactError(TS, Vec, Vec); +PETSC_EXTERN PetscErrorCode PetscConvEstUseTS(PetscConvEst); #endif diff --git a/include/petscvec.h b/include/petscvec.h index 5367a079ba7..daaee49b2aa 100644 --- a/include/petscvec.h +++ b/include/petscvec.h @@ -145,7 +145,7 @@ PETSC_EXTERN PetscErrorCode VecCreateShared(MPI_Comm,PetscInt,PetscInt,Vec*); PETSC_EXTERN PetscErrorCode VecCreateNode(MPI_Comm,PetscInt,PetscInt,Vec*); PETSC_EXTERN PetscErrorCode VecSetFromOptions(Vec); -PETSC_STATIC_INLINE PetscErrorCode VecViewFromOptions(Vec A,PetscObject B,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,B,name);} +PETSC_EXTERN PetscErrorCode VecViewFromOptions(Vec,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode VecSetUp(Vec); PETSC_EXTERN PetscErrorCode VecDestroy(Vec*); @@ -344,7 +344,7 @@ PETSC_EXTERN PetscErrorCode VecScatterDestroy(VecScatter*); PETSC_EXTERN PetscErrorCode VecScatterSetUp(VecScatter); PETSC_EXTERN PetscErrorCode VecScatterCopy(VecScatter,VecScatter *); PETSC_EXTERN PetscErrorCode VecScatterView(VecScatter,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode VecScatterViewFromOptions(VecScatter A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN PetscErrorCode VecScatterViewFromOptions(VecScatter,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode VecScatterRemap(VecScatter,PetscInt[],PetscInt[]); PETSC_EXTERN PetscErrorCode VecScatterGetMerged(VecScatter,PetscBool *); @@ -454,7 +454,8 @@ PETSC_EXTERN PetscErrorCode VecMTDotBegin(Vec,PetscInt,const Vec[],PetscScalar[] PETSC_EXTERN PetscErrorCode VecMTDotEnd(Vec,PetscInt,const Vec[],PetscScalar[]); PETSC_EXTERN PetscErrorCode PetscCommSplitReductionBegin(MPI_Comm); -PETSC_EXTERN PetscErrorCode VecPinToCPU(Vec,PetscBool); +PETSC_EXTERN PetscErrorCode VecBindToCPU(Vec,PetscBool); +PETSC_DEPRECATED_FUNCTION("Use VecBindToCPU (since v3.13)") PETSC_STATIC_INLINE PetscErrorCode VecPinToCPU(Vec v,PetscBool flg) {return VecBindToCPU(v,flg);} typedef enum {VEC_IGNORE_OFF_PROC_ENTRIES,VEC_IGNORE_NEGATIVE_INDICES,VEC_SUBSET_OFF_PROC_ENTRIES} VecOption; PETSC_EXTERN PetscErrorCode VecSetOption(Vec,VecOption,PetscBool ); diff --git a/include/petscversion.h b/include/petscversion.h index a3cb6df1dc2..57470b9f576 100644 --- a/include/petscversion.h +++ b/include/petscversion.h @@ -4,7 +4,7 @@ #define PETSC_VERSION_RELEASE 0 #define PETSC_VERSION_MAJOR 3 #define PETSC_VERSION_MINOR 12 -#define PETSC_VERSION_SUBMINOR 0 +#define PETSC_VERSION_SUBMINOR 4 #define PETSC_VERSION_PATCH 0 #define PETSC_RELEASE_DATE "Sep, 29, 2019" #define PETSC_VERSION_DATE "unknown" diff --git a/include/petscviewer.h b/include/petscviewer.h index b91e92a87a2..78d0cff146c 100644 --- a/include/petscviewer.h +++ b/include/petscviewer.h @@ -33,6 +33,7 @@ typedef const char* PetscViewerType; #define PETSCVIEWERGLVIS "glvis" #define PETSCVIEWERADIOS "adios" #define PETSCVIEWERADIOS2 "adios2" +#define PETSCVIEWEREXODUSII "exodusii" PETSC_EXTERN PetscFunctionList PetscViewerList; PETSC_EXTERN PetscErrorCode PetscViewerInitializePackage(void); @@ -96,7 +97,7 @@ PETSC_EXTERN PetscErrorCode PetscViewerRestoreSubViewer(PetscViewer,MPI_Comm,Pet PETSC_EXTERN PetscErrorCode PetscViewerSetUp(PetscViewer); PETSC_EXTERN PetscErrorCode PetscViewerView(PetscViewer,PetscViewer); -PETSC_STATIC_INLINE PetscErrorCode PetscViewerViewFromOptions(PetscViewer A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);} +PETSC_EXTERN 
PetscErrorCode PetscViewerViewFromOptions(PetscViewer,PetscObject,const char[]); PETSC_EXTERN PetscErrorCode PetscViewerSetOptionsPrefix(PetscViewer,const char[]); PETSC_EXTERN PetscErrorCode PetscViewerAppendOptionsPrefix(PetscViewer,const char[]); @@ -243,8 +244,8 @@ PETSC_EXTERN PetscErrorCode PetscViewerSiloGetMeshName(PetscViewer, char **); PETSC_EXTERN PetscErrorCode PetscViewerSiloSetMeshName(PetscViewer, const char []); PETSC_EXTERN PetscErrorCode PetscViewerSiloClearMeshName(PetscViewer); -typedef enum {PETSC_VTK_POINT_FIELD, PETSC_VTK_POINT_VECTOR_FIELD, PETSC_VTK_CELL_FIELD, PETSC_VTK_CELL_VECTOR_FIELD} PetscViewerVTKFieldType; -PETSC_EXTERN PetscErrorCode PetscViewerVTKAddField(PetscViewer,PetscObject,PetscErrorCode (*PetscViewerVTKWriteFunction)(PetscObject,PetscViewer),PetscViewerVTKFieldType,PetscBool,PetscObject); +typedef enum {PETSC_VTK_INVALID, PETSC_VTK_POINT_FIELD, PETSC_VTK_POINT_VECTOR_FIELD, PETSC_VTK_CELL_FIELD, PETSC_VTK_CELL_VECTOR_FIELD} PetscViewerVTKFieldType; +PETSC_EXTERN PetscErrorCode PetscViewerVTKAddField(PetscViewer,PetscObject,PetscErrorCode (*PetscViewerVTKWriteFunction)(PetscObject,PetscViewer),PetscInt,PetscViewerVTKFieldType,PetscBool,PetscObject); PETSC_EXTERN PetscErrorCode PetscViewerVTKGetDM(PetscViewer,PetscObject*); PETSC_EXTERN PetscErrorCode PetscViewerVTKOpen(MPI_Comm,const char[],PetscFileMode,PetscViewer*); @@ -261,6 +262,7 @@ PETSC_EXTERN PetscViewer PETSC_VIEWER_BINARY_(MPI_Comm); PETSC_EXTERN PetscViewer PETSC_VIEWER_MATLAB_(MPI_Comm); PETSC_EXTERN PetscViewer PETSC_VIEWER_HDF5_(MPI_Comm); PETSC_EXTERN PetscViewer PETSC_VIEWER_GLVIS_(MPI_Comm); +PETSC_EXTERN PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm); PETSC_EXTERN PetscViewer PETSC_VIEWER_MATHEMATICA_WORLD_PRIVATE; /*MC diff --git a/include/petscviewerexodusii.h b/include/petscviewerexodusii.h new file mode 100644 index 00000000000..05a7f746617 --- /dev/null +++ b/include/petscviewerexodusii.h @@ -0,0 +1,14 @@ + +#if !defined(PETSCVIEWEREXODUSII_H) +#define PETSCVIEWEREXODUSII_H + +#include <petscviewer.h> + +#if defined(PETSC_HAVE_EXODUSII) +#include <exodusII.h> + +PETSC_EXTERN PetscErrorCode PetscViewerExodusIIGetId(PetscViewer,int*); + +PETSC_EXTERN PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm,const char[],PetscFileMode,PetscViewer*); +#endif /* defined(PETSC_HAVE_EXODUSII) */ +#endif diff --git a/lib/petsc/bin/maint/convertExamplesUtils.py b/lib/petsc/bin/maint/convertExamplesUtils.py index 9755e647a1b..17681bf9df3 100644 --- a/lib/petsc/bin/maint/convertExamplesUtils.py +++ b/lib/petsc/bin/maint/convertExamplesUtils.py @@ -37,7 +37,7 @@ makefileMap["NOSINGLE"]="buildrequires: !single" makefileMap["DOUBLEINT32"]="buildrequires: !define(USE_64BIT_INDICES) define(PETSC_USE_REAL_DOUBLE)" -makefileMap["THREADSAFETY"]="buildrequires: define(PETSC_USING_FREEFORM) define(PETSC_USING_F90)" +makefileMap["THREADSAFETY"]="buildrequires: define(PETSC_USING_FREEFORM)" makefileMap["F2003"]="buildrequires: define(PETSC_USING_FREEFORM) define(PETSC_USING_F2003)" #makefileMap["F90_DATATYPES"]="" # ??
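Since the new petscviewerexodusii.h header above exposes only two entry points, a short usage sketch may help (illustrative only, not part of the diff; it assumes a PETSc build with ExodusII support, and "mesh.exo" is a placeholder file name):

static PetscErrorCode InspectExodusFile(MPI_Comm comm)
{
  PetscViewer    viewer;
  int            exoid;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscViewerExodusIIOpen(comm,"mesh.exo",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
  ierr = PetscViewerExodusIIGetId(viewer,&exoid);CHKERRQ(ierr);   /* raw id usable with the ExodusII library API */
  ierr = PetscPrintf(comm,"exodus file id %d\n",exoid);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}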
diff --git a/lib/petsc/bin/maint/gcov.py b/lib/petsc/bin/maint/gcov.py index bc3c47dbf02..e07195945a6 100755 --- a/lib/petsc/bin/maint/gcov.py +++ b/lib/petsc/bin/maint/gcov.py @@ -2,21 +2,26 @@ # # Usage: # Run gcov on the results of "make alltests" and create tar ball containing coverage results for one machine -# ./gcov.py -run_gcov +# ./gcov.py --run_gcov # Generate html pages showing coverage by merging tar balls from multiple machines (index_gcov1.html and index_gcov2.html) -# ./gcov.py -merge_gcov [LOC] tarballs +# ./gcov.py --merge_gcov [LOC] tarballs # from __future__ import print_function import os +import glob +import inspect import shutil import operator +import optparse import sys from time import gmtime,strftime -PETSC_DIR = os.environ['PETSC_DIR'] +thisfile = os.path.abspath(inspect.getfile(inspect.currentframe())) +pdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(thisfile))))) +sys.path.insert(0, os.path.join(pdir, 'config')) -def run_gcov(gcov_dir): +def run_gcov(gcov_dir,petsc_dir,petsc_arch): # 1. Runs gcov # 2. Saves the untested source code line numbers in @@ -27,7 +32,7 @@ def run_gcov(gcov_dir): shutil.rmtree(gcov_dir) os.mkdir(gcov_dir) print("Running gcov\n") - for root,dirs,files in os.walk(os.path.join(PETSC_DIR,"src")): + for root,dirs,files in os.walk(os.path.join(petsc_dir,"src")): # Directories to skip if (root.find('tests') != -1) | (root.find('tutorials') != -1) | (root.find('benchmarks') != -1)| (root.find('examples') != -1) | (root.find('src'+os.sep+'dm'+os.sep+'mesh') != -1) | (root.find('draw'+os.sep+'impls'+os.sep+'win32') != -1) | (root.find('impls'+os.sep+'python') != -1) : continue @@ -36,9 +41,8 @@ def run_gcov(gcov_dir): csrc_file = file_name.endswith('.c') if csrc_file: c_file = file_name.split('.c')[0] - PETSC_ARCH = os.environ['PETSC_ARCH'] - OBJDIR = os.path.join(PETSC_DIR, PETSC_ARCH, 'obj') - objpath = os.path.join(OBJDIR, os.path.relpath(c_file, os.path.join(PETSC_DIR,"src"))) + OBJDIR = os.path.join(petsc_dir, petsc_arch, 'obj') + objpath = os.path.join(OBJDIR, os.path.relpath(c_file, os.path.join(petsc_dir,"src"))) gcov_graph_file = objpath+".gcno" gcov_data_file = objpath+".gcda" if os.path.isfile(gcov_graph_file) and os.path.isfile(gcov_data_file): @@ -48,7 +52,7 @@ def run_gcov(gcov_dir): gcov_file = file_name+".gcov" try: gcov_fid = open(gcov_file,'r') - root_tmp1 = root.split(PETSC_DIR+os.sep)[1].replace(os.sep,'_') + root_tmp1 = root.split(petsc_dir+os.sep)[1].replace(os.sep,'_') lines_fid = open(os.path.join(gcov_dir,root_tmp1+'_'+file_name+'.lines'),'w') for line in gcov_fid: if line.find("#####") > 0: @@ -61,7 +65,7 @@ def run_gcov(gcov_dir): else: # gcov did not create .gcno or .gcda file,save the source code line numbers to .lines file file_id = open(file_name,'r') - root_tmp1 = root.split(PETSC_DIR+os.sep)[1].replace(os.sep,'_') + root_tmp1 = root.split(petsc_dir+os.sep)[1].replace(os.sep,'_') lines_fid = open(os.path.join(gcov_dir,root_tmp1+'_'+file_name+'.lines'),'w') nlines = 0 line_num = 1 @@ -82,17 +86,22 @@ def run_gcov(gcov_dir): print("""Finshed running gcov on PETSc source code""") return -def make_tarball(dirname): +def make_tarball(dirname,petsc_dir,petsc_arch): # Create tarball of .lines files stored in gcov_dir - print("""Creating tarball in %s to store gcov results files""" %(PETSC_DIR)) + print("""Creating tarball in %s to store gcov results files""" %(petsc_dir)) + curdir=os.path.abspath(os.path.curdir) os.chdir(dirname) - os.system("tar -czf "+PETSC_DIR+os.sep+"gcov.tar.gz 
*.lines") + os.system("tar -czf "+petsc_dir+os.sep+"gcov.tar.gz *.lines") + os.chdir(petsc_dir) shutil.rmtree(dirname) - print("""Tarball created in %s"""%(PETSC_DIR)) + # Copy file so artifacts in CI propogate without overwriting + shutil.copyfile('gcov.tar.gz',os.path.join(petsc_arch,'gcov.tar.gz')) + print("""Tarball created in %s"""%(petsc_dir)) + os.chdir(curdir) return -def make_htmlpage(gcov_dir,LOC,tarballs): +def make_htmlpage(gcov_dir,petsc_dir,LOC,tarballs,isCI): # Create index_gcov webpages using information processed from # running gcov @@ -109,9 +118,6 @@ def make_htmlpage(gcov_dir,LOC,tarballs): cwd = os.getcwd() # -------------------------- Stage 1 ------------------------------- len_tarballs = len(tarballs) - if len_tarballs == 0: - print("No gcov tar balls found in directory %s" %(cwd)) - sys.exit() print("%s tarballs found\n%s" %(len_tarballs,tarballs)) print("Extracting gcov directories from tar balls") @@ -122,7 +128,7 @@ def make_htmlpage(gcov_dir,LOC,tarballs): dir = os.path.join(gcov_dir,str(i)) tmp.append(dir) os.mkdir(dir) - os.system("cd "+dir+";gunzip -c "+cwd+os.sep+tarballs[i] + "|tar -xof -") + os.system("cd "+dir+";gunzip -c "+tarballs[i] + "|tar -xof -") tmp.append(len(os.listdir(dir))) tmp_dirs.append(tmp) @@ -139,6 +145,7 @@ def make_htmlpage(gcov_dir,LOC,tarballs): print("Merging files") nfiles = tmp_dirs[0][1] files_dir1 = os.listdir(tmp_dirs[0][0]) + print(files_dir1) for i in range(0,nfiles): out_file = os.path.join(gcov_dir,files_dir1[i]) out_fid = open(out_file,'w') @@ -218,7 +225,7 @@ def make_htmlpage(gcov_dir,LOC,tarballs): nfiles_not_processed = 0 sep = LOC+os.sep for file_ctr in range(0,file_len): - inhtml_file = PETSC_DIR+os.sep+src_not_tested_path[file_ctr]+os.sep+src_not_tested_filename[file_ctr]+'.html' + inhtml_file = petsc_dir+os.sep+src_not_tested_path[file_ctr]+os.sep+src_not_tested_filename[file_ctr]+'.html' outhtml_file = LOC+os.sep+src_not_tested_path[file_ctr]+os.sep+src_not_tested_filename[file_ctr]+'.gcov.html' try: inhtml_fid = open(inhtml_file,"r") @@ -387,32 +394,73 @@ def make_htmlpage(gcov_dir,LOC,tarballs): def main(): - USER = os.environ['USER'] + parser = optparse.OptionParser(usage="%prog [options] ") + parser.add_option('-p', '--petsc_dir', dest='petsc_dir', + help='Location of PETSC_DIR', + default='') + parser.add_option('-a', '--petsc_arch', dest='petsc_arch', + help='Location of PETSC_ARCH', + default='') + parser.add_option('-l', '--loc', dest='loc', + help='Location', + default='') + parser.add_option('-r', '--run_gcov', dest='run_gcov', + help='Running gcov and printing tarball', + action='store_true',default=False) + parser.add_option('-m', '--merge_gcov', dest='merge_gcov', + help='Merging gcov results and creating main html page', + action='store_true',default=False) + options, args = parser.parse_args() + + if 'USER' in os.environ: + USER = os.environ['USER'] + else: + USER = 'petsc_ci' gcov_dir = "/tmp/gcov-"+USER - if (sys.argv[1] == "-run_gcov"): + if options.petsc_dir: + petsc_dir = options.petsc_dir + else: + petsc_dir = pdir + if options.petsc_arch: + petsc_arch = options.petsc_arch + else: + if 'PETSC_ARCH' in os.environ: + if os.environ['PETSC_ARCH']: + petsc_arch = os.environ['PETSC_ARCH'] + else: + print("Must specify PETSC_ARCH with --petsc_arch") + return + + if options.run_gcov: print("Running gcov and creating tarball") - run_gcov(gcov_dir) - make_tarball(gcov_dir) - elif (sys.argv[1] == "-merge_gcov"): + run_gcov(gcov_dir,petsc_dir,petsc_arch) + make_tarball(gcov_dir,petsc_dir,petsc_arch) + elif 
options.merge_gcov: print("Creating main html page") - # check to see if LOC is given - if os.path.isdir(sys.argv[2]): - print("Using %s to save the main HTML file pages" % (sys.argv[2])) - LOC = sys.argv[2] - tarballs = sys.argv[3:] + # check to see if LOC is given + if options.loc: + print("Using %s to save the main HTML file pages" % (options.loc)) + LOC = options.loc else: print("No Directory specified for saving main HTML file pages, using PETSc root directory") - LOC = PETSC_DIR - tarballs = sys.argv[2:] + LOC = petsc_dir + + tarballs = glob.glob(os.path.join(LOC,'*.tar.gz')) + + # Gitlab CI organizes things differently + isCI=False + if len(tarballs)==0: + tarballs=glob.glob(os.path.join(LOC,'arch-*/gcov.tar.gz')) + isCI=True + + if len(tarballs)==0: + print("No coverage tarballs found") + return - make_htmlpage(gcov_dir,LOC,tarballs) + make_htmlpage(gcov_dir,petsc_dir,LOC,tarballs,isCI) else: - print("No or invalid option specified:") - print("Usage: To run gcov and create tarball") - print(" ./gcov.py -run_gcov ") - print("Usage: To create main html page") - print(" ./gcov.py -merge_gcov [LOC] tarballs") + parser.print_usage() if __name__ == '__main__': main() diff --git a/lib/petsc/bin/maint/generatefortranstubs.py b/lib/petsc/bin/maint/generatefortranstubs.py index 8c0c6e062cc..1d96f296ac1 100755 --- a/lib/petsc/bin/maint/generatefortranstubs.py +++ b/lib/petsc/bin/maint/generatefortranstubs.py @@ -75,13 +75,13 @@ def FixDir(petscdir,dir,verbose): + Generates a makefile + copies over Fortran interface files that are generated''' submansec = 'unknown' - mansec = 'unknown' + mansec = 'unknown' cnames = [] hnames = [] - parentdir =os.path.abspath(os.path.join(dir,'..')) + parentdir = os.path.abspath(os.path.join(dir,'..')) for f in os.listdir(dir): ext = os.path.splitext(f)[1] - if ext == '.c': + if ext == '.c' or ext == '.cxx': FixFile(os.path.join(dir, f)) cnames.append(f) elif ext == '.h90': @@ -155,7 +155,7 @@ def FixDir(petscdir,dir,verbose): def PrepFtnDir(dir): - ''' Generate a fnt-auto directory if needed''' + ''' Generate a ftn-auto directory if needed''' import shutil if os.path.exists(dir) and not os.path.isdir(dir): raise RuntimeError('Error - specified path is not a dir: ' + dir) @@ -171,7 +171,7 @@ def PrepFtnDir(dir): def processDir(petscdir, bfort, verbose, dirpath, dirnames, filenames): ''' Runs bfort on a directory and then fixes the files generated by bfort including moving generated F90 fortran interface files''' outdir = os.path.join(dirpath,'ftn-auto') - newls = [l for l in filenames if os.path.splitext(l)[1] in ['.c','.h','.cu']] + newls = [l for l in filenames if os.path.splitext(l)[1] in ['.c','.h','.cxx','.cu']] if newls: PrepFtnDir(outdir) options = ['-dir '+outdir, '-mnative', '-ansi', '-nomsgs', '-noprofile', '-anyname', '-mapptr', @@ -209,6 +209,8 @@ def processf90interfaces(petscdir,verbose): fdr = open(os.path.join(petscdir,'src',mansec,'f90-mod','ftn-auto-interfaces',submansec+'-tmpdir',sfile)) txt = fdr.readline() while txt: + if 'integer z' in txt: txt = ' PetscErrorCode z\n' + if 'integer a ! MPI_Comm' in txt: txt = ' MPI_Comm a ! 
MPI_Comm\n' fd.write(txt) if txt.find('subroutine ') > -1 and txt.find('end subroutine') == -1: while txt.endswith('&\n'): diff --git a/lib/petsc/bin/maint/petsc-val.supp b/lib/petsc/bin/maint/petsc-val.supp index dc37a8f823e..6ad4918b9a1 100644 --- a/lib/petsc/bin/maint/petsc-val.supp +++ b/lib/petsc/bin/maint/petsc-val.supp @@ -1,14 +1,3 @@ -{ - - Memcheck:Cond - fun:fd_to_stream - fun:_gfortrani_init_units - fun:init - fun:call_init.part.0 - fun:_dl_init - obj:/lib/x86_64-linux-gnu/ld-2.15.so -} - { Memcheck:Param @@ -37,37 +26,3 @@ fun:MPIDI_CH3i_Progress_wait fun:MPIDI_CH3I_Progress } - -{ - - Memcheck:Param - writev(vector[...]) - fun:writev - fun:MPL_large_writev - fun:MPIDI_CH3I_Sock_writev - fun:MPIDI_CH3_iSendv - fun:MPIDI_CH3_EagerContigIsend - fun:MPID_Isend - fun:PMPI_Isend - fun:p4est_balance_ext - fun:p4est_balance -} - -{ - - Memcheck:Param - writev(vector[...]) - fun:writev - fun:MPL_large_writev - fun:MPIDI_CH3I_Sock_writev - fun:MPIDI_CH3_iSendv - fun:MPIDI_CH3_EagerContigIsend - fun:MPID_Isend - fun:PMPI_Isend - fun:p4est_partition_for_coarsening - fun:p4est_partition_ext -} - - - - diff --git a/lib/petsc/bin/maint/rebuildtar b/lib/petsc/bin/maint/rebuildtar index 688f81c0fb8..4573a6470ce 100755 --- a/lib/petsc/bin/maint/rebuildtar +++ b/lib/petsc/bin/maint/rebuildtar @@ -4,6 +4,7 @@ set -x export LANG=en_US export LC=C export LC_ALL=C +export PATH=$HOME/bin/python2:$PATH PETSC_DIR=/home/balay/git-repo/petsc-maint.clone diff --git a/lib/petsc/bin/petscdiff b/lib/petsc/bin/petscdiff index 88ae0a41920..176a94de323 100755 --- a/lib/petsc/bin/petscdiff +++ b/lib/petsc/bin/petscdiff @@ -10,12 +10,16 @@ print_usage() { cat >&2 < ${file}${filter_suffix}" +} + if [ "x${RM}" = "x" ]; then RM="rm"; fi if [ "x${SED}" = "x" ]; then SED="sed"; fi if [ "x${DIFF}" = "x" ]; then DIFF="diff -w"; @@ -56,7 +74,13 @@ elif [ -f ${1} ]; then else if ${mvfile}; then echo "mv'ing $2 --> $1" - mv "$2" "$1" + # If filter need to filter first + if ${filter}; then + filter_file "${2}" "${filter_cmd}" + mv "${2}${filter_suffix}" "$1" + else + mv "$2" "$1" + fi exit 0 else echo Error! file1 check failed: "${1}" @@ -66,7 +90,12 @@ else echo "mvfile_ifalt" if echo $1 | grep '_alt.out'; then echo "mv'ing $2 --> $1" - mv "$2" "$1" + if ${filter}; then + filter_file "${2}" "${filter_cmd}" + mv "${2}${filter_suffix}" "$1" + else + mv "$2" "$1" + fi exit 0 fi fi @@ -79,6 +108,15 @@ else exit 1 fi +if ${filter}; then + filter_file "${file2}" "${filter_cmd}" + file2="${file2}${filter_suffix}" # Will need to remove later +fi +if ${filter_output}; then + filter_file "${file1}" "${filter_output_cmd}" + file1="${file1}${filter_suffix}" # Will need to remove later +fi + if ! ${justdiff}; then tmpA=`mktemp -t petscdiffA.XXXXXX` ; tmpB=`mktemp -t petscdiffB.XXXXXX` ; @@ -114,4 +152,14 @@ if ${mvfile_ifalt} && test ${err} -gt 0; then fi fi +# For debugging filters, it is useful to be able to keep the files +if ! ${keep_files}; then + if ${filter}; then + ${RM} -f ${file2} # Temporary file. See above + fi + if ${filter_output}; then + ${RM} -f ${file1} # Temporary file. 
See above + fi +fi + exit ${err}; diff --git a/lib/petsc/conf/rules b/lib/petsc/conf/rules index 1386613a28b..d4c1bf7d97c 100644 --- a/lib/petsc/conf/rules +++ b/lib/petsc/conf/rules @@ -39,7 +39,13 @@ gnumake: +@echo "make gnumake is deprecated, use make libs" +@make libs -libs: +${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/files: + @touch -t 197102020000 ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/files + +./${PETSC_ARCH}/tests/testfiles: + @${MKDIR} -p ./${PETSC_ARCH}/tests && touch -t 197102020000 ./${PETSC_ARCH}/tests/testfiles + +libs: ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/files ./${PETSC_ARCH}/tests/testfiles +@cd ${PETSC_DIR} && MAKEFLAGS="-j$(MAKE_NP) -l$(MAKE_LOAD) $(MAKEFLAGS)" ${OMAKE_PRINTDIR} -f gmakefile ${MAKE_PAR_OUT_FLG} V=${V} # Does nothing; needed for some rules that require actions. @@ -152,9 +158,6 @@ alltree: ${ACTION} PETSC_ARCH=${PETSC_ARCH} LOC=${LOC} ; ) fi; \ done ; fi -getpetscflags: - -@echo ${PETSCFLAGS} - getmpilinklibs: -@echo ${MPI_LIB} @@ -273,6 +276,8 @@ getautoconfargs: .F.o .F90.o .F95.o: ${PETSC_FCOMPILE} -o $@ $< +.f.o .f90.o .f95.o: + ${FC} -c ${FC_FLAGS} ${FFLAGS} -o $@ $< # # These rules are for compiling the test examples. # @@ -322,6 +327,7 @@ getautoconfargs: grep -v "may result in errors or" | \ grep -v "is deprecated" | \ grep -v "Werror=format-security" | \ + grep -v " -Werror " | \ egrep -i '(Error|warning|Can|Unresolved)' >> /dev/null;\ if [ "$$?" != 1 ]; then \ printf ${PETSC_TEXT_HILIGHT}"*******************Error detected during compile or link!*******************\n";\ @@ -368,6 +374,7 @@ getautoconfargs: grep -v "IPO Error: unresolved" | \ grep -v "warning multiple definitions of symbol _matdensegetarray_" | \ grep -v "Werror=format-security" | \ + grep -v " -Werror " | \ egrep -i '(Error|warning|Can|Unresolved)' >> /dev/null ; \ if [ "$$?" != 1 ]; then \ printf ${PETSC_TEXT_HILIGHT}"*******************Error detected during compile or link!*******************\n";\ @@ -570,7 +577,7 @@ html: chk_c2html ${RM} $${loc}/$$i.html; \ echo "
Actual source code: $${iroot}

" > $${loc}/$$i.html; \ sed -e "s/CHKERRQ(ierr);//g" -e "s/PetscFunctionReturn(0)/return(0)/g" -e "s/ierr [ ]*= //g" -e "s/PETSC[A-Z]*_DLLEXPORT//g" $$i | ${C2HTML} -n | \ - awk '{ sub(/
/,"
\n"); print }'  | ${PETSC_DIR}/lib/petsc/bin/maint/fixinclude $$i $${PETSC_DIR} | \
+              awk '{ sub(/
/,"
\n"); print }'  | ${PETSC_DIR}/lib/petsc/bin/maint/fixinclude $$i $${PETSC_DIR} | \
               egrep -v '(PetscValid|PetscFunctionBegin|PetscCheck|PetscErrorCode ierr;|#if !defined\(__|#define __|#undef __|EXTERN_C )' | \
               ${MAPNAMES} -map /tmp/$$USER.htmlmap -inhtml | sed -e s?ROOT?$${IROOT}?g >> $${loc}/$$i.html ; \
 	    fi; \
@@ -694,7 +701,7 @@ checkTestCoverage:
 	  fi; \
 	done
 
-#  Lists all the URLs in the PETSc repository that are unaccessable, nonexistent, or permanently moved (301)
+#  Lists all the URLs in the PETSc repository that are inaccessible, nonexistent, or permanently moved (301)
 #  REPLACE=1 locations marked as permanently moved (301) are replaced in the repository
 #  This code is fragile; always check the changes after a use of REPLACE=1 before committing the changes
 #
diff --git a/lib/petsc/conf/test.common b/lib/petsc/conf/test.common
index f1e9114a679..3a33846ffd8 100644
--- a/lib/petsc/conf/test.common
+++ b/lib/petsc/conf/test.common
@@ -4,7 +4,7 @@
 TESTMODE = testexamples
 ALLTESTS_CHECK_FAILURES = no
 ALLTESTS_MAKEFILE = gmakefile.test
-alltests:
+alltests: ./${PETSC_ARCH}/tests/testfiles
 	-@${RM} -rf ${PETSC_ARCH}/lib/petsc/conf/alltests.log alltests.log
 	+@if [ -f ${PETSC_DIR}/share/petsc/examples/gmakefile.test ] ; then \
             ALLTESTS_MAKEFILE=${PETSC_DIR}/share/petsc/examples/gmakefile.test ; \
@@ -22,10 +22,10 @@ alltests:
 allgtests-tap: allgtest-tap
 	+@${OMAKE} -f gmakefile.test PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} check-test-errors
 
-allgtest-tap:
+allgtest-tap: ./${PETSC_ARCH}/tests/testfiles
 	+@MAKEFLAGS="-j$(MAKE_TEST_NP) -l$(MAKE_LOAD) $(MAKEFLAGS)" ${OMAKE} -f gmakefile.test PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} test OUTPUT=1
 
-allgtest:
+allgtest: ./${PETSC_ARCH}/tests/testfiles
 	+@MAKEFLAGS="-j$(MAKE_TEST_NP) -l$(MAKE_LOAD) $(MAKEFLAGS)" ${OMAKE} -k -f ${ALLTESTS_MAKEFILE} PETSC_ARCH=${PETSC_ARCH} PETSC_DIR=${PETSC_DIR} test V=0 2>&1 | egrep -v '^(ok [^#]*(# SKIP|# TODO|$$)|[A-Za-z][A-Za-z0-9_]*\.(c|F|cxx|F90).$$)'
 
 cleantest:
diff --git a/lib/petsc/conf/variables b/lib/petsc/conf/variables
index 6d0ced7b265..a7de68e2ae0 100644
--- a/lib/petsc/conf/variables
+++ b/lib/petsc/conf/variables
@@ -11,7 +11,8 @@
 PETSC_LIB_DIR	     = ${PETSC_DIR}/${PETSC_ARCH}/lib
 PETSCCONF_H          = ${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h
 
-PETSC_CCPPFLAGS	    = ${PETSC_CC_INCLUDES} ${PETSCFLAGS} ${CPP_FLAGS} ${CPPFLAGS}
+PETSC_CCPPFLAGS     = ${PETSC_CC_INCLUDES} ${PETSCFLAGS} ${PCPP_FLAGS} ${PPPFLAGS}
+PETSC_CXXCPPFLAGS   = ${PETSC_CC_INCLUDES} ${PETSCFLAGS} ${CXXPP_FLAGS} ${CXXPPFLAGS}
 PETSC_FCPPFLAGS	    = ${PETSC_FC_INCLUDES} ${PETSCFLAGS} ${FPP_FLAGS} ${FPPFLAGS}
 PETSC_C_SH_LIB_PATH = ${CC_LINKER_SLFLAG}${PETSC_LIB_DIR}
 PETSC_F_SH_LIB_PATH = ${FC_LINKER_SLFLAG}${PETSC_LIB_DIR}
@@ -25,6 +26,7 @@ PETSC_F_SH_LIB_PATH = ${FC_LINKER_SLFLAG}${PETSC_LIB_DIR}
 #
 INSTALL_LIB_DIR	= ${PETSC_LIB_DIR}
 CCPPFLAGS	= ${PETSC_CCPPFLAGS}
+CXXCPPFLAGS	= ${PETSC_CXXCPPFLAGS}
 FCPPFLAGS	= ${PETSC_FCPPFLAGS}
 C_SH_LIB_PATH	= ${PETSC_C_SH_LIB_PATH}
 F_SH_LIB_PATH	= ${PETSC_F_SH_LIB_PATH}
@@ -34,11 +36,12 @@ F_SH_LIB_PATH	= ${PETSC_F_SH_LIB_PATH}
 PSOURCEC  = $(SOURCEC:%=`pwd`/%)
 PSOURCECXX= $(SOURCECXX:%=`pwd`/%)
 PSOURCECU = $(SOURCECU:%=`pwd`/%)
-PETSC_COMPILE           = ${PCC} -c ${PCC_FLAGS} ${CFLAGS} ${CCPPFLAGS}  ${PSOURCEC}
-PETSC_CXXCOMPILE        = ${CXX} -c ${CXX_FLAGS} ${CXXFLAGS} ${CCPPFLAGS}  ${PSOURCECXX}
-PETSC_COMPILE_SINGLE    = ${PCC} -o $*.o -c ${PCC_FLAGS} ${CFLAGS} ${CCPPFLAGS}
-PETSC_CXXCOMPILE_SINGLE = ${CXX} -o $*.o -c ${CXX_FLAGS} ${CXXFLAGS} ${CCPPFLAGS}
-PETSC_FCOMPILE          = ${FC} -c ${FC_FLAGS} ${FFLAGS} ${FCPPFLAGS}  ${SOURCEF}
+PETSC_COMPILE           = ${PCC} -c ${PCC_FLAGS} ${PFLAGS} ${CCPPFLAGS}  ${PSOURCEC}
+PETSC_CCOMPILE          = ${CC} -c ${CC_FLAGS} ${CPPFLAGS} ${PETSC_CC_INCLUDES}
+PETSC_CXXCOMPILE        = ${CXX} -c ${CXX_FLAGS} ${CXXFLAGS} ${CXXCPPFLAGS}  ${PSOURCECXX}
+PETSC_COMPILE_SINGLE    = ${PCC} -o $*.o -c ${PCC_FLAGS} ${PFLAGS} ${CCPPFLAGS}
+PETSC_CXXCOMPILE_SINGLE = ${CXX} -o $*.o -c ${CXX_FLAGS} ${CXXFLAGS} ${CXXCPPFLAGS}
+PETSC_FCOMPILE          = ${FC} -c ${FC_FLAGS} ${FFLAGS} ${FCPPFLAGS}  ${SOURCEF} ${SOURCEF90}
 PETSC_CUCOMPILE         = ${CUDAC} ${CUDAC_FLAGS} -c --compiler-options="${PCC_FLAGS} ${CFLAGS} ${CCPPFLAGS}" ${PSOURCECU}
 PETSC_CUCOMPILE_SINGLE  = ${CUDAC} -o $*.o ${CUDAC_FLAGS} -c --compiler-options="${PCC_FLAGS} ${CFLAGS} ${CCPPFLAGS}"
 #
@@ -47,7 +50,7 @@ OBJSC        = $(SOURCEC:.c=.o)
 OBJSCXX_TMP1 = $(SOURCECXX:.C=.o)
 OBJSCXX_TMP2 = $(OBJSCXX_TMP1:.cxx=.o)
 OBJSCXX      = $(OBJSCXX_TMP2:.cpp=.o)
-OBJSF_TMP1   = $(SOURCEF:.F=.o)
+OBJSF_TMP1   = $(SOURCEF:.F=.o) ${SOURCEF90}
 OBJSF_TMP2   = $(OBJSF_TMP1:.F90=.o)
 OBJSF_TMP3   = $(OBJSF_TMP2:.F95=.o)
 OBJSF        = $(OBJSF_TMP3:.f=.o)
diff --git a/makefile b/makefile
index 31bb76c66ae..7a2bc73a70a 100644
--- a/makefile
+++ b/makefile
@@ -70,14 +70,14 @@ info:
 	-@echo "Using configuration flags:"
 	-@grep "\#define " ${PETSCCONF_H}
 	-@echo "-----------------------------------------"
-	-@echo "Using C compile: ${PETSC_COMPILE}"
+	-@echo "Using C compile: ${PETSC_CCOMPILE}"
 	-@if [  "${MPICC_SHOW}" != "" ]; then \
              printf  "mpicc -show: %b\n" "${MPICC_SHOW}";\
           fi; \
-          printf  "C compiler version: %b\n" "${C_VERSION}"; \
-	  if [ "${CXX}" != "" ]; then \
-	   echo "Using C++ compile: ${PETSC_CXXCOMPILE}";\
-	    if [ "${MPICXX_SHOW}" != "" ]; then \
+        printf  "C compiler version: %b\n" "${C_VERSION}"; \
+        if [ "${PETSC_CXXCOMPILE}" != "" ]; then \
+        echo "Using C++ compile: ${PETSC_CXXCOMPILE}";\
+        if [ "${MPICXX_SHOW}" != "" ]; then \
                printf "mpicxx -show: %b\n" "${MPICXX_SHOW}"; \
             fi;\
             printf  "C++ compiler version: %b\n" "${Cxx_VERSION}"; \
@@ -92,6 +92,9 @@ info:
 	-@if [ "${CUDAC}" != "" ]; then \
 	   echo "Using CUDA compile: ${PETSC_CUCOMPILE}";\
          fi
+	-@if [ "${CLANGUAGE}" = "CXX" ]; then \
+           echo "Using C++ compiler to compile PETSc";\
+        fi
 	-@echo "-----------------------------------------"
 	-@echo "Using C/C++ linker: ${PCC_LINKER}"
 	-@echo "Using C/C++ flags: ${PCC_LINKER_FLAGS}"
@@ -106,7 +109,7 @@ info:
         else \
            TESTDIR=`mktemp -q -d -t petscmpi-XXXXXXXX` && \
           echo '#include <mpi.h>' > $${TESTDIR}/mpitest.c && \
-           BUF=`${CPP} ${PETSC_CCPPFLAGS} $${TESTDIR}/mpitest.c |grep 'mpi\.h' | ( head -1 ; cat > /dev/null )` && \
+           BUF=`${CPP} ${PETSC_CPPFLAGS} ${PETSC_CC_INCLUDES} $${TESTDIR}/mpitest.c |grep 'mpi\.h' | ( head -1 ; cat > /dev/null )` && \
            echo Using mpi.h: $${BUF}; ${RM} -rf $${TESTDIR}; \
         fi
 	-@echo "-----------------------------------------"
@@ -160,6 +163,9 @@ test_build:
 	+@if ( [ "${ML_LIB}" != "" ] ||  [ "${TRILINOS_LIB}" != "" ] ) && [ "${PETSC_WITH_BATCH}" = "" ]; then \
           cd src/snes/examples/tutorials >/dev/null; ${OMAKE} PETSC_ARCH=${PETSC_ARCH}  PETSC_DIR=${PETSC_DIR}  DIFF=${PETSC_DIR}/lib/petsc/bin/petscdiff runex19_ml; \
          fi;
+	+@if [ "${SUITESPARSE_LIB}" != "" ] && [ "${PETSC_WITH_BATCH}" = "" ]; then \
+          cd src/snes/examples/tutorials >/dev/null; ${OMAKE} PETSC_ARCH=${PETSC_ARCH}  PETSC_DIR=${PETSC_DIR} DIFF=${PETSC_DIR}/lib/petsc/bin/petscdiff runex19_suitesparse; \
+         fi;
 	+@cd src/snes/examples/tutorials >/dev/null; ${OMAKE} PETSC_ARCH=${PETSC_ARCH}  PETSC_DIR=${PETSC_DIR} ex19.rm
 	+@if [ "${PETSC4PY}" = "yes" ]; then \
           cd src/ksp/ksp/examples/tutorials >/dev/null; \
@@ -193,7 +199,7 @@ test_usermakefile:
 
 # Compare ABI/API of two versions of PETSc library with the old one defined by PETSC_{DIR,ARCH}_ABI_OLD
 abitest:
-	@if [ "${PETSC_DIR_ABI_OLD}" == "" ] || [ "${PETSC_ARCH_ABI_OLD}" == "" ]; \
+	@if [ "${PETSC_DIR_ABI_OLD}" = "" ] || [ "${PETSC_ARCH_ABI_OLD}" = "" ]; \
 		then printf "You must set environment variables PETSC_DIR_ABI_OLD and PETSC_ARCH_ABI_OLD to run abitest\n"; \
 		exit 1; \
 	fi;
@@ -439,10 +445,11 @@ update-web:
 #  See script for details
 #
 gcov:
-	-@$(PYTHON) ${PETSC_DIR}/lib/petsc/bin/maint/gcov.py -run_gcov
+	-@$(PYTHON) ${PETSC_DIR}/lib/petsc/bin/maint/gcov.py --run_gcov --petsc_arch ${PETSC_ARCH}
 
 mergegcov:
-	-@$(PYTHON) ${PETSC_DIR}/lib/petsc/bin/maint/gcov.py -merge_gcov ${LOC} *.tar.gz
+	-@$(PYTHON) ${PETSC_DIR}/lib/petsc/bin/maint/gcov.py --merge_gcov --loc=${LOC} --petsc_arch ${PETSC_ARCH}
+
 
 ########################
 #
diff --git a/share/petsc/datafiles/meshes/unit_sphere.egadslite b/share/petsc/datafiles/meshes/unit_sphere.egadslite
new file mode 100644
index 00000000000..426c183c616
Binary files /dev/null and b/share/petsc/datafiles/meshes/unit_sphere.egadslite differ
diff --git a/src/benchmarks/streams/CUDAVersion.cu b/src/benchmarks/streams/CUDAVersion.cu
index e7bdfff18fa..f381dc7cc63 100644
--- a/src/benchmarks/streams/CUDAVersion.cu
+++ b/src/benchmarks/streams/CUDAVersion.cu
@@ -7,7 +7,7 @@
     TRIAD:      a(i) = b(i) + q*c(i)
 
   It measures the memory system on the device.
-  The implementation is in single precision.
+  The implementation is in double precision, with an option to run the single-precision kernels instead.
 
   Code based on the code developed by John D. McCalpin
   http://www.cs.virginia.edu/stream/FTP/Code/stream.c
@@ -19,15 +19,13 @@
 
   User interface motivated by bandwidthTest NVIDIA SDK example.
 */
-static char *help = "Single-Precision STREAM Benchmark implementation in CUDA\n"
-                    "Performs Copy, Scale, Add, and Triad single-precision kernels\n\n";
+static char help[] = "Double-Precision STREAM Benchmark implementation in CUDA\n Performs Copy, Scale, Add, and Triad double-precision kernels\n\n";
 
 #include 
 #include 
 #include 
 
-#define N        2000000
-#define N_DOUBLE 8000000
+#define N        10000000
 #define NTIMES   10
 
 # ifndef MIN
@@ -243,7 +241,7 @@ bool STREAM_Copy_verify_double(double *a, double *b, size_t len)
   for (idx = 0; idx < len && !bDifferent; idx++) {
     double expectedResult     = a[idx];
     double diffResultExpected = (b[idx] - expectedResult);
-    double relErrorULPS       = (fabsf(diffResultExpected)/fabsf(expectedResult))/flt_eps;
+    double relErrorULPS       = (fabs(diffResultExpected)/fabs(expectedResult))/dbl_eps;
     /* element-wise relative error determination */
     bDifferent = (relErrorULPS > 2.);
   }
@@ -351,27 +349,26 @@ bool STREAM_Triad_verify_double(double *a, double *b, double *c, double scalar,
 PetscErrorCode setupStream(PetscInt device, PetscBool runDouble, PetscBool cpuTiming);
 PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseGPUTiming);
 PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseGPUTiming);
-PetscErrorCode printResultsReadable(float times[][NTIMES]);
+PetscErrorCode printResultsReadable(float times[][NTIMES], size_t);
 
 int main(int argc, char *argv[])
 {
   PetscInt       device    = 0;
-  PetscBool      runDouble = PETSC_FALSE;
-  PetscBool      cpuTiming = PETSC_FALSE;
+  PetscBool      runDouble = PETSC_TRUE;
+  const PetscBool cpuTiming = PETSC_TRUE; // must be true
   PetscErrorCode ierr;
 
+  ierr = cudaSetDeviceFlags(cudaDeviceBlockingSync);CHKERRQ(ierr);
+
   ierr = PetscInitialize(&argc, &argv, 0, help);if (ierr) return ierr;
-  ierr = PetscPrintf(PETSC_COMM_SELF, "[Single and Double-Precision Device-Only STREAM Benchmark implementation in CUDA]\n");CHKERRQ(ierr);
-  ierr = PetscPrintf(PETSC_COMM_SELF, "%s Starting...\n\n", argv[0]);CHKERRQ(ierr);
 
   ierr = PetscOptionsBegin(PETSC_COMM_WORLD, "", "STREAM Benchmark Options", "STREAM");CHKERRQ(ierr);
   ierr = PetscOptionsBoundedInt("-device", "Specify the CUDA device to be used", "STREAM", device, &device, NULL,0);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-double",    "Also run double precision tests",   "STREAM", runDouble, &runDouble, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBool("-cputiming", "Force CPU-based timing to be used", "STREAM", cpuTiming, &cpuTiming, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();
 
   ierr = setupStream(device, runDouble, cpuTiming);
-  if (ierr >= 0) {
+  if (ierr) {
     ierr = PetscPrintf(PETSC_COMM_SELF, "\n[streamBenchmark] - results:\t%s\n\n", (ierr == 0) ? "PASSES" : "FAILED");CHKERRQ(ierr);
   }
   ierr = PetscFinalize();
@@ -404,11 +401,9 @@ PetscErrorCode setupStream(PetscInt deviceNum, PetscBool runDouble, PetscBool cp
   }
 
   cudaSetDevice(deviceNum);
-  ierr = PetscPrintf(PETSC_COMM_SELF, "Running on...\n\n");CHKERRQ(ierr);
+  // ierr = PetscPrintf(PETSC_COMM_SELF, "Running on...\n\n");CHKERRQ(ierr);
   cudaDeviceProp deviceProp;
-  if (cudaGetDeviceProperties(&deviceProp, deviceNum) == cudaSuccess) {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " Device %d: %s\n", deviceNum, deviceProp.name);CHKERRQ(ierr);
-  } else {
+  if (cudaGetDeviceProperties(&deviceProp, deviceNum) != cudaSuccess) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " Unable to determine device %d properties, exiting\n");CHKERRQ(ierr);
     return -1;
   }
@@ -420,14 +415,10 @@ PetscErrorCode setupStream(PetscInt deviceNum, PetscBool runDouble, PetscBool cp
   if (deviceProp.major == 2 && deviceProp.minor == 1) iNumThreadsPerBlock = 192; /* GF104 architecture / 48 CUDA Cores per MP */
   else iNumThreadsPerBlock = 128; /* GF100 architecture / 32 CUDA Cores per MP */
 
-  if (cpuTiming) {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " Using cpu-only timer.\n");CHKERRQ(ierr);
-  }
-
-  ierr = runStream(iNumThreadsPerBlock, cpuTiming);CHKERRQ(ierr);
   if (runDouble) {
-    ierr = cudaSetDeviceFlags(cudaDeviceBlockingSync);CHKERRQ(ierr);
     ierr = runStreamDouble(iNumThreadsPerBlock, cpuTiming);CHKERRQ(ierr);
+  } else {
+    ierr = runStream(iNumThreadsPerBlock, cpuTiming);CHKERRQ(ierr);
   }
   PetscFunctionReturn(0);
 }
@@ -455,9 +446,6 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   dim3 dimGrid(N/dimBlock.x); /* (N/dimBlock.x,1,1) */
   if (N % dimBlock.x != 0) dimGrid.x+=1;
 
-  ierr = PetscPrintf(PETSC_COMM_SELF, " Array size (single precision) = %u\n",N);CHKERRQ(ierr);
-  ierr = PetscPrintf(PETSC_COMM_SELF, " using %u threads per block, %u blocks\n",dimBlock.x,dimGrid.x);CHKERRQ(ierr);
-
   /* Initialize memory on the device */
   set_array<<<dimGrid,dimBlock>>>(d_a, 2.f, N);
   set_array<<<dimGrid,dimBlock>>>(d_b, .5f, N);
@@ -465,120 +453,86 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
 
   /* --- MAIN LOOP --- repeat test cases NTIMES times --- */
   PetscLogDouble cpuTimer = 0.0;
-  cudaEvent_t    start, stop;
-
-  /* both timers report msec */
-  ierr = cudaEventCreate(&start);CHKERRQ(ierr); /* gpu timer facility */
-  ierr = cudaEventCreate(&stop);CHKERRQ(ierr);  /* gpu timer facility */
 
   scalar=3.0f;
   for (k = 0; k < NTIMES; ++k) {
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Copy<<<dimGrid,dimBlock>>>(d_a, d_c, N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
-    //get the total elapsed time in ms
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[0][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[0][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[0][k] = cpuTimer*1.e3; // millisec
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Copy_Optimized<<<dimGrid,dimBlock>>>(d_a, d_c, N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[1][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[1][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[1][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Scale<<<dimGrid,dimBlock>>>(d_b, d_c, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[2][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[2][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[2][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Scale_Optimized<<<dimGrid,dimBlock>>>(d_b, d_c, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[3][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[3][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[3][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
+    // ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Add<<<dimGrid,dimBlock>>>(d_a, d_b, d_c,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);    // ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
+    // ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[4][k] = cpuTimer;
+    if (bDontUseGPUTiming) times[4][k] = cpuTimer*1.e3;
     else {
-      ierr = cudaEventElapsedTime(&times[4][k], start, stop);CHKERRQ(ierr);
+      // ierr = cudaEventElapsedTime(&times[4][k], start, stop);CHKERRQ(ierr);
     }
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Add_Optimized<<<dimGrid,dimBlock>>>(d_a, d_b, d_c,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[5][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[5][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[5][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Triad<<<dimGrid,dimBlock>>>(d_b, d_c, d_a, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[6][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[6][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[6][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Triad_Optimized<<<dimGrid,dimBlock>>>(d_b, d_c, d_a, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[7][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[7][k], start, stop);CHKERRQ(ierr);
-    }
-
+    if (bDontUseGPUTiming) times[7][k] = cpuTimer*1.e3;
   }
 
-  /* verify kernels */
+  if (1) { /* verify kernels */
   float *h_a, *h_b, *h_c;
   bool  errorSTREAMkernel = true;
 
@@ -613,8 +567,6 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy:\t\tError detected in device STREAM_Copy, exiting\n");CHKERRQ(ierr);
     exit(-2000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -629,11 +581,10 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy_Optimized:\tError detected in device STREAM_Copy_Optimized, exiting\n");CHKERRQ(ierr);
     exit(-3000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy_Optimized:\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
+  set_array<<<dimGrid,dimBlock>>>(d_a, 2.f, N);
   set_array<<<dimGrid,dimBlock>>>(d_b, .5f, N);
   set_array<<<dimGrid,dimBlock>>>(d_c, .5f, N);
 
@@ -644,8 +595,6 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Scale:\t\tError detected in device STREAM_Scale, exiting\n");CHKERRQ(ierr);
     exit(-4000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Scale:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -661,8 +610,6 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Add:\t\tError detected in device STREAM_Add, exiting\n");CHKERRQ(ierr);
     exit(-5000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Add:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -678,21 +625,20 @@ PetscErrorCode runStream(const PetscInt iNumThreadsPerBlock, PetscBool bDontUseG
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Triad:\t\tError detected in device STREAM_Triad, exiting\n");CHKERRQ(ierr);
     exit(-6000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Triad:\t\tPass\n");CHKERRQ(ierr);
   }
 
+  free(h_a);
+  free(h_b);
+  free(h_c);
+  }
   /* continue from here */
-  printResultsReadable(times);
-
-  //clean up timers
-  ierr = cudaEventDestroy(stop);CHKERRQ(ierr);
-  ierr = cudaEventDestroy(start);CHKERRQ(ierr);
+  printResultsReadable(times, sizeof(float));
 
   /* Free memory on device */
   ierr = cudaFree(d_a);CHKERRQ(ierr);
   ierr = cudaFree(d_b);CHKERRQ(ierr);
   ierr = cudaFree(d_c);CHKERRQ(ierr);
+  
   PetscFunctionReturn(0);
 }
 
@@ -716,9 +662,6 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   dim3 dimGrid(N/dimBlock.x); /* (N/dimBlock.x,1,1) */
   if (N % dimBlock.x != 0) dimGrid.x+=1;
 
-  ierr = PetscPrintf(PETSC_COMM_SELF, " Array size (double precision) = %u\n",N);CHKERRQ(ierr);
-  ierr = PetscPrintf(PETSC_COMM_SELF, " using %u threads per block, %u blocks\n",dimBlock.x,dimGrid.x);CHKERRQ(ierr);
-
   /* Initialize memory on the device */
   set_array_double<<<dimGrid,dimBlock>>>(d_a, 2., N);
   set_array_double<<<dimGrid,dimBlock>>>(d_b, .5, N);
@@ -726,122 +669,86 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
 
   /* --- MAIN LOOP --- repeat test cases NTIMES times --- */
   PetscLogDouble cpuTimer = 0.0;
-  cudaEvent_t    start, stop;
-
-  /* both timers report msec */
-  ierr = cudaEventCreate(&start);CHKERRQ(ierr); /* gpu timer facility */
-  ierr = cudaEventCreate(&stop);CHKERRQ(ierr);  /* gpu timer facility */
 
   scalar=3.0;
   for (k = 0; k < NTIMES; ++k) {
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Copy_double<<<dimGrid,dimBlock>>>(d_a, d_c, N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     if (bDontUseGPUTiming) {
       PetscTimeAdd(&cpuTimer);
-      times[0][k] = cpuTimer;
-    } else {
-      ierr = cudaEventElapsedTime(&times[0][k], start, stop);CHKERRQ(ierr);
+      times[0][k] = cpuTimer*1.e3;
     }
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Copy_Optimized_double<<<dimGrid,dimBlock>>>(d_a, d_c, N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     if (bDontUseGPUTiming) {
       PetscTimeAdd(&cpuTimer);
-      times[1][k] = cpuTimer;
-    } else {
-      ierr = cudaEventElapsedTime(&times[1][k], start, stop);CHKERRQ(ierr);
+      times[1][k] = cpuTimer*1.e3;
     }
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Scale_double<<<dimGrid,dimBlock>>>(d_b, d_c, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[2][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[2][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[2][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Scale_Optimized_double<<<dimGrid,dimBlock>>>(d_b, d_c, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[3][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[2][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[3][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Add_double<<<dimGrid,dimBlock>>>(d_a, d_b, d_c,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[4][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[3][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[4][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Add_Optimized_double<<<dimGrid,dimBlock>>>(d_a, d_b, d_c,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[5][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[3][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[5][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Triad_double<<<dimGrid,dimBlock>>>(d_b, d_c, d_a, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[6][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[4][k], start, stop);CHKERRQ(ierr);
-    }
+    if (bDontUseGPUTiming) times[6][k] = cpuTimer*1.e3;
 
     cpuTimer = 0.0;
     PetscTimeSubtract(&cpuTimer);
-    ierr = cudaEventRecord(start, 0);CHKERRQ(ierr);
     STREAM_Triad_Optimized_double<<<dimGrid,dimBlock>>>(d_b, d_c, d_a, scalar,  N);
-    ierr = cudaEventRecord(stop, 0);CHKERRQ(ierr);
-    ierr = cudaEventSynchronize(stop);CHKERRQ(ierr);
+    cudaStreamSynchronize(NULL);
+    ierr = MPI_Barrier(MPI_COMM_WORLD);CHKERRQ(ierr);
     //get the total elapsed time in ms
     PetscTimeAdd(&cpuTimer);
-    if (bDontUseGPUTiming) times[7][k] = cpuTimer;
-    else {
-      ierr = cudaEventElapsedTime(&times[4][k], start, stop);CHKERRQ(ierr);
-    }
-
+    if (bDontUseGPUTiming) times[7][k] = cpuTimer*1.e3;
   }
 
-  /* verify kernels */
+  if (1) { /* verify kernels */
   double *h_a, *h_b, *h_c;
   bool   errorSTREAMkernel = true;
 
@@ -876,8 +783,6 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy:\t\tError detected in device STREAM_Copy, exiting\n");CHKERRQ(ierr);
     exit(-2000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -892,8 +797,6 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy_Optimized:\tError detected in device STREAM_Copy_Optimized, exiting\n");CHKERRQ(ierr);
     exit(-3000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Copy_Optimized:\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -907,8 +810,6 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Scale:\t\tError detected in device STREAM_Scale, exiting\n");CHKERRQ(ierr);
     exit(-4000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Scale:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -924,8 +825,6 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Add:\t\tError detected in device STREAM_Add, exiting\n");CHKERRQ(ierr);
     exit(-5000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Add:\t\tPass\n");CHKERRQ(ierr);
   }
 
   /* Initialize memory on the device */
@@ -941,60 +840,78 @@ PetscErrorCode runStreamDouble(const PetscInt iNumThreadsPerBlock, PetscBool bDo
   if (errorSTREAMkernel) {
     ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Triad:\t\tError detected in device STREAM_Triad, exiting\n");CHKERRQ(ierr);
     exit(-6000);
-  } else {
-    ierr = PetscPrintf(PETSC_COMM_SELF, " device STREAM_Triad:\t\tPass\n");CHKERRQ(ierr);
   }
 
+  free(h_a);
+  free(h_b);
+  free(h_c);
+  }
   /* continue from here */
-  printResultsReadable(times);
-
-  //clean up timers
-  ierr = cudaEventDestroy(stop);CHKERRQ(ierr);
-  ierr = cudaEventDestroy(start);CHKERRQ(ierr);
+  printResultsReadable(times,sizeof(double));
 
   /* Free memory on device */
   ierr = cudaFree(d_a);CHKERRQ(ierr);
   ierr = cudaFree(d_b);CHKERRQ(ierr);
   ierr = cudaFree(d_c);CHKERRQ(ierr);
+
   PetscFunctionReturn(0);
 }
 
 ///////////////////////////////////////////////////////////////////////////
 //Print Results to Screen and File
 ///////////////////////////////////////////////////////////////////////////
-PetscErrorCode printResultsReadable(float times[][NTIMES])
+PetscErrorCode printResultsReadable(float times[][NTIMES], const size_t bsize)
 {
   PetscErrorCode ierr;
   PetscInt       j, k;
   float          avgtime[8]          = {0., 0., 0., 0., 0., 0., 0., 0.};
   float          maxtime[8]          = {0., 0., 0., 0., 0., 0., 0., 0.};
   float          mintime[8]          = {1e30,1e30,1e30,1e30,1e30,1e30,1e30,1e30};
-  char           *label[8]           = {"Copy:      ", "Copy Opt.: ", "Scale:     ", "Scale Opt: ", "Add:       ", "Add Opt:   ", "Triad:     ", "Triad Opt: "};
-  float          bytes_per_kernel[8] = {
-    2. * sizeof(float) * N,
-    2. * sizeof(float) * N,
-    2. * sizeof(float) * N,
-    2. * sizeof(float) * N,
-    3. * sizeof(float) * N,
-    3. * sizeof(float) * N,
-    3. * sizeof(float) * N,
-    3. * sizeof(float) * N
+  // char           *label[8]           = {"Copy:      ", "Copy Opt.: ", "Scale:     ", "Scale Opt: ", "Add:       ", "Add Opt:   ", "Triad:     ", "Triad Opt: "};
+  const float    bytes_per_kernel[8] = {
+    2. * bsize * N,
+    2. * bsize * N,
+    2. * bsize * N,
+    2. * bsize * N,
+    3. * bsize * N,
+    3. * bsize * N,
+    3. * bsize * N,
+    3. * bsize * N
   };
-
+  double         rate,irate;
+  int            rank,size;
   PetscFunctionBegin;
+  ierr = MPI_Comm_rank(MPI_COMM_WORLD,&rank);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(MPI_COMM_WORLD,&size);CHKERRQ(ierr);
   /* --- SUMMARY --- */
-  for (k = 1; k < NTIMES; ++k)   /* note -- skip first iteration */
+  for (k = 0; k < NTIMES; ++k) {
     for (j = 0; j < 8; ++j) {
-      avgtime[j] = avgtime[j] + (1.e-03f * times[j][k]);
+      avgtime[j] = avgtime[j] + (1.e-03f * times[j][k]); // millisec --> sec
       mintime[j] = MIN(mintime[j], (1.e-03f * times[j][k]));
       maxtime[j] = MAX(maxtime[j], (1.e-03f * times[j][k]));
     }
-
-  ierr = PetscPrintf(PETSC_COMM_SELF, "Function    Rate (MB/s)    Avg time      Min time      Max time\n");CHKERRQ(ierr);
-
+  }
   for (j = 0; j < 8; ++j) {
     avgtime[j] = avgtime[j]/(float)(NTIMES-1);
-    ierr       = PetscPrintf(PETSC_COMM_SELF, "%s%11.4f  %11.6f  %12.6f  %12.6f\n", label[j], 1.0E-06 * bytes_per_kernel[j]/mintime[j], avgtime[j], mintime[j], maxtime[j]);CHKERRQ(ierr);
   }
+  j = 7;
+  irate = 1.0E-06 * bytes_per_kernel[j]/mintime[j];
+  ierr = MPI_Reduce(&irate,&rate,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD);
+  if (!rank) {
+    FILE *fd;
+    if (size == 1) {
+      printf("%d %11.4f   Rate (MB/s)\n",size, rate);
+      fd = fopen("flops","w");
+      fprintf(fd,"%g\n",rate);
+      fclose(fd);
+    } else {
+      double prate;
+      fd = fopen("flops","r");
+      fscanf(fd,"%lg",&prate);
+      fclose(fd);
+      printf("%d %11.4f   Rate (MB/s) %g \n", size, rate, rate/prate);
+    }
+  }
+
   PetscFunctionReturn(0);
 }
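The benchmark above now times each kernel from the host: start a PetscLogDouble timer, launch the kernel, synchronize the default stream, barrier across ranks, stop the timer, and store milliseconds so the units match the old cudaEvent path; the reported figure is then 1.0E-06 * bytes_per_kernel / mintime in MB/s. Below is a minimal sketch of that idiom, assuming a CUDA-enabled PETSc build; KernelCopy, TimeOneLaunch and the d_a/d_c array names are placeholders for illustration, not part of the patch.

#include <petscsys.h>
#include <petsctime.h>
#include <cuda_runtime.h>

/* Placeholder copy kernel standing in for the STREAM_* kernels above */
__global__ void KernelCopy(const double *a, double *c, size_t n)
{
  size_t i = blockIdx.x*blockDim.x + threadIdx.x;
  if (i < n) c[i] = a[i];
}

/* Host-side timing of one launch, mirroring the synchronize-then-time idiom above;
   returns elapsed milliseconds to match the old cudaEvent units */
static PetscLogDouble TimeOneLaunch(const double *d_a, double *d_c, size_t n)
{
  PetscLogDouble t = 0.0;
  dim3           block(128);
  dim3           grid((unsigned int)((n + block.x - 1)/block.x));

  PetscTimeSubtract(&t);                    /* t -= wall clock */
  KernelCopy<<<grid,block>>>(d_a, d_c, n);
  cudaStreamSynchronize(NULL);              /* wait for the default stream to drain */
  MPI_Barrier(MPI_COMM_WORLD);              /* line ranks up before reading the clock */
  PetscTimeAdd(&t);                         /* t += wall clock, so t is elapsed seconds */
  return t*1.e3;
}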
diff --git a/src/benchmarks/streams/makefile b/src/benchmarks/streams/makefile
index 65c7f579e15..d484968b81f 100644
--- a/src/benchmarks/streams/makefile
+++ b/src/benchmarks/streams/makefile
@@ -1,12 +1,12 @@
 
 ALL:
 
-CFLAGS	      = 
+CFLAGS	      =
 FFLAGS	      =
 CPPFLAGS      =
 FPPFLAGS      =
 LOCDIR        = src/benchmarks/streams/
-EXAMPLESC     = BasicVersion.c MPIVersion.c OpenMPVersion.c SSEVersion.c PthreadVersion.c
+EXAMPLESC     = BasicVersion.c MPIVersion.c OpenMPVersion.c SSEVersion.c PthreadVersion.c CUDAVersion.cu
 EXAMPLESF     =
 TESTS         = BasicVersion OpenMPVersion
 MANSEC        = Sys
@@ -15,15 +15,19 @@ include ${PETSC_DIR}/lib/petsc/conf/variables
 include ${PETSC_DIR}/lib/petsc/conf/rules
 include ${PETSC_DIR}/lib/petsc/conf/test
 
-BasicVersion: BasicVersion.o 
+BasicVersion: BasicVersion.o
 	-@${CLINKER} -o BasicVersion BasicVersion.o ${PETSC_LIB}
 	@${RM} -f BasicVersion.o
 
-MPIVersion: MPIVersion.o 
+MPIVersion: MPIVersion.o
 	-@${CLINKER} -o MPIVersion MPIVersion.o ${PETSC_LIB}
 	@${RM} -f MPIVersion.o
 
-OpenMPVersion: OpenMPVersion.o 
+CUDAVersion: CUDAVersion.o
+	-@${CLINKER} -o CUDAVersion CUDAVersion.o ${PETSC_LIB}
+	@${RM} -f CUDAVersion.o
+
+OpenMPVersion: OpenMPVersion.o
 	-@${CLINKER} -o OpenMPVersion OpenMPVersion.o
 	@${RM} -f OpenMPVersion.o
 
@@ -31,7 +35,7 @@ SSEVersion: SSEVersion.o
 	-${CLINKER} -o $@ $< ${PETSC_LIB}
 	${RM} -f $<
 
-PthreadVersion: PthreadVersion.o 
+PthreadVersion: PthreadVersion.o
 	-@${CLINKER} -o PthreadVersion PthreadVersion.o ${PETSC_LIB}
 	@${RM} -f PthreadVersion.o
 
@@ -44,7 +48,22 @@ mpistream:  MPIVersion
 	  ${MPIEXEC} ${MPI_BINDING} -n $${i} ./MPIVersion | tee -a scaling.log; \
         done
 	-@echo "------------------------------------------------"
-	-@${PYTHON} process.py MPI fileoutput 
+	-@${PYTHON} process.py MPI fileoutput
+
+# Works on SUMMIT
+cudastreamjsrun:  CUDAVersion
+	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with [PETSC_OPTIONS=-process_view] make streams NPMAX= [MPI_BINDING='-bind-to core -map-by numa']\n or       [I_MPI_PIN_PROCESSOR_LIST=:map=scatter] [PETSC_OPTIONS=-process_view] make streams NPMAX=\n"; exit 1 ; fi
+	-@printf "" > scaling.log
+	-@printf "Running streams with '${MPIEXEC} ${MPI_BINDING}' using 'NPMAX=${NPMAX}' \n"
+	-@i=0; while [ $${i} -lt ${NPMAX} ] && [ $${i} -lt 7 ]; do i=`expr $${i} + 1`; \
+	  ${MPIEXEC} ${MPI_BINDING} -n 1 -c$${i} -a$${i} -g1 ./CUDAVersion | tee -a scaling.log; \
+        done
+	-@n=1; i=7; while [ $${i} -lt ${NPMAX} ]; do i=`expr $${i} + 7`; n=`expr $${n} + 1`; \
+	       c=5; while [ $${c} -lt 7 ]; do c=`expr $${c} + 1`; \
+	  ${MPIEXEC} ${MPI_BINDING} -n $${n} -c$${c} -a$${c} -g1 ./CUDAVersion | tee -a scaling.log; \
+        done; done
+	-@echo "------------------------------------------------"
+	-@${PYTHON} process.py CUDA fileoutput
 
 openmpstream:  OpenMPVersion
 	@if [ "${NPMAX}foo" = "foo" ]; then echo "---------"; printf " Run with make openmpstream NPMAX=\n"; exit 1 ; fi
diff --git a/src/contrib/fun3d/comp/flow.c b/src/contrib/fun3d/comp/flow.c
index 776ff723ef2..83752979a23 100644
--- a/src/contrib/fun3d/comp/flow.c
+++ b/src/contrib/fun3d/comp/flow.c
@@ -988,7 +988,7 @@ int GetLocalOrdering(GRID *grid)
   l2a  = grid->loc2glo;
   ICALLOC(nvertices, &grid->loc2pet);
   l2p  = grid->loc2pet;
-  ierr = PetscMemcpy(l2p,l2a,nvertices*sizeof(int));CHKERRQ(ierr);CHKERRQ(ierr);
+  ierr = PetscMemcpy(l2p,l2a,nvertices*sizeof(int));CHKERRQ(ierr);
   ierr = AOApplicationToPetsc(grid->ao,nvertices,l2p);CHKERRQ(ierr);
 
 /* Map the 'ja' array in petsc ordering */
@@ -1534,8 +1534,8 @@ int GetLocalOrdering(GRID *grid)
    ICALLOC(nvertices, &grid->loc2glo);
    ierr = PetscMemcpy(grid->loc2pet,l2p,nvertices*sizeof(int));CHKERRQ(ierr);
    ierr = PetscMemcpy(grid->loc2glo,l2a,nvertices*sizeof(int));CHKERRQ(ierr);
-   ierr = PetscFree(l2a);CHKERRQ(ierr);CHKERRQ(ierr);
-   ierr = PetscFree(l2p);CHKERRQ(ierr);CHKERRQ(ierr);*/
+   ierr = PetscFree(l2a);CHKERRQ(ierr);
+   ierr = PetscFree(l2p);CHKERRQ(ierr);*/
 
   grid->nnodesLoc  = nnodesLoc;
   grid->nedgeLoc   = nedgeLoc;
diff --git a/src/dm/dt/dualspace/impls/bdm/dspacebdm.c b/src/dm/dt/dualspace/impls/bdm/dspacebdm.c
index 4194669ac55..1b03043ff61 100644
--- a/src/dm/dt/dualspace/impls/bdm/dspacebdm.c
+++ b/src/dm/dt/dualspace/impls/bdm/dspacebdm.c
@@ -348,13 +348,15 @@ static PetscErrorCode PetscDualSpaceSetUp_BDM(PetscDualSpace sp)
       isCell = ((d == dim) && !faceSp) ? PETSC_TRUE : PETSC_FALSE;
       if (isFace) {
         PetscQuadrature  fq;
+        PetscTabulation  T;
         PetscReal       *B, n[3];
         const PetscReal *fqpoints, *fqweights;
         PetscInt         faceDim = PetscMax(dim-1, 1), Nq, q, fdim, fb;
 
         if (cdim == 1) {n[0] = 0.; n[1] = 1.;}
         else           {ierr = DMPlexComputeCellGeometryFVM(dm, p, NULL, NULL, n);CHKERRQ(ierr);}
-        ierr = PetscFEGetDefaultTabulation(faceFE, &B, NULL, NULL);CHKERRQ(ierr);
+        ierr = PetscFEGetCellTabulation(faceFE, &T);CHKERRQ(ierr);
+        B = T->T[0];
         ierr = PetscFEGetQuadrature(faceFE, &fq);CHKERRQ(ierr);
         ierr = PetscQuadratureGetData(fq, NULL, NULL, &Nq, &fqpoints, &fqweights);CHKERRQ(ierr);
         /* Create a dual basis vector for each basis function */
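The hunk above replaces the removed PetscFEGetDefaultTabulation() call with the PetscTabulation object returned by PetscFEGetCellTabulation(), whose T[0] array holds the basis values at the default quadrature points (T[1] would hold the derivatives). A small sketch of the new access pattern, assuming fe is an already set-up PetscFE; the function name and the printed quantity are illustrative only.

#include <petscfe.h>

static PetscErrorCode PrintFirstBasisValue(PetscFE fe)
{
  PetscTabulation  T;
  const PetscReal *B;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr); /* borrowed from the FE, do not destroy */
  B    = T->T[0];                                        /* values at the default quadrature points */
  ierr = PetscPrintf(PETSC_COMM_SELF, "B[0] = %g\n", (double)B[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}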
diff --git a/src/dm/dt/dualspace/interface/dualspace.c b/src/dm/dt/dualspace/interface/dualspace.c
index d2b52ab9a67..f914d9c54b8 100644
--- a/src/dm/dt/dualspace/interface/dualspace.c
+++ b/src/dm/dt/dualspace/interface/dualspace.c
@@ -209,6 +209,29 @@ static PetscErrorCode PetscDualSpaceView_ASCII(PetscDualSpace sp, PetscViewer v)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscDualSpaceViewFromOptions - View from Options
+
+   Collective on PetscDualSpace
+
+   Input Parameters:
++  A - the PetscDualSpace object
+.  obj - Optional object, provides prefix
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscDualSpace, PetscDualSpaceView(), PetscObjectViewFromOptions(), PetscDualSpaceCreate()
+@*/
+PetscErrorCode  PetscDualSpaceViewFromOptions(PetscDualSpace A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCDUALSPACE_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@
   PetscDualSpaceView - Views a PetscDualSpace
 
@@ -220,7 +243,7 @@ static PetscErrorCode PetscDualSpaceView_ASCII(PetscDualSpace sp, PetscViewer v)
 
   Level: beginner
 
-.seealso PetscDualSpaceDestroy()
+.seealso PetscDualSpaceDestroy(), PetscDualSpace
 @*/
 PetscErrorCode PetscDualSpaceView(PetscDualSpace sp, PetscViewer v)
 {
@@ -249,7 +272,7 @@ PetscErrorCode PetscDualSpaceView(PetscDualSpace sp, PetscViewer v)
 
   Level: intermediate
 
-.seealso PetscDualSpaceView()
+.seealso PetscDualSpaceView(), PetscDualSpace, PetscObjectSetFromOptions()
 @*/
 PetscErrorCode PetscDualSpaceSetFromOptions(PetscDualSpace sp)
 {
@@ -309,7 +332,7 @@ PetscErrorCode PetscDualSpaceSetFromOptions(PetscDualSpace sp)
 
   Level: intermediate
 
-.seealso PetscDualSpaceView(), PetscDualSpaceDestroy()
+.seealso PetscDualSpaceView(), PetscDualSpaceDestroy(), PetscDualSpace
 @*/
 PetscErrorCode PetscDualSpaceSetUp(PetscDualSpace sp)
 {
@@ -334,7 +357,7 @@ PetscErrorCode PetscDualSpaceSetUp(PetscDualSpace sp)
 
   Level: beginner
 
-.seealso PetscDualSpaceView()
+.seealso PetscDualSpaceView(), PetscDualSpace, PetscDualSpaceCreate()
 @*/
 PetscErrorCode PetscDualSpaceDestroy(PetscDualSpace *sp)
 {
diff --git a/src/dm/dt/examples/tests/ex5.c b/src/dm/dt/examples/tests/ex5.c
index 1df10e69706..9c530fefb02 100644
--- a/src/dm/dt/examples/tests/ex5.c
+++ b/src/dm/dt/examples/tests/ex5.c
@@ -43,7 +43,7 @@ int main(int argc, char **argv)
       PetscReal       xi0[3] = {-1., -1., -1.};
       PetscScalar     *outSub, *outFull;
       PetscReal       *testSub, *testFull;
-      PetscReal       *Bsub, *Bfull;
+      PetscTabulation Tsub, Tfull;
       PetscReal       J[9], detJ;
       PetscInt        i, j;
       PetscSection    sectionFull;
@@ -87,12 +87,12 @@ int main(int argc, char **argv)
       ierr = PetscMalloc1(nSub,&arraySub);CHKERRQ(ierr);
       ierr = DMPlexVecGetClosure(dm,sectionFull,vecFull,point,&nSub,&arraySub);CHKERRQ(ierr);
       /* get the tabulations */
-      ierr = PetscFEGetTabulation(traceFE,1,testSub,&Bsub,NULL,NULL);CHKERRQ(ierr);
-      ierr = PetscFEGetTabulation(fe,1,testFull,&Bfull,NULL,NULL);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(traceFE,1,1,testSub,0,&Tsub);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(fe,1,1,testFull,0,&Tfull);CHKERRQ(ierr);
       for (i = 0; i < Nc; i++) {
         outSub[i] = 0.0;
         for (j = 0; j < nSub; j++) {
-          outSub[i] += Bsub[j * Nc + i] * arraySub[j];
+          outSub[i] += Tsub->T[0][j * Nc + i] * arraySub[j];
         }
       }
       ierr = VecGetArray(vecFull,&arrayFull);CHKERRQ(ierr);
@@ -102,7 +102,7 @@ int main(int argc, char **argv)
 
         outFull[i] = 0.0;
         for (j = 0; j < nFull; j++) {
-          outFull[i] += Bfull[j * Nc + i] * arrayFull[j];
+          outFull[i] += Tfull->T[0][j * Nc + i] * arrayFull[j];
         }
         diff = outFull[i] - outSub[i];
         err += PetscRealPart(PetscConj(diff) * diff);
@@ -112,8 +112,8 @@ int main(int argc, char **argv)
         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Trace FE error %g\n",err);
       }
       ierr = VecRestoreArray(vecFull,&arrayFull);CHKERRQ(ierr);
-      ierr = PetscFERestoreTabulation(fe,1,testFull,&Bfull,NULL,NULL);CHKERRQ(ierr);
-      ierr = PetscFERestoreTabulation(traceFE,1,testSub,&Bsub,NULL,NULL);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&Tfull);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&Tsub);CHKERRQ(ierr);
       /* clean up */
       ierr = PetscFree(arraySub);CHKERRQ(ierr);
       ierr = PetscSectionDestroy(&sectionFull);CHKERRQ(ierr);
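ex5.c now tabulates at caller-chosen reference points with PetscFECreateTabulation() and releases the result with PetscTabulationDestroy(), instead of the old Get/Restore pair. A condensed sketch of that create/use/destroy cycle for a single-component element at one point; fe, Nb (number of basis functions), coeffs and the EvaluateAtPoint name are assumptions made for illustration.

#include <petscfe.h>

static PetscErrorCode EvaluateAtPoint(PetscFE fe, PetscInt Nb, const PetscScalar coeffs[], const PetscReal point[], PetscScalar *value)
{
  PetscTabulation T;
  PetscInt        j;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr   = PetscFECreateTabulation(fe, 1, 1, point, 0, &T);CHKERRQ(ierr); /* 1 replica, 1 point, values only (K = 0) */
  *value = 0.0;
  for (j = 0; j < Nb; j++) *value += T->T[0][j]*coeffs[j];                /* Nc = 1 assumed, so T[0] is indexed by basis function only */
  ierr   = PetscTabulationDestroy(&T);CHKERRQ(ierr);                      /* caller owns tabulations made with Create */
  PetscFunctionReturn(0);
}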
diff --git a/src/dm/dt/examples/tests/ex7.c b/src/dm/dt/examples/tests/ex7.c
new file mode 100644
index 00000000000..d5f5a2ad6d5
--- /dev/null
+++ b/src/dm/dt/examples/tests/ex7.c
@@ -0,0 +1,527 @@
+static char help[] = "Test the PetscDTAltV interface for k-forms (alternating k-linear maps).\n\n";
+
+#include <petscdt.h>
+#include <petscviewer.h>
+
+static PetscErrorCode CheckPullback(PetscInt N, PetscInt M, const PetscReal *L, PetscInt k,
+                                    const PetscReal *w, PetscReal *x, PetscBool verbose, PetscViewer viewer)
+{
+  PetscInt        Nk, Mk, i, j, l;
+  PetscReal       *Lstarw, *Lx, *Lstar, *Lstarwcheck, wLx, Lstarwx;
+  PetscReal       diff, diffMat, normMat;
+  PetscReal       *walloc = NULL;
+  const PetscReal *ww = NULL;
+  PetscBool       negative = (PetscBool) (k < 0);
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  k = PetscAbsInt(k);
+  ierr = PetscDTBinomialInt(N, k, &Nk);CHKERRQ(ierr);
+  ierr = PetscDTBinomialInt(M, k, &Mk);CHKERRQ(ierr);
+  if (negative) {
+    ierr = PetscMalloc1(Mk, &walloc);CHKERRQ(ierr);
+    ierr = PetscDTAltVStar(M, M - k, 1, w, walloc);CHKERRQ(ierr);
+    ww = walloc;
+  } else {
+    ww = w;
+  }
+  ierr = PetscMalloc2(Nk, &Lstarw, (M*k), &Lx);CHKERRQ(ierr);
+  ierr = PetscMalloc2(Nk * Mk, &Lstar, Nk, &Lstarwcheck);CHKERRQ(ierr);
+  ierr = PetscDTAltVPullback(N, M, L, negative ? -k : k, w, Lstarw);CHKERRQ(ierr);
+  ierr = PetscDTAltVPullbackMatrix(N, M, L, negative ? -k : k, Lstar);CHKERRQ(ierr);
+  if (negative) {
+    PetscReal *sLsw;
+
+    ierr = PetscMalloc1(Nk, &sLsw);CHKERRQ(ierr);
+    ierr = PetscDTAltVStar(N, N - k, 1, Lstarw, sLsw);CHKERRQ(ierr);
+    ierr = PetscDTAltVApply(N, k, sLsw, x, &Lstarwx);CHKERRQ(ierr);
+    ierr = PetscFree(sLsw);CHKERRQ(ierr);
+  } else {
+    ierr = PetscDTAltVApply(N, k, Lstarw, x, &Lstarwx);CHKERRQ(ierr);
+  }
+  for (l = 0; l < k; l++) {
+    for (i = 0; i < M; i++) {
+      PetscReal sum = 0.;
+
+      for (j = 0; j < N; j++) sum += L[i * N + j] * x[l * N + j];
+      Lx[l * M + i] = sum;
+    }
+  }
+  diffMat = 0.;
+  normMat = 0.;
+  for (i = 0; i < Nk; i++) {
+    PetscReal sum = 0.;
+    for (j = 0; j < Mk; j++) {
+      sum += Lstar[i * Mk + j] * w[j];
+    }
+    Lstarwcheck[i] = sum;
+    diffMat += PetscSqr(PetscAbsReal(Lstarwcheck[i] - Lstarw[i]));
+    normMat += PetscSqr(Lstarwcheck[i]) +  PetscSqr(Lstarw[i]);
+  }
+  diffMat = PetscSqrtReal(diffMat);
+  normMat = PetscSqrtReal(normMat);
+  if (verbose) {
+    ierr = PetscViewerASCIIPrintf(viewer, "L:\n");CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+    if (M*N > 0) {ierr = PetscRealView(M*N, L, viewer);CHKERRQ(ierr);}
+    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+
+    ierr = PetscViewerASCIIPrintf(viewer, "L*:\n");CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+    if (Nk * Mk > 0) {ierr = PetscRealView(Nk * Mk, Lstar, viewer);CHKERRQ(ierr);}
+    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+
+    ierr = PetscViewerASCIIPrintf(viewer, "L*w:\n");CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+    if (Nk > 0) {ierr = PetscRealView(Nk, Lstarw, viewer);CHKERRQ(ierr);}
+    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+  }
+  ierr = PetscDTAltVApply(M, k, ww, Lx, &wLx);CHKERRQ(ierr);
+  diff = PetscAbsReal(wLx - Lstarwx);
+  if (diff > 10. * PETSC_SMALL * (PetscAbsReal(wLx) + PetscAbsReal(Lstarwx))) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "pullback check: pullback does not commute with application: w(Lx)(%g) != (L* w)(x)(%g)", wLx, Lstarwx);
+  if (diffMat > PETSC_SMALL * normMat) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "pullback check: pullback matrix does not match the matrix-free result");
+  ierr = PetscFree2(Lstar, Lstarwcheck);CHKERRQ(ierr);
+  ierr = PetscFree2(Lstarw, Lx);CHKERRQ(ierr);
+  ierr = PetscFree(walloc);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+int main(int argc, char **argv)
+{
+  PetscInt       i, numTests = 5, n[5] = {0, 1, 2, 3, 4};
+  PetscBool      verbose = PETSC_FALSE;
+  PetscRandom    rand;
+  PetscViewer    viewer;
+  PetscErrorCode ierr;
+
+  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
+  ierr = PetscOptionsBegin(PETSC_COMM_WORLD,"","Options for exterior algebra tests","none");CHKERRQ(ierr);
+  ierr = PetscOptionsIntArray("-N", "Up to 5 vector space dimensions to test","ex7.c",n,&numTests,NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-verbose", "Verbose test output","ex7.c",verbose,&verbose,NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEnd();
+  ierr = PetscRandomCreate(PETSC_COMM_SELF, &rand);CHKERRQ(ierr);
+  ierr = PetscRandomSetInterval(rand, -1., 1.);CHKERRQ(ierr);
+  ierr = PetscRandomSetFromOptions(rand);CHKERRQ(ierr);
+  if (!numTests) numTests = 5;
+  viewer = PETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD);
+  for (i = 0; i < numTests; i++) {
+    PetscInt       k, N = n[i];
+
+    if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "N = %D:\n", N);CHKERRQ(ierr);}
+    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+
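+    /* In verbose mode, enumerate all N! permutations with PetscDTEnumPerm(), print each with
+       its parity, and confirm that PetscDTPermIndex() recovers the same index and sign. */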
+    if (verbose) {
+      PetscInt *perm;
+      PetscInt fac = 1;
+
+      ierr = PetscMalloc1(N, &perm);CHKERRQ(ierr);
+
+      for (k = 1; k <= N; k++) fac *= k;
+      ierr = PetscViewerASCIIPrintf(viewer, "Permutations of %D:\n", N);CHKERRQ(ierr);
+      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+      for (k = 0; k < fac; k++) {
+        PetscBool isOdd, isOddCheck;
+        PetscInt  j, kCheck;
+
+        ierr = PetscDTEnumPerm(N, k, perm, &isOdd);CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPrintf(viewer, "%D:", k);CHKERRQ(ierr);
+        for (j = 0; j < N; j++) {
+          ierr = PetscPrintf(PETSC_COMM_WORLD, " %D", perm[j]);CHKERRQ(ierr);
+        }
+        ierr = PetscPrintf(PETSC_COMM_WORLD, ", %s\n", isOdd ? "odd" : "even");CHKERRQ(ierr);
+        ierr = PetscDTPermIndex(N, perm, &kCheck, &isOddCheck);CHKERRQ(ierr);
+        if (kCheck != k || isOddCheck != isOdd) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "PetscDTEnumPerm / PetscDTPermIndex mismatch for (%D, %D)\n", N, k);
+      }
+      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+      ierr = PetscFree(perm);CHKERRQ(ierr);
+    }
+    for (k = 0; k <= N; k++) {
+      PetscInt   j, Nk, M;
+      PetscReal *w, *v, wv;
+      PetscInt  *subset;
+
+      ierr = PetscDTBinomialInt(N, k, &Nk);CHKERRQ(ierr);
+      if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "k = %D:\n", k);CHKERRQ(ierr);}
+      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+      if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "(%D choose %D): %D\n", N, k, Nk);CHKERRQ(ierr);}
+
+      /* Test subset and complement enumeration */
+      ierr = PetscMalloc1(N, &subset);CHKERRQ(ierr);
+      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+      for (j = 0; j < Nk; j++) {
+        PetscBool isOdd, isOddCheck;
+        PetscInt  jCheck, kCheck;
+
+        ierr = PetscDTEnumSplit(N, k, j, subset, &isOdd);CHKERRQ(ierr);
+        ierr = PetscDTPermIndex(N, subset, &kCheck, &isOddCheck);CHKERRQ(ierr);
+        if (isOddCheck != isOdd) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "PetscDTEnumSplit sign does not match PetscDTPermIndex sign");
+        if (verbose) {
+          PetscInt l;
+
+          ierr = PetscViewerASCIIPrintf(viewer, "subset %D:", j);CHKERRQ(ierr);
+          for (l = 0; l < k; l++) {
+            ierr = PetscPrintf(PETSC_COMM_WORLD, " %D", subset[l]);CHKERRQ(ierr);
+          }
+          ierr = PetscPrintf(PETSC_COMM_WORLD, " |");CHKERRQ(ierr);
+          for (l = k; l < N; l++) {
+            ierr = PetscPrintf(PETSC_COMM_WORLD, " %D", subset[l]);CHKERRQ(ierr);
+          }
+          ierr = PetscPrintf(PETSC_COMM_WORLD, ", %s\n", isOdd ? "odd" : "even");CHKERRQ(ierr);
+        }
+        ierr = PetscDTSubsetIndex(N, k, subset, &jCheck);CHKERRQ(ierr);
+        if (jCheck != j) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "jCheck (%D) != j (%D)", jCheck, j);
+      }
+      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+      ierr = PetscFree(subset);CHKERRQ(ierr);
+
+      /* Make a random k form */
+      ierr = PetscMalloc1(Nk, &w);CHKERRQ(ierr);
+      for (j = 0; j < Nk; j++) {ierr = PetscRandomGetValueReal(rand, &w[j]);CHKERRQ(ierr);}
+      /* Make a set of random vectors */
+      ierr = PetscMalloc1(N*k, &v);CHKERRQ(ierr);
+      for (j = 0; j < N*k; j++) {ierr = PetscRandomGetValueReal(rand, &v[j]);CHKERRQ(ierr);}
+
+      ierr = PetscDTAltVApply(N, k, w, v, &wv);CHKERRQ(ierr);
+
+      if (verbose) {
+        ierr = PetscViewerASCIIPrintf(viewer, "w:\n");CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+        if (Nk) {ierr = PetscRealView(Nk, w, viewer);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPrintf(viewer, "v:\n");CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+        if (N*k > 0) {ierr = PetscRealView(N*k, v, viewer);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPrintf(viewer, "w(v): %g\n", (double) wv);CHKERRQ(ierr);
+      }
+
+      /* sanity checks */
+      if (k == 1) { /* 1-forms are functionals (dot products) */
+        PetscInt  l;
+        PetscReal wvcheck = 0.;
+        PetscReal diff;
+
+        for (l = 0; l < N; l++) wvcheck += w[l] * v[l];
+        diff = PetscSqrtReal(PetscSqr(wvcheck - wv));
+        if (diff >= PETSC_SMALL * (PetscAbsReal(wv) + PetscAbsReal(wvcheck))) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "1-form / dot product equivalence: wvcheck (%g) != wv (%g)", (double) wvcheck, (double) wv);
+      }
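+      /* The space of N-forms on R^N is one-dimensional, so w(v_1,...,v_N) should equal
+         w[0] times the determinant of the N x N matrix whose rows are the N vectors;
+         the cofactor expansions below spell out that determinant for N <= 4. */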
+      if (k == N && N < 5) { /* n-forms are scaled determinants */
+        PetscReal det, wvcheck, diff;
+
+        switch (k) {
+        case 0:
+          det = 1.;
+          break;
+        case 1:
+          det = v[0];
+          break;
+        case 2:
+          det = v[0] * v[3] - v[1] * v[2];
+          break;
+        case 3:
+          det = v[0] * (v[4] * v[8] - v[5] * v[7]) +
+                v[1] * (v[5] * v[6] - v[3] * v[8]) +
+                v[2] * (v[3] * v[7] - v[4] * v[6]);
+          break;
+        case 4:
+          det = v[0] * (v[5] * (v[10] * v[15] - v[11] * v[14]) +
+                        v[6] * (v[11] * v[13] - v[ 9] * v[15]) +
+                        v[7] * (v[ 9] * v[14] - v[10] * v[13])) -
+                v[1] * (v[4] * (v[10] * v[15] - v[11] * v[14]) +
+                        v[6] * (v[11] * v[12] - v[ 8] * v[15]) +
+                        v[7] * (v[ 8] * v[14] - v[10] * v[12])) +
+                v[2] * (v[4] * (v[ 9] * v[15] - v[11] * v[13]) +
+                        v[5] * (v[11] * v[12] - v[ 8] * v[15]) +
+                        v[7] * (v[ 8] * v[13] - v[ 9] * v[12])) -
+                v[3] * (v[4] * (v[ 9] * v[14] - v[10] * v[13]) +
+                        v[5] * (v[10] * v[12] - v[ 8] * v[14]) +
+                        v[6] * (v[ 8] * v[13] - v[ 9] * v[12]));
+          break;
+        default:
+          SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "invalid k");
+        }
+        wvcheck = det * w[0];
+        diff = PetscSqrtReal(PetscSqr(wvcheck - wv));
+        if (diff >= PETSC_SMALL * (PetscAbsReal(wv) + PetscAbsReal(wvcheck))) SETERRQ3(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "n-form / determinant equivalence: wvcheck (%g) != wv (%g), diff %g", (double) wvcheck, (double) wv, (double) diff);
+      }
+      if (k > 0) { /* k-forms are linear in each component */
+        PetscReal alpha;
+        PetscReal *x, *axv, wx, waxv, waxvcheck;
+        PetscReal diff;
+        PetscReal rj;
+        PetscInt  l;
+
+        ierr = PetscMalloc2(N * k, &x, N * k, &axv);CHKERRQ(ierr);
+        ierr = PetscRandomGetValueReal(rand, &alpha);CHKERRQ(ierr);
+        ierr = PetscRandomSetInterval(rand, 0, k);CHKERRQ(ierr);
+        ierr = PetscRandomGetValueReal(rand, &rj);CHKERRQ(ierr);
+        j = (PetscInt) rj;
+        ierr = PetscRandomSetInterval(rand, -1., 1.);CHKERRQ(ierr);
+        for (l = 0; l < N*k; l++) x[l] = v[l];
+        for (l = 0; l < N*k; l++) axv[l] = v[l];
+        for (l = 0; l < N; l++) {
+          PetscReal val;
+
+          ierr = PetscRandomGetValueReal(rand, &val);CHKERRQ(ierr);
+          x[j * N + l] = val;
+          axv[j * N + l] += alpha * val;
+        }
+        ierr = PetscDTAltVApply(N, k, w, x, &wx);CHKERRQ(ierr);
+        ierr = PetscDTAltVApply(N, k, w, axv, &waxv);CHKERRQ(ierr);
+        waxvcheck = alpha * wx + wv;
+        diff = waxv - waxvcheck;
+        if (PetscAbsReal(diff) > 10. * PETSC_SMALL * (PetscAbsReal(waxv) + PetscAbsReal(waxvcheck))) SETERRQ3(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "linearity check: component %D, waxvcheck (%g) != waxv (%g)", j, (double) waxvcheck, (double) waxv);
+        ierr = PetscFree2(x,axv);CHKERRQ(ierr);
+      }
+      if (k > 1) { /* k-forms are antisymmetric */
+        PetscReal rj, rl, *swapv, wswapv, diff;
+        PetscInt  l, m;
+
+        ierr = PetscRandomSetInterval(rand, 0, k);CHKERRQ(ierr);
+        ierr = PetscRandomGetValueReal(rand, &rj);CHKERRQ(ierr);
+        j = (PetscInt) rj;
+        l = j;
+        while (l == j) {
+          ierr = PetscRandomGetValueReal(rand, &rl);CHKERRQ(ierr);
+          l = (PetscInt) rl;
+        }
+        ierr = PetscRandomSetInterval(rand, -1., 1.);CHKERRQ(ierr);
+        ierr = PetscMalloc1(N * k, &swapv);CHKERRQ(ierr);
+        for (m = 0; m < N * k; m++) swapv[m] = v[m];
+        for (m = 0; m < N; m++) {
+          swapv[j * N + m] = v[l * N + m];
+          swapv[l * N + m] = v[j * N + m];
+        }
+        ierr = PetscDTAltVApply(N, k, w, swapv, &wswapv);CHKERRQ(ierr);
+        diff = PetscAbsReal(wswapv + wv);
+        if (diff > PETSC_SMALL * (PetscAbsReal(wswapv) + PetscAbsReal(wv))) SETERRQ4(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "antisymmetry check: components %D & %D, wswapv (%g) != -wv (%g)", j, l, (double) wswapv, (double) wv);
+        ierr = PetscFree(swapv);CHKERRQ(ierr);
+      }
+      for (j = 0; j <= k && j + k <= N; j++) { /* wedge product */
+        PetscInt   Nj, Njk, l, JKj;
+        PetscReal *u, *uWw, *uWwcheck, *uWwmat, *x, *xsplit, uWwx, uWwxcheck, diff, norm;
+        PetscInt  *split;
+
+        if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "wedge j = %D:\n", j);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+        ierr = PetscDTBinomialInt(N, j,   &Nj);CHKERRQ(ierr);
+        ierr = PetscDTBinomialInt(N, j+k, &Njk);CHKERRQ(ierr);
+        ierr = PetscMalloc4(Nj, &u, Njk, &uWw, N*(j+k), &x, N*(j+k), &xsplit);CHKERRQ(ierr);
+        ierr = PetscMalloc1(j+k,&split);CHKERRQ(ierr);
+        for (l = 0; l < Nj; l++) {ierr = PetscRandomGetValueReal(rand, &u[l]);CHKERRQ(ierr);}
+        for (l = 0; l < N*(j+k); l++) {ierr = PetscRandomGetValueReal(rand, &x[l]);CHKERRQ(ierr);}
+        ierr = PetscDTAltVWedge(N, j, k, u, w, uWw);CHKERRQ(ierr);
+        ierr = PetscDTAltVApply(N, j+k, uWw, x, &uWwx);CHKERRQ(ierr);
+        if (verbose) {
+          ierr = PetscViewerASCIIPrintf(viewer, "u:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Nj) {ierr = PetscRealView(Nj, u, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPrintf(viewer, "u wedge w:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Njk) {ierr = PetscRealView(Njk, uWw, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPrintf(viewer, "x:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (N*(j+k) > 0) {ierr = PetscRealView(N*(j+k), x, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPrintf(viewer, "u wedge w(x): %g\n", (double) uWwx);CHKERRQ(ierr);
+        }
+        /* verify wedge formula */
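+        /* Expected value from the split formula:
+           (u wedge w)(x_1,...,x_{j+k}) = sum over (j,k)-splits sigma of
+           sign(sigma) * u(x_sigma(1),...,x_sigma(j)) * w(x_sigma(j+1),...,x_sigma(j+k)),
+           with the splits enumerated by PetscDTEnumSplit(). */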
+        uWwxcheck = 0.;
+        ierr = PetscDTBinomialInt(j+k, j, &JKj);CHKERRQ(ierr);
+        for (l = 0; l < JKj; l++) {
+          PetscBool isOdd;
+          PetscReal ux, wx;
+          PetscInt  m, p;
+
+          ierr = PetscDTEnumSplit(j+k, j, l, split, &isOdd);CHKERRQ(ierr);
+          for (m = 0; m < j+k; m++) {for (p = 0; p < N; p++) {xsplit[m * N + p] = x[split[m] * N + p];}}
+          ierr = PetscDTAltVApply(N, j, u, xsplit, &ux);CHKERRQ(ierr);
+          ierr = PetscDTAltVApply(N, k, w, &xsplit[j*N], &wx);CHKERRQ(ierr);
+          uWwxcheck += isOdd ? -(ux * wx) : (ux * wx);
+        }
+        diff = PetscAbsReal(uWwx - uWwxcheck);
+        if (diff > 10. * PETSC_SMALL * (PetscAbsReal(uWwx) + PetscAbsReal(uWwxcheck))) SETERRQ4(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "wedge check: forms %D & %D, uWwxcheck (%g) != uWwx (%g)", j, k, (double) uWwxcheck, (double) uWwx);
+        ierr = PetscFree(split);CHKERRQ(ierr);
+        ierr = PetscMalloc2(Nk * Njk, &uWwmat, Njk, &uWwcheck);CHKERRQ(ierr);
+        ierr = PetscDTAltVWedgeMatrix(N, j, k, u, uWwmat);CHKERRQ(ierr);
+        if (verbose) {
+          ierr = PetscViewerASCIIPrintf(viewer, "(u wedge):\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if ((Nk * Njk) > 0) {ierr = PetscRealView(Nk * Njk, uWwmat, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        }
+        diff = 0.;
+        norm = 0.;
+        for (l = 0; l < Njk; l++) {
+          PetscInt  m;
+          PetscReal sum = 0.;
+
+          for (m = 0; m < Nk; m++) sum += uWwmat[l * Nk + m] * w[m];
+          uWwcheck[l] = sum;
+          diff += PetscSqr(uWwcheck[l] - uWw[l]);
+          norm += PetscSqr(uWwcheck[l]) + PetscSqr(uWw[l]);
+        }
+        diff = PetscSqrtReal(diff);
+        norm = PetscSqrtReal(norm);
+        if (diff > PETSC_SMALL * norm) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "wedge matrix check: wedge matrix application does not match wedge direct application");
+        ierr = PetscFree2(uWwmat, uWwcheck);CHKERRQ(ierr);
+        ierr = PetscFree4(u, uWw, x, xsplit);CHKERRQ(ierr);
+        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+      }
+      for (M = PetscMax(1,k); M <= N; M++) { /* pullback */
+        PetscReal   *L, *u, *x;
+        PetscInt     Mk, l;
+
+        ierr = PetscDTBinomialInt(M, k, &Mk);CHKERRQ(ierr);
+        ierr = PetscMalloc3(M*N, &L, Mk, &u, M*k, &x);CHKERRQ(ierr);
+        for (l = 0; l < M*N; l++) {ierr = PetscRandomGetValueReal(rand, &L[l]);CHKERRQ(ierr);}
+        for (l = 0; l < Mk; l++) {ierr = PetscRandomGetValueReal(rand, &u[l]);CHKERRQ(ierr);}
+        for (l = 0; l < M*k; l++) {ierr = PetscRandomGetValueReal(rand, &x[l]);CHKERRQ(ierr);}
+        if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "pullback M = %D:\n", M);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+        ierr = CheckPullback(M, N, L, k, w, x, verbose, viewer);CHKERRQ(ierr);
+        if (M != N) {ierr = CheckPullback(N, M, L, k, u, v, PETSC_FALSE, viewer);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        if ((k % N) && (N > 1)) {
+          if (verbose) {ierr = PetscViewerASCIIPrintf(viewer, "negative pullback M = %D:\n", M);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          ierr = CheckPullback(M, N, L, -k, w, x, verbose, viewer);CHKERRQ(ierr);
+          if (M != N) {ierr = CheckPullback(N, M, L, -k, u, v, PETSC_FALSE, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        }
+        ierr = PetscFree3(L, u, x);CHKERRQ(ierr);
+      }
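+      /* Interior product (contraction with v_0) checks: the defining identity is
+         (w int v_0)(x_1,...,x_{k-1}) = w(v_0,x_1,...,x_{k-1}); the explicit matrix and the
+         sparsity pattern from PetscDTAltVInteriorPattern() are both compared against it below. */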
+      if (k > 0) { /* Interior */
+        PetscInt    Nkm, l, m;
+        PetscReal  *wIntv0, *wIntv0check, wvcheck, diff, diffMat, normMat;
+        PetscReal  *intv0mat, *matcheck;
+        PetscInt  (*indices)[3];
+
+        ierr = PetscDTBinomialInt(N, k-1, &Nkm);CHKERRQ(ierr);
+        ierr = PetscMalloc5(Nkm, &wIntv0, Nkm, &wIntv0check, Nk * Nkm, &intv0mat, Nk * Nkm, &matcheck, Nk * k, &indices);CHKERRQ(ierr);
+        ierr = PetscDTAltVInterior(N, k, w, v, wIntv0);CHKERRQ(ierr);
+        ierr = PetscDTAltVInteriorMatrix(N, k, v, intv0mat);CHKERRQ(ierr);
+        ierr = PetscDTAltVInteriorPattern(N, k, indices);CHKERRQ(ierr);
+        if (verbose) {
+          ierr = PetscViewerASCIIPrintf(viewer, "interior product matrix pattern:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          for (l = 0; l < Nk * k; l++) {
+            PetscInt row = indices[l][0];
+            PetscInt col = indices[l][1];
+            PetscInt x   = indices[l][2];
+
+            ierr = PetscViewerASCIIPrintf(viewer,"intV[%D,%D] = %sV[%D]\n", row, col, x < 0 ? "-" : " ", x < 0 ? -(x + 1) : x);CHKERRQ(ierr);
+          }
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        }
+        for (l = 0; l < Nkm * Nk; l++) matcheck[l] = 0.;
+        for (l = 0; l < Nk * k; l++) {
+          PetscInt row = indices[l][0];
+          PetscInt col = indices[l][1];
+          PetscInt x   = indices[l][2];
+
+          if (x < 0) {
+            matcheck[row * Nk + col] = -v[-(x+1)];
+          } else {
+            matcheck[row * Nk + col] = v[x];
+          }
+        }
+        diffMat = 0.;
+        normMat = 0.;
+        for (l = 0; l < Nkm * Nk; l++) {
+          diffMat += PetscSqr(PetscAbsReal(matcheck[l] - intv0mat[l]));
+          normMat += PetscSqr(matcheck[l]) + PetscSqr(intv0mat[l]);
+        }
+        diffMat = PetscSqrtReal(diffMat);
+        normMat = PetscSqrtReal(normMat);
+        if (diffMat > PETSC_SMALL * normMat) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Interior product check: matrix pattern does not match matrix");
+        diffMat = 0.;
+        normMat = 0.;
+        for (l = 0; l < Nkm; l++) {
+          PetscReal sum = 0.;
+
+          for (m = 0; m < Nk; m++) sum += intv0mat[l * Nk + m] * w[m];
+          wIntv0check[l] = sum;
+
+          diffMat += PetscSqr(PetscAbsReal(wIntv0check[l] - wIntv0[l]));
+          normMat += PetscSqr(wIntv0check[l]) + PetscSqr(wIntv0[l]);
+        }
+        diffMat = PetscSqrtReal(diffMat);
+        normMat = PetscSqrtReal(normMat);
+        if (diffMat > PETSC_SMALL * normMat) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Interior product check: application does not match matrix");
+        if (verbose) {
+          ierr = PetscViewerASCIIPrintf(viewer, "(w int v_0):\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Nkm) {ierr = PetscRealView(Nkm, wIntv0, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+
+          ierr = PetscViewerASCIIPrintf(viewer, "(int v_0):\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Nk * Nkm > 0) {ierr = PetscRealView(Nk * Nkm, intv0mat, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        }
+        ierr = PetscDTAltVApply(N, k - 1, wIntv0, &v[N], &wvcheck);CHKERRQ(ierr);
+        diff = PetscSqrtReal(PetscSqr(wvcheck - wv));
+        if (diff >= PETSC_SMALL * (PetscAbsReal(wv) + PetscAbsReal(wvcheck))) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Interior product check: (w Int v0)(v_rem) (%g) != w(v) (%g)", (double) wvcheck, (double) wv);
+        ierr = PetscFree5(wIntv0,wIntv0check,intv0mat,matcheck,indices);CHKERRQ(ierr);
+      }
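+      /* Hodge star checks: the single coefficient of the N-form (w wedge u) should equal the
+         dot product of (star w) with u, and applying star twice should return (-1)^(k*(N-k)) w. */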
+      if (k >= N - k) { /* Hodge star */
+        PetscReal *u, *starw, *starstarw, wu, starwdotu;
+        PetscReal diff, norm;
+        PetscBool isOdd;
+        PetscInt l;
+
+        isOdd = (PetscBool) ((k * (N - k)) & 1);
+        ierr = PetscMalloc3(Nk, &u, Nk, &starw, Nk, &starstarw);CHKERRQ(ierr);
+        ierr = PetscDTAltVStar(N, k, 1, w, starw);CHKERRQ(ierr);
+        ierr = PetscDTAltVStar(N, N-k, 1, starw, starstarw);CHKERRQ(ierr);
+        if (verbose) {
+          ierr = PetscViewerASCIIPrintf(viewer, "star w:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Nk) {ierr = PetscRealView(Nk, starw, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+
+          ierr = PetscViewerASCIIPrintf(viewer, "star star w:\n");CHKERRQ(ierr);
+          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
+          if (Nk) {ierr = PetscRealView(Nk, starstarw, viewer);CHKERRQ(ierr);}
+          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+        }
+        for (l = 0; l < Nk; l++) {ierr = PetscRandomGetValueReal(rand,&u[l]);CHKERRQ(ierr);}
+        ierr = PetscDTAltVWedge(N, k, N - k, w, u, &wu);CHKERRQ(ierr);
+        starwdotu = 0.;
+        for (l = 0; l < Nk; l++) starwdotu += starw[l] * u[l];
+        diff = PetscAbsReal(wu - starwdotu);
+        if (diff > PETSC_SMALL * (PetscAbsReal(wu) + PetscAbsReal(starwdotu))) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Hodge star check: (star w, u) (%g) != (w wedge u) (%g)", (double) starwdotu, (double) wu);
+
+        diff = 0.;
+        norm = 0.;
+        for (l = 0; l < Nk; l++) {
+          diff += PetscSqr(w[l] - (isOdd ? -starstarw[l] : starstarw[l]));
+          norm += PetscSqr(w[l]) + PetscSqr(starstarw[l]);
+        }
+        diff = PetscSqrtReal(diff);
+        norm = PetscSqrtReal(norm);
+        if (diff > PETSC_SMALL * norm) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Hodge star check: star(star(w)) != (-1)^(k*(N-k)) w");
+        ierr = PetscFree3(u, starw, starstarw);CHKERRQ(ierr);
+      }
+      ierr = PetscFree(v);CHKERRQ(ierr);
+      ierr = PetscFree(w);CHKERRQ(ierr);
+      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+    }
+    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
+  }
+  ierr = PetscRandomDestroy(&rand);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+/*TEST
+  test:
+    suffix: 1234
+    args: -verbose
+  test:
+    suffix: 56
+    args: -N 5,6
+TEST*/
diff --git a/src/dm/dt/examples/tests/makefile b/src/dm/dt/examples/tests/makefile
index 4e605e5f05e..19b18b52fc9 100644
--- a/src/dm/dt/examples/tests/makefile
+++ b/src/dm/dt/examples/tests/makefile
@@ -4,7 +4,7 @@ FFLAGS	        =
 CPPFLAGS        =
 FPPFLAGS        =
 LOCDIR          = src/dm/dt/examples/tests/
-EXAMPLESC       = ex1.c ex2.c ex3.c ex4.c ex5.c ex6.c
+EXAMPLESC       = ex1.c ex2.c ex3.c ex4.c ex5.c ex6.c ex7.c
 EXAMPLESF       =
 MANSEC          = DM
 
diff --git a/src/dm/dt/examples/tests/output/ex7_1234.out b/src/dm/dt/examples/tests/output/ex7_1234.out
new file mode 100644
index 00000000000..8e430032ffa
--- /dev/null
+++ b/src/dm/dt/examples/tests/output/ex7_1234.out
@@ -0,0 +1,1017 @@
+N = 0:
+  Permutations of 0:
+    0:, even
+  k = 0:
+    (0 choose 0): 1
+      subset 0: |, even
+    w:
+       0:   4.4006e-01
+    v:
+    w(v): 0.440064
+    wedge j = 0:
+      u:
+         0:  -8.7641e-01
+      u wedge w:
+         0:  -3.8568e-01
+      x:
+      u wedge w(x): -0.385677
+      (u wedge):
+         0:  -8.7641e-01
+    star w:
+       0:   4.4006e-01
+    star star w:
+       0:   4.4006e-01
+N = 1:
+  Permutations of 1:
+    0: 0, even
+  k = 0:
+    (1 choose 0): 1
+      subset 0: | 0, even
+    w:
+       0:  -7.1072e-01
+    v:
+    w(v): -0.710721
+    wedge j = 0:
+      u:
+         0:  -2.0444e-01
+      u wedge w:
+         0:   1.4530e-01
+      x:
+      u wedge w(x): 0.145303
+      (u wedge):
+         0:  -2.0444e-01
+    pullback M = 1:
+      L:
+         0:  -8.5393e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.1072e-01
+  k = 1:
+    (1 choose 1): 1
+      subset 0: 0 |, even
+    w:
+       0:  -4.9844e-01
+    v:
+       0:   9.6810e-01
+    w(v): -0.482541
+    wedge j = 0:
+      u:
+         0:  -7.1189e-01
+      u wedge w:
+         0:   3.5483e-01
+      x:
+         0:   9.9301e-01
+      u wedge w(x): 0.352354
+      (u wedge):
+         0:  -7.1189e-01
+    pullback M = 1:
+      L:
+         0:  -7.8645e-01
+      L*:
+         0:  -7.8645e-01
+      L*w:
+         0:   3.9200e-01
+    interior product matrix pattern:
+      intV[0,0] =  V[0]
+    (w int v_0):
+       0:  -4.8254e-01
+    (int v_0):
+       0:   9.6810e-01
+    star w:
+       0:  -4.9844e-01
+    star star w:
+       0:  -4.9844e-01
+N = 2:
+  Permutations of 2:
+    0: 0 1, even
+    1: 1 0, odd
+  k = 0:
+    (2 choose 0): 1
+      subset 0: | 0 1, even
+    w:
+       0:  -6.3889e-01
+    v:
+    w(v): -0.638887
+    wedge j = 0:
+      u:
+         0:   9.7209e-01
+      u wedge w:
+         0:  -6.2106e-01
+      x:
+      u wedge w(x): -0.621056
+      (u wedge):
+         0:   9.7209e-01
+    pullback M = 1:
+      L:
+         0:   8.3958e-01   1.5564e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -6.3889e-01
+    pullback M = 2:
+      L:
+         0:   8.3173e-01  -7.9185e-01  -5.3832e-01   2.1831e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -6.3889e-01
+  k = 1:
+    (2 choose 1): 2
+      subset 0: 0 | 1, even
+      subset 1: 1 | 0, odd
+    w:
+       0:   3.1332e-02   7.4829e-01
+    v:
+       0:  -6.7041e-01  -1.2642e-01
+    w(v): -0.115601
+    wedge j = 0:
+      u:
+         0:   5.0537e-01
+      u wedge w:
+         0:   1.5834e-02   3.7816e-01
+      x:
+         0:  -1.1516e-01  -5.2579e-02
+      u wedge w(x): -0.0217066
+      (u wedge):
+         0:   5.0537e-01   0.0000e+00   0.0000e+00   5.0537e-01
+    wedge j = 1:
+      u:
+         0:  -7.4302e-01   9.1177e-01
+      u wedge w:
+         0:  -5.8456e-01
+      x:
+         0:   7.0146e-01  -7.8962e-01   7.3216e-01   2.7505e-01
+      u wedge w(x): -0.450731
+      (u wedge):
+         0:  -9.1177e-01  -7.4302e-01
+    pullback M = 1:
+      L:
+         0:  -4.0927e-01   7.0128e-01
+      L*:
+         0:  -4.0927e-01   7.0128e-01
+      L*w:
+         0:   5.1194e-01
+    negative pullback M = 1:
+      L:
+         0:  -4.0927e-01   7.0128e-01
+      L*:
+         0:   7.0128e-01   4.0927e-01
+      L*w:
+         0:   3.2823e-01
+    pullback M = 2:
+      L:
+         0:   8.1629e-01  -7.1931e-01   5.4752e-01  -6.4367e-01
+      L*:
+         0:   8.1629e-01   5.4752e-01  -7.1931e-01  -6.4367e-01
+      L*w:
+         0:   4.3528e-01  -5.0419e-01
+    negative pullback M = 2:
+      L:
+         0:   8.1629e-01  -7.1931e-01   5.4752e-01  -6.4367e-01
+      L*:
+         0:  -6.4367e-01   7.1931e-01  -5.4752e-01   8.1629e-01
+      L*w:
+         0:   5.1808e-01   5.9366e-01
+    interior product matrix pattern:
+      intV[0,0] =  V[0]
+      intV[0,1] =  V[1]
+    (w int v_0):
+       0:  -1.1560e-01
+    (int v_0):
+       0:  -6.7041e-01  -1.2642e-01
+    star w:
+       0:  -7.4829e-01   3.1332e-02
+    star star w:
+       0:  -3.1332e-02  -7.4829e-01
+  k = 2:
+    (2 choose 2): 1
+      subset 0: 0 1 |, even
+    w:
+       0:   2.9906e-01
+    v:
+       0:  -8.1483e-01   3.6126e-01   1.4164e-01   3.6475e-01
+    w(v): -0.104186
+    wedge j = 0:
+      u:
+         0:   4.1139e-01
+      u wedge w:
+         0:   1.2303e-01
+      x:
+         0:   5.7180e-01   8.9367e-01  -3.7286e-01   2.5011e-01
+      u wedge w(x): 0.0585903
+      (u wedge):
+         0:   4.1139e-01
+    pullback M = 2:
+      L:
+         0:  -7.3847e-01   8.5710e-01   7.6242e-01  -2.7071e-02
+      L*:
+         0:  -6.3348e-01
+      L*w:
+         0:  -1.8945e-01
+    interior product matrix pattern:
+      intV[1,0] =  V[0]
+      intV[0,0] = -V[1]
+    (w int v_0):
+       0:  -1.0804e-01  -2.4368e-01
+    (int v_0):
+       0:  -3.6126e-01  -8.1483e-01
+    star w:
+       0:   2.9906e-01
+    star star w:
+       0:   2.9906e-01
+N = 3:
+  Permutations of 3:
+    0: 0 1 2, even
+    1: 0 2 1, odd
+    2: 1 0 2, odd
+    3: 1 2 0, even
+    4: 2 1 0, odd
+    5: 2 0 1, even
+  k = 0:
+    (3 choose 0): 1
+      subset 0: | 0 1 2, even
+    w:
+       0:  -7.8527e-01
+    v:
+    w(v): -0.785273
+    wedge j = 0:
+      u:
+         0:  -4.1607e-01
+      u wedge w:
+         0:   3.2673e-01
+      x:
+      u wedge w(x): 0.326726
+      (u wedge):
+         0:  -4.1607e-01
+    pullback M = 1:
+      L:
+         0:  -1.1162e-02  -5.1602e-01  -5.7893e-02
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.8527e-01
+    pullback M = 2:
+      L:
+         0:  -1.5048e-01  -2.6896e-01   9.3065e-01  -5.1508e-01   3.0863e-01
+         5:   7.3348e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.8527e-01
+    pullback M = 3:
+      L:
+         0:   9.4925e-01  -5.3134e-01  -6.2828e-01  -4.4492e-01   6.9910e-01
+         5:  -2.4978e-01   8.0559e-02  -8.9172e-02   1.9506e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.8527e-01
+  k = 1:
+    (3 choose 1): 3
+      subset 0: 0 | 1 2, even
+      subset 1: 1 | 0 2, odd
+      subset 2: 2 | 0 1, even
+    w:
+       0:   4.6649e-01  -4.5685e-01  -7.1699e-01
+    v:
+       0:   1.5665e-01  -2.1640e-01   7.2654e-01
+    w(v): -0.348987
+    wedge j = 0:
+      u:
+         0:  -5.0009e-01
+      u wedge w:
+         0:  -2.3329e-01   2.2847e-01   3.5856e-01
+      x:
+         0:   8.8050e-01  -9.6914e-02   3.4416e-01
+      u wedge w(x): -0.104147
+      (u wedge):
+         0:  -5.0009e-01   0.0000e+00   0.0000e+00   0.0000e+00  -5.0009e-01
+         5:   0.0000e+00   0.0000e+00   0.0000e+00  -5.0009e-01
+    wedge j = 1:
+      u:
+         0:   6.2172e-01  -6.2324e-01  -2.7127e-02
+      u wedge w:
+         0:   6.7021e-03  -4.3311e-01   4.3447e-01
+      x:
+         0:   5.8240e-01  -1.8885e-01  -5.5782e-03   5.1936e-02  -6.0479e-01
+         5:   8.5265e-01
+      u wedge w(x): -0.288922
+      (u wedge):
+         0:   6.2324e-01   6.2172e-01   0.0000e+00   2.7127e-02   0.0000e+00
+         5:   6.2172e-01   0.0000e+00   2.7127e-02  -6.2324e-01
+    pullback M = 1:
+      L:
+         0:  -3.9946e-01  -4.3518e-01  -3.9815e-01
+      L*:
+         0:  -3.9946e-01  -4.3518e-01  -3.9815e-01
+      L*w:
+         0:   2.9794e-01
+    negative pullback M = 1:
+      L:
+         0:  -3.9946e-01  -4.3518e-01  -3.9815e-01
+      L*:
+         0:  -3.9815e-01   4.3518e-01  -3.9946e-01
+      L*w:
+         0:  -9.8141e-02
+    pullback M = 2:
+      L:
+         0:  -4.6038e-01  -4.4391e-01  -9.6304e-01  -8.8672e-01   3.5819e-01
+         5:   1.3291e-01
+      L*:
+         0:  -4.6038e-01  -9.6304e-01   3.5819e-01  -4.4391e-01  -8.8672e-01
+         5:   1.3291e-01
+      L*w:
+         0:  -3.1612e-02   1.0272e-01
+    negative pullback M = 2:
+      L:
+         0:  -4.6038e-01  -4.4391e-01  -9.6304e-01  -8.8672e-01   3.5819e-01
+         5:   1.3291e-01
+      L*:
+         0:   1.3291e-01   8.8672e-01  -4.4391e-01  -3.5819e-01  -9.6304e-01
+         5:   4.6038e-01
+      L*w:
+         0:  -2.4815e-02  -5.7210e-02
+    pullback M = 3:
+      L:
+         0:   6.4982e-01   1.1086e-01  -9.7880e-02   6.2412e-01  -6.4816e-01
+         5:   9.4609e-01  -5.9075e-01   6.7407e-01  -7.9853e-01
+      L*:
+         0:   6.4982e-01   6.2412e-01  -5.9075e-01   1.1086e-01  -6.4816e-01
+         5:   6.7407e-01  -9.7880e-02   9.4609e-01  -7.9853e-01
+      L*w:
+         0:   4.4157e-01  -1.3547e-01   9.4654e-02
+    negative pullback M = 3:
+      L:
+         0:   6.4982e-01   1.1086e-01  -9.7880e-02   6.2412e-01  -6.4816e-01
+         5:   9.4609e-01  -5.9075e-01   6.7407e-01  -7.9853e-01
+      L*:
+         0:  -7.9853e-01  -9.4609e-01  -9.7880e-02  -6.7407e-01  -6.4816e-01
+         5:  -1.1086e-01  -5.9075e-01  -6.2412e-01   6.4982e-01
+      L*w:
+         0:   1.2990e-01   6.1155e-02  -4.5636e-01
+    interior product matrix pattern:
+      intV[0,0] =  V[0]
+      intV[0,1] =  V[1]
+      intV[0,2] =  V[2]
+    (w int v_0):
+       0:  -3.4899e-01
+    (int v_0):
+       0:   1.5665e-01  -2.1640e-01   7.2654e-01
+  k = 2:
+    (3 choose 2): 3
+      subset 0: 0 1 | 2, even
+      subset 1: 0 2 | 1, odd
+      subset 2: 1 2 | 0, even
+    w:
+       0:  -1.8782e-01   7.0744e-01   1.7253e-01
+    v:
+       0:   6.5771e-01   5.7049e-01   2.8630e-02  -8.5477e-01   3.3782e-01
+       5:  -2.1430e-01
+    w(v): -0.238478
+    wedge j = 0:
+      u:
+         0:  -5.7759e-01
+      u wedge w:
+         0:   1.0848e-01  -4.0861e-01  -9.9649e-02
+      x:
+         0:  -7.1156e-01   3.7126e-01  -4.3078e-01  -3.1688e-01   4.4620e-01
+         5:   4.2193e-01
+      u wedge w(x): 0.122009
+      (u wedge):
+         0:  -5.7759e-01   0.0000e+00   0.0000e+00   0.0000e+00  -5.7759e-01
+         5:   0.0000e+00   0.0000e+00   0.0000e+00  -5.7759e-01
+    wedge j = 1:
+      u:
+         0:   9.1910e-02   8.7977e-01  -5.5295e-02
+      u wedge w:
+         0:  -5.9614e-01
+      x:
+         0:   2.9690e-01   6.6471e-01   5.1740e-01   1.4031e-01   1.7736e-01
+         5:   8.2148e-01  -9.2988e-01   7.3394e-01  -8.3483e-01
+      u wedge w(x): 0.306565
+      (u wedge):
+         0:  -5.5295e-02  -8.7977e-01   9.1910e-02
+    pullback M = 2:
+      L:
+         0:   8.8443e-01   4.9797e-01   9.8585e-01  -6.3764e-01  -9.9988e-02
+         5:  -7.0149e-01
+      L*:
+         0:  -1.0549e+00  -5.7063e-01  -7.5532e-01
+      L*w:
+         0:  -3.3587e-01
+    negative pullback M = 2:
+      L:
+         0:   8.8443e-01   4.9797e-01   9.8585e-01  -6.3764e-01  -9.9988e-02
+         5:  -7.0149e-01
+      L*:
+         0:  -7.5532e-01   5.7063e-01  -1.0549e+00
+      L*w:
+         0:   3.6355e-01
+    pullback M = 3:
+      L:
+         0:  -9.5909e-01  -1.8504e-01  -7.3579e-01  -5.3436e-01  -1.8697e-01
+         5:  -2.4928e-01  -1.7239e-01  -8.6664e-01  -4.3552e-01
+      L*:
+         0:   8.0437e-02   7.9928e-01   4.3086e-01  -1.5409e-01   2.9086e-01
+         5:   1.8975e-01  -9.1438e-02  -5.5707e-01  -1.3461e-01
+      L*w:
+         0:   6.2467e-01   2.6744e-01  -4.0014e-01
+    negative pullback M = 3:
+      L:
+         0:  -9.5909e-01  -1.8504e-01  -7.3579e-01  -5.3436e-01  -1.8697e-01
+         5:  -2.4928e-01  -1.7239e-01  -8.6664e-01  -4.3552e-01
+      L*:
+         0:  -1.3461e-01   5.5707e-01  -9.1438e-02  -1.8975e-01   2.9086e-01
+         5:   1.5409e-01   4.3086e-01  -7.9928e-01   8.0437e-02
+      L*w:
+         0:   4.0360e-01   2.6799e-01  -6.3249e-01
+    interior product matrix pattern:
+      intV[0,0] = -V[1]
+      intV[0,1] = -V[2]
+      intV[1,0] =  V[0]
+      intV[1,2] = -V[2]
+      intV[2,1] =  V[0]
+      intV[2,2] =  V[1]
+    (w int v_0):
+       0:   8.6894e-02  -1.2847e-01   5.6371e-01
+    (int v_0):
+       0:  -5.7049e-01  -2.8630e-02   0.0000e+00   6.5771e-01   0.0000e+00
+       5:  -2.8630e-02   0.0000e+00   6.5771e-01   5.7049e-01
+    star w:
+       0:   1.7253e-01  -7.0744e-01  -1.8782e-01
+    star star w:
+       0:  -1.8782e-01   7.0744e-01   1.7253e-01
+  k = 3:
+    (3 choose 3): 1
+      subset 0: 0 1 2 |, even
+    w:
+       0:  -5.7996e-01
+    v:
+       0:  -6.7343e-01   7.7333e-01   4.9289e-01  -6.8617e-01   8.4124e-01
+       5:  -4.3276e-01   7.7254e-01   5.8776e-01   7.8121e-01
+    w(v): 0.566604
+    wedge j = 0:
+      u:
+         0:  -5.0080e-01
+      u wedge w:
+         0:   2.9044e-01
+      x:
+         0:  -7.6236e-01  -2.2003e-02  -3.4366e-01   2.4058e-01  -1.7215e-01
+         5:   2.4512e-01   3.5984e-01   8.2625e-01  -7.1276e-01
+      u wedge w(x): -0.0100078
+      (u wedge):
+         0:  -5.0080e-01
+    pullback M = 3:
+      L:
+         0:  -4.2983e-01  -5.6291e-01  -4.1641e-01  -2.0869e-01   1.9987e-01
+         5:   4.4418e-01  -9.9065e-01  -1.5900e-01  -2.9704e-01
+      L*:
+         0:   1.8149e-01
+      L*w:
+         0:  -1.0525e-01
+    interior product matrix pattern:
+      intV[2,0] =  V[0]
+      intV[1,0] = -V[1]
+      intV[0,0] =  V[2]
+    (w int v_0):
+       0:  -2.8586e-01   4.4850e-01   3.9056e-01
+    (int v_0):
+       0:   4.9289e-01  -7.7333e-01  -6.7343e-01
+    star w:
+       0:  -5.7996e-01
+    star star w:
+       0:  -5.7996e-01
+N = 4:
+  Permutations of 4:
+    0: 0 1 2 3, even
+    1: 0 1 3 2, odd
+    2: 0 2 1 3, odd
+    3: 0 2 3 1, even
+    4: 0 3 2 1, odd
+    5: 0 3 1 2, even
+    6: 1 0 2 3, odd
+    7: 1 0 3 2, even
+    8: 1 2 0 3, even
+    9: 1 2 3 0, odd
+    10: 1 3 2 0, even
+    11: 1 3 0 2, odd
+    12: 2 1 0 3, odd
+    13: 2 1 3 0, even
+    14: 2 0 1 3, even
+    15: 2 0 3 1, odd
+    16: 2 3 0 1, even
+    17: 2 3 1 0, odd
+    18: 3 1 2 0, odd
+    19: 3 1 0 2, even
+    20: 3 2 1 0, even
+    21: 3 2 0 1, odd
+    22: 3 0 2 1, even
+    23: 3 0 1 2, odd
+  k = 0:
+    (4 choose 0): 1
+      subset 0: | 0 1 2 3, even
+    w:
+       0:  -7.4394e-01
+    v:
+    w(v): -0.743937
+    wedge j = 0:
+      u:
+         0:   3.5896e-01
+      u wedge w:
+         0:  -2.6704e-01
+      x:
+      u wedge w(x): -0.267043
+      (u wedge):
+         0:   3.5896e-01
+    pullback M = 1:
+      L:
+         0:   1.5959e-01  -6.4365e-01   2.5399e-01   2.9917e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.4394e-01
+    pullback M = 2:
+      L:
+         0:  -1.8009e-01  -5.7793e-01   3.3057e-01  -9.7700e-01   5.4455e-01
+         5:  -4.1276e-01   4.3369e-01   6.1429e-02
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.4394e-01
+    pullback M = 3:
+      L:
+         0:   2.5055e-01   8.6968e-01   7.4232e-01   3.0309e-01   5.9046e-01
+         5:  -2.1829e-01  -5.5160e-01  -6.7168e-01  -4.1558e-01  -1.3479e-01
+        10:   6.5574e-01   5.6780e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.4394e-01
+    pullback M = 4:
+      L:
+         0:   8.2121e-01  -2.9750e-01  -5.7062e-01   6.2420e-01   8.2407e-01
+         5:   5.6269e-01  -3.1815e-01   8.2830e-01   6.0784e-01  -8.7081e-01
+        10:  -5.7205e-01   2.8393e-01  -4.5425e-01  -3.5681e-01   8.2933e-01
+        15:   2.4311e-01
+      L*:
+         0:   1.0000e+00
+      L*w:
+         0:  -7.4394e-01
+  k = 1:
+    (4 choose 1): 4
+      subset 0: 0 | 1 2 3, even
+      subset 1: 1 | 0 2 3, odd
+      subset 2: 2 | 0 1 3, even
+      subset 3: 3 | 0 1 2, odd
+    w:
+       0:   2.6369e-01  -3.5748e-01  -2.8349e-01   2.2739e-01
+    v:
+       0:   8.8206e-01  -7.8617e-01  -9.1753e-01   4.5956e-01
+    w(v): 0.878243
+    wedge j = 0:
+      u:
+         0:   7.4388e-01
+      u wedge w:
+         0:   1.9615e-01  -2.6592e-01  -2.1089e-01   1.6915e-01
+      x:
+         0:  -9.6728e-01   1.3094e-01   8.2809e-01   8.9815e-01
+      u wedge w(x): -0.247262
+      (u wedge):
+         0:   7.4388e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+         5:   7.4388e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        10:   7.4388e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        15:   7.4388e-01
+    wedge j = 1:
+      u:
+         0:  -2.6993e-01  -2.0090e-01  -3.1502e-02   7.2239e-02
+      u wedge w:
+         0:   1.4947e-01   8.4831e-02  -8.0429e-02   4.5693e-02  -1.9860e-02
+         5:   1.3316e-02
+      x:
+         0:  -3.7522e-01   1.0796e-01  -3.3212e-01   6.8021e-01   9.6693e-01
+         5:  -6.5846e-02   4.8128e-01   4.0758e-01
+      u wedge w(x): 0.0586624
+      (u wedge):
+         0:   2.0090e-01  -2.6993e-01   0.0000e+00   0.0000e+00   3.1502e-02
+         5:   0.0000e+00  -2.6993e-01   0.0000e+00  -7.2239e-02   0.0000e+00
+        10:   0.0000e+00  -2.6993e-01   0.0000e+00   3.1502e-02  -2.0090e-01
+        15:   0.0000e+00   0.0000e+00  -7.2239e-02   0.0000e+00  -2.0090e-01
+        20:   0.0000e+00   0.0000e+00  -7.2239e-02  -3.1502e-02
+    pullback M = 1:
+      L:
+         0:   7.8673e-01   8.7400e-01   5.0507e-01   5.8168e-01
+      L*:
+         0:   7.8673e-01   8.7400e-01   5.0507e-01   5.8168e-01
+      L*w:
+         0:  -1.1590e-01
+    negative pullback M = 1:
+      L:
+         0:   7.8673e-01   8.7400e-01   5.0507e-01   5.8168e-01
+      L*:
+         0:   5.8168e-01  -5.0507e-01   8.7400e-01  -7.8673e-01
+      L*w:
+         0:  -9.2737e-02
+    pullback M = 2:
+      L:
+         0:  -9.5632e-01   8.7212e-01  -6.3417e-01   5.7161e-01  -8.5299e-01
+         5:  -4.6585e-02  -1.2355e-01  -7.6224e-01
+      L*:
+         0:  -9.5632e-01  -6.3417e-01  -8.5299e-01  -1.2355e-01   8.7212e-01
+         5:   5.7161e-01  -4.6585e-02  -7.6224e-01
+      L*w:
+         0:   1.8826e-01  -1.3449e-01
+    negative pullback M = 2:
+      L:
+         0:  -9.5632e-01   8.7212e-01  -6.3417e-01   5.7161e-01  -8.5299e-01
+         5:  -4.6585e-02  -1.2355e-01  -7.6224e-01
+      L*:
+         0:  -7.6224e-01   4.6585e-02   5.7161e-01  -8.7212e-01   1.2355e-01
+         5:  -8.5299e-01   6.3417e-01  -9.5632e-01
+      L*w:
+         0:  -5.7801e-01  -5.9742e-02
+    pullback M = 3:
+      L:
+         0:   1.7964e-02  -1.3200e-01  -8.4531e-01   9.1974e-01   4.6205e-01
+         5:  -3.0878e-01  -9.2426e-01  -2.3007e-01  -4.3335e-01  -7.2587e-01
+        10:   5.4066e-02   5.9836e-01
+      L*:
+         0:   1.7964e-02   9.1974e-01  -9.2426e-01  -7.2587e-01  -1.3200e-01
+         5:   4.6205e-01  -2.3007e-01   5.4066e-02  -8.4531e-01  -3.0878e-01
+        10:  -4.3335e-01   5.9836e-01
+      L*w:
+         0:  -2.2709e-01  -1.2246e-01   1.4640e-01
+    negative pullback M = 3:
+      L:
+         0:   1.7964e-02  -1.3200e-01  -8.4531e-01   9.1974e-01   4.6205e-01
+         5:  -3.0878e-01  -9.2426e-01  -2.3007e-01  -4.3335e-01  -7.2587e-01
+        10:   5.4066e-02   5.9836e-01
+      L*:
+         0:   5.9836e-01   4.3335e-01  -3.0878e-01   8.4531e-01  -5.4066e-02
+         5:  -2.3007e-01  -4.6205e-01  -1.3200e-01  -7.2587e-01   9.2426e-01
+        10:   9.1974e-01  -1.7964e-02
+      L*w:
+         0:   2.8262e-01   1.6896e-01  -7.8663e-01
+    pullback M = 4:
+      L:
+         0:   6.3375e-01  -3.0028e-02   9.4808e-01   6.3855e-02  -5.1181e-02
+         5:   7.3297e-01  -9.2044e-01  -2.6976e-01   3.4461e-01  -1.2073e-01
+        10:   7.5831e-01  -7.9239e-01   6.9351e-01   9.7345e-01  -6.8279e-01
+        15:  -1.2872e-02
+      L*:
+         0:   6.3375e-01  -5.1181e-02   3.4461e-01   6.9351e-01  -3.0028e-02
+         5:   7.3297e-01  -1.2073e-01   9.7345e-01   9.4808e-01  -9.2044e-01
+        10:   7.5831e-01  -6.8279e-01   6.3855e-02  -2.6976e-01  -7.9239e-01
+        15:  -1.2872e-02
+      L*w:
+         0:   2.4541e-01  -1.4359e-02   2.0880e-01   3.3498e-01
+    negative pullback M = 4:
+      L:
+         0:   6.3375e-01  -3.0028e-02   9.4808e-01   6.3855e-02  -5.1181e-02
+         5:   7.3297e-01  -9.2044e-01  -2.6976e-01   3.4461e-01  -1.2073e-01
+        10:   7.5831e-01  -7.9239e-01   6.9351e-01   9.7345e-01  -6.8279e-01
+        15:  -1.2872e-02
+      L*:
+         0:  -1.2872e-02   7.9239e-01  -2.6976e-01  -6.3855e-02   6.8279e-01
+         5:   7.5831e-01   9.2044e-01   9.4808e-01   9.7345e-01   1.2073e-01
+        10:   7.3297e-01   3.0028e-02  -6.9351e-01   3.4461e-01   5.1181e-02
+        15:   6.3375e-01
+      L*w:
+         0:  -2.2470e-01  -1.3639e-01   1.2560e-02  -1.7646e-01
+    interior product matrix pattern:
+      intV[0,0] =  V[0]
+      intV[0,1] =  V[1]
+      intV[0,2] =  V[2]
+      intV[0,3] =  V[3]
+    (w int v_0):
+       0:   8.7824e-01
+    (int v_0):
+       0:   8.8206e-01  -7.8617e-01  -9.1753e-01   4.5956e-01
+  k = 2:
+    (4 choose 2): 6
+      subset 0: 0 1 | 2 3, even
+      subset 1: 0 2 | 1 3, odd
+      subset 2: 0 3 | 1 2, even
+      subset 3: 1 2 | 0 3, even
+      subset 4: 1 3 | 0 2, odd
+      subset 5: 2 3 | 0 1, even
+    w:
+       0:   9.4113e-01   1.5232e-01  -5.0107e-01  -2.3200e-01   4.8558e-02
+       5:   5.2142e-01
+    v:
+       0:  -8.9591e-01   8.4061e-01  -2.2518e-01   3.4966e-02   1.6809e-01
+       5:   8.1800e-01   9.0384e-01   8.7769e-01
+    w(v): -0.847405
+    wedge j = 0:
+      u:
+         0:   4.7703e-01
+      u wedge w:
+         0:   4.4895e-01   7.2659e-02  -2.3903e-01  -1.1067e-01   2.3164e-02
+         5:   2.4873e-01
+      x:
+         0:   2.0731e-01  -9.8742e-01  -7.1849e-01   9.6120e-01  -4.7646e-01
+         5:   6.4692e-01  -8.0816e-01   7.0296e-01
+      u wedge w(x): -0.435001
+      (u wedge):
+         0:   4.7703e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+         5:   0.0000e+00   0.0000e+00   4.7703e-01   0.0000e+00   0.0000e+00
+        10:   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00   4.7703e-01
+        15:   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        20:   0.0000e+00   4.7703e-01   0.0000e+00   0.0000e+00   0.0000e+00
+        25:   0.0000e+00   0.0000e+00   0.0000e+00   4.7703e-01   0.0000e+00
+        30:   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        35:   4.7703e-01
+    wedge j = 1:
+      u:
+         0:   2.9008e-01   1.5075e-01  -9.5663e-01  -6.9372e-01
+      u wedge w:
+         0:  -9.9057e-01  -5.6326e-01  -4.3375e-01   2.8600e-01
+      x:
+         0:   8.7614e-02  -5.5981e-01   4.1114e-01   9.1749e-01  -3.7016e-01
+         5:  -4.9479e-01  -3.4250e-01   2.8268e-01  -1.8432e-01   4.4155e-01
+        10:   5.9688e-01  -7.4634e-01
+      u wedge w(x): 0.377565
+      (u wedge):
+         0:  -9.5663e-01  -1.5075e-01   0.0000e+00   2.9008e-01   0.0000e+00
+         5:   0.0000e+00  -6.9372e-01   0.0000e+00  -1.5075e-01   0.0000e+00
+        10:   2.9008e-01   0.0000e+00   0.0000e+00  -6.9372e-01   9.5663e-01
+        15:   0.0000e+00   0.0000e+00   2.9008e-01   0.0000e+00   0.0000e+00
+        20:   0.0000e+00  -6.9372e-01   9.5663e-01   1.5075e-01
+    wedge j = 2:
+      u:
+         0:  -8.6722e-01  -3.8501e-01   8.8852e-01   9.5774e-02  -6.6658e-01
+         5:   1.6054e-02
+      u wedge w:
+         0:  -5.7097e-01
+      x:
+         0:  -8.4414e-01  -7.0774e-01   8.9762e-03   3.6565e-01   3.9703e-01
+         5:  -6.7742e-01  -9.5197e-02  -5.9403e-01  -7.3056e-01  -7.3361e-01
+        10:   9.3656e-01   8.4796e-01   5.2428e-01  -8.9313e-01   6.5618e-02
+        15:   5.9341e-01
+      u wedge w(x): -0.576003
+      (u wedge):
+         0:   1.6054e-02   6.6658e-01   9.5774e-02   8.8852e-01   3.8501e-01
+         5:  -8.6722e-01
+    pullback M = 2:
+      L:
+         0:   4.8556e-01   7.4037e-01   9.1990e-01   7.0991e-01  -2.0408e-01
+         5:  -6.8744e-01   5.4403e-01   9.7044e-01
+      L*:
+         0:  -3.3637e-01  -1.8270e-01   6.8416e-02  -4.8750e-01   5.0649e-01
+         5:   1.7594e-01
+      L*w:
+         0:  -1.4924e-01
+    negative pullback M = 2:
+      L:
+         0:   4.8556e-01   7.4037e-01   9.1990e-01   7.0991e-01  -2.0408e-01
+         5:  -6.8744e-01   5.4403e-01   9.7044e-01
+      L*:
+         0:   1.7594e-01  -5.0649e-01  -4.8750e-01   6.8416e-02   1.8270e-01
+         5:  -3.3637e-01
+      L*w:
+         0:   1.5032e-01
+    pullback M = 3:
+      L:
+         0:  -8.7798e-01  -5.1988e-01  -3.0551e-01  -4.5998e-01   9.3861e-01
+         5:   6.2693e-01  -8.3359e-01   3.3612e-01   6.0071e-02  -1.8260e-01
+        10:   4.9155e-01  -8.7632e-01
+      L*:
+         0:  -1.0632e+00  -7.2847e-01  -5.2650e-01   6.2781e-01  -5.4709e-02
+         5:  -3.4837e-01  -6.9096e-01  -3.0741e-01   7.1360e-01   4.9497e-01
+        10:   5.1756e-01   7.4146e-01  -3.9168e-02   7.1459e-02   6.0575e-01
+        15:  -1.5434e-01  -1.1307e+00  -3.2407e-01
+      L*w:
+         0:  -1.1777e+00  -7.5776e-01  -5.1758e-01
+    negative pullback M = 3:
+      L:
+         0:  -8.7798e-01  -5.1988e-01  -3.0551e-01  -4.5998e-01   9.3861e-01
+         5:   6.2693e-01  -8.3359e-01   3.3612e-01   6.0071e-02  -1.8260e-01
+        10:   4.9155e-01  -8.7632e-01
+      L*:
+         0:  -3.2407e-01   1.1307e+00  -1.5434e-01   6.0575e-01  -7.1459e-02
+         5:  -3.9168e-02  -7.4146e-01   5.1756e-01  -4.9497e-01  -7.1360e-01
+        10:  -3.0741e-01   6.9096e-01  -3.4837e-01   5.4709e-02   6.2781e-01
+        15:  -5.2650e-01   7.2847e-01  -1.0632e+00
+      L*w:
+         0:  -2.1987e-01   1.3994e-01  -1.0310e+00
+    pullback M = 4:
+      L:
+         0:  -9.4566e-01  -7.0382e-01   2.1323e-02   6.7554e-01   4.0173e-01
+         5:  -8.0673e-01   4.6571e-01   1.5665e-01  -2.9256e-01   9.9166e-01
+        10:  -2.7217e-01   1.8503e-01   3.9038e-01   4.6708e-01  -4.1968e-02
+        15:  -9.6358e-01
+      L*:
+         0:   1.0456e+00  -1.1437e+00  -1.6694e-01   1.6236e-01   5.0257e-01
+         5:  -5.2377e-01  -4.4897e-01   2.6362e-01   3.1363e-02   2.6909e-02
+        10:  -1.9867e-01   1.1853e-01  -4.1952e-01   2.2659e-02   6.4750e-01
+        15:   1.2016e-01  -4.4825e-01   2.0967e-01  -3.1058e-01   1.7041e-01
+        20:   1.9578e-02  -2.4226e-01  -1.8367e-01   8.5507e-02   4.3473e-01
+        25:  -8.0013e-01   3.6265e-01  -3.0461e-01   7.0418e-01  -1.0420e+00
+        30:  -3.1127e-01   1.8781e-01   7.8048e-03   1.2880e-01  -4.4218e-01
+        35:   2.7002e-01
+      L*w:
+         0:   6.0717e-01  -3.5219e-01  -6.5613e-01  -1.8428e-01  -3.3288e-01
+         5:  -1.7881e-01
+    negative pullback M = 4:
+      L:
+         0:  -9.4566e-01  -7.0382e-01   2.1323e-02   6.7554e-01   4.0173e-01
+         5:  -8.0673e-01   4.6571e-01   1.5665e-01  -2.9256e-01   9.9166e-01
+        10:  -2.7217e-01   1.8503e-01   3.9038e-01   4.6708e-01  -4.1968e-02
+        15:  -9.6358e-01
+      L*:
+         0:   2.7002e-01   4.4218e-01   1.2880e-01   7.8048e-03  -1.8781e-01
+         5:  -3.1127e-01   1.0420e+00   7.0418e-01   3.0461e-01  -3.6265e-01
+        10:  -8.0013e-01  -4.3473e-01   8.5507e-02   1.8367e-01  -2.4226e-01
+        15:   1.9578e-02  -1.7041e-01  -3.1058e-01   2.0967e-01   4.4825e-01
+        20:   1.2016e-01   6.4750e-01  -2.2659e-02  -4.1952e-01  -1.1853e-01
+        25:  -1.9867e-01  -2.6909e-02  -3.1363e-02   2.6362e-01   4.4897e-01
+        30:  -5.2377e-01  -5.0257e-01   1.6236e-01  -1.6694e-01   1.1437e+00
+        35:   1.0456e+00
+      L*w:
+         0:   8.3704e-02   7.5386e-01   5.5081e-02  -1.6467e-01   1.2585e-01
+         5:  -1.1361e-02
+    interior product matrix pattern:
+      intV[1,0] =  V[0]
+      intV[0,0] = -V[1]
+      intV[2,1] =  V[0]
+      intV[0,1] = -V[2]
+      intV[3,2] =  V[0]
+      intV[0,2] = -V[3]
+      intV[2,3] =  V[1]
+      intV[1,3] = -V[2]
+      intV[3,4] =  V[1]
+      intV[1,4] = -V[3]
+      intV[3,5] =  V[2]
+      intV[2,5] = -V[3]
+    (w int v_0):
+       0:  -7.3931e-01  -8.9711e-01  -3.4971e-01   3.7232e-01
+    (int v_0):
+       0:  -8.4061e-01   2.2518e-01  -3.4966e-02   0.0000e+00   0.0000e+00
+       5:   0.0000e+00  -8.9591e-01   0.0000e+00   0.0000e+00   2.2518e-01
+      10:  -3.4966e-02   0.0000e+00   0.0000e+00  -8.9591e-01   0.0000e+00
+      15:   8.4061e-01   0.0000e+00  -3.4966e-02   0.0000e+00   0.0000e+00
+      20:  -8.9591e-01   0.0000e+00   8.4061e-01  -2.2518e-01
+    star w:
+       0:   5.2142e-01  -4.8558e-02  -2.3200e-01  -5.0107e-01  -1.5232e-01
+       5:   9.4113e-01
+    star star w:
+       0:   9.4113e-01   1.5232e-01  -5.0107e-01  -2.3200e-01   4.8558e-02
+       5:   5.2142e-01
+  k = 3:
+    (4 choose 3): 4
+      subset 0: 0 1 2 | 3, even
+      subset 1: 0 1 3 | 2, odd
+      subset 2: 0 2 3 | 1, even
+      subset 3: 1 2 3 | 0, odd
+    w:
+       0:  -3.7517e-01   6.1766e-01   9.8353e-01  -1.2600e-01
+    v:
+       0:  -2.5691e-01  -2.8870e-01   1.0936e-02  -3.8726e-01   8.4560e-01
+       5:  -6.7717e-01  -9.7375e-01  -5.5894e-01  -2.5643e-01   8.0538e-01
+      10:  -2.0257e-01  -5.4053e-01
+    w(v): -0.102079
+    wedge j = 0:
+      u:
+         0:   8.5022e-01
+      u wedge w:
+         0:  -3.1898e-01   5.2515e-01   8.3622e-01  -1.0713e-01
+      x:
+         0:  -8.0037e-01   4.5905e-01  -1.5464e-01   6.7704e-01  -8.6982e-01
+         5:  -9.9576e-01   7.5080e-01  -9.6707e-01  -5.1927e-01   6.8438e-01
+        10:  -1.5516e-01  -6.6693e-01
+      u wedge w(x): -0.26249
+      (u wedge):
+         0:   8.5022e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+         5:   8.5022e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        10:   8.5022e-01   0.0000e+00   0.0000e+00   0.0000e+00   0.0000e+00
+        15:   8.5022e-01
+    wedge j = 1:
+      u:
+         0:  -5.3738e-01  -3.2322e-01   4.7929e-01   1.4265e-01
+      u wedge w:
+         0:   7.3516e-01
+      x:
+         0:   8.3074e-01   4.8066e-03  -2.4025e-01  -7.5128e-01   1.7846e-01
+         5:  -4.2150e-01   6.3963e-01   4.8965e-01   4.3057e-01  -3.0549e-01
+        10:   2.0139e-01  -4.2717e-01  -2.1362e-01   2.2242e-01   7.8195e-02
+        15:  -4.3793e-01
+      u wedge w(x): -0.0636979
+      (u wedge):
+         0:  -1.4265e-01   4.7929e-01   3.2322e-01  -5.3738e-01
+    pullback M = 3:
+      L:
+         0:   1.8703e-01   9.8369e-01   5.9181e-02  -2.3898e-01   7.2997e-01
+         5:  -6.2029e-01  -1.9544e-01  -4.0435e-01  -9.1224e-01   9.6188e-01
+        10:  -9.5261e-01  -6.7639e-01
+      L*:
+         0:  -2.5250e-01  -9.7686e-01  -1.0705e+00  -9.5144e-01
+      L*w:
+         0:  -1.4417e+00
+    negative pullback M = 3:
+      L:
+         0:   1.8703e-01   9.8369e-01   5.9181e-02  -2.3898e-01   7.2997e-01
+         5:  -6.2029e-01  -1.9544e-01  -4.0435e-01  -9.1224e-01   9.6188e-01
+        10:  -9.5261e-01  -6.7639e-01
+      L*:
+         0:  -9.5144e-01   1.0705e+00  -9.7686e-01   2.5250e-01
+      L*w:
+         0:   2.5598e-02
+    pullback M = 4:
+      L:
+         0:  -4.9748e-01  -3.0328e-01  -8.7441e-01  -5.8880e-01  -9.1914e-01
+         5:  -2.2123e-01  -4.3706e-01   3.3706e-01   4.1845e-01   7.4420e-01
+        10:  -1.5180e-01  -3.5261e-01  -3.7428e-01  -8.5541e-01  -6.4351e-01
+        15:   8.8954e-02
+      L*:
+         0:   4.3644e-01  -3.7015e-01   2.7337e-01   5.2209e-01   4.8975e-01
+         5:  -5.3437e-01   1.3514e-01   1.6866e-01  -1.3189e-01  -3.0169e-01
+        10:   2.2875e-01   6.9647e-02  -4.2467e-01   3.1722e-01   2.2549e-01
+        15:  -2.5489e-01
+      L*w:
+         0:  -1.8928e-01  -4.0214e-01   7.9348e-02   6.0915e-01
+    negative pullback M = 4:
+      L:
+         0:  -4.9748e-01  -3.0328e-01  -8.7441e-01  -5.8880e-01  -9.1914e-01
+         5:  -2.2123e-01  -4.3706e-01   3.3706e-01   4.1845e-01   7.4420e-01
+        10:  -1.5180e-01  -3.5261e-01  -3.7428e-01  -8.5541e-01  -6.4351e-01
+        15:   8.8954e-02
+      L*:
+         0:  -2.5489e-01  -2.2549e-01   3.1722e-01   4.2467e-01  -6.9647e-02
+         5:   2.2875e-01   3.0169e-01  -1.3189e-01   1.6866e-01  -1.3514e-01
+        10:  -5.3437e-01  -4.8975e-01  -5.2209e-01   2.7337e-01   3.7015e-01
+        15:   4.3644e-01
+      L*w:
+         0:   2.1484e-01   4.8076e-01  -6.1060e-01   6.7379e-01
+    interior product matrix pattern:
+      intV[3,0] =  V[0]
+      intV[1,0] = -V[1]
+      intV[0,0] =  V[2]
+      intV[4,1] =  V[0]
+      intV[2,1] = -V[1]
+      intV[0,1] =  V[3]
+      intV[5,2] =  V[0]
+      intV[2,2] = -V[2]
+      intV[1,2] =  V[3]
+      intV[5,3] =  V[1]
+      intV[4,3] = -V[2]
+      intV[3,3] =  V[3]
+    (w int v_0):
+       0:  -2.4330e-01  -4.8919e-01   1.6756e-01   1.4518e-01  -1.5730e-01
+       5:  -2.1630e-01
+    (int v_0):
+       0:   1.0936e-02  -3.8726e-01   0.0000e+00   0.0000e+00   2.8870e-01
+       5:   0.0000e+00  -3.8726e-01   0.0000e+00   0.0000e+00   2.8870e-01
+      10:  -1.0936e-02   0.0000e+00  -2.5691e-01   0.0000e+00   0.0000e+00
+      15:  -3.8726e-01   0.0000e+00  -2.5691e-01   0.0000e+00  -1.0936e-02
+      20:   0.0000e+00   0.0000e+00  -2.5691e-01  -2.8870e-01
+    star w:
+       0:   1.2600e-01   9.8353e-01  -6.1766e-01  -3.7517e-01
+    star star w:
+       0:   3.7517e-01  -6.1766e-01  -9.8353e-01   1.2600e-01
+  k = 4:
+    (4 choose 4): 1
+      subset 0: 0 1 2 3 |, even
+    w:
+       0:  -2.5480e-01
+    v:
+       0:   1.0979e-01  -5.6223e-01  -6.8199e-02   6.1742e-02  -6.2533e-01
+       5:  -1.5664e-01  -3.9865e-01  -2.5882e-01   8.7131e-01   2.7764e-01
+      10:   2.1143e-01  -8.8580e-01   9.5433e-01   2.6095e-02   3.9046e-01
+      15:  -7.6788e-01
+    w(v): 0.0196566
+    wedge j = 0:
+      u:
+         0:   7.6849e-01
+      u wedge w:
+         0:  -1.9581e-01
+      x:
+         0:  -5.1168e-01  -2.8257e-01  -5.0308e-01  -4.1424e-01  -7.8720e-01
+         5:  -8.0635e-01   8.3809e-01  -6.9425e-01  -5.4188e-01   4.1450e-02
+        10:  -2.7054e-01   9.3937e-01  -8.3739e-01   3.8953e-01   6.9703e-01
+        15:   6.5400e-01
+      u wedge w(x): 0.179666
+      (u wedge):
+         0:   7.6849e-01
+    pullback M = 4:
+      L:
+         0:   6.0675e-01   4.7193e-01  -5.4014e-01   2.2124e-02   1.5380e-01
+         5:  -2.6649e-01   4.5758e-01   2.9025e-02   6.1407e-01  -6.9521e-01
+        10:  -9.8726e-01  -3.4833e-01  -8.2794e-01  -3.3814e-01  -3.8553e-01
+        15:   6.5902e-01
+      L*:
+         0:   4.3994e-01
+      L*w:
+         0:  -1.1210e-01
+    interior product matrix pattern:
+      intV[3,0] =  V[0]
+      intV[2,0] = -V[1]
+      intV[1,0] =  V[2]
+      intV[0,0] = -V[3]
+    (w int v_0):
+       0:   1.5732e-02   1.7377e-02  -1.4326e-01  -2.7975e-02
+    (int v_0):
+       0:  -6.1742e-02  -6.8199e-02   5.6223e-01   1.0979e-01
+    star w:
+       0:  -2.5480e-01
+    star star w:
+       0:  -2.5480e-01
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo_metis.out b/src/dm/dt/examples/tests/output/ex7_56.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_7_exo_metis.out
rename to src/dm/dt/examples/tests/output/ex7_56.out
diff --git a/src/dm/dt/fe/examples/makefile b/src/dm/dt/fe/examples/makefile
new file mode 100644
index 00000000000..f14281e41fe
--- /dev/null
+++ b/src/dm/dt/fe/examples/makefile
@@ -0,0 +1,9 @@
+
+ALL:
+
+LOCDIR   = src/dm/dt/fe/examples/
+DIRS     = tests
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules
+include ${PETSC_DIR}/lib/petsc/conf/test
diff --git a/src/dm/dt/fe/examples/tests/ex1.c b/src/dm/dt/fe/examples/tests/ex1.c
new file mode 100644
index 00000000000..d5627137bc7
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/ex1.c
@@ -0,0 +1,351 @@
+static const char help[] = "Performance Tests for FE Integration";
+
+#include <petscdmplex.h>
+#include <petscfe.h>
+#include <petscds.h>
+
+typedef struct {
+  PetscInt  dim;     /* The topological dimension */
+  PetscBool simplex; /* True for simplices, false for hexes */
+  PetscInt  its;     /* Number of replications for timing */
+  PetscInt  cbs;     /* Number of cells in an integration block */
+} AppCtx;
+
+static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  options->dim     = 2;
+  options->simplex = PETSC_TRUE;
+  options->its     = 1;
+  options->cbs     = 8;
+
+  ierr = PetscOptionsBegin(comm, "", "FE Integration Performance Options", "PETSCFE");CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-dim", "The topological dimension", "ex1.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-simplex", "Simplex or hex cells", "ex1.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-its", "The number of replications for timing", "ex1.c", options->its, &options->its, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-cbs", "The number of cells in an integration block", "ex1.c", options->cbs, &options->cbs, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEnd();CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode trig_u(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
+{
+  PetscInt d;
+  *u = 0.0;
+  for (d = 0; d < dim; ++d) *u += PetscSinReal(2.0*PETSC_PI*x[d]);
+  return 0;
+}
+
+static void f0_trig_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+                      const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+                      const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+                      PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
+{
+  PetscInt d;
+  for (d = 0; d < dim; ++d) f0[0] += -4.0*PetscSqr(PETSC_PI)*PetscSinReal(2.0*PETSC_PI*x[d]);
+}
+
+static void f1_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+                 const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+                 const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+                 PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
+{
+  PetscInt d;
+  for (d = 0; d < dim; ++d) f1[d] = u_x[d];
+}
+
+static void g3_uu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+                  const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+                  const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+                  PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
+{
+  PetscInt d;
+  for (d = 0; d < dim; ++d) g3[d*dim+d] = 1.0;
+}
+
+static PetscErrorCode SetupPrimalProblem(DM dm, AppCtx *user)
+{
+  PetscDS        prob;
+  const PetscInt id = 1;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
+  ierr = PetscDSSetResidual(prob, 0, f0_trig_u, f1_u);CHKERRQ(ierr);
+  ierr = PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, g3_uu);CHKERRQ(ierr);
+  ierr = PetscDSAddBoundary(prob, DM_BC_ESSENTIAL, "wall", "marker", 0, 0, NULL, (void (*)(void)) trig_u, 1, &id, user);CHKERRQ(ierr);
+  ierr = PetscDSSetExactSolution(prob, 0, trig_u, user);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode SetupDiscretization(DM dm, const char name[], PetscErrorCode (*setup)(DM, AppCtx *), AppCtx *user)
+{
+  DM             cdm = dm;
+  PetscFE        fe;
+  char           prefix[PETSC_MAX_PATH_LEN];
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  /* Create finite element */
+  ierr = PetscSNPrintf(prefix, PETSC_MAX_PATH_LEN, "%s_", name);CHKERRQ(ierr);
+  ierr = PetscFECreateDefault(PetscObjectComm((PetscObject) dm), user->dim, 1, user->simplex, name ? prefix : NULL, -1, &fe);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) fe, name);CHKERRQ(ierr);
+  /* Set discretization and boundary conditions for each mesh */
+  ierr = DMSetField(dm, 0, NULL, (PetscObject) fe);CHKERRQ(ierr);
+  ierr = DMCreateDS(dm);CHKERRQ(ierr);
+  ierr = (*setup)(dm, user);CHKERRQ(ierr);
+  while (cdm) {
+    ierr = DMCopyDisc(dm,cdm);CHKERRQ(ierr);
+    /* TODO: Check whether the boundary of coarse meshes is marked */
+    ierr = DMGetCoarseDM(cdm, &cdm);CHKERRQ(ierr);
+  }
+  ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscContainerUserDestroy_PetscFEGeom(void *ctx)
+{
+  PetscFEGeom   *geom = (PetscFEGeom *) ctx;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscFEGeomDestroy(&geom);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+PetscErrorCode CellRangeGetFEGeom(IS cellIS, DMField coordField, PetscQuadrature quad, PetscBool faceData, PetscFEGeom **geom)
+{
+  char            composeStr[33] = {0};
+  PetscObjectId   id;
+  PetscContainer  container;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscObjectGetId((PetscObject) quad, &id);CHKERRQ(ierr);
+  ierr = PetscSNPrintf(composeStr, 32, "CellRangeGetFEGeom_%x\n", id);CHKERRQ(ierr);
+  ierr = PetscObjectQuery((PetscObject) cellIS, composeStr, (PetscObject *) &container);CHKERRQ(ierr);
+  if (container) {
+    ierr = PetscContainerGetPointer(container, (void **) geom);CHKERRQ(ierr);
+  } else {
+    ierr = DMFieldCreateFEGeom(coordField, cellIS, quad, faceData, geom);CHKERRQ(ierr);
+    ierr = PetscContainerCreate(PETSC_COMM_SELF, &container);CHKERRQ(ierr);
+    ierr = PetscContainerSetPointer(container, (void *) *geom);CHKERRQ(ierr);
+    ierr = PetscContainerSetUserDestroy(container, PetscContainerUserDestroy_PetscFEGeom);CHKERRQ(ierr);
+    ierr = PetscObjectCompose((PetscObject) cellIS, composeStr, (PetscObject) container);CHKERRQ(ierr);
+    ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+PetscErrorCode CellRangeRestoreFEGeom(IS cellIS, DMField coordField, PetscQuadrature quad, PetscBool faceData, PetscFEGeom **geom)
+{
+  PetscFunctionBegin;
+  *geom = NULL;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode CreateFEGeometry(DM dm, PetscDS ds, IS cellIS, PetscQuadrature *affineQuad, PetscFEGeom **affineGeom, PetscQuadrature **quads, PetscFEGeom ***geoms)
+{
+  DMField        coordField;
+  PetscInt       Nf, f, maxDegree;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  *affineQuad = NULL;
+  *affineGeom = NULL;
+  *quads      = NULL;
+  *geoms      = NULL;
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = DMGetCoordinateField(dm, &coordField);CHKERRQ(ierr);
+  ierr = DMFieldGetDegree(coordField, cellIS, NULL, &maxDegree);CHKERRQ(ierr);
+  if (maxDegree <= 1) {
+    ierr = DMFieldCreateDefaultQuadrature(coordField, cellIS, affineQuad);CHKERRQ(ierr);
+    if (*affineQuad) {ierr = CellRangeGetFEGeom(cellIS, coordField, *affineQuad, PETSC_FALSE, affineGeom);CHKERRQ(ierr);}
+  } else {
+    ierr = PetscCalloc2(Nf, quads, Nf, geoms);CHKERRQ(ierr);
+    for (f = 0; f < Nf; ++f) {
+      PetscFE fe;
+
+      ierr = PetscDSGetDiscretization(ds, f, (PetscObject *) &fe);CHKERRQ(ierr);
+      ierr = PetscFEGetQuadrature(fe, &(*quads)[f]);CHKERRQ(ierr);
+      ierr = PetscObjectReference((PetscObject) (*quads)[f]);CHKERRQ(ierr);
+      ierr = CellRangeGetFEGeom(cellIS, coordField, (*quads)[f], PETSC_FALSE, &(*geoms)[f]);CHKERRQ(ierr);
+    }
+  }
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode DestroyFEGeometry(DM dm, PetscDS ds, IS cellIS, PetscQuadrature *affineQuad, PetscFEGeom **affineGeom, PetscQuadrature **quads, PetscFEGeom ***geoms)
+{
+  DMField        coordField;
+  PetscInt       Nf, f;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = DMGetCoordinateField(dm, &coordField);CHKERRQ(ierr);
+  if (*affineQuad) {
+    ierr = CellRangeRestoreFEGeom(cellIS, coordField, *affineQuad, PETSC_FALSE, affineGeom);CHKERRQ(ierr);
+    ierr = PetscQuadratureDestroy(affineQuad);CHKERRQ(ierr);
+  } else {
+    for (f = 0; f < Nf; ++f) {
+      ierr = CellRangeRestoreFEGeom(cellIS, coordField, (*quads)[f], PETSC_FALSE, &(*geoms)[f]);CHKERRQ(ierr);
+      ierr = PetscQuadratureDestroy(&(*quads)[f]);CHKERRQ(ierr);
+    }
+    ierr = PetscFree2(*quads, *geoms);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode TestIntegration(DM dm, PetscInt cbs, PetscInt its)
+{
+  PetscDS         ds;
+  PetscFEGeom    *chunkGeom = NULL;
+  PetscQuadrature affineQuad,  *quads = NULL;
+  PetscFEGeom    *affineGeom, **geoms = NULL;
+  PetscScalar    *u, *elemVec;
+  IS              cellIS;
+  PetscInt        depth, cStart, cEnd, cell, chunkSize = cbs, Nch = 0, Nf, f, totDim, i, k;
+  PetscLogStage   stage;
+  PetscLogEvent   event;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscLogStageRegister("PetscFE Residual Integration Test", &stage);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("FEIntegRes", PETSCFE_CLASSID, &event);CHKERRQ(ierr);
+  ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
+  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
+  ierr = DMGetStratumIS(dm, "depth", depth, &cellIS);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMGetCellDS(dm, cStart, &ds);CHKERRQ(ierr);
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
+  ierr = CreateFEGeometry(dm, ds, cellIS, &affineQuad, &affineGeom, &quads, &geoms);CHKERRQ(ierr);
+  ierr = PetscMalloc2(chunkSize*totDim, &u, chunkSize*totDim, &elemVec);CHKERRQ(ierr);
+  /* Assumptions:
+    - Single field
+    - No input data
+    - No auxiliary data
+    - No time-dependence
+  */
+  for (i = 0; i < its; ++i) {
+    for (cell = cStart; cell < cEnd; cell += chunkSize, ++Nch) {
+      const PetscInt cS = cell, cE = PetscMin(cS + chunkSize, cEnd), Ne = cE - cS;
+
+      ierr = PetscArrayzero(elemVec, chunkSize*totDim);CHKERRQ(ierr);
+      /* TODO Replace with DMPlexGetCellFields() */
+      for (k = 0; k < chunkSize*totDim; ++k) u[k] = 1.0;
+      for (f = 0; f < Nf; ++f) {
+        PetscFEGeom    *geom = affineGeom ? affineGeom : geoms[f];
+        /* PetscQuadrature quad = affineQuad ? affineQuad : quads[f]; */
+
+        ierr = PetscFEGeomGetChunk(geom, cS, cE, &chunkGeom);CHKERRQ(ierr);
+        ierr = PetscLogEventBegin(event,0,0,0,0);CHKERRQ(ierr);
+        ierr = PetscFEIntegrateResidual(ds, f, Ne, chunkGeom, u, NULL, NULL, NULL, 0.0, elemVec);CHKERRQ(ierr);
+        ierr = PetscLogEventEnd(event,0,0,0,0);CHKERRQ(ierr);
+      }
+    }
+  }
+  ierr = PetscFEGeomRestoreChunk(affineGeom, cStart, cEnd, &chunkGeom);CHKERRQ(ierr);
+  ierr = DestroyFEGeometry(dm, ds, cellIS, &affineQuad, &affineGeom, &quads, &geoms);CHKERRQ(ierr);
+  ierr = ISDestroy(&cellIS);CHKERRQ(ierr);
+  ierr = PetscFree2(u, elemVec);CHKERRQ(ierr);
+  ierr = PetscLogStagePop();CHKERRQ(ierr);
+  {
+    const char        *title = "Petsc FE Residual Integration";
+    PetscEventPerfInfo eventInfo;
+    PetscInt           N = (cEnd - cStart)*Nf*its;
+    PetscReal          flopRate, cellRate;
+
+    ierr = PetscLogEventGetPerfInfo(stage, event, &eventInfo);CHKERRQ(ierr);
+    flopRate = eventInfo.time != 0.0 ? eventInfo.flops/eventInfo.time : 0.0;
+    cellRate = eventInfo.time != 0.0 ? N/eventInfo.time : 0.0;
+    ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s: %D integrals %D chunks %D reps\n  Cell rate: %.2f/s flop rate: %.2f MF/s\n", title, N, Nch, its, (double)cellRate, (double)(flopRate/1.e6));CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode TestIntegration2(DM dm, PetscInt cbs, PetscInt its)
+{
+  Vec             X, F;
+  PetscLogStage   stage;
+  PetscLogEvent   event;
+  PetscInt        i;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscLogStageRegister("DMPlex Residual Integration Test", &stage);CHKERRQ(ierr);
+  ierr = PetscLogEventGetId("DMPlexResidualFE", &event);CHKERRQ(ierr);
+  ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
+  ierr = DMGetLocalVector(dm, &X);CHKERRQ(ierr);
+  ierr = DMGetLocalVector(dm, &F);CHKERRQ(ierr);
+  for (i = 0; i < its; ++i) {
+    ierr = DMPlexSNESComputeResidualFEM(dm, X, F, NULL);CHKERRQ(ierr);
+  }
+  ierr = DMRestoreLocalVector(dm, &X);CHKERRQ(ierr);
+  ierr = DMRestoreLocalVector(dm, &F);CHKERRQ(ierr);
+  ierr = PetscLogStagePop();CHKERRQ(ierr);
+  {
+    const char        *title = "DMPlex Residual Integration";
+    PetscEventPerfInfo eventInfo;
+    PetscReal          flopRate, cellRate;
+    PetscInt           cStart, cEnd, Nf, N;
+
+    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+    ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
+    ierr = PetscLogEventGetPerfInfo(stage, event, &eventInfo);CHKERRQ(ierr);
+    N        = (cEnd - cStart)*Nf*eventInfo.count;
+    flopRate = eventInfo.time != 0.0 ? eventInfo.flops/eventInfo.time : 0.0;
+    cellRate = eventInfo.time != 0.0 ? N/eventInfo.time : 0.0;
+    ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s: %D integrals %D reps\n  Cell rate: %.2f/s flop rate: %.2f MF/s\n", title, N, eventInfo.count, (double)cellRate, (double)(flopRate/1.e6));CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+int main(int argc, char **argv)
+{
+  DM             dm;
+  AppCtx         ctx;
+  PetscMPIInt    size;
+  PetscErrorCode ierr;
+
+  ierr = PetscInitialize(&argc, &argv, NULL, help); if (ierr) return ierr;
+  ierr = MPI_Comm_size(PETSC_COMM_WORLD, &size);CHKERRQ(ierr);
+  if (size > 1) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "This is a uniprocessor example only.");
+  ierr = ProcessOptions(PETSC_COMM_WORLD, &ctx);CHKERRQ(ierr);
+  ierr = PetscLogDefaultBegin();CHKERRQ(ierr);
+  ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, ctx.dim, ctx.simplex, NULL, NULL, NULL, NULL, PETSC_TRUE, &dm);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) dm, "Mesh");CHKERRQ(ierr);
+  ierr = PetscObjectViewFromOptions((PetscObject) dm, NULL, "-dm_view");CHKERRQ(ierr);
+  ierr = SetupDiscretization(dm, "potential", SetupPrimalProblem, &ctx);CHKERRQ(ierr);
+  ierr = TestIntegration(dm, ctx.cbs, ctx.its);CHKERRQ(ierr);
+  ierr = TestIntegration2(dm, ctx.cbs, ctx.its);CHKERRQ(ierr);
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+/*TEST
+  test:
+    suffix: 0
+    requires: triangle
+    args: -dm_view
+
+  test:
+    suffix: 1
+    requires: triangle
+    args: -dm_view -potential_petscspace_degree 1
+
+  test:
+    suffix: 2
+    requires: triangle
+    args: -dm_view -potential_petscspace_degree 2
+
+  test:
+    suffix: 3
+    requires: triangle
+    args: -dm_view -potential_petscspace_degree 3
+TEST*/
diff --git a/src/dm/dt/fe/examples/tests/makefile b/src/dm/dt/fe/examples/tests/makefile
new file mode 100644
index 00000000000..12c94b862a5
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/makefile
@@ -0,0 +1,11 @@
+
+CFLAGS	        =
+FFLAGS	        =
+CPPFLAGS        =
+FPPFLAGS        =
+LOCDIR          = src/dm/dt/fe/examples/tests/
+MANSEC          = DM
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules
+include ${PETSC_DIR}/lib/petsc/conf/test
diff --git a/src/dm/dt/fe/examples/tests/output/ex1_0.out b/src/dm/dt/fe/examples/tests/output/ex1_0.out
new file mode 100644
index 00000000000..f38143a5a6a
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/output/ex1_0.out
@@ -0,0 +1,14 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 9
+  1-cells: 16
+  2-cells: 8
+Labels:
+  depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
+  marker: 1 strata with value/size (1 (16))
+  Face Sets: 1 strata with value/size (1 (8))
+Petsc FE Residual Integration: 8 integrals 1 chunks 1 reps
+  Cell rate: 1.6e+05/s flop rate: 1.9 MF/s
+DMPlex Residual Integration: 8 integrals 1 reps
+  Cell rate: 1.9e+04/s flop rate: 0.39 MF/s
diff --git a/src/dm/dt/fe/examples/tests/output/ex1_1.out b/src/dm/dt/fe/examples/tests/output/ex1_1.out
new file mode 100644
index 00000000000..fb00422a768
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/output/ex1_1.out
@@ -0,0 +1,14 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 9
+  1-cells: 16
+  2-cells: 8
+Labels:
+  depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
+  marker: 1 strata with value/size (1 (16))
+  Face Sets: 1 strata with value/size (1 (8))
+Petsc FE Residual Integration: 8 integrals 1 chunks 1 reps
+  Cell rate: 1.302e+05/s flop rate: 12.5 MF/s
+DMPlex Residual Integration: 8 integrals 1 reps
+  Cell rate: 5.868e+04/s flop rate: 6.103 MF/s
diff --git a/src/dm/dt/fe/examples/tests/output/ex1_2.out b/src/dm/dt/fe/examples/tests/output/ex1_2.out
new file mode 100644
index 00000000000..2240f848ef9
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/output/ex1_2.out
@@ -0,0 +1,14 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 9
+  1-cells: 16
+  2-cells: 8
+Labels:
+  depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
+  marker: 1 strata with value/size (1 (16))
+  Face Sets: 1 strata with value/size (1 (8))
+Petsc FE Residual Integration: 8 integrals 1 chunks 1 reps
+  Cell rate: 92721.32/s flop rate: 35.05 MF/s
+DMPlex Residual Integration: 8 integrals 1 reps
+  Cell rate: 50356.92/s flop rate: 19.44 MF/s
diff --git a/src/dm/dt/fe/examples/tests/output/ex1_3.out b/src/dm/dt/fe/examples/tests/output/ex1_3.out
new file mode 100644
index 00000000000..fd1e02f1d45
--- /dev/null
+++ b/src/dm/dt/fe/examples/tests/output/ex1_3.out
@@ -0,0 +1,14 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 9
+  1-cells: 16
+  2-cells: 8
+Labels:
+  depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
+  marker: 1 strata with value/size (1 (16))
+  Face Sets: 1 strata with value/size (1 (8))
+Petsc FE Residual Integration: 8 integrals 1 chunks 1 reps
+  Cell rate: 4.648e+04/s flop rate: 49.09 MF/s
+DMPlex Residual Integration: 8 integrals 1 reps
+  Cell rate: 1.577e+04/s flop rate: 16.78 MF/s
diff --git a/src/dm/dt/fe/impls/basic/febasic.c b/src/dm/dt/fe/impls/basic/febasic.c
index 9b365272e5c..3186d84c481 100644
--- a/src/dm/dt/fe/impls/basic/febasic.c
+++ b/src/dm/dt/fe/impls/basic/febasic.c
@@ -106,13 +106,16 @@ PetscErrorCode PetscFEGetDimension_Basic(PetscFE fem, PetscInt *dim)
   PetscFunctionReturn(0);
 }
 
-PetscErrorCode PetscFEGetTabulation_Basic(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscReal *B, PetscReal *D, PetscReal *H)
+PetscErrorCode PetscFECreateTabulation_Basic(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscInt K, PetscTabulation T)
 {
   DM               dm;
   PetscInt         pdim; /* Dimension of FE space P */
   PetscInt         dim;  /* Spatial dimension */
   PetscInt         Nc;   /* Field components */
-  PetscReal       *tmpB, *tmpD, *tmpH;
+  PetscReal       *B = K >= 0 ? T->T[0] : NULL;
+  PetscReal       *D = K >= 1 ? T->T[1] : NULL;
+  PetscReal       *H = K >= 2 ? T->T[2] : NULL;
+  PetscReal       *tmpB = NULL, *tmpD = NULL, *tmpH = NULL;
   PetscInt         p, d, j, k, c;
   PetscErrorCode   ierr;
 
@@ -122,10 +125,10 @@ PetscErrorCode PetscFEGetTabulation_Basic(PetscFE fem, PetscInt npoints, const P
   ierr = PetscDualSpaceGetDimension(fem->dualSpace, &pdim);CHKERRQ(ierr);
   ierr = PetscFEGetNumComponents(fem, &Nc);CHKERRQ(ierr);
   /* Evaluate the prime basis functions at all points */
-  if (B) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
-  if (D) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
-  if (H) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
-  ierr = PetscSpaceEvaluate(fem->basisSpace, npoints, points, B ? tmpB : NULL, D ? tmpD : NULL, H ? tmpH : NULL);CHKERRQ(ierr);
+  if (K >= 0) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
+  if (K >= 1) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
+  if (K >= 2) {ierr = DMGetWorkArray(dm, npoints*pdim*Nc*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
+  ierr = PetscSpaceEvaluate(fem->basisSpace, npoints, points, tmpB, tmpD, tmpH);CHKERRQ(ierr);
   /* Translate to the nodal basis */
   for (p = 0; p < npoints; ++p) {
     if (B) {
@@ -172,9 +175,9 @@ PetscErrorCode PetscFEGetTabulation_Basic(PetscFE fem, PetscInt npoints, const P
       }
     }
   }
-  if (B) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
-  if (D) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
-  if (H) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
+  if (K >= 0) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
+  if (K >= 1) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
+  if (K >= 2) {ierr = DMRestoreWorkArray(dm, npoints*pdim*Nc*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
@@ -185,11 +188,11 @@ static PetscErrorCode PetscFEIntegrate_Basic(PetscDS ds, PetscInt field, PetscIn
   PetscFE            fe;
   PetscPointFunc     obj_func;
   PetscQuadrature    quad;
+  PetscTabulation   *T, *TAux = NULL;
   PetscScalar       *u, *u_x, *a, *a_x;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
   PetscInt           dim, dE, Np, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, cOffset = 0, cOffsetAux = 0, e;
   PetscBool          isAffine;
   const PetscReal   *quadPoints, *quadWeights;
@@ -204,23 +207,19 @@ static PetscErrorCode PetscFEIntegrate_Basic(PetscDS ds, PetscInt field, PetscIn
   ierr = PetscFEGetQuadrature(fe, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
+  ierr = PetscDSGetTabulation(ds, &T);CHKERRQ(ierr);
   ierr = PetscDSGetEvaluationArrays(ds, &u, NULL, &u_x);CHKERRQ(ierr);
   ierr = PetscDSGetWorkspace(ds, &x, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
-  ierr = PetscDSGetTabulation(ds, &B, &D);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
+    ierr = PetscDSGetTabulation(dsAux, &TAux);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
-    ierr = PetscDSGetTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);
   }
   ierr = PetscQuadratureGetData(quad, NULL, &qNc, &Nq, &quadPoints, &quadWeights);CHKERRQ(ierr);
   if (qNc != 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supports scalar quadrature, not %D components\n", qNc);
@@ -257,8 +256,8 @@ static PetscErrorCode PetscFEIntegrate_Basic(PetscDS ds, PetscInt field, PetscIn
 #endif
       }
       if (debug) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
-      ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, q, B, D, &fegeom, &coefficients[cOffset], NULL, u, u_x, NULL);CHKERRQ(ierr);
-      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dim, NfAux, NbAux, NcAux, q, BAux, DAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, 0, q, T, &fegeom, &coefficients[cOffset], NULL, u, u_x, NULL);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 0, q, TAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       obj_func(dim, Nf, NfAux, uOff, uOff_x, u, NULL, u_x, aOff, aOff_x, a, NULL, a_x, 0.0, fegeom.v, numConstants, constants, &integrand);
       integrand *= w;
       integral[e*Nf+field] += integrand;
@@ -277,11 +276,11 @@ static PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field,
   const PetscInt     debug = 0;
   PetscFE            fe;
   PetscQuadrature    quad;
+  PetscTabulation   *Tf, *TfAux = NULL;
   PetscScalar       *u, *u_x, *a, *a_x, *basisReal, *basisDerReal;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
   PetscBool          isAffine, auxOnBd;
   const PetscReal   *quadPoints, *quadWeights;
   PetscInt           qNc, Nq, q, Np, dE;
@@ -295,26 +294,22 @@ static PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field,
   ierr = PetscFEGetFaceQuadrature(fe, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
   ierr = PetscDSGetEvaluationArrays(ds, &u, NULL, &u_x);CHKERRQ(ierr);
   ierr = PetscDSGetWorkspace(ds, &x, &basisReal, &basisDerReal, NULL, NULL);CHKERRQ(ierr);
-  ierr = PetscDSGetFaceTabulation(ds, &B, &D);CHKERRQ(ierr);
+  ierr = PetscDSGetFaceTabulation(ds, &Tf);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetSpatialDimension(dsAux, &dimAux);CHKERRQ(ierr);
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
     auxOnBd = dimAux < dim ? PETSC_TRUE : PETSC_FALSE;
-    if (auxOnBd) {ierr = PetscDSGetTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);}
-    else         {ierr = PetscDSGetFaceTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);}
+    if (auxOnBd) {ierr = PetscDSGetTabulation(dsAux, &TfAux);CHKERRQ(ierr);}
+    else         {ierr = PetscDSGetFaceTabulation(dsAux, &TfAux);CHKERRQ(ierr);}
   }
   ierr = PetscQuadratureGetData(quad, NULL, &qNc, &Nq, &quadPoints, &quadWeights);CHKERRQ(ierr);
   if (qNc != 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supports scalar quadrature, not %D components\n", qNc);
@@ -365,8 +360,8 @@ static PetscErrorCode PetscFEIntegrateBd_Basic(PetscDS ds, PetscInt field,
 #endif
       }
       if (debug > 1) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
-      ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, face*Nq+q, B, D, &cgeom, &coefficients[cOffset], NULL, u, u_x, NULL);CHKERRQ(ierr);
-      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dimAux, NfAux, NbAux, NcAux, face*Nq+q, BAux, DAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, face, q, Tf, &cgeom, &coefficients[cOffset], NULL, u, u_x, NULL);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, face, q, TfAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       obj_func(dim, Nf, NfAux, uOff, uOff_x, u, NULL, u_x, aOff, aOff_x, a, NULL, a_x, 0.0, fegeom.v, fegeom.n, numConstants, constants, &integrand);
       integrand *= w;
       integral[e*Nf+field] += integrand;
@@ -386,12 +381,12 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscInt field, PetscI
   PetscPointFunc     f0_func;
   PetscPointFunc     f1_func;
   PetscQuadrature    quad;
+  PetscTabulation   *T, *TAux = NULL;
   PetscScalar       *f0, *f1, *u, *u_t = NULL, *u_x, *a, *a_x, *basisReal, *basisDerReal;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL, *BI, *DI;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
-  PetscInt           dim, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, cOffset = 0, cOffsetAux = 0, fOffset, e, NbI, NcI;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
+  PetscInt           dim, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, cOffset = 0, cOffsetAux = 0, fOffset, e;
   PetscBool          isAffine;
   const PetscReal   *quadPoints, *quadWeights;
   PetscInt           qNc, Nq, q, Np, dE;
@@ -403,8 +398,6 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscInt field, PetscI
   ierr = PetscFEGetQuadrature(fe, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
   ierr = PetscDSGetFieldOffset(ds, field, &fOffset);CHKERRQ(ierr);
@@ -413,22 +406,16 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscInt field, PetscI
   ierr = PetscDSGetWorkspace(ds, &x, &basisReal, &basisDerReal, NULL, NULL);CHKERRQ(ierr);
   ierr = PetscDSGetWeakFormArrays(ds, &f0, &f1, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
   if (!f0_func && !f1_func) PetscFunctionReturn(0);
-  ierr = PetscDSGetTabulation(ds, &B, &D);CHKERRQ(ierr);
+  ierr = PetscDSGetTabulation(ds, &T);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
-    ierr = PetscDSGetTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);
+    ierr = PetscDSGetTabulation(dsAux, &TAux);CHKERRQ(ierr);
   }
-  NbI = Nb[field];
-  NcI = Nc[field];
-  BI  = B[field];
-  DI  = D[field];
   ierr = PetscQuadratureGetData(quad, NULL, &qNc, &Nq, &quadPoints, &quadWeights);CHKERRQ(ierr);
   if (qNc != 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supports scalar quadrature, not %D components\n", qNc);
   Np = cgeom->numPoints;
@@ -444,8 +431,8 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscInt field, PetscI
       fegeom.invJ = &cgeom->invJ[e*dE*dE];
       fegeom.detJ = &cgeom->detJ[e];
     }
-    ierr = PetscArrayzero(f0, Nq*NcI);CHKERRQ(ierr);
-    ierr = PetscArrayzero(f1, Nq*NcI*dim);CHKERRQ(ierr);
+    ierr = PetscArrayzero(f0, Nq*T[field]->Nc);CHKERRQ(ierr);
+    ierr = PetscArrayzero(f1, Nq*T[field]->Nc*dim);CHKERRQ(ierr);
     for (q = 0; q < Nq; ++q) {
       PetscReal w;
       PetscInt  c, d;
@@ -466,18 +453,18 @@ PetscErrorCode PetscFEIntegrateResidual_Basic(PetscDS ds, PetscInt field, PetscI
 #endif
       }
       if (debug) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
-      ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, q, B, D, &fegeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);
-      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dim, NfAux, NbAux, NcAux, q, BAux, DAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, 0, q, T, &fegeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 0, q, TAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       if (f0_func) {
-        f0_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, fegeom.v, numConstants, constants, &f0[q*NcI]);
-        for (c = 0; c < NcI; ++c) f0[q*NcI+c] *= w;
+        f0_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, fegeom.v, numConstants, constants, &f0[q*T[field]->Nc]);
+        for (c = 0; c < T[field]->Nc; ++c) f0[q*T[field]->Nc+c] *= w;
       }
       if (f1_func) {
-        f1_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, fegeom.v, numConstants, constants, &f1[q*NcI*dim]);
-        for (c = 0; c < NcI; ++c) for (d = 0; d < dim; ++d) f1[(q*NcI+c)*dim+d] *= w;
+        f1_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, fegeom.v, numConstants, constants, &f1[q*T[field]->Nc*dim]);
+        for (c = 0; c < T[field]->Nc; ++c) for (d = 0; d < dim; ++d) f1[(q*T[field]->Nc+c)*dim+d] *= w;
       }
     }
-    ierr = PetscFEUpdateElementVec_Internal(fe, dim, Nq, NbI, NcI, BI, DI, basisReal, basisDerReal, &fegeom, f0, f1, &elemVec[cOffset+fOffset]);CHKERRQ(ierr);
+    ierr = PetscFEUpdateElementVec_Internal(fe, T[field], 0, basisReal, basisDerReal, &fegeom, f0, f1, &elemVec[cOffset+fOffset]);CHKERRQ(ierr);
     cOffset    += totDim;
     cOffsetAux += totDimAux;
   }
@@ -492,12 +479,12 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscInt field, Pets
   PetscBdPointFunc   f0_func;
   PetscBdPointFunc   f1_func;
   PetscQuadrature    quad;
+  PetscTabulation   *Tf, *TfAux = NULL;
   PetscScalar       *f0, *f1, *u, *u_t = NULL, *u_x, *a, *a_x, *basisReal, *basisDerReal;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL, *BI, *DI;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
-  PetscInt           dim, dimAux, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, cOffset = 0, cOffsetAux = 0, fOffset, e, NbI, NcI;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
+  PetscInt           dim, dimAux, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, cOffset = 0, cOffsetAux = 0, fOffset, e, NcI;
   PetscBool          isAffine, auxOnBd = PETSC_FALSE;
   const PetscReal   *quadPoints, *quadWeights;
   PetscInt           qNc, Nq, q, Np, dE;
@@ -509,8 +496,6 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscInt field, Pets
   ierr = PetscFEGetFaceQuadrature(fe, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
   ierr = PetscDSGetFieldOffset(ds, field, &fOffset);CHKERRQ(ierr);
@@ -519,25 +504,20 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscInt field, Pets
   ierr = PetscDSGetEvaluationArrays(ds, &u, coefficients_t ? &u_t : NULL, &u_x);CHKERRQ(ierr);
   ierr = PetscDSGetWorkspace(ds, &x, &basisReal, &basisDerReal, NULL, NULL);CHKERRQ(ierr);
   ierr = PetscDSGetWeakFormArrays(ds, &f0, &f1, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
-  ierr = PetscDSGetFaceTabulation(ds, &B, &D);CHKERRQ(ierr);
+  ierr = PetscDSGetFaceTabulation(ds, &Tf);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetSpatialDimension(dsAux, &dimAux);CHKERRQ(ierr);
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
     auxOnBd = dimAux < dim ? PETSC_TRUE : PETSC_FALSE;
-    if (auxOnBd) {ierr = PetscDSGetTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);}
-    else         {ierr = PetscDSGetFaceTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);}
+    if (auxOnBd) {ierr = PetscDSGetTabulation(dsAux, &TfAux);CHKERRQ(ierr);}
+    else         {ierr = PetscDSGetFaceTabulation(dsAux, &TfAux);CHKERRQ(ierr);}
   }
-  NbI = Nb[field];
-  NcI = Nc[field];
-  BI  = B[field];
-  DI  = D[field];
+  NcI = Tf[field]->Nc;
   ierr = PetscQuadratureGetData(quad, NULL, &qNc, &Nq, &quadPoints, &quadWeights);CHKERRQ(ierr);
   if (qNc != 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supports scalar quadrature, not %D components\n", qNc);
   Np = fgeom->numPoints;
@@ -589,8 +569,8 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscInt field, Pets
 #endif
       }
       if (debug) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
-      ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, face*Nq+q, B, D, &cgeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);
-      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dimAux, NfAux, NbAux, NcAux, auxOnBd ? q : face*Nq+q, BAux, DAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, face, q, Tf, &cgeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, auxOnBd ? 0 : face, q, TfAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       if (f0_func) {
         f0_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, fegeom.v, fegeom.n, numConstants, constants, &f0[q*NcI]);
         for (c = 0; c < NcI; ++c) f0[q*NcI+c] *= w;
@@ -600,7 +580,7 @@ PetscErrorCode PetscFEIntegrateBdResidual_Basic(PetscDS ds, PetscInt field, Pets
         for (c = 0; c < NcI; ++c) for (d = 0; d < dim; ++d) f1[(q*NcI+c)*dim+d] *= w;
       }
     }
-    ierr = PetscFEUpdateElementVec_Internal(fe, dim, Nq, NbI, NcI, &BI[face*Nq*NbI*NcI], &DI[face*Nq*NbI*NcI*dim], basisReal, basisDerReal, &cgeom, f0, f1, &elemVec[cOffset+fOffset]);CHKERRQ(ierr);
+    ierr = PetscFEUpdateElementVec_Internal(fe, Tf[field], face, basisReal, basisDerReal, &cgeom, f0, f1, &elemVec[cOffset+fOffset]);CHKERRQ(ierr);
     cOffset    += totDim;
     cOffsetAux += totDimAux;
   }
@@ -619,12 +599,12 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
   PetscInt           offsetI    = 0; /* Offset into an element vector for fieldI */
   PetscInt           offsetJ    = 0; /* Offset into an element vector for fieldJ */
   PetscQuadrature    quad;
+  PetscTabulation   *T, *TAux = NULL;
   PetscScalar       *g0, *g1, *g2, *g3, *u, *u_t = NULL, *u_x, *a, *a_x, *basisReal, *basisDerReal, *testReal, *testDerReal;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL, *BI, *DI, *BJ, *DJ;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
-  PetscInt           NbI = 0, NcI = 0, NbJ = 0, NcJ = 0;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
+  PetscInt           NcI = 0, NcJ = 0;
   PetscInt           dim, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, e;
   PetscInt           dE, Np;
   PetscBool          isAffine;
@@ -639,8 +619,6 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
   ierr = PetscFEGetQuadrature(feI, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
   switch(jtype) {
@@ -652,24 +630,19 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
   ierr = PetscDSGetEvaluationArrays(ds, &u, coefficients_t ? &u_t : NULL, &u_x);CHKERRQ(ierr);
   ierr = PetscDSGetWorkspace(ds, &x, &basisReal, &basisDerReal, &testReal, &testDerReal);CHKERRQ(ierr);
   ierr = PetscDSGetWeakFormArrays(ds, NULL, NULL, &g0, &g1, &g2, &g3);CHKERRQ(ierr);
-  ierr = PetscDSGetTabulation(ds, &B, &D);CHKERRQ(ierr);
+  ierr = PetscDSGetTabulation(ds, &T);CHKERRQ(ierr);
   ierr = PetscDSGetFieldOffset(ds, fieldI, &offsetI);CHKERRQ(ierr);
   ierr = PetscDSGetFieldOffset(ds, fieldJ, &offsetJ);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
-    ierr = PetscDSGetTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);
+    ierr = PetscDSGetTabulation(dsAux, &TAux);CHKERRQ(ierr);
   }
-  NbI = Nb[fieldI], NbJ = Nb[fieldJ];
-  NcI = Nc[fieldI], NcJ = Nc[fieldJ];
-  BI  = B[fieldI],  BJ  = B[fieldJ];
-  DI  = D[fieldI],  DJ  = D[fieldJ];
+  NcI = T[fieldI]->Nc, NcJ = T[fieldJ]->Nc;
   /* Initialize here in case the function is not defined */
   ierr = PetscArrayzero(g0, NcI*NcJ);CHKERRQ(ierr);
   ierr = PetscArrayzero(g1, NcI*NcJ*dim);CHKERRQ(ierr);
@@ -691,10 +664,8 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
       fegeom.detJ = &cgeom->detJ[e];
     }
     for (q = 0; q < Nq; ++q) {
-      const PetscReal *BIq = &BI[q*NbI*NcI],     *BJq = &BJ[q*NbJ*NcJ];
-      const PetscReal *DIq = &DI[q*NbI*NcI*dim], *DJq = &DJ[q*NbJ*NcJ*dim];
-      PetscReal        w;
-      PetscInt         c;
+      PetscReal w;
+      PetscInt  c;
 
       if (debug) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
       if (isAffine) {
@@ -706,8 +677,8 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
         fegeom.detJ = &cgeom->detJ[e*Np+q];
       }
       w = fegeom.detJ[0]*quadWeights[q];
-      if (coefficients) {ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, q, B, D, &fegeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);}
-      if (dsAux)      {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dim, NfAux, NbAux, NcAux, q, BAux, DAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      if (coefficients) {ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, 0, q, T, &fegeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);}
+      if (dsAux)        {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 0, q, TAux, &fegeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       if (g0_func) {
         ierr = PetscArrayzero(g0, NcI*NcJ);CHKERRQ(ierr);
         g0_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, u_tshift, fegeom.v, numConstants, constants, g0);
@@ -729,18 +700,18 @@ PetscErrorCode PetscFEIntegrateJacobian_Basic(PetscDS ds, PetscFEJacobianType jt
         for (c = 0; c < NcI*NcJ*dim*dim; ++c) g3[c] *= w;
       }
 
-      ierr = PetscFEUpdateElementMat_Internal(feI, feJ, dim, NbI, NcI, BIq, DIq, basisReal, basisDerReal, NbJ, NcJ, BJq, DJq, testReal, testDerReal, &fegeom, g0, g1, g2, g3, eOffset, totDim, offsetI, offsetJ, elemMat);CHKERRQ(ierr);
+      ierr = PetscFEUpdateElementMat_Internal(feI, feJ, 0, q, T[fieldI], basisReal, basisDerReal, T[fieldJ], testReal, testDerReal, &fegeom, g0, g1, g2, g3, eOffset, totDim, offsetI, offsetJ, elemMat);CHKERRQ(ierr);
     }
     if (debug > 1) {
       PetscInt fc, f, gc, g;
 
       ierr = PetscPrintf(PETSC_COMM_SELF, "Element matrix for fields %d and %d\n", fieldI, fieldJ);CHKERRQ(ierr);
-      for (fc = 0; fc < NcI; ++fc) {
-        for (f = 0; f < NbI; ++f) {
-          const PetscInt i = offsetI + f*NcI+fc;
-          for (gc = 0; gc < NcJ; ++gc) {
-            for (g = 0; g < NbJ; ++g) {
-              const PetscInt j = offsetJ + g*NcJ+gc;
+      for (fc = 0; fc < T[fieldI]->Nc; ++fc) {
+        for (f = 0; f < T[fieldI]->Nb; ++f) {
+          const PetscInt i = offsetI + f*T[fieldI]->Nc+fc;
+          for (gc = 0; gc < T[fieldJ]->Nc; ++gc) {
+            for (g = 0; g < T[fieldJ]->Nb; ++g) {
+              const PetscInt j = offsetJ + g*T[fieldJ]->Nc+gc;
               ierr = PetscPrintf(PETSC_COMM_SELF, "    elemMat[%d,%d,%d,%d]: %g\n", f, fc, g, gc, PetscRealPart(elemMat[eOffset+i*totDim+j]));CHKERRQ(ierr);
             }
           }
@@ -767,12 +738,12 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
   PetscInt           offsetI    = 0; /* Offset into an element vector for fieldI */
   PetscInt           offsetJ    = 0; /* Offset into an element vector for fieldJ */
   PetscQuadrature    quad;
+  PetscTabulation   *T, *TAux = NULL;
   PetscScalar       *g0, *g1, *g2, *g3, *u, *u_t = NULL, *u_x, *a, *a_x, *basisReal, *basisDerReal, *testReal, *testDerReal;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscReal        **B, **D, **BAux = NULL, **DAux = NULL, *BI, *DI, *BJ, *DJ;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
-  PetscInt           NbI = 0, NcI = 0, NbJ = 0, NcJ = 0;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL;
+  PetscInt           NcI = 0, NcJ = 0;
   PetscInt           dim, numConstants, Nf, NfAux = 0, totDim, totDimAux = 0, e;
   PetscBool          isAffine;
   const PetscReal   *quadPoints, *quadWeights;
@@ -786,8 +757,6 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
   ierr = PetscFEGetFaceQuadrature(feI, &quad);CHKERRQ(ierr);
   ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
   ierr = PetscDSGetTotalDimension(ds, &totDim);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(ds, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
   ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
   ierr = PetscDSGetFieldOffset(ds, fieldI, &offsetI);CHKERRQ(ierr);
@@ -797,22 +766,17 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
   ierr = PetscDSGetEvaluationArrays(ds, &u, coefficients_t ? &u_t : NULL, &u_x);CHKERRQ(ierr);
   ierr = PetscDSGetWorkspace(ds, &x, &basisReal, &basisDerReal, &testReal, &testDerReal);CHKERRQ(ierr);
   ierr = PetscDSGetWeakFormArrays(ds, NULL, NULL, &g0, &g1, &g2, &g3);CHKERRQ(ierr);
-  ierr = PetscDSGetFaceTabulation(ds, &B, &D);CHKERRQ(ierr);
+  ierr = PetscDSGetFaceTabulation(ds, &T);CHKERRQ(ierr);
   ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   if (dsAux) {
     ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = PetscDSGetTotalDimension(dsAux, &totDimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(dsAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(dsAux, &NcAux);CHKERRQ(ierr);
     ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
     ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
     ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL, &a_x);CHKERRQ(ierr);
-    ierr = PetscDSGetFaceTabulation(dsAux, &BAux, &DAux);CHKERRQ(ierr);
+    ierr = PetscDSGetFaceTabulation(dsAux, &TAux);CHKERRQ(ierr);
   }
-  NbI = Nb[fieldI], NbJ = Nb[fieldJ];
-  NcI = Nc[fieldI], NcJ = Nc[fieldJ];
-  BI  = B[fieldI],  BJ  = B[fieldJ];
-  DI  = D[fieldI],  DJ  = D[fieldJ];
+  NcI = T[fieldI]->Nc, NcJ = T[fieldJ]->Nc;
   /* Initialize here in case the function is not defined */
   ierr = PetscArrayzero(g0, NcI*NcJ);CHKERRQ(ierr);
   ierr = PetscArrayzero(g1, NcI*NcJ*dim);CHKERRQ(ierr);
@@ -847,10 +811,8 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
       cgeom.detJ  = &fgeom->suppDetJ[0][e];
     }
     for (q = 0; q < Nq; ++q) {
-      const PetscReal *BIq = &BI[(face*Nq+q)*NbI*NcI],     *BJq = &BJ[(face*Nq+q)*NbJ*NcJ];
-      const PetscReal *DIq = &DI[(face*Nq+q)*NbI*NcI*dim], *DJq = &DJ[(face*Nq+q)*NbJ*NcJ*dim];
-      PetscReal  w;
-      PetscInt   c;
+      PetscReal w;
+      PetscInt  c;
 
       if (debug) {ierr = PetscPrintf(PETSC_COMM_SELF, "  quad point %d\n", q);CHKERRQ(ierr);}
       if (isAffine) {
@@ -867,8 +829,8 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
         cgeom.detJ  = &fgeom->suppDetJ[0][e*Np+q];
       }
       w = fegeom.detJ[0]*quadWeights[q];
-      if (coefficients) {ierr = PetscFEEvaluateFieldJets_Internal(ds, dim, Nf, Nb, Nc, face*Nq+q, B, D, &cgeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);}
-      if (dsAux)      {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, dim, NfAux, NbAux, NcAux, face*Nq+q, BAux, DAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
+      if (coefficients) {ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, face, q, T, &cgeom, &coefficients[cOffset], &coefficients_t[cOffset], u, u_x, u_t);CHKERRQ(ierr);}
+      if (dsAux)        {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, face, q, TAux, &cgeom, &coefficientsAux[cOffsetAux], NULL, a, a_x, NULL);CHKERRQ(ierr);}
       if (g0_func) {
         ierr = PetscArrayzero(g0, NcI*NcJ);CHKERRQ(ierr);
         g0_func(dim, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, NULL, a_x, t, u_tshift, fegeom.v, fegeom.n, numConstants, constants, g0);
@@ -890,18 +852,18 @@ static PetscErrorCode PetscFEIntegrateBdJacobian_Basic(PetscDS ds, PetscInt fiel
         for (c = 0; c < NcI*NcJ*dim*dim; ++c) g3[c] *= w;
       }
 
-      ierr = PetscFEUpdateElementMat_Internal(feI, feJ, dim, NbI, NcI, BIq, DIq, basisReal, basisDerReal, NbJ, NcJ, BJq, DJq, testReal, testDerReal, &cgeom, g0, g1, g2, g3, eOffset, totDim, offsetI, offsetJ, elemMat);CHKERRQ(ierr);
+      ierr = PetscFEUpdateElementMat_Internal(feI, feJ, face, q, T[fieldI], basisReal, basisDerReal, T[fieldJ], testReal, testDerReal, &cgeom, g0, g1, g2, g3, eOffset, totDim, offsetI, offsetJ, elemMat);CHKERRQ(ierr);
     }
     if (debug > 1) {
       PetscInt fc, f, gc, g;
 
       ierr = PetscPrintf(PETSC_COMM_SELF, "Element matrix for fields %d and %d\n", fieldI, fieldJ);CHKERRQ(ierr);
-      for (fc = 0; fc < NcI; ++fc) {
-        for (f = 0; f < NbI; ++f) {
-          const PetscInt i = offsetI + f*NcI+fc;
-          for (gc = 0; gc < NcJ; ++gc) {
-            for (g = 0; g < NbJ; ++g) {
-              const PetscInt j = offsetJ + g*NcJ+gc;
+      for (fc = 0; fc < T[fieldI]->Nc; ++fc) {
+        for (f = 0; f < T[fieldI]->Nb; ++f) {
+          const PetscInt i = offsetI + f*T[fieldI]->Nc+fc;
+          for (gc = 0; gc < T[fieldJ]->Nc; ++gc) {
+            for (g = 0; g < T[fieldJ]->Nb; ++g) {
+              const PetscInt j = offsetJ + g*T[fieldJ]->Nc+gc;
               ierr = PetscPrintf(PETSC_COMM_SELF, "    elemMat[%d,%d,%d,%d]: %g\n", f, fc, g, gc, PetscRealPart(elemMat[eOffset+i*totDim+j]));CHKERRQ(ierr);
             }
           }
@@ -924,7 +886,7 @@ static PetscErrorCode PetscFEInitialize_Basic(PetscFE fem)
   fem->ops->view                    = PetscFEView_Basic;
   fem->ops->destroy                 = PetscFEDestroy_Basic;
   fem->ops->getdimension            = PetscFEGetDimension_Basic;
-  fem->ops->gettabulation           = PetscFEGetTabulation_Basic;
+  fem->ops->createtabulation        = PetscFECreateTabulation_Basic;
   fem->ops->integrate               = PetscFEIntegrate_Basic;
   fem->ops->integratebd             = PetscFEIntegrateBd_Basic;
   fem->ops->integrateresidual       = PetscFEIntegrateResidual_Basic;
diff --git a/src/dm/dt/fe/impls/composite/fecomposite.c b/src/dm/dt/fe/impls/composite/fecomposite.c
index ec9bedb4482..18948786012 100644
--- a/src/dm/dt/fe/impls/composite/fecomposite.c
+++ b/src/dm/dt/fe/impls/composite/fecomposite.c
@@ -97,7 +97,7 @@ static PetscErrorCode PetscFESetUp_Composite(PetscFE fem)
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscFEGetTabulation_Composite(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscReal *B, PetscReal *D, PetscReal *H)
+static PetscErrorCode PetscFECreateTabulation_Composite(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscInt K, PetscTabulation T)
 {
   PetscFE_Composite *cmp = (PetscFE_Composite *) fem->data;
   DM                 dm;
@@ -106,7 +106,10 @@ static PetscErrorCode PetscFEGetTabulation_Composite(PetscFE fem, PetscInt npoin
   PetscInt           dim;   /* Spatial dimension */
   PetscInt           comp;  /* Field components */
   PetscInt          *subpoints;
-  PetscReal         *tmpB, *tmpD, *tmpH, *subpoint;
+  PetscReal         *B = K >= 0 ? T->T[0] : NULL;
+  PetscReal         *D = K >= 1 ? T->T[1] : NULL;
+  PetscReal         *H = K >= 2 ? T->T[2] : NULL;
+  PetscReal         *tmpB = NULL, *tmpD = NULL, *tmpH = NULL, *subpoint;
   PetscInt           p, s, d, e, j, k;
   PetscErrorCode     ierr;
 
@@ -135,14 +138,14 @@ static PetscErrorCode PetscFEGetTabulation_Composite(PetscFE fem, PetscInt npoin
   }
   ierr = DMRestoreWorkArray(dm, dim, MPIU_REAL, &subpoint);CHKERRQ(ierr);
   /* Evaluate the prime basis functions at all points */
-  if (B) {ierr = DMGetWorkArray(dm, npoints*spdim, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
-  if (D) {ierr = DMGetWorkArray(dm, npoints*spdim*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
-  if (H) {ierr = DMGetWorkArray(dm, npoints*spdim*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
-  ierr = PetscSpaceEvaluate(fem->basisSpace, npoints, points, B ? tmpB : NULL, D ? tmpD : NULL, H ? tmpH : NULL);CHKERRQ(ierr);
+  if (K >= 0) {ierr = DMGetWorkArray(dm, npoints*spdim, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
+  if (K >= 1) {ierr = DMGetWorkArray(dm, npoints*spdim*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
+  if (K >= 2) {ierr = DMGetWorkArray(dm, npoints*spdim*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
+  ierr = PetscSpaceEvaluate(fem->basisSpace, npoints, points, tmpB, tmpD, tmpH);CHKERRQ(ierr);
   /* Translate to the nodal basis */
-  if (B) {ierr = PetscArrayzero(B, npoints*pdim*comp);CHKERRQ(ierr);}
-  if (D) {ierr = PetscArrayzero(D, npoints*pdim*comp*dim);CHKERRQ(ierr);}
-  if (H) {ierr = PetscArrayzero(H, npoints*pdim*comp*dim*dim);CHKERRQ(ierr);}
+  if (K >= 0) {ierr = PetscArrayzero(B, npoints*pdim*comp);CHKERRQ(ierr);}
+  if (K >= 1) {ierr = PetscArrayzero(D, npoints*pdim*comp*dim);CHKERRQ(ierr);}
+  if (K >= 2) {ierr = PetscArrayzero(H, npoints*pdim*comp*dim*dim);CHKERRQ(ierr);}
   for (p = 0; p < npoints; ++p) {
     const PetscInt s = subpoints[p];
 
@@ -185,9 +188,9 @@ static PetscErrorCode PetscFEGetTabulation_Composite(PetscFE fem, PetscInt npoin
     }
   }
   ierr = DMRestoreWorkArray(dm, npoints, MPIU_INT, &subpoints);CHKERRQ(ierr);
-  if (B) {ierr = DMRestoreWorkArray(dm, npoints*spdim, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
-  if (D) {ierr = DMRestoreWorkArray(dm, npoints*spdim*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
-  if (H) {ierr = DMRestoreWorkArray(dm, npoints*spdim*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
+  if (K >= 0) {ierr = DMRestoreWorkArray(dm, npoints*spdim, MPIU_REAL, &tmpB);CHKERRQ(ierr);}
+  if (K >= 1) {ierr = DMRestoreWorkArray(dm, npoints*spdim*dim, MPIU_REAL, &tmpD);CHKERRQ(ierr);}
+  if (K >= 2) {ierr = DMRestoreWorkArray(dm, npoints*spdim*dim*dim, MPIU_REAL, &tmpH);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
@@ -199,7 +202,7 @@ static PetscErrorCode PetscFEInitialize_Composite(PetscFE fem)
   fem->ops->view                    = NULL;
   fem->ops->destroy                 = PetscFEDestroy_Composite;
   fem->ops->getdimension            = PetscFEGetDimension_Basic;
-  fem->ops->gettabulation           = PetscFEGetTabulation_Composite;
+  fem->ops->createtabulation        = PetscFECreateTabulation_Composite;
   fem->ops->integrateresidual       = PetscFEIntegrateResidual_Basic;
   fem->ops->integratebdresidual     = PetscFEIntegrateBdResidual_Basic;
   fem->ops->integratejacobianaction = NULL/* PetscFEIntegrateJacobianAction_Basic */;
diff --git a/src/dm/dt/fe/impls/opencl/feopencl.c b/src/dm/dt/fe/impls/opencl/feopencl.c
index b1dab07d0d7..26752e691a9 100644
--- a/src/dm/dt/fe/impls/opencl/feopencl.c
+++ b/src/dm/dt/fe/impls/opencl/feopencl.c
@@ -50,7 +50,7 @@ static PetscErrorCode PetscFEOpenCLGenerateIntegrationCode(PetscFE fem, char **s
   PetscBool       useF0         = PETSC_TRUE;
   PetscBool       useF1         = PETSC_TRUE;
   const PetscReal *points, *weights;
-  PetscReal      *basis, *basisDer;
+  PetscTabulation T;
   PetscInt        dim, qNc, N_b, N_c, N_q, N_t, p, d, b, c;
   size_t          count;
   PetscErrorCode  ierr;
@@ -102,7 +102,7 @@ static PetscErrorCode PetscFEOpenCLGenerateIntegrationCode(PetscFE fem, char **s
   }
   ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "};\n", &count);STRING_ERROR_CHECK("Message to short");
   /* Basis Functions */
-  ierr = PetscFEGetDefaultTabulation(fem, &basis, &basisDer, NULL);CHKERRQ(ierr);
+  ierr = PetscFEGetCellTabulation(fem, &T);CHKERRQ(ierr);
   ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail,
 "  /* Nodal basis function evaluations\n"
 "    - basis component is fastest varying, the basis function, then point */\n"
@@ -111,7 +111,7 @@ static PetscErrorCode PetscFEOpenCLGenerateIntegrationCode(PetscFE fem, char **s
   for (p = 0; p < N_q; ++p) {
     for (b = 0; b < N_b; ++b) {
       for (c = 0; c < N_c; ++c) {
-        ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "%g,\n", &count, basis[(p*N_b + b)*N_c + c]);STRING_ERROR_CHECK("Message to short");
+        ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "%g,\n", &count, T->T[0][(p*N_b + b)*N_c + c]);STRING_ERROR_CHECK("Message to short");
       }
     }
   }
@@ -128,9 +128,9 @@ static PetscErrorCode PetscFEOpenCLGenerateIntegrationCode(PetscFE fem, char **s
         ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "(%s%d)(", &count, numeric_str, dim);STRING_ERROR_CHECK("Message to short");
         for (d = 0; d < dim; ++d) {
           if (d > 0) {
-            ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, ", %g", &count, basisDer[((p*N_b + b)*dim + d)*N_c + c]);STRING_ERROR_CHECK("Message to short");
+            ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, ", %g", &count, T->T[1][((p*N_b + b)*dim + d)*N_c + c]);STRING_ERROR_CHECK("Message to short");
           } else {
-            ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "%g", &count, basisDer[((p*N_b + b)*dim + d)*N_c + c]);STRING_ERROR_CHECK("Message to short");
+            ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "%g", &count, T->T[1][((p*N_b + b)*dim + d)*N_c + c]);STRING_ERROR_CHECK("Message to short");
           }
         }
         ierr = PetscSNPrintfCount(string_tail, end_of_buffer - string_tail, "),\n", &count);STRING_ERROR_CHECK("Message to short");
@@ -773,8 +773,7 @@ static PetscErrorCode PetscFEIntegrateResidual_OpenCL(PetscDS prob, PetscInt fie
 }
 
 PETSC_EXTERN PetscErrorCode PetscFESetUp_Basic(PetscFE);
-PETSC_EXTERN PetscErrorCode PetscFEGetTabulation_Basic(PetscFE, PetscInt, const PetscReal [],
-                                                       PetscReal *, PetscReal *, PetscReal *);
+PETSC_EXTERN PetscErrorCode PetscFECreateTabulation_Basic(PetscFE, PetscInt, const PetscReal [], PetscInt, PetscTabulation);
 
 static PetscErrorCode PetscFEInitialize_OpenCL(PetscFE fem)
 {
@@ -784,7 +783,7 @@ static PetscErrorCode PetscFEInitialize_OpenCL(PetscFE fem)
   fem->ops->view                    = NULL;
   fem->ops->destroy                 = PetscFEDestroy_OpenCL;
   fem->ops->getdimension            = PetscFEGetDimension_Basic;
-  fem->ops->gettabulation           = PetscFEGetTabulation_Basic;
+  fem->ops->createtabulation        = PetscFECreateTabulation_Basic;
   fem->ops->integrateresidual       = PetscFEIntegrateResidual_OpenCL;
   fem->ops->integratebdresidual     = NULL/* PetscFEIntegrateBdResidual_OpenCL */;
   fem->ops->integratejacobianaction = NULL/* PetscFEIntegrateJacobianAction_OpenCL */;
diff --git a/src/dm/dt/fe/interface/fe.c b/src/dm/dt/fe/interface/fe.c
index 9d9b026646a..c16dc3f00be 100644
--- a/src/dm/dt/fe/interface/fe.c
+++ b/src/dm/dt/fe/interface/fe.c
@@ -163,6 +163,29 @@ PetscErrorCode PetscFEGetType(PetscFE fem, PetscFEType *name)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscFEViewFromOptions - View a PetscFE object based on values in the options database
+
+   Collective on PetscFE
+
+   Input Parameters:
++  A - the PetscFE object
+.  obj - Optional object that provides the options prefix
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscFE, PetscFEView(), PetscObjectViewFromOptions(), PetscFECreate()
+@*/
+PetscErrorCode  PetscFEViewFromOptions(PetscFE A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCFE_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
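For illustration, a minimal usage sketch (assuming `fe` is an existing PetscFE; the option name -fe_view is an arbitrary example, not one defined by this patch):

  PetscErrorCode ierr;

  ierr = PetscFEView(fe, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);    /* view unconditionally */
  ierr = PetscFEViewFromOptions(fe, NULL, "-fe_view");CHKERRQ(ierr);  /* view only when -fe_view is in the options database */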
+
 /*@C
   PetscFEView - Views a PetscFE
 
@@ -297,9 +320,9 @@ PetscErrorCode PetscFEDestroy(PetscFE *fem)
   }
   ierr = PetscFree((*fem)->subspaces);CHKERRQ(ierr);
   ierr = PetscFree((*fem)->invV);CHKERRQ(ierr);
-  ierr = PetscFERestoreTabulation((*fem), 0, NULL, &(*fem)->B, &(*fem)->D, NULL /*&(*fem)->H*/);CHKERRQ(ierr);
-  ierr = PetscFERestoreTabulation((*fem), 0, NULL, &(*fem)->Bf, &(*fem)->Df, NULL /*&(*fem)->Hf*/);CHKERRQ(ierr);
-  ierr = PetscFERestoreTabulation((*fem), 0, NULL, &(*fem)->F, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&(*fem)->T);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&(*fem)->Tf);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&(*fem)->Tc);CHKERRQ(ierr);
   ierr = PetscSpaceDestroy(&(*fem)->basisSpace);CHKERRQ(ierr);
   ierr = PetscDualSpaceDestroy(&(*fem)->dualSpace);CHKERRQ(ierr);
   ierr = PetscQuadratureDestroy(&(*fem)->quadrature);CHKERRQ(ierr);
@@ -343,12 +366,9 @@ PetscErrorCode PetscFECreate(MPI_Comm comm, PetscFE *fem)
   f->numComponents = 1;
   f->subspaces     = NULL;
   f->invV          = NULL;
-  f->B             = NULL;
-  f->D             = NULL;
-  f->H             = NULL;
-  f->Bf            = NULL;
-  f->Df            = NULL;
-  f->Hf            = NULL;
+  f->T             = NULL;
+  f->Tf            = NULL;
+  f->Tc            = NULL;
   ierr = PetscArrayzero(&f->quadrature, 1);CHKERRQ(ierr);
   ierr = PetscArrayzero(&f->faceQuadrature, 1);CHKERRQ(ierr);
   f->blockSize     = 0;
@@ -640,7 +660,8 @@ PetscErrorCode PetscFESetQuadrature(PetscFE fem, PetscQuadrature q)
   ierr = PetscFEGetNumComponents(fem, &Nc);CHKERRQ(ierr);
   ierr = PetscQuadratureGetNumComponents(q, &qNc);CHKERRQ(ierr);
   if ((qNc != 1) && (Nc != qNc)) SETERRQ2(PetscObjectComm((PetscObject) fem), PETSC_ERR_ARG_SIZ, "FE components %D != Quadrature components %D and non-scalar quadrature", Nc, qNc);
-  ierr = PetscFERestoreTabulation(fem, 0, NULL, &fem->B, &fem->D, NULL /*&(*fem)->H*/);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&fem->T);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&fem->Tc);CHKERRQ(ierr);
   ierr = PetscQuadratureDestroy(&fem->quadrature);CHKERRQ(ierr);
   fem->quadrature = q;
   ierr = PetscObjectReference((PetscObject) q);CHKERRQ(ierr);
@@ -686,11 +707,15 @@ PetscErrorCode PetscFEGetFaceQuadrature(PetscFE fem, PetscQuadrature *q)
 @*/
 PetscErrorCode PetscFESetFaceQuadrature(PetscFE fem, PetscQuadrature q)
 {
+  PetscInt       Nc, qNc;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  ierr = PetscFERestoreTabulation(fem, 0, NULL, &fem->Bf, &fem->Df, NULL /*&(*fem)->Hf*/);CHKERRQ(ierr);
+  ierr = PetscFEGetNumComponents(fem, &Nc);CHKERRQ(ierr);
+  ierr = PetscQuadratureGetNumComponents(q, &qNc);CHKERRQ(ierr);
+  if ((qNc != 1) && (Nc != qNc)) SETERRQ2(PetscObjectComm((PetscObject) fem), PETSC_ERR_ARG_SIZ, "FE components %D != Quadrature components %D and non-scalar quadrature", Nc, qNc);
+  ierr = PetscTabulationDestroy(&fem->Tf);CHKERRQ(ierr);
   ierr = PetscQuadratureDestroy(&fem->faceQuadrature);CHKERRQ(ierr);
   fem->faceQuadrature = q;
   ierr = PetscObjectReference((PetscObject) q);CHKERRQ(ierr);
@@ -752,28 +777,26 @@ PetscErrorCode PetscFEGetNumDof(PetscFE fem, const PetscInt **numDof)
 }
 
 /*@C
-  PetscFEGetDefaultTabulation - Returns the tabulation of the basis functions at the quadrature points
+  PetscFEGetCellTabulation - Returns the tabulation of the basis functions at the quadrature points on the reference cell
 
   Not collective
 
   Input Parameter:
 . fem - The PetscFE object
 
-  Output Parameters:
-+ B - The basis function values at quadrature points
-. D - The basis function derivatives at quadrature points
-- H - The basis function second derivatives at quadrature points
+  Output Parameter:
+. T - The basis function values and derivatives at quadrature points
 
   Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
+$ T->T[0] = B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
+$ T->T[1] = D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
+$ T->T[2] = H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFEGetTabulation(), PetscFERestoreTabulation()
+.seealso: PetscFECreateTabulation(), PetscTabulationDestroy()
 @*/
-PetscErrorCode PetscFEGetDefaultTabulation(PetscFE fem, PetscReal **B, PetscReal **D, PetscReal **H)
+PetscErrorCode PetscFEGetCellTabulation(PetscFE fem, PetscTabulation *T)
 {
   PetscInt         npoints;
   const PetscReal *points;
@@ -781,19 +804,15 @@ PetscErrorCode PetscFEGetDefaultTabulation(PetscFE fem, PetscReal **B, PetscReal
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  if (B) PetscValidPointer(B, 2);
-  if (D) PetscValidPointer(D, 3);
-  if (H) PetscValidPointer(H, 4);
+  PetscValidPointer(T, 2);
   ierr = PetscQuadratureGetData(fem->quadrature, NULL, NULL, &npoints, &points, NULL);CHKERRQ(ierr);
-  if (!fem->B) {ierr = PetscFEGetTabulation(fem, npoints, points, &fem->B, &fem->D, NULL/*&fem->H*/);CHKERRQ(ierr);}
-  if (B) *B = fem->B;
-  if (D) *D = fem->D;
-  if (H) *H = fem->H;
+  if (!fem->T) {ierr = PetscFECreateTabulation(fem, 1, npoints, points, 1, &fem->T);CHKERRQ(ierr);}
+  *T = fem->T;
   PetscFunctionReturn(0);
 }
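A minimal sketch of reading the cell tabulation through the new struct-based interface (assuming `fe` is a set-up PetscFE; the loop body is a placeholder):

  PetscTabulation T;
  PetscInt        p, b, c;
  PetscErrorCode  ierr;

  ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr);  /* borrowed reference owned by the PetscFE; do not destroy */
  for (p = 0; p < T->Np; ++p) {
    for (b = 0; b < T->Nb; ++b) {
      for (c = 0; c < T->Nc; ++c) {
        const PetscReal  val  = T->T[0][(p*T->Nb + b)*T->Nc + c];            /* basis value */
        const PetscReal *grad = &T->T[1][((p*T->Nb + b)*T->Nc + c)*T->cdim]; /* gradient, length cdim */
        (void) val; (void) grad;                                             /* use the tabulated data here */
      }
    }
  }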
 
 /*@C
-  PetscFEGetFaceTabulation - Returns the tabulation of the basis functions at the face quadrature points
+  PetscFEGetFaceTabulation - Returns the tabulation of the basis functions at the face quadrature points for each face of the reference cell
 
   Not collective
 
@@ -801,29 +820,25 @@ PetscErrorCode PetscFEGetDefaultTabulation(PetscFE fem, PetscReal **B, PetscReal
 . fem - The PetscFE object
 
   Output Parameters:
-+ B - The basis function values at face quadrature points
-. D - The basis function derivatives at face quadrature points
-- H - The basis function second derivatives at face quadrature points
+. Tf - The basis function values and derivatives at face quadrature points
 
   Note:
-$ Bf[((f*Nq + q)*pdim + i)*Nc + c] is the value at point f,q for basis function i and component c
-$ Df[(((f*Nq + q)*pdim + i)*Nc + c)*dim + d] is the derivative value at point f,q for basis function i, component c, in direction d
-$ Hf[((((f*Nq + q)*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point f,q for basis function i, component c, in directions d and e
+$ T->T[0] = Bf[((f*Nq + q)*pdim + i)*Nc + c] is the value at point f,q for basis function i and component c
+$ T->T[1] = Df[(((f*Nq + q)*pdim + i)*Nc + c)*dim + d] is the derivative value at point f,q for basis function i, component c, in direction d
+$ T->T[2] = Hf[((((f*Nq + q)*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point f,q for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFEGetDefaultTabulation(), PetscFEGetTabulation(), PetscFERestoreTabulation()
+.seealso: PetscFEGetCellTabulation(), PetscFECreateTabulation(), PetscTabulationDestroy()
 @*/
-PetscErrorCode PetscFEGetFaceTabulation(PetscFE fem, PetscReal **Bf, PetscReal **Df, PetscReal **Hf)
+PetscErrorCode PetscFEGetFaceTabulation(PetscFE fem, PetscTabulation *Tf)
 {
   PetscErrorCode   ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  if (Bf) PetscValidPointer(Bf, 2);
-  if (Df) PetscValidPointer(Df, 3);
-  if (Hf) PetscValidPointer(Hf, 4);
-  if (!fem->Bf) {
+  PetscValidPointer(Tf, 2);
+  if (!fem->Tf) {
     const PetscReal  xi0[3] = {-1., -1., -1.};
     PetscReal        v0[3], J[9], detJ;
     PetscQuadrature  fq;
@@ -847,18 +862,16 @@ PetscErrorCode PetscFEGetFaceTabulation(PetscFE fem, PetscReal **Bf, PetscReal *
         ierr = DMPlexComputeCellGeometryFEM(dm, faces[f], NULL, v0, J, NULL, &detJ);CHKERRQ(ierr);
         for (q = 0; q < npoints; ++q) CoordinatesRefToReal(dim, dim-1, xi0, v0, J, &points[q*(dim-1)], &facePoints[(f*npoints+q)*dim]);
       }
-      ierr = PetscFEGetTabulation(fem, numFaces*npoints, facePoints, &fem->Bf, &fem->Df, NULL/*&fem->Hf*/);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(fem, numFaces, npoints, facePoints, 1, &fem->Tf);CHKERRQ(ierr);
       ierr = PetscFree(facePoints);CHKERRQ(ierr);
     }
   }
-  if (Bf) *Bf = fem->Bf;
-  if (Df) *Df = fem->Df;
-  if (Hf) *Hf = fem->Hf;
+  *Tf = fem->Tf;
   PetscFunctionReturn(0);
 }
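The face tabulation is read the same way, except that the replica index runs over the faces of the reference cell (again a sketch assuming a set-up `fe`):

  PetscTabulation Tf;
  PetscInt        f, q, b, c;
  PetscErrorCode  ierr;

  ierr = PetscFEGetFaceTabulation(fe, &Tf);CHKERRQ(ierr);  /* Nr = number of faces, Np = points per face */
  for (f = 0; f < Tf->Nr; ++f)
    for (q = 0; q < Tf->Np; ++q)
      for (b = 0; b < Tf->Nb; ++b)
        for (c = 0; c < Tf->Nc; ++c) {
          const PetscReal Bf = Tf->T[0][((f*Tf->Np + q)*Tf->Nb + b)*Tf->Nc + c];
          (void) Bf;  /* trace of basis function b, component c, at point q of face f */
        }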
 
 /*@C
-  PetscFEGetFaceTabulation - Returns the tabulation of the basis functions at the face centroid points
+  PetscFEGetFaceCentroidTabulation - Returns the tabulation of the basis functions at the face centroid points
 
   Not collective
 
@@ -866,27 +879,23 @@ PetscErrorCode PetscFEGetFaceTabulation(PetscFE fem, PetscReal **Bf, PetscReal *
 . fem - The PetscFE object
 
   Output Parameters:
-+ B - The basis function values at face centroid points
-. D - The basis function derivatives at face centroid points
-- H - The basis function second derivatives at face centroid points
+. Tc - The basis function values at face centroid points
 
   Note:
-$ Bf[(f*pdim + i)*Nc + c] is the value at point f for basis function i and component c
-$ Df[((f*pdim + i)*Nc + c)*dim + d] is the derivative value at point f for basis function i, component c, in direction d
-$ Hf[(((f*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point f for basis function i, component c, in directions d and e
+$ T->T[0] = Bf[(f*pdim + i)*Nc + c] is the value at point f for basis function i and component c
 
   Level: intermediate
 
-.seealso: PetscFEGetFaceTabulation(), PetscFEGetDefaultTabulation(), PetscFEGetTabulation(), PetscFERestoreTabulation()
+.seealso: PetscFEGetFaceTabulation(), PetscFEGetCellTabulation(), PetscFECreateTabulation(), PetscTabulationDestroy()
 @*/
-PetscErrorCode PetscFEGetFaceCentroidTabulation(PetscFE fem, PetscReal **F)
+PetscErrorCode PetscFEGetFaceCentroidTabulation(PetscFE fem, PetscTabulation *Tc)
 {
   PetscErrorCode   ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  PetscValidPointer(F, 2);
-  if (!fem->F) {
+  PetscValidPointer(Tc, 2);
+  if (!fem->Tc) {
     PetscDualSpace  sp;
     DM              dm;
     const PetscInt *cone;
@@ -900,75 +909,77 @@ PetscErrorCode PetscFEGetFaceCentroidTabulation(PetscFE fem, PetscReal **F)
     ierr = DMPlexGetCone(dm, 0, &cone);CHKERRQ(ierr);
     ierr = PetscMalloc1(numFaces*dim, &centroids);CHKERRQ(ierr);
     for (f = 0; f < numFaces; ++f) {ierr = DMPlexComputeCellGeometryFVM(dm, cone[f], NULL, &centroids[f*dim], NULL);CHKERRQ(ierr);}
-    ierr = PetscFEGetTabulation(fem, numFaces, centroids, &fem->F, NULL, NULL);CHKERRQ(ierr);
+    ierr = PetscFECreateTabulation(fem, 1, numFaces, centroids, 0, &fem->Tc);CHKERRQ(ierr);
     ierr = PetscFree(centroids);CHKERRQ(ierr);
   }
-  *F = fem->F;
+  *Tc = fem->Tc;
   PetscFunctionReturn(0);
 }
 
 /*@C
-  PetscFEGetTabulation - Tabulates the basis functions, and perhaps derivatives, at the points provided.
+  PetscFECreateTabulation - Tabulates the basis functions, and perhaps derivatives, at the points provided.
 
   Not collective
 
   Input Parameters:
 + fem     - The PetscFE object
-. npoints - The number of tabulation points
-- points  - The tabulation point coordinates
+. nrepl   - The number of replicas
+. npoints - The number of tabulation points in a replica
+. points  - The tabulation point coordinates
+- K       - The highest order of derivative to tabulate
 
-  Output Parameters:
-+ B - The basis function values at tabulation points
-. D - The basis function derivatives at tabulation points
-- H - The basis function second derivatives at tabulation points
+  Output Parameter:
+. T - The basis function values and derivatives at tabulation points
 
   Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
+$ T->T[0] = B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
+$ T->T[1] = D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
+$ T->T[2] = H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFERestoreTabulation(), PetscFEGetDefaultTabulation()
+.seealso: PetscFEGetCellTabulation(), PetscTabulationDestroy()
 @*/
-PetscErrorCode PetscFEGetTabulation(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscReal **B, PetscReal **D, PetscReal **H)
+PetscErrorCode PetscFECreateTabulation(PetscFE fem, PetscInt nrepl, PetscInt npoints, const PetscReal points[], PetscInt K, PetscTabulation *T)
 {
   DM               dm;
-  PetscInt         pdim; /* Dimension of FE space P */
-  PetscInt         dim;  /* Spatial dimension */
-  PetscInt         comp; /* Field components */
+  PetscDualSpace   Q;
+  PetscInt         Nb;   /* Dimension of FE space P */
+  PetscInt         Nc;   /* Field components */
+  PetscInt         cdim; /* Reference coordinate dimension */
+  PetscInt         k;
   PetscErrorCode   ierr;
 
   PetscFunctionBegin;
-  if (!npoints) {
-    if (B) *B = NULL;
-    if (D) *D = NULL;
-    if (H) *H = NULL;
+  if (!npoints || !fem->dualSpace || K < 0) {
+    *T = NULL;
     PetscFunctionReturn(0);
   }
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  PetscValidPointer(points, 3);
-  if (B) PetscValidPointer(B, 4);
-  if (D) PetscValidPointer(D, 5);
-  if (H) PetscValidPointer(H, 6);
-  ierr = PetscDualSpaceGetDM(fem->dualSpace, &dm);CHKERRQ(ierr);
-  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = PetscDualSpaceGetDimension(fem->dualSpace, &pdim);CHKERRQ(ierr);
-  ierr = PetscFEGetNumComponents(fem, &comp);CHKERRQ(ierr);
-  if (B) {ierr = DMGetWorkArray(dm, npoints*pdim*comp, MPIU_REAL, B);CHKERRQ(ierr);}
-  if (!dim) {
-    if (D) *D = NULL;
-    if (H) *H = NULL;
-  } else {
-    if (D) {ierr = DMGetWorkArray(dm, npoints*pdim*comp*dim, MPIU_REAL, D);CHKERRQ(ierr);}
-    if (H) {ierr = DMGetWorkArray(dm, npoints*pdim*comp*dim*dim, MPIU_REAL, H);CHKERRQ(ierr);}
+  PetscValidPointer(points, 4);
+  PetscValidPointer(T, 6);
+  ierr = PetscFEGetDualSpace(fem, &Q);CHKERRQ(ierr);
+  ierr = PetscDualSpaceGetDM(Q, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &cdim);CHKERRQ(ierr);
+  ierr = PetscDualSpaceGetDimension(Q, &Nb);CHKERRQ(ierr);
+  ierr = PetscFEGetNumComponents(fem, &Nc);CHKERRQ(ierr);
+  ierr = PetscMalloc1(1, T);CHKERRQ(ierr);
+  (*T)->K    = !cdim ? 0 : K;
+  (*T)->Nr   = nrepl;
+  (*T)->Np   = npoints;
+  (*T)->Nb   = Nb;
+  (*T)->Nc   = Nc;
+  (*T)->cdim = cdim;
+  ierr = PetscMalloc1((*T)->K+1, &(*T)->T);CHKERRQ(ierr);
+  for (k = 0; k <= (*T)->K; ++k) {
+    ierr = PetscMalloc1(nrepl*npoints*Nb*Nc*PetscPowInt(cdim, k), &(*T)->T[k]);CHKERRQ(ierr);
   }
-  ierr = (*fem->ops->gettabulation)(fem, npoints, points, B ? *B : NULL, D ? *D : NULL, H ? *H : NULL);CHKERRQ(ierr);
+  ierr = (*fem->ops->createtabulation)(fem, nrepl*npoints, points, K, *T);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
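A short sketch of the create/destroy pairing (assuming `fe` is a set-up two-dimensional PetscFE; the reference-point coordinates are illustrative only):

  const PetscReal refpts[4] = {0.25, 0.25,  0.5, 0.0};  /* two example points on the reference cell */
  PetscTabulation T;
  PetscErrorCode  ierr;

  ierr = PetscFECreateTabulation(fe, 1, 2, refpts, 1, &T);CHKERRQ(ierr);  /* nrepl = 1, npoints = 2, K = 1 */
  /* T->T[0] holds values and T->T[1] holds first derivatives, laid out as described in the Note above */
  ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);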
 
 /*@C
-  PetscFERestoreTabulation - Frees memory from the associated tabulation.
+  PetscFEComputeTabulation - Tabulates the basis functions, and perhaps derivatives, at the points provided, filling an existing tabulation rather than allocating a new one.
 
   Not collective
 
@@ -976,30 +987,79 @@ PetscErrorCode PetscFEGetTabulation(PetscFE fem, PetscInt npoints, const PetscRe
 + fem     - The PetscFE object
 . npoints - The number of tabulation points
 . points  - The tabulation point coordinates
-. B - The basis function values at tabulation points
-. D - The basis function derivatives at tabulation points
-- H - The basis function second derivatives at tabulation points
+. K       - The highest order of derivative to tabulate
+- T       - An existing tabulation object with enough allocated space
+
+  Output Parameter:
+. T - The basis function values and derivatives at tabulation points
 
   Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
+$ T->T[0] = B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
+$ T->T[1] = D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
+$ T->T[2] = H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFEGetTabulation(), PetscFEGetDefaultTabulation()
+.seealso: PetscFEGetCellTabulation(), PetscTabulationDestroy()
 @*/
-PetscErrorCode PetscFERestoreTabulation(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscReal **B, PetscReal **D, PetscReal **H)
+PetscErrorCode PetscFEComputeTabulation(PetscFE fem, PetscInt npoints, const PetscReal points[], PetscInt K, PetscTabulation T)
 {
-  DM             dm;
   PetscErrorCode ierr;
 
-  PetscFunctionBegin;
+  PetscFunctionBeginHot;
+  if (!npoints || !fem->dualSpace || K < 0) PetscFunctionReturn(0);
   PetscValidHeaderSpecific(fem, PETSCFE_CLASSID, 1);
-  ierr = PetscDualSpaceGetDM(fem->dualSpace, &dm);CHKERRQ(ierr);
-  if (B && *B) {ierr = DMRestoreWorkArray(dm, 0, MPIU_REAL, B);CHKERRQ(ierr);}
-  if (D && *D) {ierr = DMRestoreWorkArray(dm, 0, MPIU_REAL, D);CHKERRQ(ierr);}
-  if (H && *H) {ierr = DMRestoreWorkArray(dm, 0, MPIU_REAL, H);CHKERRQ(ierr);}
+  PetscValidPointer(points, 3);
+  PetscValidPointer(T, 5);
+#if defined(PETSC_USE_DEBUG)
+  {
+    DM               dm;
+    PetscDualSpace   Q;
+    PetscInt         Nb;   /* Dimension of FE space P */
+    PetscInt         Nc;   /* Field components */
+    PetscInt         cdim; /* Reference coordinate dimension */
+
+    ierr = PetscFEGetDualSpace(fem, &Q);CHKERRQ(ierr);
+    ierr = PetscDualSpaceGetDM(Q, &dm);CHKERRQ(ierr);
+    ierr = DMGetDimension(dm, &cdim);CHKERRQ(ierr);
+    ierr = PetscDualSpaceGetDimension(Q, &Nb);CHKERRQ(ierr);
+    ierr = PetscFEGetNumComponents(fem, &Nc);CHKERRQ(ierr);
+    if (T->K    != (!cdim ? 0 : K)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Tabulation K %D must match requested K %D", T->K, !cdim ? 0 : K);
+    if (T->Nb   != Nb)              SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Tabulation Nb %D must match requested Nb %D", T->Nb, Nb);
+    if (T->Nc   != Nc)              SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Tabulation Nc %D must match requested Nc %D", T->Nc, Nc);
+    if (T->cdim != cdim)            SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Tabulation cdim %D must match requested cdim %D", T->cdim, cdim);
+  }
+#endif
+  T->Nr = 1;
+  T->Np = npoints;
+  ierr = (*fem->ops->createtabulation)(fem, npoints, points, K, T);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/*@C
+  PetscTabulationDestroy - Frees memory from the associated tabulation.
+
+  Not collective
+
+  Input Parameter:
+. T - The tabulation
+
+  Level: intermediate
+
+.seealso: PetscFECreateTabulation(), PetscFEGetCellTabulation()
+@*/
+PetscErrorCode PetscTabulationDestroy(PetscTabulation *T)
+{
+  PetscInt       k;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidPointer(T, 1);
+  if (!T || !(*T)) PetscFunctionReturn(0);
+  for (k = 0; k <= (*T)->K; ++k) {ierr = PetscFree((*T)->T[k]);CHKERRQ(ierr);}
+  ierr = PetscFree((*T)->T);CHKERRQ(ierr);
+  ierr = PetscFree(*T);CHKERRQ(ierr);
+  *T = NULL;
   PetscFunctionReturn(0);
 }
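Taken together, these routines support a reuse pattern in which the storage is allocated once and re-tabulated in place (a sketch under the same assumptions: a set-up two-dimensional `fe` and illustrative point coordinates):

  PetscReal       ptsA[2] = {0.1, 0.2}, ptsB[2] = {0.3, 0.4};
  PetscTabulation T;
  PetscErrorCode  ierr;

  ierr = PetscFECreateTabulation(fe, 1, 1, ptsA, 1, &T);CHKERRQ(ierr);  /* allocates storage and tabulates at ptsA */
  ierr = PetscFEComputeTabulation(fe, 1, ptsB, 1, T);CHKERRQ(ierr);     /* re-tabulates at ptsB into the same storage */
  ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);                      /* the caller owns tabulations it created */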
 
@@ -1647,6 +1707,9 @@ PetscErrorCode PetscFERefine(PetscFE fe, PetscFE *feRef)
   Output Parameter:
 . fem - The PetscFE object
 
+  Note:
+  Each object is set from the options database (SetFromOptions() is called) during creation, so that the object may be customized from the command line.
+
   Level: beginner
 
 .seealso: PetscFECreate(), PetscSpaceCreate(), PetscDualSpaceCreate()
@@ -1714,6 +1777,87 @@ PetscErrorCode PetscFECreateDefault(MPI_Comm comm, PetscInt dim, PetscInt Nc, Pe
   PetscFunctionReturn(0);
 }
 
+/*@
+  PetscFECreateLagrange - Create a PetscFE for the basic Lagrange space of degree k
+
+  Collective
+
+  Input Parameters:
++ comm      - The MPI comm
+. dim       - The spatial dimension
+. Nc        - The number of components
+. isSimplex - Flag for simplex reference cell; otherwise it is a tensor-product cell
+. k         - The degree k of the space
+- qorder    - The quadrature order or PETSC_DETERMINE to use PetscSpace polynomial degree
+
+  Output Parameter:
+. fem       - The PetscFE object
+
+  Level: beginner
+
+  Notes:
+  For simplices, this element is the space of polynomials of maximum total degree k; otherwise it is a tensor product of 1D polynomials, each of maximal degree k.
+
+.seealso: PetscFECreate(), PetscSpaceCreate(), PetscDualSpaceCreate()
+@*/
+PetscErrorCode PetscFECreateLagrange(MPI_Comm comm, PetscInt dim, PetscInt Nc, PetscBool isSimplex, PetscInt k, PetscInt qorder, PetscFE *fem)
+{
+  PetscQuadrature q, fq;
+  DM              K;
+  PetscSpace      P;
+  PetscDualSpace  Q;
+  PetscInt        quadPointsPerEdge;
+  PetscBool       tensor = isSimplex ? PETSC_FALSE : PETSC_TRUE;
+  char            name[64];
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  /* Create space */
+  ierr = PetscSpaceCreate(comm, &P);CHKERRQ(ierr);
+  ierr = PetscSpaceSetType(P, PETSCSPACEPOLYNOMIAL);CHKERRQ(ierr);
+  ierr = PetscSpacePolynomialSetTensor(P, tensor);CHKERRQ(ierr);
+  ierr = PetscSpaceSetNumComponents(P, Nc);CHKERRQ(ierr);
+  ierr = PetscSpaceSetNumVariables(P, dim);CHKERRQ(ierr);
+  ierr = PetscSpaceSetDegree(P, k, PETSC_DETERMINE);CHKERRQ(ierr);
+  ierr = PetscSpaceSetUp(P);CHKERRQ(ierr);
+  /* Create dual space */
+  ierr = PetscDualSpaceCreate(comm, &Q);CHKERRQ(ierr);
+  ierr = PetscDualSpaceSetType(Q, PETSCDUALSPACELAGRANGE);CHKERRQ(ierr);
+  ierr = PetscDualSpaceCreateReferenceCell(Q, dim, isSimplex, &K);CHKERRQ(ierr);
+  ierr = PetscDualSpaceSetDM(Q, K);CHKERRQ(ierr);
+  ierr = DMDestroy(&K);CHKERRQ(ierr);
+  ierr = PetscDualSpaceSetNumComponents(Q, Nc);CHKERRQ(ierr);
+  ierr = PetscDualSpaceSetOrder(Q, k);CHKERRQ(ierr);
+  ierr = PetscDualSpaceLagrangeSetTensor(Q, tensor);CHKERRQ(ierr);
+  ierr = PetscDualSpaceSetUp(Q);CHKERRQ(ierr);
+  /* Create element */
+  ierr = PetscFECreate(comm, fem);CHKERRQ(ierr);
+  ierr = PetscSNPrintf(name, 64, "P%d", (int) k);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) *fem, name);CHKERRQ(ierr);
+  ierr = PetscFESetType(*fem, PETSCFEBASIC);CHKERRQ(ierr);
+  ierr = PetscFESetBasisSpace(*fem, P);CHKERRQ(ierr);
+  ierr = PetscFESetDualSpace(*fem, Q);CHKERRQ(ierr);
+  ierr = PetscFESetNumComponents(*fem, Nc);CHKERRQ(ierr);
+  ierr = PetscFESetUp(*fem);CHKERRQ(ierr);
+  ierr = PetscSpaceDestroy(&P);CHKERRQ(ierr);
+  ierr = PetscDualSpaceDestroy(&Q);CHKERRQ(ierr);
+  /* Create quadrature (with specified order if given) */
+  qorder = qorder >= 0 ? qorder : k;
+  quadPointsPerEdge = PetscMax(qorder + 1,1);
+  if (isSimplex) {
+    ierr = PetscDTGaussJacobiQuadrature(dim,   1, quadPointsPerEdge, -1.0, 1.0, &q);CHKERRQ(ierr);
+    ierr = PetscDTGaussJacobiQuadrature(dim-1, 1, quadPointsPerEdge, -1.0, 1.0, &fq);CHKERRQ(ierr);
+  } else {
+    ierr = PetscDTGaussTensorQuadrature(dim,   1, quadPointsPerEdge, -1.0, 1.0, &q);CHKERRQ(ierr);
+    ierr = PetscDTGaussTensorQuadrature(dim-1, 1, quadPointsPerEdge, -1.0, 1.0, &fq);CHKERRQ(ierr);
+  }
+  ierr = PetscFESetQuadrature(*fem, q);CHKERRQ(ierr);
+  ierr = PetscFESetFaceQuadrature(*fem, fq);CHKERRQ(ierr);
+  ierr = PetscQuadratureDestroy(&q);CHKERRQ(ierr);
+  ierr = PetscQuadratureDestroy(&fq);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
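A self-contained sketch of the new convenience constructor (the choice of a scalar P2 element on a 3D simplex here is an arbitrary example):

  #include <petscfe.h>

  int main(int argc, char **argv)
  {
    PetscFE        fe;
    PetscErrorCode ierr;

    ierr = PetscInitialize(&argc, &argv, NULL, NULL);if (ierr) return ierr;
    /* scalar P_2 element on a 3D simplex; quadrature order defaults to the polynomial degree */
    ierr = PetscFECreateLagrange(PETSC_COMM_WORLD, 3, 1, PETSC_TRUE, 2, PETSC_DETERMINE, &fe);CHKERRQ(ierr);
    ierr = PetscFEView(fe, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
    ierr = PetscFinalize();
    return ierr;
  }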
+
 /*@C
   PetscFESetName - Names the FE and its subobjects
 
@@ -1742,31 +1886,34 @@ PetscErrorCode PetscFESetName(PetscFE fe, const char name[])
   PetscFunctionReturn(0);
 }
 
-PetscErrorCode PetscFEEvaluateFieldJets_Internal(PetscDS ds, PetscInt dim, PetscInt Nf, const PetscInt Nb[], const PetscInt Nc[], PetscInt q, PetscReal *basisField[], PetscReal *basisFieldDer[], PetscFEGeom *fegeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscScalar u[], PetscScalar u_x[], PetscScalar u_t[])
+PetscErrorCode PetscFEEvaluateFieldJets_Internal(PetscDS ds, PetscInt Nf, PetscInt r, PetscInt q, PetscTabulation T[], PetscFEGeom *fegeom, const PetscScalar coefficients[], const PetscScalar coefficients_t[], PetscScalar u[], PetscScalar u_x[], PetscScalar u_t[])
 {
   PetscInt       dOffset = 0, fOffset = 0, f;
   PetscErrorCode ierr;
 
   for (f = 0; f < Nf; ++f) {
     PetscFE          fe;
-    const PetscInt   Nbf = Nb[f], Ncf = Nc[f];
-    const PetscReal *Bq = &basisField[f][q*Nbf*Ncf];
-    const PetscReal *Dq = &basisFieldDer[f][q*Nbf*Ncf*dim];
+    const PetscInt   cdim = T[f]->cdim;
+    const PetscInt   Nq   = T[f]->Np;
+    const PetscInt   Nbf  = T[f]->Nb;
+    const PetscInt   Ncf  = T[f]->Nc;
+    const PetscReal *Bq   = &T[f]->T[0][(r*Nq+q)*Nbf*Ncf];
+    const PetscReal *Dq   = &T[f]->T[1][(r*Nq+q)*Nbf*Ncf*cdim];
     PetscInt         b, c, d;
 
     ierr = PetscDSGetDiscretization(ds, f, (PetscObject *) &fe);CHKERRQ(ierr);
-    for (c = 0; c < Ncf; ++c)     u[fOffset+c] = 0.0;
-    for (d = 0; d < dim*Ncf; ++d) u_x[fOffset*dim+d] = 0.0;
+    for (c = 0; c < Ncf; ++c) u[fOffset+c] = 0.0;
+    for (d = 0; d < cdim*Ncf; ++d) u_x[fOffset*cdim+d] = 0.0;
     for (b = 0; b < Nbf; ++b) {
       for (c = 0; c < Ncf; ++c) {
         const PetscInt cidx = b*Ncf+c;
 
         u[fOffset+c] += Bq[cidx]*coefficients[dOffset+b];
-        for (d = 0; d < dim; ++d) u_x[(fOffset+c)*dim+d] += Dq[cidx*dim+d]*coefficients[dOffset+b];
+        for (d = 0; d < cdim; ++d) u_x[(fOffset+c)*cdim+d] += Dq[cidx*cdim+d]*coefficients[dOffset+b];
       }
     }
     ierr = PetscFEPushforward(fe, fegeom, 1, &u[fOffset]);CHKERRQ(ierr);
-    ierr = PetscFEPushforwardGradient(fe, fegeom, 1, &u_x[fOffset*dim]);CHKERRQ(ierr);
+    ierr = PetscFEPushforwardGradient(fe, fegeom, 1, &u_x[fOffset*cdim]);CHKERRQ(ierr);
     if (u_t) {
       for (c = 0; c < Ncf; ++c) u_t[fOffset+c] = 0.0;
       for (b = 0; b < Nbf; ++b) {
@@ -1786,31 +1933,41 @@ PetscErrorCode PetscFEEvaluateFieldJets_Internal(PetscDS ds, PetscInt dim, Petsc
 
 PetscErrorCode PetscFEEvaluateFaceFields_Internal(PetscDS prob, PetscInt field, PetscInt faceLoc, const PetscScalar coefficients[], PetscScalar u[])
 {
-  PetscFE        fe;
-  PetscReal     *faceBasis;
-  PetscInt       Nb, Nc, b, c;
-  PetscErrorCode ierr;
+  PetscFE         fe;
+  PetscTabulation Tc;
+  PetscInt        b, c;
+  PetscErrorCode  ierr;
 
   if (!prob) return 0;
   ierr = PetscDSGetDiscretization(prob, field, (PetscObject *) &fe);CHKERRQ(ierr);
-  ierr = PetscFEGetDimension(fe, &Nb);CHKERRQ(ierr);
-  ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr);
-  ierr = PetscFEGetFaceCentroidTabulation(fe, &faceBasis);CHKERRQ(ierr);
-  for (c = 0; c < Nc; ++c) {u[c] = 0.0;}
-  for (b = 0; b < Nb; ++b) {
-    for (c = 0; c < Nc; ++c) {
-      const PetscInt cidx = b*Nc+c;
+  ierr = PetscFEGetFaceCentroidTabulation(fe, &Tc);CHKERRQ(ierr);
+  {
+    const PetscReal *faceBasis = Tc->T[0];
+    const PetscInt   Nb        = Tc->Nb;
+    const PetscInt   Nc        = Tc->Nc;
 
-      u[c] += coefficients[cidx]*faceBasis[faceLoc*Nb*Nc+cidx];
+    for (c = 0; c < Nc; ++c) {u[c] = 0.0;}
+    for (b = 0; b < Nb; ++b) {
+      for (c = 0; c < Nc; ++c) {
+        const PetscInt cidx = b*Nc+c;
+
+        u[c] += coefficients[cidx]*faceBasis[faceLoc*Nb*Nc+cidx];
+      }
     }
   }
   return 0;
 }
 
-PetscErrorCode PetscFEUpdateElementVec_Internal(PetscFE fe, PetscInt dim, PetscInt Nq, PetscInt Nb, PetscInt Nc, PetscReal basis[], PetscReal basisDer[], PetscScalar tmpBasis[], PetscScalar tmpBasisDer[], PetscFEGeom *fegeom, PetscScalar f0[], PetscScalar f1[], PetscScalar elemVec[])
+PetscErrorCode PetscFEUpdateElementVec_Internal(PetscFE fe, PetscTabulation T, PetscInt r, PetscScalar tmpBasis[], PetscScalar tmpBasisDer[], PetscFEGeom *fegeom, PetscScalar f0[], PetscScalar f1[], PetscScalar elemVec[])
 {
-  PetscInt       q, b, c, d;
-  PetscErrorCode ierr;
+  const PetscInt   dim      = T->cdim;
+  const PetscInt   Nq       = T->Np;
+  const PetscInt   Nb       = T->Nb;
+  const PetscInt   Nc       = T->Nc;
+  const PetscReal *basis    = &T->T[0][r*Nq*Nb*Nc];
+  const PetscReal *basisDer = &T->T[1][r*Nq*Nb*Nc*dim];
+  PetscInt         q, b, c, d;
+  PetscErrorCode   ierr;
 
   for (b = 0; b < Nb; ++b) elemVec[b] = 0.0;
   for (q = 0; q < Nq; ++q) {
@@ -1837,10 +1994,21 @@ PetscErrorCode PetscFEUpdateElementVec_Internal(PetscFE fe, PetscInt dim, PetscI
   return(0);
 }
 
-PetscErrorCode PetscFEUpdateElementMat_Internal(PetscFE feI, PetscFE feJ, PetscInt dim, PetscInt NbI, PetscInt NcI, const PetscReal basisI[], const PetscReal basisDerI[], PetscScalar tmpBasisI[], PetscScalar tmpBasisDerI[], PetscInt NbJ, PetscInt NcJ, const PetscReal basisJ[], const PetscReal basisDerJ[], PetscScalar tmpBasisJ[], PetscScalar tmpBasisDerJ[], PetscFEGeom *fegeom, const PetscScalar g0[], const PetscScalar g1[], const PetscScalar g2[], const PetscScalar g3[], PetscInt eOffset, PetscInt totDim, PetscInt offsetI, PetscInt offsetJ, PetscScalar elemMat[])
+PetscErrorCode PetscFEUpdateElementMat_Internal(PetscFE feI, PetscFE feJ, PetscInt r, PetscInt q, PetscTabulation TI, PetscScalar tmpBasisI[], PetscScalar tmpBasisDerI[], PetscTabulation TJ, PetscScalar tmpBasisJ[], PetscScalar tmpBasisDerJ[], PetscFEGeom *fegeom, const PetscScalar g0[], const PetscScalar g1[], const PetscScalar g2[], const PetscScalar g3[], PetscInt eOffset, PetscInt totDim, PetscInt offsetI, PetscInt offsetJ, PetscScalar elemMat[])
 {
-  PetscInt       f, fc, g, gc, df, dg;
-  PetscErrorCode ierr;
+  const PetscInt   dim       = TI->cdim;
+  const PetscInt   NqI       = TI->Np;
+  const PetscInt   NbI       = TI->Nb;
+  const PetscInt   NcI       = TI->Nc;
+  const PetscReal *basisI    = &TI->T[0][(r*NqI+q)*NbI*NcI];
+  const PetscReal *basisDerI = &TI->T[1][(r*NqI+q)*NbI*NcI*dim];
+  const PetscInt   NqJ       = TJ->Np;
+  const PetscInt   NbJ       = TJ->Nb;
+  const PetscInt   NcJ       = TJ->Nc;
+  const PetscReal *basisJ    = &TJ->T[0][(r*NqJ+q)*NbJ*NcJ];
+  const PetscReal *basisDerJ = &TJ->T[1][(r*NqJ+q)*NbJ*NcJ*dim];
+  PetscInt         f, fc, g, gc, df, dg;
+  PetscErrorCode   ierr;
 
   for (f = 0; f < NbI; ++f) {
     for (fc = 0; fc < NcI; ++fc) {
diff --git a/src/dm/dt/fe/interface/ftn-custom/makefile b/src/dm/dt/fe/interface/ftn-custom/makefile
new file mode 100644
index 00000000000..ab71b7242a2
--- /dev/null
+++ b/src/dm/dt/fe/interface/ftn-custom/makefile
@@ -0,0 +1,15 @@
+
+#requiresdefine   'PETSC_HAVE_FORTRAN'
+ALL: lib
+CFLAGS   =
+FFLAGS   =
+SOURCEC  = zfef.c
+SOURCEF  =
+SOURCEH  =
+DIRS     =
+LIBBASE  = libpetscdm
+LOCDIR   = src/dm/dt/fe/interface/ftn-custom/
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules
+include ${PETSC_DIR}/lib/petsc/conf/test
diff --git a/src/dm/dt/fe/interface/ftn-custom/zfef.c b/src/dm/dt/fe/interface/ftn-custom/zfef.c
new file mode 100644
index 00000000000..297fe69f34f
--- /dev/null
+++ b/src/dm/dt/fe/interface/ftn-custom/zfef.c
@@ -0,0 +1,42 @@
+#include 
+#include 
+#include 
+
+#if defined(PETSC_HAVE_FORTRAN_CAPS)
+#define petscspaceviewfromoptions_   PETSCSPACEVIEWFROMOPTIONS
+#define petscdualspaceviewfromoptions_   PETSCDUALSPACEVIEWFROMOPTIONS
+#define petscfeviewfromoptions_   PETSCFEVIEWFROMOPTIONS
+#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
+#define petscspaceviewfromoptions_   petscspaceviewfromoptions
+#define petscdualspaceviewfromoptions_   petscdualspaceviewfromoptions
+#define petscfeviewfromoptions_   petscfeviewfromoptions
+#endif
+
+PETSC_EXTERN void PETSC_STDCALL petscspaceviewfromoptions_(PetscSpace *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscSpaceViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
+PETSC_EXTERN void PETSC_STDCALL petscdualspaceviewfromoptions_(PetscDualSpace *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscDualSpaceViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
+PETSC_EXTERN void PETSC_STDCALL petscfeviewfromoptions_(PetscFE *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscFEViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
+
diff --git a/src/dm/dt/fe/makefile b/src/dm/dt/fe/makefile
index f6e666a9c1b..cb37837b854 100644
--- a/src/dm/dt/fe/makefile
+++ b/src/dm/dt/fe/makefile
@@ -2,7 +2,7 @@
 ALL: lib
 
 SOURCEH   = ../../../../include/petscdt.h ../../../../include/petscfe.h ../../../../include/petscfetypes.h
-DIRS      = interface impls
+DIRS      = interface impls examples
 LOCDIR    = src/dm/dt/fe/
 MANSEC    = DM
 SUBMANSEC = FE
diff --git a/src/dm/dt/fv/interface/ftn-custom/makefile b/src/dm/dt/fv/interface/ftn-custom/makefile
new file mode 100644
index 00000000000..5beb3afb31e
--- /dev/null
+++ b/src/dm/dt/fv/interface/ftn-custom/makefile
@@ -0,0 +1,15 @@
+
+#requiresdefine   'PETSC_HAVE_FORTRAN'
+ALL: lib
+CFLAGS   =
+FFLAGS   =
+SOURCEC  = zfvf.c
+SOURCEF  =
+SOURCEH  =
+DIRS     =
+LIBBASE  = libpetscdm
+LOCDIR   = src/dm/dt/fv/interface/ftn-custom/
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules
+include ${PETSC_DIR}/lib/petsc/conf/test
diff --git a/src/dm/dt/fv/interface/ftn-custom/zfvf.c b/src/dm/dt/fv/interface/ftn-custom/zfvf.c
new file mode 100644
index 00000000000..959d3946056
--- /dev/null
+++ b/src/dm/dt/fv/interface/ftn-custom/zfvf.c
@@ -0,0 +1,29 @@
+#include 
+#include 
+#include 
+
+#if defined(PETSC_HAVE_FORTRAN_CAPS)
+#define petsclimiterviewfromoptions_  PETSCLIMITERVIEWFROMOPTIONS
+#define petscfvviewfromoptions_ PETSCFVVIEWFROMOPTIONS
+#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
+#define petsclimiterviewfromoptions_  petsclimiterviewfromoptions
+#define petscfvviewfromoptions_ petscfvviewfromoptions
+#endif
+
+PETSC_EXTERN void PETSC_STDCALL petsclimiterviewfromoptions_(PetscLimiter *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscLimiterViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
+PETSC_EXTERN void PETSC_STDCALL petscfvviewfromoptions_(PetscFV *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscFVViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
diff --git a/src/dm/dt/fv/interface/fv.c b/src/dm/dt/fv/interface/fv.c
index de363512b3c..dccee8750a3 100644
--- a/src/dm/dt/fv/interface/fv.c
+++ b/src/dm/dt/fv/interface/fv.c
@@ -123,6 +123,29 @@ PetscErrorCode PetscLimiterGetType(PetscLimiter lim, PetscLimiterType *name)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscLimiterViewFromOptions - View a PetscLimiter object based on values in the options database
+
+   Collective on PetscLimiter
+
+   Input Parameters:
++  A - the PetscLimiter object to view
+.  obj - Optional object that provides the options prefix
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscLimiter, PetscLimiterView(), PetscObjectViewFromOptions(), PetscLimiterCreate()
+@*/
+PetscErrorCode  PetscLimiterViewFromOptions(PetscLimiter A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCLIMITER_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@C
   PetscLimiterView - Views a PetscLimiter
 
@@ -1018,6 +1041,29 @@ PetscErrorCode PetscFVGetType(PetscFV fvm, PetscFVType *name)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscFVViewFromOptions - View a PetscFV object based on values in the options database
+
+   Collective on PetscFV
+
+   Input Parameters:
++  A - the PetscFV object
+.  obj - Optional object that provides the options prefix
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscFV, PetscFVView(), PetscObjectViewFromOptions(), PetscFVCreate()
+@*/
+PetscErrorCode  PetscFVViewFromOptions(PetscFV A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCFV_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@C
   PetscFVView - Views a PetscFV
 
@@ -1143,7 +1189,7 @@ PetscErrorCode PetscFVDestroy(PetscFV *fvm)
   ierr = PetscDualSpaceDestroy(&(*fvm)->dualSpace);CHKERRQ(ierr);
   ierr = PetscFree((*fvm)->fluxWork);CHKERRQ(ierr);
   ierr = PetscQuadratureDestroy(&(*fvm)->quadrature);CHKERRQ(ierr);
-  ierr = PetscFVRestoreTabulation((*fvm), 0, NULL, &(*fvm)->B, &(*fvm)->D, NULL /*&(*fvm)->H*/);CHKERRQ(ierr);
+  ierr = PetscTabulationDestroy(&(*fvm)->T);CHKERRQ(ierr);
 
   if ((*fvm)->ops->destroy) {ierr = (*(*fvm)->ops->destroy)(*fvm);CHKERRQ(ierr);}
   ierr = PetscHeaderDestroy(fvm);CHKERRQ(ierr);
@@ -1576,28 +1622,26 @@ PetscErrorCode PetscFVSetDualSpace(PetscFV fvm, PetscDualSpace sp)
 }
 
 /*@C
-  PetscFVGetDefaultTabulation - Returns the tabulation of the basis functions at the quadrature points
+  PetscFVGetCellTabulation - Returns the tabulation of the basis functions at the quadrature points
 
   Not collective
 
   Input Parameter:
 . fvm - The PetscFV object
 
-  Output Parameters:
-+ B - The basis function values at quadrature points
-. D - The basis function derivatives at quadrature points
-- H - The basis function second derivatives at quadrature points
+  Output Parameter:
+. T - The basis function values and derivatives at quadrature points
 
   Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
+$ T->T[0] = B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
+$ T->T[1] = D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
+$ T->T[2] = H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFEGetDefaultTabulation(), PetscFEGetTabulation(), PetscFERestoreTabulation(), PetscFVGetQuadrature(), PetscQuadratureGetData()
+.seealso: PetscFEGetCellTabulation(), PetscFVCreateTabulation(), PetscFVGetQuadrature(), PetscQuadratureGetData()
 @*/
-PetscErrorCode PetscFVGetDefaultTabulation(PetscFV fvm, PetscReal **B, PetscReal **D, PetscReal **H)
+PetscErrorCode PetscFVGetCellTabulation(PetscFV fvm, PetscTabulation *T)
 {
   PetscInt         npoints;
   const PetscReal *points;
@@ -1605,97 +1649,69 @@ PetscErrorCode PetscFVGetDefaultTabulation(PetscFV fvm, PetscReal **B, PetscReal
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(fvm, PETSCFV_CLASSID, 1);
-  if (B) PetscValidPointer(B, 2);
-  if (D) PetscValidPointer(D, 3);
-  if (H) PetscValidPointer(H, 4);
+  PetscValidPointer(T, 2);
   ierr = PetscQuadratureGetData(fvm->quadrature, NULL, NULL, &npoints, &points, NULL);CHKERRQ(ierr);
-  if (!fvm->B) {ierr = PetscFVGetTabulation(fvm, npoints, points, &fvm->B, &fvm->D, NULL/*&fvm->H*/);CHKERRQ(ierr);}
-  if (B) *B = fvm->B;
-  if (D) *D = fvm->D;
-  if (H) *H = fvm->H;
+  if (!fvm->T) {ierr = PetscFVCreateTabulation(fvm, 1, npoints, points, 1, &fvm->T);CHKERRQ(ierr);}
+  *T = fvm->T;
   PetscFunctionReturn(0);
 }
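A brief sketch of the FV counterpart (assuming `fv` is a set-up PetscFV); because the FV basis is piecewise constant, the contents are trivial:

  PetscTabulation T;
  PetscErrorCode  ierr;

  ierr = PetscFVGetCellTabulation(fv, &T);CHKERRQ(ierr);  /* borrowed reference owned by the PetscFV */
  /* T->Nb == 1, every entry of T->T[0] is 1.0, and T->T[1] is identically zero */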
 
 /*@C
-  PetscFVGetTabulation - Tabulates the basis functions, and perhaps derivatives, at the points provided.
+  PetscFVCreateTabulation - Tabulates the basis functions, and perhaps derivatives, at the points provided.
 
   Not collective
 
   Input Parameters:
 + fvm     - The PetscFV object
-. npoints - The number of tabulation points
-- points  - The tabulation point coordinates
+. nrepl   - The number of replicas
+. npoints - The number of tabulation points in a replica
+. points  - The tabulation point coordinates
+- K       - The highest order of derivative to tabulate
 
-  Output Parameters:
-+ B - The basis function values at tabulation points
-. D - The basis function derivatives at tabulation points
-- H - The basis function second derivatives at tabulation points
+  Output Parameter:
+. T - The basis function values and derivatives at tabulation points
 
   Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
+$ T->T[0] = B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
+$ T->T[1] = D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
+$ T->T[2] = H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
 
   Level: intermediate
 
-.seealso: PetscFEGetTabulation(), PetscFERestoreTabulation(), PetscFEGetDefaultTabulation()
+.seealso: PetscFECreateTabulation(), PetscTabulationDestroy(), PetscFEGetCellTabulation()
 @*/
-PetscErrorCode PetscFVGetTabulation(PetscFV fvm, PetscInt npoints, const PetscReal points[], PetscReal **B, PetscReal **D, PetscReal **H)
+PetscErrorCode PetscFVCreateTabulation(PetscFV fvm, PetscInt nrepl, PetscInt npoints, const PetscReal points[], PetscInt K, PetscTabulation *T)
 {
   PetscInt         pdim = 1; /* Dimension of approximation space P */
-  PetscInt         dim;      /* Spatial dimension */
-  PetscInt         comp;     /* Field components */
-  PetscInt         p, d, c, e;
+  PetscInt         cdim;     /* Spatial dimension */
+  PetscInt         Nc;       /* Field components */
+  PetscInt         k, p, d, c, e;
   PetscErrorCode   ierr;
 
   PetscFunctionBegin;
+  if (!npoints || K < 0) {
+    *T = NULL;
+    PetscFunctionReturn(0);
+  }
   PetscValidHeaderSpecific(fvm, PETSCFV_CLASSID, 1);
-  PetscValidPointer(points, 3);
-  if (B) PetscValidPointer(B, 4);
-  if (D) PetscValidPointer(D, 5);
-  if (H) PetscValidPointer(H, 6);
-  ierr = PetscFVGetSpatialDimension(fvm, &dim);CHKERRQ(ierr);
-  ierr = PetscFVGetNumComponents(fvm, &comp);CHKERRQ(ierr);
-  if (B) {ierr = PetscMalloc1(npoints*pdim*comp, B);CHKERRQ(ierr);}
-  if (D) {ierr = PetscMalloc1(npoints*pdim*comp*dim, D);CHKERRQ(ierr);}
-  if (H) {ierr = PetscMalloc1(npoints*pdim*comp*dim*dim, H);CHKERRQ(ierr);}
-  if (B) {for (p = 0; p < npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < comp; ++c) (*B)[(p*pdim + d)*comp + c] = 1.0;}
-  if (D) {for (p = 0; p < npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < comp; ++c) for (e = 0; e < dim; ++e) (*D)[((p*pdim + d)*comp + c)*dim + e] = 0.0;}
-  if (H) {for (p = 0; p < npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < comp; ++c) for (e = 0; e < dim*dim; ++e) (*H)[((p*pdim + d)*comp + c)*dim*dim + e] = 0.0;}
-  PetscFunctionReturn(0);
-}
-
-/*@C
-  PetscFVRestoreTabulation - Frees memory from the associated tabulation.
-
-  Not collective
-
-  Input Parameters:
-+ fvm     - The PetscFV object
-. npoints - The number of tabulation points
-. points  - The tabulation point coordinates
-. B - The basis function values at tabulation points
-. D - The basis function derivatives at tabulation points
-- H - The basis function second derivatives at tabulation points
-
-  Note:
-$ B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c
-$ D[((p*pdim + i)*Nc + c)*dim + d] is the derivative value at point p for basis function i, component c, in direction d
-$ H[(((p*pdim + i)*Nc + c)*dim + d)*dim + e] is the value at point p for basis function i, component c, in directions d and e
-
-  Level: intermediate
-
-.seealso: PetscFVGetTabulation(), PetscFVGetDefaultTabulation()
-@*/
-PetscErrorCode PetscFVRestoreTabulation(PetscFV fvm, PetscInt npoints, const PetscReal points[], PetscReal **B, PetscReal **D, PetscReal **H)
-{
-  PetscErrorCode ierr;
-
-  PetscFunctionBegin;
-  PetscValidHeaderSpecific(fvm, PETSCFV_CLASSID, 1);
-  if (B && *B) {ierr = PetscFree(*B);CHKERRQ(ierr);}
-  if (D && *D) {ierr = PetscFree(*D);CHKERRQ(ierr);}
-  if (H && *H) {ierr = PetscFree(*H);CHKERRQ(ierr);}
+  PetscValidPointer(points, 4);
+  PetscValidPointer(T, 6);
+  ierr = PetscFVGetSpatialDimension(fvm, &cdim);CHKERRQ(ierr);
+  ierr = PetscFVGetNumComponents(fvm, &Nc);CHKERRQ(ierr);
+  ierr = PetscMalloc1(1, T);CHKERRQ(ierr);
+  (*T)->K    = !cdim ? 0 : K;
+  (*T)->Nr   = nrepl;
+  (*T)->Np   = npoints;
+  (*T)->Nb   = pdim;
+  (*T)->Nc   = Nc;
+  (*T)->cdim = cdim;
+  ierr = PetscMalloc1((*T)->K+1, &(*T)->T);CHKERRQ(ierr);
+  for (k = 0; k <= (*T)->K; ++k) {
+    ierr = PetscMalloc1(nrepl*npoints*pdim*Nc*PetscPowInt(cdim, k), &(*T)->T[k]);CHKERRQ(ierr);
+  }
+  if (K >= 0) {for (p = 0; p < nrepl*npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < Nc; ++c) (*T)->T[0][(p*pdim + d)*Nc + c] = 1.0;}
+  if (K >= 1) {for (p = 0; p < nrepl*npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < Nc; ++c) for (e = 0; e < cdim; ++e) (*T)->T[1][((p*pdim + d)*Nc + c)*cdim + e] = 0.0;}
+  if (K >= 2) {for (p = 0; p < nrepl*npoints; ++p) for (d = 0; d < pdim; ++d) for (c = 0; c < Nc; ++c) for (e = 0; e < cdim*cdim; ++e) (*T)->T[2][((p*pdim + d)*Nc + c)*cdim*cdim + e] = 0.0;}
   PetscFunctionReturn(0);
 }
 
diff --git a/src/dm/dt/interface/dt.c b/src/dm/dt/interface/dt.c
index 2a32d7d7d7b..a1a0dd08a55 100644
--- a/src/dm/dt/interface/dt.c
+++ b/src/dm/dt/interface/dt.c
@@ -22,6 +22,9 @@ const char       GaussCitation[] = "@article{GolubWelsch1969,\n"
                                    "  pages   = {221--230},\n"
                                    "  year    = {1969}\n}\n";
 
+
+PetscClassId PETSCQUADRATURE_CLASSID = 0;
+
 /*@
   PetscQuadratureCreate - Create a PetscQuadrature object
 
@@ -43,8 +46,8 @@ PetscErrorCode PetscQuadratureCreate(MPI_Comm comm, PetscQuadrature *q)
 
   PetscFunctionBegin;
   PetscValidPointer(q, 2);
-  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
-  ierr = PetscHeaderCreate(*q,PETSC_OBJECT_CLASSID,"PetscQuadrature","Quadrature","DT",comm,PetscQuadratureDestroy,PetscQuadratureView);CHKERRQ(ierr);
+  ierr = DMInitializePackage();CHKERRQ(ierr);
+  ierr = PetscHeaderCreate(*q,PETSCQUADRATURE_CLASSID,"PetscQuadrature","Quadrature","DT",comm,PetscQuadratureDestroy,PetscQuadratureView);CHKERRQ(ierr);
   (*q)->dim       = -1;
   (*q)->Nc        =  1;
   (*q)->order     = -1;
@@ -108,7 +111,7 @@ PetscErrorCode PetscQuadratureDestroy(PetscQuadrature *q)
 
   PetscFunctionBegin;
   if (!*q) PetscFunctionReturn(0);
-  PetscValidHeaderSpecific((*q),PETSC_OBJECT_CLASSID,1);
+  PetscValidHeaderSpecific((*q),PETSCQUADRATURE_CLASSID,1);
   if (--((PetscObject)(*q))->refct > 0) {
     *q = NULL;
     PetscFunctionReturn(0);
@@ -137,7 +140,7 @@ PetscErrorCode PetscQuadratureDestroy(PetscQuadrature *q)
 PetscErrorCode PetscQuadratureGetOrder(PetscQuadrature q, PetscInt *order)
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   PetscValidPointer(order, 2);
   *order = q->order;
   PetscFunctionReturn(0);
@@ -159,7 +162,7 @@ PetscErrorCode PetscQuadratureGetOrder(PetscQuadrature q, PetscInt *order)
 PetscErrorCode PetscQuadratureSetOrder(PetscQuadrature q, PetscInt order)
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   q->order = order;
   PetscFunctionReturn(0);
 }
@@ -184,7 +187,7 @@ PetscErrorCode PetscQuadratureSetOrder(PetscQuadrature q, PetscInt order)
 PetscErrorCode PetscQuadratureGetNumComponents(PetscQuadrature q, PetscInt *Nc)
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   PetscValidPointer(Nc, 2);
   *Nc = q->Nc;
   PetscFunctionReturn(0);
@@ -208,7 +211,7 @@ PetscErrorCode PetscQuadratureGetNumComponents(PetscQuadrature q, PetscInt *Nc)
 PetscErrorCode PetscQuadratureSetNumComponents(PetscQuadrature q, PetscInt Nc)
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   q->Nc = Nc;
   PetscFunctionReturn(0);
 }
@@ -238,7 +241,7 @@ PetscErrorCode PetscQuadratureSetNumComponents(PetscQuadrature q, PetscInt Nc)
 PetscErrorCode PetscQuadratureGetData(PetscQuadrature q, PetscInt *dim, PetscInt *Nc, PetscInt *npoints, const PetscReal *points[], const PetscReal *weights[])
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   if (dim) {
     PetscValidPointer(dim, 2);
     *dim = q->dim;
@@ -262,6 +265,175 @@ PetscErrorCode PetscQuadratureGetData(PetscQuadrature q, PetscInt *dim, PetscInt
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode PetscDTJacobianInverse_Internal(PetscInt m, PetscInt n, const PetscReal J[], PetscReal Jinv[])
+{
+  PetscScalar    *Js, *Jinvs;
+  PetscInt       i, j, k;
+  PetscBLASInt   bm, bn, info;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscBLASIntCast(m, &bm);CHKERRQ(ierr);
+  ierr = PetscBLASIntCast(n, &bn);CHKERRQ(ierr);
+#if defined(PETSC_USE_COMPLEX)
+  ierr = PetscMalloc2(m*n, &Js, m*n, &Jinvs);CHKERRQ(ierr);
+  for (i = 0; i < m*n; i++) Js[i] = J[i];
+#else
+  Js = (PetscReal *) J;
+  Jinvs = Jinv;
+#endif
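+  /* Compute the Moore-Penrose pseudoinverse of the m x n matrix J (stored row-major):
+     the true inverse when m == n, J^T (J J^T)^{-1} when m < n, and (J^T J)^{-1} J^T when m > n.
+     The result Jinv is n x m, row-major. */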
+  if (m == n) {
+    PetscBLASInt *pivots;
+    PetscScalar *W;
+
+    ierr = PetscMalloc2(m, &pivots, m, &W);CHKERRQ(ierr);
+
+    ierr = PetscArraycpy(Jinvs, Js, m * m);CHKERRQ(ierr);
+    PetscStackCallBLAS("LAPACKgetrf", LAPACKgetrf_(&bm, &bm, Jinvs, &bm, pivots, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetrf %D",(PetscInt)info);
+    PetscStackCallBLAS("LAPACKgetri", LAPACKgetri_(&bm, Jinvs, &bm, pivots, W, &bm, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetri %D",(PetscInt)info);
+    ierr = PetscFree2(pivots, W);CHKERRQ(ierr);
+  } else if (m < n) {
+    PetscScalar *JJT;
+    PetscBLASInt *pivots;
+    PetscScalar *W;
+
+    ierr = PetscMalloc1(m*m, &JJT);CHKERRQ(ierr);
+    ierr = PetscMalloc2(m, &pivots, m, &W);CHKERRQ(ierr);
+    for (i = 0; i < m; i++) {
+      for (j = 0; j < m; j++) {
+        PetscScalar val = 0.;
+
+        for (k = 0; k < n; k++) val += Js[i * n + k] * Js[j * n + k];
+        JJT[i * m + j] = val;
+      }
+    }
+
+    PetscStackCallBLAS("LAPACKgetrf", LAPACKgetrf_(&bm, &bm, JJT, &bm, pivots, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetrf %D",(PetscInt)info);
+    PetscStackCallBLAS("LAPACKgetri", LAPACKgetri_(&bm, JJT, &bm, pivots, W, &bm, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetri %D",(PetscInt)info);
+    for (i = 0; i < n; i++) {
+      for (j = 0; j < m; j++) {
+        PetscScalar val = 0.;
+
+        for (k = 0; k < m; k++) val += Js[k * n + i] * JJT[k * m + j];
+        Jinvs[i * m + j] = val;
+      }
+    }
+    ierr = PetscFree2(pivots, W);CHKERRQ(ierr);
+    ierr = PetscFree(JJT);CHKERRQ(ierr);
+  } else {
+    PetscScalar *JTJ;
+    PetscBLASInt *pivots;
+    PetscScalar *W;
+
+    ierr = PetscMalloc1(n*n, &JTJ);CHKERRQ(ierr);
+    ierr = PetscMalloc2(n, &pivots, n, &W);CHKERRQ(ierr);
+    for (i = 0; i < n; i++) {
+      for (j = 0; j < n; j++) {
+        PetscScalar val = 0.;
+
+        for (k = 0; k < m; k++) val += Js[k * n + i] * Js[k * n + j];
+        JTJ[i * n + j] = val;
+      }
+    }
+
+    PetscStackCallBLAS("LAPACKgetrf", LAPACKgetrf_(&bn, &bn, JTJ, &bm, pivots, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetrf %D",(PetscInt)info);
+    PetscStackCallBLAS("LAPACKgetri", LAPACKgetri_(&bn, JTJ, &bn, pivots, W, &bn, &info));
+    if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error returned from LAPACKgetri %D",(PetscInt)info);
+    for (i = 0; i < n; i++) {
+      for (j = 0; j < m; j++) {
+        PetscScalar val = 0.;
+
+        for (k = 0; k < n; k++) val += JTJ[i * n + k] * Js[j * n + k];
+        Jinvs[i * m + j] = val;
+      }
+    }
+    ierr = PetscFree2(pivots, W);CHKERRQ(ierr);
+    ierr = PetscFree(JTJ);CHKERRQ(ierr);
+  }
+#if defined(PETSC_USE_COMPLEX)
+  for (i = 0; i < m*n; i++) Jinv[i] = PetscRealPart(Jinvs[i]);
+  ierr = PetscFree2(Js, Jinvs);CHKERRQ(ierr);
+#endif
+  PetscFunctionReturn(0);
+}
+
+/*@
+   PetscQuadraturePushForward - Push forward a quadrature functional under an affine transformation.
+
+   Collective on PetscQuadrature
+
+   Input Arguments:
++  q - the quadrature functional
+.  imageDim - the dimension of the image of the transformation
+.  origin - a point in the original space
+.  originImage - the image of the origin under the transformation
+.  J - the Jacobian of the transformation: an [imageDim x dim] matrix in row-major order
+-  formDegree - transform the quadrature weights as k-forms of this form degree (if the number of components is a multiple of (dim choose formDegree), it is assumed that they represent multiple k-forms) [see PetscDTAltVPullback() for interpretation of formDegree]
+
+   Output Arguments:
+.  Jinvstarq - a quadrature rule where each point is the image of a point in the original quadrature rule, and where the k-form weights have been pulled back by the pseudoinverse of J to the k-form weights in the image space.
+
+   Note: the new quadrature rule will have a different number of components if the original and image spaces have different dimensions.  For example, pushing a 2-form forward from a two-dimensional space to a three-dimensional space changes the number of components from 1 to 3.
+
+.seealso: PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscQuadraturePushForward(PetscQuadrature q, PetscInt imageDim, const PetscReal origin[], const PetscReal originImage[], const PetscReal J[], PetscInt formDegree, PetscQuadrature *Jinvstarq)
+{
+  PetscInt         dim, Nc, imageNc, formSize, Ncopies, imageFormSize, Npoints, pt, i, j, c;
+  const PetscReal *points;
+  const PetscReal *weights;
+  PetscReal       *imagePoints, *imageWeights;
+  PetscReal       *Jinv;
+  PetscReal       *Jinvstar;
+  PetscErrorCode   ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
+  if (imageDim < PetscAbsInt(formDegree)) SETERRQ2(PetscObjectComm((PetscObject)q), PETSC_ERR_ARG_INCOMP, "Cannot represent a %D-form in %D dimensions", PetscAbsInt(formDegree), imageDim);
+  ierr = PetscQuadratureGetData(q, &dim, &Nc, &Npoints, &points, &weights);CHKERRQ(ierr);
+  ierr = PetscDTBinomialInt(dim, PetscAbsInt(formDegree), &formSize);CHKERRQ(ierr);
+  if (Nc % formSize) SETERRQ2(PetscObjectComm((PetscObject)q), PETSC_ERR_ARG_INCOMP, "Number of components %D is not a multiple of formSize %D\n", Nc, formSize);
+  Ncopies = Nc / formSize;
+  ierr = PetscDTBinomialInt(imageDim, PetscAbsInt(formDegree), &imageFormSize);CHKERRQ(ierr);
+  imageNc = Ncopies * imageFormSize;
+  ierr = PetscMalloc1(Npoints * imageDim, &imagePoints);CHKERRQ(ierr);
+  ierr = PetscMalloc1(Npoints * imageNc, &imageWeights);CHKERRQ(ierr);
+  ierr = PetscMalloc2(imageDim * dim, &Jinv, formSize * imageFormSize, &Jinvstar);CHKERRQ(ierr);
+  ierr = PetscDTJacobianInverse_Internal(dim, imageDim, J, Jinv);CHKERRQ(ierr);
+  ierr = PetscDTAltVPullbackMatrix(imageDim, dim, Jinv, formDegree, Jinvstar);CHKERRQ(ierr);
+  for (pt = 0; pt < Npoints; pt++) {
+    const PetscReal *point = &points[pt * dim];
+    PetscReal       *imagePoint = &imagePoints[pt * imageDim];
+
+    for (i = 0; i < imageDim; i++) {
+      PetscReal val = originImage[i];
+
+      for (j = 0; j < dim; j++) val += J[i * dim + j] * (point[j] - origin[j]);
+      imagePoint[i] = val;
+    }
+    for (c = 0; c < Ncopies; c++) {
+      const PetscReal *form = &weights[pt * Nc + c * formSize];
+      PetscReal       *imageForm = &imageWeights[pt * imageNc + c * imageFormSize];
+
+      for (i = 0; i < imageFormSize; i++) {
+        PetscReal val = 0.;
+
+        for (j = 0; j < formSize; j++) val += Jinvstar[i * formSize + j] * form[j];
+        imageForm[i] = val;
+      }
+    }
+  }
+  ierr = PetscQuadratureCreate(PetscObjectComm((PetscObject)q), Jinvstarq);CHKERRQ(ierr);
+  ierr = PetscQuadratureSetData(*Jinvstarq, imageDim, imageNc, Npoints, imagePoints, imageWeights);CHKERRQ(ierr);
+  ierr = PetscFree2(Jinv, Jinvstar);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
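+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): push a
+   quadrature rule of 1-forms forward from a 2-dimensional reference space into a
+   3-dimensional image space under the affine map x -> originImage + J*(x - origin).
+   Here qref is assumed to be a 2-dimensional rule with 2 weight components per point
+   (one 1-form); the resulting rule has 3 components per point.
+
+     PetscQuadrature qref, qmapped;
+     PetscReal       origin[2]      = {0., 0.};
+     PetscReal       originImage[3] = {1., 0., 0.};
+     PetscReal       J[6]           = {1., 0.,
+                                       0., 1.,
+                                       0., 0.};   [imageDim x dim] = [3 x 2], row-major
+     ierr = PetscQuadraturePushForward(qref, 3, origin, originImage, J, 1, &qmapped);CHKERRQ(ierr);
+*/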
 /*@C
   PetscQuadratureSetData - Sets the data defining the quadrature
 
@@ -284,7 +456,7 @@ PetscErrorCode PetscQuadratureGetData(PetscQuadrature q, PetscInt *dim, PetscInt
 PetscErrorCode PetscQuadratureSetData(PetscQuadrature q, PetscInt dim, PetscInt Nc, PetscInt npoints, const PetscReal points[], const PetscReal weights[])
 {
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   if (dim >= 0)     q->dim       = dim;
   if (Nc >= 0)      q->Nc        = Nc;
   if (npoints >= 0) q->numPoints = npoints;
@@ -389,7 +561,7 @@ PetscErrorCode PetscQuadratureExpandComposite(PetscQuadrature q, PetscInt numSub
   PetscErrorCode   ierr;
 
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(q, PETSC_OBJECT_CLASSID, 1);
+  PetscValidHeaderSpecific(q, PETSCQUADRATURE_CLASSID, 1);
   PetscValidPointer(v0, 3);
   PetscValidPointer(jac, 4);
   PetscValidPointer(qref, 5);
@@ -766,19 +938,6 @@ PetscErrorCode PetscDTGaussTensorQuadrature(PetscInt dim, PetscInt Nc, PetscInt
   PetscFunctionReturn(0);
 }
 
-/* Evaluates the nth jacobi polynomial with weight parameters a,b at a point x.
-   Recurrence relations implemented from the pseudocode given in Karniadakis and Sherwin, Appendix B */
-PETSC_STATIC_INLINE PetscErrorCode PetscDTFactorial_Internal(PetscInt n, PetscReal *factorial)
-{
-  PetscReal f = 1.0;
-  PetscInt  i;
-
-  PetscFunctionBegin;
-  for (i = 1; i < n+1; ++i) f *= i;
-  *factorial = f;
-  PetscFunctionReturn(0);
-}
-
 /* Evaluates the nth jacobi polynomial with weight parameters a,b at a point x.
    Recurrence relations implemented from the pseudocode given in Karniadakis and Sherwin, Appendix B */
 PETSC_STATIC_INLINE PetscErrorCode PetscDTComputeJacobi(PetscReal a, PetscReal b, PetscInt n, PetscReal x, PetscReal *P)
@@ -863,16 +1022,14 @@ static PetscErrorCode PetscDTGaussJacobiQuadrature1D_Internal(PetscInt npoints,
     ia = (PetscInt) a;
     ib = (PetscInt) b;
     if (ia == a && ib == b && ia + npoints + 1 > 0 && ib + npoints + 1 > 0 && ia + ib + npoints + 1 > 0) { /* All gamma(x) terms are (x-1)! terms */
-      ierr = PetscDTFactorial_Internal(ia + npoints, &a2);CHKERRQ(ierr);
-      ierr = PetscDTFactorial_Internal(ib + npoints, &a3);CHKERRQ(ierr);
-      ierr = PetscDTFactorial_Internal(ia + ib + npoints, &a4);CHKERRQ(ierr);
-    } else {
-      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"tgamma() - math routine is unavailable.");
-    }
+      ierr = PetscDTFactorial(ia + npoints, &a2);CHKERRQ(ierr);
+      ierr = PetscDTFactorial(ib + npoints, &a3);CHKERRQ(ierr);
+      ierr = PetscDTFactorial(ia + ib + npoints, &a4);CHKERRQ(ierr);
+    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"tgamma() - math routine is unavailable.");
   }
 #endif
 
-  ierr = PetscDTFactorial_Internal(npoints, &a5);CHKERRQ(ierr);
+  ierr = PetscDTFactorial(npoints, &a5);CHKERRQ(ierr);
   a6   = a1 * a2 * a3 / a4 / a5;
   /* Computes the m roots of P_{m}^{a,b} on [-1,1] by Newton's method with Chebyshev points as initial guesses.
    Algorithm implemented from the pseudocode given by Karniadakis and Sherwin and Python in FIAT */
@@ -1731,4 +1888,3 @@ PetscErrorCode PetscGaussLobattoLegendreElementMassDestroy(PetscInt n,PetscReal
   *AA  = NULL;
   PetscFunctionReturn(0);
 }
-
diff --git a/src/dm/dt/interface/dtaltv.c b/src/dm/dt/interface/dtaltv.c
new file mode 100644
index 00000000000..f515c034aa6
--- /dev/null
+++ b/src/dm/dt/interface/dtaltv.c
@@ -0,0 +1,824 @@
+#include 
+#include  /*I "petscdt.h" I*/
+
+/*MC
+   PetscDTAltV - An interface for common operations on k-forms, also known as alternating algebraic forms or alternating k-linear maps.
+   The name of the interface comes from the notation "Alt V" for the algebra of all k-forms acting on vectors in the space V, also known as the exterior algebra of V*.
+
+   A recommended reference for this material is Section 2 "Exterior algebra and exterior calculus" in "Finite element
+   exterior calculus, homological techniques, and applications", by Arnold, Falk, & Winther (2006, doi:10.1017/S0962492906210018).
+
+   A k-form w (k is called the "form degree" of w) is an alternating k-linear map acting on tuples (v_1, ..., v_k) of
+   vectors from a vector space V and producing a real number:
+   - alternating: swapping any two vectors in a tuple reverses the sign of the result, e.g. w(v_1, v_2, ..., v_k) = -w(v_2, v_1, ..., v_k)
+   - k-linear: w acts linearly on each vector separately, e.g. w(a*v + b*y, v_2, ..., v_k) = a*w(v,v_2,...,v_k) + b*w(y,v_2,...,v_k)
+   This action is implemented as PetscDTAltVApply.
+
+   The k-forms on a vector space form a vector space themselves, Alt^k V.  The dimension of Alt^k V, if V is N dimensional, is N choose k.  (This
+   shows that for an N dimensional space, only 0 <= k <= N are valid form degrees.)
+   The standard basis for Alt^k V, used in PetscDTAltV, has one basis k-form for each ordered subset of k coordinates of the N dimensional space:
+   For example, if the coordinate directions of a four dimensional space are (t, x, y, z), then there are 4 choose 2 = 6 ordered subsets of two coordinates.
+   They are, in lexicographic order, (t, x), (t, y), (t, z), (x, y), (x, z) and (y, z).  PetscDTAltV also orders the basis of Alt^k V lexicographically
+   by the associated subsets.
+
+   The unit basis k-form associated with coordinates (c_1, ..., c_k) acts on a set of k vectors (v_1, ..., v_k) by creating a square matrix V where
+   V[i,j] = v_i[c_j] and taking the determinant of V.
+
+   If j + k <= N, then a j-form f and a k-form g can be multiplied to create a (j+k)-form using the wedge or exterior product, (f wedge g).
+   This product is graded-anticommutative, (f wedge g) = (-1)^(j*k) (g wedge f).  It is sufficient to describe the wedge product of two basis forms.
+   Let f be the basis j-form associated with coordinates (f_1,...,f_j) and g be the basis k-form associated with coordinates (g_1,...,g_k):
+   - If there is any coordinate in both sets, then (f wedge g) = 0.
+   - Otherwise, (f wedge g) is a multiple of the basis (j+k)-form h associated with (f_1,...,f_j,g_1,...,g_k).
+   - In fact it is equal to either h or -h depending on how (f_1,...,f_j,g_1,...,g_k) compares to the same list of coordinates given in ascending order: if it is an even permutation of that list, then (f wedge g) = h, otherwise (f wedge g) = -h.
+   The wedge product is implemented for either two inputs (f and g) in PetscDTAltVWedge, or for one (just f, giving a
+   matrix to multiply against multiple choices of g) in PetscDTAltVWedgeMatrix.
+
+   If k > 0, a k-form w and a vector v can combine to make a (k-1)-form through the interior product, (w int v),
+   defined by (w int v)(v_1,...,v_{k-1}) = w(v,v_1,...,v_{k-1}).
+
+   The interior product is implemented for either two inputs (w and v) in PetscDTAltVInterior, for one (just v, giving a
+   matrix to multiply against multiple choices of w) in PetscDTAltVInteriorMatrix,
+   or for no inputs (giving the sparsity pattern of PetscDTAltVInteriorMatrix) in PetscDTAltVInteriorPattern.
+
+   When there is a linear map L: V -> W from an N dimensional vector space to an M dimensional vector space,
+   it induces the linear pullback map L^* : Alt^k W -> Alt^k V, defined by L^* w(v_1,...,v_k) = w(L v_1, ..., L v_k).
+   The pullback is implemented as PetscDTAltVPullback (acting on a known w) and PetscDTAltVPullbackMatrix (creating a matrix that computes the action of L^*).
+
+   Alt^k V and Alt^(N-k) V have the same dimension, and the Hodge star operator maps between them.  We note that Alt^N V is a one-dimensional space, and its
+   basis vector is sometimes called vol.  The Hodge star operator has the property that (f wedge (star g)) = (f,g) vol, where (f,g) is the simple inner product
+   of the basis coefficients of f and g.
+   Powers of the Hodge star operator can be applied with PetscDTAltVStar
+
+   level: intermediate
+
+.seealso: PetscDTAltVApply(), PetscDTAltVWedge(), PetscDTAltVInterior(), PetscDTAltVPullback(), PetscDTAltVStar()
+M*/
+
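+/* Illustrative example of the conventions above (hypothetical, not part of the interface
+   itself): in N = 3 dimensions with coordinates (x, y, z), the basis of Alt^2 V is ordered
+   lexicographically as (x,y), (x,z), (y,z).  The wedge of the basis 1-forms dx and dz is the
+   basis 2-form associated with (x,z), with positive sign, while dz wedge dx is its negative. */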
+/*@
+   PetscDTAltVApply - Apply a k-form (an alternating k-linear map) to a set of k N-dimensional vectors
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  k - the degree k of the k-form w, 0 <= k <= N
+.  w - a k-form, size [N choose k] (each degree of freedom of a k-form is associated with a subset of k coordinates of the N-dimensional vectors: the degrees of freedom are ordered lexicographically by their associated subsets)
+-  v - a set of k vectors of size N, size [k x N], each vector stored contiguously
+
+   Output Arguments:
+.  wv - w(v_1,...,v_k) = \sum_i w_i * det(V_i): the degree of freedom w_i is associated with coordinates [s_{i,1},...,s_{i,k}], and the square matrix V_i has entry (j,l) given by the s_{i,l}'th coordinate of v_j
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVApply(PetscInt N, PetscInt k, const PetscReal *w, const PetscReal *v, PetscReal *wv)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  if (N < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid dimension");
+  if (k < 0 || k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  if (N <= 3) {
+    if (!k) {
+      *wv = w[0];
+    } else {
+      if (N == 1)        {*wv = w[0] * v[0];}
+      else if (N == 2) {
+        if (k == 1)      {*wv = w[0] * v[0] + w[1] * v[1];}
+        else             {*wv = w[0] * (v[0] * v[3] - v[1] * v[2]);}
+      } else {
+        if (k == 1)      {*wv = w[0] * v[0] + w[1] * v[1] + w[2] * v[2];}
+        else if (k == 2) {
+          *wv = w[0] * (v[0] * v[4] - v[1] * v[3]) +
+                w[1] * (v[0] * v[5] - v[2] * v[3]) +
+                w[2] * (v[1] * v[5] - v[2] * v[4]);
+        } else {
+          *wv = w[0] * (v[0] * (v[4] * v[8] - v[5] * v[7]) +
+                        v[1] * (v[5] * v[6] - v[3] * v[8]) +
+                        v[2] * (v[3] * v[7] - v[4] * v[6]));
+        }
+      }
+    }
+  } else {
+    PetscInt Nk, Nf;
+    PetscInt *subset, *perm;
+    PetscInt i, j, l;
+    PetscReal sum = 0.;
+
+    ierr = PetscDTFactorialInt(k, &Nf);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N, k, &Nk);CHKERRQ(ierr);
+    ierr = PetscMalloc2(k, &subset, k, &perm);CHKERRQ(ierr);
+    for (i = 0; i < Nk; i++) {
+      PetscReal subsum = 0.;
+
+      ierr = PetscDTEnumSubset(N, k, i, subset);CHKERRQ(ierr);
+      for (j = 0; j < Nf; j++) {
+        PetscBool permOdd;
+        PetscReal prod;
+
+        ierr = PetscDTEnumPerm(k, j, perm, &permOdd);CHKERRQ(ierr);
+        prod = permOdd ? -1. : 1.;
+        for (l = 0; l < k; l++) {
+          prod *= v[perm[l] * N + subset[l]];
+        }
+        subsum += prod;
+      }
+      sum += w[i] * subsum;
+    }
+    ierr = PetscFree2(subset, perm);CHKERRQ(ierr);
+    *wv = sum;
+  }
+  PetscFunctionReturn(0);
+}
+
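+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): apply the
+   basis 2-form associated with the (x,y) coordinate pair to the pair of unit vectors
+   (e_x, e_y) in 3 dimensions; the result is 1.
+
+     PetscReal w[3] = {1., 0., 0.};               2-form coefficients, size [3 choose 2] = 3
+     PetscReal v[6] = {1., 0., 0.,  0., 1., 0.};  two vectors of size 3, stored contiguously
+     PetscReal wv;
+     ierr = PetscDTAltVApply(3, 2, w, v, &wv);CHKERRQ(ierr);   wv == 1.
+*/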
+/*@
+   PetscDTAltVWedge - Compute the wedge product of a j-form and a k-form, giving a (j+k) form
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  j - the degree j of the j-form a, 0 <= j <= N
+.  k - the degree k of the k-form b, 0 <= k <= N and 0 <= j+k <= N
+.  a - a j-form, size [N choose j]
+-  b - a k-form, size [N choose k]
+
+   Output Arguments:
+.  awedgeb - the (j+k)-form a wedge b, size [N choose (j+k)]: (a wedge b)(v_1,...,v_{j+k}) = \sum_{s} sign(s) a(v_{s_1},...,v_{s_j}) b(v_{s_{j+1}},...,v_{s_{j+k}}),
+             where the sum is over permutations s such that s_1 < s_2 < ... < s_j and s_{j+1} < s_{j+2} < ... < s_{j+k}.
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVWedgeMatrix(), PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVWedge(PetscInt N, PetscInt j, PetscInt k, const PetscReal *a, const PetscReal *b, PetscReal *awedgeb)
+{
+  PetscInt       i;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  if (N < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid dimension");
+  if (j < 0 || k < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "negative form degree");
+  if (j + k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Wedge greater than dimension");
+  if (N <= 3) {
+    PetscInt Njk;
+
+    ierr = PetscDTBinomialInt(N, j+k, &Njk);CHKERRQ(ierr);
+    if (!j)      {for (i = 0; i < Njk; i++) {awedgeb[i] = a[0] * b[i];}}
+    else if (!k) {for (i = 0; i < Njk; i++) {awedgeb[i] = a[i] * b[0];}}
+    else {
+      if (N == 2) {awedgeb[0] = a[0] * b[1] - a[1] * b[0];}
+      else {
+        if (j+k == 2) {
+          awedgeb[0] = a[0] * b[1] - a[1] * b[0];
+          awedgeb[1] = a[0] * b[2] - a[2] * b[0];
+          awedgeb[2] = a[1] * b[2] - a[2] * b[1];
+        } else {
+          awedgeb[0] = a[0] * b[2] - a[1] * b[1] + a[2] * b[0];
+        }
+      }
+    }
+  } else {
+    PetscInt  Njk;
+    PetscInt  JKj;
+    PetscInt *subset, *subsetjk, *subsetj, *subsetk;
+    PetscInt  i;
+
+    ierr = PetscDTBinomialInt(N, j+k, &Njk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(j+k, j, &JKj);CHKERRQ(ierr);
+    ierr = PetscMalloc4(j+k, &subset, j+k, &subsetjk, j, &subsetj, k, &subsetk);CHKERRQ(ierr);
+    for (i = 0; i < Njk; i++) {
+      PetscReal sum = 0.;
+      PetscInt  l;
+
+      ierr = PetscDTEnumSubset(N, j+k, i, subset);CHKERRQ(ierr);
+      for (l = 0; l < JKj; l++) {
+        PetscBool jkOdd;
+        PetscInt  m, jInd, kInd;
+
+        ierr = PetscDTEnumSplit(j+k, j, l, subsetjk, &jkOdd);CHKERRQ(ierr);
+        for (m = 0; m < j; m++) {
+          subsetj[m] = subset[subsetjk[m]];
+        }
+        for (m = 0; m < k; m++) {
+          subsetk[m] = subset[subsetjk[j+m]];
+        }
+        ierr = PetscDTSubsetIndex(N, j, subsetj, &jInd);CHKERRQ(ierr);
+        ierr = PetscDTSubsetIndex(N, k, subsetk, &kInd);CHKERRQ(ierr);
+        sum += jkOdd ? -(a[jInd] * b[kInd]) : (a[jInd] * b[kInd]);
+      }
+      awedgeb[i] = sum;
+    }
+    ierr = PetscFree4(subset, subsetjk, subsetj, subsetk);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
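+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): the wedge
+   of the 1-forms a = dx and b = dy in 3 dimensions is the basis 2-form associated with (x,y).
+
+     PetscReal a[3] = {1., 0., 0.};
+     PetscReal b[3] = {0., 1., 0.};
+     PetscReal awedgeb[3];
+     ierr = PetscDTAltVWedge(3, 1, 1, a, b, awedgeb);CHKERRQ(ierr);   awedgeb == {1., 0., 0.}
+*/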
+/*@
+   PetscDTAltVWedgeMatrix - Compute the matrix defined by the wedge product with a given j-form that maps k-forms to (j+k)-forms
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  j - the degree j of the j-form a, 0 <= j <= N
+.  k - the degree k of the k-forms that (a wedge) will be applied to, 0 <= k <= N and 0 <= j+k <= N
+-  a - a j-form, size [N choose j]
+
+   Output Arguments:
+.  awedgeMat - (a wedge), an [(N choose j+k) x (N choose k)] matrix in row-major order, such that (a wedge) * b = a wedge b
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVWedgeMatrix(PetscInt N, PetscInt j, PetscInt k, const PetscReal *a, PetscReal *awedgeMat)
+{
+  PetscInt       i;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  if (N < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid dimension");
+  if (j < 0 || k < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "negative form degree");
+  if (j + k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Wedge greater than dimension");
+  if (N <= 3) {
+    PetscInt Njk;
+
+    ierr = PetscDTBinomialInt(N, j+k, &Njk);CHKERRQ(ierr);
+    if (!j) {
+      for (i = 0; i < Njk * Njk; i++) {awedgeMat[i] = 0.;}
+      for (i = 0; i < Njk; i++) {awedgeMat[i * (Njk + 1)] = a[0];}
+    } else if (!k) {
+      for (i = 0; i < Njk; i++) {awedgeMat[i] = a[i];}
+    } else {
+      if (N == 2) {
+        awedgeMat[0] = -a[1]; awedgeMat[1] =  a[0];
+      } else {
+        if (j+k == 2) {
+          awedgeMat[0] = -a[1]; awedgeMat[1] =  a[0]; awedgeMat[2] =    0.;
+          awedgeMat[3] = -a[2]; awedgeMat[4] =    0.; awedgeMat[5] =  a[0];
+          awedgeMat[6] =    0.; awedgeMat[7] = -a[2]; awedgeMat[8] =  a[1];
+        } else {
+          awedgeMat[0] =  a[2]; awedgeMat[1] = -a[1]; awedgeMat[2] =  a[0];
+        }
+      }
+    }
+  } else {
+    PetscInt  Njk;
+    PetscInt  Nk;
+    PetscInt  JKj, i;
+    PetscInt *subset, *subsetjk, *subsetj, *subsetk;
+
+    ierr = PetscDTBinomialInt(N,   k,   &Nk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N,   j+k, &Njk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(j+k, j,   &JKj);CHKERRQ(ierr);
+    ierr = PetscMalloc4(j+k, &subset, j+k, &subsetjk, j, &subsetj, k, &subsetk);CHKERRQ(ierr);
+    for (i = 0; i < Njk * Nk; i++) awedgeMat[i] = 0.;
+    for (i = 0; i < Njk; i++) {
+      PetscInt  l;
+
+      ierr = PetscDTEnumSubset(N, j+k, i, subset);CHKERRQ(ierr);
+      for (l = 0; l < JKj; l++) {
+        PetscBool jkOdd;
+        PetscInt  m, jInd, kInd;
+
+        ierr = PetscDTEnumSplit(j+k, j, l, subsetjk, &jkOdd);CHKERRQ(ierr);
+        for (m = 0; m < j; m++) {
+          subsetj[m] = subset[subsetjk[m]];
+        }
+        for (m = 0; m < k; m++) {
+          subsetk[m] = subset[subsetjk[j+m]];
+        }
+        ierr = PetscDTSubsetIndex(N, j, subsetj, &jInd);CHKERRQ(ierr);
+        ierr = PetscDTSubsetIndex(N, k, subsetk, &kInd);CHKERRQ(ierr);
+        awedgeMat[i * Nk + kInd] += jkOdd ? - a[jInd] : a[jInd];
+      }
+    }
+    ierr = PetscFree4(subset, subsetjk, subsetj, subsetk);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+/*@
+   PetscDTAltVPullback - Compute the pullback of a k-form under a linear transformation of the coordinate space
+
+   Input Arguments:
++  N - the dimension of the origin vector space of the linear transformation, N >= 0
+.  M - the dimension of the image vector space of the linear transformation, M >= 0
+.  L - a linear transformation, an [M x N] matrix in row-major format
+.  k - the *signed* degree k of the |k|-form w, -(min(M,N)) <= k <= min(M,N).  A negative form degree indicates that the pullback should be conjugated by the Hodge star operator (see note).
+-  w - a |k|-form in the image space, size [M choose |k|]
+
+   Output Arguments:
+.  Lstarw - the pullback of w to a |k|-form in the origin space, size [N choose |k|]: (Lstarw)(v_1,...v_k) = w(L*v_1,...,L*v_k).
+
+   Level: intermediate
+
+   Note: negative form degrees accommodate, e.g., H-div conforming vector fields.  An H-div conforming vector field stores its degrees of freedom as (dx, dy, dz), like a 1-form,
+   but its normal trace is integrated on faces, like a 2-form.  The correct pullback then is to apply the Hodge star transformation from (M-2)-form to 2-form, pullback as a 2-form,
+   then the inverse Hodge star transformation.
+
+.seealso: PetscDTAltV, PetscDTAltVPullbackMatrix(), PetscDTAltVStar()
+@*/
+PetscErrorCode PetscDTAltVPullback(PetscInt N, PetscInt M, const PetscReal *L, PetscInt k, const PetscReal *w, PetscReal *Lstarw)
+{
+  PetscInt         i, j, Nk, Mk;
+  PetscErrorCode   ierr;
+
+  PetscFunctionBegin;
+  if (N < 0 || M < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid dimensions");
+  if (PetscAbsInt(k) > N || PetscAbsInt(k) > M) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  if (N <= 3 && M <= 3) {
+
+    ierr = PetscDTBinomialInt(M, PetscAbsInt(k), &Mk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N, PetscAbsInt(k), &Nk);CHKERRQ(ierr);
+    if (!k) {
+      Lstarw[0] = w[0];
+    } else if (k == 1) {
+      for (i = 0; i < Nk; i++) {
+        PetscReal sum = 0.;
+
+        for (j = 0; j < Mk; j++) {sum += L[j * Nk + i] * w[j];}
+        Lstarw[i] = sum;
+      }
+    } else if (k == -1) {
+      PetscReal mult[3] = {1., -1., 1.};
+
+      for (i = 0; i < Nk; i++) {
+        PetscReal sum = 0.;
+
+        for (j = 0; j < Mk; j++) {
+          sum += L[(Mk - 1 - j) * Nk + (Nk - 1 - i)] * w[j] * mult[j];
+        }
+        Lstarw[i] = mult[i] * sum;
+      }
+    } else if (k == 2) {
+      PetscInt pairs[3][2] = {{0,1},{0,2},{1,2}};
+
+      for (i = 0; i < Nk; i++) {
+        PetscReal sum = 0.;
+        for (j = 0; j < Mk; j++) {
+          sum += (L[pairs[j][0] * N + pairs[i][0]] * L[pairs[j][1] * N + pairs[i][1]] -
+                  L[pairs[j][1] * N + pairs[i][0]] * L[pairs[j][0] * N + pairs[i][1]]) * w[j];
+        }
+        Lstarw[i] = sum;
+      }
+    } else if (k == -2) {
+      PetscInt  pairs[3][2] = {{1,2},{2,0},{0,1}};
+      PetscInt  offi = (N == 2) ? 2 : 0;
+      PetscInt  offj = (M == 2) ? 2 : 0;
+
+      for (i = 0; i < Nk; i++) {
+        PetscReal sum   = 0.;
+
+        for (j = 0; j < Mk; j++) {
+          sum += (L[pairs[offj + j][0] * N + pairs[offi + i][0]] *
+                  L[pairs[offj + j][1] * N + pairs[offi + i][1]] -
+                  L[pairs[offj + j][1] * N + pairs[offi + i][0]] *
+                  L[pairs[offj + j][0] * N + pairs[offi + i][1]]) * w[j];
+
+        }
+        Lstarw[i] = sum;
+      }
+    } else {
+      PetscReal detL = L[0] * (L[4] * L[8] - L[5] * L[7]) +
+                       L[1] * (L[5] * L[6] - L[3] * L[8]) +
+                       L[2] * (L[3] * L[7] - L[4] * L[6]);
+
+      for (i = 0; i < Nk; i++) {Lstarw[i] = detL * w[i];}
+    }
+  } else {
+    PetscInt         Nf, l, p;
+    PetscReal       *Lw, *Lwv;
+    PetscInt        *subsetw, *subsetv;
+    PetscInt        *perm;
+    PetscReal       *walloc = NULL;
+    const PetscReal *ww = NULL;
+    PetscBool        negative = PETSC_FALSE;
+
+    ierr = PetscDTBinomialInt(M, PetscAbsInt(k), &Mk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N, PetscAbsInt(k), &Nk);CHKERRQ(ierr);
+    ierr = PetscDTFactorialInt(PetscAbsInt(k), &Nf);CHKERRQ(ierr);
+    if (k < 0) {
+      negative = PETSC_TRUE;
+      k = -k;
+      ierr = PetscMalloc1(Mk, &walloc);CHKERRQ(ierr);
+      ierr = PetscDTAltVStar(M, M - k, 1, w, walloc);CHKERRQ(ierr);
+      ww = walloc;
+    } else {
+      ww = w;
+    }
+    ierr = PetscMalloc5(k, &subsetw, k, &subsetv, k, &perm, N * k, &Lw, k * k, &Lwv);CHKERRQ(ierr);
+    for (i = 0; i < Nk; i++) Lstarw[i] = 0.;
+    for (i = 0; i < Mk; i++) {
+      ierr = PetscDTEnumSubset(M, k, i, subsetw);CHKERRQ(ierr);
+      for (j = 0; j < Nk; j++) {
+        ierr = PetscDTEnumSubset(N, k, j, subsetv);CHKERRQ(ierr);
+        for (p = 0; p < Nf; p++) {
+          PetscReal prod;
+          PetscBool isOdd;
+
+          ierr = PetscDTEnumPerm(k, p, perm, &isOdd);CHKERRQ(ierr);
+          prod = isOdd ? -ww[i] : ww[i];
+          for (l = 0; l < k; l++) {
+            prod *= L[subsetw[perm[l]] * N + subsetv[l]];
+          }
+          Lstarw[j] += prod;
+        }
+      }
+    }
+    if (negative) {
+      PetscReal *sLsw;
+
+      ierr = PetscMalloc1(Nk, &sLsw);CHKERRQ(ierr);
+      ierr = PetscDTAltVStar(N, N - k, -1,  Lstarw, sLsw);CHKERRQ(ierr);
+      for (i = 0; i < Nk; i++) Lstarw[i] = sLsw[i];
+      ierr = PetscFree(sLsw);CHKERRQ(ierr);
+    }
+    ierr = PetscFree5(subsetw, subsetv, perm, Lw, Lwv);CHKERRQ(ierr);
+    ierr = PetscFree(walloc);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
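+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): pull the
+   1-form w = 2 dx + 3 dy + 5 dz on a 3-dimensional image space back to the 2-dimensional
+   origin space of the inclusion map L (x, y) = (x, y, 0); the dz component is annihilated.
+
+     PetscReal L[6] = {1., 0.,
+                       0., 1.,
+                       0., 0.};   [M x N] = [3 x 2], row-major
+     PetscReal w[3] = {2., 3., 5.};
+     PetscReal Lstarw[2];
+     ierr = PetscDTAltVPullback(2, 3, L, 1, w, Lstarw);CHKERRQ(ierr);   Lstarw == {2., 3.}
+*/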
+/*@
+   PetscDTAltVPullbackMatrix - Compute the pullback matrix for k-forms under a linear transformation
+
+   Input Arguments:
++  N - the dimension of the origin vector space of the linear transformation, N >= 0
+.  M - the dimension of the image vector space of the linear transformation, M >= 0
+.  L - a linear transformation, an [M x N] matrix in row-major format
+-  k - the *signed* degree k of the |k|-forms on which Lstar acts, -(min(M,N)) <= k <= min(M,N).  A negative form degree indicates that the pullback should be conjugated by the Hodge star operator (see note in PetscDTAltVPullback())
+
+   Output Arguments:
+.  Lstar - the pullback matrix, an [(N choose |k|) x (M choose |k|)] matrix in row-major format such that Lstar * w = L^* w
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVPullback(), PetscDTAltVStar()
+@*/
+PetscErrorCode PetscDTAltVPullbackMatrix(PetscInt N, PetscInt M, const PetscReal *L, PetscInt k, PetscReal *Lstar)
+{
+  PetscInt        Nk, Mk, Nf, i, j, l, p;
+  PetscReal      *Lw, *Lwv;
+  PetscInt       *subsetw, *subsetv;
+  PetscInt       *perm;
+  PetscBool       negative = PETSC_FALSE;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  if (N < 0 || M < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid dimensions");
+  if (PetscAbsInt(k) > N || PetscAbsInt(k) > M) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  if (N <= 3 && M <= 3) {
+    PetscReal mult[3] = {1., -1., 1.};
+
+    ierr = PetscDTBinomialInt(M, PetscAbsInt(k), &Mk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N, PetscAbsInt(k), &Nk);CHKERRQ(ierr);
+    if (!k) {
+      Lstar[0] = 1.;
+    } else if (k == 1) {
+      for (i = 0; i < Nk; i++) {for (j = 0; j < Mk; j++) {Lstar[i * Mk + j] = L[j * Nk + i];}}
+    } else if (k == -1) {
+      for (i = 0; i < Nk; i++) {
+        for (j = 0; j < Mk; j++) {
+          Lstar[i * Mk + j] = L[(Mk - 1 - j) * Nk + (Nk - 1 - i)] * mult[i] * mult[j];
+        }
+      }
+    } else if (k == 2) {
+      PetscInt pairs[3][2] = {{0,1},{0,2},{1,2}};
+
+      for (i = 0; i < Nk; i++) {
+        for (j = 0; j < Mk; j++) {
+          Lstar[i * Mk + j] = L[pairs[j][0] * N + pairs[i][0]] *
+                              L[pairs[j][1] * N + pairs[i][1]] -
+                              L[pairs[j][1] * N + pairs[i][0]] *
+                              L[pairs[j][0] * N + pairs[i][1]];
+        }
+      }
+    } else if (k == -2) {
+      PetscInt  pairs[3][2] = {{1,2},{2,0},{0,1}};
+      PetscInt  offi = (N == 2) ? 2 : 0;
+      PetscInt  offj = (M == 2) ? 2 : 0;
+
+      for (i = 0; i < Nk; i++) {
+        for (j = 0; j < Mk; j++) {
+          Lstar[i * Mk + j] = L[pairs[offj + j][0] * N + pairs[offi + i][0]] *
+                              L[pairs[offj + j][1] * N + pairs[offi + i][1]] -
+                              L[pairs[offj + j][1] * N + pairs[offi + i][0]] *
+                              L[pairs[offj + j][0] * N + pairs[offi + i][1]];
+        }
+      }
+    } else {
+      PetscReal detL = L[0] * (L[4] * L[8] - L[5] * L[7]) +
+                       L[1] * (L[5] * L[6] - L[3] * L[8]) +
+                       L[2] * (L[3] * L[7] - L[4] * L[6]);
+
+      for (i = 0; i < Nk; i++) {Lstar[i] = detL;}
+    }
+  } else {
+    if (k < 0) {
+      negative = PETSC_TRUE;
+      k = -k;
+    }
+    ierr = PetscDTBinomialInt(M, PetscAbsInt(k), &Mk);CHKERRQ(ierr);
+    ierr = PetscDTBinomialInt(N, PetscAbsInt(k), &Nk);CHKERRQ(ierr);
+    ierr = PetscDTFactorialInt(PetscAbsInt(k), &Nf);CHKERRQ(ierr);
+    ierr = PetscMalloc5(M, &subsetw, N, &subsetv, k, &perm, N * k, &Lw, k * k, &Lwv);CHKERRQ(ierr);
+    for (i = 0; i < Nk * Mk; i++) Lstar[i] = 0.;
+    for (i = 0; i < Mk; i++) {
+      PetscBool iOdd;
+      PetscInt  iidx, jidx;
+
+      ierr = PetscDTEnumSplit(M, k, i, subsetw, &iOdd);CHKERRQ(ierr);
+      iidx = negative ? Mk - 1 - i : i;
+      iOdd = negative ? (PetscBool) (iOdd ^ ((k * (M-k)) & 1)) : PETSC_FALSE;
+      for (j = 0; j < Nk; j++) {
+        PetscBool jOdd;
+
+        ierr = PetscDTEnumSplit(N, k, j, subsetv, &jOdd);CHKERRQ(ierr);
+        jidx = negative ? Nk - 1 - j : j;
+        jOdd = negative ? (PetscBool) (iOdd ^ jOdd ^ ((k * (N-k)) & 1)) : PETSC_FALSE;
+        for (p = 0; p < Nf; p++) {
+          PetscReal prod;
+          PetscBool isOdd;
+
+          ierr = PetscDTEnumPerm(k, p, perm, &isOdd);CHKERRQ(ierr);
+          isOdd = (PetscBool) (isOdd ^ jOdd);
+          prod = isOdd ? -1. : 1.;
+          for (l = 0; l < k; l++) {
+            prod *= L[subsetw[perm[l]] * N + subsetv[l]];
+          }
+          Lstar[jidx * Mk + iidx] += prod;
+        }
+      }
+    }
+    ierr = PetscFree5(subsetw, subsetv, perm, Lw, Lwv);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+/*@
+   PetscDTAltVInterior - Compute the interior product of a k-form with a vector
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  k - the degree k of the k-form w, 0 <= k <= N
+.  w - a k-form, size [N choose k]
+-  v - an N dimensional vector
+
+   Output Arguments:
+.  wIntv - the (k-1)-form (w int v), size [N choose (k-1)]: (w int v) is defined by its action on (k-1) vectors {v_1, ..., v_{k-1}} as (w int v)(v_1, ..., v_{k-1}) = w(v, v_1, ..., v_{k-1}).
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVInteriorMatrix(), PetscDTAltVInteriorPattern(), PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVInterior(PetscInt N, PetscInt k, const PetscReal *w, const PetscReal *v, PetscReal *wIntv)
+{
+  PetscInt        i, Nk, Nkm;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  if (k <= 0 || k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  ierr = PetscDTBinomialInt(N, k,   &Nk);CHKERRQ(ierr);
+  ierr = PetscDTBinomialInt(N, k-1, &Nkm);CHKERRQ(ierr);
+  if (N <= 3) {
+    if (k == 1) {
+      PetscReal sum = 0.;
+
+      for (i = 0; i < N; i++) {
+        sum += w[i] * v[i];
+      }
+      wIntv[0] = sum;
+    } else if (k == N) {
+      PetscReal mult[3] = {1., -1., 1.};
+
+      for (i = 0; i < N; i++) {
+        wIntv[N - 1 - i] = w[0] * v[i] * mult[i];
+      }
+    } else {
+      wIntv[0] = - w[0]*v[1] - w[1]*v[2];
+      wIntv[1] =   w[0]*v[0] - w[2]*v[2];
+      wIntv[2] =   w[1]*v[0] + w[2]*v[1];
+    }
+  } else {
+    PetscInt       *subset, *work;
+
+    ierr = PetscMalloc2(k, &subset, k, &work);CHKERRQ(ierr);
+    for (i = 0; i < Nkm; i++) wIntv[i] = 0.;
+    for (i = 0; i < Nk; i++) {
+      PetscInt  j, l, m;
+
+      ierr = PetscDTEnumSubset(N, k, i, subset);CHKERRQ(ierr);
+      for (j = 0; j < k; j++) {
+        PetscInt  idx;
+        PetscBool flip = (PetscBool) (j & 1);
+
+        for (l = 0, m = 0; l < k; l++) {
+          if (l != j) work[m++] = subset[l];
+        }
+        ierr = PetscDTSubsetIndex(N, k - 1, work, &idx);CHKERRQ(ierr);
+        wIntv[idx] += flip ? -(w[i] * v[subset[j]]) :  (w[i] * v[subset[j]]);
+      }
+    }
+    ierr = PetscFree2(subset, work);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
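+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): contracting
+   the 2-form dx wedge dy with the vector e_x leaves the 1-form dy.
+
+     PetscReal w[3] = {1., 0., 0.};   dx wedge dy in the (x,y),(x,z),(y,z) basis
+     PetscReal v[3] = {1., 0., 0.};   e_x
+     PetscReal wIntv[3];
+     ierr = PetscDTAltVInterior(3, 2, w, v, wIntv);CHKERRQ(ierr);   wIntv == {0., 1., 0.}
+*/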
+/*@
+   PetscDTAltVInteriorMatrix - Compute the matrix of the linear transformation induced on a k-form by the interior product with a vector
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  k - the degree k of the k-forms on which intvMat acts, 0 <= k <= N
+-  v - an N dimensional vector
+
+   Output Arguments:
+.  intvMat - an [(N choose (k-1)) x (N choose k)] matrix, row-major: (intvMat) * w = (w int v)
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVInterior(), PetscDTAltVInteriorPattern(), PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVInteriorMatrix(PetscInt N, PetscInt k, const PetscReal *v, PetscReal *intvMat)
+{
+  PetscInt        i, Nk, Nkm;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  if (k <= 0 || k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  ierr = PetscDTBinomialInt(N, k,   &Nk);CHKERRQ(ierr);
+  ierr = PetscDTBinomialInt(N, k-1, &Nkm);CHKERRQ(ierr);
+  if (N <= 3) {
+    if (k == 1) {
+      for (i = 0; i < N; i++) intvMat[i] = v[i];
+    } else if (k == N) {
+      PetscReal mult[3] = {1., -1., 1.};
+
+      for (i = 0; i < N; i++) intvMat[N - 1 - i] = v[i] * mult[i];
+    } else {
+      intvMat[0] = -v[1]; intvMat[1] = -v[2]; intvMat[2] =    0.;
+      intvMat[3] =  v[0]; intvMat[4] =    0.; intvMat[5] = -v[2];
+      intvMat[6] =    0.; intvMat[7] =  v[0]; intvMat[8] =  v[1];
+    }
+  } else {
+    PetscInt       *subset, *work;
+
+    ierr = PetscMalloc2(k, &subset, k, &work);CHKERRQ(ierr);
+    for (i = 0; i < Nk * Nkm; i++) intvMat[i] = 0.;
+    for (i = 0; i < Nk; i++) {
+      PetscInt  j, l, m;
+
+      ierr = PetscDTEnumSubset(N, k, i, subset);CHKERRQ(ierr);
+      for (j = 0; j < k; j++) {
+        PetscInt  idx;
+        PetscBool flip = (PetscBool) (j & 1);
+
+        for (l = 0, m = 0; l < k; l++) {
+          if (l != j) work[m++] = subset[l];
+        }
+        ierr = PetscDTSubsetIndex(N, k - 1, work, &idx);CHKERRQ(ierr);
+        intvMat[idx * Nk + i] += flip ? -v[subset[j]] :  v[subset[j]];
+      }
+    }
+    ierr = PetscFree2(subset, work);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+/*@
+   PetscDTAltVInteriorPattern - compute the sparsity and sign pattern of the interior product matrix computed in PetscDTAltVInteriorMatrix()
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+-  k - the degree of the k-forms on which intvMat from PetscDTAltVInteriorMatrix() acts, 0 <= k <= N.
+
+   Output Arguments:
+.  indices - The interior product matrix intvMat has size [(N choose (k-1)) x (N choose k)] and has (N choose k) * k
+             non-zeros.  indices[i][0] and indices[i][1] are the row and column of a non-zero, and its value is equal to the vector
+             coordinate v[j] if indices[i][2] = j, or -v[j] if indices[i][2] = -(j+1)
+
+   Level: intermediate
+
+   Note: this function is useful when the interior product needs to be computed at multiple locations, as when computing the Koszul differential
+
+.seealso: PetscDTAltV, PetscDTAltVInterior(), PetscDTAltVInteriorMatrix(), PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVInteriorPattern(PetscInt N, PetscInt k, PetscInt (*indices)[3])
+{
+  PetscInt        i, Nk, Nkm;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  if (k <= 0 || k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  ierr = PetscDTBinomialInt(N, k,   &Nk);CHKERRQ(ierr);
+  ierr = PetscDTBinomialInt(N, k-1, &Nkm);CHKERRQ(ierr);
+  if (N <= 3) {
+    if (k == 1) {
+      for (i = 0; i < N; i++) {
+        indices[i][0] = 0;
+        indices[i][1] = i;
+        indices[i][2] = i;
+      }
+    } else if (k == N) {
+      PetscInt val[3] = {0, -2, 2};
+
+      for (i = 0; i < N; i++) {
+        indices[i][0] = N - 1 - i;
+        indices[i][1] = 0;
+        indices[i][2] = val[i];
+      }
+    } else {
+      indices[0][0] = 0; indices[0][1] = 0; indices[0][2] = -(1 + 1);
+      indices[1][0] = 0; indices[1][1] = 1; indices[1][2] = -(2 + 1);
+      indices[2][0] = 1; indices[2][1] = 0; indices[2][2] = 0;
+      indices[3][0] = 1; indices[3][1] = 2; indices[3][2] = -(2 + 1);
+      indices[4][0] = 2; indices[4][1] = 1; indices[4][2] = 0;
+      indices[5][0] = 2; indices[5][1] = 2; indices[5][2] = 1;
+    }
+  } else {
+    PetscInt       *subset, *work;
+
+    ierr = PetscMalloc2(k, &subset, k, &work);CHKERRQ(ierr);
+    for (i = 0; i < Nk; i++) {
+      PetscInt  j, l, m;
+
+      ierr = PetscDTEnumSubset(N, k, i, subset);CHKERRQ(ierr);
+      for (j = 0; j < k; j++) {
+        PetscInt  idx;
+        PetscBool flip = (PetscBool) (j & 1);
+
+        for (l = 0, m = 0; l < k; l++) {
+          if (l != j) work[m++] = subset[l];
+        }
+        ierr = PetscDTSubsetIndex(N, k - 1, work, &idx);CHKERRQ(ierr);
+        indices[i * k + j][0] = idx;
+        indices[i * k + j][1] = i;
+        indices[i * k + j][2] = flip ? -(subset[j] + 1) : subset[j];
+      }
+    }
+    ierr = PetscFree2(subset, work);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+/*@
+   PetscDTAltVStar - Apply a power of the Hodge star operator, which maps k-forms to (N-k) forms, to a k-form
+
+   Input Arguments:
++  N - the dimension of the vector space, N >= 0
+.  k - the degree k of the k-form w, 0 <= k <= N
+.  pow - the number of times to apply the Hodge star operator: pow < 0 indicates that the inverse of the Hodge star operator should be applied |pow| times.
+-  w - a k-form, size [N choose k]
+
+   Output Arguments:
+.  starw = (star)^pow w.  Each degree of freedom of a k-form is associated with a subset S of k coordinates of the N-dimensional vector space: the Hodge star operator (star) maps that degree of freedom to the degree of freedom associated with S', the complement of S, with a sign change if the permutation of coordinates {S[0], ... S[k-1], S'[0], ... S'[N-k-1]} is an odd permutation.  This implies (star)^2 w = (-1)^{k(N-k)} w, and (star)^4 w = w.
+
+   Level: intermediate
+
+.seealso: PetscDTAltV, PetscDTAltVPullback(), PetscDTAltVPullbackMatrix()
+@*/
+PetscErrorCode PetscDTAltVStar(PetscInt N, PetscInt k, PetscInt pow, const PetscReal *w, PetscReal *starw)
+{
+  PetscInt        Nk, i;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  if (k < 0 || k > N) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "invalid form degree");
+  ierr = PetscDTBinomialInt(N, k, &Nk);CHKERRQ(ierr);
+  pow = pow % 4;
+  pow = (pow + 4) % 4; /* make non-negative */
+  /* pow is now 0, 1, 2, 3 */
+  if (N <= 3) {
+    if (pow & 1) {
+      PetscReal mult[3] = {1., -1., 1.};
+
+      for (i = 0; i < Nk; i++) starw[Nk - 1 - i] = w[i] * mult[i];
+    } else {
+      for (i = 0; i < Nk; i++) starw[i] = w[i];
+    }
+    if (pow > 1 && ((k * (N - k)) & 1)) {
+      for (i = 0; i < Nk; i++) starw[i] = -starw[i];
+    }
+  } else {
+    PetscInt       *subset;
+
+    ierr = PetscMalloc1(N, &subset);CHKERRQ(ierr);
+    if (pow % 2) {
+      PetscInt l = (pow == 1) ? k : N - k;
+      for (i = 0; i < Nk; i++) {
+        PetscBool sOdd;
+        PetscInt  j, idx;
+
+        ierr = PetscDTEnumSplit(N, l, i, subset, &sOdd);CHKERRQ(ierr);
+        ierr = PetscDTSubsetIndex(N, l, subset, &idx);CHKERRQ(ierr);
+        ierr = PetscDTSubsetIndex(N, N-l, &subset[l], &j);CHKERRQ(ierr);
+        starw[j] = sOdd ? -w[idx] : w[idx];
+      }
+    } else {
+      for (i = 0; i < Nk; i++) starw[i] = w[i];
+    }
+    /* star^2 = -1^(k * (N - k)) */
+    if (pow > 1 && (k * (N - k)) % 2) {
+      for (i = 0; i < Nk; i++) starw[i] = -starw[i];
+    }
+    ierr = PetscFree(subset);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
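+
+/* Illustrative sketch (hypothetical values, not part of the patch's tested code): in 3
+   dimensions the Hodge star of the 1-form dx is the 2-form dy wedge dz, i.e. the last entry
+   of the (x,y),(x,z),(y,z) basis.
+
+     PetscReal w[3] = {1., 0., 0.};   dx
+     PetscReal starw[3];
+     ierr = PetscDTAltVStar(3, 1, 1, w, starw);CHKERRQ(ierr);   starw == {0., 0., 1.}
+*/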
diff --git a/src/dm/dt/interface/dtds.c b/src/dm/dt/interface/dtds.c
index afdc1ec0814..c1c74a0a006 100644
--- a/src/dm/dt/interface/dtds.c
+++ b/src/dm/dt/interface/dtds.c
@@ -223,6 +223,29 @@ static PetscErrorCode PetscDSView_Ascii(PetscDS prob, PetscViewer viewer)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscDSViewFromOptions - View from Options
+
+   Collective on PetscDS
+
+   Input Parameters:
++  A - the PetscDS object
+.  obj - Optional object
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscDS, PetscDSView, PetscObjectViewFromOptions(), PetscDSCreate()
+@*/
+PetscErrorCode  PetscDSViewFromOptions(PetscDS A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCDS_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@C
   PetscDSView - Views a PetscDS
 
@@ -355,7 +378,7 @@ PetscErrorCode PetscDSSetUp(PetscDS prob)
   prob->totDim = prob->totComp = 0;
   ierr = PetscMalloc2(Nf,&prob->Nc,Nf,&prob->Nb);CHKERRQ(ierr);
   ierr = PetscCalloc2(Nf+1,&prob->off,Nf+1,&prob->offDer);CHKERRQ(ierr);
-  ierr = PetscMalloc4(Nf,&prob->basis,Nf,&prob->basisDer,Nf,&prob->basisFace,Nf,&prob->basisDerFace);CHKERRQ(ierr);
+  ierr = PetscMalloc2(Nf,&prob->T,Nf,&prob->Tf);CHKERRQ(ierr);
   for (f = 0; f < Nf; ++f) {
     PetscObject     obj;
     PetscClassId    id;
@@ -370,15 +393,15 @@ PetscErrorCode PetscDSSetUp(PetscDS prob)
       ierr = PetscFEGetQuadrature(fe, &q);CHKERRQ(ierr);
       ierr = PetscFEGetDimension(fe, &Nb);CHKERRQ(ierr);
       ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr);
-      ierr = PetscFEGetDefaultTabulation(fe, &prob->basis[f], &prob->basisDer[f], NULL);CHKERRQ(ierr);
-      ierr = PetscFEGetFaceTabulation(fe, &prob->basisFace[f], &prob->basisDerFace[f], NULL);CHKERRQ(ierr);
+      ierr = PetscFEGetCellTabulation(fe, &prob->T[f]);CHKERRQ(ierr);
+      ierr = PetscFEGetFaceTabulation(fe, &prob->Tf[f]);CHKERRQ(ierr);
     } else if (id == PETSCFV_CLASSID) {
       PetscFV fv = (PetscFV) obj;
 
       ierr = PetscFVGetQuadrature(fv, &q);CHKERRQ(ierr);
       ierr = PetscFVGetNumComponents(fv, &Nc);CHKERRQ(ierr);
       Nb   = Nc;
-      ierr = PetscFVGetDefaultTabulation(fv, &prob->basis[f], &prob->basisDer[f], NULL);CHKERRQ(ierr);
+      ierr = PetscFVGetCellTabulation(fv, &prob->T[f]);CHKERRQ(ierr);
       /* TODO: should PetscFV also have face tabulation? Otherwise there will be a null pointer in prob->basisFace */
     } else SETERRQ1(PetscObjectComm((PetscObject) prob), PETSC_ERR_ARG_WRONG, "Unknown discretization type for field %d", f);
     prob->Nc[f]       = Nc;
@@ -413,7 +436,7 @@ static PetscErrorCode PetscDSDestroyStructs_Static(PetscDS prob)
   PetscFunctionBegin;
   ierr = PetscFree2(prob->Nc,prob->Nb);CHKERRQ(ierr);
   ierr = PetscFree2(prob->off,prob->offDer);CHKERRQ(ierr);
-  ierr = PetscFree4(prob->basis,prob->basisDer,prob->basisFace,prob->basisDerFace);CHKERRQ(ierr);
+  ierr = PetscFree2(prob->T,prob->Tf);CHKERRQ(ierr);
   ierr = PetscFree3(prob->u,prob->u_t,prob->u_x);CHKERRQ(ierr);
   ierr = PetscFree5(prob->x,prob->basisReal, prob->basisDerReal,prob->testReal,prob->testDerReal);CHKERRQ(ierr);
   ierr = PetscFree6(prob->f0,prob->f1,prob->g0,prob->g1,prob->g2,prob->g3);CHKERRQ(ierr);
@@ -2570,23 +2593,22 @@ PetscErrorCode PetscDSGetComponentDerivativeOffsets(PetscDS prob, PetscInt *offs
   Input Parameter:
 . prob - The PetscDS object
 
-  Output Parameters:
-+ basis - The basis function tabulation at quadrature points
-- basisDer - The basis function derivative tabulation at quadrature points
+  Output Parameter:
+. T - The basis function and derivative tabulation at quadrature points for each field
 
   Level: intermediate
 
 .seealso: PetscDSCreate()
 @*/
-PetscErrorCode PetscDSGetTabulation(PetscDS prob, PetscReal ***basis, PetscReal ***basisDer)
+PetscErrorCode PetscDSGetTabulation(PetscDS prob, PetscTabulation *T[])
 {
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(prob, PETSCDS_CLASSID, 1);
+  PetscValidPointer(T, 2);
   ierr = PetscDSSetUp(prob);CHKERRQ(ierr);
-  if (basis)    {PetscValidPointer(basis, 2);    *basis    = prob->basis;}
-  if (basisDer) {PetscValidPointer(basisDer, 3); *basisDer = prob->basisDer;}
+  *T = prob->T;
   PetscFunctionReturn(0);
 }
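+/* Illustrative caller-side sketch (hypothetical variables, not part of the patch's tested
+   code): with the PetscTabulation-based interface, the basis values and derivatives formerly
+   returned as two PetscReal ** arrays are now fields of each tabulation, with T[f]->T[0]
+   holding the basis values and T[f]->T[1] the first derivatives for field f.
+
+     PetscTabulation *T;
+     ierr = PetscDSGetTabulation(prob, &T);CHKERRQ(ierr);
+     const PetscReal *basis    = T[f]->T[0];
+     const PetscReal *basisDer = T[f]->T[1];
+*/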
 
@@ -2598,23 +2620,22 @@ PetscErrorCode PetscDSGetTabulation(PetscDS prob, PetscReal ***basis, PetscReal
   Input Parameter:
 . prob - The PetscDS object
 
-  Output Parameters:
-+ basisFace - The basis function tabulation at quadrature points
-- basisDerFace - The basis function derivative tabulation at quadrature points
+  Output Parameter:
+. Tf - The basis function and derivative tabulation on each local face at quadrature points for each field
 
   Level: intermediate
 
 .seealso: PetscDSGetTabulation(), PetscDSCreate()
 @*/
-PetscErrorCode PetscDSGetFaceTabulation(PetscDS prob, PetscReal ***basis, PetscReal ***basisDer)
+PetscErrorCode PetscDSGetFaceTabulation(PetscDS prob, PetscTabulation *Tf[])
 {
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(prob, PETSCDS_CLASSID, 1);
+  PetscValidPointer(Tf, 2);
   ierr = PetscDSSetUp(prob);CHKERRQ(ierr);
-  if (basis)    {PetscValidPointer(basis, 2);    *basis    = prob->basisFace;}
-  if (basisDer) {PetscValidPointer(basisDer, 3); *basisDer = prob->basisDerFace;}
+  *Tf = prob->Tf;
   PetscFunctionReturn(0);
 }
 
@@ -3098,6 +3119,25 @@ PetscErrorCode PetscDSGetHeightSubspace(PetscDS prob, PetscInt height, PetscDS *
   PetscFunctionReturn(0);
 }
 
+PetscErrorCode PetscDSIsFE_Internal(PetscDS ds, PetscInt f, PetscBool *isFE)
+{
+  PetscObject    obj;
+  PetscClassId   id;
+  PetscInt       Nf;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(ds, PETSCDS_CLASSID, 1);
+  PetscValidPointer(isFE, 3);
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  if (f >= Nf) SETERRQ2(PetscObjectComm((PetscObject) ds), PETSC_ERR_ARG_SIZ, "Field %D must be in [0, %D)", f, Nf);
+  ierr = PetscDSGetDiscretization(ds, f, &obj);CHKERRQ(ierr);
+  ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
+  if (id == PETSCFE_CLASSID) *isFE = PETSC_TRUE;
+  else                       *isFE = PETSC_FALSE;
+  PetscFunctionReturn(0);
+}
+
 static PetscErrorCode PetscDSDestroy_Basic(PetscDS prob)
 {
   PetscErrorCode      ierr;
diff --git a/src/dm/dt/interface/f90-custom/makefile b/src/dm/dt/interface/f90-custom/makefile
index d6851da21d4..c90887c1a3e 100644
--- a/src/dm/dt/interface/f90-custom/makefile
+++ b/src/dm/dt/interface/f90-custom/makefile
@@ -1,5 +1,4 @@
 #requiresdefine   'PETSC_HAVE_FORTRAN'
-#requiresdefine   'PETSC_USING_F90'
 ALL: lib
 
 CFLAGS   =
diff --git a/src/dm/dt/interface/f90-custom/zdtdsf90.c b/src/dm/dt/interface/f90-custom/zdtdsf90.c
index ecdde74b860..f9040efb299 100644
--- a/src/dm/dt/interface/f90-custom/zdtdsf90.c
+++ b/src/dm/dt/interface/f90-custom/zdtdsf90.c
@@ -16,10 +16,10 @@
 
 PETSC_EXTERN void PETSC_STDCALL petscdsgettabulation_(PetscDS *prob, PetscInt *f, F90Array1d *ptrB, F90Array1d *ptrD, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptrb) PETSC_F90_2PTR_PROTO(ptrd))
 {
-  PetscFE         fe;
-  PetscQuadrature q;
-  PetscInt        dim, Nb, Nc, Nq;
-  PetscReal     **basis, **basisDer;
+  PetscFE          fe;
+  PetscQuadrature  q;
+  PetscInt         dim, Nb, Nc, Nq;
+  PetscTabulation *T;
 
   *ierr = PetscDSGetSpatialDimension(*prob, &dim);if (*ierr) return;
   *ierr = PetscDSGetDiscretization(*prob, *f, (PetscObject *) &fe);if (*ierr) return;
@@ -27,9 +27,9 @@ PETSC_EXTERN void PETSC_STDCALL petscdsgettabulation_(PetscDS *prob, PetscInt *f
   *ierr = PetscFEGetNumComponents(fe, &Nc);if (*ierr) return;
   *ierr = PetscFEGetQuadrature(fe, &q);if (*ierr) return;
   *ierr = PetscQuadratureGetData(q, NULL, NULL, &Nq, NULL, NULL);if (*ierr) return;
-  *ierr = PetscDSGetTabulation(*prob, &basis, &basisDer);if (*ierr) return;
-  *ierr = F90Array1dCreate((void *) basis[*f],    MPIU_REAL, 1, Nq*Nb*Nc, ptrB PETSC_F90_2PTR_PARAM(ptrb));if (*ierr) return;
-  *ierr = F90Array1dCreate((void *) basisDer[*f], MPIU_REAL, 1, Nq*Nb*Nc*dim, ptrD PETSC_F90_2PTR_PARAM(ptrd));
+  *ierr = PetscDSGetTabulation(*prob, &T);if (*ierr) return;
+  *ierr = F90Array1dCreate((void *) T[*f]->T[0], MPIU_REAL, 1, Nq*Nb*Nc,     ptrB PETSC_F90_2PTR_PARAM(ptrb));if (*ierr) return;
+  *ierr = F90Array1dCreate((void *) T[*f]->T[1], MPIU_REAL, 1, Nq*Nb*Nc*dim, ptrD PETSC_F90_2PTR_PARAM(ptrd));
 }
 
 PETSC_EXTERN void PETSC_STDCALL petscdsrestoretabulation_(PetscDS *prob, PetscInt *f, F90Array1d *ptrB, F90Array1d *ptrD, PetscErrorCode *ierr PETSC_F90_2PTR_PROTO(ptrb) PETSC_F90_2PTR_PROTO(ptrd))
diff --git a/src/dm/dt/interface/ftn-custom/zdsf.c b/src/dm/dt/interface/ftn-custom/zdsf.c
new file mode 100644
index 00000000000..79cd79ef15c
--- /dev/null
+++ b/src/dm/dt/interface/ftn-custom/zdsf.c
@@ -0,0 +1,19 @@
+#include 
+#include 
+#include 
+
+#if defined(PETSC_HAVE_FORTRAN_CAPS)
+#define petscdsviewfromoptions_   PETSCDSVIEWFROMOPTIONS
+#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
+#define petscdsviewfromoptions_   petscdsviewfromoptions
+#endif
+
+PETSC_EXTERN void PETSC_STDCALL petscdsviewfromoptions_(PetscDS *ao,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscDSViewFromOptions(*ao,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
diff --git a/src/dm/dt/interface/makefile b/src/dm/dt/interface/makefile
index e35dd886896..f2eb0d6012e 100644
--- a/src/dm/dt/interface/makefile
+++ b/src/dm/dt/interface/makefile
@@ -3,7 +3,7 @@ ALL: lib
 
 CFLAGS   =
 FFLAGS   =
-SOURCEC  = dt.c dtds.c
+SOURCEC  = dt.c dtds.c dtaltv.c
 SOURCEF  =
 SOURCEH  =
 LIBBASE  = libpetscdm
diff --git a/src/dm/dt/space/interface/space.c b/src/dm/dt/space/interface/space.c
index 060ae1d1ec6..c3831f72587 100644
--- a/src/dm/dt/space/interface/space.c
+++ b/src/dm/dt/space/interface/space.c
@@ -118,6 +118,29 @@ PetscErrorCode PetscSpaceGetType(PetscSpace sp, PetscSpaceType *name)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscSpaceViewFromOptions - View from Options
+
+   Collective on PetscSpace
+
+   Input Parameters:
++  A - the PetscSpace object
+.  obj - Optional object
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscSpace, PetscSpaceView, PetscObjectViewFromOptions(), PetscSpaceCreate()
+@*/
+PetscErrorCode  PetscSpaceViewFromOptions(PetscSpace A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCSPACE_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@C
   PetscSpaceView - Views a PetscSpace
 
@@ -473,7 +496,7 @@ PetscErrorCode PetscSpaceGetNumVariables(PetscSpace sp, PetscInt *n)
 
   Level: beginner
 
-.seealso: PetscFEGetTabulation(), PetscFEGetDefaultTabulation(), PetscSpaceCreate()
+.seealso: PetscFECreateTabulation(), PetscFEGetCellTabulation(), PetscSpaceCreate()
 @*/
 PetscErrorCode PetscSpaceEvaluate(PetscSpace sp, PetscInt npoints, const PetscReal points[], PetscReal B[], PetscReal D[], PetscReal H[])
 {
diff --git a/src/dm/examples/tests/ex24.c b/src/dm/examples/tests/ex24.c
index 4182b3238ca..7c66a97b2cf 100644
--- a/src/dm/examples/tests/ex24.c
+++ b/src/dm/examples/tests/ex24.c
@@ -1,5 +1,5 @@
 
-static char help[] = "Tests DMDALocalToGlocal() for dof > 1\n\n";
+static char help[] = "Tests DMLocalToGlobal() for dof > 1\n\n";
 
 #include <petscdm.h>
 #include <petscdmda.h>
diff --git a/src/dm/examples/tests/ex25.c b/src/dm/examples/tests/ex25.c
index 490a905250c..803e85f3833 100644
--- a/src/dm/examples/tests/ex25.c
+++ b/src/dm/examples/tests/ex25.c
@@ -1,5 +1,5 @@
 
-static char help[] = "Tests DMDALocalToGlocal() for dof > 1\n\n";
+static char help[] = "Tests DMLocalToGlobal() for dof > 1\n\n";
 
 #include <petscdm.h>
 #include <petscdmda.h>
diff --git a/src/dm/examples/tests/ex9.c b/src/dm/examples/tests/ex9.c
new file mode 100644
index 00000000000..7be608c9548
--- /dev/null
+++ b/src/dm/examples/tests/ex9.c
@@ -0,0 +1,69 @@
+
+static char help[] = "Tests DMCreateMatrix for DMComposite.\n\n";
+
+#include 
+#include 
+#include 
+#include 
+#include 
+
+int main(int argc,char **argv)
+{
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping *ltog,ltogs;
+  PetscMPIInt            size;
+  DM                     packer;
+  DM                     da,dmred;
+  Mat                    M;
+  PetscInt               i;
+
+  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
+  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
+
+  ierr = DMCompositeCreate(PETSC_COMM_WORLD,&packer);CHKERRQ(ierr);
+
+  ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,5,&dmred);CHKERRQ(ierr);
+  ierr = DMCompositeAddDM(packer,dmred);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(dmred,&ltogs);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of dmred\n");CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingView(ltogs,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = DMDestroy(&dmred);CHKERRQ(ierr);
+
+  ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_MIRROR,DM_BOUNDARY_MIRROR,DMDA_STENCIL_STAR,3,3,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(da);CHKERRQ(ierr);
+  ierr = DMSetUp(da);CHKERRQ(ierr);
+  ierr = DMCompositeAddDM(packer,da);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogs);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of da\n");CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingView(ltogs,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+
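+  /* Use MATNEST so that DMCreateMatrix() for the composite produces one block per pair of sub-DMs */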
+  ierr = DMSetMatType(packer,MATNEST);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(packer);CHKERRQ(ierr);
+  ierr = DMCreateMatrix(packer,&M);CHKERRQ(ierr);
+  ierr = MatView(M,NULL);CHKERRQ(ierr);
+  ierr = MatDestroy(&M);CHKERRQ(ierr);
+
+  /* get the global numbering for each subvector element */
+  ierr = DMCompositeGetISLocalToGlobalMappings(packer,&ltog);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of dmred vector\n");CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingView(ltog[0],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of da vector\n");CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingView(ltog[1],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  for (i=0; i<2; i++) {ierr = ISLocalToGlobalMappingDestroy(&ltog[i]);CHKERRQ(ierr);}
+
+  ierr = PetscFree(ltog);CHKERRQ(ierr);
+  ierr = DMDestroy(&packer);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+
+
+/*TEST
+
+   test:
+     suffix: composite_nest_l2g
+     nsize: {{1 2}separate output}
+
+TEST*/
diff --git a/src/dm/examples/tests/makefile b/src/dm/examples/tests/makefile
index a6cfdd729fe..315316c82c1 100644
--- a/src/dm/examples/tests/makefile
+++ b/src/dm/examples/tests/makefile
@@ -4,8 +4,8 @@ FFLAGS	        =
 CPPFLAGS        =
 FPPFLAGS        =
 LOCDIR          = src/dm/examples/tests/
-EXAMPLESC       = ex1.c ex2.c ex3.c ex4.c ex5.c ex6.c ex7.c ex8.c \
-                  ex11.c ex12.c ex13.c ex14.c ex15.c ex16.c  ex19.c ex20.c \
+EXAMPLESC       = ex1.c ex2.c ex3.c ex4.c ex5.c ex6.c ex7.c ex8.c ex9.c \
+                  ex11.c ex12.c ex13.c ex14.c ex15.c ex16.c ex19.c ex20.c \
                   ex21.c ex22.c ex23.c ex24.c ex25.c ex26.c ex27.c ex28.c ex30.c \
                   ex31.c ex32.c ex34.c ex36.c ex37.c ex38.c ex39.c ex40.c ex41.c \
                   ex42.c ex43.c ex44.c ex45.c ex46.c ex47.c ex48.c ex49.c ex50.c ex51.c ex52.c
diff --git a/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-1.out b/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-1.out
new file mode 100644
index 00000000000..d7282b68085
--- /dev/null
+++ b/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-1.out
@@ -0,0 +1,106 @@
+Local to global mapping of dmred
+ISLocalToGlobalMapping Object: 1 MPI processes
+  type not yet set
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[0] 4 4
+Local to global mapping of da
+ISLocalToGlobalMapping Object: 1 MPI processes
+  type not yet set
+[0] 0 -1
+[0] 1 3
+[0] 2 4
+[0] 3 5
+[0] 4 -1
+[0] 5 1
+[0] 6 0
+[0] 7 1
+[0] 8 2
+[0] 9 1
+[0] 10 4
+[0] 11 3
+[0] 12 4
+[0] 13 5
+[0] 14 4
+[0] 15 7
+[0] 16 6
+[0] 17 7
+[0] 18 8
+[0] 19 7
+[0] 20 -1
+[0] 21 3
+[0] 22 4
+[0] 23 5
+[0] 24 -1
+Mat Object: 1 MPI processes
+  type: nest
+  Matrix object: 
+    type=nest, rows=2, cols=2 
+    MatNest structure: 
+    (0,0) : type=seqaij, rows=5, cols=5 
+    (0,1) : NULL 
+    (1,0) : NULL 
+    (1,1) : type=seqaij, rows=18, cols=18 
+Local to global mapping of dmred vector
+ISLocalToGlobalMapping Object: 1 MPI processes
+  type not yet set
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[0] 4 4
+Local to global mapping of da vector
+ISLocalToGlobalMapping Object: 1 MPI processes
+  type not yet set
+[0] 0 -7
+[0] 1 -6
+[0] 2 11
+[0] 3 12
+[0] 4 13
+[0] 5 14
+[0] 6 15
+[0] 7 16
+[0] 8 -7
+[0] 9 -6
+[0] 10 7
+[0] 11 8
+[0] 12 5
+[0] 13 6
+[0] 14 7
+[0] 15 8
+[0] 16 9
+[0] 17 10
+[0] 18 7
+[0] 19 8
+[0] 20 13
+[0] 21 14
+[0] 22 11
+[0] 23 12
+[0] 24 13
+[0] 25 14
+[0] 26 15
+[0] 27 16
+[0] 28 13
+[0] 29 14
+[0] 30 19
+[0] 31 20
+[0] 32 17
+[0] 33 18
+[0] 34 19
+[0] 35 20
+[0] 36 21
+[0] 37 22
+[0] 38 19
+[0] 39 20
+[0] 40 -7
+[0] 41 -6
+[0] 42 11
+[0] 43 12
+[0] 44 13
+[0] 45 14
+[0] 46 15
+[0] 47 16
+[0] 48 -7
+[0] 49 -6
diff --git a/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-2.out b/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-2.out
new file mode 100644
index 00000000000..84bd8d68b61
--- /dev/null
+++ b/src/dm/examples/tests/output/ex9_composite_nest_l2g_nsize-2.out
@@ -0,0 +1,146 @@
+Local to global mapping of dmred
+ISLocalToGlobalMapping Object: 2 MPI processes
+  type not yet set
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[0] 4 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[1] 4 4
+Local to global mapping of da
+ISLocalToGlobalMapping Object: 2 MPI processes
+  type not yet set
+[0] 0 -1
+[0] 1 3
+[0] 2 4
+[0] 3 5
+[0] 4 -1
+[0] 5 1
+[0] 6 0
+[0] 7 1
+[0] 8 2
+[0] 9 1
+[0] 10 4
+[0] 11 3
+[0] 12 4
+[0] 13 5
+[0] 14 4
+[0] 15 -1
+[0] 16 6
+[0] 17 7
+[0] 18 8
+[0] 19 -1
+[1] 0 -1
+[1] 1 3
+[1] 2 4
+[1] 3 5
+[1] 4 -1
+[1] 5 7
+[1] 6 6
+[1] 7 7
+[1] 8 8
+[1] 9 7
+[1] 10 -1
+[1] 11 3
+[1] 12 4
+[1] 13 5
+[1] 14 -1
+Mat Object: 2 MPI processes
+  type: nest
+  Matrix object: 
+    type=nest, rows=2, cols=2 
+    MatNest structure: 
+    (0,0) : type=mpiaij, rows=5, cols=5 
+    (0,1) : NULL 
+    (1,0) : NULL 
+    (1,1) : type=mpiaij, rows=18, cols=18 
+Local to global mapping of dmred vector
+ISLocalToGlobalMapping Object: 2 MPI processes
+  type not yet set
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[0] 4 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[1] 4 4
+Local to global mapping of da vector
+ISLocalToGlobalMapping Object: 2 MPI processes
+  type not yet set
+[0] 0 -7
+[0] 1 -6
+[0] 2 11
+[0] 3 12
+[0] 4 13
+[0] 5 14
+[0] 6 15
+[0] 7 16
+[0] 8 -7
+[0] 9 -6
+[0] 10 7
+[0] 11 8
+[0] 12 5
+[0] 13 6
+[0] 14 7
+[0] 15 8
+[0] 16 9
+[0] 17 10
+[0] 18 7
+[0] 19 8
+[0] 20 13
+[0] 21 14
+[0] 22 11
+[0] 23 12
+[0] 24 13
+[0] 25 14
+[0] 26 15
+[0] 27 16
+[0] 28 13
+[0] 29 14
+[0] 30 -7
+[0] 31 -6
+[0] 32 17
+[0] 33 18
+[0] 34 19
+[0] 35 20
+[0] 36 21
+[0] 37 22
+[0] 38 -7
+[0] 39 -6
+[1] 0 -19
+[1] 1 -18
+[1] 2 11
+[1] 3 12
+[1] 4 13
+[1] 5 14
+[1] 6 15
+[1] 7 16
+[1] 8 -19
+[1] 9 -18
+[1] 10 19
+[1] 11 20
+[1] 12 17
+[1] 13 18
+[1] 14 19
+[1] 15 20
+[1] 16 21
+[1] 17 22
+[1] 18 19
+[1] 19 20
+[1] 20 -19
+[1] 21 -18
+[1] 22 11
+[1] 23 12
+[1] 24 13
+[1] 25 14
+[1] 26 15
+[1] 27 16
+[1] 28 -19
+[1] 29 -18
diff --git a/src/dm/examples/tutorials/ex2.c b/src/dm/examples/tutorials/ex2.c
new file mode 100644
index 00000000000..ae876b810ef
--- /dev/null
+++ b/src/dm/examples/tutorials/ex2.c
@@ -0,0 +1,157 @@
+static char help[] = "Demonstrates Conway's Game of Life using a 2d DMDA.\n\n";
+
+/*
+ At each step in time, the following transitions occur:
+
+    Any live cell with fewer than two live neighbours dies, as if by underpopulation.
+    Any live cell with two or three live neighbours lives on to the next generation.
+    Any live cell with more than three live neighbours dies, as if by overpopulation.
+    Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction.
+
+ https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life
+*/
+
+#include <petscdm.h>
+#include <petscdmda.h>
+
+static const int GLIDER[3][3] = {
+  {0, 1, 0},
+  {0, 1, 1},
+  {1, 0, 1}
+};
+
+int main(int argc,char **argv)
+{
+  PetscErrorCode   ierr;
+  DM               da;
+  PetscViewer      viewer;
+  Vec              Xlocal, Xglobal;
+  PetscInt         glider_loc[2] = {10, 20}, blinker_loc[2] = {20, 10}, two, steps = 100, viz_interval = 1;
+  PetscInt         check_step_alive = -1, check_step_dead = -1;
+  PetscBool        has_glider, has_blinker;
+
+  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
+  ierr = PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"Conway's Game of Life","");CHKERRQ(ierr);
+  {
+    ierr = PetscOptionsIntArray("-glider","Coordinate at which to center a glider",NULL,glider_loc,(two=2,&two),&has_glider);CHKERRQ(ierr);
+    ierr = PetscOptionsIntArray("-blinker","Coordinate at which to center a blinker",NULL,blinker_loc,(two=2,&two),&has_blinker);CHKERRQ(ierr);
+    ierr = PetscOptionsInt("-steps","Number of steps to take",NULL,steps,&steps,NULL);CHKERRQ(ierr);
+    ierr = PetscOptionsInt("-viz_interval","Vizualization interval",NULL,viz_interval,&viz_interval,NULL);CHKERRQ(ierr);
+    ierr = PetscOptionsInt("-check_step_alive","Step on which to check that the simulation is alive",NULL,check_step_alive,&check_step_alive,NULL);CHKERRQ(ierr);
+    ierr = PetscOptionsInt("-check_step_dead","Step on which to check that the simulation is dead",NULL,check_step_dead,&check_step_dead,NULL);CHKERRQ(ierr);
+  }
+  ierr = PetscOptionsEnd();CHKERRQ(ierr);
+
+  ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,NULL,"Life",PETSC_DECIDE,PETSC_DECIDE,1000,1000,&viewer);CHKERRQ(ierr);
+
+  /* Create distributed array and get vectors */
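+  /* Periodic boundaries wrap the domain; a box stencil of width 1 provides the eight neighbors used by the update rule */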
+  ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_BOX,30,30,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(da);CHKERRQ(ierr);
+  ierr = DMSetUp(da);CHKERRQ(ierr);
+  ierr = DMCreateLocalVector(da,&Xlocal);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(da,&Xglobal);CHKERRQ(ierr);
+
+  {  /* Initialize */
+    DMDALocalInfo info;
+    PetscScalar   **x;
+    PetscInt      i,j;
+
+    ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
+    ierr = DMDAVecGetArray(da, Xlocal, &x);CHKERRQ(ierr);
+    for (j=info.ys; j 0;
+          live_neighbors += PetscRealPart(x[j-1][i]) > 0;
+          live_neighbors += PetscRealPart(x[j-1][i+1]) > 0;
+          live_neighbors += PetscRealPart(x[j][i-1]) > 0;
+          live_neighbors += PetscRealPart(x[j][i+1]) > 0;
+          live_neighbors += PetscRealPart(x[j+1][i-1]) > 0;
+          live_neighbors += PetscRealPart(x[j+1][i]) > 0;
+          live_neighbors += PetscRealPart(x[j+1][i+1]) > 0;
+          if (PetscRealPart(x[j][i]) > 0) {    /* Live cell */
+            switch (live_neighbors) {
+            case 2:
+            case 3:
+              y[j][i] = 1;      /* Survive */
+              break;
+            default:
+              y[j][i] = 0;      /* Death */
+            }
+          } else {                                /* Dead cell */
+            if (live_neighbors == 3) y[j][i] = 1; /* Birth */
+            else y[j][i] = 0;
+          }
+        }
+      }
+      ierr = DMDAVecRestoreArrayRead(da, Xlocal, &x);CHKERRQ(ierr);
+      ierr = DMDAVecRestoreArrayWrite(da, Xglobal, &y);CHKERRQ(ierr);
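+      /* Print a message only if the observed state contradicts the expectation for this step */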
+      if (step == check_step_alive || step == check_step_dead) {
+        PetscScalar sum;
+        ierr = VecSum(Xglobal, &sum);CHKERRQ(ierr);
+        if (PetscAbsScalar(sum) > 0.1) {
+          if (step == check_step_dead) {
+            ierr = PetscPrintf(PETSC_COMM_WORLD,"Simulation alive at step %D\n",step);CHKERRQ(ierr);
+          }
+        } else if (step == check_step_alive) {
+          ierr = PetscPrintf(PETSC_COMM_WORLD,"Simulation dead at step %D\n",step);CHKERRQ(ierr);
+        }
+      }
+      if (step % viz_interval == 0) {
+        ierr = VecView(Xglobal, viewer);CHKERRQ(ierr);
+      }
+    }
+  }
+
+  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
+  ierr = VecDestroy(&Xglobal);CHKERRQ(ierr);
+  ierr = VecDestroy(&Xlocal);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+
+/*TEST
+
+   test:
+      requires: x
+      nsize: 2
+      args: -glider 5,6 -blinker 12,12 -steps 35 -check_step_alive 31 -check_step_dead 32 -da_grid_x 20 -da_grid_y 20 -nox
+
+TEST*/
diff --git a/src/dm/examples/tutorials/output/ex20_1.out b/src/dm/examples/tutorials/output/ex20_1.out
index 8dc965beb87..c8071f3cfb9 100644
--- a/src/dm/examples/tutorials/output/ex20_1.out
+++ b/src/dm/examples/tutorials/output/ex20_1.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: 1 MPI processes
   type: da
 Processor [0] M 25 N 13 m 1 n 1 w 1 s 1
diff --git a/src/dm/examples/tutorials/output/ex20_2.out b/src/dm/examples/tutorials/output/ex20_2.out
index 941f5797c46..2a93b514569 100644
--- a/src/dm/examples/tutorials/output/ex20_2.out
+++ b/src/dm/examples/tutorials/output/ex20_2.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
   type: plex
   0-cells: 15
   1-cells: 30
diff --git a/src/dm/examples/tutorials/output/swarm_ex3.out b/src/dm/examples/tutorials/output/swarm_ex3.out
index 12dd37aacc8..ea36adad1f1 100644
--- a/src/dm/examples/tutorials/output/swarm_ex3.out
+++ b/src/dm/examples/tutorials/output/swarm_ex3.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/f90-mod/makefile b/src/dm/f90-mod/makefile
index e070ada376b..214725efe73 100644
--- a/src/dm/f90-mod/makefile
+++ b/src/dm/f90-mod/makefile
@@ -1,11 +1,10 @@
 #requiresdefine   'PETSC_HAVE_FORTRAN'
-#requiresdefine   'PETSC_USING_F90'
 #
 
 CFLAGS	   =
 FFLAGS	   =
 SOURCEC	   =
-SOURCEF	   = petscdmmod.F petscdmplexmod.F petscdmdamod.F
+SOURCEF90  = petscdmmod.F90 petscdmplexmod.F90 petscdmdamod.F90
 SOURCEH = petscdmcomposite.h90 petscdmda.h90 petscdmforest.h90          \
  petscdm.h90 petscdmlabel.h90 petscdmnetwork.h90 petscdmpatch.h90       \
  petscdmplex.h90 petscdt.h90
diff --git a/src/dm/f90-mod/petscdm.h b/src/dm/f90-mod/petscdm.h
index 8fb369ee3d9..9a5ecc13841 100644
--- a/src/dm/f90-mod/petscdm.h
+++ b/src/dm/f90-mod/petscdm.h
@@ -14,45 +14,27 @@
 !
 !  Types of periodicity
 !
-      PetscEnum DM_BOUNDARY_NONE
-      PetscEnum DM_BOUNDARY_GHOSTED
-      PetscEnum DM_BOUNDARY_MIRROR
-      PetscEnum DM_BOUNDARY_PERIODIC
-      PetscEnum DM_BOUNDARY_TWIST
-
-      parameter (DM_BOUNDARY_NONE = 0)
-      parameter (DM_BOUNDARY_GHOSTED = 1)
-      parameter (DM_BOUNDARY_MIRROR = 2)
-      parameter (DM_BOUNDARY_PERIODIC = 3)
-      parameter (DM_BOUNDARY_TWIST = 4)
+      PetscEnum, parameter :: DM_BOUNDARY_NONE = 0
+      PetscEnum, parameter :: DM_BOUNDARY_GHOSTED = 1
+      PetscEnum, parameter :: DM_BOUNDARY_MIRROR = 2
+      PetscEnum, parameter :: DM_BOUNDARY_PERIODIC = 3
+      PetscEnum, parameter :: DM_BOUNDARY_TWIST = 4
 
 !
 !  Types of point location
 !
-      PetscEnum DM_POINTLOCATION_NONE
-      PetscEnum DM_POINTLOCATION_NEAREST
-      PetscEnum DM_POINTLOCATION_REMOVE
-
-      parameter (DM_POINTLOCATION_NONE = 0)
-      parameter (DM_POINTLOCATION_NEAREST = 1)
-      parameter (DM_POINTLOCATION_REMOVE = 2)
+      PetscEnum, parameter :: DM_POINTLOCATION_NONE = 0
+      PetscEnum, parameter :: DM_POINTLOCATION_NEAREST = 1
+      PetscEnum, parameter :: DM_POINTLOCATION_REMOVE = 2
 
-      PetscEnum DM_ADAPT_DETERMINE
-      PetscEnum DM_ADAPT_KEEP
-      PetscEnum DM_ADAPT_REFINE
-      PetscEnum DM_ADAPT_COARSEN
-      PetscEnum DM_ADAPT_RESERVED_COUNT
-
-      parameter (DM_ADAPT_DETERMINE=-1)
-      parameter (DM_ADAPT_KEEP=0)
-      parameter (DM_ADAPT_REFINE=1)
-      parameter (DM_ADAPT_COARSEN=2)
-      parameter (DM_ADAPT_RESERVED_COUNT=3)
+      PetscEnum, parameter :: DM_ADAPT_DETERMINE=-1
+      PetscEnum, parameter :: DM_ADAPT_KEEP=0
+      PetscEnum, parameter :: DM_ADAPT_REFINE=1
+      PetscEnum, parameter :: DM_ADAPT_COARSEN=2
+      PetscEnum, parameter :: DM_ADAPT_RESERVED_COUNT=3
 !
 ! DMDA Directions
 !
-      PetscEnum DM_X
-      PetscEnum DM_Y
-      PetscEnum DM_Z
-
-      parameter (DM_X = 0,DM_Y = 1,DM_Z = 2)
+      PetscEnum, parameter :: DM_X = 0
+      PetscEnum, parameter :: DM_Y = 1
+      PetscEnum, parameter :: DM_Z = 2
diff --git a/src/dm/f90-mod/petscdmda.h b/src/dm/f90-mod/petscdmda.h
index 27e0284c5ea..6b86f676f44 100644
--- a/src/dm/f90-mod/petscdmda.h
+++ b/src/dm/f90-mod/petscdmda.h
@@ -7,23 +7,17 @@
 !
 !  Types of stencils
 !
-      PetscEnum DMDA_STENCIL_STAR
-      PetscEnum DMDA_STENCIL_BOX
-
-      parameter (DMDA_STENCIL_STAR = 0,DMDA_STENCIL_BOX = 1)
-
+      PetscEnum, parameter :: DMDA_STENCIL_STAR = 0
+      PetscEnum, parameter :: DMDA_STENCIL_BOX = 1
 !
 ! DMDAInterpolationType
 !
-      PetscEnum DMDA_Q0
-      PetscEnum DMDA_Q1
-      parameter (DMDA_Q0=0,DMDA_Q1=1)
-
+      PetscEnum, parameter :: DMDA_Q0=0
+      PetscEnum, parameter :: DMDA_Q1=1
 !
 !     DMDAElementType
 !
-      PetscEnum DMDA_ELEMENT_P1
-      PetscEnum DMDA_ELEMENT_Q1
-      parameter(DMDA_ELEMENT_P1=0,DMDA_ELEMENT_Q1=1)
+      PetscEnum, parameter :: DMDA_ELEMENT_P1=0
+      PetscEnum, parameter :: DMDA_ELEMENT_Q1=1
 !
 !  End of Fortran include file for the DM package in PETSc
diff --git a/src/dm/f90-mod/petscdmdamod.F b/src/dm/f90-mod/petscdmdamod.F90
similarity index 100%
rename from src/dm/f90-mod/petscdmdamod.F
rename to src/dm/f90-mod/petscdmdamod.F90
diff --git a/src/dm/f90-mod/petscdmmod.F b/src/dm/f90-mod/petscdmmod.F90
similarity index 100%
rename from src/dm/f90-mod/petscdmmod.F
rename to src/dm/f90-mod/petscdmmod.F90
diff --git a/src/dm/f90-mod/petscdmplex.h b/src/dm/f90-mod/petscdmplex.h
index f83eeb2eac3..74900d5f523 100644
--- a/src/dm/f90-mod/petscdmplex.h
+++ b/src/dm/f90-mod/petscdmplex.h
@@ -6,14 +6,8 @@
 !
 ! DMPlexInterpolatedFlag
 !
-      PetscEnum DMPLEX_INTERPOLATED_INVALID
-      PetscEnum DMPLEX_INTERPOLATED_NONE
-      PetscEnum DMPLEX_INTERPOLATED_PARTIAL
-      PetscEnum DMPLEX_INTERPOLATED_MIXED
-      PetscEnum DMPLEX_INTERPOLATED_FULL
-
-      parameter (DMPLEX_INTERPOLATED_INVALID = -1)
-      parameter (DMPLEX_INTERPOLATED_NONE = 0)
-      parameter (DMPLEX_INTERPOLATED_PARTIAL = 1)
-      parameter (DMPLEX_INTERPOLATED_MIXED = 2)
-      parameter (DMPLEX_INTERPOLATED_FULL = 3)
+      PetscEnum, parameter :: DMPLEX_INTERPOLATED_INVALID = -1
+      PetscEnum, parameter :: DMPLEX_INTERPOLATED_NONE = 0
+      PetscEnum, parameter :: DMPLEX_INTERPOLATED_PARTIAL = 1
+      PetscEnum, parameter :: DMPLEX_INTERPOLATED_MIXED = 2
+      PetscEnum, parameter :: DMPLEX_INTERPOLATED_FULL = 3
diff --git a/src/dm/f90-mod/petscdmplexmod.F b/src/dm/f90-mod/petscdmplexmod.F90
similarity index 100%
rename from src/dm/f90-mod/petscdmplexmod.F
rename to src/dm/f90-mod/petscdmplexmod.F90
diff --git a/src/dm/field/impls/ds/dmfieldds.c b/src/dm/field/impls/ds/dmfieldds.c
index 699b28cea13..3628902bb88 100644
--- a/src/dm/field/impls/ds/dmfieldds.c
+++ b/src/dm/field/impls/ds/dmfieldds.c
@@ -92,6 +92,7 @@ static PetscErrorCode DMFieldDSGetHeightDisc(DMField field, PetscInt height, Pet
     }                                                                            \
   } while (0)
 
+/* TODO: Reorganize interface so that I can reuse a tabulation rather than mallocing each time */
 static PetscErrorCode DMFieldEvaluateFE_DS(DMField field, IS pointIS, PetscQuadrature quad, PetscDataType type, void *B, void *D, void *H)
 {
   DMField_DS      *dsfield = (DMField_DS *) field->data;
@@ -126,10 +127,11 @@ static PetscErrorCode DMFieldEvaluateFE_DS(DMField field, IS pointIS, PetscQuadr
   if (classid == PETSCFE_CLASSID) {
     PetscFE      fe = (PetscFE) disc;
     PetscInt     feDim, i;
-    PetscReal    *fB = NULL, *fD = NULL, *fH = NULL;
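+    /* K is the highest derivative order requested: 2 if Hessians are needed, 1 for gradients, 0 for values only */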
+    PetscInt          K = H ? 2 : (D ? 1 : (B ? 0 : -1));
+    PetscTabulation   T;
 
     ierr = PetscFEGetDimension(fe,&feDim);CHKERRQ(ierr);
-    ierr = PetscFEGetTabulation(fe,nq,qpoints,B ? &fB : NULL,D ? &fD : NULL,H ? &fH : NULL);CHKERRQ(ierr);
+    ierr = PetscFECreateTabulation(fe,1,nq,qpoints,K,&T);CHKERRQ(ierr);
     for (i = 0; i < numCells; i++) {
       PetscInt     c = isStride ? (sfirst + i * stride) : points[i];
       PetscInt     closureSize;
@@ -140,38 +142,38 @@ static PetscErrorCode DMFieldEvaluateFE_DS(DMField field, IS pointIS, PetscQuadr
         if (type == PETSC_SCALAR) {
           PetscScalar *cB = &((PetscScalar *) B)[nc * nq * i];
 
-          DMFieldDSdot(cB,fB,elem,nq,feDim,nc,(PetscScalar));
+          DMFieldDSdot(cB,T->T[0],elem,nq,feDim,nc,(PetscScalar));
         } else {
           PetscReal *cB = &((PetscReal *) B)[nc * nq * i];
 
-          DMFieldDSdot(cB,fB,elem,nq,feDim,nc,PetscRealPart);
+          DMFieldDSdot(cB,T->T[0],elem,nq,feDim,nc,PetscRealPart);
         }
       }
       if (D) {
         if (type == PETSC_SCALAR) {
           PetscScalar *cD = &((PetscScalar *) D)[nc * nq * dim * i];
 
-          DMFieldDSdot(cD,fD,elem,nq,feDim,(nc * dim),(PetscScalar));
+          DMFieldDSdot(cD,T->T[1],elem,nq,feDim,(nc * dim),(PetscScalar));
         } else {
           PetscReal *cD = &((PetscReal *) D)[nc * nq * dim * i];
 
-          DMFieldDSdot(cD,fD,elem,nq,feDim,(nc * dim),PetscRealPart);
+          DMFieldDSdot(cD,T->T[1],elem,nq,feDim,(nc * dim),PetscRealPart);
         }
       }
       if (H) {
         if (type == PETSC_SCALAR) {
           PetscScalar *cH = &((PetscScalar *) H)[nc * nq * dim * dim * i];
 
-          DMFieldDSdot(cH,fH,elem,nq,feDim,(nc * dim * dim),(PetscScalar));
+          DMFieldDSdot(cH,T->T[2],elem,nq,feDim,(nc * dim * dim),(PetscScalar));
         } else {
           PetscReal *cH = &((PetscReal *) H)[nc * nq * dim * dim * i];
 
-          DMFieldDSdot(cH,fH,elem,nq,feDim,(nc * dim * dim),PetscRealPart);
+          DMFieldDSdot(cH,T->T[2],elem,nq,feDim,(nc * dim * dim),PetscRealPart);
         }
       }
       ierr = DMPlexVecRestoreClosure(dm,section,dsfield->vec,c,&closureSize,&elem);CHKERRQ(ierr);
     }
-    ierr = PetscFERestoreTabulation(fe,nq,qpoints,B ? &fB : NULL,D ? &fD : NULL,H ? &fH : NULL);CHKERRQ(ierr);
+    ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);
   } else {SETERRQ(PetscObjectComm((PetscObject)field),PETSC_ERR_SUP,"Not implemented");}
   if (!isStride) {
     ierr = ISRestoreIndices(pointIS,&points);CHKERRQ(ierr);
@@ -240,7 +242,8 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
     PetscInt nq = cellDegrees[c], p;
 
     if (nq) {
-      PetscReal *fB, *fD, *fH;
+      PetscInt          K = H ? 2 : (D ? 1 : (B ? 0 : -1));
+      PetscTabulation   T;
       PetscInt     closureSize;
       PetscScalar *elem = NULL;
       PetscReal   *quadPoints;
@@ -249,7 +252,7 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
 
       for (p = 0; p < dim * nq; p++) coordsReal[p] = PetscRealPart(cellPoints[dim * offset + p]);
       ierr = DMPlexCoordinatesToReference(field->dm, c, nq, coordsReal, coordsRef);CHKERRQ(ierr);
-      ierr = PetscFEGetTabulation(cellFE,nq,coordsRef,B ? &fB : NULL,D ? &fD : NULL,H ? &fH : NULL);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(cellFE,1,nq,coordsRef,K,&T);CHKERRQ(ierr);
       ierr = PetscQuadratureCreate(PETSC_COMM_SELF, &quad);CHKERRQ(ierr);
       ierr = PetscMalloc1(dimR * nq, &quadPoints);CHKERRQ(ierr);
       for (p = 0; p < dimR * nq; p++) quadPoints[p] = coordsRef[p];
@@ -261,18 +264,18 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
         if (datatype == PETSC_SCALAR) {
           PetscScalar *cB = &cellBs[nc * offset];
 
-          DMFieldDSdot(cB,fB,elem,nq,feDim,nc,(PetscScalar));
+          DMFieldDSdot(cB,T->T[0],elem,nq,feDim,nc,(PetscScalar));
         } else {
           PetscReal *cB = &cellBr[nc * offset];
 
-          DMFieldDSdot(cB,fB,elem,nq,feDim,nc,PetscRealPart);
+          DMFieldDSdot(cB,T->T[0],elem,nq,feDim,nc,PetscRealPart);
         }
       }
       if (D) {
         if (datatype == PETSC_SCALAR) {
           PetscScalar *cD = &cellDs[nc * dim * offset];
 
-          DMFieldDSdot(cD,fD,elem,nq,feDim,(nc * dim),(PetscScalar));
+          DMFieldDSdot(cD,T->T[1],elem,nq,feDim,(nc * dim),(PetscScalar));
           for (p = 0; p < nq; p++) {
             for (g = 0; g < nc; g++) {
               PetscScalar vs[3];
@@ -291,7 +294,7 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
         } else {
           PetscReal *cD = &cellDr[nc * dim * offset];
 
-          DMFieldDSdot(cD,fD,elem,nq,feDim,(nc * dim),PetscRealPart);
+          DMFieldDSdot(cD,T->T[1],elem,nq,feDim,(nc * dim),PetscRealPart);
           for (p = 0; p < nq; p++) {
             for (g = 0; g < nc; g++) {
               for (d = 0; d < dimR; d++) {
@@ -311,7 +314,7 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
         if (datatype == PETSC_SCALAR) {
           PetscScalar *cH = &cellHs[nc * dim * dim * offset];
 
-          DMFieldDSdot(cH,fH,elem,nq,feDim,(nc * dim * dim),(PetscScalar));
+          DMFieldDSdot(cH,T->T[2],elem,nq,feDim,(nc * dim * dim),(PetscScalar));
           for (p = 0; p < nq; p++) {
             for (g = 0; g < nc * dimR; g++) {
               PetscScalar vs[3];
@@ -345,7 +348,7 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
         } else {
           PetscReal *cH = &cellHr[nc * dim * dim * offset];
 
-          DMFieldDSdot(cH,fH,elem,nq,feDim,(nc * dim * dim),PetscRealPart);
+          DMFieldDSdot(cH,T->T[2],elem,nq,feDim,(nc * dim * dim),PetscRealPart);
           for (p = 0; p < nq; p++) {
             for (g = 0; g < nc * dimR; g++) {
               for (d = 0; d < dimR; d++) {
@@ -375,7 +378,7 @@ static PetscErrorCode DMFieldEvaluate_DS(DMField field, Vec points, PetscDataTyp
         }
       }
       ierr = DMPlexVecRestoreClosure(field->dm,section,dsfield->vec,c,&closureSize,&elem);CHKERRQ(ierr);
-      ierr = PetscFERestoreTabulation(cellFE,nq,coordsRef,B ? &fB : NULL,D ? &fD : NULL,H ? &fH : NULL);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);
     }
     offset += nq;
   }
@@ -789,12 +792,13 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc
     PetscClassId         faceId, cellId;
     PetscDualSpace       dsp;
     DM                   K;
+    DMPolytopeType       ct;
     PetscInt           (*co)[2][3];
     PetscInt             coneSize;
     PetscInt           **counts;
     PetscInt             f, i, o, q, s;
     const PetscInt      *coneK;
-    PetscInt             minOrient, maxOrient, numOrient;
+    PetscInt             eStart, minOrient, maxOrient, numOrient;
     PetscInt            *orients;
     PetscReal          **orientPoints;
     PetscReal           *cellPoints;
@@ -808,6 +812,8 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc
     if (faceId != PETSCFE_CLASSID || cellId != PETSCFE_CLASSID) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Not supported\n");
     ierr = PetscFEGetDualSpace((PetscFE)cellDisc, &dsp);CHKERRQ(ierr);
     ierr = PetscDualSpaceGetDM(dsp, &K); CHKERRQ(ierr);
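+    /* Determine the polytope type of the reference cell's faces so the orientation handling below can key on cell type rather than dimension and cone size */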
+    ierr = DMPlexGetHeightStratum(K, 1, &eStart, NULL);CHKERRQ(ierr);
+    ierr = DMPlexGetCellType(K, eStart, &ct);CHKERRQ(ierr);
     ierr = DMPlexGetConeSize(K,0,&coneSize);CHKERRQ(ierr);
     ierr = DMPlexGetCone(K,0,&coneK);CHKERRQ(ierr);
     ierr = PetscMalloc2(numFaces, &co, coneSize, &counts);CHKERRQ(ierr);
@@ -871,10 +877,9 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc
 
         ierr = PetscMalloc1(Nq * dim, &orientPoints[o]);CHKERRQ(ierr);
         /* rotate the quadrature points appropriately */
-        switch (dim) {
-        case 0:
-          break;
-        case 1:
+        switch (ct) {
+        case DM_POLYTOPE_POINT: break;
+        case DM_POLYTOPE_SEGMENT:
           if (orient == -2 || orient == 1) {
             for (q = 0; q < Nq; q++) {
               orientPoints[o][q] = -geom->xi[q];
@@ -885,67 +890,61 @@ static PetscErrorCode DMFieldComputeFaceData_DS(DMField field, IS pointIS, Petsc
             }
           }
           break;
-        case 2:
-          switch (coneSize) {
-          case 3:
-            for (q = 0; q < Nq; q++) {
-              PetscReal lambda[3];
-              PetscReal lambdao[3];
-
-              /* convert to barycentric */
-              lambda[0] = - (geom->xi[2 * q] + geom->xi[2 * q + 1]) / 2.;
-              lambda[1] = (geom->xi[2 * q] + 1.) / 2.;
-              lambda[2] = (geom->xi[2 * q + 1] + 1.) / 2.;
-              if (orient >= 0) {
-                for (i = 0; i < 3; i++) {
-                  lambdao[i] = lambda[(orient + i) % 3];
-                }
-              } else {
-                for (i = 0; i < 3; i++) {
-                  lambdao[i] = lambda[(-(orient + i) + 3) % 3];
-                }
-              }
-              /* convert to coordinates */
-              orientPoints[o][2 * q + 0] = -(lambdao[0] + lambdao[2]) + lambdao[1];
-              orientPoints[o][2 * q + 1] = -(lambdao[0] + lambdao[1]) + lambdao[2];
-            }
-            break;
-          case 4:
-            for (q = 0; q < Nq; q++) {
-              PetscReal xi[2], xio[2];
-              PetscInt oabs = (orient >= 0) ? orient : -(orient + 1);
-
-              xi[0] = geom->xi[2 * q];
-              xi[1] = geom->xi[2 * q + 1];
-              switch (oabs) {
-              case 1:
-                xio[0] = xi[1];
-                xio[1] = -xi[0];
-                break;
-              case 2:
-                xio[0] = -xi[0];
-                xio[1] = -xi[1];
-              case 3:
-                xio[0] = -xi[1];
-                xio[1] = xi[0];
-              case 0:
-              default:
-                xio[0] = xi[0];
-                xio[1] = xi[1];
-                break;
+        case DM_POLYTOPE_TRIANGLE:
+          for (q = 0; q < Nq; q++) {
+            PetscReal lambda[3];
+            PetscReal lambdao[3];
+
+            /* convert to barycentric */
+            lambda[0] = - (geom->xi[2 * q] + geom->xi[2 * q + 1]) / 2.;
+            lambda[1] = (geom->xi[2 * q] + 1.) / 2.;
+            lambda[2] = (geom->xi[2 * q + 1] + 1.) / 2.;
+            if (orient >= 0) {
+              for (i = 0; i < 3; i++) {
+                lambdao[i] = lambda[(orient + i) % 3];
               }
-              if (orient < 0) {
-                xio[0] = -xio[0];
+            } else {
+              for (i = 0; i < 3; i++) {
+                lambdao[i] = lambda[(-(orient + i) + 3) % 3];
               }
-              orientPoints[o][2 * q + 0] = xio[0];
-              orientPoints[o][2 * q + 1] = xio[1];
             }
-            break;
-          default:
-            SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cone size %D not yet supported\n", coneSize);
+            /* convert to coordinates */
+            orientPoints[o][2 * q + 0] = -(lambdao[0] + lambdao[2]) + lambdao[1];
+            orientPoints[o][2 * q + 1] = -(lambdao[0] + lambdao[1]) + lambdao[2];
+          }
+          break;
+        case DM_POLYTOPE_QUADRILATERAL:
+          for (q = 0; q < Nq; q++) {
+            PetscReal xi[2], xio[2];
+            PetscInt oabs = (orient >= 0) ? orient : -(orient + 1);
+
+            xi[0] = geom->xi[2 * q];
+            xi[1] = geom->xi[2 * q + 1];
+            switch (oabs) {
+            case 1:
+              xio[0] = xi[1];
+              xio[1] = -xi[0];
+              break;
+            case 2:
+              xio[0] = -xi[0];
+              xio[1] = -xi[1]; break;
+            case 3:
+              xio[0] = -xi[1];
+              xio[1] = xi[0]; break;
+            case 0:
+            default:
+              xio[0] = xi[0];
+              xio[1] = xi[1];
+              break;
+            }
+            if (orient < 0) {
+              xio[0] = -xio[0];
+            }
+            orientPoints[o][2 * q + 0] = xio[0];
+            orientPoints[o][2 * q + 1] = xio[1];
           }
-        default:
-          SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Dimension %D not yet supported\n", dim);
+          break;
+        default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cell type %s not yet supported\n", DMPolytopeTypes[ct]);
         }
       }
     }
diff --git a/src/dm/impls/composite/f90-custom/makefile b/src/dm/impls/composite/f90-custom/makefile
index 9407ebf63f7..7a3b6ea0906 100644
--- a/src/dm/impls/composite/f90-custom/makefile
+++ b/src/dm/impls/composite/f90-custom/makefile
@@ -1,5 +1,4 @@
 #requiresdefine   'PETSC_HAVE_FORTRAN'
-#requiresdefine   'PETSC_USING_F90'
 ALL: lib
 
 CFLAGS   =
diff --git a/src/dm/impls/composite/pack.c b/src/dm/impls/composite/pack.c
index 8b1618c5102..54275b58a1b 100644
--- a/src/dm/impls/composite/pack.c
+++ b/src/dm/impls/composite/pack.c
@@ -950,6 +950,9 @@ PetscErrorCode  DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMappi
     /* Shift the sub-DM definition of the global space to the composite global space */
     for (i=0; igrstarts[rank]; continue; }
       /* Binary search to find which rank owns subi */
       while (hi-lo > 1) {
         t = lo + (hi-lo)/2;
diff --git a/src/dm/impls/da/dadist.c b/src/dm/impls/da/dadist.c
index c11bfc24cbd..1182bd37052 100644
--- a/src/dm/impls/da/dadist.c
+++ b/src/dm/impls/da/dadist.c
@@ -64,7 +64,7 @@ PetscErrorCode  DMCreateGlobalVector_DA(DM da,Vec *g)
 
 .seealso: DMCreateLocalVector(), VecDuplicate(), VecDuplicateVecs(),
           DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(),
-          DMGlobalToLocalEnd(), DMDALocalToGlobalBegin()
+          DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
 @*/
 PetscErrorCode  DMDACreateNaturalVector(DM da,Vec *g)
 {
diff --git a/src/dm/impls/da/daindex.c b/src/dm/impls/da/daindex.c
index 82812a3b728..2620f028260 100644
--- a/src/dm/impls/da/daindex.c
+++ b/src/dm/impls/da/daindex.c
@@ -65,7 +65,7 @@ PetscErrorCode DMDAGetNatural_Private(DM da,PetscInt *outNlocal,IS *isnatural)
    Notes:
    It will generate an error if an AO has already been obtained with a call to DMDAGetAO and the user sets a different AOType
 
-.seealso: DMDACreate2d(), DMDAGetAO(), DMDAGetGhostCorners(), DMDAGetCorners(), DMDALocalToGlocal()
+.seealso: DMDACreate2d(), DMDAGetAO(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobal()
           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetGlobalIndices(), DMDAGetOwnershipRanges(),
           AO, AOPetscToApplication(), AOApplicationToPetsc()
 @*/
@@ -114,7 +114,7 @@ PetscErrorCode  DMDASetAOType(DM da,AOType aotype)
 
    Do NOT call AODestroy() on the ao returned by this function.
 
-.seealso: DMDACreate2d(), DMDASetAOType(), DMDAGetGhostCorners(), DMDAGetCorners(), DMDALocalToGlocal()
+.seealso: DMDACreate2d(), DMDASetAOType(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobal()
           DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMLocalToLocalEnd(),  DMDAGetOwnershipRanges(),
           AO, AOPetscToApplication(), AOApplicationToPetsc()
 @*/
diff --git a/src/dm/impls/da/dainterp.c b/src/dm/impls/da/dainterp.c
index 8376e6ac343..fb195aabd76 100644
--- a/src/dm/impls/da/dainterp.c
+++ b/src/dm/impls/da/dainterp.c
@@ -13,39 +13,6 @@
 
 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
 
-/*@
-    DMCreateInterpolationScale - Forms L = R*1/diag(R*1) - L.*v is like a coarse grid average of the
-      nearby fine grid points.
-
-  Input Parameters:
-+      dac - DM that defines a coarse mesh
-.      daf - DM that defines a fine mesh
--      mat - the restriction (or interpolation operator) from fine to coarse
-
-  Output Parameter:
-.    scale - the scaled vector
-
-  Level: developer
-
-.seealso: DMCreateInterpolation()
-
-@*/
-PetscErrorCode  DMCreateInterpolationScale(DM dac,DM daf,Mat mat,Vec *scale)
-{
-  PetscErrorCode ierr;
-  Vec            fine;
-  PetscScalar    one = 1.0;
-
-  PetscFunctionBegin;
-  ierr = DMCreateGlobalVector(daf,&fine);CHKERRQ(ierr);
-  ierr = DMCreateGlobalVector(dac,scale);CHKERRQ(ierr);
-  ierr = VecSet(fine,one);CHKERRQ(ierr);
-  ierr = MatRestrict(mat,fine,*scale);CHKERRQ(ierr);
-  ierr = VecDestroy(&fine);CHKERRQ(ierr);
-  ierr = VecReciprocal(*scale);CHKERRQ(ierr);
-  PetscFunctionReturn(0);
-}
-
 /*
    Since the interpolation uses MATMAIJ for dof > 0 we convert requests for non-MATAIJ based matrices to MATAIJ.
    This is a bit of a hack, the reason for it is partially because -dm_mat_type defines the
@@ -114,7 +81,7 @@ PetscErrorCode DMCreateInterpolation_DA_1D_Q1(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
    */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
   #endif
   ierr = MatSetSizes(mat,m_f,m_c,mx,Mx);CHKERRQ(ierr);
@@ -257,7 +224,7 @@ PetscErrorCode DMCreateInterpolation_DA_1D_Q0(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
    */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
   #endif
   ierr = MatSetSizes(mat,m_f,m_c,mx,Mx);CHKERRQ(ierr);
@@ -403,7 +370,7 @@ PetscErrorCode DMCreateInterpolation_DA_2D_Q1(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
   */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
 #endif
   ierr = MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);CHKERRQ(ierr);
@@ -625,7 +592,7 @@ PetscErrorCode DMCreateInterpolation_DA_2D_Q0(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
   */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
   #endif
   ierr = MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);CHKERRQ(ierr);
@@ -760,7 +727,7 @@ PetscErrorCode DMCreateInterpolation_DA_3D_Q0(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
   */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
   #endif
   ierr = MatSetSizes(mat,m_f*n_f*p_f,col_scale*m_c*n_c*p_c,mx*my*mz,col_scale*Mx*My*Mz);CHKERRQ(ierr);
@@ -921,7 +888,7 @@ PetscErrorCode DMCreateInterpolation_DA_3D_Q1(DM dac,DM daf,Mat *A)
      we don't want the original unconverted matrix copied to the GPU
   */
   if (dof > 1) {
-    ierr = MatPinToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(mat,PETSC_TRUE);CHKERRQ(ierr);
   }
   #endif
   ierr = MatSetSizes(mat,m_f*n_f*p_f,m_c*n_c*p_c,mx*my*mz,Mx*My*Mz);CHKERRQ(ierr);
diff --git a/src/dm/impls/da/f90-custom/makefile b/src/dm/impls/da/f90-custom/makefile
index 9ae1e6c900b..218458879ae 100644
--- a/src/dm/impls/da/f90-custom/makefile
+++ b/src/dm/impls/da/f90-custom/makefile
@@ -1,5 +1,4 @@
 #requiresdefine   'PETSC_HAVE_FORTRAN'
-#requiresdefine   'PETSC_USING_F90'
 ALL: lib
 
 CFLAGS   =
diff --git a/src/dm/impls/da/fdda.c b/src/dm/impls/da/fdda.c
index 8b959848718..52c3ca3f25c 100644
--- a/src/dm/impls/da/fdda.c
+++ b/src/dm/impls/da/fdda.c
@@ -1018,10 +1018,10 @@ PetscErrorCode DMCreateMatrix_DA_2d_MPISELL(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree2(rows,cols);CHKERRQ(ierr);
@@ -1131,10 +1131,10 @@ PetscErrorCode DMCreateMatrix_DA_3d_MPISELL(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree2(rows,cols);CHKERRQ(ierr);
@@ -1250,10 +1250,10 @@ PetscErrorCode DMCreateMatrix_DA_2d_MPIAIJ(DM da,Mat J,PetscBool isIS)
       }
     }
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
     if (bx == DM_BOUNDARY_NONE && by == DM_BOUNDARY_NONE) {
       ierr = MatSetOption(J,MAT_SORTED_FULL,PETSC_FALSE);CHKERRQ(ierr);
@@ -1384,10 +1384,10 @@ PetscErrorCode DMCreateMatrix_DA_2d_MPIAIJ_Fill(DM da,Mat J)
       }
     }
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
@@ -1517,13 +1517,13 @@ PetscErrorCode DMCreateMatrix_DA_3d_MPIAIJ(DM da,Mat J,PetscBool isIS)
       }
     }
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     if (!isIS && bx == DM_BOUNDARY_NONE && by == DM_BOUNDARY_NONE && bz == DM_BOUNDARY_NONE) {
       ierr = MatSetOption(J,MAT_SORTED_FULL,PETSC_FALSE);CHKERRQ(ierr);
     }
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree2(rows,cols);CHKERRQ(ierr);
@@ -1697,10 +1697,10 @@ PetscErrorCode DMCreateMatrix_DA_1d_MPIAIJ_Fill(DM da,Mat J)
     }
     ierr = PetscFree(cols);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   PetscFunctionReturn(0);
@@ -1765,13 +1765,13 @@ PetscErrorCode DMCreateMatrix_DA_1d_MPIAIJ(DM da,Mat J,PetscBool isIS)
       ierr = MatSetValuesLocal(J,nc,rows,cnt,cols,NULL,INSERT_VALUES);CHKERRQ(ierr);
     }
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     if (!isIS && bx == DM_BOUNDARY_NONE) {
       ierr = MatSetOption(J,MAT_SORTED_FULL,PETSC_FALSE);CHKERRQ(ierr);
     }
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
     ierr = PetscFree2(rows,cols);CHKERRQ(ierr);
   }
@@ -1861,10 +1861,10 @@ PetscErrorCode DMCreateMatrix_DA_2d_MPIBAIJ(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
@@ -1970,10 +1970,10 @@ PetscErrorCode DMCreateMatrix_DA_3d_MPIBAIJ(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
@@ -2086,10 +2086,10 @@ PetscErrorCode DMCreateMatrix_DA_2d_MPISBAIJ(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
@@ -2197,10 +2197,10 @@ PetscErrorCode DMCreateMatrix_DA_3d_MPISBAIJ(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
@@ -2351,10 +2351,10 @@ PetscErrorCode DMCreateMatrix_DA_3d_MPIAIJ_Fill(DM da,Mat J)
     }
     ierr = PetscFree(values);CHKERRQ(ierr);
     /* do not copy values to GPU since they are all zero and not yet needed there */
-    ierr = MatPinToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-    ierr = MatPinToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
+    ierr = MatBindToCPU(J,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   }
   ierr = PetscFree(cols);CHKERRQ(ierr);
diff --git a/src/dm/impls/da/gr2.c b/src/dm/impls/da/gr2.c
index 4f12eb174c5..6cc57763c93 100644
--- a/src/dm/impls/da/gr2.c
+++ b/src/dm/impls/da/gr2.c
@@ -691,7 +691,7 @@ PetscErrorCode  VecView_MPI_DA(Vec xin,PetscViewer viewer)
         ierr = DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet);CHKERRQ(ierr);
         if (!compatibleSet || !compatible) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
       }
-      ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y);CHKERRQ(ierr);
+      ierr = PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_DEFAULT,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y);CHKERRQ(ierr);
     }
 #if defined(PETSC_HAVE_HDF5)
   } else if (ishdf5) {
diff --git a/src/dm/impls/forest/forest.c b/src/dm/impls/forest/forest.c
index af84c98f202..ff8d3e686e5 100644
--- a/src/dm/impls/forest/forest.c
+++ b/src/dm/impls/forest/forest.c
@@ -230,7 +230,7 @@ static PetscErrorCode DMDestroy_Forest(DM dm)
 
   Level: intermediate
 
-.seealso(): DMForestGetTopology(), DMForestSetBaseDM()
+.seealso: DMForestGetTopology(), DMForestSetBaseDM()
 @*/
 PetscErrorCode DMForestSetTopology(DM dm, DMForestTopology topology)
 {
@@ -287,7 +287,7 @@ PetscErrorCode DMForestGetTopology(DM dm, DMForestTopology *topology)
 
   Level: intermediate
 
-.seealso(): DMForestGetBaseDM()
+.seealso: DMForestGetBaseDM()
 @*/
 PetscErrorCode DMForestSetBaseDM(DM dm, DM base)
 {
@@ -337,7 +337,7 @@ PetscErrorCode DMForestSetBaseDM(DM dm, DM base)
 
   Level: intermediate
 
-.seealso(); DMForestSetBaseDM()
+.seealso: DMForestSetBaseDM()
 @*/
 PetscErrorCode DMForestGetBaseDM(DM dm, DM *base)
 {
diff --git a/src/dm/impls/forest/p4est/pforest.c b/src/dm/impls/forest/p4est/pforest.c
index 595594531b2..5bfe48b51c3 100644
--- a/src/dm/impls/forest/p4est/pforest.c
+++ b/src/dm/impls/forest/p4est/pforest.c
@@ -824,7 +824,7 @@ static PetscErrorCode DMSetUp_pforest(DM dm)
 
       ierr = DMGetNumLabels(base,&numLabels);CHKERRQ(ierr);
       for (l = 0; l < numLabels; l++) {
-        PetscBool  isDepth, isGhost, isVTK, isDim;
+        PetscBool  isDepth, isGhost, isVTK, isDim, isCellType;
         DMLabel    label, labelNew;
         PetscInt   defVal;
         const char *name;
@@ -835,6 +835,8 @@ static PetscErrorCode DMSetUp_pforest(DM dm)
         if (isDepth) continue;
         ierr = PetscStrcmp(name,"dim",&isDim);CHKERRQ(ierr);
         if (isDim) continue;
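+        /* "celltype", like "depth" and "dim", is managed automatically by DMPlex, so skip it rather than copying it */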
+        ierr = PetscStrcmp(name,"celltype",&isCellType);CHKERRQ(ierr);
+        if (isCellType) continue;
         ierr = PetscStrcmp(name,"ghost",&isGhost);CHKERRQ(ierr);
         if (isGhost) continue;
         ierr = PetscStrcmp(name,"vtk",&isVTK);CHKERRQ(ierr);
@@ -995,7 +997,7 @@ static PetscErrorCode DMSetUp_pforest(DM dm)
 
       ierr = DMGetNumLabels(adaptFrom,&numLabels);CHKERRQ(ierr);
       for (l = 0; l < numLabels; l++) {
-        PetscBool  isDepth, isGhost, isVTK;
+        PetscBool  isDepth, isCellType, isGhost, isVTK;
         DMLabel    label, labelNew;
         PetscInt   defVal;
         const char *name;
@@ -1004,6 +1006,8 @@ static PetscErrorCode DMSetUp_pforest(DM dm)
         ierr = DMGetLabelByNum(adaptFrom, l, &label);CHKERRQ(ierr);
         ierr = PetscStrcmp(name,"depth",&isDepth);CHKERRQ(ierr);
         if (isDepth) continue;
+        ierr = PetscStrcmp(name,"celltype",&isCellType);CHKERRQ(ierr);
+        if (isCellType) continue;
         ierr = PetscStrcmp(name,"ghost",&isGhost);CHKERRQ(ierr);
         if (isGhost) continue;
         ierr = PetscStrcmp(name,"vtk",&isVTK);CHKERRQ(ierr);
@@ -3226,7 +3230,7 @@ static PetscErrorCode DMPforestLabelsInitialize(DM dm, DM plex)
   while (next) {
     DMLabel   baseLabel;
     DMLabel   label = next->label;
-    PetscBool isDepth, isGhost, isVTK, isSpmap;
+    PetscBool isDepth, isCellType, isGhost, isVTK, isSpmap;
     const char *name;
 
     ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
@@ -3235,6 +3239,11 @@ static PetscErrorCode DMPforestLabelsInitialize(DM dm, DM plex)
       next = next->next;
       continue;
     }
+    ierr = PetscStrcmp(name,"celltype",&isCellType);CHKERRQ(ierr);
+    if (isCellType) {
+      next = next->next;
+      continue;
+    }
     ierr = PetscStrcmp(name,"ghost",&isGhost);CHKERRQ(ierr);
     if (isGhost) {
       next = next->next;
@@ -3633,7 +3642,7 @@ static PetscErrorCode DMPforestLabelsFinalize(DM dm, DM plex)
     while (next) {
       DMLabel    nextLabel = next->label;
       const char *name;
-      PetscBool  isDepth, isGhost, isVTK;
+      PetscBool  isDepth, isCellType, isGhost, isVTK;
       DMLabel    label;
       PetscInt   p;
 
@@ -3643,6 +3652,11 @@ static PetscErrorCode DMPforestLabelsFinalize(DM dm, DM plex)
         next = next->next;
         continue;
       }
+      ierr = PetscStrcmp(name,"celltype",&isCellType);CHKERRQ(ierr);
+      if (isCellType) {
+        next = next->next;
+        continue;
+      }
       ierr = PetscStrcmp(name,"ghost",&isGhost);CHKERRQ(ierr);
       if (isGhost) {
         next = next->next;
@@ -4343,6 +4357,10 @@ static PetscErrorCode DMConvert_pforest_plex(DM dm, DMType newtype, DM *plex)
     ierr = DMPlexSetTree(newPlex,parentSection,(PetscInt*)parents->array,(PetscInt*)childids->array);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&parentSection);CHKERRQ(ierr);
     ierr = PetscSFCreate(comm,&pointSF);CHKERRQ(ierr);
+    /*
+       These arrays defining the sf are from the p4est library, but the code there shows the leaves being populated in increasing order.
+       https://gitlab.com/petsc/petsc/merge_requests/2248#note_240186391
+    */
     ierr = PetscSFSetGraph(pointSF,pEnd - pStart,(PetscInt)leaves->elem_count,(PetscInt*)leaves->array,PETSC_COPY_VALUES,(PetscSFNode*)remotes->array,PETSC_COPY_VALUES);CHKERRQ(ierr);
     ierr = DMSetPointSF(newPlex,pointSF);CHKERRQ(ierr);
     ierr = DMSetPointSF(dm,pointSF);CHKERRQ(ierr);
@@ -4601,52 +4619,18 @@ static PetscErrorCode DMCreateInjection_pforest(DM dmCoarse, DM dmFine, Mat *inj
   PetscFunctionReturn(0);
 }
 
-static void transfer_func_0(PetscInt dim, PetscInt Nf, PetscInt NfAux,
-                            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
-                            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
-                            PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar uexact[])
-{
-  PetscInt f = 0; /* I would like to have f = (PetscInt)(*ctx) */
-  PetscInt oa, ou;
-  for (ou = 0, oa = aOff[f]; oa < aOff[f+1]; ou++, oa++) uexact[ou] = a[oa];
-}
-
-static void transfer_func_1(PetscInt dim, PetscInt Nf, PetscInt NfAux,
-                            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
-                            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
-                            PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar uexact[])
-{
-  PetscInt f = 1;
-  PetscInt oa, ou;
-  for (ou = 0, oa = aOff[f]; oa < aOff[f+1]; ou++, oa++) uexact[ou] = a[oa];
-}
-
-static void transfer_func_2(PetscInt dim, PetscInt Nf, PetscInt NfAux,
-                            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
-                            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
-                            PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar uexact[])
-{
-  PetscInt f = 2;
-  PetscInt oa, ou;
-  for (ou = 0, oa = aOff[f]; oa < aOff[f+1]; ou++, oa++) uexact[ou] = a[oa];
-}
-
 #define DMForestTransferVecFromBase_pforest _append_pforest(DMForestTransferVecFromBase)
 static PetscErrorCode DMForestTransferVecFromBase_pforest(DM dm, Vec vecIn, Vec vecOut)
 {
-  DM             dmIn, dmVecIn, base, basec, plex, dmAux, coarseDM;
+  DM             dmIn, dmVecIn, base, basec, plex, coarseDM;
   DM             *hierarchy;
   PetscSF        sfRed = NULL;
   PetscDS        ds;
-  Vec            dummy, vecInLocal, vecOutLocal;
+  Vec            vecInLocal, vecOutLocal;
   DMLabel        subpointMap;
   PetscInt       minLevel, mh, n_hi, i;
   PetscBool      hiforest, *hierarchy_forest;
   PetscErrorCode ierr;
-  void           (*funcs[3]) (PetscInt dim, PetscInt Nf, PetscInt NfAux,
-                              const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
-                              const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
-                              PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar uexact[]) = {transfer_func_0,transfer_func_1,transfer_func_2};
 
   PetscFunctionBegin;
   ierr = VecGetDM(vecIn,&dmVecIn);CHKERRQ(ierr);
@@ -4746,8 +4730,6 @@ static PetscErrorCode DMForestTransferVecFromBase_pforest(DM dm, Vec vecIn, Vec
     ierr = MPIU_Allreduce(ncells,gncells,2,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
     if (gncells[0] != gncells[1]) SETERRQ2(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Invalid number of base cells! Expected %D, found %D",gncells[0]+1,gncells[1]+1);
   }
-  ierr = PetscObjectQuery((PetscObject)plex,"dmAux",(PetscObject*)&dmAux);CHKERRQ(ierr);
-  if (dmAux) SETERRQ(PetscObjectComm((PetscObject)dmAux),PETSC_ERR_SUP,"Cannot currently transfer from base when a dmAux is present");
 
   ierr = DMGetLabel(dmIn,"_forest_base_subpoint_map",&subpointMap);CHKERRQ(ierr);
   if (!subpointMap) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing _forest_base_subpoint_map label");CHKERRQ(ierr);
@@ -4765,20 +4747,56 @@ static PetscErrorCode DMForestTransferVecFromBase_pforest(DM dm, Vec vecIn, Vec
     ierr = DMGlobalToLocalBegin(basec,vecIn,INSERT_VALUES,vecInLocal);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(basec,vecIn,INSERT_VALUES,vecInLocal);CHKERRQ(ierr);
   }
-  ierr = PetscObjectCompose((PetscObject)plex,"dmAux",(PetscObject)basec);CHKERRQ(ierr);
-  ierr = PetscObjectCompose((PetscObject)plex,"A",(PetscObject)vecInLocal);CHKERRQ(ierr);
-  ierr = VecDestroy(&vecInLocal);CHKERRQ(ierr);
-  ierr = VecDestroy(&vecIn);CHKERRQ(ierr);
-  ierr = DMPlexSetSubpointMap(basec,subpointMap);CHKERRQ(ierr);
-  ierr = DMViewFromOptions(basec,NULL,"-dm_basec_view");CHKERRQ(ierr);
-  ierr = DMDestroy(&basec);CHKERRQ(ierr);
 
-  ierr = DMGetLocalVector(dmIn,&dummy);CHKERRQ(ierr);
   ierr = DMGetLocalVector(dmIn,&vecOutLocal);CHKERRQ(ierr);
-  ierr = DMProjectFieldLocal(dmIn,0.0,dummy,funcs,INSERT_ALL_VALUES,vecOutLocal);CHKERRQ(ierr);
-  ierr = DMRestoreLocalVector(dmIn,&dummy);CHKERRQ(ierr);
-  ierr = PetscObjectCompose((PetscObject)plex,"A",NULL);CHKERRQ(ierr);
-  ierr = PetscObjectCompose((PetscObject)plex,"dmAux",NULL);CHKERRQ(ierr);
+  { /* get degrees of freedom ordered onto dmIn */
+    PetscSF            basetocoarse;
+    PetscInt           bStart, bEnd, nroots;
+    PetscInt           iStart, iEnd, nleaves, leaf;
+    PetscMPIInt        rank;
+    PetscSFNode       *remotes;
+    PetscSection       secIn, secOut;
+    PetscInt          *remoteOffsets;
+    PetscSF            transferSF;
+    const PetscScalar *inArray;
+    PetscScalar       *outArray;
+
+    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)basec), &rank);CHKERRQ(ierr);
+    ierr = DMPlexGetChart(basec, &bStart, &bEnd);CHKERRQ(ierr);
+    nroots = PetscMax(bEnd - bStart, 0);
+    ierr = DMPlexGetChart(plex, &iStart, &iEnd);CHKERRQ(ierr);
+    nleaves = PetscMax(iEnd - iStart, 0);
+
+    ierr = PetscMalloc1(nleaves, &remotes);CHKERRQ(ierr);
+    for (leaf = iStart; leaf < iEnd; leaf++) {
+      PetscInt index;
+
+      remotes[leaf - iStart].rank = rank;
+      ierr = DMLabelGetValue(subpointMap, leaf, &index);CHKERRQ(ierr);
+      remotes[leaf - iStart].index = index;
+    }
+
+    ierr = PetscSFCreate(PetscObjectComm((PetscObject)basec), &basetocoarse);CHKERRQ(ierr);
+    ierr = PetscSFSetGraph(basetocoarse, nroots, nleaves, NULL, PETSC_OWN_POINTER, remotes, PETSC_OWN_POINTER);CHKERRQ(ierr);
+    ierr = PetscSFSetUp(basetocoarse);CHKERRQ(ierr);
+    ierr = DMGetLocalSection(basec,&secIn);CHKERRQ(ierr);
+    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmIn),&secOut);CHKERRQ(ierr);
+    ierr = PetscSFDistributeSection(basetocoarse, secIn, &remoteOffsets, secOut);CHKERRQ(ierr);
+    ierr = PetscSFCreateSectionSF(basetocoarse, secIn, remoteOffsets, secOut, &transferSF);CHKERRQ(ierr);
+    ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
+    ierr = VecGetArrayWrite(vecOutLocal, &outArray);CHKERRQ(ierr);
+    ierr = VecGetArrayRead(vecInLocal, &inArray);CHKERRQ(ierr);
+    ierr = PetscSFBcastBegin(transferSF, MPIU_SCALAR, inArray, outArray);CHKERRQ(ierr);
+    ierr = PetscSFBcastEnd(transferSF, MPIU_SCALAR, inArray, outArray);CHKERRQ(ierr);
+    ierr = VecRestoreArrayRead(vecInLocal, &inArray);CHKERRQ(ierr);
+    ierr = VecRestoreArrayWrite(vecOutLocal, &outArray);CHKERRQ(ierr);
+    ierr = PetscSFDestroy(&transferSF);CHKERRQ(ierr);
+    ierr = PetscSectionDestroy(&secOut);CHKERRQ(ierr);
+    ierr = PetscSFDestroy(&basetocoarse);CHKERRQ(ierr);
+  }
+  ierr = VecDestroy(&vecInLocal);CHKERRQ(ierr);
+  ierr = DMDestroy(&basec);CHKERRQ(ierr);
+  ierr = VecDestroy(&vecIn);CHKERRQ(ierr);
 
   /* output */
   if (n_hi > 1) { /* downsweep the stored hierarchy */
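
The block added above replaces the old projection-based transfer (the removed transfer_func_* callbacks and dmAux composition) with a star-forest based copy: build a point SF from base points to coarse points, distribute the PetscSection layout along it, turn it into a section SF, and broadcast the values. A minimal sketch of that general pattern follows (not part of this patch; the names TransferAlongSF_Sketch, dmSrc, vecSrc, dmDst, vecDst and sf are hypothetical):

#include <petscsf.h>
#include <petscdm.h>

/* Sketch: copy dof values along a point SF 'sf' whose roots are points of dmSrc
   (values in vecSrc) and whose leaves are points of dmDst (values in vecDst). */
static PetscErrorCode TransferAlongSF_Sketch(DM dmSrc, Vec vecSrc, DM dmDst, Vec vecDst, PetscSF sf)
{
  PetscSection       secIn, secOut;
  PetscInt          *remoteOffsets;
  PetscSF            sectionSF;
  const PetscScalar *src;
  PetscScalar       *dst;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalSection(dmSrc, &secIn);CHKERRQ(ierr);                                     /* dof layout on the root points */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmDst), &secOut);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(sf, secIn, &remoteOffsets, secOut);CHKERRQ(ierr);          /* push the layout to the leaves */
  ierr = PetscSFCreateSectionSF(sf, secIn, remoteOffsets, secOut, &sectionSF);CHKERRQ(ierr); /* point SF -> dof SF */
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = VecGetArrayRead(vecSrc, &src);CHKERRQ(ierr);
  ierr = VecGetArrayWrite(vecDst, &dst);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sectionSF, MPIU_SCALAR, src, dst);CHKERRQ(ierr);                  /* copy root values to the leaves */
  ierr = PetscSFBcastEnd(sectionSF, MPIU_SCALAR, src, dst);CHKERRQ(ierr);
  ierr = VecRestoreArrayWrite(vecDst, &dst);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(vecSrc, &src);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sectionSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&secOut);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
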
diff --git a/src/dm/impls/moab/dmmbfem.cxx b/src/dm/impls/moab/dmmbfem.cxx
index 488f7847f66..ee7d6c41b6f 100644
--- a/src/dm/impls/moab/dmmbfem.cxx
+++ b/src/dm/impls/moab/dmmbfem.cxx
@@ -97,7 +97,7 @@ inline PetscErrorCode DMatrix_Invert_4x4_Internal (PetscReal *inmat, PetscReal *
 }
 
 
-/*@
+/*@C
   Compute_Lagrange_Basis_1D_Internal - Evaluate bases and derivatives at quadrature points for an EDGE2 or EDGE3 element.
 
   The routine is given the coordinates of the vertices of a linear or quadratic edge element.
@@ -229,7 +229,7 @@ PetscErrorCode Compute_Lagrange_Basis_1D_Internal ( const PetscInt nverts, const
 }
 
 
-/*@
+/*@C
   Compute_Lagrange_Basis_2D_Internal - Evaluate bases and derivatives at quadrature points for a QUAD4 or TRI3 element.
 
   The routine is given the coordinates of the vertices of a quadrangle or triangle.
@@ -404,7 +404,7 @@ PetscErrorCode Compute_Lagrange_Basis_2D_Internal ( const PetscInt nverts, const
 }
 
 
-/*@
+/*@C
   Compute_Lagrange_Basis_3D_Internal - Evaluate bases and derivatives at quadrature points for a HEX8 or TET4 element.
 
   The routine is given the coordinates of the vertices of a hexahedra or tetrahedra.
@@ -636,12 +636,12 @@ PetscErrorCode Compute_Lagrange_Basis_3D_Internal ( const PetscInt nverts, const
         dphidx[1 + offset] = Dx[1];
         dphidx[2 + offset] = Dx[2];
         dphidx[3 + offset] = Dx[3];
-      
+
         dphidy[0 + offset] = Dy[0];
         dphidy[1 + offset] = Dy[1];
         dphidy[2 + offset] = Dy[2];
         dphidy[3 + offset] = Dy[3];
-      
+
         dphidz[0 + offset] = Dz[0];
         dphidz[1 + offset] = Dz[1];
         dphidz[2 + offset] = Dz[2];
@@ -674,7 +674,7 @@ PetscErrorCode Compute_Lagrange_Basis_3D_Internal ( const PetscInt nverts, const
 
 
 
-/*@
+/*@C
   DMMoabFEMComputeBasis - Evaluate bases and derivatives at quadrature points for a linear EDGE/QUAD/TRI/HEX/TET element.
 
   The routine takes the coordinates of the vertices of an element and computes basis functions associated with
@@ -695,8 +695,8 @@ PetscErrorCode Compute_Lagrange_Basis_3D_Internal ( const PetscInt nverts, const
   Level: advanced
 
 @*/
-PetscErrorCode DMMoabFEMComputeBasis ( const PetscInt dim, const PetscInt nverts, const PetscReal *coordinates, const PetscQuadrature quadrature, 
-                                       PetscReal *phypts, PetscReal *jacobian_quadrature_weight_product, 
+PetscErrorCode DMMoabFEMComputeBasis ( const PetscInt dim, const PetscInt nverts, const PetscReal *coordinates, const PetscQuadrature quadrature,
+                                       PetscReal *phypts, PetscReal *jacobian_quadrature_weight_product,
                                        PetscReal *fe_basis, PetscReal **fe_basis_derivatives)
 {
   PetscErrorCode  ierr;
@@ -707,7 +707,7 @@ PetscErrorCode DMMoabFEMComputeBasis ( const PetscInt dim, const PetscInt nverts
 
   PetscFunctionBegin;
   PetscValidPointer(coordinates, 3);
-  PetscValidHeaderSpecific(quadrature, PETSC_OBJECT_CLASSID, 4);
+  PetscValidHeaderSpecific(quadrature, PETSCQUADRATURE_CLASSID, 4);
   PetscValidPointer(fe_basis, 7);
   compute_der = (fe_basis_derivatives != NULL);
 
@@ -748,7 +748,7 @@ PetscErrorCode DMMoabFEMComputeBasis ( const PetscInt dim, const PetscInt nverts
 
 
 
-/*@
+/*@C
   DMMoabFEMCreateQuadratureDefault - Create default quadrature rules for integration over an element with a given
   dimension and polynomial order (deciphered from number of element vertices).
 
@@ -1048,7 +1048,7 @@ PetscErrorCode FEMComputeBasis_JandF ( const PetscInt dim, const PetscInt nverts
 
 
 
-/*@
+/*@C
   DMMoabPToRMapping - Compute the mapping from the physical coordinate system for a given element to the
   canonical reference element. In addition to finding the inverse mapping evaluation through Newton iteration,
   the basis function at the parametric point is also evaluated optionally.
@@ -1158,4 +1158,3 @@ PetscErrorCode DMMoabPToRMapping( const PetscInt dim, const PetscInt nverts, con
   }
   PetscFunctionReturn(0);
 }
-
diff --git a/src/dm/impls/moab/dmmbmat.cxx b/src/dm/impls/moab/dmmbmat.cxx
index a03fd2309c5..dd5869585f6 100644
--- a/src/dm/impls/moab/dmmbmat.cxx
+++ b/src/dm/impls/moab/dmmbmat.cxx
@@ -206,7 +206,7 @@ static PetscErrorCode DMMoabSetBlockFills_Private(PetscInt w, const PetscInt *fi
 }
 
 
-/*@
+/*@C
     DMMoabSetBlockFills - Sets the fill pattern in each block for a multi-component problem
     of the matrix returned by DMCreateMatrix().
 
diff --git a/src/dm/impls/moab/dmmbmg.cxx b/src/dm/impls/moab/dmmbmg.cxx
index 379f60bf4f4..53682cd20b1 100644
--- a/src/dm/impls/moab/dmmbmg.cxx
+++ b/src/dm/impls/moab/dmmbmg.cxx
@@ -3,7 +3,7 @@
 #include 
 #include 
 
-/*@
+/*@C
   DMMoabGenerateHierarchy - Generate a multi-level uniform refinement hierarchy
   by successively refining a coarse mesh, already defined in the DM object
   provided by the user.
@@ -80,7 +80,7 @@ PetscErrorCode DMMoabGenerateHierarchy(DM dm, PetscInt nlevels, PetscInt *ldegre
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMRefineHierarchy_Moab - Generate a multi-level DM hierarchy
   by successively refining a coarse mesh.
 
@@ -110,7 +110,7 @@ PETSC_EXTERN PetscErrorCode  DMRefineHierarchy_Moab(DM dm, PetscInt nlevels, DM
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMCoarsenHierarchy_Moab - Generate a multi-level DM hierarchy
   by successively coarsening a refined mesh.
 
@@ -142,7 +142,7 @@ PETSC_EXTERN PetscErrorCode DMCoarsenHierarchy_Moab(DM dm, PetscInt nlevels, DM
 
 PETSC_EXTERN PetscErrorCode DMMoab_Compute_NNZ_From_Connectivity(DM, PetscInt*, PetscInt*, PetscInt*, PetscInt*, PetscBool);
 
-/*@
+/*@C
   DMCreateInterpolation_Moab - Generate the interpolation operators to transform
   operators (matrices, vectors) from parent level to child level as defined by
   the DM inputs provided by the user.
@@ -384,7 +384,7 @@ PETSC_EXTERN PetscErrorCode DMCreateInterpolation_Moab(DM dmp, DM dmc, Mat* inte
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMCreateInjection_Moab - Generate a multi-level uniform refinement hierarchy
   by successively refining a coarse mesh, already defined in the DM object
   provided by the user.
@@ -503,7 +503,7 @@ static PetscErrorCode DMMoab_UMR_Private(DM dm, MPI_Comm comm, PetscBool refine,
 }
 
 
-/*@
+/*@C
   DMRefine_Moab - Generate a multi-level uniform refinement hierarchy
   by successively refining a coarse mesh, already defined in the DM object
   provided by the user.
@@ -533,7 +533,7 @@ PETSC_EXTERN PetscErrorCode DMRefine_Moab(DM dm, MPI_Comm comm, DM* dmf)
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMCoarsen_Moab - Generate a multi-level uniform refinement hierarchy
   by successively refining a coarse mesh, already defined in the DM object
   provided by the user.
diff --git a/src/dm/impls/moab/dmmbutil.cxx b/src/dm/impls/moab/dmmbutil.cxx
index 789f6601a72..0fabb75966b 100644
--- a/src/dm/impls/moab/dmmbutil.cxx
+++ b/src/dm/impls/moab/dmmbutil.cxx
@@ -487,7 +487,7 @@ PetscErrorCode DMMBUtil_InitializeOptions(DMMoabMeshGeneratorCtx& genCtx, PetscI
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMMoabCreateBoxMesh - Creates a mesh on the tensor product (box) of intervals with genCtx specified bounds.
 
   Collective
@@ -799,7 +799,7 @@ PetscErrorCode DMMoab_GetReadOptions_Private(PetscBool by_rank, PetscInt numproc
 }
 
 
-/*@
+/*@C
   DMMoabLoadFromFile - Creates a DM object by loading the mesh from a user specified file.
 
   Collective
@@ -893,7 +893,7 @@ PetscErrorCode DMMoabLoadFromFile(MPI_Comm comm, PetscInt dim, PetscInt nghost,
 }
 
 
-/*@
+/*@C
   DMMoabRenumberMeshEntities - Order and number all entities (vertices->elements) to be contiguously ordered
   in parallel
 
diff --git a/src/dm/impls/moab/dmmoab.cxx b/src/dm/impls/moab/dmmoab.cxx
index 27a7c356612..750db301719 100644
--- a/src/dm/impls/moab/dmmoab.cxx
+++ b/src/dm/impls/moab/dmmoab.cxx
@@ -50,7 +50,7 @@ PETSC_EXTERN PetscErrorCode DMCreateSubDM_Moab(DM dm, PetscInt numFields, PetscI
 PETSC_EXTERN PetscErrorCode DMLocatePoints_Moab(DM dm, Vec v, IS *cellIS);
 */
 
-/*@
+/*@C
   DMMoabCreate - Creates a DMMoab object, which encapsulates a moab instance
 
   Collective
@@ -75,7 +75,7 @@ PetscErrorCode DMMoabCreate(MPI_Comm comm, DM *dmb)
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMMoabCreateMoab - Creates a DMMoab object, optionally from an instance and other data
 
   Collective
@@ -166,7 +166,7 @@ PetscErrorCode DMMoabCreateMoab(MPI_Comm comm, moab::Interface *mbiface, moab::T
 
 #ifdef MOAB_HAVE_MPI
 
-/*@
+/*@C
   DMMoabGetParallelComm - Get the ParallelComm used with this DMMoab
 
   Collective
@@ -191,7 +191,7 @@ PetscErrorCode DMMoabGetParallelComm(DM dm, moab::ParallelComm **pcomm)
 #endif /* MOAB_HAVE_MPI */
 
 
-/*@
+/*@C
   DMMoabSetInterface - Set the MOAB instance used with this DMMoab
 
   Collective
@@ -219,7 +219,7 @@ PetscErrorCode DMMoabSetInterface(DM dm, moab::Interface *mbiface)
 }
 
 
-/*@
+/*@C
   DMMoabGetInterface - Get the MOAB instance used with this DMMoab
 
   Collective
@@ -246,7 +246,7 @@ PetscErrorCode DMMoabGetInterface(DM dm, moab::Interface **mbiface)
 }
 
 
-/*@
+/*@C
   DMMoabSetLocalVertices - Set the entities having DOFs on this DMMoab
 
   Collective
@@ -297,7 +297,7 @@ PetscErrorCode DMMoabSetLocalVertices(DM dm, moab::Range *range)
 }
 
 
-/*@
+/*@C
   DMMoabGetAllVertices - Get the entities having DOFs on this DMMoab
 
   Collective
@@ -321,7 +321,7 @@ PetscErrorCode DMMoabGetAllVertices(DM dm, moab::Range *local)
 
 
 
-/*@
+/*@C
   DMMoabGetLocalVertices - Get the entities having DOFs on this DMMoab
 
   Collective
@@ -345,7 +345,7 @@ PetscErrorCode DMMoabGetLocalVertices(DM dm, const moab::Range **owned, const mo
   PetscFunctionReturn(0);
 }
 
-/*@
+/*@C
   DMMoabGetLocalElements - Get the higher-dimensional entities that are locally owned
 
   Collective
@@ -368,7 +368,7 @@ PetscErrorCode DMMoabGetLocalElements(DM dm, const moab::Range **range)
 }
 
 
-/*@
+/*@C
   DMMoabSetLocalElements - Set the entities having DOFs on this DMMoab
 
   Collective
@@ -407,7 +407,7 @@ PetscErrorCode DMMoabSetLocalElements(DM dm, moab::Range *range)
 }
 
 
-/*@
+/*@C
   DMMoabSetLocalToGlobalTag - Set the tag used for local to global numbering
 
   Collective
@@ -428,7 +428,7 @@ PetscErrorCode DMMoabSetLocalToGlobalTag(DM dm, moab::Tag ltogtag)
 }
 
 
-/*@
+/*@C
   DMMoabGetLocalToGlobalTag - Get the tag used for local to global numbering
 
   Collective
@@ -451,7 +451,7 @@ PetscErrorCode DMMoabGetLocalToGlobalTag(DM dm, moab::Tag *ltog_tag)
 }
 
 
-/*@
+/*@C
   DMMoabSetBlockSize - Set the block size used with this DMMoab
 
   Collective
@@ -472,7 +472,7 @@ PetscErrorCode DMMoabSetBlockSize(DM dm, PetscInt bs)
 }
 
 
-/*@
+/*@C
   DMMoabGetBlockSize - Get the block size used with this DMMoab
 
   Collective
@@ -495,7 +495,7 @@ PetscErrorCode DMMoabGetBlockSize(DM dm, PetscInt *bs)
 }
 
 
-/*@
+/*@C
   DMMoabGetSize - Get the global vertex size used with this DMMoab
 
   Collective on dm
@@ -520,7 +520,7 @@ PetscErrorCode DMMoabGetSize(DM dm, PetscInt *neg, PetscInt *nvg)
 }
 
 
-/*@
+/*@C
   DMMoabGetLocalSize - Get the local and ghosted vertex size used with this DMMoab
 
   Collective on dm
@@ -549,7 +549,7 @@ PetscErrorCode DMMoabGetLocalSize(DM dm, PetscInt *nel, PetscInt *neg, PetscInt
 }
 
 
-/*@
+/*@C
   DMMoabGetOffset - Get the local offset for the global vector
 
   Collective
@@ -572,7 +572,7 @@ PetscErrorCode DMMoabGetOffset(DM dm, PetscInt *offset)
 }
 
 
-/*@
+/*@C
   DMMoabGetDimension - Get the dimension of the DM Mesh
 
   Collective
@@ -595,7 +595,7 @@ PetscErrorCode DMMoabGetDimension(DM dm, PetscInt *dim)
 }
 
 
-/*@
+/*@C
   DMMoabGetHierarchyLevel - Get the current level of the mesh hierarchy
   generated through uniform refinement.
 
@@ -619,7 +619,7 @@ PetscErrorCode DMMoabGetHierarchyLevel(DM dm, PetscInt *nlevel)
 }
 
 
-/*@
+/*@C
   DMMoabGetMaterialBlock - Get the material ID corresponding to the current entity of the DM Mesh
 
   Collective
@@ -648,7 +648,7 @@ PetscErrorCode DMMoabGetMaterialBlock(DM dm, const moab::EntityHandle ehandle, P
 }
 
 
-/*@
+/*@C
   DMMoabGetVertexCoordinates - Get the coordinates corresponding to the requested vertex entities
 
   Collective
@@ -687,7 +687,7 @@ PetscErrorCode DMMoabGetVertexCoordinates(DM dm, PetscInt nconn, const moab::Ent
 }
 
 
-/*@
+/*@C
   DMMoabGetVertexConnectivity - Get the vertex adjacency for the given entity
 
   Collective
@@ -729,7 +729,7 @@ PetscErrorCode DMMoabGetVertexConnectivity(DM dm, moab::EntityHandle vhandle, Pe
 }
 
 
-/*@
+/*@C
   DMMoabRestoreVertexConnectivity - Restore the vertex connectivity for the given entity
 
   Collective
@@ -760,7 +760,7 @@ PetscErrorCode DMMoabRestoreVertexConnectivity(DM dm, moab::EntityHandle ehandle
 }
 
 
-/*@
+/*@C
   DMMoabGetElementConnectivity - Get the vertex adjacency for the given entity
 
   Collective
@@ -798,7 +798,7 @@ PetscErrorCode DMMoabGetElementConnectivity(DM dm, moab::EntityHandle ehandle, P
 }
 
 
-/*@
+/*@C
   DMMoabIsEntityOnBoundary - Check whether a given entity is on the boundary (vertex, edge, face, element)
 
   Collective
@@ -848,7 +848,7 @@ PetscErrorCode DMMoabIsEntityOnBoundary(DM dm, const moab::EntityHandle ent, Pet
 }
 
 
-/*@
+/*@C
   DMMoabIsEntityOnBoundary - Check whether a given entity is on the boundary (vertex, edge, face, element)
 
   Input Parameter:
@@ -881,7 +881,7 @@ PetscErrorCode DMMoabCheckBoundaryVertices(DM dm, PetscInt nconn, const moab::En
 }
 
 
-/*@
+/*@C
   DMMoabGetBoundaryMarkers - Return references to the vertices, faces, elements on the boundary
 
   Input Parameter:
@@ -1263,7 +1263,7 @@ PETSC_EXTERN PetscErrorCode DMSetUp_Moab(DM dm)
 }
 
 
-/*@
+/*@C
   DMMoabCreateVertices - Creates and adds several vertices to the primary set represented by the DM.
 
   Collective
@@ -1302,7 +1302,7 @@ PetscErrorCode DMMoabCreateVertices(DM dm, const PetscReal* coords, PetscInt nve
 }
 
 
-/*@
+/*@C
   DMMoabCreateElement - Adds an element of specified type to the primary set represented by the DM.
 
   Collective
@@ -1341,7 +1341,7 @@ PetscErrorCode DMMoabCreateElement(DM dm, const moab::EntityType type, const moa
 }
 
 
-/*@
+/*@C
   DMMoabCreateSubmesh - Creates a sub-DM object with a set that contains all vertices/elements of the parent
   in addition to providing support for dynamic mesh modifications. This is useful for AMR calculations to
   create a DM object on a refined level.
diff --git a/src/dm/impls/network/network.c b/src/dm/impls/network/network.c
index 144d7c2f55c..dd89353c906 100644
--- a/src/dm/impls/network/network.c
+++ b/src/dm/impls/network/network.c
@@ -22,6 +22,30 @@ PetscErrorCode DMNetworkGetPlex(DM netdm, DM *plexdm)
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMNetworkGetSizes - Gets the number of subnetworks and coupling subnetworks
+
+  Collective on dm
+
+  Input Parameter:
+. netdm - the DMNetwork object
+
+  Output Parameters:
++ Nsubnet - global number of subnetworks
+- NsubnetCouple - global number of coupling subnetworks
+
+  Level: intermediate
+
+.seealso: DMNetworkCreate()
+@*/
+PetscErrorCode DMNetworkGetSizes(DM netdm, PetscInt *Nsubnet, PetscInt *Ncsubnet)
+{
+  DM_Network *network = (DM_Network*) netdm->data;
+
+  PetscFunctionBegin;
+  *Nsubnet = network->nsubnet;
+  *Ncsubnet = network->ncsubnet;
+  PetscFunctionReturn(0);
+}
+
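
A minimal usage sketch of the new query routine (not part of this patch; netdm is assumed to be a DMNetwork whose sizes were already set with DMNetworkSetSizes()):

  PetscInt Nsubnet, NsubnetCouple;

  ierr = DMNetworkGetSizes(netdm, &Nsubnet, &NsubnetCouple);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "subnetworks %D, coupling subnetworks %D\n", Nsubnet, NsubnetCouple);CHKERRQ(ierr);
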
 /*@
   DMNetworkSetSizes - Sets the number of subnetworks, local and global vertices and edges for each subnetwork.
 
@@ -143,18 +167,14 @@ PetscErrorCode DMNetworkSetSizes(DM dm,PetscInt Nsubnet,PetscInt nV[], PetscInt
 PetscErrorCode DMNetworkSetEdgeList(DM dm,PetscInt *edgelist[],PetscInt *edgelistCouple[])
 {
   DM_Network *network = (DM_Network*) dm->data;
-  PetscInt   i,nsubnet,ncsubnet=network->ncsubnet;
+  PetscInt   i;
 
   PetscFunctionBegin;
-  nsubnet = network->nsubnet - ncsubnet;
-  for(i=0; i < nsubnet; i++) {
-    network->subnet[i].edgelist = edgelist[i];
-  }
-  if (edgelistCouple) {
-    PetscInt j;
-    j = 0;
-    nsubnet = network->nsubnet;
-    while (i < nsubnet) network->subnet[i++].edgelist = edgelistCouple[j++];
+  for (i=0; i < (network->nsubnet-network->ncsubnet); i++) network->subnet[i].edgelist = edgelist[i];
+  if (network->ncsubnet) {
+    PetscInt j = 0;
+    if (!edgelistCouple) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must provide edgelist_couple");
+    while (i < network->nsubnet) network->subnet[i++].edgelist = edgelistCouple[j++];
   }
   PetscFunctionReturn(0);
 }
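
With the rework above, the coupling edge list becomes mandatory whenever coupling subnetworks were declared. A hedged calling sketch (the arrays edges0, edges1 and coupleEdges are hypothetical and must match the counts given to DMNetworkSetSizes()):

  PetscInt *edgelists[2]       = {edges0, edges1};   /* one connectivity list per ordinary subnetwork */
  PetscInt *coupleEdgelists[1] = {coupleEdges};      /* required whenever coupling subnetworks exist */

  ierr = DMNetworkSetEdgeList(netdm, edgelists, coupleEdgelists);CHKERRQ(ierr);
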
@@ -211,6 +231,7 @@ PetscErrorCode DMNetworkLayoutSetUp(DM dm)
   nsubnet = network->nsubnet;
   while (i < nsubnet) {
     edgelist_couple = network->subnet[i].edgelist;
+
     k = 0;
     for (j = 0; j < network->subnet[i].nedge; j++) {
       netid = edgelist_couple[k]; vid = edgelist_couple[k+1];
@@ -310,6 +331,7 @@ PetscErrorCode DMNetworkLayoutSetUp(DM dm)
       network->header[i].ndata = 0;
       ierr = PetscSectionAddDof(network->DataSection,i,network->dataheadersize);CHKERRQ(ierr);
       network->header[i].offset[0] = 0;
+      network->header[i].offsetvarrel[0] = 0;
       i++;
     }
     if (i >= network->subnet[j].eEnd) j++;
@@ -350,6 +372,7 @@ PetscErrorCode DMNetworkLayoutSetUp(DM dm)
     network->header[i].ndata = 0;
     ierr = PetscSectionAddDof(network->DataSection,i,network->dataheadersize);CHKERRQ(ierr);
     network->header[i].offset[0] = 0;
+    network->header[i].offsetvarrel[0] = 0;
   }
 
   ierr = PetscFree2(vidxlTog,eowners);CHKERRQ(ierr);
@@ -381,6 +404,7 @@ PetscErrorCode DMNetworkGetSubnetworkInfo(DM dm,PetscInt id,PetscInt *nv, PetscI
   DM_Network *network = (DM_Network*)dm->data;
 
   PetscFunctionBegin;
+  if (id >= network->nsubnet) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Subnet ID %D exceeds the number of subnets %D",id,network->nsubnet);
   *nv   = network->subnet[id].nvtx;
   *ne   = network->subnet[id].nedge;
   *vtx  = network->subnet[id].vertices;
@@ -409,11 +433,19 @@ PetscErrorCode DMNetworkGetSubnetworkInfo(DM dm,PetscInt id,PetscInt *nv, PetscI
 PetscErrorCode DMNetworkGetSubnetworkCoupleInfo(DM dm,PetscInt id,PetscInt *ne,const PetscInt **edge)
 {
   DM_Network *net = (DM_Network*)dm->data;
-  PetscInt   id1 = id + net->nsubnet - net->ncsubnet;
+  PetscInt   id1;
 
   PetscFunctionBegin;
-  *ne   = net->subnet[id1].nedge;
-  *edge = net->subnet[id1].edges;
+  if (net->ncsubnet) {
+    if (id >= net->ncsubnet) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Subnet ID %D exceeds the number of coupling subnets %D",id,net->ncsubnet);
+
+    id1   = id + net->nsubnet - net->ncsubnet;
+    *ne   = net->subnet[id1].nedge;
+    *edge = net->subnet[id1].edges;
+  } else {
+    *ne   = 0;
+    *edge = NULL;
+  }
   PetscFunctionReturn(0);
 }
 
@@ -472,7 +504,7 @@ PetscErrorCode DMNetworkRegisterComponent(DM dm,const char *name,size_t size,Pet
   Input Parameters:
 . dm - The DMNetwork object
 
-  Output Paramters:
+  Output Parameters:
 + vStart - The first vertex point
 - vEnd   - One beyond the last vertex point
 
@@ -498,7 +530,7 @@ PetscErrorCode DMNetworkGetVertexRange(DM dm,PetscInt *vStart,PetscInt *vEnd)
   Input Parameters:
 . dm - The DMNetwork object
 
-  Output Paramters:
+  Output Parameters:
 + eStart - The first edge point
 - eEnd   - One beyond the last edge point
 
@@ -525,7 +557,7 @@ PetscErrorCode DMNetworkGetEdgeRange(DM dm,PetscInt *eStart,PetscInt *eEnd)
 + dm - DMNetwork object
 - p  - edge point
 
-  Output Paramters:
+  Output Parameters:
 . index - user global numbering for the edge
 
   Level: intermediate
@@ -556,7 +588,7 @@ PetscErrorCode DMNetworkGetGlobalEdgeIndex(DM dm,PetscInt p,PetscInt *index)
 + dm - DMNetwork object
 - p  - vertex point
 
-  Output Paramters:
+  Output Parameters:
 . index - user global numbering for the vertex
 
   Level: intermediate
@@ -692,12 +724,41 @@ PetscErrorCode DMNetworkAddComponent(DM dm, PetscInt p,PetscInt componentkey,voi
   ierr = PetscSectionAddDof(network->DataSection,p,component->size);CHKERRQ(ierr);
   header->key[header->ndata] = componentkey;
   if (header->ndata != 0) header->offset[header->ndata] = header->offset[header->ndata-1] + header->size[header->ndata-1];
+  header->nvar[header->ndata] = 0;
 
   cvalue->data[header->ndata] = (void*)compvalue;
   header->ndata++;
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMNetworkSetComponentNumVariables - Sets the number of variables for a component
+
+  Not Collective
+
+  Input Parameters:
++ dm           - The DMNetwork object
+. p            - vertex/edge point
+. compnum      - component number (First component added = 0, second = 1, ...)
+- nvar         - number of variables for the component
+
+  Level: intermediate
+
+.seealso: DMNetworkAddComponent(), DMNetworkGetNumComponents(),DMNetworkRegisterComponent()
+@*/
+PetscErrorCode DMNetworkSetComponentNumVariables(DM dm, PetscInt p,PetscInt compnum,PetscInt nvar)
+{
+  DM_Network               *network = (DM_Network*)dm->data;
+  DMNetworkComponentHeader header = &network->header[p];
+  PetscErrorCode           ierr;
+
+  PetscFunctionBegin;
+  ierr = DMNetworkAddNumVariables(dm,p,nvar);CHKERRQ(ierr);
+  header->nvar[compnum] = nvar;
+  if (compnum != 0) header->offsetvarrel[compnum] = header->offsetvarrel[compnum-1] + header->nvar[compnum-1];
+  PetscFunctionReturn(0);
+}
+
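
A short sketch of the intended calling sequence for DMNetworkSetComponentNumVariables() (not part of this patch; netdm, compkey and compdata are assumed to come from DMNetworkRegisterComponent() and the application):

  PetscInt v, vStart, vEnd;

  ierr = DMNetworkGetVertexRange(netdm, &vStart, &vEnd);CHKERRQ(ierr);
  for (v = vStart; v < vEnd; v++) {
    ierr = DMNetworkAddComponent(netdm, v, compkey, &compdata[v-vStart]);CHKERRQ(ierr); /* component 0 at point v */
    ierr = DMNetworkSetComponentNumVariables(netdm, v, 0, 2);CHKERRQ(ierr);             /* component 0 carries 2 variables */
  }
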
 /*@
   DMNetworkGetNumComponents - Get the number of components at a vertex/edge
 
@@ -779,6 +840,70 @@ PetscErrorCode DMNetworkGetVariableGlobalOffset(DM dm,PetscInt p,PetscInt *offse
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMNetworkGetComponentVariableOffset - Get the offset for accessing the variable associated with a component for the given vertex/edge from the local vector.
+
+  Not Collective
+
+  Input Parameters:
++ dm      - The DMNetwork object
+. p       - the edge/vertex point
+- compnum - component number
+
+  Output Parameters:
+. offset - the offset
+
+  Level: intermediate
+
+.seealso: DMNetworkGetVariableGlobalOffset(), DMGetLocalVector(), DMNetworkSetComponentNumVariables()
+@*/
+PetscErrorCode DMNetworkGetComponentVariableOffset(DM dm,PetscInt p,PetscInt compnum,PetscInt *offset)
+{
+  PetscErrorCode ierr;
+  DM_Network     *network = (DM_Network*)dm->data;
+  PetscInt       offsetp,offsetd;
+  DMNetworkComponentHeader header;
+
+  PetscFunctionBegin;
+  ierr = DMNetworkGetVariableOffset(dm,p,&offsetp);CHKERRQ(ierr);
+  ierr = PetscSectionGetOffset(network->DataSection,p,&offsetd);CHKERRQ(ierr);
+  header = (DMNetworkComponentHeader)(network->componentdataarray+offsetd);
+  *offset = offsetp + header->offsetvarrel[compnum];
+  PetscFunctionReturn(0);
+}
+
+/*@
+  DMNetworkGetComponentVariableGlobalOffset - Get the global offset for accessing the variable associated with a component for the given vertex/edge from the global vector.
+
+  Not Collective
+
+  Input Parameters:
++ dm      - The DMNetwork object
+. p       - the edge/vertex point
+- compnum - component number
+
+  Output Parameters:
+. offsetg - the global offset
+
+  Level: intermediate
+
+.seealso: DMNetworkGetVariableGlobalOffset(), DMNetworkGetComponentVariableOffset(), DMGetLocalVector(), DMNetworkSetComponentNumVariables()
+@*/
+PetscErrorCode DMNetworkGetComponentVariableGlobalOffset(DM dm,PetscInt p,PetscInt compnum,PetscInt *offsetg)
+{
+  PetscErrorCode ierr;
+  DM_Network     *network = (DM_Network*)dm->data;
+  PetscInt       offsetp,offsetd;
+  DMNetworkComponentHeader header;
+
+  PetscFunctionBegin;
+  ierr = DMNetworkGetVariableGlobalOffset(dm,p,&offsetp);CHKERRQ(ierr);
+  ierr = PetscSectionGetOffset(network->DataSection,p,&offsetd);CHKERRQ(ierr);
+  header = (DMNetworkComponentHeader)(network->componentdataarray+offsetd);
+  *offsetg = offsetp + header->offsetvarrel[compnum];
+  PetscFunctionReturn(0);
+}
+
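
A combined usage sketch for the two component-offset getters added above (not part of this patch; netdm, point v and the local vector localX are assumed, and component 0 is queried):

  PetscInt     offset, goffset;
  PetscScalar *xarr;

  ierr = DMNetworkGetComponentVariableOffset(netdm, v, 0, &offset);CHKERRQ(ierr);        /* offset into a local vector */
  ierr = DMNetworkGetComponentVariableGlobalOffset(netdm, v, 0, &goffset);CHKERRQ(ierr); /* offset into a global vector, e.g. for VecSetValues() */
  ierr = VecGetArray(localX, &xarr);CHKERRQ(ierr);
  xarr[offset] = 1.0;                                                                     /* first variable of component 0 at point v */
  ierr = VecRestoreArray(localX, &xarr);CHKERRQ(ierr);
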
 /*@
   DMNetworkGetEdgeOffset - Get the offset for accessing the variable associated with the given edge from the local subvector.
 
@@ -1266,7 +1391,7 @@ PetscErrorCode PetscSFGetSubSF(PetscSF mastersf, ISLocalToGlobalMapping map, Pet
 + dm - The DMNetwork object
 - p  - the vertex point
 
-  Output Paramters:
+  Output Parameters:
 + nedges - number of edges connected to this vertex point
 - edges  - List of edge points
 
@@ -1298,7 +1423,7 @@ PetscErrorCode DMNetworkGetSupportingEdges(DM dm,PetscInt vertex,PetscInt *nedge
 + dm - The DMNetwork object
 - p  - the edge point
 
-  Output Paramters:
+  Output Parameters:
 . vertices  - vertices connected to this edge
 
   Level: intermediate
diff --git a/src/dm/impls/plex/examples/tests/ex1.c b/src/dm/impls/plex/examples/tests/ex1.c
index 6ac72da5d8f..ad6380195bd 100644
--- a/src/dm/impls/plex/examples/tests/ex1.c
+++ b/src/dm/impls/plex/examples/tests/ex1.c
@@ -2,7 +2,7 @@ static char help[] = "Tests various DMPlex routines to construct, refine and dis
 
 #include <petscdmplex.h>
 
-typedef enum {BOX, CYLINDER} DomainShape;
+typedef enum {BOX, CYLINDER, SPHERE, BALL} DomainShape;
 enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};
 
 typedef struct {
@@ -37,7 +37,7 @@ typedef struct {
 
 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 {
-  const char       *dShapes[2] = {"box", "cylinder"};
+  const char       *dShapes[4] = {"box", "cylinder", "sphere", "ball"};
   PetscInt         shape, bd, n;
   static PetscInt  domainBoxSizes[3] = {1,1,1};
   static PetscReal domainBoxL[3] = {0.,0.,0.};
@@ -84,7 +84,7 @@ PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   ierr = PetscOptionsBool("-simplex2tensor", "Refine simplicial cells in tensor product cells", "ex1.c", options->simplex2tensor, &options->simplex2tensor, NULL);CHKERRQ(ierr);
   if (options->simplex2tensor) options->interpolate = PETSC_TRUE;
   shape = options->domainShape;
-  ierr = PetscOptionsEList("-domain_shape","The shape of the domain","ex1.c", dShapes, 2, dShapes[options->domainShape], &shape, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEList("-domain_shape","The shape of the domain","ex1.c", dShapes, 4, dShapes[options->domainShape], &shape, NULL);CHKERRQ(ierr);
   options->domainShape = (DomainShape) shape;
   ierr = PetscOptionsIntArray("-domain_box_sizes","The sizes of the box domain","ex1.c", domainBoxSizes, (n=3,&n), &flg);CHKERRQ(ierr);
   if (flg) { options->domainShape = BOX; options->domainBoxSizes = domainBoxSizes;}
@@ -124,6 +124,20 @@ PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   PetscFunctionReturn(0);
 }
 
+/* Overload time to be the sphere radius */
+static void snapToSphere(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+                         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+                         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+                         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
+{
+  PetscReal norm2 = 0.0, fac;
+  PetscInt  n = uOff[1] - uOff[0], d;
+
+  for (d = 0; d < n; ++d) norm2 += PetscSqr(PetscRealPart(u[d]));
+  fac = t/PetscSqrtReal(norm2);
+  for (d = 0; d < n; ++d) f0[d] = u[d]*fac;
+}
+
 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
 {
   PetscInt       dim                  = user->dim;
@@ -199,6 +213,43 @@ PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
         ierr = DMPlexCreateHexCylinderMesh(comm, 3, user->periodicity[2], dm);CHKERRQ(ierr);
       }
       break;
+    case SPHERE:
+      ierr = DMPlexCreateSphereMesh(comm, dim, cellSimplex, dm);CHKERRQ(ierr);
+      break;
+    case BALL:
+      {
+        DM       sdm;
+        PetscInt Nr = 0, r;
+
+        ierr = DMPlexCreateSphereMesh(comm, dim-1, cellSimplex, &sdm);CHKERRQ(ierr);
+        {
+          DM       cdm;
+          PetscFE  fe;
+          PetscInt dim, dE;
+
+          ierr = DMGetCoordinateDM(sdm, &cdm);CHKERRQ(ierr);
+          ierr = DMGetDimension(sdm, &dim);CHKERRQ(ierr);
+          ierr = DMGetCoordinateDim(sdm, &dE);CHKERRQ(ierr);
+          ierr = PetscFECreateLagrange(PETSC_COMM_SELF, dim, dE, PETSC_TRUE, 1, -1, &fe);CHKERRQ(ierr);
+          ierr = DMSetField(cdm, 0, NULL, (PetscObject) fe);CHKERRQ(ierr);
+          ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
+          ierr = DMCreateDS(cdm);CHKERRQ(ierr);
+        }
+        ierr = PetscOptionsGetInt(NULL, "bd_", "-dm_refine", &Nr, NULL);CHKERRQ(ierr);
+        for (r = 0; r < Nr; ++r) {
+          DM rdm, cdm, rcdm;
+          ierr = DMRefine(sdm, PETSC_COMM_WORLD, &rdm);CHKERRQ(ierr);
+          ierr = DMGetCoordinateDM(sdm, &cdm);CHKERRQ(ierr);
+          ierr = DMGetCoordinateDM(rdm, &rcdm);CHKERRQ(ierr);
+          ierr = DMCopyDisc(cdm, rcdm);CHKERRQ(ierr);
+          ierr = DMPlexRemapGeometry(rdm, 1.0, snapToSphere);CHKERRQ(ierr);
+          ierr = DMDestroy(&sdm);CHKERRQ(ierr);
+          sdm  = rdm;
+        }
+        ierr = DMPlexGenerate(sdm, NULL, interpolate, dm);CHKERRQ(ierr);
+        ierr = DMDestroy(&sdm);CHKERRQ(ierr);
+      }
+      break;
     default: SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Unknown domain shape %D", user->domainShape);
     }
   }
@@ -401,15 +452,15 @@ PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
   ierr = PetscObjectSetName((PetscObject) *dm, "Simplicial Mesh");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
   if (user->final_diagnostics) {
-    PetscBool interpolated = PETSC_TRUE;
+    DMPlexInterpolatedFlag interpolated;
     PetscInt  dim, depth;
 
     ierr = DMGetDimension(*dm, &dim);CHKERRQ(ierr);
     ierr = DMPlexGetDepth(*dm, &depth);CHKERRQ(ierr);
-    if (depth >= 0 && dim != depth) interpolated = PETSC_FALSE;
+    ierr = DMPlexIsInterpolatedCollective(*dm, &interpolated);CHKERRQ(ierr);
 
     ierr = DMPlexCheckSymmetry(*dm);CHKERRQ(ierr);
-    if (interpolated) {
+    if (interpolated == DMPLEX_INTERPOLATED_FULL) {
       ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr);
     }
     ierr = DMPlexCheckSkeleton(*dm, 0);CHKERRQ(ierr);
@@ -445,6 +496,11 @@ int main(int argc, char **argv)
     suffix: 1
     requires: ctetgen
     args: -dim 3 -ctetgen_verbose 4 -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail -info -info_exclude null
+  test:
+    # -dm_view exodusii:$PWD/mesh.exo -bd_dm_refine 2
+    suffix: ball_0
+    requires: ctetgen
+    args: -dim 3 -domain_shape ball -interpolate -dm_view
 
   # 2D LaTex and ASCII output 2-9
   test:
@@ -738,7 +794,7 @@ int main(int argc, char **argv)
   test:
     suffix: fluent_3
     requires: !complex
-    TODO: broken
+    TODO: Fails on non-linux: fseek(), fileno() ? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -interpolate 1 -dm_view -final_diagnostics 0
 
   # Med mesh reader tests, including parallel file reads
@@ -936,7 +992,7 @@ int main(int argc, char **argv)
       args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
     test:
       suffix: p4est_par_periodic_3d
-      args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
+      args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
     test:
       suffix: p4est_par_gmsh_periodic
       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
diff --git a/src/dm/impls/plex/examples/tests/ex11.c b/src/dm/impls/plex/examples/tests/ex11.c
index f5d3ce0cbd5..a65089a069d 100644
--- a/src/dm/impls/plex/examples/tests/ex11.c
+++ b/src/dm/impls/plex/examples/tests/ex11.c
@@ -150,7 +150,7 @@ static PetscErrorCode TestEmptyStrata(MPI_Comm comm)
   ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
   ierr = DMPlexDistribute(dm, 1, NULL, &dmDist);CHKERRQ(ierr);
   if (dmDist) {
-    ierr = DMDestroy(&dm);CHKERRQ(ierr);CHKERRQ(ierr);
+    ierr = DMDestroy(&dm);CHKERRQ(ierr);
     dm   = dmDist;
   }
   {
diff --git a/src/dm/impls/plex/examples/tests/ex18.c b/src/dm/impls/plex/examples/tests/ex18.c
index 835ada71122..07421b35223 100644
--- a/src/dm/impls/plex/examples/tests/ex18.c
+++ b/src/dm/impls/plex/examples/tests/ex18.c
@@ -1,7 +1,6 @@
-static char help[] = "Tests for parallel mesh loading\n\n";
+static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
 
 #include <petscdmplex.h>
-
 /* List of test meshes
 
 Network
@@ -187,7 +186,7 @@ cell   7-------------6-------------11 cell
 
 */
 
-typedef enum {NONE, SERIAL, PARALLEL} InterpType;
+typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType;
 
 typedef struct {
   PetscInt   debug;                        /* The debugging level */
@@ -198,6 +197,7 @@ typedef struct {
   InterpType interpolate;                  /* Interpolate the mesh before or after DMPlexDistribute() */
   PetscBool  useGenerator;                 /* Construct mesh with a mesh generator */
   PetscBool  testOrientIF;                 /* Test for different original interface orientations */
+  PetscBool  testHeavy;                    /* Run the heavy PointSF test */
   PetscBool  customView;                   /* Show results of DMPlexIsInterpolated() etc. */
   PetscInt   ornt[2];                      /* Orientation of interface on rank 0 and rank 1 */
   PetscInt   faces[3];                     /* Number of faces per dimension for generator */
@@ -210,9 +210,39 @@ typedef struct {
   char       filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
 } AppCtx;
 
+struct _n_PortableBoundary {
+  Vec coordinates;
+  PetscInt depth;
+  PetscSection *sections;
+};
+typedef struct _n_PortableBoundary * PortableBoundary;
+
+static PetscLogStage  stage[3];
+
+static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
+static PetscErrorCode DMPlexSetOrientInterface_Private(DM,PetscBool);
+static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
+static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
+
+static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
+{
+  PetscInt       d;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  if (!*bnd) PetscFunctionReturn(0);
+  ierr = VecDestroy(&(*bnd)->coordinates);CHKERRQ(ierr);
+  for (d=0; d < (*bnd)->depth; d++) {
+    ierr = PetscSectionDestroy(&(*bnd)->sections[d]);CHKERRQ(ierr);
+  }
+  ierr = PetscFree((*bnd)->sections);CHKERRQ(ierr);
+  ierr = PetscFree(*bnd);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 {
-  const char    *interpTypes[3]  = {"none", "serial", "parallel"};
+  const char    *interpTypes[4]  = {"none", "create", "after_create", "after_distribute"};
   PetscInt       interp=NONE, dim;
   PetscBool      flg1, flg2;
   PetscErrorCode ierr;
@@ -226,6 +256,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   options->interpolate  = NONE;
   options->useGenerator = PETSC_FALSE;
   options->testOrientIF = PETSC_FALSE;
+  options->testHeavy    = PETSC_TRUE;
   options->customView   = PETSC_FALSE;
   options->testExpandPointsEmpty = PETSC_FALSE;
   options->ornt[0]      = 0;
@@ -239,34 +270,23 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   ierr = PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");CHKERRQ(ierr);
   ierr = PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0);CHKERRQ(ierr);
   ierr = PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0);CHKERRQ(ierr);
-  ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3);CHKERRQ(ierr);
-  if (options->dim < 1 || options->dim > 3) SETERRQ1(comm, PETSC_ERR_ARG_OUTOFRANGE, "dimension set to %d, must be between 1 and 3", options->dim);
   ierr = PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsEList("-interpolate", "Type of mesh interpolation, e.g. none, serial, parallel", "ex18.c", interpTypes, 3, interpTypes[options->interpolate], &interp, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL);CHKERRQ(ierr);
   options->interpolate = (InterpType) interp;
-  if (!options->distribute && options->interpolate == PARALLEL) SETERRQ(comm, PETSC_ERR_SUP, "-interpolate parallel  needs  -distribute 1");
+  if (!options->distribute && options->interpolate == AFTER_DISTRIBUTE) SETERRQ(comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
   ierr = PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate serial -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0);CHKERRQ(ierr);
-  ierr = PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate serial -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0);CHKERRQ(ierr);
   options->ncoords = 128;
   ierr = PetscOptionsRealArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL);CHKERRQ(ierr);
-  if (flg2 != options->testOrientIF) SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
   ierr = PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL);CHKERRQ(ierr);
   options->nPointsToExpand = 128;
   ierr = PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL);CHKERRQ(ierr);
   if (options->nPointsToExpand) {
     ierr = PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL);CHKERRQ(ierr);
   }
+  ierr = PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL);CHKERRQ(ierr);
-  if (options->testOrientIF) {
-    PetscInt i;
-    for (i=0; i<2; i++) {
-      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
-    }
-    options->interpolate = SERIAL;
-    options->distribute = PETSC_FALSE;
-  }
+  ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3);CHKERRQ(ierr);
   dim = 3;
   ierr = PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2);CHKERRQ(ierr);
   if (flg2) {
@@ -274,6 +294,21 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
     options->dim = dim;
   }
   ierr = PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0);CHKERRQ(ierr);
+  ierr = PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0);CHKERRQ(ierr);
+  if (flg2 != options->testOrientIF) SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
+  if (options->testOrientIF) {
+    PetscInt i;
+    for (i=0; i<2; i++) {
+      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
+    }
+    options->filename[0]  = 0;
+    options->useGenerator = PETSC_FALSE;
+    options->dim          = 3;
+    options->cellSimplex  = PETSC_TRUE;
+    options->interpolate  = CREATE;
+    options->distribute   = PETSC_FALSE;
+  }
   ierr = PetscOptionsEnd();
   PetscFunctionReturn(0);
 }
@@ -540,6 +575,7 @@ static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, App
     ierr = DMPlexOrientCell_Internal(*dm, ifp[rank], start, reverse);CHKERRQ(ierr);
     ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr);
     ierr = DMPlexOrientInterface_Internal(*dm);CHKERRQ(ierr);
+    ierr = PetscPrintf(comm, "Orientation test PASSED\n");CHKERRQ(ierr);
   }
   PetscFunctionReturn(0);
 }
@@ -632,78 +668,143 @@ static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx
 
 static PetscErrorCode CustomView(DM dm, PetscViewer v)
 {
-  MPI_Comm       comm;
   DMPlexInterpolatedFlag interpolated;
-  PetscErrorCode ierr;
+  PetscBool              distributed;
+  PetscErrorCode         ierr;
 
   PetscFunctionBegin;
-  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
+  ierr = DMPlexIsDistributed(dm, &distributed);CHKERRQ(ierr);
   ierr = DMPlexIsInterpolatedCollective(dm, &interpolated);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]);CHKERRQ(ierr);
   ierr = PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
+{
+  const char    *filename       = user->filename;
+  PetscBool      testHeavy      = user->testHeavy;
+  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
+  PetscBool      distributed    = PETSC_FALSE;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  *serialDM = NULL;
+  if (testHeavy && interpCreate) {ierr = DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE);CHKERRQ(ierr);}
+  ierr = PetscLogStagePush(stage[0]);CHKERRQ(ierr);
+  ierr = DMPlexCreateFromFile(comm, filename, interpCreate, dm);CHKERRQ(ierr); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
+  ierr = PetscLogStagePop();CHKERRQ(ierr);
+  if (testHeavy && interpCreate) {ierr = DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE);CHKERRQ(ierr);}
+  ierr = DMPlexIsDistributed(*dm, &distributed);CHKERRQ(ierr);
+  ierr = PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial");CHKERRQ(ierr);
+  if (testHeavy && distributed) {
+    ierr = PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL);CHKERRQ(ierr);
+    ierr = DMPlexCreateFromFile(comm, filename, interpCreate, serialDM);CHKERRQ(ierr);
+    ierr = DMPlexIsDistributed(*serialDM, &distributed);CHKERRQ(ierr);
+    if (distributed) SETERRQ(comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
+  }
+  ierr = DMGetDimension(*dm, &user->dim);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
 {
-  PetscInt       dim            = user->dim;
+  PetscPartitioner part;
+  PortableBoundary boundary     = NULL;
+  DM             serialDM       = NULL;
   PetscBool      cellSimplex    = user->cellSimplex;
   PetscBool      useGenerator   = user->useGenerator;
-  PetscBool      interpSerial   = user->interpolate == SERIAL ? PETSC_TRUE : PETSC_FALSE;
-  PetscBool      interpParallel = user->interpolate == PARALLEL ? PETSC_TRUE : PETSC_FALSE;
-  const char    *filename       = user->filename;
-  size_t         len;
+  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
+  PetscBool      interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
+  PetscBool      interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
+  PetscBool      testHeavy      = user->testHeavy;
   PetscMPIInt    rank;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
-  ierr = PetscStrlen(filename, &len);CHKERRQ(ierr);
-  if (len) {
-    ierr = DMPlexCreateFromFile(comm, filename, interpSerial, dm);CHKERRQ(ierr);
-    ierr = DMGetDimension(*dm, &dim);CHKERRQ(ierr);
-    user->dim = dim;
+  if (user->filename[0]) {
+    ierr = CreateMeshFromFile(comm, user, dm, &serialDM);CHKERRQ(ierr);
   } else if (useGenerator) {
-    ierr = DMPlexCreateBoxMesh(comm, dim, cellSimplex, user->faces, NULL, NULL, NULL, interpSerial, dm);CHKERRQ(ierr);
+    ierr = PetscLogStagePush(stage[0]);CHKERRQ(ierr);
+    ierr = DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm);CHKERRQ(ierr);
+    ierr = PetscLogStagePop();CHKERRQ(ierr);
   } else {
-    switch (dim) {
+    ierr = PetscLogStagePush(stage[0]);CHKERRQ(ierr);
+    switch (user->dim) {
     case 1:
-      ierr = CreateMesh_1D(comm, interpSerial, user, dm);CHKERRQ(ierr);
+      ierr = CreateMesh_1D(comm, interpCreate, user, dm);CHKERRQ(ierr);
       break;
     case 2:
       if (cellSimplex) {
-        ierr = CreateSimplex_2D(comm, interpSerial, user, dm);CHKERRQ(ierr);
+        ierr = CreateSimplex_2D(comm, interpCreate, user, dm);CHKERRQ(ierr);
       } else {
-        ierr = CreateQuad_2D(comm, interpSerial, user, dm);CHKERRQ(ierr);
+        ierr = CreateQuad_2D(comm, interpCreate, user, dm);CHKERRQ(ierr);
       }
       break;
     case 3:
       if (cellSimplex) {
-        ierr = CreateSimplex_3D(comm, interpSerial, user, dm);CHKERRQ(ierr);
+        ierr = CreateSimplex_3D(comm, interpCreate, user, dm);CHKERRQ(ierr);
       } else {
-        ierr = CreateHex_3D(comm, interpSerial, user, dm);CHKERRQ(ierr);
+        ierr = CreateHex_3D(comm, interpCreate, user, dm);CHKERRQ(ierr);
       }
       break;
     default:
-      SETERRQ1(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %D", dim);
+      SETERRQ1(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %D", user->dim);
     }
+    ierr = PetscLogStagePop();CHKERRQ(ierr);
   }
   if (user->ncoords % user->dim) SETERRQ2(comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %D must be divisible by spatial dimension %D", user->ncoords, user->dim);
   ierr = PetscObjectSetName((PetscObject) *dm, "Original Mesh");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*dm, NULL, "-orig_dm_view");CHKERRQ(ierr);
 
+  if (interpSerial) {
+    DM idm;
+
+    if (testHeavy) {ierr = DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE);CHKERRQ(ierr);}
+    ierr = PetscLogStagePush(stage[2]);CHKERRQ(ierr);
+    ierr = DMPlexInterpolate(*dm, &idm);CHKERRQ(ierr); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
+    ierr = PetscLogStagePop();CHKERRQ(ierr);
+    if (testHeavy) {ierr = DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE);CHKERRQ(ierr);}
+    ierr = DMDestroy(dm);CHKERRQ(ierr);
+    *dm = idm;
+    ierr = PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh");CHKERRQ(ierr);
+    ierr = DMViewFromOptions(*dm, NULL, "-intp_dm_view");CHKERRQ(ierr);
+  }
+
+  /* Set partitioner options */
+  ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
+  if (part) {
+    ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
+    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
+  }
+
   if (user->customView) {ierr = CustomView(*dm, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);}
+  if (testHeavy) {
+    PetscBool distributed;
 
+    ierr = DMPlexIsDistributed(*dm, &distributed);CHKERRQ(ierr);
+    if (!serialDM && !distributed) {
+      serialDM = *dm;
+      ierr = PetscObjectReference((PetscObject)*dm);CHKERRQ(ierr);
+    }
+    if (serialDM) {
+      ierr = DMPlexGetExpandedBoundary_Private(serialDM, &boundary);CHKERRQ(ierr);
+    }
+    if (boundary) {
+      /* check DM which has been created in parallel and already interpolated */
+      ierr = DMPlexCheckPointSFHeavy(*dm, boundary);CHKERRQ(ierr);
+    }
+    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
+    ierr = DMPlexOrientInterface_Internal(*dm);CHKERRQ(ierr);
+  }
   if (user->distribute) {
     DM               pdm = NULL;
-    PetscPartitioner part;
-
-    /* Set partitioner options */
-    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
-    ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
-    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
 
     /* Redistribute mesh over processes using that partitioner */
+    ierr = PetscLogStagePush(stage[1]);CHKERRQ(ierr);
     ierr = DMPlexDistribute(*dm, 0, NULL, &pdm);CHKERRQ(ierr);
+    ierr = PetscLogStagePop();CHKERRQ(ierr);
     if (pdm) {
       ierr = DMDestroy(dm);CHKERRQ(ierr);
       *dm  = pdm;
@@ -714,26 +815,41 @@ static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
     if (interpParallel) {
       DM idm;
 
-      ierr = DMPlexInterpolate(*dm, &idm);CHKERRQ(ierr);
+      if (testHeavy) {ierr = DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE);CHKERRQ(ierr);}
+      ierr = PetscLogStagePush(stage[2]);CHKERRQ(ierr);
+      ierr = DMPlexInterpolate(*dm, &idm);CHKERRQ(ierr); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
+      ierr = PetscLogStagePop();CHKERRQ(ierr);
+      if (testHeavy) {ierr = DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE);CHKERRQ(ierr);}
       ierr = DMDestroy(dm);CHKERRQ(ierr);
       *dm = idm;
       ierr = PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh");CHKERRQ(ierr);
       ierr = DMViewFromOptions(*dm, NULL, "-intp_dm_view");CHKERRQ(ierr);
     }
   }
+  if (testHeavy) {
+    if (boundary) {
+      ierr = DMPlexCheckPointSFHeavy(*dm, boundary);CHKERRQ(ierr);
+    }
+    /* Orient the interface, since orientation could have been deliberately skipped above. The operation is idempotent. */
+    ierr = DMPlexOrientInterface_Internal(*dm);CHKERRQ(ierr);
+  }
+
   ierr = PetscObjectSetName((PetscObject) *dm, "Parallel Mesh");CHKERRQ(ierr);
   ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
   ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
 
   if (user->customView) {ierr = CustomView(*dm, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);}
+  ierr = DMDestroy(&serialDM);CHKERRQ(ierr);
+  ierr = PortableBoundaryDestroy(&boundary);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-PETSC_STATIC_INLINE PetscErrorCode coord2str(char buf[], PetscInt len, PetscInt dim, PetscReal *coords, PetscReal tol)
+PETSC_STATIC_INLINE PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, PetscReal *coords, PetscReal tol)
 {
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  if (dim > 3) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal to 3");
   if (tol >= 1e-3) {
     switch (dim) {
       case 1: ierr = PetscSNPrintf(buf,len,"(%12.3f)",(double)coords[0]);CHKERRQ(ierr);
@@ -767,7 +883,7 @@ static PetscErrorCode ViewVerticesFromCoords(DM dm, PetscInt npoints, PetscReal
   ierr = PetscMalloc1(npoints, &points);CHKERRQ(ierr);
   ierr = DMPlexFindVertices(dm, npoints, coords, tol, points);CHKERRQ(ierr);
   for (i=0; i < npoints; i++) {
-    ierr = coord2str(coordstr, 128, dim, &coords[i*dim], tol);CHKERRQ(ierr);
+    ierr = coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol);CHKERRQ(ierr);
     if (!rank && i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "-----\n");CHKERRQ(ierr);}
     ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%D] = %D\n", rank, coordstr, i, points[i]);CHKERRQ(ierr);
     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
@@ -785,7 +901,7 @@ static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
   PetscInt          d,depth;
   PetscMPIInt       rank;
   PetscErrorCode    ierr;
-  PetscViewer       sviewer;
+  PetscViewer       viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;
 
   PetscFunctionBegin;
   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
@@ -795,21 +911,674 @@ static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
     ierr = ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is);CHKERRQ(ierr);
   }
   ierr = DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects);CHKERRQ(ierr);
-  ierr = PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
-  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD, "[%d] ==========================\n",rank);CHKERRQ(ierr);
+  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank);CHKERRQ(ierr);
   for (d=depth-1; d>=0; d--) {
-    ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD, "depth %D ---------------\n",d);CHKERRQ(ierr);
-    ierr = PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
+    IS          checkIS;
+    PetscBool   flg;
+
+    ierr = PetscViewerASCIIPrintf(sviewer, "depth %D ---------------\n",d);CHKERRQ(ierr);
     ierr = PetscSectionView(sects[d], sviewer);CHKERRQ(ierr);
     ierr = ISView(iss[d], sviewer);CHKERRQ(ierr);
-    ierr = PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
+    /* check reverse operation */
+    if (d < depth-1) {
+      ierr = DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS);CHKERRQ(ierr);
+      ierr = ISEqualUnsorted(checkIS, iss[d+1], &flg);CHKERRQ(ierr);
+      if (!flg) SETERRQ(PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
+      ierr = ISDestroy(&checkIS);CHKERRQ(ierr);
+    }
   }
-  ierr = PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
+  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
   ierr = DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects);CHKERRQ(ierr);
   ierr = ISDestroy(&is);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
+{
+  PetscInt          n,n1,ncone,numCoveredPoints,o,p,q,start,end;
+  const PetscInt    *coveredPoints;
+  const PetscInt    *arr, *cone;
+  PetscInt          *newarr;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
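+  /* for each point of the section, replace its stored (expanded) cone by the unique point it covers, found via DMPlexGetJoin() */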
+  ierr = ISGetLocalSize(is, &n);CHKERRQ(ierr);
+  ierr = PetscSectionGetStorageSize(section, &n1);CHKERRQ(ierr);
+  ierr = PetscSectionGetChart(section, &start, &end);CHKERRQ(ierr);
+  if (n != n1) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %D != %D = section storage size\n", n, n1);
+  ierr = ISGetIndices(is, &arr);CHKERRQ(ierr);
+  ierr = PetscMalloc1(end-start, &newarr);CHKERRQ(ierr);
+  for (q=start; q<end; q++) {
+    ierr = PetscSectionGetDof(section, q, &ncone);CHKERRQ(ierr);
+    ierr = PetscSectionGetOffset(section, q, &o);CHKERRQ(ierr);
+    cone = &arr[o];
+    if (ncone == 1) {
+      p = cone[0];
+    } else {
+      PetscInt i;
+
+      /* negative cone points mark points not present locally; propagate them instead of computing a join */
+      p = 0;
+      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = cone[i]; break;}
+      if (p >= 0) {
+        ierr = DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints);CHKERRQ(ierr);
+        if (numCoveredPoints > 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered point for section point %D",q);
+        if (numCoveredPoints) p = coveredPoints[0];
+        else                  p = -2;
+        ierr = DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints);CHKERRQ(ierr);
+      }
+    }
+    newarr[q-start] = p;
+  }
+  ierr = ISRestoreIndices(is, &arr);CHKERRQ(ierr);
+  ierr = ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
+{
+  PetscInt          d;
+  IS                is,newis;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
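+  /* repeatedly convert expanded cones to their covering faces, one depth level per pass */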
+  is = boundary_expanded_is;
+  ierr = PetscObjectReference((PetscObject)is);CHKERRQ(ierr);
+  for (d = 0; d < depth-1; ++d) {
+    ierr = DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis);CHKERRQ(ierr);
+    ierr = ISDestroy(&is);CHKERRQ(ierr);
+    is = newis;
+  }
+  *boundary_is = is;
+  PetscFunctionReturn(0);
+}
+
+#define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; CHKERRQ(ierr);}
+
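+/* view a label with an option-controlled viewer created on the given communicator rather than on the label's own (sequential) communicator */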
+static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
+{
+  PetscErrorCode    ierr;
+  PetscViewer       viewer;
+  PetscBool         flg;
+  static PetscBool  incall = PETSC_FALSE;
+  PetscViewerFormat format;
+
+  PetscFunctionBegin;
+  if (incall) PetscFunctionReturn(0);
+  incall = PETSC_TRUE;
+  ierr   = PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg);CHKERRQI(incall,ierr);
+  if (flg) {
+    ierr = PetscViewerPushFormat(viewer,format);CHKERRQI(incall,ierr);
+    ierr = DMLabelView(label, viewer);CHKERRQI(incall,ierr);
+    ierr = PetscViewerPopFormat(viewer);CHKERRQI(incall,ierr);
+    ierr = PetscViewerDestroy(&viewer);CHKERRQI(incall,ierr);
+  }
+  incall = PETSC_FALSE;
+  PetscFunctionReturn(0);
+}
+
+/* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
+PETSC_STATIC_INLINE PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
+{
+  IS                tmpis;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  ierr = DMLabelGetStratumIS(label, value, &tmpis);CHKERRQ(ierr);
+  if (!tmpis) {ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis);CHKERRQ(ierr);}
+  ierr = ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is);CHKERRQ(ierr);
+  ierr = ISDestroy(&tmpis);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/* currently only for simple PetscSection without fields or constraints */
+static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
+{
+  PetscSection      sec;
+  PetscInt          chart[2], p;
+  PetscInt          *dofarr;
+  PetscMPIInt       rank;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  if (rank == rootrank) {
+    ierr = PetscSectionGetChart(sec0, &chart[0], &chart[1]);CHKERRQ(ierr);
+  }
+  ierr = MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm);CHKERRQ(ierr);
+  ierr = PetscMalloc1(chart[1]-chart[0], &dofarr);CHKERRQ(ierr);
+  if (rank == rootrank) {
+    for (p = chart[0]; p < chart[1]; p++) {
+      ierr = PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]);CHKERRQ(ierr);
+    }
+  }
+  ierr = MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm);CHKERRQ(ierr);
+  ierr = PetscSectionCreate(comm, &sec);CHKERRQ(ierr);
+  ierr = PetscSectionSetChart(sec, chart[0], chart[1]);CHKERRQ(ierr);
+  for (p = chart[0]; p < chart[1]; p++) {
+    ierr = PetscSectionSetDof(sec, p, dofarr[p-chart[0]]);CHKERRQ(ierr);
+  }
+  ierr = PetscSectionSetUp(sec);CHKERRQ(ierr);
+  ierr = PetscFree(dofarr);CHKERRQ(ierr);
+  *secout = sec;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode VecToPetscReal_Private(Vec vec, PetscReal *rvals[])
+{
+  PetscInt          n;
+  const PetscScalar *svals;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  ierr = VecGetLocalSize(vec, &n);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(vec, &svals);CHKERRQ(ierr);
+  ierr = PetscMalloc1(n, rvals);CHKERRQ(ierr);
+#if defined(PETSC_USE_COMPLEX)
+  {
+    PetscInt i;
+    for (i=0; i<n; i++) (*rvals)[i] = PetscRealPart(svals[i]);
+  }
+#else
+  ierr = PetscMemcpy(*rvals, svals, n*sizeof(PetscReal));CHKERRQ(ierr);
+#endif
+  ierr = VecRestoreArrayRead(vec, &svals);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
+{
+  IS                faces_expanded_is;
+  PetscReal         *rcoords;
+  PetscInt          dim, ncoords, npoints;
+  PetscInt          *points;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  ierr = DMGetCoordinateDim(ipdm, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(bnd->coordinates, &ncoords);CHKERRQ(ierr);
+  ierr = VecToPetscReal_Private(bnd->coordinates, &rcoords);CHKERRQ(ierr);
+  npoints = ncoords / dim;
+  ierr = PetscMalloc1(npoints, &points);CHKERRQ(ierr);
+  ierr = DMPlexFindVertices(ipdm, npoints, rcoords, 0.0, points);CHKERRQ(ierr);
+  ierr = ISCreateGeneral(PETSC_COMM_SELF, npoints, points, PETSC_OWN_POINTER, &faces_expanded_is);CHKERRQ(ierr);
+  ierr = DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is);CHKERRQ(ierr);
+  ierr = PetscFree(rcoords);CHKERRQ(ierr);
+  ierr = ISDestroy(&faces_expanded_is);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
+static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
+{
+  PetscOptions      options = NULL;
+  const char        *prefix = NULL;
+  const char        opt[] = "-dm_plex_interpolate_orient_interfaces";
+  char              prefix_opt[512];
+  PetscBool         flg, set;
+  static PetscBool  wasSetTrue = PETSC_FALSE;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  if (dm) {
+    ierr = PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix);CHKERRQ(ierr);
+    options = ((PetscObject)dm)->options;
+  }
+  ierr = PetscStrcpy(prefix_opt, "-");CHKERRQ(ierr);
+  ierr = PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt));CHKERRQ(ierr);
+  ierr = PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt));CHKERRQ(ierr);
+  ierr = PetscOptionsGetBool(options, prefix, opt, &flg, &set);CHKERRQ(ierr);
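+  /* remember whether the option was explicitly set to true, so re-enabling can restore that value instead of clearing it */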
+  if (!enable) {
+    if (set && flg) wasSetTrue = PETSC_TRUE;
+    ierr = PetscOptionsSetValue(options, prefix_opt, "0");CHKERRQ(ierr);
+  } else if (set && !flg) {
+    if (wasSetTrue) {
+      ierr = PetscOptionsSetValue(options, prefix_opt, "1");CHKERRQ(ierr);
+    } else {
+      /* default is PETSC_TRUE */
+      ierr = PetscOptionsClearValue(options, prefix_opt);CHKERRQ(ierr);
+    }
+    wasSetTrue = PETSC_FALSE;
+  }
+#if defined(PETSC_USE_DEBUG)
+  {
+    ierr = PetscOptionsGetBool(options, prefix, opt, &flg, &set);CHKERRQ(ierr);
+    if (PetscUnlikely(set && flg != enable)) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
+  }
+#endif
+  PetscFunctionReturn(0);
+}
+
+/* get coordinate description of the whole-domain boundary */
+static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
+{
+  PortableBoundary       bnd0, bnd;
+  MPI_Comm               comm;
+  DM                     idm;
+  DMLabel                label;
+  PetscInt               d;
+  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
+  IS                     boundary_is;
+  IS                     *boundary_expanded_iss;
+  PetscMPIInt            rootrank = 0;
+  PetscMPIInt            rank, size;
+  PetscInt               value = 1;
+  DMPlexInterpolatedFlag intp;
+  PetscBool              flg;
+  PetscErrorCode         ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscNew(&bnd);CHKERRQ(ierr);
+  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
+  ierr = DMPlexIsDistributed(dm, &flg);CHKERRQ(ierr);
+  if (flg) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");
+
+  /* interpolate serial DM if not yet interpolated */
+  ierr = DMPlexIsInterpolatedCollective(dm, &intp);CHKERRQ(ierr);
+  if (intp == DMPLEX_INTERPOLATED_FULL) {
+    idm = dm;
+    ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
+  } else {
+    ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
+    ierr = DMViewFromOptions(idm, NULL, "-idm_view");CHKERRQ(ierr);
+  }
+
+  /* mark whole-domain boundary of the serial DM */
+  ierr = DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label);CHKERRQ(ierr);
+  ierr = DMAddLabel(idm, label);CHKERRQ(ierr);
+  ierr = DMPlexMarkBoundaryFaces(idm, value, label);CHKERRQ(ierr);
+  ierr = DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm);CHKERRQ(ierr);
+  ierr = DMLabelGetStratumIS(label, value, &boundary_is);CHKERRQ(ierr);
+
+  /* translate to coordinates */
+  ierr = PetscNew(&bnd0);CHKERRQ(ierr);
+  ierr = DMGetCoordinatesLocalSetUp(idm);CHKERRQ(ierr);
+  if (rank == rootrank) {
+    ierr = DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections);CHKERRQ(ierr);
+    ierr = DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates);CHKERRQ(ierr);
+    /* self-check */
+    {
+      IS is0;
+      ierr = DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0);CHKERRQ(ierr);
+      ierr = ISEqual(is0, boundary_is, &flg);CHKERRQ(ierr);
+      if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
+      ierr = ISDestroy(&is0);CHKERRQ(ierr);
+    }
+  } else {
+    ierr = VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates);CHKERRQ(ierr);
+  }
+
+  {
+    Vec         tmp;
+    VecScatter  sc;
+    IS          xis;
+    PetscInt    n;
+
+    /* just convert seq vectors to mpi vector */
+    ierr = VecGetLocalSize(bnd0->coordinates, &n);CHKERRQ(ierr);
+    ierr = MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm);CHKERRQ(ierr);
+    if (rank == rootrank) {
+      ierr = VecCreateMPI(comm, n, n, &tmp);CHKERRQ(ierr);
+    } else {
+      ierr = VecCreateMPI(comm, 0, n, &tmp);CHKERRQ(ierr);
+    }
+    ierr = VecCopy(bnd0->coordinates, tmp);CHKERRQ(ierr);
+    ierr = VecDestroy(&bnd0->coordinates);CHKERRQ(ierr);
+    bnd0->coordinates = tmp;
+
+    /* replicate coordinates from root rank to all ranks */
+    ierr = VecCreateMPI(comm, n, n*size, &bnd->coordinates);CHKERRQ(ierr);
+    ierr = ISCreateStride(comm, n, 0, 1, &xis);CHKERRQ(ierr);
+    ierr = VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc);CHKERRQ(ierr);
+    ierr = VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
+    ierr = VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
+    ierr = VecScatterDestroy(&sc);CHKERRQ(ierr);
+    ierr = ISDestroy(&xis);CHKERRQ(ierr);
+  }
+  bnd->depth = bnd0->depth;
+  ierr = MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm);CHKERRQ(ierr);
+  ierr = PetscMalloc1(bnd->depth, &bnd->sections);CHKERRQ(ierr);
+  for (d=0; d<bnd->depth; d++) {
+    ierr = PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]);CHKERRQ(ierr);
+  }
+
+  if (rank == rootrank) {
+    ierr = DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections);CHKERRQ(ierr);
+  }
+  ierr = PortableBoundaryDestroy(&bnd0);CHKERRQ(ierr);
+  ierr = DMRemoveLabelBySelf(idm, &label, PETSC_TRUE);CHKERRQ(ierr);
+  ierr = DMLabelDestroy(&label);CHKERRQ(ierr);
+  ierr = ISDestroy(&boundary_is);CHKERRQ(ierr);
+  ierr = DMDestroy(&idm);CHKERRQ(ierr);
+  *boundary = bnd;
+  PetscFunctionReturn(0);
+}
+
+/* get faces of inter-partition interface */
+static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
+{
+  MPI_Comm               comm;
+  DMLabel                label;
+  IS                     part_boundary_faces_is;
+  const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
+  PetscInt               value = 1;
+  DMPlexInterpolatedFlag intp;
+  PetscErrorCode         ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)ipdm, &comm);CHKERRQ(ierr);
+  ierr = DMPlexIsInterpolatedCollective(ipdm, &intp);CHKERRQ(ierr);
+  if (intp != DMPLEX_INTERPOLATED_FULL) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
+
+  /* get ipdm partition boundary (partBoundary) */
+  ierr = DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label);CHKERRQ(ierr);
+  ierr = DMAddLabel(ipdm, label);CHKERRQ(ierr);
+  ierr = DMPlexMarkBoundaryFaces(ipdm, value, label);CHKERRQ(ierr);
+  ierr = DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm);CHKERRQ(ierr);
+  ierr = DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is);CHKERRQ(ierr);
+  ierr = DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE);CHKERRQ(ierr);
+  ierr = DMLabelDestroy(&label);CHKERRQ(ierr);
+
+  /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
+  ierr = ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is);CHKERRQ(ierr);
+  ierr = ISDestroy(&part_boundary_faces_is);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/* compute inter-partition interface including edges and vertices */
+static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
+{
+  DMLabel                label;
+  PetscInt               value = 1;
+  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
+  DMPlexInterpolatedFlag intp;
+  MPI_Comm               comm;
+  PetscErrorCode         ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)ipdm, &comm);CHKERRQ(ierr);
+  ierr = DMPlexIsInterpolatedCollective(ipdm, &intp);CHKERRQ(ierr);
+  if (intp != DMPLEX_INTERPOLATED_FULL) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
+
+  ierr = DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label);CHKERRQ(ierr);
+  ierr = DMAddLabel(ipdm, label);CHKERRQ(ierr);
+  ierr = DMLabelSetStratumIS(label, value, interface_faces_is);CHKERRQ(ierr);
+  ierr = DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm);CHKERRQ(ierr);
+  ierr = DMPlexLabelComplete(ipdm, label);CHKERRQ(ierr);
+  ierr = DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm);CHKERRQ(ierr);
+  ierr = DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject)*interface_is, "interface_is");CHKERRQ(ierr);
+  ierr = ISViewFromOptions(*interface_is, NULL, "-interface_is_view");CHKERRQ(ierr);
+  ierr = DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE);CHKERRQ(ierr);
+  ierr = DMLabelDestroy(&label);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
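+/* leaves of the PointSF are local points whose owning (root) copy resides on another rank */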
+static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
+{
+  PetscInt        n;
+  const PetscInt  *arr;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscSFGetGraph(sf, NULL, &n, &arr, NULL);CHKERRQ(ierr);
+  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
+{
+  PetscInt        n;
+  const PetscInt  *rootdegree;
+  PetscInt        *arr;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
+  ierr = PetscSFComputeDegreeBegin(sf, &rootdegree);CHKERRQ(ierr);
+  ierr = PetscSFComputeDegreeEnd(sf, &rootdegree);CHKERRQ(ierr);
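+  /* the multi-root original numbering lists local roots with nonzero degree, i.e. points referenced by leaves on other ranks */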
+  ierr = PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr);CHKERRQ(ierr);
+  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
+{
+  IS pointSF_out_is, pointSF_in_is;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is);CHKERRQ(ierr);
+  ierr = PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is);CHKERRQ(ierr);
+  ierr = ISExpand(pointSF_out_is, pointSF_in_is, is);CHKERRQ(ierr);
+  ierr = ISDestroy(&pointSF_out_is);CHKERRQ(ierr);
+  ierr = ISDestroy(&pointSF_in_is);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+#define MYCHKERRQ(ierr) do {if (PetscUnlikely(ierr)) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!");} while (0)
+
+static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
+{
+  DMLabel         label;
+  PetscSection    coordsSection;
+  Vec             coordsVec;
+  PetscScalar     *coordsScalar;
+  PetscInt        coneSize, depth, dim, i, p, npoints;
+  const PetscInt  *points;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMGetCoordinateSection(dm, &coordsSection);CHKERRQ(ierr);
+  ierr = DMGetCoordinatesLocal(dm, &coordsVec);CHKERRQ(ierr);
+  ierr = VecGetArray(coordsVec, &coordsScalar);CHKERRQ(ierr);
+  ierr = ISGetLocalSize(pointsIS, &npoints);CHKERRQ(ierr);
+  ierr = ISGetIndices(pointsIS, &points);CHKERRQ(ierr);
+  ierr = DMPlexGetDepthLabel(dm, &label);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
+  for (i=0; i EC
       c = 0
-      write(*,*) 'cell',c,pEC
+      write(*,1000) 'cell',c,pEC
+ 1000 format (a,i4,50i4)
       call DMPlexSetCone(dm, c , pEC, ierr);CHKERRA(ierr)
       call DMPlexGetCone(dm, c , pEC, ierr);CHKERRA(ierr)
-      write(*,*) 'cell',c,pEC
+      write(*,1000) 'cell',c,pEC
       EC(1) = 4
       EC(2) = 5
       EC(3) = 6
       EC(4) = 7
       pEC => EC
       c = 1
-      write(*,*) 'cell',c,pEC
+      write(*,1000) 'cell',c,pEC
       call DMPlexSetCone(dm, c , pEC, ierr);CHKERRA(ierr)
       call DMPlexGetCone(dm, c , pEC, ierr);CHKERRA(ierr)
-      write(*,*) 'cell',c,pEC
+      write(*,1000) 'cell',c,pEC
       call DMPlexRestoreCone(dm, c , pEC, ierr);CHKERRA(ierr)
 
       call DMPlexSymmetrize(dm, ierr);CHKERRA(ierr)
@@ -60,7 +61,7 @@ program main
 
       v = 4
       call DMPlexGetSupport(dm, v , pES, ierr);CHKERRA(ierr)
-      write(*,*) 'vertex',v,pES
+      write(*,1000) 'vertex',v,pES
       call DMPlexRestoreSupport(dm, v , pES, ierr);CHKERRA(ierr)
 
       call DMDestroy(dm,ierr);CHKERRA(ierr)
diff --git a/src/dm/impls/plex/examples/tests/ex22.c b/src/dm/impls/plex/examples/tests/ex22.c
index 58c83d5c8cd..cb4a73dc8ec 100644
--- a/src/dm/impls/plex/examples/tests/ex22.c
+++ b/src/dm/impls/plex/examples/tests/ex22.c
@@ -157,8 +157,8 @@ int main(int argc, char **argv)
             ierr = DMSetField(dm,0,NULL,(PetscObject)fe);CHKERRQ(ierr);
             ierr = DMCreateDS(dm);CHKERRQ(ierr);
             ierr = DMCreateLocalVector(dm,&localCoords);CHKERRQ(ierr);
-            ierr = VecSetDM(localCoords,NULL);CHKERRQ(ierr);
             ierr = DMProjectFunctionLocal(dm,0,funcs,ctxs,INSERT_VALUES,localCoords);CHKERRQ(ierr);
+            ierr = VecSetDM(localCoords,NULL);CHKERRQ(ierr); /* This is necessary to prevent a reference loop */
             ierr = DMClone(dm,&dmCoord);CHKERRQ(ierr);
             ierr = DMSetField(dmCoord,0,NULL,(PetscObject)fe);CHKERRQ(ierr);
             ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
diff --git a/src/dm/impls/plex/examples/tests/ex23.c b/src/dm/impls/plex/examples/tests/ex23.c
index f1d501fb014..5932c4d3b67 100644
--- a/src/dm/impls/plex/examples/tests/ex23.c
+++ b/src/dm/impls/plex/examples/tests/ex23.c
@@ -6,10 +6,13 @@ static char help[] = "Test for function and field projection\n\n";
 typedef struct {
   PetscInt  dim;         /* The topological mesh dimension */
   PetscBool cellSimplex; /* Flag for simplices */
-  PetscBool submesh;     /* Try with submesh */
+  PetscBool multifield;  /* Different numbers of input and output fields */
+  PetscBool subdomain;   /* Try with a volumetric submesh */
+  PetscBool submesh;     /* Try with a boundary submesh */
   PetscBool auxfield;    /* Try with auxiliary fields */
 } AppCtx;
 
+/* (x + y)*dim + d */
 static PetscErrorCode linear(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
 {
   PetscInt c;
@@ -17,15 +20,25 @@ static PetscErrorCode linear(PetscInt dim, PetscReal time, const PetscReal x[],
   return 0;
 }
 
+/* {x, y, z} */
+static PetscErrorCode linear2(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
+{
+  PetscInt c;
+  for (c = 0; c < Nc; ++c) u[c] = x[c];
+  return 0;
+}
+
+/* {u_x, u_y, u_z} */
 static void linear_vector(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
 {
   PetscInt d;
-  for (d = uOff[0]; d < uOff[1]; ++d) f[d-uOff[0]] = u[d];
+  for (d = 0; d < uOff[1]-uOff[0]; ++d) f[d] = u[d+uOff[0]];
 }
 
+/* p */
 static void linear_scalar(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
@@ -34,6 +47,18 @@ static void linear_scalar(PetscInt dim, PetscInt Nf, PetscInt NfAux,
   f[0] = u[uOff[1]];
 }
 
+/* {div u, p^2} */
+static void divergence_sq(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+                          const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+                          const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+                          PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
+{
+  PetscInt d;
+  f[0] = 0.0;
+  for (d = 0; d < dim; ++d) f[0] += u_x[uOff_x[0]+d*dim+d];
+  f[1] = PetscSqr(u[uOff[1]]);
+}
+
 static PetscErrorCode ProcessOptions(AppCtx *options)
 {
   PetscErrorCode ierr;
@@ -41,13 +66,17 @@ static PetscErrorCode ProcessOptions(AppCtx *options)
   PetscFunctionBegin;
   options->dim         = 2;
   options->cellSimplex = PETSC_TRUE;
+  options->multifield  = PETSC_FALSE;
+  options->subdomain   = PETSC_FALSE;
   options->submesh     = PETSC_FALSE;
   options->auxfield    = PETSC_FALSE;
 
   ierr = PetscOptionsBegin(PETSC_COMM_SELF, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
   ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex23.c", options->dim, &options->dim, NULL,1,3);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-cellSimplex", "Flag for simplices", "ex23.c", options->cellSimplex, &options->cellSimplex, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBool("-submesh", "Flag for trying submesh", "ex23.c", options->submesh, &options->submesh, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-multifield", "Flag for trying different numbers of input/output fields", "ex23.c", options->multifield, &options->multifield, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-subdomain", "Flag for trying volumetric submesh", "ex23.c", options->subdomain, &options->subdomain, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-submesh", "Flag for trying boundary submesh", "ex23.c", options->submesh, &options->submesh, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-auxfield", "Flag for trying auxiliary fields", "ex23.c", options->auxfield, &options->auxfield, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();CHKERRQ(ierr);
   PetscFunctionReturn(0);
@@ -96,6 +125,42 @@ static PetscErrorCode SetupDiscretization(DM dm, PetscInt dim, PetscBool simplex
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode SetupOutputDiscretization(DM dm, PetscInt dim, PetscBool simplex, AppCtx *user)
+{
+  PetscFE        fe;
+  MPI_Comm       comm;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
+  ierr = PetscFECreateDefault(comm, dim, dim, simplex, "output_", -1, &fe);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) fe, "output");CHKERRQ(ierr);
+  ierr = DMSetField(dm, 0, NULL, (PetscObject) fe);CHKERRQ(ierr);
+  ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
+  ierr = DMCreateDS(dm);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode CreateSubdomainMesh(DM dm, DMLabel *domLabel, DM *subdm, AppCtx *user)
+{
+  DMLabel        label;
+  PetscInt       dim, cStart, cEnd, c;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMLabelCreate(PETSC_COMM_SELF, "subdomain", &label);CHKERRQ(ierr);
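+  /* put the upper half of the cell range into the subdomain label */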
+  for (c = cStart + (cEnd-cStart)/2; c < cEnd; ++c) {ierr = DMLabelSetValue(label, c, 1);CHKERRQ(ierr);}
+  ierr = DMPlexFilter(dm, label, 1, subdm);CHKERRQ(ierr);
+  ierr = DMGetDimension(*subdm, &dim);CHKERRQ(ierr);
+  ierr = SetupDiscretization(*subdm, dim, user->cellSimplex, user);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) *subdm, "subdomain");CHKERRQ(ierr);
+  ierr = DMViewFromOptions(*subdm, NULL, "-sub_dm_view");CHKERRQ(ierr);
+  if (domLabel) *domLabel = label;
+  else          {ierr = DMLabelDestroy(&label);CHKERRQ(ierr);}
+  PetscFunctionReturn(0);
+}
+
 static PetscErrorCode CreateBoundaryMesh(DM dm, DMLabel *bdLabel, DM *subdm, AppCtx *user)
 {
   DMLabel        label;
@@ -112,7 +177,7 @@ static PetscErrorCode CreateBoundaryMesh(DM dm, DMLabel *bdLabel, DM *subdm, App
   ierr = PetscObjectSetName((PetscObject) *subdm, "boundary");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*subdm, NULL, "-sub_dm_view");CHKERRQ(ierr);
   if (bdLabel) *bdLabel = label;
-  else         ierr = DMLabelDestroy(&label);CHKERRQ(ierr);
+  else         {ierr = DMLabelDestroy(&label);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
@@ -136,7 +201,7 @@ static PetscErrorCode CreateAuxiliaryData(DM dm, DM *auxdm, Vec *la, AppCtx *use
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode TestFunctionProjection(DM dm, DM auxdm, DMLabel label, Vec la, const char name[], AppCtx *user)
+static PetscErrorCode TestFunctionProjection(DM dm, DM dmAux, DMLabel label, Vec la, const char name[], AppCtx *user)
 {
   PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
   Vec               x, lx;
@@ -146,8 +211,8 @@ static PetscErrorCode TestFunctionProjection(DM dm, DM auxdm, DMLabel label, Vec
   PetscErrorCode    ierr;
 
   PetscFunctionBeginUser;
-  if (auxdm) {
-    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) auxdm);CHKERRQ(ierr);
+  if (dmAux) {
+    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) dmAux);CHKERRQ(ierr);
     ierr = PetscObjectCompose((PetscObject) dm, "A", (PetscObject) la);CHKERRQ(ierr);
   }
   ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
@@ -170,14 +235,14 @@ static PetscErrorCode TestFunctionProjection(DM dm, DM auxdm, DMLabel label, Vec
   ierr = VecViewFromOptions(lx, NULL, "-local_func_view");CHKERRQ(ierr);
   ierr = DMRestoreLocalVector(dm, &lx);CHKERRQ(ierr);
   ierr = PetscFree(funcs);CHKERRQ(ierr);
-  if (auxdm) {
+  if (dmAux) {
     ierr = PetscObjectCompose((PetscObject) dm, "dmAux", NULL);CHKERRQ(ierr);
     ierr = PetscObjectCompose((PetscObject) dm, "A", NULL);CHKERRQ(ierr);
   }
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode TestFieldProjection(DM dm, DM auxdm, DMLabel label, Vec la, const char name[], AppCtx *user)
+static PetscErrorCode TestFieldProjection(DM dm, DM dmAux, DMLabel label, Vec la, const char name[], AppCtx *user)
 {
   PetscErrorCode (**afuncs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
   void           (**funcs)(PetscInt, PetscInt, PetscInt,
@@ -191,8 +256,8 @@ static PetscErrorCode TestFieldProjection(DM dm, DM auxdm, DMLabel label, Vec la
   PetscErrorCode    ierr;
 
   PetscFunctionBeginUser;
-  if (auxdm) {
-    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) auxdm);CHKERRQ(ierr);
+  if (dmAux) {
+    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) dmAux);CHKERRQ(ierr);
     ierr = PetscObjectCompose((PetscObject) dm, "A", (PetscObject) la);CHKERRQ(ierr);
   }
   ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
@@ -217,7 +282,55 @@ static PetscErrorCode TestFieldProjection(DM dm, DM auxdm, DMLabel label, Vec la
   ierr = DMRestoreLocalVector(dm, &lx);CHKERRQ(ierr);
   ierr = DMRestoreLocalVector(dm, &lu);CHKERRQ(ierr);
   ierr = PetscFree2(funcs, afuncs);CHKERRQ(ierr);
-  if (auxdm) {
+  if (dmAux) {
+    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", NULL);CHKERRQ(ierr);
+    ierr = PetscObjectCompose((PetscObject) dm, "A", NULL);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode TestFieldProjectionMultiple(DM dm, DM dmIn, DM dmAux, DMLabel label, Vec la, const char name[], AppCtx *user)
+{
+  PetscErrorCode (**afuncs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
+  void           (**funcs)(PetscInt, PetscInt, PetscInt,
+                           const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
+                           const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
+                           PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]);
+  Vec               lx, lu;
+  PetscInt          Nf, NfIn;
+  PetscInt          val[1] = {1};
+  char              lname[PETSC_MAX_PATH_LEN];
+  PetscErrorCode    ierr;
+
+  PetscFunctionBeginUser;
+  if (dmAux) {
+    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) dmAux);CHKERRQ(ierr);
+    ierr = PetscObjectCompose((PetscObject) dm, "A", (PetscObject) la);CHKERRQ(ierr);
+  }
+  ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
+  ierr = DMGetNumFields(dmIn, &NfIn);CHKERRQ(ierr);
+  ierr = PetscMalloc2(Nf, &funcs, NfIn, &afuncs);CHKERRQ(ierr);
+  funcs[0]  = divergence_sq;
+  afuncs[0] = linear2;
+  afuncs[1] = linear;
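+  /* project the input fields (given by afuncs) into dmIn, then project the derived field {div u, p^2} into dm */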
+  ierr = DMGetLocalVector(dmIn, &lu);CHKERRQ(ierr);
+  ierr = PetscStrcpy(lname, "Local MultiField Input ");CHKERRQ(ierr);
+  ierr = PetscStrcat(lname, name);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) lu, lname);CHKERRQ(ierr);
+  if (!label) {ierr = DMProjectFunctionLocal(dmIn, 0.0, afuncs, NULL, INSERT_VALUES, lu);CHKERRQ(ierr);}
+  else        {ierr = DMProjectFunctionLabelLocal(dmIn, 0.0, label, 1, val, 0, NULL, afuncs, NULL, INSERT_VALUES, lu);CHKERRQ(ierr);}
+  ierr = VecViewFromOptions(lu, NULL, "-local_input_view");CHKERRQ(ierr);
+  ierr = DMGetLocalVector(dm, &lx);CHKERRQ(ierr);
+  ierr = PetscStrcpy(lname, "Local MultiField ");CHKERRQ(ierr);
+  ierr = PetscStrcat(lname, name);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) lx, lname);CHKERRQ(ierr);
+  if (!label) {ierr = DMProjectFieldLocal(dm, 0.0, lu, funcs, INSERT_VALUES, lx);CHKERRQ(ierr);}
+  else        {ierr = DMProjectFieldLabelLocal(dm, 0.0, label, 1, val, 0, NULL, lu, funcs, INSERT_VALUES, lx);CHKERRQ(ierr);}
+  ierr = VecViewFromOptions(lx, NULL, "-local_field_view");CHKERRQ(ierr);
+  ierr = DMRestoreLocalVector(dm, &lx);CHKERRQ(ierr);
+  ierr = DMRestoreLocalVector(dmIn, &lu);CHKERRQ(ierr);
+  ierr = PetscFree2(funcs, afuncs);CHKERRQ(ierr);
+  if (dmAux) {
     ierr = PetscObjectCompose((PetscObject) dm, "dmAux", NULL);CHKERRQ(ierr);
     ierr = PetscObjectCompose((PetscObject) dm, "A", NULL);CHKERRQ(ierr);
   }
@@ -236,8 +349,17 @@ int main(int argc, char **argv)
   ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr);
   ierr = SetupDiscretization(dm, user.dim, user.cellSimplex, &user);CHKERRQ(ierr);
   /* Volumetric Mesh Projection */
-  ierr = TestFunctionProjection(dm, NULL, NULL, NULL, "Volumetric Primary", &user);CHKERRQ(ierr);
-  ierr = TestFieldProjection(dm, NULL, NULL, NULL, "Volumetric Primary", &user);CHKERRQ(ierr);
+  if (!user.multifield) {
+    ierr = TestFunctionProjection(dm, NULL, NULL, NULL, "Volumetric Primary", &user);CHKERRQ(ierr);
+    ierr = TestFieldProjection(dm, NULL, NULL, NULL, "Volumetric Primary", &user);CHKERRQ(ierr);
+  } else {
+    DM dmOut;
+
+    ierr = DMClone(dm, &dmOut);CHKERRQ(ierr);
+    ierr = SetupOutputDiscretization(dmOut, user.dim, user.cellSimplex, &user);CHKERRQ(ierr);
+    ierr = TestFieldProjectionMultiple(dmOut, dm, NULL, NULL, NULL, "Volumetric Primary", &user);CHKERRQ(ierr);
+    ierr = DMDestroy(&dmOut);CHKERRQ(ierr);
+  }
   if (user.auxfield) {
     /* Volumetric Mesh Projection with Volumetric Data */
     ierr = CreateAuxiliaryData(dm, &auxdm, &la, &user);CHKERRQ(ierr);
@@ -251,6 +373,36 @@ int main(int argc, char **argv)
     ierr = DMRestoreLocalVector(dm, &la);CHKERRQ(ierr);
     ierr = DMDestroy(&auxdm);CHKERRQ(ierr);
   }
+  if (user.subdomain) {
+    DMLabel domLabel;
+
+    /* Subdomain Mesh Projection */
+    ierr = CreateSubdomainMesh(dm, &domLabel, &subdm, &user);CHKERRQ(ierr);
+    ierr = TestFunctionProjection(subdm, NULL, NULL, NULL, "Subdomain Primary", &user);CHKERRQ(ierr);
+    ierr = TestFieldProjection(subdm, NULL, NULL, NULL, "Subdomain Primary", &user);CHKERRQ(ierr);
+    if (user.auxfield) {
+      /* Subdomain Mesh Projection with Subdomain Data */
+      ierr = CreateAuxiliaryData(subdm, &auxdm, &la, &user);CHKERRQ(ierr);
+      ierr = TestFunctionProjection(subdm, auxdm, NULL, la, "Subdomain Primary and Subdomain Auxiliary", &user);CHKERRQ(ierr);
+      ierr = TestFieldProjection(subdm, auxdm, NULL, la, "Subdomain Primary and Subdomain Auxiliary", &user);CHKERRQ(ierr);
+      ierr = VecDestroy(&la);CHKERRQ(ierr);
+      ierr = DMDestroy(&auxdm);CHKERRQ(ierr);
+      /* Subdomain Mesh Projection with Volumetric Data */
+      ierr = CreateAuxiliaryData(dm, &auxdm, &la, &user);CHKERRQ(ierr);
+      ierr = TestFunctionProjection(subdm, auxdm, NULL, la, "Subdomain Primary and Volumetric Auxiliary", &user);CHKERRQ(ierr);
+      ierr = TestFieldProjection(subdm, auxdm, NULL, la, "Subdomain Primary and Volumetric Auxiliary", &user);CHKERRQ(ierr);
+      ierr = VecDestroy(&la);CHKERRQ(ierr);
+      ierr = DMDestroy(&auxdm);CHKERRQ(ierr);
+      /* Volumetric Mesh Projection with Subdomain Data */
+      ierr = CreateAuxiliaryData(subdm, &auxdm, &la, &user);CHKERRQ(ierr);
+      ierr = TestFunctionProjection(subdm, auxdm, domLabel, la, "Volumetric Primary and Subdomain Auxiliary", &user);CHKERRQ(ierr);
+      ierr = TestFieldProjection(subdm, auxdm, domLabel, la, "Volumetric Primary and Subdomain Auxiliary", &user);CHKERRQ(ierr);
+      ierr = VecDestroy(&la);CHKERRQ(ierr);
+      ierr = DMDestroy(&auxdm);CHKERRQ(ierr);
+    }
+    ierr = DMDestroy(&subdm);CHKERRQ(ierr);
+    ierr = DMLabelDestroy(&domLabel);CHKERRQ(ierr);
+  }
   if (user.submesh) {
     DMLabel bdLabel;
 
@@ -286,10 +438,21 @@ int main(int argc, char **argv)
     suffix: 0
     requires: triangle
     args: -dim 2 -func_view -local_func_view -local_input_view -local_field_view
+  test:
+    suffix: mf_0
+    requires: triangle
+    args: -dim 2 -velocity_petscspace_degree 1 -velocity_petscfe_default_quadrature_order 2 \
+         -pressure_petscspace_degree 2 -pressure_petscfe_default_quadrature_order 2 \
+         -multifield -output_petscspace_degree 1 -output_petscfe_default_quadrature_order 2 \
+         -local_input_view -local_field_view
   test:
     suffix: 1
     requires: triangle
     args: -dim 2 -velocity_petscspace_degree 1 -velocity_petscfe_default_quadrature_order 2 -pressure_petscspace_degree 2 -pressure_petscfe_default_quadrature_order 2 -func_view -local_func_view -local_input_view -local_field_view -submesh -auxfield
+  test:
+    suffix: 2
+    requires: triangle
+    args: -dim 2 -velocity_petscspace_degree 1 -velocity_petscfe_default_quadrature_order 2 -pressure_petscspace_degree 2 -pressure_petscfe_default_quadrature_order 2 -func_view -local_func_view -local_input_view -local_field_view -subdomain -auxfield
 
 TEST*/
 
diff --git a/src/dm/impls/plex/examples/tests/ex24.c b/src/dm/impls/plex/examples/tests/ex24.c
index afcfb4eed46..7d49a8f6feb 100644
--- a/src/dm/impls/plex/examples/tests/ex24.c
+++ b/src/dm/impls/plex/examples/tests/ex24.c
@@ -1,4 +1,4 @@
-static char help[] = "Test that MatPartitioning and PetscPartitioner interfaces to parmetis are equivalent - using PETSCPARTITIONERMATPARTITIONING\n\n";
+static char help[] = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
 static char FILENAME[] = "ex24.c";
 
 #include <petscdmplex.h>
@@ -16,6 +16,8 @@ typedef struct {
   PetscBool simplex;                      /* Use simplices or hexes */
   PetscBool interpolate;                  /* Interpolate mesh */
   PetscBool compare_is;                   /* Compare ISs and PetscSections */
+  PetscBool compare_dm;                   /* Compare DM */
+  PetscBool tpw;                          /* Use target partition weights */
   char      filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
   char      partitioning[64];
   char      repartitioning[64];
@@ -25,17 +27,18 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 {
   PetscInt dim;
   PetscBool repartition = PETSC_TRUE;
-  PetscBool flg = PETSC_TRUE;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   options->compare_is   = PETSC_FALSE;
+  options->compare_dm   = PETSC_FALSE;
   options->dim          = 3;
   options->simplex      = PETSC_TRUE;
   options->interpolate  = PETSC_FALSE;
   options->filename[0]  = '\0';
   ierr = PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");CHKERRQ(ierr);
   ierr = PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", FILENAME, options->dim, &options->dim, NULL,1,3);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-simplex", "Use simplices if true, otherwise hexes", FILENAME, options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-interpolate", "Interpolate the mesh", FILENAME, options->interpolate, &options->interpolate, NULL);CHKERRQ(ierr);
@@ -43,6 +46,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   options->faces[0] = 1; options->faces[1] = 1; options->faces[2] = 1;
   dim = options->dim;
   ierr = PetscOptionsIntArray("-faces", "Number of faces per dimension", FILENAME, options->faces, &dim, NULL);CHKERRQ(ierr);
+  if (dim) options->dim = dim;
   ierr = PetscStrncpy(options->partitioning,MATPARTITIONINGPARMETIS,64);CHKERRQ(ierr);
   ierr = PetscOptionsString("-partitioning","The mat partitioning type to test","None",options->partitioning, options->partitioning,64,NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL);CHKERRQ(ierr);
@@ -52,10 +56,7 @@ static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
   } else {
     options->repartitioning[0] = '\0';
   }
-  if (dim) options->dim = dim;
-  /* Currently this is here only to always consume this option. It is actually queried in PTScotch_PartGraph_{Seq,MPI} functions. */
-  ierr = PetscOptionsBool("-petscpartititoner_ptscotch_vertex_weight", "Should PetscPartitionerPTScotch specify the vertex  weights to Scotch (0 to yield the same results as MatPartitioningPTScotch)?",  FILENAME, flg, &flg, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBool("-mat_partitioning_ptscotch_proc_weight",    "Should MatPartitioningPTScotch  specify the process weights to Scotch (0 to yield the same results as PetscPartitionerPTScotch)?", FILENAME, flg, &flg, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();
   PetscFunctionReturn(0);
 }
@@ -100,7 +101,9 @@ int main(int argc, char **argv)
   PetscPartitioner part1, part2;
   AppCtx         user;
   IS             is1=NULL, is2=NULL;
-  PetscSection   s1=NULL, s2=NULL;
+  IS             is1g, is2g;
+  PetscSection   s1=NULL, s2=NULL, tpws = NULL;
+  PetscInt       i;
   PetscBool      flg;
   PetscErrorCode ierr;
   PetscMPIInt    size;
@@ -112,6 +115,19 @@ int main(int argc, char **argv)
   ierr = CreateMesh(comm, &user, &dm1);CHKERRQ(ierr);
   ierr = CreateMesh(comm, &user, &dm2);CHKERRQ(ierr);
 
+  if (user.tpw) {
+    ierr = PetscSectionCreate(comm, &tpws);CHKERRQ(ierr);
+    ierr = PetscSectionSetChart(tpws, 0, size);CHKERRQ(ierr);
+    for (i=0;i 1) { /* test zero tpw entry */
+      ierr = PetscSectionSetDof(tpws, 0, 0);CHKERRQ(ierr);
+    }
+    ierr = PetscSectionSetUp(tpws);CHKERRQ(ierr);
+  }
+
   /* partition dm1 using PETSCPARTITIONERPARMETIS */
   ierr = ScotchResetRandomSeed();CHKERRQ(ierr);
   ierr = DMPlexGetPartitioner(dm1, &part1);CHKERRQ(ierr);
@@ -119,7 +135,7 @@ int main(int argc, char **argv)
   ierr = PetscPartitionerSetType(part1, user.partitioning);CHKERRQ(ierr);
   ierr = PetscPartitionerSetFromOptions(part1);CHKERRQ(ierr);
   ierr = PetscSectionCreate(comm, &s1);CHKERRQ(ierr);
-  ierr = PetscPartitionerPartition(part1, dm1, s1, &is1);CHKERRQ(ierr);
+  ierr = PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1);CHKERRQ(ierr);
 
   /* partition dm2 using PETSCPARTITIONERMATPARTITIONING with MATPARTITIONINGPARMETIS */
   ierr = ScotchResetRandomSeed();CHKERRQ(ierr);
@@ -130,20 +146,23 @@ int main(int argc, char **argv)
   ierr = MatPartitioningSetType(mp, user.partitioning);CHKERRQ(ierr);
   ierr = PetscPartitionerSetFromOptions(part2);CHKERRQ(ierr);
   ierr = PetscSectionCreate(comm, &s2);CHKERRQ(ierr);
-  ierr = PetscPartitionerPartition(part2, dm2, s2, &is2);CHKERRQ(ierr);
+  ierr = PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2);CHKERRQ(ierr);
 
+  ierr = ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g);CHKERRQ(ierr);
+  ierr = ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g);CHKERRQ(ierr);
+  ierr = ISViewFromOptions(is1g, NULL, "-seq_is1_view");CHKERRQ(ierr);
+  ierr = ISViewFromOptions(is2g, NULL, "-seq_is2_view");CHKERRQ(ierr);
   /* compare the two ISs */
   if (user.compare_is) {
-    IS is1g, is2g;
-    ierr = ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g);CHKERRQ(ierr);
-    ierr = ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g);CHKERRQ(ierr);
     ierr = ISEqualUnsorted(is1g, is2g, &flg);CHKERRQ(ierr);
     if (!flg) PetscPrintf(comm, "ISs are not equal with type %s with size %d.\n",user.partitioning,size);
-    ierr = ISDestroy(&is1g);CHKERRQ(ierr);
-    ierr = ISDestroy(&is2g);CHKERRQ(ierr);
   }
+  ierr = ISDestroy(&is1g);CHKERRQ(ierr);
+  ierr = ISDestroy(&is2g);CHKERRQ(ierr);
 
   /* compare the two PetscSections */
+  ierr = PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view");CHKERRQ(ierr);
+  ierr = PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view");CHKERRQ(ierr);
   if (user.compare_is) {
     ierr = PetscSectionCompare(s1, s2, &flg);CHKERRQ(ierr);
     if (!flg) PetscPrintf(comm, "PetscSections are not equal with %s with size %d.\n",user.partitioning,size);
@@ -156,6 +175,7 @@ int main(int argc, char **argv)
   ierr = DMPlexDistribute(dm2, 0, NULL, &dmdist2);CHKERRQ(ierr);
 
   /* cleanup */
+  ierr = PetscSectionDestroy(&tpws);CHKERRQ(ierr);
   ierr = PetscSectionDestroy(&s1);CHKERRQ(ierr);
   ierr = PetscSectionDestroy(&s2);CHKERRQ(ierr);
   ierr = ISDestroy(&is1);CHKERRQ(ierr);
@@ -166,8 +186,11 @@ int main(int argc, char **argv)
   /* if distributed DMs are NULL (sequential case), then quit */
   if (!dmdist1 && !dmdist2) return ierr;
 
+  ierr = DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view");CHKERRQ(ierr);
+  ierr = DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view");CHKERRQ(ierr);
+
   /* compare the two distributed DMs */
-  if (!user.interpolate) {
+  if (user.compare_dm) {
     ierr = DMPlexEqual(dmdist1, dmdist2, &flg);CHKERRQ(ierr);
     if (!flg) PetscPrintf(comm, "Distributed DMs are not equal %s with size %d.\n",user.partitioning,size);
   }
@@ -175,16 +198,26 @@ int main(int argc, char **argv)
   /* if repartitioning is disabled, then quit */
   if (user.repartitioning[0] == '\0') return ierr;
 
-  /* repartition distributed DM dmdist1 using PETSCPARTITIONERPARMETIS */
+  if (user.tpw) {
+    ierr = PetscSectionCreate(comm, &tpws);CHKERRQ(ierr);
+    ierr = PetscSectionSetChart(tpws, 0, size);CHKERRQ(ierr);
+    for (i=0;i VE
       call DMPlexGetJoin(dm, size, pVE, nJoin, ierr);CHKERRA(ierr)
-      write(*,*) 'Join of',pVE,'is',nJoin
+      write(*,1001) 'Join of',pVE
+      write(*,1002) '  is',nJoin
       call DMPlexRestoreJoin(dm, size, pVE, nJoin, ierr);CHKERRA(ierr)
       size  = 2
       VE(1) = 9
       VE(2) = 7
       pVE => VE
       call DMPlexGetJoin(dm, size, pVE, nJoin, ierr);CHKERRA(ierr)
-      write(*,*) 'Join of',pVE,'is',nJoin
-      call DMPlexRestoreJoin(dm, size, pVE, nJoin, ierr);CHKERRA(ierr)
+      write(*,1001) 'Join of',pVE
+ 1001 format (a,10i5)
+       write(*,1002) '  is',nJoin
+ 1002  format (a,10i5)
+     call DMPlexRestoreJoin(dm, size, pVE, nJoin, ierr);CHKERRA(ierr)
 !     Test Full Join
       size  = 3
       EC(1) = 3
@@ -124,7 +129,8 @@ program main
       EC(3) = 5
       pEC => EC
       call DMPlexGetFullJoin(dm, size, pEC, nJoin, ierr);CHKERRA(ierr)
-      write(*,*) 'Full Join of',pEC,'is',nJoin
+      write(*,1001) 'Full Join of',pEC
+      write(*,1002) '  is',nJoin
       call DMPlexRestoreJoin(dm, size, pEC, nJoin, ierr);CHKERRA(ierr)
 !     Test Meet
       size  = 2
@@ -132,14 +138,16 @@ program main
       VE(2) = 1
       pVE => VE
       call DMPlexGetMeet(dm, size, pVE, nMeet, ierr);CHKERRA(ierr)
-      write(*,*) 'Meet of',pVE,'is',nMeet
+      write(*,1001) 'Meet of',pVE
+      write(*,1002) '  is',nMeet
       call DMPlexRestoreMeet(dm, size, pVE, nMeet, ierr);CHKERRA(ierr)
       size  = 2
       VE(1) = 6
       VE(2) = 7
       pVE => VE
       call DMPlexGetMeet(dm, size, pVE, nMeet, ierr);CHKERRA(ierr)
-      write(*,*) 'Meet of',pVE,'is',nMeet
+      write(*,1001) 'Meet of',pVE
+      write(*,1002) '  is',nMeet
       call DMPlexRestoreMeet(dm, size, pVE, nMeet, ierr);CHKERRA(ierr)
 
       call DMDestroy(dm, ierr);CHKERRA(ierr)
diff --git a/src/dm/impls/plex/examples/tests/ex3.c b/src/dm/impls/plex/examples/tests/ex3.c
index 276039476b0..6c733f9ce06 100644
--- a/src/dm/impls/plex/examples/tests/ex3.c
+++ b/src/dm/impls/plex/examples/tests/ex3.c
@@ -977,6 +977,7 @@ int main(int argc, char **argv)
     suffix: p1_2d_3
     requires: triangle pragmatic
     args: -petscspace_degree 1 -qorder 1 -dm_plex_hash_location -convergence -conv_refine 0
+    filter: grep -v DEBUG
   test:
     suffix: p1_2d_4
     requires: triangle pragmatic
@@ -1003,6 +1004,7 @@ int main(int argc, char **argv)
     suffix: p1_3d_3
     requires: ctetgen pragmatic
     args: -dim 3 -petscspace_degree 1 -qorder 1 -dm_plex_hash_location -convergence -conv_refine 0
+    filter: grep -v DEBUG
   test:
     suffix: p1_3d_4
     requires: ctetgen pragmatic
@@ -1029,6 +1031,7 @@ int main(int argc, char **argv)
     suffix: p2_2d_3
     requires: triangle pragmatic
     args: -petscspace_degree 2 -qorder 2 -dm_plex_hash_location -convergence -conv_refine 0
+    filter: grep -v DEBUG
   test:
     suffix: p2_2d_4
     requires: triangle pragmatic
@@ -1055,6 +1058,7 @@ int main(int argc, char **argv)
     suffix: p2_3d_3
     requires: ctetgen pragmatic
     args: -dim 3 -petscspace_degree 2 -qorder 2 -dm_plex_hash_location -convergence -conv_refine 0
+    filter: grep -v DEBUG
   test:
     suffix: p2_3d_4
     requires: ctetgen pragmatic
@@ -1159,6 +1163,7 @@ int main(int argc, char **argv)
     suffix: p3_2d_4
     requires: triangle pragmatic
     args: -petscspace_degree 3 -qorder 3 -dm_plex_hash_location -convergence -conv_refine 0
+    filter: grep -v DEBUG
   test:
     suffix: p3_2d_5
     requires: triangle pragmatic
diff --git a/src/dm/impls/plex/examples/tests/ex31.c b/src/dm/impls/plex/examples/tests/ex31.c
index 4045f50c8bd..fa6d44c061d 100644
--- a/src/dm/impls/plex/examples/tests/ex31.c
+++ b/src/dm/impls/plex/examples/tests/ex31.c
@@ -86,7 +86,7 @@ int main(int argc, char **argv)
   ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
   ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
   ierr = PetscSectionCreate(comm, &s);CHKERRQ(ierr);
-  ierr = PetscPartitionerPartition(part, dm, s, &is);CHKERRQ(ierr);
+  ierr = PetscPartitionerDMPlexPartition(part, dm, NULL, s, &is);CHKERRQ(ierr);
 
   ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
   if (dmdist) {
diff --git a/src/dm/impls/plex/examples/tests/ex33.c b/src/dm/impls/plex/examples/tests/ex33.c
new file mode 100644
index 00000000000..3c914e9df57
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/ex33.c
@@ -0,0 +1,100 @@
+static char help[] = "Tests PetscPartitioner.\n\n";
+
+#include <petscpartitioner.h>
+
+int main(int argc, char **argv)
+{
+  PetscErrorCode   ierr;
+  PetscPartitioner p;
+  PetscSection     partSection;
+  IS               partition,is;
+  PetscMPIInt      size,rank;
+  PetscInt         npar;
+  PetscInt         nv = 4;
+  PetscInt         vv[5] = {0,2,4,6,8};
+  PetscInt         vadj[8] = {3,1,0,2,1,3,2,0};
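+  /* vv/vadj is the CSR adjacency of a 4-vertex cycle: vertex i is connected to vertices (i-1)%4 and (i+1)%4 */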
+
+  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
+  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
+  npar = size;
+  ierr = PetscOptionsGetInt(NULL,NULL,"-nparts",&npar,NULL);CHKERRQ(ierr);
+
+  /* create PetscPartitioner */
+  ierr = PetscSectionCreate(PETSC_COMM_WORLD,&partSection);CHKERRQ(ierr);
+  ierr = PetscPartitionerCreate(PETSC_COMM_WORLD,&p);CHKERRQ(ierr);
+  ierr = PetscPartitionerSetType(p,PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
+  ierr = PetscPartitionerSetFromOptions(p);CHKERRQ(ierr);
+
+  /* test partitioning an empty graph */
+  ierr = PetscPartitionerPartition(p,npar,0,NULL,NULL,NULL,NULL,partSection,&partition);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject)partSection,"NULL SECTION");
+  ierr = PetscSectionView(partSection,NULL);CHKERRQ(ierr);
+  ierr = ISOnComm(partition,PETSC_COMM_WORLD,PETSC_USE_POINTER,&is);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject)is,"NULL PARTITION");
+  ierr = ISView(is,NULL);CHKERRQ(ierr);
+  ierr = ISDestroy(&is);CHKERRQ(ierr);
+  ierr = ISDestroy(&partition);CHKERRQ(ierr);
+
+  /* test partitioning a graph on one process only (the last rank, not rank 0) */
+  if (rank == size - 1) {
+    ierr = PetscPartitionerPartition(p,npar,nv,vv,vadj,NULL,NULL,partSection,&partition);CHKERRQ(ierr);
+  } else {
+    ierr = PetscPartitionerPartition(p,npar,0,NULL,NULL,NULL,NULL,partSection,&partition);CHKERRQ(ierr);
+  }
+  ierr = PetscObjectSetName((PetscObject)partSection,"SEQ SECTION");
+  ierr = PetscSectionView(partSection,NULL);CHKERRQ(ierr);
+  ierr = ISOnComm(partition,PETSC_COMM_WORLD,PETSC_USE_POINTER,&is);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject)is,"SEQ PARTITION");
+  ierr = ISView(is,NULL);CHKERRQ(ierr);
+  ierr = ISDestroy(&is);CHKERRQ(ierr);
+  ierr = ISDestroy(&partition);CHKERRQ(ierr);
+
+  /* test partitioning a graph on a subset of the processes only */
+  if (rank%2) {
+    ierr = PetscPartitionerPartition(p,npar,0,NULL,NULL,NULL,NULL,partSection,&partition);CHKERRQ(ierr);
+  } else {
+    PetscInt i,totv = nv*((size+1)/2),*pvadj;
+
+    ierr = PetscMalloc1(2*nv,&pvadj);CHKERRQ(ierr);
+    for (i = 0; i < nv; i++) {
+      pvadj[2*i]   = (nv*(rank/2) + totv + i - 1)%totv;
+      pvadj[2*i+1] = (nv*(rank/2) + totv + i + 1)%totv;
+    }
+    ierr = PetscPartitionerPartition(p,npar,nv,vv,pvadj,NULL,NULL,partSection,&partition);CHKERRQ(ierr);
+    ierr = PetscFree(pvadj);CHKERRQ(ierr);
+  }
+  ierr = PetscObjectSetName((PetscObject)partSection,"PARVOID SECTION");
+  ierr = PetscSectionView(partSection,NULL);CHKERRQ(ierr);
+  ierr = ISOnComm(partition,PETSC_COMM_WORLD,PETSC_USE_POINTER,&is);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject)is,"PARVOID PARTITION");
+  ierr = ISView(is,NULL);CHKERRQ(ierr);
+  ierr = ISDestroy(&is);CHKERRQ(ierr);
+  ierr = ISDestroy(&partition);CHKERRQ(ierr);
+
+  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
+  ierr = PetscPartitionerDestroy(&p);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+/*TEST
+
+  test:
+    suffix: simple
+    nsize: {{1 2 3}separate output}
+    args: -nparts {{1 2 3}separate output} -petscpartitioner_type simple -petscpartitioner_view -petscpartitioner_view_graph
+
+  test:
+    requires: parmetis
+    suffix: parmetis
+    nsize: {{1 2 3}separate output}
+    args: -nparts {{1 2 3}separate output} -petscpartitioner_type parmetis -petscpartitioner_view -petscpartitioner_view_graph
+
+  test:
+    requires: ptscotch
+    suffix: ptscotch
+    nsize: {{1 2 3}separate output}
+    args: -nparts {{1 2 3}separate output} -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_view_graph
+
+TEST*/
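
The vv/vadj pair in this test is a CSR description of the process-local graph handed to the graph-based PetscPartitionerPartition(): vv holds nv+1 row offsets and vadj the concatenated neighbour lists, so vertex v has neighbours vadj[vv[v]] .. vadj[vv[v+1]-1]. A sketch of the layout used above (a 4-vertex cycle), with the two NULL arguments assumed to be the optional vertex-weight and target-weight sections:

  PetscInt nv      = 4;
  PetscInt vv[5]   = {0, 2, 4, 6, 8};          /* CSR row offsets, nv+1 entries */
  PetscInt vadj[8] = {3, 1, 0, 2, 1, 3, 2, 0}; /* 0->{3,1} 1->{0,2} 2->{1,3} 3->{2,0} */
  ierr = PetscPartitionerPartition(p, npar, nv, vv, vadj, NULL, NULL, partSection, &partition);CHKERRQ(ierr);
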
diff --git a/src/dm/impls/plex/examples/tests/ex37.c b/src/dm/impls/plex/examples/tests/ex37.c
new file mode 100644
index 00000000000..4759b3d328d
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/ex37.c
@@ -0,0 +1,321 @@
+static const char help[] = "Test of EGADSLite CAD functionality";
+
+#include <petscdmplex.h> /* assumed; the original include names were lost in extraction */
+
+#include <egads.h> /* assumed; original include name lost */
+#include <petsc.h> /* assumed; original include name lost */
+
+typedef struct {
+  char filename[PETSC_MAX_PATH_LEN];
+} AppCtx;
+
+static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  options->filename[0] = '\0';
+
+  ierr = PetscOptionsBegin(comm, "", "EGADSPlex Problem Options", "EGADSLite");CHKERRQ(ierr);
+  ierr = PetscOptionsString("-filename", "The EGADSLite file", "ex9.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEnd();
+  PetscFunctionReturn(0);
+}
+
+int main(int argc, char *argv[])
+{
+  DMLabel        bodyLabel, faceLabel, edgeLabel;
+  PetscInt       cStart, cEnd, c;
+  /* EGADSLite variables */
+  ego            context, model, geom, *bodies, *objs, *nobjs, *mobjs, *lobjs;
+  int            oclass, mtype, nbodies, *senses;
+  int            b;
+  /* PETSc variables */
+  DM             dm;
+  PetscInt       dim = -1, cdim = -1, numCorners = 0, numVertices = 0, numCells = 0;
+  PetscInt      *cells  = NULL;
+  PetscReal     *coords = NULL;
+  MPI_Comm       comm;
+  PetscMPIInt    rank;
+  AppCtx         ctx;
+  PetscErrorCode ierr;
+
+  ierr = PetscInitialize(&argc, &argv, NULL, help); if (ierr) return ierr;
+  comm = PETSC_COMM_WORLD;
+  ierr = ProcessOptions(comm, &ctx);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  if (!rank) {
+    /* Open EGADS file and load EGADS model data */
+    ierr = EG_open(&context);CHKERRQ(ierr);
+    ierr = EG_loadModel(context, 0, ctx.filename, &model);CHKERRQ(ierr);
+
+    /* test bodyTopo functions */
+    ierr = EG_getTopology(model, &geom, &oclass, &mtype, NULL, &nbodies, &bodies, &senses);CHKERRQ(ierr);
+    ierr = PetscPrintf(PETSC_COMM_SELF, " Number of BODIES (nbodies): %d \n", nbodies);CHKERRQ(ierr);
+
+    for (b = 0; b < nbodies; ++b) {
+      ego body = bodies[b];
+      int id, Nsh, Nf, Nl, l, Ne, e, Nv, v;
+
+      /* Output Basic Model Topology */
+      ierr = EG_getBodyTopos(body, NULL, SHELL, &Nsh, &objs);CHKERRQ(ierr);
+      ierr = PetscPrintf(PETSC_COMM_SELF, "   Number of SHELLS: %d \n", Nsh);CHKERRQ(ierr);
+
+      ierr = EG_getBodyTopos(body, NULL, FACE,  &Nf, &objs);CHKERRQ(ierr);
+      ierr = PetscPrintf(PETSC_COMM_SELF, "   Number of FACES: %d \n", Nf);CHKERRQ(ierr);
+
+      ierr = EG_getBodyTopos(body, NULL, LOOP,  &Nl, &lobjs);CHKERRQ(ierr);
+      ierr = PetscPrintf(PETSC_COMM_SELF, "   Number of LOOPS: %d \n", Nl);CHKERRQ(ierr);
+
+      ierr = EG_getBodyTopos(body, NULL, EDGE,  &Ne, &objs);CHKERRQ(ierr);
+      ierr = PetscPrintf(PETSC_COMM_SELF, "   Number of EDGES: %d \n", Ne);CHKERRQ(ierr);
+
+      ierr = EG_getBodyTopos(body, NULL, NODE,  &Nv, &objs);CHKERRQ(ierr);
+      ierr = PetscPrintf(PETSC_COMM_SELF, "   Number of NODES: %d \n", Nv);CHKERRQ(ierr);
+
+      for (l = 0; l < Nl; ++l) {
+        ego loop = lobjs[l];
+
+        id   = EG_indexBodyTopo(body, loop);
+        ierr = PetscPrintf(PETSC_COMM_SELF, "          LOOP ID: %d\n", id);CHKERRQ(ierr);
+
+        /* Get EDGE info associated with the current LOOP */
+        ierr = EG_getTopology(loop, &geom, &oclass, &mtype, NULL, &Ne, &objs, &senses);CHKERRQ(ierr);
+
+        for (e = 0; e < Ne; ++e) {
+          ego edge = objs[e];
+
+          id = EG_indexBodyTopo(body, edge);CHKERRQ(ierr);
+          ierr = PetscPrintf(PETSC_COMM_SELF, "            EDGE ID: %d\n", id);CHKERRQ(ierr);
+
+          double range[4] = {0., 0., 0., 0.};
+          double point[3] = {0., 0., 0.};
+          int    peri;
+
+          ierr = EG_getRange(objs[e], range, &peri);
+          ierr = PetscPrintf(PETSC_COMM_SELF, " Range = %lf, %lf, %lf, %lf \n", range[0], range[1], range[2], range[3]);
+
+          /* Get NODE info associated with the current EDGE */
+          ierr = EG_getTopology(edge, &geom, &oclass, &mtype, NULL, &Nv, &nobjs, &senses);CHKERRQ(ierr);
+
+          for (v = 0; v < Nv; ++v) {
+            ego    vertex = nobjs[v];
+            double limits[4];
+            int    dummy;
+
+            ierr = EG_getTopology(vertex, &geom, &oclass, &mtype, limits, &dummy, &mobjs, &senses);CHKERRQ(ierr);
+            id   = EG_indexBodyTopo(body, vertex);
+            ierr = PetscPrintf(PETSC_COMM_SELF, "              NODE ID: %d \n", id);CHKERRQ(ierr);
+            ierr = PetscPrintf(PETSC_COMM_SELF, "                 (x, y, z) = (%lf, %lf, %lf) \n", limits[0], limits[1], limits[2]);
+
+            point[0] = point[0] + limits[0];
+            point[1] = point[1] + limits[1];
+            point[2] = point[2] + limits[2];
+          }
+        }
+      }
+    }
+
+    /* ---------------------------------------------------------------------------------------------------
+    Generate PETSc Plex
+      Get all NODEs in the model, record coordinates in a correctly formatted array
+      Cycle through bodies, cycle through loops, record NODE IDs in a correctly formatted array */
+
+    /* Calculate cell and vertex sizes */
+    ierr = EG_getTopology(model, &geom, &oclass, &mtype, NULL, &nbodies, &bodies, &senses);CHKERRQ(ierr);
+    numCells    = 0;
+    numVertices = 0;
+    for (b = 0; b < nbodies; ++b) {
+      ego body = bodies[b];
+      int id, Nl, l, Nv, v;
+
+      ierr = EG_getBodyTopos(body, NULL, LOOP, &Nl, &lobjs);CHKERRQ(ierr);
+      ierr = EG_getBodyTopos(body, NULL, NODE, &Nv, &nobjs);CHKERRQ(ierr);
+      for (l = 0; l < Nl; ++l) {
+        ego loop = lobjs[l];
+
+        id = EG_indexBodyTopo(body, loop);
+        /* TODO: Instead of assuming contiguous ids, we could use a hash table */
+        numCells = PetscMax(id, numCells);
+      }
+      for (v = 0; v < Nv; ++v) {
+        ego vertex = nobjs[v];
+
+        id = EG_indexBodyTopo(body, vertex);
+        /* TODO: Instead of assuming contiguous ids, we could use a hash table */
+        numVertices = PetscMax(id, numVertices);
+      }
+    }
+    ierr = PetscPrintf(PETSC_COMM_SELF, "\nPLEX Input Array Checkouts\n");CHKERRQ(ierr);
+    ierr = PetscPrintf(PETSC_COMM_SELF, " Total Number of Unique Cells    = %d \n", numCells);CHKERRQ(ierr);
+    ierr = PetscPrintf(PETSC_COMM_SELF, " Total Number of Unique Vertices = %d \n", numVertices);CHKERRQ(ierr);
+
+    dim        = 2; /* Topological dimension of the surface mesh; assumes 3D models, 2D models not yet handled */
+    cdim       = 3; /* Coordinate (embedding) dimension; assumes 3D models, 2D models not yet handled */
+    numCorners = 3; /* TODO Check number of cell corners from EGADSLite */
+    ierr = PetscMalloc2(numVertices*cdim, &coords, numCells*numCorners, &cells);CHKERRQ(ierr);
+
+    /* Get vertex coordinates */
+    for (b = 0; b < nbodies; ++b) {
+      ego body = bodies[b];
+      int id, Nv, v;
+
+      ierr = EG_getBodyTopos(body, NULL, NODE, &Nv, &nobjs);CHKERRQ(ierr);
+      for (v = 0; v < Nv; ++v) {
+        ego    vertex = nobjs[v];
+        double limits[4];
+        int    dummy;
+
+        ierr = EG_getTopology(vertex, &geom, &oclass, &mtype, limits, &dummy, &mobjs, &senses);CHKERRQ(ierr);
+        id   = EG_indexBodyTopo(body, vertex);CHKERRQ(ierr);
+        coords[(id-1)*cdim+0] = limits[0];
+        coords[(id-1)*cdim+1] = limits[1];
+        coords[(id-1)*cdim+2] = limits[2];
+        ierr = PetscPrintf(PETSC_COMM_SELF, "    Node ID = %d \n", id);
+        ierr = PetscPrintf(PETSC_COMM_SELF, "      (x,y,z) = (%lf, %lf, %lf) \n \n", coords[(id-1)*cdim+0], coords[(id-1)*cdim+1],coords[(id-1)*cdim+2]);
+      }
+    }
+
+    /* Get cell vertices by traversing loops */
+    for (b = 0; b < nbodies; ++b) {
+      ego body = bodies[b];
+      int id, Nl, l;
+
+      ierr = EG_getBodyTopos(body, NULL, LOOP, &Nl, &lobjs);CHKERRQ(ierr);
+      for (l = 0; l < Nl; ++l) {
+        ego loop = lobjs[l];
+        int lid, Ne, e, nc = 0, c;
+
+        lid  = EG_indexBodyTopo(body, loop);CHKERRQ(ierr);
+        ierr = PetscPrintf(PETSC_COMM_SELF, "    LOOP ID: %d \n", lid);CHKERRQ(ierr);
+        ierr = EG_getTopology(loop, &geom, &oclass, &mtype, NULL, &Ne, &objs, &senses);CHKERRQ(ierr);
+
+        for (e = 0; e < Ne; ++e) {
+          ego edge = objs[e];
+          int Nv, v;
+
+          id   = EG_indexBodyTopo(body, edge);
+          ierr = PetscPrintf(PETSC_COMM_SELF, "      EDGE ID: %d \n", id);CHKERRQ(ierr);
+          if (mtype == DEGENERATE) {ierr = PetscPrintf(PETSC_COMM_SELF, "        EDGE %d is DEGENERATE \n", id);CHKERRQ(ierr);}
+          ierr = EG_getTopology(edge, &geom, &oclass, &mtype, NULL, &Nv, &nobjs, &senses);
+
+          /* Add unique vertices to cells, this handles mtype == DEGENERATE fine */
+          for (v = 0; v < Nv; ++v) {
+            ego vertex = nobjs[v];
+
+            id = EG_indexBodyTopo(body, vertex);
+            for (c = 0; c < nc; ++c) if (cells[(lid-1)*numCorners+c] == id-1) break;
+            if (c == nc) cells[(lid-1)*numCorners+nc++] = id-1;
+          }
+        }
+        if (nc != numCorners) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid number of cell corners %D, should be %D", nc, numCorners);
+        ierr = PetscPrintf(PETSC_COMM_SELF, "      LOOP Corner NODEs (");
+        for (c = 0; c < numCorners; ++c) {
+          if (c > 0) {ierr = PetscPrintf(PETSC_COMM_SELF, ", ");}
+          ierr = PetscPrintf(PETSC_COMM_SELF, "%D", cells[(lid-1)*numCorners+c]);
+        }
+        ierr = PetscPrintf(PETSC_COMM_SELF, ")\n");
+      }
+    }
+  }
+  ierr = DMPlexCreateFromCellList(PETSC_COMM_WORLD, dim, numCells, numVertices, numCorners, PETSC_TRUE, cells, cdim, coords, &dm);CHKERRQ(ierr);
+  ierr = PetscFree2(coords, cells);CHKERRQ(ierr);
+  {
+    PetscContainer modelObj;
+
+    ierr = PetscContainerCreate(PETSC_COMM_SELF, &modelObj);CHKERRQ(ierr);
+    ierr = PetscContainerSetPointer(modelObj, model);CHKERRQ(ierr);
+    ierr = PetscObjectCompose((PetscObject) dm, "EGADS Model", (PetscObject) modelObj);CHKERRQ(ierr);
+    ierr = PetscContainerDestroy(&modelObj);CHKERRQ(ierr);
+  }
+  ierr = DMCreateLabel(dm, "EGADS Body ID");CHKERRQ(ierr);
+  ierr = DMGetLabel(dm, "EGADS Body ID", &bodyLabel);CHKERRQ(ierr);
+  ierr = DMCreateLabel(dm, "EGADS Face ID");CHKERRQ(ierr);
+  ierr = DMGetLabel(dm, "EGADS Face ID", &faceLabel);CHKERRQ(ierr);
+  ierr = DMCreateLabel(dm, "EGADS Edge ID");CHKERRQ(ierr);
+  ierr = DMGetLabel(dm, "EGADS Edge ID", &edgeLabel);CHKERRQ(ierr);
+  for (b = 0; b < nbodies; ++b) {
+    ego body = bodies[b];
+    int id, Nl, l;
+
+    ierr = EG_getBodyTopos(body, NULL, LOOP, &Nl, &lobjs);CHKERRQ(ierr);
+    for (l = 0; l < Nl; ++l) {
+      ego loop = lobjs[l];
+      int lid, cell, Ne, e;
+
+      lid  = EG_indexBodyTopo(body, loop);CHKERRQ(ierr);
+      cell = lid-1;
+      ierr = DMLabelSetValue(bodyLabel, cell, b);CHKERRQ(ierr);
+      {
+        ego *fobjs;
+        int  Nf, fid;
+
+        ierr = EG_getBodyTopos(body, loop, FACE, &Nf, &fobjs);CHKERRQ(ierr);
+        fid  = EG_indexBodyTopo(body, fobjs[0]);CHKERRQ(ierr);
+        ierr = DMLabelSetValue(faceLabel, cell, fid);CHKERRQ(ierr);
+      }
+
+      ierr = EG_getTopology(loop, &geom, &oclass, &mtype, NULL, &Ne, &objs, &senses);CHKERRQ(ierr);
+      for (e = 0; e < Ne; ++e) {
+        ego             edge = objs[e];
+        int             eid, Nv, v;
+        PetscInt        support[2], numEdges;
+        const PetscInt *edges;
+
+        eid  = EG_indexBodyTopo(body, edge);
+        ierr = EG_getTopology(edge, &geom, &oclass, &mtype, NULL, &Nv, &nobjs, &senses);
+        if (Nv > 2) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Edge %d has %d vertices > 2", eid, Nv);
+        for (v = 0; v < Nv; ++v) {
+          ego vertex = nobjs[v];
+
+          id   = EG_indexBodyTopo(body, vertex);
+          ierr = DMLabelSetValue(edgeLabel, numCells + id-1, eid);CHKERRQ(ierr);
+          support[v] = numCells + id-1;
+        }
+        if (Nv == 2) {
+          ierr = DMPlexGetJoin(dm, 2, support, &numEdges, &edges);CHKERRQ(ierr);
+          if (numEdges != 1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "2 vertices should only bound 1 edge, not %D", numEdges);
+          ierr = DMLabelSetValue(edgeLabel, edges[0], eid);CHKERRQ(ierr);
+          ierr = DMPlexRestoreJoin(dm, 2, support, &numEdges, &edges);CHKERRQ(ierr);
+        }
+      }
+    }
+  }
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
+    PetscInt *closure = NULL;
+    PetscInt  clSize, cl, bval, fval;
+
+    ierr = DMPlexGetTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);CHKERRQ(ierr);
+    ierr = DMLabelGetValue(bodyLabel, c, &bval);CHKERRQ(ierr);
+    ierr = DMLabelGetValue(faceLabel, c, &fval);CHKERRQ(ierr);
+    for (cl = 0; cl < clSize*2; cl += 2) {
+      ierr = DMLabelSetValue(bodyLabel, closure[cl], bval);CHKERRQ(ierr);
+      ierr = DMLabelSetValue(faceLabel, closure[cl], fval);CHKERRQ(ierr);
+    }
+    ierr = DMPlexRestoreTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);CHKERRQ(ierr);
+  }
+  ierr = DMLabelView(bodyLabel, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = DMLabelView(faceLabel, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = DMLabelView(edgeLabel, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
+
+  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
+
+  /* Close EGADSLite file */
+  ierr = EG_close(context);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+/*TEST
+
+  build:
+    requires: egads
+
+  test:
+    suffix: sphere_0
+    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/unit_sphere.egadslite -dm_view ::ascii_info_detail
+
+TEST*/
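
The label-propagation loop near the end of main() relies on DMPlexGetTransitiveClosure() returning interleaved (point, orientation) pairs, which is why it strides by 2 and reads only the even entries. A minimal sketch of that pattern, under the same assumption and with the names used above:

  PetscInt *closure = NULL;
  PetscInt  clSize, cl;
  ierr = DMPlexGetTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);CHKERRQ(ierr);
  for (cl = 0; cl < clSize*2; cl += 2) {
    const PetscInt point = closure[cl]; /* closure[cl+1] is the orientation, unused here */
    ierr = DMLabelSetValue(bodyLabel, point, bval);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, c, PETSC_TRUE, &clSize, &closure);CHKERRQ(ierr);
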
diff --git a/src/dm/impls/plex/examples/tests/ex4.c b/src/dm/impls/plex/examples/tests/ex4.c
index 75b156c2fc4..cceb55623a7 100644
--- a/src/dm/impls/plex/examples/tests/ex4.c
+++ b/src/dm/impls/plex/examples/tests/ex4.c
@@ -1021,14 +1021,14 @@ int main(int argc, char **argv)
       suffix: hybint_3d_0
       args: -dim 3 -dm_refine 1
     test:
-      TODO: fails due to wrong SF
+      TODO: fails due to Cone size 10 not supported for dimension 3
       suffix: hybint_3d_s2t_0
       args: -dim 3 -dm_refine 1 -simplex2tensor
     test:
       suffix: hybint_3d_1
       args: -dim 3 -dm_refine 1 -test_num 1
     test:
-      TODO: fails due to wrong SF
+      TODO: fails due to Cone size 12 not supported for dimension 3
       suffix: hybint_3d_s2t_1
       args: -dim 3 -dm_refine 1  -simplex2tensor -test_num 1
 
diff --git a/src/dm/impls/plex/examples/tests/ex5.c b/src/dm/impls/plex/examples/tests/ex5.c
index 28bedbdda71..95447a3d543 100644
--- a/src/dm/impls/plex/examples/tests/ex5.c
+++ b/src/dm/impls/plex/examples/tests/ex5.c
@@ -826,7 +826,6 @@ int main(int argc, char **argv)
       suffix: quad_t1_0
       args: -dim 2 -cell_simplex 0 -test_num 1 \
             -faulted_dm_plex_check_symmetry -faulted_dm_plex_check_skeleton -faulted_dm_plex_check_faces
-      TODO: turn on test
     # 3D Hex
     test:
       suffix: hex_0
diff --git a/src/dm/impls/plex/examples/tests/ex9.c b/src/dm/impls/plex/examples/tests/ex9.c
index 8e441b34a31..69e25cbb4f5 100644
--- a/src/dm/impls/plex/examples/tests/ex9.c
+++ b/src/dm/impls/plex/examples/tests/ex9.c
@@ -18,6 +18,7 @@ typedef struct {
   PetscReal maxConeTime;       /* Max time per run for DMPlexGetCone() */
   PetscReal maxClosureTime;    /* Max time per run for DMPlexGetTransitiveClosure() */
   PetscReal maxVecClosureTime; /* Max time per run for DMPlexVecGetClosure() */
+  PetscBool printTimes;        /* Print total times, do not check limits */
 } AppCtx;
 
 static PetscErrorCode ProcessOptions(AppCtx *options)
@@ -41,6 +42,7 @@ static PetscErrorCode ProcessOptions(AppCtx *options)
   options->maxConeTime       = 0.0;
   options->maxClosureTime    = 0.0;
   options->maxVecClosureTime = 0.0;
+  options->printTimes        = PETSC_FALSE;
 
   ierr = PetscOptionsBegin(PETSC_COMM_SELF, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
   ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex9.c", options->dim, &options->dim, NULL,1,3);CHKERRQ(ierr);
@@ -48,7 +50,7 @@ static PetscErrorCode ProcessOptions(AppCtx *options)
   ierr = PetscOptionsBool("-spectral", "Flag for spectral element layout", "ex9.c", options->spectral, &options->spectral, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBool("-interpolate", "Flag for mesh interpolation", "ex9.c", options->interpolate, &options->interpolate, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsReal("-refinement_limit", "The maximum volume of a refined cell", "ex9.c", options->refinementLimit, &options->refinementLimit, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBoundedInt("-num_fields", "The number of section fields", "ex9.c", options->numFields, &options->numFields, NULL,1);CHKERRQ(ierr);
+  ierr = PetscOptionsBoundedInt("-num_fields", "The number of section fields", "ex9.c", options->numFields, &options->numFields, NULL, 0);CHKERRQ(ierr);
   if (options->numFields) {
     len  = options->numFields;
     ierr = PetscMalloc1(len, &options->numComponents);CHKERRQ(ierr);
@@ -75,6 +77,7 @@ static PetscErrorCode ProcessOptions(AppCtx *options)
   ierr = PetscOptionsReal("-max_cone_time", "The maximum time per run for DMPlexGetCone()", "ex9.c", options->maxConeTime, &options->maxConeTime, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsReal("-max_closure_time", "The maximum time per run for DMPlexGetTransitiveClosure()", "ex9.c", options->maxClosureTime, &options->maxClosureTime, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsReal("-max_vec_closure_time", "The maximum time per run for DMPlexVecGetClosure()", "ex9.c", options->maxVecClosureTime, &options->maxVecClosureTime, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-print_times", "Print total times, do not check limits", "ex9.c", options->printTimes, &options->printTimes, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -236,9 +239,13 @@ static PetscErrorCode TestCone(DM dm, AppCtx *user)
   PetscLogStage      stage;
   PetscLogEvent      event;
   PetscEventPerfInfo eventInfo;
+  MPI_Comm           comm;
+  PetscMPIInt        rank;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   ierr = PetscLogStageRegister("DMPlex Cone Test", &stage);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("Cone", PETSC_OBJECT_CLASSID, &event);CHKERRQ(ierr);
   ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
@@ -258,8 +265,12 @@ static PetscErrorCode TestCone(DM dm, AppCtx *user)
   numRuns = (cEnd-cStart) * user->iterations;
   if (eventInfo.count != 1) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event calls %d should be %d", eventInfo.count, 1);
   if ((PetscInt) eventInfo.flops != 0) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event flops %d should be %d", (PetscInt) eventInfo.flops, 0);
-  if (eventInfo.time > maxTimePerRun * numRuns) {
-    ierr = PetscPrintf(PETSC_COMM_SELF, "Cones: %d Average time per cone: %gs standard: %gs\n", numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
+  if (user->printTimes) {
+    ierr = PetscSynchronizedPrintf(comm, "[%d] Cones: %d Total time: %.3es Average time per cone: %.3es\n", rank, numRuns, eventInfo.time, eventInfo.time/numRuns);CHKERRQ(ierr);
+    ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
+  } else if (eventInfo.time > maxTimePerRun * numRuns) {
+    ierr = PetscSynchronizedPrintf(comm, "[%d] Cones: %d Average time per cone: %gs standard: %gs\n", rank, numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
+    ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
     if (user->errors) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Average time for cone %g > standard %g", eventInfo.time/numRuns, maxTimePerRun);
   }
   PetscFunctionReturn(0);
@@ -272,9 +283,13 @@ static PetscErrorCode TestTransitiveClosure(DM dm, AppCtx *user)
   PetscLogStage      stage;
   PetscLogEvent      event;
   PetscEventPerfInfo eventInfo;
+  MPI_Comm           comm;
+  PetscMPIInt        rank;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   ierr = PetscLogStageRegister("DMPlex Transitive Closure Test", &stage);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("TransitiveClosure", PETSC_OBJECT_CLASSID, &event);CHKERRQ(ierr);
   ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
@@ -296,8 +311,12 @@ static PetscErrorCode TestTransitiveClosure(DM dm, AppCtx *user)
   numRuns = (cEnd-cStart) * user->iterations;
   if (eventInfo.count != 1) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event calls %d should be %d", eventInfo.count, 1);
   if ((PetscInt) eventInfo.flops != 0) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event flops %d should be %d", (PetscInt) eventInfo.flops, 0);
-  if (eventInfo.time > maxTimePerRun * numRuns) {
-    ierr = PetscPrintf(PETSC_COMM_SELF, "Closures: %d Average time per cone: %gs standard: %gs\n", numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
+  if (user->printTimes) {
+    ierr = PetscSynchronizedPrintf(comm, "[%d] Closures: %d Total time: %.3es Average time per cone: %.3es\n", rank, numRuns, eventInfo.time, eventInfo.time/numRuns);CHKERRQ(ierr);
+    ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
+  } else if (eventInfo.time > maxTimePerRun * numRuns) {
+    ierr = PetscSynchronizedPrintf(comm, "[%d] Closures: %d Average time per cone: %gs standard: %gs\n", rank, numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
+    ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
     if (user->errors) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Average time for closure %g > standard %g", eventInfo.time/numRuns, maxTimePerRun);
   }
   PetscFunctionReturn(0);
@@ -314,9 +333,13 @@ static PetscErrorCode TestVecClosure(DM dm, PetscBool useIndex, PetscBool useSpe
   PetscLogStage      stage;
   PetscLogEvent      event;
   PetscEventPerfInfo eventInfo;
+  MPI_Comm           comm;
+  PetscMPIInt        rank;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   if (useIndex) {
     if (useSpectral) {
       ierr = PetscLogStageRegister("DMPlex Vector Closure with Index Test", &stage);CHKERRQ(ierr);
@@ -361,14 +384,20 @@ static PetscErrorCode TestVecClosure(DM dm, PetscBool useIndex, PetscBool useSpe
   numRuns = (cEnd-cStart) * user->iterations;
   if (eventInfo.count != 1) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event calls %d should be %d", eventInfo.count, 1);
   if ((PetscInt) eventInfo.flops != 0) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of event flops %d should be %d", (PetscInt) eventInfo.flops, 0);
-  if (eventInfo.time > maxTimePerRun * numRuns) {
+  if (user->printTimes || eventInfo.time > maxTimePerRun * numRuns) {
     const char *title = "VecClosures";
     const char *titleIndex = "VecClosures with Index";
     const char *titleSpec = "VecClosures Spectral";
     const char *titleSpecIndex = "VecClosures Spectral with Index";
 
-    ierr = PetscPrintf(PETSC_COMM_SELF, "%s: %d Average time per vector closure: %gs standard: %gs\n", useIndex ? (useSpectral ? titleSpecIndex : titleIndex) : (useSpectral ? titleSpec : title), numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
-    if (user->errors) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Average time for vector closure %g > standard %g", eventInfo.time/numRuns, maxTimePerRun);
+    if (user->printTimes) {
+      ierr = PetscSynchronizedPrintf(comm, "[%d] %s: %d Total time: %.3es Average time per vector closure: %.3es\n", rank, useIndex ? (useSpectral ? titleSpecIndex : titleIndex) : (useSpectral ? titleSpec : title), numRuns, eventInfo.time, eventInfo.time/numRuns);CHKERRQ(ierr);
+      ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
+    } else {
+      ierr = PetscSynchronizedPrintf(comm, "[%d] %s: %d Average time per vector closure: %gs standard: %gs\n", rank, useIndex ? (useSpectral ? titleSpecIndex : titleIndex) : (useSpectral ? titleSpec : title), numRuns, eventInfo.time/numRuns, maxTimePerRun);CHKERRQ(ierr);
+      ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
+      if (user->errors) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Average time for vector closure %g > standard %g", eventInfo.time/numRuns, maxTimePerRun);
+    }
   }
   PetscFunctionReturn(0);
 }
@@ -392,7 +421,7 @@ int main(int argc, char **argv)
   ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
   ierr = ProcessOptions(&user);CHKERRQ(ierr);
   ierr = PetscLogDefaultBegin();CHKERRQ(ierr);
-  ierr = CreateMesh(PETSC_COMM_SELF, &user, &dm);CHKERRQ(ierr);
+  ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr);
   ierr = TestCone(dm, &user);CHKERRQ(ierr);
   ierr = TestTransitiveClosure(dm, &user);CHKERRQ(ierr);
   ierr = TestVecClosure(dm, PETSC_FALSE, PETSC_FALSE, &user);CHKERRQ(ierr);
@@ -409,46 +438,56 @@ int main(int argc, char **argv)
 
 /*TEST
 
+  build:
+    requires: define(PETSC_USE_LOG)
+
   # 2D Simplex P_1 scalar tests
+  testset:
+    args: -num_dof 1,0,0 -iterations 2 -print_times
+    test:
+      suffix: correctness_0
+    test:
+      suffix: correctness_1
+      args: -interpolate -dm_refine 2
+    test:
+      suffix: correctness_2
+      requires: triangle
+      args: -interpolate -refinement_limit 1.0e-5
   test:
     suffix: 0
-    requires: performance
-    TODO: missing output file
+    TODO: Only for performance testing
     args: -num_dof 1,0,0 -iterations 10000 -max_cone_time 1.1e-8 -max_closure_time 1.3e-7 -max_vec_closure_time 3.6e-7
   test:
     suffix: 1
-    requires: performance
-    TODO: missing output file
+    requires: triangle
+    TODO: Only for performance testing
     args: -refinement_limit 1.0e-5 -num_dof 1,0,0 -iterations 2 -max_cone_time 2.1e-8 -max_closure_time 1.5e-7 -max_vec_closure_time 3.6e-7
   test:
     suffix: 2
-    requires: performance
-    TODO: missing output file
+    TODO: Only for performance testing
     args: -num_fields 1 -num_components 1 -num_dof 1,0,0 -iterations 10000 -max_cone_time 1.1e-8 -max_closure_time 1.3e-7 -max_vec_closure_time 4.5e-7
   test:
     suffix: 3
-    requires: performance
-    TODO: missing output file
+    requires: triangle
+    TODO: Only for performance testing
     args: -refinement_limit 1.0e-5 -num_fields 1 -num_components 1 -num_dof 1,0,0 -iterations 2 -max_cone_time 2.1e-8 -max_closure_time 1.5e-7 -max_vec_closure_time 4.7e-7
   test:
     suffix: 4
-    requires: performance
-    TODO: missing output file
+    TODO: Only for performance testing
     args: -interpolate -num_dof 1,0,0 -iterations 10000 -max_cone_time 1.1e-8 -max_closure_time 6.5e-7 -max_vec_closure_time 1.0e-6
   test:
     suffix: 5
-    requires: performance
-    TODO: missing output file
+    requires: triangle
+    TODO: Only for performance testing
     args: -interpolate -refinement_limit 1.0e-4 -num_dof 1,0,0 -iterations 2 -max_cone_time 2.1e-8 -max_closure_time 6.5e-7 -max_vec_closure_time 1.0e-6
   test:
     suffix: 6
-    requires: performance
-    TODO: missing output file
+    TODO: Only for performance testing
     args: -interpolate -num_fields 1 -num_components 1 -num_dof 1,0,0 -iterations 10000 -max_cone_time 1.1e-8 -max_closure_time 6.5e-7 -max_vec_closure_time 1.1e-6
   test:
     suffix: 7
-    requires: performance
-    TODO: missing output file
+    requires: triangle
+    TODO: Only for performance testing
     args: -interpolate -refinement_limit 1.0e-4 -num_fields 1 -num_components 1 -num_dof 1,0,0 -iterations 2 -max_cone_time 2.1e-8 -max_closure_time 6.5e-7 -max_vec_closure_time 1.2e-6
 
   # 2D Simplex P_1 vector tests
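
With -print_times, the timing reports above switch from PetscPrintf() to the PetscSynchronizedPrintf()/PetscSynchronizedFlush() pair so each rank's line appears in rank order instead of interleaved. The pattern, as used in the hunks above:

  /* Every rank queues its line; the collective flush then emits them in rank order. */
  ierr = PetscSynchronizedPrintf(comm, "[%d] Cones: %d Total time: %.3es\n", rank, numRuns, eventInfo.time);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(comm, PETSC_STDOUT);CHKERRQ(ierr);
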
diff --git a/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis.out b/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis_int.out b/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis_int.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/7_par_hdf5_parmetis_int.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex12_lb_0.out b/src/dm/impls/plex/examples/tests/output/ex12_lb_0.out
index df65da3daea..e73f3949e45 100644
--- a/src/dm/impls/plex/examples/tests/output/ex12_lb_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex12_lb_0.out
@@ -6,6 +6,7 @@ Graph Partitioner: 4 MPI Processes
   type: parmetis
   edge cut: 8
   balance:  0
+  use vertex weights: 1
   ParMetis type: kway
   load imbalance ratio 1.05
   debug flag 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex12_lb_1.out b/src/dm/impls/plex/examples/tests/output/ex12_lb_1.out
index df65da3daea..e73f3949e45 100644
--- a/src/dm/impls/plex/examples/tests/output/ex12_lb_1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex12_lb_1.out
@@ -6,6 +6,7 @@ Graph Partitioner: 4 MPI Processes
   type: parmetis
   edge cut: 8
   balance:  0
+  use vertex weights: 1
   ParMetis type: kway
   load imbalance ratio 1.05
   debug flag 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex14_0.out b/src/dm/impls/plex/examples/tests/output/ex14_0.out
index 50704280038..c160ba03eb8 100644
--- a/src/dm/impls/plex/examples/tests/output/ex14_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex14_0.out
@@ -6,5 +6,6 @@ Test Mesh in 2 dimensions:
   2-cells: 32
 Labels:
   depth: 3 strata with value/size (0 (25), 1 (56), 2 (32))
+  celltype: 3 strata with value/size (2 (32), 0 (25), 1 (56))
   marker: 1 strata with value/size (1 (32))
   Face Sets: 1 strata with value/size (1 (24))
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5.out b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int.out b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int_parmetis.out b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int_parmetis.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_10_par_hdf5_int_parmetis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist0_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist0_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist0_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist0_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-parallel.out b/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-after_distribute.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-parallel.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-after_distribute.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_quad_dist1_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist0_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist0_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist0_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist0_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-parallel.out b/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-after_distribute.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-parallel.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-after_distribute.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_1_tri_dist1_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_2a.out b/src/dm/impls/plex/examples/tests/output/ex18_2a.out
index ff8d0de24f7..4d0901630ad 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_2a.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_2a.out
@@ -1,5 +1,5 @@
 ============
-DMPlexCheckConesConformOnInterfaces output
+DMPlexCheckInterfaceCones output
 ============
 [0] --------
   r=0 ranks[r]=1 sntCoordinatesPerRank[r]:
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist0_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist0_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist0_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist0_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-parallel.out b/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-after_distribute.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-parallel.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-after_distribute.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_hex_dist1_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist0_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist0_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist0_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist0_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-parallel.out b/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-after_distribute.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-parallel.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-after_distribute.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-create.out
similarity index 100%
rename from src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_4_tet_dist1_interpolate-create.out
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_4_tet_test_orient.out b/src/dm/impls/plex/examples/tests/output/ex18_4_tet_test_orient.out
index e69de29bb2d..b591ad43ffb 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_4_tet_test_orient.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_4_tet_test_orient.out
@@ -0,0 +1 @@
+Orientation test PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-create.out
similarity index 90%
rename from src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-create.out
index 2e042e43849..41054818a74 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-serial.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-create.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: full
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -66,4 +70,5 @@ PetscSF Object: 2 MPI processes
   type: basic
     sort=rank-order
   PetscSFSetGraph() has not been called yet
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: full
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-none.out b/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-none.out
index 019470a0b0e..b1095ffcf2e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-none.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_dist0_interpolate-none.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -38,4 +42,5 @@ PetscSF Object: 2 MPI processes
   type: basic
     sort=rank-order
   PetscSFSetGraph() has not been called yet
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-parallel.out b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-after_distribute.out
similarity index 91%
rename from src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-parallel.out
rename to src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-after_distribute.out
index ba07883eca2..d2451599c9e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-parallel.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-after_distribute.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -82,4 +86,5 @@ PetscSF Object: 2 MPI processes
   [0]    2 <- 2
   [0]    5 <- 5
   [1] Roots referenced by my leaves, by rank
+DMPlexIsDistributed: TRUE
 DMPlexIsInterpolatedCollective: full
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-create.out
similarity index 91%
rename from src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-create.out
index ea8e11a050a..f27d40f498e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-serial.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-create.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: full
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -82,4 +86,5 @@ PetscSF Object: 2 MPI processes
   [0]    2 <- 2
   [0]    5 <- 5
   [1] Roots referenced by my leaves, by rank
+DMPlexIsDistributed: TRUE
 DMPlexIsInterpolatedCollective: full
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-none.out b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-none.out
index bdc860bbf8f..3078de6fddb 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-none.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_dist1_interpolate-none.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -48,4 +52,5 @@ PetscSF Object: 2 MPI processes
   [0]    1 <- 1
   [0]    2 <- 2
   [1] Roots referenced by my leaves, by rank
+DMPlexIsDistributed: TRUE
 DMPlexIsInterpolatedCollective: none
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-serial.out b/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-create.out
similarity index 88%
rename from src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-serial.out
rename to src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-create.out
index 4c9cac603cd..4a1ee6037da 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-serial.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-create.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: full
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 1 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -59,4 +63,5 @@ Process 0:
   (   5) dim  2 offset   6 -1. -0.5
   (   6) dim  2 offset   8 1. -0.5
   (   7) dim  2 offset  10 1. 0.5
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: full
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-none.out b/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-none.out
index c94930f77fb..55bb9d0c283 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-none.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_5_seq_interpolate-none.out
@@ -1,4 +1,8 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
 DM Object: Parallel Mesh 1 MPI processes
   type: plex
 Parallel Mesh in 2 dimensions:
@@ -31,4 +35,5 @@ Process 0:
   (   5) dim  2 offset   6 -1. -0.5
   (   6) dim  2 offset   8 1. -0.5
   (   7) dim  2 offset  10 1. 0.5
+DMPlexIsDistributed: FALSE
 DMPlexIsInterpolatedCollective: none
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_hex.out b/src/dm/impls/plex/examples/tests/output/ex18_6_hex.out
index e69de29bb2d..ba92ca55809 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_6_hex.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_hex.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_int_hex.out b/src/dm/impls/plex/examples/tests/output/ex18_6_int_hex.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_int_hex.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_int_quad.out b/src/dm/impls/plex/examples/tests/output/ex18_6_int_quad.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_int_quad.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_int_tet.out b/src/dm/impls/plex/examples/tests/output/ex18_6_int_tet.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_int_tet.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_int_tri.out b/src/dm/impls/plex/examples/tests/output/ex18_6_int_tri.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_int_tri.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_parint_hex.out b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_hex.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_hex.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_parint_quad.out b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_quad.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_quad.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tet.out b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tet.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tet.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tri.out b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tri.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_parint_tri.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_quad.out b/src/dm/impls/plex/examples/tests/output/ex18_6_quad.out
index e69de29bb2d..ba92ca55809 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_6_quad.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_quad.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_tet.out b/src/dm/impls/plex/examples/tests/output/ex18_6_tet.out
index e69de29bb2d..ba92ca55809 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_6_tet.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_tet.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_6_tri.out b/src/dm/impls/plex/examples/tests/output/ex18_6_tri.out
index e69de29bb2d..ba92ca55809 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_6_tri.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_6_tri.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo.out b/src/dm/impls/plex/examples/tests/output/ex18_7_exo.out
index e69de29bb2d..51865ada83e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_7_exo.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_exo.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_metis.out b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_metis.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_metis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_simple.out b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_simple.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_int_simple.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo_metis_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_metis_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_metis_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_exo_simple_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_simple_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_exo_simple_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-2.out b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-2.out
index c340cca4697..f28af15bd64 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-2.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-2.out
@@ -1,19 +1,26 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
 Graph Partitioner: 2 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: 2 MPI processes
     type: hierarch
+    Using vertex weights
      Number of coarse parts: 1
      Coarse partitioner: ptscotch
       MatPartitioning Object: (hierarch_coarse_) 2 MPI processes
         type: ptscotch
+        Using vertex weights
           Strategy=Default behavior
           Load imbalance ratio=0.01
      Number of fine parts: 2
      Fine partitioner: ptscotch
     MatPartitioning Object: (hierarch_fine_) 1 MPI processes
       type: ptscotch
+      Using vertex weights
         Strategy=Default behavior
         Load imbalance ratio=0.01
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-3.out b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-3.out
index e1044221981..e7fdd9fa97f 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-3.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-3.out
@@ -1,19 +1,26 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
 Graph Partitioner: 3 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: 3 MPI processes
     type: hierarch
+    Using vertex weights
      Number of coarse parts: 2
      Coarse partitioner: ptscotch
       MatPartitioning Object: (hierarch_coarse_) 3 MPI processes
         type: ptscotch
+        Using vertex weights
           Strategy=Default behavior
           Load imbalance ratio=0.01
      Number of fine parts: 2
      Fine partitioner: ptscotch
     MatPartitioning Object: (hierarch_fine_) 1 MPI processes
       type: ptscotch
+      Using vertex weights
         Strategy=Default behavior
         Load imbalance ratio=0.01
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-4.out b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-4.out
index 0006a7041fe..fa8910f0e60 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-4.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_hdf5_hierarch_nsize-4.out
@@ -1,19 +1,26 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
 Graph Partitioner: 4 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: 4 MPI processes
     type: hierarch
+    Using vertex weights
      Number of coarse parts: 2
      Coarse partitioner: ptscotch
       MatPartitioning Object: (hierarch_coarse_) 4 MPI processes
         type: ptscotch
+        Using vertex weights
           Strategy=Default behavior
           Load imbalance ratio=0.01
      Number of fine parts: 2
      Fine partitioner: ptscotch
     MatPartitioning Object: (hierarch_fine_) 1 MPI processes
       type: ptscotch
+      Using vertex weights
         Strategy=Default behavior
         Load imbalance ratio=0.01
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5.out b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int_parmetis.out b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int_parmetis.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_int_parmetis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis.out b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis_int.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_par_hdf5_parmetis_int.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_metis.out b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_metis.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_metis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_simple.out b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_simple.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_int_simple.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_metis_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_metis_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_metis_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple.out b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple_int.out b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_7_seq_hdf5_simple_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_8.out b/src/dm/impls/plex/examples/tests/output/ex18_8.out
index 0d8cd7aa418..9541592a771 100644
--- a/src/dm/impls/plex/examples/tests/output/ex18_8.out
+++ b/src/dm/impls/plex/examples/tests/output/ex18_8.out
@@ -1,4 +1,9 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexIsDistributed: TRUE
 DMPlexIsInterpolatedCollective: full
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
+DMPlexIsDistributed: TRUE
 DMPlexIsInterpolatedCollective: full
 [0] (       0.000,        1.000,        0.000) --> points[0] = 36
 [1] (       0.000,        1.000,        0.000) --> points[0] = 16
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5.out b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int.out b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int_parmetis.out b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int_parmetis.out
new file mode 100644
index 00000000000..9261dd7272b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_int_parmetis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced distributed mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis.out b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis_int.out b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis_int.out
new file mode 100644
index 00000000000..ba92ca55809
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_par_hdf5_parmetis_int.out
@@ -0,0 +1,2 @@
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_metis.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_metis.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_metis.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_simple.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_simple.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_int_simple.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_metis_int.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_metis_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_metis_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int.out
new file mode 100644
index 00000000000..51865ada83e
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int.out
@@ -0,0 +1,3 @@
+DMPlexCreateFromFile produced serial mesh.
+DMPlexCheckPointSFHeavy PASSED
+DMPlexCheckPointSFHeavy PASSED
diff --git a/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int_err.out b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int_err.out
new file mode 100644
index 00000000000..b53996dc324
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex18_9_seq_hdf5_simple_int_err.out
@@ -0,0 +1,79 @@
+DMPlexCheckPointSFHeavy PASSED
+Points missing in PointSF:
+[0] --------
+  face 76
+    edge 136
+      vertex 14 w/ coordinates (       0.000,    50000.000,    83333.333)
+      vertex 13 w/ coordinates (       0.000,    50000.000,    75000.000)
+    edge 139
+      vertex 20 w/ coordinates (   25000.000,    50000.000,    83333.333)
+      vertex 14 w/ coordinates (       0.000,    50000.000,    83333.333)
+    edge 137
+      vertex 24 w/ coordinates (   50000.000,    50000.000,    75000.000)
+      vertex 20 w/ coordinates (   25000.000,    50000.000,    83333.333)
+    edge 89
+      vertex 13 w/ coordinates (       0.000,    50000.000,    75000.000)
+      vertex 24 w/ coordinates (   50000.000,    50000.000,    75000.000)
+  face 80
+    edge 141
+      vertex 30 w/ coordinates (   75000.000,    50000.000,    83333.333)
+      vertex 24 w/ coordinates (   50000.000,    50000.000,    75000.000)
+    edge 144
+      vertex 37 w/ coordinates (  100000.000,    50000.000,    83333.333)
+      vertex 30 w/ coordinates (   75000.000,    50000.000,    83333.333)
+    edge 142
+      vertex 36 w/ coordinates (  100000.000,    50000.000,    75000.000)
+      vertex 37 w/ coordinates (  100000.000,    50000.000,    83333.333)
+    edge 97
+      vertex 24 w/ coordinates (   50000.000,    50000.000,    75000.000)
+      vertex 36 w/ coordinates (  100000.000,    50000.000,    75000.000)
+  edge 108
+    vertex 25 w/ coordinates (   50000.000,    75000.000,    83333.333)
+    vertex 24 w/ coordinates (   50000.000,    50000.000,    75000.000)
+  edge 136
+    vertex 14 w/ coordinates (       0.000,    50000.000,    83333.333)
+    vertex 13 w/ coordinates (       0.000,    50000.000,    75000.000)
+  edge 142
+    vertex 36 w/ coordinates (  100000.000,    50000.000,    75000.000)
+    vertex 37 w/ coordinates (  100000.000,    50000.000,    83333.333)
+[1] --------
+  face 54
+    edge 101
+      vertex 29 w/ coordinates (   50000.000,    50000.000,    75000.000)
+      vertex 12 w/ coordinates (       0.000,    50000.000,    75000.000)
+    edge 99
+      vertex 22 w/ coordinates (   25000.000,    50000.000,    83333.333)
+      vertex 29 w/ coordinates (   50000.000,    50000.000,    75000.000)
+    edge 102
+      vertex 13 w/ coordinates (       0.000,    50000.000,    83333.333)
+      vertex 22 w/ coordinates (   25000.000,    50000.000,    83333.333)
+    edge 92
+      vertex 12 w/ coordinates (       0.000,    50000.000,    75000.000)
+      vertex 13 w/ coordinates (       0.000,    50000.000,    83333.333)
+  face 60
+    edge 112
+      vertex 43 w/ coordinates (  100000.000,    50000.000,    75000.000)
+      vertex 29 w/ coordinates (   50000.000,    50000.000,    75000.000)
+    edge 110
+      vertex 44 w/ coordinates (  100000.000,    50000.000,    83333.333)
+      vertex 43 w/ coordinates (  100000.000,    50000.000,    75000.000)
+    edge 113
+      vertex 36 w/ coordinates (   75000.000,    50000.000,    83333.333)
+      vertex 44 w/ coordinates (  100000.000,    50000.000,    83333.333)
+    edge 104
+      vertex 29 w/ coordinates (   50000.000,    50000.000,    75000.000)
+      vertex 36 w/ coordinates (   75000.000,    50000.000,    83333.333)
+  edge 92
+    vertex 12 w/ coordinates (       0.000,    50000.000,    75000.000)
+    vertex 13 w/ coordinates (       0.000,    50000.000,    83333.333)
+  edge 96
+    vertex 29 w/ coordinates (   50000.000,    50000.000,    75000.000)
+    vertex 30 w/ coordinates (   50000.000,    75000.000,    83333.333)
+  edge 110
+    vertex 44 w/ coordinates (  100000.000,    50000.000,    83333.333)
+    vertex 43 w/ coordinates (  100000.000,    50000.000,    75000.000)
+[2] --------
+  edge 120
+    vertex 22 w/ coordinates (   50000.000,    50000.000,    75000.000)
+    vertex 24 w/ coordinates (   50000.000,    75000.000,    83333.333)
+Extra points in PointSF:
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_ball_0.out b/src/dm/impls/plex/examples/tests/output/ex1_ball_0.out
new file mode 100644
index 00000000000..4a7cf31b775
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex1_ball_0.out
@@ -0,0 +1,9 @@
+DM Object: Simplicial Mesh 1 MPI processes
+  type: plex
+Simplicial Mesh in 3 dimensions:
+  0-cells: 12
+  1-cells: 36
+  2-cells: 40
+  3-cells: 15
+Labels:
+  depth: 4 strata with value/size (0 (12), 1 (36), 2 (40), 3 (15))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_box_2d.out b/src/dm/impls/plex/examples/tests/output/ex1_box_2d.out
index cc36d9a91f8..926e0a90e85 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_box_2d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_box_2d.out
@@ -6,6 +6,7 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 64
 Labels:
   depth: 3 strata with value/size (0 (81), 1 (144), 2 (64))
+  celltype: 3 strata with value/size (3 (64), 0 (81), 1 (144))
   marker: 1 strata with value/size (1 (64))
   Face Sets: 4 strata with value/size (4 (14), 2 (14), 1 (14), 3 (14))
 Mesh with 64 cells, shape condition numbers: min = 2., max = 2., mean = 2., stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_box_2d_per.out b/src/dm/impls/plex/examples/tests/output/ex1_box_2d_per.out
index cc36d9a91f8..926e0a90e85 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_box_2d_per.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_box_2d_per.out
@@ -6,6 +6,7 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 64
 Labels:
   depth: 3 strata with value/size (0 (81), 1 (144), 2 (64))
+  celltype: 3 strata with value/size (3 (64), 0 (81), 1 (144))
   marker: 1 strata with value/size (1 (64))
   Face Sets: 4 strata with value/size (4 (14), 2 (14), 1 (14), 3 (14))
 Mesh with 64 cells, shape condition numbers: min = 2., max = 2., mean = 2., stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_box_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_box_3d.out
index eac651b23cd..0cb620ccffc 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_box_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_box_3d.out
@@ -7,6 +7,7 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 512
 Labels:
   depth: 4 strata with value/size (0 (729), 1 (1944), 2 (1728), 3 (512))
+  celltype: 4 strata with value/size (6 (512), 0 (729), 3 (1728), 1 (1944))
   marker: 1 strata with value/size (1 (1530))
   Face Sets: 6 strata with value/size (6 (225), 5 (225), 3 (225), 4 (225), 1 (225), 2 (225))
 Mesh with 512 cells, shape condition numbers: min = 3., max = 3., mean = 3., stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t.out b/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t.out
index ff019761f11..3e50c446679 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 36
 Labels:
   depth: 4 strata with value/size (0 (94), 1 (211), 2 (154), 3 (36))
+  celltype: 4 strata with value/size (6 (36), 0 (94), 3 (154), 1 (211))
   dim: 3 strata with value/size (3 (0), 2 (144), 1 (138))
 Mesh with 36 cells, shape condition numbers: min = 7.60163, max = 12.0058, mean = 10.0291, stddev = 1.43728
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t_parallel.out b/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t_parallel.out
index 170897c956d..8debd5f5fc6 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t_parallel.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_box_wedge_s2t_parallel.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 12 12 12
 Labels:
   depth: 4 strata with value/size (0 (42), 1 (85), 2 (56), 3 (12))
+  celltype: 4 strata with value/size (6 (12), 0 (42), 3 (56), 1 (85))
   dim: 2 strata with value/size (1 (60), 2 (48))
 Mesh with 36 cells, shape condition numbers: min = 7.60163, max = 12.0058, mean = 10.0291, stddev = 1.43728
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_cylinder.out b/src/dm/impls/plex/examples/tests/output/ex1_cylinder.out
index b3ae09b1291..0eb5ac5443c 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_cylinder.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_cylinder.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 2560
 Labels:
   depth: 4 strata with value/size (0 (3033), 1 (8600), 2 (8128), 3 (2560))
+  celltype: 4 strata with value/size (6 (2560), 0 (3033), 3 (8128), 1 (8600))
 Mesh with 2560 cells, shape condition numbers: min = 3.16228, max = 4.7697, mean = 3.56242, stddev = 0.332803
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_cylinder_per.out b/src/dm/impls/plex/examples/tests/output/ex1_cylinder_per.out
index 4e24a7ac050..001efee29df 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_cylinder_per.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_cylinder_per.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 7680
 Labels:
   depth: 4 strata with value/size (0 (8088), 1 (23832), 2 (23424), 3 (7680))
+  celltype: 4 strata with value/size (6 (7680), 0 (8088), 3 (23424), 1 (23832))
 Mesh with 7680 cells, shape condition numbers: min = 3.13083, max = 5.50814, mean = 3.74168, stddev = 0.565147
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_fluent_3.out b/src/dm/impls/plex/examples/tests/output/ex1_fluent_3.out
index 0520d151a39..6ce6e117040 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_fluent_3.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_fluent_3.out
@@ -6,5 +6,5 @@ Simplicial Mesh in 3 dimensions:
   2-cells: 16
   3-cells: 5
 Labels:
-  Face Sets: 2 strata of sizes (4, 12)
-  depth: 4 strata of sizes (8, 18, 16, 5)
+  depth: 4 strata with value/size (0 (8), 1 (18), 2 (16), 3 (5))
+  Face Sets: 2 strata with value/size (11 (4), 12 (12))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_13_hybs2t.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_13_hybs2t.out
index 7c6c1175f10..a7217027ba4 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_13_hybs2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_13_hybs2t.out
@@ -6,6 +6,7 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 42 42 45 52
 Labels:
   depth: 3 strata with value/size (0 (53), 1 (94), 2 (42))
+  celltype: 3 strata with value/size (3 (42), 0 (53), 1 (94))
   dim: 2 strata with value/size (1 (78), 2 (98))
   Cell Sets: 1 strata with value/size (1 (98))
 Mesh with 181 cells, shape condition numbers: min = 2., max = 7.04333, mean = 2.48443, stddev = 0.816729
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_14_ext_s2t.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_14_ext_s2t.out
index 5128760d51a..9dbb2a74b54 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_14_ext_s2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_14_ext_s2t.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 252
 Labels:
   depth: 4 strata with value/size (0 (429), 1 (1090), 2 (914), 3 (252))
+  celltype: 4 strata with value/size (6 (252), 0 (429), 3 (914), 1 (1090))
   dim: 3 strata with value/size (3 (0), 2 (756), 1 (639))
 Mesh with 252 cells, shape condition numbers: min = 8.86936, max = 19.575, mean = 12.6699, stddev = 1.94973
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d.out
index b5324ce0e21..44fa10c3c2e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d.out
@@ -9,3 +9,4 @@ Labels:
   depth: 4 strata with value/size (0 (120), 1 (432), 2 (511), 3 (198))
   dim: 3 strata with value/size (3 (99), 2 (343), 1 (360))
   Cell Sets: 1 strata with value/size (1 (198))
+  celltype: 6 strata with value/size (5 (99), 8 (99), 0 (120), 2 (343), 3 (168), 1 (432))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d_s2t.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d_s2t.out
index b88fd4f6feb..2f5f7889301 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d_s2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_15_hyb3d_s2t.out
@@ -7,6 +7,7 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 693
 Labels:
   depth: 4 strata with value/size (0 (922), 1 (2484), 2 (2256), 3 (693))
+  celltype: 4 strata with value/size (6 (693), 0 (922), 3 (2256), 1 (2484))
   dim: 3 strata with value/size (3 (1485), 2 (2058), 1 (1080))
   Cell Sets: 1 strata with value/size (1 (2178))
 Mesh with 693 cells, shape condition numbers: min = 3.10265, max = 11.2622, mean = 6.51596, stddev = 2.39343
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface.out
index 0a56385dfc0..e810dcdab29 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (61), 1 (160), 2 (99))
   Cell Sets: 1 strata with value/size (1 (99))
+  celltype: 3 strata with value/size (2 (99), 0 (61), 1 (160))
 Mesh with 396 cells, shape condition numbers: min = 9.72788, max = 20.4571, mean = 13.4549, stddev = 2.59145
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded.out
index 1eddaa68ea6..155aa8328b4 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded.out
@@ -8,3 +8,4 @@ Simplicial Mesh in 3 dimensions:
 Labels:
   depth: 4 strata with value/size (0 (244), 1 (823), 2 (876), 3 (297))
   dim: 2 strata with value/size (1 (640), 2 (396))
+  celltype: 5 strata with value/size (8 (297), 0 (244), 2 (396), 3 (480), 1 (823))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded_s2t.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded_s2t.out
index fcf050bb65a..729572a51c5 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded_s2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_extruded_s2t.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 891 891 891 891
 Labels:
   depth: 4 strata with value/size (0 (1280), 1 (3428), 2 (3039), 3 (891))
+  celltype: 4 strata with value/size (6 (891), 0 (1280), 3 (3039), 1 (3428))
   dim: 2 strata with value/size (1 (1920), 2 (2376))
 Mesh with 3564 cells, shape condition numbers: min = 3.85845, max = 8.94689, mean = 5.76779, stddev = 0.908615
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_s2t.out b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_s2t.out
index 4913f0b8b15..d29ce1425f2 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_s2t.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_gmsh_16_spheresurface_s2t.out
@@ -6,5 +6,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 297 297 297 297
 Labels:
   depth: 3 strata with value/size (0 (320), 1 (617), 2 (297))
+  celltype: 3 strata with value/size (3 (297), 0 (320), 1 (617))
   Cell Sets: 1 strata with value/size (1 (693))
 Mesh with 1188 cells, shape condition numbers: min = 18.1354, max = 46.4679, mean = 28.3425, stddev = 5.86837
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_distribute_overlap.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_distribute_overlap.out
index ba0aaf8f68b..0ce4e7cd5e0 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_distribute_overlap.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_distribute_overlap.out
@@ -10,8 +10,9 @@ Labels:
   Face Sets: 3 strata with value/size (1 (13), 2 (9), 4 (5))
   _forest_base_subpoint_map: 28 strata with value/size (0 (9), 1 (33), 2 (49), 3 (6), 4 (23), 5 (6), 9 (1), 10 (1), 11 (1), 12 (1), 13 (1), 14 (1), 15 (1), 16 (1), 25 (3), 26 (3), 27 (7), 28 (3), 29 (8), 30 (9), 37 (3), 38 (2), 40 (6), 41 (5), 43 (8), 44 (2), 46 (7), 47 (2))
   depth: 3 strata with value/size (0 (57), 1 (106), 2 (39))
+  celltype: 3 strata with value/size (0 (57), 1 (106), 3 (39))
 DM Object: Simplicial Mesh 2 MPI processes
   type: plex
   Cell balance: 1.10 (max 33, min 30, empty 0)
-  Edge Cut: 10 (on node 1.000)
+  Edge Cut: 10 (on node 0.000)
 Mesh with 83 cells, shape condition numbers: min = 2., max = 2., mean = 2., stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_overlapsf.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_overlapsf.out
index b1a853c6282..62ef4a73151 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_overlapsf.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_bug_overlapsf.out
@@ -9,4 +9,5 @@ Labels:
   marker: 1 strata with value/size (1 (36))
   Face Sets: 5 strata with value/size (6 (1), 3 (2), 1 (2), 2 (2), 5 (1))
   depth: 4 strata with value/size (0 (17), 1 (32), 2 (15), 3 (2))
+  celltype: 4 strata with value/size (6 (2), 3 (15), 1 (32), 0 (17))
 Mesh with 11 cells, shape condition numbers: min = 3.67423, max = 3.67423, mean = 3.67423, stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic.out
index 12275eb143e..e9125ad1d1a 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic.out
@@ -9,4 +9,5 @@ Labels:
   Face Sets: 1 strata with value/size (0 (122))
   Cell Sets: 1 strata with value/size (0 (4380))
   depth: 3 strata with value/size (0 (1582), 1 (3432), 2 (1314))
+  celltype: 3 strata with value/size (3 (1314), 1 (3432), 0 (1582))
 Mesh with 1314 cells, shape condition numbers: min = 2.00002, max = 27.9568, mean = 2.62099, stddev = 1.96277
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic_3d.out
index 9a25ccaa0cc..996600c5f3e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_periodic_3d.out
@@ -6,7 +6,7 @@ Simplicial Mesh in 3 dimensions:
   2-cells: 30619
   3-cells: 7663
 Labels:
-  depth: 4 strata with value/size (0 (12615), 1 (37910), 2 (30619), 3 (7663))
-  Cell Sets: 2 strata with value/size (1 (42621), 2 (7384))
   Face Sets: 1 strata with value/size (1 (2326))
+  Cell Sets: 2 strata with value/size (1 (42621), 2 (7384))
+  depth: 4 strata with value/size (0 (12615), 1 (37910), 2 (30619), 3 (7663))
 Mesh with 7663 cells, shape condition numbers: min = 3.00803, max = 12.8931, mean = 4.8284, stddev = 1.35862
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d.out
index 74dc7c0d9e9..1cbfe16e6b6 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 64
 Labels:
   depth: 4 strata with value/size (0 (111), 1 (274), 2 (228), 3 (64))
+  celltype: 4 strata with value/size (6 (64), 3 (228), 1 (274), 0 (111))
 Mesh with 64 cells, shape condition numbers: min = 3.43191, max = 4.81894, mean = 3.83575, stddev = 0.333372
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d_hash.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d_hash.out
index ff34a539e04..e9819b92ae4 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d_hash.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_s2t_3d_hash.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 218
 Labels:
   depth: 4 strata with value/size (0 (424), 1 (1177), 2 (891), 3 (218))
+  celltype: 4 strata with value/size (6 (218), 3 (891), 1 (1177), 0 (424))
 Mesh with 218 cells, shape condition numbers: min = 3.35466, max = 4.81894, mean = 3.73687, stddev = 0.278858
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_surface.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_surface.out
index e9d9fb6609c..d85aaeacb42 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_surface.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_gmsh_surface.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 2 dimensions:
 Labels:
   Cell Sets: 1 strata with value/size (1 (10284))
   depth: 3 strata with value/size (0 (3680), 1 (7956), 2 (3078))
+  celltype: 3 strata with value/size (3 (3078), 1 (7956), 0 (3680))
 Mesh with 3078 cells, shape condition numbers: min = 18.1354, max = 101.203, mean = 52.6631, stddev = 15.5332
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_2d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_2d.out
index 9846aea2fb5..0e916d72b92 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_2d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_2d.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 2 dimensions:
 Labels:
   Cell Sets: 1 strata with value/size (1 (1485))
   depth: 3 strata with value/size (0 (563), 1 (1176), 2 (445))
+  celltype: 3 strata with value/size (3 (445), 1 (1176), 0 (563))
 Mesh with 445 cells, shape condition numbers: min = 2., max = 7.04333, mean = 2.33348, stddev = 0.637205
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_3d.out
index 769df1b509a..e8f4124ed19 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_hyb_3d.out
@@ -8,4 +8,5 @@ Simplicial Mesh in 3 dimensions:
 Labels:
   Cell Sets: 1 strata with value/size (1 (13750))
   depth: 4 strata with value/size (0 (4181), 1 (12235), 2 (9557), 3 (2338))
+  celltype: 4 strata with value/size (6 (2338), 3 (9557), 1 (12235), 0 (4181))
 Mesh with 2338 cells, shape condition numbers: min = 3.03738, max = 11.2622, mean = 6.07228, stddev = 2.35005
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_par_ovl_gmsh_periodic_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_par_ovl_gmsh_periodic_3d.out
index 8257b28e539..d640e2c2d10 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_par_ovl_gmsh_periodic_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_par_ovl_gmsh_periodic_3d.out
@@ -1 +1 @@
-Mesh with 34250 cells, shape condition numbers: min = 3.00803, max = 12.8931, mean = 4.84864, stddev = 1.37186
+Mesh with 34546 cells, shape condition numbers: min = 3.00325, max = 12.8931, mean = 4.88444, stddev = 1.40835
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic.out
index 09b383862a5..eadec131f93 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic.out
@@ -8,4 +8,5 @@ Labels:
   marker: 0 strata with value/size ()
   Face Sets: 0 strata with value/size ()
   depth: 3 strata with value/size (0 (129), 1 (276), 2 (111))
+  celltype: 3 strata with value/size (3 (111), 1 (276), 0 (129))
 Mesh with 111 cells, shape condition numbers: min = 2.26667, max = 2.26667, mean = 2.26667, stddev = 0.
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic_3d.out
index 57a284a02bc..5d1f18cb561 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_periodic_3d.out
@@ -9,4 +9,5 @@ Labels:
   marker: 1 strata with value/size (1 (662))
   Face Sets: 2 strata with value/size (1 (229), 2 (191))
   depth: 4 strata with value/size (0 (1324), 1 (3868), 2 (3144), 3 (830))
+  celltype: 4 strata with value/size (6 (830), 3 (3144), 1 (3868), 0 (1324))
 Mesh with 830 cells, shape condition numbers: min = 3.26811, max = 3.26811, mean = 3.26811, stddev = 4.34397e-07
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_p4est_s2t_bugfaces_3d.out b/src/dm/impls/plex/examples/tests/output/ex1_p4est_s2t_bugfaces_3d.out
index 24c547fb978..2ea50d06f22 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_p4est_s2t_bugfaces_3d.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_p4est_s2t_bugfaces_3d.out
@@ -8,4 +8,5 @@ Simplicial Mesh in 3 dimensions:
 Labels:
   marker: 1 strata with value/size (1 (80))
   depth: 4 strata with value/size (0 (51), 1 (116), 2 (90), 3 (24))
+  celltype: 4 strata with value/size (6 (24), 3 (90), 1 (116), 0 (51))
 Mesh with 24 cells, shape condition numbers: min = 3.55903, max = 5.90198, mean = 4.34301, stddev = 0.595366
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_part_parmetis_0.out b/src/dm/impls/plex/examples/tests/output/ex1_part_parmetis_0.out
index 9e3b7703b5c..ca7af51c58c 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_part_parmetis_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_part_parmetis_0.out
@@ -7,7 +7,8 @@
 Graph Partitioner: 2 MPI Processes
   type: parmetis
   edge cut: 2
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   ParMetis type: kway
   load imbalance ratio 1.05
   debug flag 0
@@ -15,7 +16,7 @@ Graph Partitioner: 2 MPI Processes
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
   Cell balance: 1.00 (max 8, min 8, empty 0)
-  Edge Cut: 4 (on node 1.000)
+  Edge Cut: 4 (on node 0.000)
 [0]Nv: 8
 [0]  1 3 9 [0-3)
 [0]  0 2 [3-5)
@@ -37,7 +38,8 @@ DM Object: Parallel Mesh 2 MPI processes
 Graph Partitioner: 2 MPI Processes
   type: parmetis
   edge cut: 4
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   ParMetis type: kway
   load imbalance ratio 1.05
   debug flag 0
@@ -45,7 +47,7 @@ Graph Partitioner: 2 MPI Processes
 DM Object: Parallel Mesh 2 MPI processes
   type: plex
   Cell balance: 1.00 (max 8, min 8, empty 0)
-  Edge Cut: 4 (on node 1.000)
+  Edge Cut: 4 (on node 0.000)
 DM Object: Simplicial Mesh 2 MPI processes
   type: plex
 Simplicial Mesh in 2 dimensions:
@@ -54,5 +56,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 8 8
 Labels:
   depth: 3 strata with value/size (0 (15), 1 (22), 2 (8))
+  celltype: 3 strata with value/size (0 (15), 1 (22), 3 (8))
   marker: 1 strata with value/size (1 (17))
   Face Sets: 3 strata with value/size (2 (3), 3 (6), 4 (3))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_0.out b/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_0.out
index e434ff7de72..9a2134ad710 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_0.out
@@ -7,7 +7,8 @@
 Graph Partitioner: 2 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy QUALITY
   using load imbalance ratio 0.01
 DM Object: Parallel Mesh 2 MPI processes
@@ -23,7 +24,8 @@ DM Object: Parallel Mesh 2 MPI processes
 Graph Partitioner: 2 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy QUALITY
   using load imbalance ratio 0.01
 DM Object: Parallel Mesh 2 MPI processes
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_1.out b/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_1.out
index 4490e635ae1..56000e1f475 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_part_ptscotch_1.out
@@ -1,6 +1,7 @@
 Graph Partitioner: 8 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy DEFAULT
   using load imbalance ratio 0.1
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-0.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-0.out
index 745250626ed..0a2568e13e3 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-0.out
@@ -5,5 +5,6 @@ Simplicial Mesh in 1 dimension:
   1-cells: 4 4
 Labels:
   depth: 2 strata with value/size (0 (5), 1 (4))
+  celltype: 2 strata with value/size (1 (4), 0 (5))
   marker: 1 strata with value/size (1 (1))
   Face Sets: 1 strata with value/size (1 (1))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-1.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-1.out
index 1d947f9a157..7e6e200cc87 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-1.out
@@ -6,5 +6,6 @@ Simplicial Mesh in 1 dimension:
   1-cells: 5 5
 Labels:
   depth: 2 strata with value/size (0 (6), 1 (5))
+  celltype: 2 strata with value/size (0 (6), 1 (5))
   marker: 1 strata with value/size (1 (1))
   Face Sets: 1 strata with value/size (1 (1))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-2.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-2.out
index 5484c6bf491..d1ff974e978 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-2.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_1d_overlap-2.out
@@ -6,5 +6,6 @@ Simplicial Mesh in 1 dimension:
   1-cells: 6 6
 Labels:
   depth: 2 strata with value/size (0 (7), 1 (6))
+  celltype: 2 strata with value/size (0 (7), 1 (6))
   marker: 1 strata with value/size (1 (1))
   Face Sets: 1 strata with value/size (1 (1))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-0.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-0.out
index 7f8d9146709..339e4c6114e 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-0.out
@@ -6,5 +6,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 16 16
 Labels:
   depth: 3 strata with value/size (0 (15), 1 (30), 2 (16))
+  celltype: 3 strata with value/size (2 (16), 0 (15), 1 (30))
   marker: 1 strata with value/size (1 (17))
   Face Sets: 1 strata with value/size (1 (12))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-1.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-1.out
index 6fc3094d020..d3109189dec 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-1.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 23 23
 Labels:
   depth: 3 strata with value/size (0 (19), 1 (41), 2 (23))
+  celltype: 3 strata with value/size (0 (19), 1 (41), 2 (23))
   marker: 1 strata with value/size (1 (21))
   Face Sets: 1 strata with value/size (1 (16))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-2.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-2.out
index 98b06d3150b..918bcfbc098 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-2.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-2_overlap-2.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 28 28
 Labels:
   depth: 3 strata with value/size (0 (22), 1 (49), 2 (28))
+  celltype: 3 strata with value/size (0 (22), 1 (49), 2 (28))
   marker: 1 strata with value/size (1 (25))
   Face Sets: 1 strata with value/size (1 (18))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-0.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-0.out
index c6ddec1cac7..e1e15fd858f 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-0.out
@@ -6,5 +6,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 4 4 4 4 4 4 4 4
 Labels:
   depth: 3 strata with value/size (0 (6), 1 (9), 2 (4))
+  celltype: 3 strata with value/size (2 (4), 0 (6), 1 (9))
   marker: 1 strata with value/size (1 (9))
   Face Sets: 1 strata with value/size (1 (6))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-1.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-1.out
index d0d430a28af..14380f22e24 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-1.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 9 14 16 14 14 9 16 14
 Labels:
   depth: 3 strata with value/size (0 (10), 1 (18), 2 (9))
+  celltype: 3 strata with value/size (0 (10), 1 (18), 2 (9))
   marker: 1 strata with value/size (1 (13))
   Face Sets: 1 strata with value/size (1 (10))
diff --git a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-2.out b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-2.out
index 925b754c2b2..03678260f14 100644
--- a/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-2.out
+++ b/src/dm/impls/plex/examples/tests/output/ex1_refine_overlap_2d_nsize-8_overlap-2.out
@@ -7,5 +7,6 @@ Simplicial Mesh in 2 dimensions:
   2-cells: 16 28 28 28 28 16 28 28
 Labels:
   depth: 3 strata with value/size (0 (15), 1 (30), 2 (16))
+  celltype: 3 strata with value/size (0 (15), 1 (30), 2 (16))
   marker: 1 strata with value/size (1 (17))
   Face Sets: 1 strata with value/size (1 (12))
diff --git a/src/dm/impls/plex/examples/tests/output/ex20_3d_tetgen.out b/src/dm/impls/plex/examples/tests/output/ex20_3d_tetgen.out
index dea6f0e8988..91f12b7fcd9 100644
--- a/src/dm/impls/plex/examples/tests/output/ex20_3d_tetgen.out
+++ b/src/dm/impls/plex/examples/tests/output/ex20_3d_tetgen.out
@@ -1,9 +1,8 @@
 DM Object: Pre Adaptation Mesh 1 MPI processes
   type: plex
-Mesh 'Pre Adaptation Mesh':
-orientation is missing
-cap --> base:
-[0] Max sizes cone: 4 support: 14
+Pre Adaptation Mesh in 3 dimensions:
+Supports:
+[0] Max support size: 14
 [0]: 162 ----> 682
 [0]: 162 ----> 683
 [0]: 162 ----> 686
@@ -2344,7 +2343,8 @@ cap --> base:
 [0]: 881 ----> 580
 [0]: 882 ----> 596
 [0]: 882 ----> 597
-base <-- cap:
+Cones:
+[0] Max cone size: 4
 [0]: 0 <---- 226 (0)
 [0]: 0 <---- 227 (0)
 [0]: 0 <---- 228 (0)
@@ -4774,12 +4774,8 @@ Label 'marker':
 [0]: 180 (1)
 [0]: 181 (1)
 [0]: 182 (1)
-[0]: 183 (1)
-[0]: 184 (1)
 [0]: 185 (1)
 [0]: 186 (1)
-[0]: 187 (1)
-[0]: 188 (1)
 [0]: 189 (1)
 [0]: 190 (1)
 [0]: 191 (1)
@@ -4790,12 +4786,8 @@ Label 'marker':
 [0]: 196 (1)
 [0]: 197 (1)
 [0]: 198 (1)
-[0]: 199 (1)
-[0]: 200 (1)
 [0]: 201 (1)
 [0]: 202 (1)
-[0]: 203 (1)
-[0]: 204 (1)
 [0]: 205 (1)
 [0]: 206 (1)
 [0]: 207 (1)
@@ -4927,10 +4919,9 @@ Label 'marker':
 [0]: 603 (1)
 DM Object: Post Adaptation Mesh 1 MPI processes
   type: plex
-Mesh 'Post Adaptation Mesh':
-orientation is missing
-cap --> base:
-[0] Max sizes cone: 4 support: 16
+Post Adaptation Mesh in 3 dimensions:
+Supports:
+[0] Max support size: 16
 [0]: 191 ----> 792
 [0]: 191 ----> 793
 [0]: 191 ----> 796
@@ -7677,7 +7668,8 @@ cap --> base:
 [0]: 1032 ----> 705
 [0]: 1032 ----> 706
 [0]: 1032 ----> 707
-base <-- cap:
+Cones:
+[0] Max cone size: 4
 [0]: 0 <---- 264 (0)
 [0]: 0 <---- 265 (0)
 [0]: 0 <---- 266 (0)
@@ -10522,12 +10514,8 @@ Label 'marker':
 [0]: 209 (1)
 [0]: 210 (1)
 [0]: 211 (1)
-[0]: 212 (1)
-[0]: 213 (1)
 [0]: 214 (1)
 [0]: 215 (1)
-[0]: 216 (1)
-[0]: 217 (1)
 [0]: 218 (1)
 [0]: 219 (1)
 [0]: 220 (1)
@@ -10538,12 +10526,8 @@ Label 'marker':
 [0]: 225 (1)
 [0]: 226 (1)
 [0]: 227 (1)
-[0]: 228 (1)
-[0]: 229 (1)
 [0]: 230 (1)
 [0]: 231 (1)
-[0]: 232 (1)
-[0]: 233 (1)
 [0]: 234 (1)
 [0]: 235 (1)
 [0]: 236 (1)
@@ -10565,3 +10549,135 @@ Label 'marker':
 [0]: 252 (1)
 [0]: 253 (1)
 [0]: 254 (1)
+[0]: 255 (1)
+[0]: 256 (1)
+[0]: 257 (1)
+[0]: 258 (1)
+[0]: 259 (1)
+[0]: 260 (1)
+[0]: 261 (1)
+[0]: 262 (1)
+[0]: 264 (1)
+[0]: 279 (1)
+[0]: 282 (1)
+[0]: 287 (1)
+[0]: 300 (1)
+[0]: 307 (1)
+[0]: 309 (1)
+[0]: 323 (1)
+[0]: 325 (1)
+[0]: 336 (1)
+[0]: 338 (1)
+[0]: 342 (1)
+[0]: 345 (1)
+[0]: 359 (1)
+[0]: 360 (1)
+[0]: 361 (1)
+[0]: 370 (1)
+[0]: 371 (1)
+[0]: 372 (1)
+[0]: 377 (1)
+[0]: 379 (1)
+[0]: 382 (1)
+[0]: 383 (1)
+[0]: 386 (1)
+[0]: 391 (1)
+[0]: 393 (1)
+[0]: 394 (1)
+[0]: 396 (1)
+[0]: 404 (1)
+[0]: 412 (1)
+[0]: 418 (1)
+[0]: 424 (1)
+[0]: 425 (1)
+[0]: 426 (1)
+[0]: 427 (1)
+[0]: 431 (1)
+[0]: 436 (1)
+[0]: 437 (1)
+[0]: 438 (1)
+[0]: 444 (1)
+[0]: 447 (1)
+[0]: 449 (1)
+[0]: 454 (1)
+[0]: 457 (1)
+[0]: 459 (1)
+[0]: 461 (1)
+[0]: 462 (1)
+[0]: 464 (1)
+[0]: 465 (1)
+[0]: 470 (1)
+[0]: 472 (1)
+[0]: 474 (1)
+[0]: 475 (1)
+[0]: 476 (1)
+[0]: 477 (1)
+[0]: 481 (1)
+[0]: 484 (1)
+[0]: 487 (1)
+[0]: 505 (1)
+[0]: 506 (1)
+[0]: 508 (1)
+[0]: 513 (1)
+[0]: 516 (1)
+[0]: 518 (1)
+[0]: 519 (1)
+[0]: 521 (1)
+[0]: 523 (1)
+[0]: 526 (1)
+[0]: 531 (1)
+[0]: 536 (1)
+[0]: 538 (1)
+[0]: 544 (1)
+[0]: 545 (1)
+[0]: 548 (1)
+[0]: 549 (1)
+[0]: 553 (1)
+[0]: 557 (1)
+[0]: 563 (1)
+[0]: 566 (1)
+[0]: 568 (1)
+[0]: 581 (1)
+[0]: 583 (1)
+[0]: 586 (1)
+[0]: 587 (1)
+[0]: 588 (1)
+[0]: 599 (1)
+[0]: 607 (1)
+[0]: 611 (1)
+[0]: 613 (1)
+[0]: 615 (1)
+[0]: 620 (1)
+[0]: 624 (1)
+[0]: 626 (1)
+[0]: 627 (1)
+[0]: 628 (1)
+[0]: 631 (1)
+[0]: 632 (1)
+[0]: 633 (1)
+[0]: 637 (1)
+[0]: 641 (1)
+[0]: 642 (1)
+[0]: 649 (1)
+[0]: 650 (1)
+[0]: 651 (1)
+[0]: 652 (1)
+[0]: 653 (1)
+[0]: 657 (1)
+[0]: 658 (1)
+[0]: 659 (1)
+[0]: 661 (1)
+[0]: 663 (1)
+[0]: 665 (1)
+[0]: 667 (1)
+[0]: 668 (1)
+[0]: 670 (1)
+[0]: 676 (1)
+[0]: 678 (1)
+[0]: 679 (1)
+[0]: 681 (1)
+[0]: 682 (1)
+[0]: 684 (1)
+[0]: 688 (1)
+[0]: 700 (1)
+[0]: 703 (1)
diff --git a/src/dm/impls/plex/examples/tests/output/ex23_2.out b/src/dm/impls/plex/examples/tests/output/ex23_2.out
new file mode 100644
index 00000000000..01c456dc1e9
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex23_2.out
@@ -0,0 +1,414 @@
+Vec Object: Function Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Function Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Input Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+-2.46519e-32
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Function Volumetric Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Function Volumetric Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Input Volumetric Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Volumetric Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+0.
+1.
+-2.46519e-32
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Input Volumetric Auxiliary Update with Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+0.
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local Field Volumetric Auxiliary Update with Volumetric Primary 1 MPI processes
+  type: seq
+0.
+1.
+-2.46519e-32
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Function Subdomain Primary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Function Subdomain Primary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Input Subdomain Primary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Subdomain Primary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Function Subdomain Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Function Subdomain Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Input Subdomain Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Subdomain Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Function Subdomain Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Function Subdomain Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Input Subdomain Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Subdomain Primary and Volumetric Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Function Volumetric Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Function Volumetric Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Input Volumetric Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
+Vec Object: Local Field Volumetric Primary and Subdomain Auxiliary 1 MPI processes
+  type: seq
+2.
+3.
+1.
+2.
+3.
+1.
+4.
+5.
+2.
+1.
+1.5
+1.5
diff --git a/src/dm/impls/plex/examples/tests/output/ex23_mf_0.out b/src/dm/impls/plex/examples/tests/output/ex23_mf_0.out
new file mode 100644
index 00000000000..3738b7baec0
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex23_mf_0.out
@@ -0,0 +1,29 @@
+Vec Object: Local MultiField Input Volumetric Primary 1 MPI processes
+  type: seq
+0.
+0.
+0.
+1.
+5.55112e-17
+1.
+1.11022e-16
+1.
+1.
+1.
+1.
+2.
+0.5
+0.5
+1.
+1.5
+1.5
+Vec Object: Local MultiField Volumetric Primary 1 MPI processes
+  type: seq
+2.
+6.07716e-64
+2.
+1.
+2.
+1.
+2.
+4.
diff --git a/src/dm/impls/plex/examples/tests/output/ex24_3.out b/src/dm/impls/plex/examples/tests/output/ex24_3.out
index 6740a8f9f99..2978ea85d2a 100644
--- a/src/dm/impls/plex/examples/tests/output/ex24_3.out
+++ b/src/dm/impls/plex/examples/tests/output/ex24_3.out
@@ -1,60 +1,72 @@
 Graph Partitioner: 4 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy DEFAULT
   using load imbalance ratio 0.01
 Graph Partitioner: 4 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: (p2_) 4 MPI processes
     type: ptscotch
+    Using vertex weights
       Strategy=Default behavior
       Load imbalance ratio=0.01
 Graph Partitioner: 4 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy DEFAULT
   using load imbalance ratio 0.01
 Graph Partitioner: 4 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: (p2_) 4 MPI processes
     type: ptscotch
+    Using vertex weights
       Strategy=Default behavior
       Load imbalance ratio=0.01
 Graph Partitioner: 4 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy DEFAULT
   using load imbalance ratio 0.01
 Graph Partitioner: 4 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: (dp2_) 4 MPI processes
     type: ptscotch
+    Using vertex weights
       Strategy=Default behavior
       Load imbalance ratio=0.01
 Graph Partitioner: 4 MPI Processes
   type: ptscotch
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
   using partitioning strategy DEFAULT
   using load imbalance ratio 0.01
 Graph Partitioner: 4 MPI Processes
   type: matpartitioning
   edge cut: 0
-  balance:  0
+  balance: 0
+  use vertex weights: 1
 MatPartitioning Graph Partitioner:
   MatPartitioning Object: (dp2_) 4 MPI processes
     type: ptscotch
+    Using vertex weights
       Strategy=Default behavior
       Load imbalance ratio=0.01
diff --git a/src/dm/impls/plex/examples/tests/output/ex29_0.out b/src/dm/impls/plex/examples/tests/output/ex29_0.out
index 77bb31a4e04..dc01d7d09a5 100644
--- a/src/dm/impls/plex/examples/tests/output/ex29_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex29_0.out
@@ -7,4 +7,5 @@ Simplicial Mesh in 3 dimensions:
   3-cells: 384
 Labels:
   depth: 4 strata with value/size (0 (125), 1 (604), 2 (864), 3 (384))
+  celltype: 4 strata with value/size (5 (384), 0 (125), 2 (864), 1 (604))
   marker: 1 strata with value/size (1 (452))
diff --git a/src/dm/impls/plex/examples/tests/output/ex29_1.out b/src/dm/impls/plex/examples/tests/output/ex29_1.out
index f057d1db318..d16191d0f85 100644
--- a/src/dm/impls/plex/examples/tests/output/ex29_1.out
+++ b/src/dm/impls/plex/examples/tests/output/ex29_1.out
@@ -7,5 +7,6 @@ Tensor Product Mesh in 3 dimensions:
   3-cells: 64
 Labels:
   depth: 4 strata with value/size (0 (125), 1 (300), 2 (240), 3 (64))
+  celltype: 4 strata with value/size (6 (64), 0 (125), 3 (240), 1 (300))
   marker: 1 strata with value/size (1 (378))
   Face Sets: 6 strata with value/size (6 (49), 5 (49), 3 (49), 4 (49), 1 (49), 2 (49))
diff --git a/src/dm/impls/plex/examples/tests/output/ex29_quad_0.out b/src/dm/impls/plex/examples/tests/output/ex29_quad_0.out
index a4828a76c1f..5ad7a81f0ca 100644
--- a/src/dm/impls/plex/examples/tests/output/ex29_quad_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex29_quad_0.out
@@ -7,6 +7,7 @@ Tensor Product Mesh in 3 dimensions:
   3-cells: 64 0
 Labels:
   depth: 4 strata with value/size (0 (125), 1 (300), 2 (240), 3 (64))
+  celltype: 4 strata with value/size (6 (64), 0 (125), 3 (240), 1 (300))
   marker: 1 strata with value/size (1 (378))
   Face Sets: 6 strata with value/size (1 (49), 2 (49), 3 (49), 4 (49), 5 (49), 6 (49))
 DM Object: Parallel Mesh 2 MPI processes
@@ -18,5 +19,6 @@ Parallel Mesh in 3 dimensions:
   3-cells: 32 32
 Labels:
   depth: 4 strata with value/size (0 (75), 1 (170), 2 (128), 3 (32))
+  celltype: 4 strata with value/size (0 (75), 1 (170), 3 (128), 6 (32))
   marker: 1 strata with value/size (1 (205))
   Face Sets: 5 strata with value/size (1 (49), 3 (28), 4 (28), 5 (28), 6 (28))
diff --git a/src/dm/impls/plex/examples/tests/output/ex2f90_0.out b/src/dm/impls/plex/examples/tests/output/ex2f90_0.out
index 4a87e579798..3506448c52d 100644
--- a/src/dm/impls/plex/examples/tests/output/ex2f90_0.out
+++ b/src/dm/impls/plex/examples/tests/output/ex2f90_0.out
@@ -6,8 +6,15 @@ Mesh in 2 dimensions:
   2-cells: 2
 Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
- Join of           6           7 is           0
- Join of           9           7 is           1
- Full Join of           3           4           5 is           1
- Meet of           0           1 is           7
- Meet of           6           7 is           3
+nClosure    0   0   6   0   7   0   8   0   2   0   3   0   4   0
+nClosure    1   0   7   0   9   0  10   0   3   0   4   0   5   0
+Join of    6    7
+  is    0
+Join of    9    7
+  is    1
+Full Join of    3    4    5
+  is    1
+Meet of    0    1
+  is    7
+Meet of    6    7
+  is    3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-1.out
new file mode 100644
index 00000000000..58a7531a7a5
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-1.out
@@ -0,0 +1,67 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-2.out
new file mode 100644
index 00000000000..b15c4a889e8
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-2.out
@@ -0,0 +1,70 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: general
+Number of indices in set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 2
+1 3
+2 0
+3 1
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 2
+1 3
+2 0
+3 1
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-3.out
new file mode 100644
index 00000000000..4ef33fb4773
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-1_nparts-3.out
@@ -0,0 +1,73 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: general
+Number of indices in set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-1.out
new file mode 100644
index 00000000000..8af1cfde374
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-1.out
@@ -0,0 +1,79 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-2.out
new file mode 100644
index 00000000000..d752ace2cff
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-2.out
@@ -0,0 +1,85 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 4
+[1] 0 2
+[1] 1 3
+[1] 2 0
+[1] 3 1
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 2
+[0] 1 3
+[0] 2 0
+[0] 3 1
+[1] Number of indices in set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-3.out
new file mode 100644
index 00000000000..48b219fb6de
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-2_nparts-3.out
@@ -0,0 +1,91 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-1.out
new file mode 100644
index 00000000000..809cdd3e66c
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-1.out
@@ -0,0 +1,99 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-2.out
new file mode 100644
index 00000000000..38f10635aa5
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-2.out
@@ -0,0 +1,108 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 2
+[2] 1 3
+[2] 2 0
+[2] 3 1
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 2
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+  (   1) dim  0 offset   4
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  4 offset   0
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-3.out
new file mode 100644
index 00000000000..3f55fda6134
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_parmetis_nsize-3_nparts-3.out
@@ -0,0 +1,117 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: parmetis
+  edge cut: 3
+  balance: 0
+  use vertex weights: 1
+  ParMetis type: kway
+  load imbalance ratio 1.05
+  debug flag 0
+  random seed -1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  1 offset   0
+  (   1) dim  2 offset   1
+  (   2) dim  1 offset   3
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  0 offset   2
+  (   2) dim  2 offset   2
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 3
+[0] 1 1
+[0] 2 2
+[0] 3 0
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-1.out
new file mode 100644
index 00000000000..bf031aa7463
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-1.out
@@ -0,0 +1,61 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-2.out
new file mode 100644
index 00000000000..9f64e059669
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-2.out
@@ -0,0 +1,64 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: general
+Number of indices in set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 1
+1 2
+2 0
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 0
+1 3
+2 1
+3 2
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-3.out
new file mode 100644
index 00000000000..771492f1f06
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-1_nparts-3.out
@@ -0,0 +1,67 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: general
+Number of indices in set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  1 offset   0
+  (   1) dim  1 offset   1
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 2
+1 3
+2 0
+3 1
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  1 offset   0
+  (   1) dim  1 offset   1
+  (   2) dim  2 offset   2
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: general
+Number of indices in set 4
+0 2
+1 3
+2 0
+3 1
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-1.out
new file mode 100644
index 00000000000..59e37634d18
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-1.out
@@ -0,0 +1,73 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-2.out
new file mode 100644
index 00000000000..06d19ad66ac
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-2.out
@@ -0,0 +1,79 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 4
+[1] 0 1
+[1] 1 2
+[1] 2 0
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 1
+[0] 1 2
+[0] 2 0
+[0] 3 3
+[1] Number of indices in set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-3.out
new file mode 100644
index 00000000000..7ede4460dd9
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-2_nparts-3.out
@@ -0,0 +1,85 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  1 offset   0
+  (   1) dim  1 offset   1
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 4
+[1] 0 2
+[1] 1 3
+[1] 2 0
+[1] 3 1
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  1 offset   0
+  (   1) dim  1 offset   1
+  (   2) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 2
+[0] 1 3
+[0] 2 0
+[0] 3 1
+[1] Number of indices in set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-1.out
new file mode 100644
index 00000000000..3b42f64aaec
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-1.out
@@ -0,0 +1,93 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-2.out
new file mode 100644
index 00000000000..5d328e67983
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-2.out
@@ -0,0 +1,102 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 1
+[2] 1 2
+[2] 2 0
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  1 offset   0
+  (   1) dim  3 offset   1
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  3 offset   0
+  (   1) dim  1 offset   3
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 3
+[0] 1 0
+[0] 2 1
+[0] 3 2
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-3.out
new file mode 100644
index 00000000000..84ed9a7b1cd
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_ptscotch_nsize-3_nparts-3.out
@@ -0,0 +1,111 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  1 offset   0
+  (   1) dim  1 offset   1
+  (   2) dim  2 offset   2
+IS Object: SEQ PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 0
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 2
+[2] 1 3
+[2] 2 0
+[2] 3 1
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: ptscotch
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+  using partitioning strategy DEFAULT
+  using load imbalance ratio 0.01
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: general
+[0] Number of indices in set 4
+[0] 0 2
+[0] 1 3
+[0] 2 0
+[0] 3 1
+[1] Number of indices in set 0
+[2] Number of indices in set 4
+[2] 0 1
+[2] 1 2
+[2] 2 0
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-1.out
new file mode 100644
index 00000000000..47cd7b6b2a0
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-1.out
@@ -0,0 +1,55 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-2.out
new file mode 100644
index 00000000000..0e044721312
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-2.out
@@ -0,0 +1,58 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-3.out
new file mode 100644
index 00000000000..2f72a8df766
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-1_nparts-3.out
@@ -0,0 +1,61 @@
+[0]Nv: 0
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 0
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+IS Object: SEQ PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+Graph Partitioner: 1 MPI Process
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 1 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+IS Object: PARVOID PARTITION 1 MPI processes
+  type: stride
+Number of indices in (stride) set 4
+0 0
+1 1
+2 2
+3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-1.out
new file mode 100644
index 00000000000..5dbd7039a89
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-1.out
@@ -0,0 +1,67 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-2.out
new file mode 100644
index 00000000000..531120b1f0f
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-2.out
@@ -0,0 +1,73 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-3.out
new file mode 100644
index 00000000000..5b49bec16d0
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-2_nparts-3.out
@@ -0,0 +1,79 @@
+[0]Nv: 0
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 4
+[1]  3 1 [0-2)
+[1]  0 2 [2-4)
+[1]  1 3 [4-6)
+[1]  2 0 [6-8)
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+IS Object: SEQ PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 4
+[1] 0 0
+[1] 1 1
+[1] 2 2
+[1] 3 3
+[0]Nv: 4
+[0]  3 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 0 [6-8)
+[1]Nv: 0
+Graph Partitioner: 2 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 2 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: PARVOID PARTITION 2 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-1.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-1.out
new file mode 100644
index 00000000000..a44d101883b
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-1.out
@@ -0,0 +1,87 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: SEQ PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+Process 2:
+  (   0) dim  4 offset   0
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-2.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-2.out
new file mode 100644
index 00000000000..7e242962601
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-2.out
@@ -0,0 +1,96 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  2 offset   2
+IS Object: SEQ PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  4 offset   0
+  (   1) dim  0 offset   4
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  4 offset   0
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-3.out b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-3.out
new file mode 100644
index 00000000000..743588a01f0
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex33_simple_nsize-3_nparts-3.out
@@ -0,0 +1,105 @@
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 0
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: NULL SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+IS Object: NULL PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 0
+[0]Nv: 0
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 1 [0-2)
+[2]  0 2 [2-4)
+[2]  1 3 [4-6)
+[2]  2 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: SEQ SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  2 offset   0
+  (   1) dim  1 offset   2
+  (   2) dim  1 offset   3
+IS Object: SEQ PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 0
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
+[0]Nv: 4
+[0]  7 1 [0-2)
+[0]  0 2 [2-4)
+[0]  1 3 [4-6)
+[0]  2 4 [6-8)
+[1]Nv: 0
+[2]Nv: 4
+[2]  3 5 [0-2)
+[2]  4 6 [2-4)
+[2]  5 7 [4-6)
+[2]  6 0 [6-8)
+Graph Partitioner: 3 MPI Processes
+  type: simple
+  edge cut: 0
+  balance: 0
+  use vertex weights: 1
+PetscSection Object: PARVOID SECTION 3 MPI processes
+  type not yet set
+Process 0:
+  (   0) dim  3 offset   0
+  (   1) dim  1 offset   3
+  (   2) dim  0 offset   4
+Process 1:
+  (   0) dim  0 offset   0
+  (   1) dim  0 offset   0
+  (   2) dim  0 offset   0
+Process 2:
+  (   0) dim  0 offset   0
+  (   1) dim  2 offset   0
+  (   2) dim  2 offset   2
+IS Object: PARVOID PARTITION 3 MPI processes
+  type: stride
+[0] Number of indices in (stride) set 4
+[0] 0 0
+[0] 1 1
+[0] 2 2
+[0] 3 3
+[1] Number of indices in (stride) set 0
+[2] Number of indices in (stride) set 4
+[2] 0 0
+[2] 1 1
+[2] 2 2
+[2] 3 3
diff --git a/src/dm/impls/plex/examples/tests/output/ex37_sphere_0.out b/src/dm/impls/plex/examples/tests/output/ex37_sphere_0.out
new file mode 100644
index 00000000000..362175ad2e2
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex37_sphere_0.out
@@ -0,0 +1,509 @@
+ Number of BODIES (nbodies): 1 
+   Number of SHELLS: 1 
+   Number of FACES: 8 
+   Number of LOOPS: 8 
+   Number of EDGES: 20 
+   Number of NODES: 6 
+          LOOP ID: 1
+            EDGE ID: 1
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 2
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 3
+ Range = 1.570796, 3.141593, 0.000000, 0.000000 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+            EDGE ID: 4
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+          LOOP ID: 2
+            EDGE ID: 5
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 4
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 6
+ Range = 3.141593, 4.712389, 0.000000, 0.000000 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+            EDGE ID: 7
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+          LOOP ID: 3
+            EDGE ID: 8
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 7
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 9
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+            EDGE ID: 10
+ Range = 1.570796, 3.141593, 0.000000, 0.000000 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+          LOOP ID: 4
+            EDGE ID: 9
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+            EDGE ID: 11
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+            EDGE ID: 12
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+            EDGE ID: 13
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+          LOOP ID: 5
+            EDGE ID: 3
+ Range = 1.570796, 3.141593, 0.000000, 0.000000 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+            EDGE ID: 14
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+            EDGE ID: 15
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+            EDGE ID: 16
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+          LOOP ID: 6
+            EDGE ID: 6
+ Range = 3.141593, 4.712389, 0.000000, 0.000000 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+            EDGE ID: 16
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 3 
+                 (x, y, z) = (-0.025400, 0.000000, 0.000000) 
+            EDGE ID: 17
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+            EDGE ID: 11
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 4 
+                 (x, y, z) = (-0.000000, -0.025400, 0.000000) 
+          LOOP ID: 7
+            EDGE ID: 18
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+            EDGE ID: 13
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+            EDGE ID: 19
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+            EDGE ID: 14
+ Range = 4.712389, 6.283185, 0.000000, 0.000000 
+              NODE ID: 6 
+                 (x, y, z) = (0.000000, 0.000000, -0.025400) 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+          LOOP ID: 8
+            EDGE ID: 20
+ Range = 0.000000, 0.000000, 0.000000, 0.000000 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 10
+ Range = 1.570796, 3.141593, 0.000000, 0.000000 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+            EDGE ID: 18
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 5 
+                 (x, y, z) = (0.025400, 0.000000, 0.000000) 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+            EDGE ID: 2
+ Range = 0.000000, 1.570796, 0.000000, 0.000000 
+              NODE ID: 2 
+                 (x, y, z) = (0.000000, 0.025400, 0.000000) 
+              NODE ID: 1 
+                 (x, y, z) = (0.000000, 0.000000, 0.025400) 
+
+PLEX Input Array Checkouts
+ Total Number of Unique Cells    = 8 
+ Total Number of Unique Vertices = 6 
+    Node ID = 1 
+      (x,y,z) = (0.000000, 0.000000, 0.025400) 
+ 
+    Node ID = 2 
+      (x,y,z) = (0.000000, 0.025400, 0.000000) 
+ 
+    Node ID = 3 
+      (x,y,z) = (-0.025400, 0.000000, 0.000000) 
+ 
+    Node ID = 4 
+      (x,y,z) = (-0.000000, -0.025400, 0.000000) 
+ 
+    Node ID = 5 
+      (x,y,z) = (0.025400, 0.000000, 0.000000) 
+ 
+    Node ID = 6 
+      (x,y,z) = (0.000000, 0.000000, -0.025400) 
+ 
+    LOOP ID: 1 
+      EDGE ID: 1 
+      EDGE ID: 2 
+        EGDE 2 is DEGENERATE 
+      EDGE ID: 3 
+      EDGE ID: 4 
+      LOOP Corner NODEs (0, 1, 2)
+    LOOP ID: 2 
+      EDGE ID: 5 
+      EDGE ID: 4 
+        EGDE 4 is DEGENERATE 
+      EDGE ID: 6 
+      EDGE ID: 7 
+      LOOP Corner NODEs (0, 2, 3)
+    LOOP ID: 3 
+      EDGE ID: 8 
+      EDGE ID: 7 
+        EGDE 7 is DEGENERATE 
+      EDGE ID: 9 
+      EDGE ID: 10 
+      LOOP Corner NODEs (0, 3, 4)
+    LOOP ID: 4 
+      EDGE ID: 9 
+      EDGE ID: 11 
+      EDGE ID: 12 
+      EDGE ID: 13 
+        EGDE 13 is DEGENERATE 
+      LOOP Corner NODEs (3, 4, 5)
+    LOOP ID: 5 
+      EDGE ID: 3 
+      EDGE ID: 14 
+      EDGE ID: 15 
+      EDGE ID: 16 
+        EGDE 16 is DEGENERATE 
+      LOOP Corner NODEs (1, 2, 5)
+    LOOP ID: 6 
+      EDGE ID: 6 
+      EDGE ID: 16 
+      EDGE ID: 17 
+      EDGE ID: 11 
+        EGDE 11 is DEGENERATE 
+      LOOP Corner NODEs (2, 3, 5)
+    LOOP ID: 7 
+      EDGE ID: 18 
+      EDGE ID: 13 
+      EDGE ID: 19 
+      EDGE ID: 14 
+        EGDE 14 is DEGENERATE 
+      LOOP Corner NODEs (4, 1, 5)
+    LOOP ID: 8 
+      EDGE ID: 20 
+      EDGE ID: 10 
+        EGDE 10 is DEGENERATE 
+      EDGE ID: 18 
+      EDGE ID: 2 
+      LOOP Corner NODEs (0, 4, 1)
+Label 'EGADS Body ID':
+[0]: 0 (0)
+[0]: 1 (0)
+[0]: 2 (0)
+[0]: 3 (0)
+[0]: 4 (0)
+[0]: 5 (0)
+[0]: 6 (0)
+[0]: 7 (0)
+[0]: 8 (0)
+[0]: 9 (0)
+[0]: 10 (0)
+[0]: 11 (0)
+[0]: 12 (0)
+[0]: 13 (0)
+[0]: 14 (0)
+[0]: 15 (0)
+[0]: 16 (0)
+[0]: 17 (0)
+[0]: 18 (0)
+[0]: 19 (0)
+[0]: 20 (0)
+[0]: 21 (0)
+[0]: 22 (0)
+[0]: 23 (0)
+[0]: 24 (0)
+[0]: 25 (0)
+Label 'EGADS Face ID':
+[0]: 0 (1)
+[0]: 8 (1)
+[0]: 9 (1)
+[0]: 10 (1)
+[0]: 14 (1)
+[0]: 15 (1)
+[0]: 16 (1)
+[0]: 1 (2)
+[0]: 8 (2)
+[0]: 10 (2)
+[0]: 11 (2)
+[0]: 16 (2)
+[0]: 17 (2)
+[0]: 18 (2)
+[0]: 2 (3)
+[0]: 8 (3)
+[0]: 11 (3)
+[0]: 12 (3)
+[0]: 18 (3)
+[0]: 19 (3)
+[0]: 20 (3)
+[0]: 3 (4)
+[0]: 11 (4)
+[0]: 12 (4)
+[0]: 13 (4)
+[0]: 19 (4)
+[0]: 21 (4)
+[0]: 22 (4)
+[0]: 4 (5)
+[0]: 9 (5)
+[0]: 10 (5)
+[0]: 13 (5)
+[0]: 15 (5)
+[0]: 23 (5)
+[0]: 24 (5)
+[0]: 5 (6)
+[0]: 10 (6)
+[0]: 11 (6)
+[0]: 13 (6)
+[0]: 17 (6)
+[0]: 22 (6)
+[0]: 23 (6)
+[0]: 6 (7)
+[0]: 9 (7)
+[0]: 12 (7)
+[0]: 13 (7)
+[0]: 21 (7)
+[0]: 24 (7)
+[0]: 25 (7)
+[0]: 7 (8)
+[0]: 8 (8)
+[0]: 9 (8)
+[0]: 12 (8)
+[0]: 14 (8)
+[0]: 20 (8)
+[0]: 25 (8)
+Label 'EGADS Edge ID':
+[0]: 8 (1)
+[0]: 8 (2)
+[0]: 9 (2)
+[0]: 14 (2)
+[0]: 9 (3)
+[0]: 10 (3)
+[0]: 15 (3)
+[0]: 8 (4)
+[0]: 10 (4)
+[0]: 16 (4)
+[0]: 8 (5)
+[0]: 10 (6)
+[0]: 11 (6)
+[0]: 17 (6)
+[0]: 8 (7)
+[0]: 11 (7)
+[0]: 18 (7)
+[0]: 8 (8)
+[0]: 11 (9)
+[0]: 12 (9)
+[0]: 19 (9)
+[0]: 8 (10)
+[0]: 12 (10)
+[0]: 20 (10)
+[0]: 11 (11)
+[0]: 13 (11)
+[0]: 22 (11)
+[0]: 13 (12)
+[0]: 12 (13)
+[0]: 13 (13)
+[0]: 21 (13)
+[0]: 9 (14)
+[0]: 13 (14)
+[0]: 24 (14)
+[0]: 13 (15)
+[0]: 10 (16)
+[0]: 13 (16)
+[0]: 23 (16)
+[0]: 13 (17)
+[0]: 9 (18)
+[0]: 12 (18)
+[0]: 25 (18)
+[0]: 13 (19)
+[0]: 8 (20)
+DM Object: DM_0x84000000_0 1 MPI processes
+  type: plex
+DM_0x84000000_0 in 2 dimensions:
+Supports:
+[0] Max support size: 4
+[0]: 8 ----> 14
+[0]: 8 ----> 16
+[0]: 8 ----> 18
+[0]: 8 ----> 20
+[0]: 9 ----> 14
+[0]: 9 ----> 15
+[0]: 9 ----> 24
+[0]: 9 ----> 25
+[0]: 10 ----> 15
+[0]: 10 ----> 16
+[0]: 10 ----> 17
+[0]: 10 ----> 23
+[0]: 11 ----> 17
+[0]: 11 ----> 18
+[0]: 11 ----> 19
+[0]: 11 ----> 22
+[0]: 12 ----> 19
+[0]: 12 ----> 20
+[0]: 12 ----> 21
+[0]: 12 ----> 25
+[0]: 13 ----> 21
+[0]: 13 ----> 22
+[0]: 13 ----> 23
+[0]: 13 ----> 24
+[0]: 14 ----> 0
+[0]: 14 ----> 7
+[0]: 15 ----> 0
+[0]: 15 ----> 4
+[0]: 16 ----> 0
+[0]: 16 ----> 1
+[0]: 17 ----> 1
+[0]: 17 ----> 5
+[0]: 18 ----> 1
+[0]: 18 ----> 2
+[0]: 19 ----> 2
+[0]: 19 ----> 3
+[0]: 20 ----> 2
+[0]: 20 ----> 7
+[0]: 21 ----> 3
+[0]: 21 ----> 6
+[0]: 22 ----> 3
+[0]: 22 ----> 5
+[0]: 23 ----> 4
+[0]: 23 ----> 5
+[0]: 24 ----> 4
+[0]: 24 ----> 6
+[0]: 25 ----> 6
+[0]: 25 ----> 7
+Cones:
+[0] Max cone size: 3
+[0]: 0 <---- 14 (0)
+[0]: 0 <---- 15 (0)
+[0]: 0 <---- 16 (0)
+[0]: 1 <---- 16 (-2)
+[0]: 1 <---- 17 (0)
+[0]: 1 <---- 18 (0)
+[0]: 2 <---- 18 (-2)
+[0]: 2 <---- 19 (0)
+[0]: 2 <---- 20 (0)
+[0]: 3 <---- 19 (0)
+[0]: 3 <---- 21 (0)
+[0]: 3 <---- 22 (0)
+[0]: 4 <---- 15 (0)
+[0]: 4 <---- 23 (0)
+[0]: 4 <---- 24 (0)
+[0]: 5 <---- 17 (0)
+[0]: 5 <---- 22 (-2)
+[0]: 5 <---- 23 (-2)
+[0]: 6 <---- 25 (0)
+[0]: 6 <---- 24 (-2)
+[0]: 6 <---- 21 (-2)
+[0]: 7 <---- 20 (-2)
+[0]: 7 <---- 25 (0)
+[0]: 7 <---- 14 (-2)
+[0]: 14 <---- 8 (0)
+[0]: 14 <---- 9 (0)
+[0]: 15 <---- 9 (0)
+[0]: 15 <---- 10 (0)
+[0]: 16 <---- 10 (0)
+[0]: 16 <---- 8 (0)
+[0]: 17 <---- 10 (0)
+[0]: 17 <---- 11 (0)
+[0]: 18 <---- 11 (0)
+[0]: 18 <---- 8 (0)
+[0]: 19 <---- 11 (0)
+[0]: 19 <---- 12 (0)
+[0]: 20 <---- 12 (0)
+[0]: 20 <---- 8 (0)
+[0]: 21 <---- 12 (0)
+[0]: 21 <---- 13 (0)
+[0]: 22 <---- 13 (0)
+[0]: 22 <---- 11 (0)
+[0]: 23 <---- 10 (0)
+[0]: 23 <---- 13 (0)
+[0]: 24 <---- 13 (0)
+[0]: 24 <---- 9 (0)
+[0]: 25 <---- 12 (0)
+[0]: 25 <---- 9 (0)
+coordinates with 1 fields
+  field 0 with 3 components
+Process 0:
+  (   8) dim  3 offset   0 1.5553e-18 0. 0.0254001
+  (   9) dim  3 offset   3 1.5553e-18 0.0254001 0.
+  (  10) dim  3 offset   6 -0.0254001 3.11061e-18 0.
+  (  11) dim  3 offset   9 -4.66591e-18 -0.0254001 0.
+  (  12) dim  3 offset  12 0.0254001 0. 0.
+  (  13) dim  3 offset  15 1.5553e-18 0. -0.0254001
diff --git a/src/dm/impls/plex/examples/tests/output/ex9_correctness_0.out b/src/dm/impls/plex/examples/tests/output/ex9_correctness_0.out
new file mode 100644
index 00000000000..378631f9c74
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex9_correctness_0.out
@@ -0,0 +1,4 @@
+[0] Cones: 4 Total time: 1.192e-06s Average time per cone: 2.980e-07s
+[0] Closures: 4 Total time: 7.868e-06s Average time per cone: 1.967e-06s
+[0] VecClosures: 4 Total time: 3.338e-06s Average time per vector closure: 8.345e-07s
+[0] VecClosures with Index: 4 Total time: 7.153e-07s Average time per vector closure: 1.788e-07s
diff --git a/src/dm/impls/plex/examples/tests/output/ex9_correctness_1.out b/src/dm/impls/plex/examples/tests/output/ex9_correctness_1.out
new file mode 100644
index 00000000000..271b1fcf6b8
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex9_correctness_1.out
@@ -0,0 +1,4 @@
+[0] Cones: 64 Total time: 1.669e-06s Average time per cone: 2.608e-08s
+[0] Closures: 64 Total time: 1.693e-05s Average time per cone: 2.645e-07s
+[0] VecClosures: 64 Total time: 4.506e-05s Average time per vector closure: 7.041e-07s
+[0] VecClosures with Index: 64 Total time: 4.292e-06s Average time per vector closure: 6.706e-08s
diff --git a/src/dm/impls/plex/examples/tests/output/ex9_correctness_2.out b/src/dm/impls/plex/examples/tests/output/ex9_correctness_2.out
new file mode 100644
index 00000000000..fbec199efe2
--- /dev/null
+++ b/src/dm/impls/plex/examples/tests/output/ex9_correctness_2.out
@@ -0,0 +1,4 @@
+[0] Cones: 131072 Total time: 4.203e-04s Average time per cone: 3.207e-09s
+[0] Closures: 131072 Total time: 2.625e-02s Average time per cone: 2.003e-07s
+[0] VecClosures: 131072 Total time: 5.952e-02s Average time per vector closure: 4.541e-07s
+[0] VecClosures with Index: 131072 Total time: 6.382e-03s Average time per vector closure: 4.869e-08s
diff --git a/src/dm/impls/plex/examples/tutorials/ex1.c b/src/dm/impls/plex/examples/tutorials/ex1.c
index 4ba4da4c5d0..6c48d5a79db 100644
--- a/src/dm/impls/plex/examples/tutorials/ex1.c
+++ b/src/dm/impls/plex/examples/tutorials/ex1.c
@@ -73,9 +73,10 @@ int main(int argc, char **argv)
   test:
     suffix: 0
     requires: triangle
+    args: -info -info_exclude null
   test:
     suffix: 1
     requires: ctetgen
-    args: -dim 3
+    args: -dim 3 -info -info_exclude null
 
 TEST*/
diff --git a/src/dm/impls/plex/examples/tutorials/ex1f90.F90 b/src/dm/impls/plex/examples/tutorials/ex1f90.F90
index 4abfe130cb9..e5317127188 100644
--- a/src/dm/impls/plex/examples/tutorials/ex1f90.F90
+++ b/src/dm/impls/plex/examples/tutorials/ex1f90.F90
@@ -59,11 +59,11 @@ program DMPlexTestField
       call DMGetLabel(dm, 'marker', label, ierr);CHKERRA(ierr)
       call DMLabelGetValue(label, zero, val, ierr);CHKERRA(ierr)
       if (val .ne. -1) then
-        CHKERRA(1)
+        SETERRA(PETSC_COMM_SELF,PETSC_ERR_PLIB,'Error in library')
       endif
       call DMLabelGetValue(label, eight, val, ierr);CHKERRA(ierr)
       if (val .ne. 1) then
-        CHKERRA(1)
+        SETERRA(PETSC_COMM_SELF,PETSC_ERR_PLIB,'Error in library')
       endif
 !     Prescribe a Dirichlet condition on u on the boundary
 !       Label "marker" is made by the mesh creation routine
@@ -109,10 +109,11 @@ end program DMPlexTestField
 !  test:
 !    suffix: 0
 !    requires: triangle
+!    args: -info -info_exclude null
 !
 !  test:
 !    suffix: 1
 !    requires: ctetgen
-!    args: -dim 3
+!    args: -dim 3 -info -info_exclude null
 !
 !TEST*/
diff --git a/src/dm/impls/plex/examples/tutorials/ex7.c b/src/dm/impls/plex/examples/tutorials/ex7.c
index 313747f2744..5ca5204c946 100644
--- a/src/dm/impls/plex/examples/tutorials/ex7.c
+++ b/src/dm/impls/plex/examples/tutorials/ex7.c
@@ -26,13 +26,15 @@ static PetscErrorCode ProjectToUnitSphere(DM dm)
 {
   Vec            coordinates;
   PetscScalar   *coords;
-  PetscInt       Nv, v, dim, d;
+  PetscInt       Nv, v, bs, dim, d;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
   ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
   ierr = VecGetLocalSize(coordinates, &Nv);CHKERRQ(ierr);
-  ierr = VecGetBlockSize(coordinates, &dim);CHKERRQ(ierr);
+  ierr = VecGetBlockSize(coordinates, &bs);CHKERRQ(ierr);
+  ierr = DMGetCoordinateDim(dm, &dim);CHKERRQ(ierr);
+  if (dim != bs) SETERRQ2(PetscObjectComm((PetscObject)dm),PETSC_ERR_PLIB,"Coordinate bs %D does not match dim %D",bs,dim);
   Nv  /= dim;
   ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
   for (v = 0; v < Nv; ++v) {
@@ -114,7 +116,7 @@ int main(int argc, char **argv)
   test:
     suffix: 2d_quad_parallel
     requires: !__float128
-    args: -dm_view
+    args: -dm_view -petscpartitioner_type simple
     nsize: 2
 
   test:
@@ -125,7 +127,7 @@ int main(int argc, char **argv)
   test:
     suffix: 2d_tri_parallel
     requires: !__float128
-    args: -simplex -dm_view
+    args: -simplex -dm_view -petscpartitioner_type simple
     nsize: 2
 
   test:
@@ -136,7 +138,7 @@ int main(int argc, char **argv)
   test:
     suffix: 3d_tri_parallel
     requires: !__float128
-    args: -dim 3 -simplex -dm_view
+    args: -dim 3 -simplex -dm_view -petscpartitioner_type simple
     nsize: 2
 
 TEST*/
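
Editor's note: the ex7.c hunk above adds a guard so that the coordinate vector's block size must agree with the coordinate dimension before ProjectToUnitSphere normalizes each vertex. The hunk does not show the normalization loop itself; the following is a hypothetical standalone sketch, in plain C, of what such a projection is assumed to do (the helper name and the zero-vertex handling are illustrative, not taken from PETSc):

#include <math.h>
#include <stdio.h>

/* Hypothetical sketch: scale each vertex of a flat coordinate array so that
 * it lies on the unit sphere.  'dim' plays the role of the coordinate block size. */
static void project_to_unit_sphere(double *coords, int Nv, int dim)
{
  for (int v = 0; v < Nv; ++v) {
    double norm = 0.0;
    for (int d = 0; d < dim; ++d) norm += coords[v*dim + d] * coords[v*dim + d];
    norm = sqrt(norm);
    if (norm == 0.0) continue;              /* leave a vertex at the origin untouched */
    for (int d = 0; d < dim; ++d) coords[v*dim + d] /= norm;
  }
}

int main(void)
{
  double coords[] = {3.0, 4.0, 0.0,  0.0, 0.0, 2.0};   /* two vertices, dim = 3 */
  project_to_unit_sphere(coords, 2, 3);
  printf("%g %g %g\n", coords[0], coords[1], coords[2]); /* prints 0.6 0.8 0 */
  return 0;
}

Compiled with any C compiler plus -lm, this illustrates why the bs == dim check matters: the stride used to walk the flat array must match the number of components per vertex.
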
diff --git a/src/dm/impls/plex/examples/tutorials/makefile b/src/dm/impls/plex/examples/tutorials/makefile
index 09329f5f18f..2432cb8316a 100644
--- a/src/dm/impls/plex/examples/tutorials/makefile
+++ b/src/dm/impls/plex/examples/tutorials/makefile
@@ -3,8 +3,8 @@ FFLAGS	        =
 CPPFLAGS        =
 FPPFLAGS        =
 LOCDIR          = src/dm/impls/plex/examples/tutorials/
-EXAMPLESC       = ex1.c ex2.c ex5.c
-EXAMPLESF       = ex1f90.F90
+EXAMPLESC       = ex1.c ex2.c ex5.c ex6.c ex7.c ex8.c ex9.c
+EXAMPLESF       = ex1f90.F90 ex3f90.F90 ex4f90.F90
 MANSEC          = DM
 
 include ${PETSC_DIR}/lib/petsc/conf/variables
diff --git a/src/dm/impls/plex/examples/tutorials/output/ex1_0.out b/src/dm/impls/plex/examples/tutorials/output/ex1_0.out
index 6f8586d984c..d5eb1c73f84 100644
--- a/src/dm/impls/plex/examples/tutorials/output/ex1_0.out
+++ b/src/dm/impls/plex/examples/tutorials/output/ex1_0.out
@@ -106,3 +106,4 @@ Process 0:
   (  30) dim  1 offset  38
   (  31) dim  1 offset  39
   (  32) dim  1 offset  40
+[0] DMPlexGetFieldType_Internal(): Could not classify VTK output type of section field 2 "w"
diff --git a/src/dm/impls/plex/examples/tutorials/output/ex1_1.out b/src/dm/impls/plex/examples/tutorials/output/ex1_1.out
index 1b7edd443f7..c67a15e220c 100644
--- a/src/dm/impls/plex/examples/tutorials/output/ex1_1.out
+++ b/src/dm/impls/plex/examples/tutorials/output/ex1_1.out
@@ -1,3 +1,218 @@
+[0] TetGenMeshDelaunizeVertices(): Constructing Delaunay tetrahedralization.
+[0] TetGenMeshDelaunizeVertices():   Sorting vertices by a bsp-tree.
+[0] TetGenMeshBTreeSort():   Depth 0, 8 verts.
+[0] TetGenMeshBTreeSort():   Bbox (0., 0., 0.),(1., 1., 1.). x-axis
+[0] TetGenMeshBTreeSort():     leftsize = 4, rightsize = 4
+[0] TetGenMeshDelaunizeVertices():   Number of tree nodes: 2.
+[0] TetGenMeshDelaunizeVertices():   Maximum tree node size: 4.
+[0] TetGenMeshDelaunizeVertices():   Maximum tree depth: 0.
+[0] TetGenMeshDelaunizeVertices():   Incrementally inserting vertices.
+[0] TetGenMeshDelaunayIncrFlip():     Create the first tet (0, 3, 5, 1).
+[0] TetGenMeshDelaunayIncrFlip():     Incrementally inserting points.
+[0] TetGenMeshInsertVertexBW():     Insert point 7
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (3, 0, 5, 1).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 6
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (0, 5, 7, 1).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 2
+[0] TetGenMeshBTreeSearch():     Get point 6 (cell size 1).
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (3, 0, 6, 7).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 4
+[0] TetGenMeshBTreeSearch():     Get point 6 (cell size 2).
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 3
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (6, 0, 5, 7).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshMeshSurface(): Creating surface mesh.
+[0] TetGenMeshMakeIndex2PointMap():   Constructing mapping from indices to points.
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 1.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 1.
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 5).
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 6).
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 4).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 2.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 6 for facet 2.
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 0).
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 1).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 3.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 6 for facet 3.
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 1).
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 5).
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 4).
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 0).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 4.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 4.
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 6).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 5.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 7 for facet 5.
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 4).
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 6).
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 0).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 6.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 6.
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 1).
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 5).
+[0] TetGenMeshUnifySegments():   Unifying segments.
+[0] TetGenMeshMakeSubfaceMap():   Constructing mapping from points to subfaces.
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (5  4).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 4, 7) and (5, 4, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 4, 0) and (5, 4, 7).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (5  7).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 7, 4) and (5, 7, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 7, 3) and (5, 7, 4).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (6  7).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 7, 4) and (6, 7, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 7, 3) and (6, 7, 4).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (4  6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (4, 6, 7) and (4, 6, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (4, 6, 0) and (4, 6, 7).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  1).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 1, 2) and (0, 1, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 1, 5) and (0, 1, 2).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 2, 1) and (0, 2, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 2, 6) and (0, 2, 1).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (3  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (3, 2, 1) and (3, 2, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (3, 2, 6) and (3, 2, 1).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (1  3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 3, 2) and (1, 3, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 3, 5) and (1, 3, 2).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (1  5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 5, 0) and (1, 5, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 5, 3) and (1, 5, 0).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  4).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 4, 5) and (0, 4, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 4, 6) and (0, 4, 5).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (7  3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (7, 3, 6) and (7, 3, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (7, 3, 5) and (7, 3, 6).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (6  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 2, 3) and (6, 2, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 2, 0) and (6, 2, 3).
+[0] TetGenMeshMergeFacets():   Merging coplanar facets.
+[0] TetGenMeshMakePoint2SegMap():   Constructing mapping from points to segments.
+[0] TetGenMeshMarkAcuteVertices():   Marking acute vertices.
+[0] TetGenMeshMakeSegmentMap():   Constructing mapping from points to segments.
+[0] TetGenMeshMarkAcuteVertices():   0 acute vertices.
+[0] TetGenMeshFormSkeleton(): Recovering boundaries.
+[0] TetGenMeshDelaunizeSegments2():   Delaunizing segments.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 1).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 4, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 5, 1) to 1.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 2).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (0, 6, 7, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (1, 5).
+[0] TetGenMeshScoutSegment2():     Scout seg (3, 2).
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (6, 7).
+[0] TetGenMeshScoutSegment2():     Scout seg (6, 2).
+[0] TetGenMeshFindDirection2():       From tet (6, 0, 7, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (6, 7, 0, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (5, 4).
+[0] TetGenMeshFindDirection2():       From tet (5, 7, 0, 4) to 4.
+[0] TetGenMeshScoutSegment2():     Scout seg (5, 7).
+[0] TetGenMeshScoutSegment2():     Scout seg (7, 3).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 3.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 6, 2) to 3.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 2, 3) to 3.
+[0] TetGenMeshScoutSegment2():     Scout seg (1, 3).
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 5, 7) to 3.
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 7, 3) to 3.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 4).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 4.
+[0] TetGenMeshScoutSegment2():     Scout seg (4, 6).
+[0] TetGenMeshDelaunizeSegments2():   0 protecting points.
+[0] TetGenMeshConstrainedFacets2():   Constraining facets.
+[0] TetGenMeshConstrainedFacets2():   Recover facet #1: 2 subfaces, 4 vertices.
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 5.
+[0] TetGenMeshFindDirection2():       From tet (3, 7, 0, 1) to 5.
+[0] TetGenMeshScoutSubface():     Scout subface (5, 7, 3) (11).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (5, 7, 1) op (0).
+[0] TetGenMeshFlip22Sub():     Flip subedge (5, 3) to (7, 1).
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 5, 7) to 3.
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 7, 3) to 3.
+[0] TetGenMeshScoutSubface():     Scout subface (1, 3, 7) (11).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (7, 4, 0, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (7, 5, 0, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 1, 5) (11).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #2: 2 subfaces, 4 vertices.
+[0] TetGenMeshFindDirection2():       From tet (4, 6, 7, 0) to 5.
+[0] TetGenMeshFindDirection2():       From tet (4, 0, 7, 5) to 5.
+[0] TetGenMeshScoutSubface():     Scout subface (4, 5, 7) (10).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 4.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 4, 6) (10).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #3: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (6, 0, 2) (9).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 4.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 4, 6) (9).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #4: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (5, 0, 4) (8).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 4, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 5, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 1, 5) (8).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #5: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (1, 0, 2) (7).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (1, 0, 3) op (7).
+[0] TetGenMeshFlip22Sub():     Flip subedge (1, 2) to (0, 3).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 3.
+[0] TetGenMeshFindDirection2():       From tet (0, 6, 7, 2) to 3.
+[0] TetGenMeshFindDirection2():       From tet (0, 2, 7, 3) to 3.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 3, 2) (7).
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 1.
+[0] TetGenMeshFindDirection2():       From tet (3, 7, 0, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (3, 1, 0) (7).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #6: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (6, 7, 3) (2).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (7, 6, 2) op (0).
+[0] TetGenMeshFlip22Sub():     Flip subedge (6, 3) to (7, 2).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 6, 2) to 2.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 2, 3) (2).
+[0] TetGenMeshScoutSubface():     Scout subface (2, 6, 7) (2).
+[0] TetGenMeshConstrainedFacets2():   3 subedge flips  0 cavities remeshed.
+[0] TetGenMeshCarveHoles(): Removing exterior tetrahedra.
+[0] TetGenMeshInfectHull():   Marking concavities for elimination.
+[0] TetGenMeshPlague():   Marking neighbors of marked tetrahedra.
+[0] TetGenMeshRemoveHoleTets():   Deleting marked tetrahedra.
+[0] TetGenMeshMakePoint2TetMap():   Constructing mapping from points to tetrahedra.
+[0] TetGenMeshOptimize(): Repairing mesh.
+[0] TetGenMeshEnforceQuality(): Adding Steiner points to enforce quality.
+[0] TetGenMeshMarkSharpSegments():   Marking sharp segments.
+[0] TetGenMeshDecideFeaturePointSizes():   Deciding feature-point sizes.
+[0] TetGenMeshMakeSegmentMap():   Constructing mapping from points to segments.
+[0] TetGenMeshDecideFeaturePointSizes():   0 feature points.
+[0] TetGenMeshEnforceQuality():   0 split points.
+[0] TetGenMeshEnforceQuality():   0 split points.
+[0] TetGenMeshEnforceQuality():   0 refinement points.
+[0] TetGenMeshEnforceQuality():   Totally added 0 points.
+[0] TetGenMeshOptimize(): Optimizing mesh.
+[0] TetGenMeshOptimize():   level = 3.
+[0] TetGenMeshOptimize():   level = 3 pass 0.
+[0] TetGenMeshOutputNodes(): Writing nodes.
+[0] TetGenMeshOutputElements(): Writing elements.
+[0] TetGenMeshOutputSubfaces(): Writing faces.
 PetscSection Object: 1 MPI processes
   type not yet set
 3 fields
@@ -160,3 +375,4 @@ Process 0:
   (  48) dim  0 offset  62
   (  49) dim  0 offset  62
   (  50) dim  0 offset  62
+[0] DMPlexGetFieldType_Internal(): Could not classify VTK output type of section field 2 "w"
diff --git a/src/dm/impls/plex/examples/tutorials/output/ex1f90_0.out b/src/dm/impls/plex/examples/tutorials/output/ex1f90_0.out
index 6f8586d984c..d5eb1c73f84 100644
--- a/src/dm/impls/plex/examples/tutorials/output/ex1f90_0.out
+++ b/src/dm/impls/plex/examples/tutorials/output/ex1f90_0.out
@@ -106,3 +106,4 @@ Process 0:
   (  30) dim  1 offset  38
   (  31) dim  1 offset  39
   (  32) dim  1 offset  40
+[0] DMPlexGetFieldType_Internal(): Could not classify VTK output type of section field 2 "w"
diff --git a/src/dm/impls/plex/examples/tutorials/output/ex1f90_1.out b/src/dm/impls/plex/examples/tutorials/output/ex1f90_1.out
index 1b7edd443f7..c67a15e220c 100644
--- a/src/dm/impls/plex/examples/tutorials/output/ex1f90_1.out
+++ b/src/dm/impls/plex/examples/tutorials/output/ex1f90_1.out
@@ -1,3 +1,218 @@
+[0] TetGenMeshDelaunizeVertices(): Constructing Delaunay tetrahedralization.
+[0] TetGenMeshDelaunizeVertices():   Sorting vertices by a bsp-tree.
+[0] TetGenMeshBTreeSort():   Depth 0, 8 verts.
+[0] TetGenMeshBTreeSort():   Bbox (0., 0., 0.),(1., 1., 1.). x-axis
+[0] TetGenMeshBTreeSort():     leftsize = 4, rightsize = 4
+[0] TetGenMeshDelaunizeVertices():   Number of tree nodes: 2.
+[0] TetGenMeshDelaunizeVertices():   Maximum tree node size: 4.
+[0] TetGenMeshDelaunizeVertices():   Maximum tree depth: 0.
+[0] TetGenMeshDelaunizeVertices():   Incrementally inserting vertices.
+[0] TetGenMeshDelaunayIncrFlip():     Create the first tet (0, 3, 5, 1).
+[0] TetGenMeshDelaunayIncrFlip():     Incrementally inserting points.
+[0] TetGenMeshInsertVertexBW():     Insert point 7
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (3, 0, 5, 1).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 6
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (0, 5, 7, 1).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 2
+[0] TetGenMeshBTreeSearch():     Get point 6 (cell size 1).
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 2
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (3, 0, 6, 7).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshInsertVertexBW():     Insert point 4
+[0] TetGenMeshBTreeSearch():     Get point 6 (cell size 2).
+[0] TetGenMeshInsertVertexBW():     Walk distance (# tets): 3
+[0] TetGenMeshInsertVertexBW():     Located (4) tet (6, 0, 5, 7).
+[0] TetGenMeshInsertVertexBW():     Insert a hull vertex.
+[0] TetGenMeshInsertVertexBW():     Cavity formed: 2 tets, 2 faces.
+[0] TetGenMeshMeshSurface(): Creating surface mesh.
+[0] TetGenMeshMakeIndex2PointMap():   Constructing mapping from indices to points.
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 1.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 1.
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 5).
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 6).
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 4).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 2.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 6 for facet 2.
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 0).
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 1).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 3.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 6 for facet 3.
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 1).
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 5).
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 4).
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 0).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 4.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 4.
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 6).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 5.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 7 for facet 5.
+[0] TetGenMeshRecoverSegment():     Insert seg (0, 4).
+[0] TetGenMeshRecoverSegment():     Insert seg (4, 6).
+[0] TetGenMeshRecoverSegment():     Insert seg (6, 2).
+[0] TetGenMeshRecoverSegment():     Insert seg (2, 0).
+[0] TetGenMeshTriangulate():     4 vertices, 4 segments, 0 holes, shmark: 6.
+[0] TetGenMeshGetFacetAbovePoint():     Chosen abovepoint 0 for facet 6.
+[0] TetGenMeshRecoverSegment():     Insert seg (5, 1).
+[0] TetGenMeshRecoverSegment():     Insert seg (1, 3).
+[0] TetGenMeshRecoverSegment():     Insert seg (3, 7).
+[0] TetGenMeshRecoverSegment():     Insert seg (7, 5).
+[0] TetGenMeshUnifySegments():   Unifying segments.
+[0] TetGenMeshMakeSubfaceMap():   Constructing mapping from points to subfaces.
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (5  4).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 4, 7) and (5, 4, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 4, 0) and (5, 4, 7).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (5  7).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 7, 4) and (5, 7, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (5, 7, 3) and (5, 7, 4).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (6  7).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 7, 4) and (6, 7, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 7, 3) and (6, 7, 4).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (4  6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (4, 6, 7) and (4, 6, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (4, 6, 0) and (4, 6, 7).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  1).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 1, 2) and (0, 1, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 1, 5) and (0, 1, 2).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 2, 1) and (0, 2, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 2, 6) and (0, 2, 1).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (3  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (3, 2, 1) and (3, 2, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (3, 2, 6) and (3, 2, 1).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (1  3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 3, 2) and (1, 3, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 3, 5) and (1, 3, 2).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (1  5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 5, 0) and (1, 5, 3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (1, 5, 3) and (1, 5, 0).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (0  4).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 4, 5) and (0, 4, 6).
+[0] TetGenMeshUnifySegments():     Bond subfaces (0, 4, 6) and (0, 4, 5).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (7  3).
+[0] TetGenMeshUnifySegments():     Bond subfaces (7, 3, 6) and (7, 3, 5).
+[0] TetGenMeshUnifySegments():     Bond subfaces (7, 3, 5) and (7, 3, 6).
+[0] TetGenMeshUnifySegments():     Identifying 2 segments of (6  2).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 2, 3) and (6, 2, 0).
+[0] TetGenMeshUnifySegments():     Bond subfaces (6, 2, 0) and (6, 2, 3).
+[0] TetGenMeshMergeFacets():   Merging coplanar facets.
+[0] TetGenMeshMakePoint2SegMap():   Constructing mapping from points to segments.
+[0] TetGenMeshMarkAcuteVertices():   Marking acute vertices.
+[0] TetGenMeshMakeSegmentMap():   Constructing mapping from points to segments.
+[0] TetGenMeshMarkAcuteVertices():   0 acute vertices.
+[0] TetGenMeshFormSkeleton(): Recovering boundaries.
+[0] TetGenMeshDelaunizeSegments2():   Delaunizing segments.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 1).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 4, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 5, 1) to 1.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 2).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (0, 6, 7, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (1, 5).
+[0] TetGenMeshScoutSegment2():     Scout seg (3, 2).
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (6, 7).
+[0] TetGenMeshScoutSegment2():     Scout seg (6, 2).
+[0] TetGenMeshFindDirection2():       From tet (6, 0, 7, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (6, 7, 0, 2) to 2.
+[0] TetGenMeshScoutSegment2():     Scout seg (5, 4).
+[0] TetGenMeshFindDirection2():       From tet (5, 7, 0, 4) to 4.
+[0] TetGenMeshScoutSegment2():     Scout seg (5, 7).
+[0] TetGenMeshScoutSegment2():     Scout seg (7, 3).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 3.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 6, 2) to 3.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 2, 3) to 3.
+[0] TetGenMeshScoutSegment2():     Scout seg (1, 3).
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 5, 7) to 3.
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 7, 3) to 3.
+[0] TetGenMeshScoutSegment2():     Scout seg (0, 4).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 4.
+[0] TetGenMeshScoutSegment2():     Scout seg (4, 6).
+[0] TetGenMeshDelaunizeSegments2():   0 protecting points.
+[0] TetGenMeshConstrainedFacets2():   Constraining facets.
+[0] TetGenMeshConstrainedFacets2():   Recover facet #1: 2 subfaces, 4 vertices.
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 5.
+[0] TetGenMeshFindDirection2():       From tet (3, 7, 0, 1) to 5.
+[0] TetGenMeshScoutSubface():     Scout subface (5, 7, 3) (11).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (5, 7, 1) op (0).
+[0] TetGenMeshFlip22Sub():     Flip subedge (5, 3) to (7, 1).
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 5, 7) to 3.
+[0] TetGenMeshFindDirection2():       From tet (1, 0, 7, 3) to 3.
+[0] TetGenMeshScoutSubface():     Scout subface (1, 3, 7) (11).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (7, 4, 0, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (7, 5, 0, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 1, 5) (11).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #2: 2 subfaces, 4 vertices.
+[0] TetGenMeshFindDirection2():       From tet (4, 6, 7, 0) to 5.
+[0] TetGenMeshFindDirection2():       From tet (4, 0, 7, 5) to 5.
+[0] TetGenMeshScoutSubface():     Scout subface (4, 5, 7) (10).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 4.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 4, 6) (10).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #3: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (6, 0, 2) (9).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 4.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 4, 6) (9).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #4: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (5, 0, 4) (8).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 4, 5) to 1.
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 5, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 1, 5) (8).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #5: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (1, 0, 2) (7).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (1, 0, 3) op (7).
+[0] TetGenMeshFlip22Sub():     Flip subedge (1, 2) to (0, 3).
+[0] TetGenMeshFindDirection2():       From tet (0, 7, 6, 4) to 3.
+[0] TetGenMeshFindDirection2():       From tet (0, 6, 7, 2) to 3.
+[0] TetGenMeshFindDirection2():       From tet (0, 2, 7, 3) to 3.
+[0] TetGenMeshScoutSubface():     Scout subface (0, 3, 2) (7).
+[0] TetGenMeshFindDirection2():       From tet (3, 0, 7, 2) to 1.
+[0] TetGenMeshFindDirection2():       From tet (3, 7, 0, 1) to 1.
+[0] TetGenMeshScoutSubface():     Scout subface (3, 1, 0) (7).
+[0] TetGenMeshConstrainedFacets2():   Recover facet #6: 2 subfaces, 4 vertices.
+[0] TetGenMeshScoutSubface():     Scout subface (6, 7, 3) (2).
+[0] TetGenMeshScoutCrossTet():     Found a co-facet face (7, 6, 2) op (0).
+[0] TetGenMeshFlip22Sub():     Flip subedge (6, 3) to (7, 2).
+[0] TetGenMeshFindDirection2():       From tet (7, 6, 0, 4) to 2.
+[0] TetGenMeshFindDirection2():       From tet (7, 0, 6, 2) to 2.
+[0] TetGenMeshScoutSubface():     Scout subface (7, 2, 3) (2).
+[0] TetGenMeshScoutSubface():     Scout subface (2, 6, 7) (2).
+[0] TetGenMeshConstrainedFacets2():   3 subedge flips  0 cavities remeshed.
+[0] TetGenMeshCarveHoles(): Removing exterior tetrahedra.
+[0] TetGenMeshInfectHull():   Marking concavities for elimination.
+[0] TetGenMeshPlague():   Marking neighbors of marked tetrahedra.
+[0] TetGenMeshRemoveHoleTets():   Deleting marked tetrahedra.
+[0] TetGenMeshMakePoint2TetMap():   Constructing mapping from points to tetrahedra.
+[0] TetGenMeshOptimize(): Repairing mesh.
+[0] TetGenMeshEnforceQuality(): Adding Steiner points to enforce quality.
+[0] TetGenMeshMarkSharpSegments():   Marking sharp segments.
+[0] TetGenMeshDecideFeaturePointSizes():   Deciding feature-point sizes.
+[0] TetGenMeshMakeSegmentMap():   Constructing mapping from points to segments.
+[0] TetGenMeshDecideFeaturePointSizes():   0 feature points.
+[0] TetGenMeshEnforceQuality():   0 split points.
+[0] TetGenMeshEnforceQuality():   0 split points.
+[0] TetGenMeshEnforceQuality():   0 refinement points.
+[0] TetGenMeshEnforceQuality():   Totally added 0 points.
+[0] TetGenMeshOptimize(): Optimizing mesh.
+[0] TetGenMeshOptimize():   level = 3.
+[0] TetGenMeshOptimize():   level = 3 pass 0.
+[0] TetGenMeshOutputNodes(): Writing nodes.
+[0] TetGenMeshOutputElements(): Writing elements.
+[0] TetGenMeshOutputSubfaces(): Writing faces.
 PetscSection Object: 1 MPI processes
   type not yet set
 3 fields
@@ -160,3 +375,4 @@ Process 0:
   (  48) dim  0 offset  62
   (  49) dim  0 offset  62
   (  50) dim  0 offset  62
+[0] DMPlexGetFieldType_Internal(): Could not classify VTK output type of section field 2 "w"
diff --git a/src/dm/impls/plex/f90-custom/makefile b/src/dm/impls/plex/f90-custom/makefile
index be7772237aa..d1e1f7206f5 100644
--- a/src/dm/impls/plex/f90-custom/makefile
+++ b/src/dm/impls/plex/f90-custom/makefile
@@ -1,5 +1,4 @@
 #requiresdefine   'PETSC_HAVE_FORTRAN'
-#requiresdefine   'PETSC_USING_F90'
 ALL: lib
 
 CFLAGS   =
diff --git a/src/dm/impls/plex/ftn-custom/zplexcreate.c b/src/dm/impls/plex/ftn-custom/zplexcreate.c
index d6ba7ffd9ab..287723e091f 100644
--- a/src/dm/impls/plex/ftn-custom/zplexcreate.c
+++ b/src/dm/impls/plex/ftn-custom/zplexcreate.c
@@ -4,9 +4,11 @@
 #if defined(PETSC_HAVE_FORTRAN_CAPS)
 #define dmplexcreateboxmesh_  DMPLEXCREATEBOXMESH
 #define dmplexcreatefromfile_ DMPLEXCREATEFROMFILE
+#define petscpartitionerviewfromoptions_ PETSCPARTITIONERVIEWFROMOPTIONS
 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) && !defined(FORTRANDOUBLEUNDERSCORE)
 #define dmplexcreateboxmesh_  dmplexcreateboxmesh
 #define dmplexcreatefromfile_ dmplexcreatefromfile
+#define petscpartitionerviewfromoptions_ petscpartitionerviewfromoptions
 #endif
 
 /* Definitions of Fortran Wrapper routines */
@@ -28,3 +30,13 @@ PETSC_EXTERN void PETSC_STDCALL dmplexcreatefromfile_(MPI_Fint *comm, char* name
   *ierr = DMPlexCreateFromFile(MPI_Comm_f2c(*(comm)), filename, *interpolate, dm);if (*ierr) return;
   FREECHAR(name, filename);
 }
+
+PETSC_EXTERN void PETSC_STDCALL petscpartitionerviewfromoptions_(PetscPartitioner *part,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = PetscPartitionerViewFromOptions(*part,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
+
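
Editor's note: the new Fortran binding above follows the usual PETSc character-argument pattern. The string length arrives as a hidden argument (PETSC_MIXED_LEN/PETSC_END_LEN), FIXCHAR converts the blank-padded Fortran buffer into a null-terminated C string, and FREECHAR releases it after the call. A rough plain-C sketch of what such a conversion is assumed to do (the helper name is hypothetical, not PETSc's):

#include <stdlib.h>
#include <string.h>

/* Hypothetical sketch: turn a blank-padded Fortran CHARACTER buffer of known
 * length into a heap-allocated, null-terminated C string (caller frees). */
static char *fortran_to_c_string(const char *fstr, size_t len)
{
  while (len > 0 && fstr[len-1] == ' ') --len;   /* drop trailing blanks */
  char *cstr = (char *)malloc(len + 1);
  if (!cstr) return NULL;
  memcpy(cstr, fstr, len);
  cstr[len] = '\0';
  return cstr;
}
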
diff --git a/src/dm/impls/plex/makefile b/src/dm/impls/plex/makefile
index 12bc8289d60..53d543f7309 100644
--- a/src/dm/impls/plex/makefile
+++ b/src/dm/impls/plex/makefile
@@ -3,7 +3,7 @@ ALL: lib
 CPPFLAGS = ${NETCFD_INCLUDE} ${EXODUSII_INCLUDE}
 CFLAGS   =
 FFLAGS   =
-SOURCEC  = plexcreate.c plex.c plexpartition.c plexdistribute.c plexrefine.c plexadapt.c plexcoarsen.c plexinterpolate.c plexpreallocate.c plexreorder.c plexgeometry.c plexsubmesh.c plexhdf5.c plexhdf5xdmf.c plexexodusii.c plexgmsh.c plexfluent.c plexcgns.c plexmed.c plexply.c plexvtk.c plexpoint.c plexvtu.c plexfem.c plexfvm.c plexindices.c plextree.c plexgenerate.c plexorient.c plexnatural.c plexproject.c plexglvis.c glexg.c petscpartmatpart.c plexcheckinterface.c plexsection.c plexhpddm.c
+SOURCEC  = plexcreate.c plex.c plexpartition.c plexdistribute.c plexrefine.c plexadapt.c plexcoarsen.c plexinterpolate.c plexpreallocate.c plexreorder.c plexgeometry.c plexsubmesh.c plexhdf5.c plexhdf5xdmf.c plexexodusii.c plexgmsh.c plexfluent.c plexcgns.c plexmed.c plexply.c plexvtk.c plexpoint.c plexvtu.c plexfem.c plexfvm.c plexindices.c plextree.c plexgenerate.c plexorient.c plexnatural.c plexproject.c plexglvis.c glexg.c petscpartmatpart.c plexcheckinterface.c plexsection.c plexhpddm.c plexegads.c
 SOURCEF  =
 SOURCEH  =
 DIRS     = generators examples
diff --git a/src/dm/impls/plex/petscpartmatpart.c b/src/dm/impls/plex/petscpartmatpart.c
index 640b72370cd..f5c920611dc 100644
--- a/src/dm/impls/plex/petscpartmatpart.c
+++ b/src/dm/impls/plex/petscpartmatpart.c
@@ -89,19 +89,19 @@ static PetscErrorCode PetscPartitionerSetFromOptions_MatPartitioning(PetscOption
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *is)
+static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *is)
 {
   PetscPartitioner_MatPartitioning  *p = (PetscPartitioner_MatPartitioning *) part->data;
   Mat                               matadj;
   IS                                is1, is2, is3;
+  PetscReal                         *tpwgts = NULL;
   PetscInt                          numVerticesGlobal, numEdges;
-  PetscInt                          *i, *j;
+  PetscInt                          *i, *j, *vwgt = NULL;
   MPI_Comm                          comm;
   PetscErrorCode                    ierr;
 
   PetscFunctionBegin;
   ierr = PetscObjectGetComm((PetscObject)part, &comm);CHKERRQ(ierr);
-  if (numVertices < 0) SETERRQ(comm, PETSC_ERR_PLIB, "number of vertices must be specified");
 
   /* TODO: MatCreateMPIAdj should maybe take global number of ROWS */
   /* TODO: And vertex distribution in PetscPartitionerPartition_ParMetis should be done using PetscSplitOwnership */
@@ -120,6 +120,45 @@ static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner
   ierr = MatPartitioningSetAdjacency(p->mp, matadj);CHKERRQ(ierr);
   ierr = MatPartitioningSetNParts(p->mp, nparts);CHKERRQ(ierr);
 
+  /* calculate partition weights */
+  if (targetSection) {
+    PetscReal sumt;
+    PetscInt  p;
+
+    sumt = 0.0;
+    ierr = PetscMalloc1(nparts,&tpwgts);CHKERRQ(ierr);
+    for (p = 0; p < nparts; ++p) {
+      PetscInt tpd;
+
+      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
+      sumt += tpd;
+      tpwgts[p] = tpd;
+    }
+    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
+      for (p = 0, sumt = 0.0; p < nparts; ++p) {
+        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
+        sumt += tpwgts[p];
+      }
+      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
+      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
+      tpwgts[nparts - 1] = 1. - sumt;
+    } else {
+      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
+    }
+  }
+  ierr = MatPartitioningSetPartitionWeights(p->mp, tpwgts);CHKERRQ(ierr);
+
+  /* calculate vertex weights */
+  if (vertSection) {
+    PetscInt v;
+
+    ierr = PetscMalloc1(numVertices,&vwgt);CHKERRQ(ierr);
+    for (v = 0; v < numVertices; ++v) {
+      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
+    }
+  }
+  ierr = MatPartitioningSetVertexWeights(p->mp, vwgt);CHKERRQ(ierr);
+
   /* apply the partitioning */
   ierr = MatPartitioningApply(p->mp, &is1);CHKERRQ(ierr);
 
@@ -128,11 +167,9 @@ static PetscErrorCode PetscPartitionerPartition_MatPartitioning(PetscPartitioner
     PetscInt v;
     const PetscInt *assignment_arr;
 
-    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
     ierr = ISGetIndices(is1, &assignment_arr);CHKERRQ(ierr);
     for (v = 0; v < numVertices; ++v) {ierr = PetscSectionAddDof(partSection, assignment_arr[v], 1);CHKERRQ(ierr);}
     ierr = ISRestoreIndices(is1, &assignment_arr);CHKERRQ(ierr);
-    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
   }
 
   /* convert assignment IS to global numbering IS */
diff --git a/src/dm/impls/plex/plex.c b/src/dm/impls/plex/plex.c
index 5e272c55791..cc9ea0b3a69 100644
--- a/src/dm/impls/plex/plex.c
+++ b/src/dm/impls/plex/plex.c
@@ -32,8 +32,9 @@ PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
 @*/
 PetscErrorCode DMPlexRefineSimplexToTensor(DM dm, DM *dmRefined)
 {
-  PetscInt         dim, cMax, fMax, cStart, cEnd, coneSize;
   CellRefiner      cellRefiner;
+  DMPolytopeType   ct;
+  PetscInt         dim, cMax, fMax, cStart, cEnd;
   PetscBool        lop, allnoop, localized;
   PetscErrorCode   ierr;
 
@@ -45,42 +46,30 @@ PetscErrorCode DMPlexRefineSimplexToTensor(DM dm, DM *dmRefined)
   ierr = DMPlexGetHeightStratum(dm,0,&cStart,&cEnd);CHKERRQ(ierr);
   if (!(cEnd - cStart)) cellRefiner = REFINER_NOOP;
   else {
-    ierr = DMPlexGetConeSize(dm,cStart,&coneSize);CHKERRQ(ierr);
-    switch (dim) {
-    case 1:
-      cellRefiner = REFINER_NOOP;
-    break;
-    case 2:
-      switch (coneSize) {
-      case 3:
+    ierr = DMPlexGetCellType(dm, cStart, &ct);CHKERRQ(ierr);
+    switch (ct) {
+      case DM_POLYTOPE_POINT:
+      case DM_POLYTOPE_SEGMENT:
+        cellRefiner = REFINER_NOOP;break;
+      case DM_POLYTOPE_TRIANGLE:
         if (cMax >= 0) cellRefiner = REFINER_HYBRID_SIMPLEX_TO_HEX_2D;
-        else cellRefiner = REFINER_SIMPLEX_TO_HEX_2D;
-      break;
-      case 4:
+        else           cellRefiner = REFINER_SIMPLEX_TO_HEX_2D;
+        break;
+      case DM_POLYTOPE_QUADRILATERAL:
         if (cMax >= 0) cellRefiner = REFINER_HYBRID_SIMPLEX_TO_HEX_2D;
-        else cellRefiner = REFINER_NOOP;
-      break;
-      default: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot handle coneSize %D with dimension %D",coneSize,dim);
-      }
-    break;
-    case 3:
-      switch (coneSize) {
-      case 4:
-        if (cMax >= 0) cellRefiner = REFINER_HYBRID_SIMPLEX_TO_HEX_3D;
-        else cellRefiner = REFINER_SIMPLEX_TO_HEX_3D;
-      break;
-      case 5:
+        else           cellRefiner = REFINER_NOOP;
+        break;
+      case DM_POLYTOPE_TETRAHEDRON:
         if (cMax >= 0) cellRefiner = REFINER_HYBRID_SIMPLEX_TO_HEX_3D;
-        else cellRefiner = REFINER_NOOP;
-      break;
-      case 6:
-        if (cMax >= 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Simplex2Tensor in 3D with Hybrid mesh not yet done");
-        cellRefiner = REFINER_NOOP;
-      break;
-      default: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot handle coneSize %D with dimension %D",coneSize,dim);
-      }
-    break;
-    default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot handle dimension %D",dim);
+        else           cellRefiner = REFINER_SIMPLEX_TO_HEX_3D;
+        break;
+      case DM_POLYTOPE_TRI_PRISM_TENSOR:
+        cellRefiner = REFINER_HYBRID_SIMPLEX_TO_HEX_3D;break;
+      case DM_POLYTOPE_HEXAHEDRON:
+        if (cMax >= 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Simplex2Tensor in 3D with Hybrid mesh not yet done");
+        else           cellRefiner = REFINER_NOOP;
+        break;
+      default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle cell polytope type %s", DMPolytopeTypes[ct]);
     }
   }
   /* return if we don't need to refine */
@@ -101,13 +90,13 @@ PetscErrorCode DMPlexRefineSimplexToTensor(DM dm, DM *dmRefined)
 
 PetscErrorCode DMPlexGetFieldType_Internal(DM dm, PetscSection section, PetscInt field, PetscInt *sStart, PetscInt *sEnd, PetscViewerVTKFieldType *ft)
 {
-  PetscInt       dim, pStart, pEnd, vStart, vEnd, cStart, cEnd, cMax;
+  PetscInt       cdim, pStart, pEnd, vStart, vEnd, cStart, cEnd, cMax;
   PetscInt       vcdof[2] = {0,0}, globalvcdof[2];
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  *ft  = PETSC_VTK_POINT_FIELD;
-  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  *ft  = PETSC_VTK_INVALID;
+  ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
   ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
   ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = DMPlexGetHybridBounds(dm, &cMax, NULL, NULL, NULL);CHKERRQ(ierr);
@@ -124,14 +113,23 @@ PetscErrorCode DMPlexGetFieldType_Internal(DM dm, PetscSection section, PetscInt
   if (globalvcdof[0]) {
     *sStart = vStart;
     *sEnd   = vEnd;
-    if (globalvcdof[0] == dim) *ft = PETSC_VTK_POINT_VECTOR_FIELD;
-    else                       *ft = PETSC_VTK_POINT_FIELD;
+    if (globalvcdof[0] == cdim) *ft = PETSC_VTK_POINT_VECTOR_FIELD;
+    else                        *ft = PETSC_VTK_POINT_FIELD;
   } else if (globalvcdof[1]) {
     *sStart = cStart;
     *sEnd   = cEnd;
-    if (globalvcdof[1] == dim) *ft = PETSC_VTK_CELL_VECTOR_FIELD;
-    else                       *ft = PETSC_VTK_CELL_FIELD;
-  } else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Could not classify input Vec for VTK");
+    if (globalvcdof[1] == cdim) *ft = PETSC_VTK_CELL_VECTOR_FIELD;
+    else                        *ft = PETSC_VTK_CELL_FIELD;
+  } else {
+    if (field >= 0) {
+      const char *fieldname;
+
+      ierr = PetscSectionGetFieldName(section, field, &fieldname);CHKERRQ(ierr);
+      ierr = PetscInfo2((PetscObject) dm, "Could not classify VTK output type of section field %D \"%s\"\n", field, fieldname);CHKERRQ(ierr);
+    } else {
+      ierr = PetscInfo((PetscObject) dm, "Could not classify VTK output type of section\n");CHKERRQ(ierr);
+    }
+  }
   PetscFunctionReturn(0);
 }
 
@@ -281,6 +279,7 @@ static PetscErrorCode VecView_Plex_Local_VTK(Vec v, PetscViewer viewer)
   const char              *name;
   PetscSection            section;
   PetscInt                pStart, pEnd;
+  PetscInt                numFields;
   PetscViewerVTKFieldType ft;
   PetscErrorCode          ierr;
 
@@ -291,8 +290,21 @@ static PetscErrorCode VecView_Plex_Local_VTK(Vec v, PetscViewer viewer)
   ierr = PetscObjectSetName((PetscObject) locv, name);CHKERRQ(ierr);
   ierr = VecCopy(v, locv);CHKERRQ(ierr);
   ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
-  ierr = DMPlexGetFieldType_Internal(dm, section, PETSC_DETERMINE, &pStart, &pEnd, &ft);CHKERRQ(ierr);
-  ierr = PetscViewerVTKAddField(viewer, (PetscObject) dm, DMPlexVTKWriteAll, ft, PETSC_TRUE,(PetscObject) locv);CHKERRQ(ierr);
+  ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);
+  if (!numFields) {
+    ierr = DMPlexGetFieldType_Internal(dm, section, PETSC_DETERMINE, &pStart, &pEnd, &ft);CHKERRQ(ierr);
+    ierr = PetscViewerVTKAddField(viewer, (PetscObject) dm, DMPlexVTKWriteAll, PETSC_DEFAULT, ft, PETSC_TRUE,(PetscObject) locv);CHKERRQ(ierr);
+  } else {
+    PetscInt f;
+
+    for (f = 0; f < numFields; f++) {
+      ierr = DMPlexGetFieldType_Internal(dm, section, f, &pStart, &pEnd, &ft);CHKERRQ(ierr);
+      if (ft == PETSC_VTK_INVALID) continue;
+      ierr = PetscObjectReference((PetscObject)locv);CHKERRQ(ierr);
+      ierr = PetscViewerVTKAddField(viewer, (PetscObject) dm, DMPlexVTKWriteAll, f, ft, PETSC_TRUE,(PetscObject) locv);CHKERRQ(ierr);
+    }
+    ierr = VecDestroy(&locv);CHKERRQ(ierr);
+  }
   PetscFunctionReturn(0);
 }
 
@@ -547,7 +559,7 @@ PETSC_UNUSED static PetscErrorCode DMPlexView_Ascii_Geometry(DM dm, PetscViewer
 {
   PetscSection       coordSection;
   Vec                coordinates;
-  DMLabel            depthLabel;
+  DMLabel            depthLabel, celltypeLabel;
   const char        *name[4];
   const PetscScalar *a;
   PetscInt           dim, pStart, pEnd, cStart, cEnd, c;
@@ -558,6 +570,7 @@ PETSC_UNUSED static PetscErrorCode DMPlexView_Ascii_Geometry(DM dm, PetscViewer
   ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
   ierr = DMGetCoordinateSection(dm, &coordSection);CHKERRQ(ierr);
   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
+  ierr = DMPlexGetCellTypeLabel(dm, &celltypeLabel);CHKERRQ(ierr);
   ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = PetscSectionGetChart(coordSection, &pStart, &pEnd);CHKERRQ(ierr);
   ierr = VecGetArrayRead(coordinates, &a);CHKERRQ(ierr);
@@ -567,9 +580,10 @@ PETSC_UNUSED static PetscErrorCode DMPlexView_Ascii_Geometry(DM dm, PetscViewer
   name[dim]   = "cell";
   for (c = cStart; c < cEnd; ++c) {
     PetscInt *closure = NULL;
-    PetscInt  closureSize, cl;
+    PetscInt  closureSize, cl, ct;
 
-    ierr = PetscViewerASCIIPrintf(viewer, "Geometry for cell %D:\n", c);CHKERRQ(ierr);
+    ierr = DMLabelGetValue(celltypeLabel, c, &ct);CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPrintf(viewer, "Geometry for cell %D polytope type %s:\n", c, DMPolytopeTypes[ct]);CHKERRQ(ierr);
     ierr = DMPlexGetTransitiveClosure(dm, c, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
     for (cl = 0; cl < closureSize*2; cl += 2) {
@@ -1200,19 +1214,20 @@ static PetscErrorCode DMPlexView_Draw(DM dm, PetscViewer viewer)
 
   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
   for (c = cStart; c < cEnd; ++c) {
-    PetscScalar *coords = NULL;
-    PetscInt     numCoords,coneSize;
+    PetscScalar   *coords = NULL;
+    DMPolytopeType ct;
+    PetscInt       numCoords;
 
-    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
+    ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
     ierr = DMPlexVecGetClosure(dm, coordSection, coordinates, c, &numCoords, &coords);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 3:
+    switch (ct) {
+    case DM_POLYTOPE_TRIANGLE:
       ierr = PetscDrawTriangle(draw, PetscRealPart(coords[0]), PetscRealPart(coords[1]), PetscRealPart(coords[2]), PetscRealPart(coords[3]), PetscRealPart(coords[4]), PetscRealPart(coords[5]),
                                PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2,
                                PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2,
                                PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2);CHKERRQ(ierr);
       break;
-    case 4:
+    case DM_POLYTOPE_QUADRILATERAL:
       ierr = PetscDrawTriangle(draw, PetscRealPart(coords[0]), PetscRealPart(coords[1]), PetscRealPart(coords[2]), PetscRealPart(coords[3]), PetscRealPart(coords[4]), PetscRealPart(coords[5]),
                                 PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2,
                                 PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2,
@@ -1222,29 +1237,30 @@ static PetscErrorCode DMPlexView_Draw(DM dm, PetscViewer viewer)
                                 PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2,
                                 PETSC_DRAW_WHITE + rank % (PETSC_DRAW_BASIC_COLORS-2) + 2);CHKERRQ(ierr);
       break;
-    default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot draw cells with %D facets", coneSize);
+    default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot draw cells of type %s", DMPolytopeTypes[ct]);
     }
     ierr = DMPlexVecRestoreClosure(dm, coordSection, coordinates, c, &numCoords, &coords);CHKERRQ(ierr);
   }
   for (c = cStart; c < cEnd; ++c) {
-    PetscScalar *coords = NULL;
-    PetscInt     numCoords,coneSize;
+    PetscScalar   *coords = NULL;
+    DMPolytopeType ct;
+    PetscInt       numCoords;
 
-    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
+    ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
     ierr = DMPlexVecGetClosure(dm, coordSection, coordinates, c, &numCoords, &coords);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 3:
+    switch (ct) {
+    case DM_POLYTOPE_TRIANGLE:
       ierr = PetscDrawLine(draw, PetscRealPart(coords[0]), PetscRealPart(coords[1]), PetscRealPart(coords[2]), PetscRealPart(coords[3]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       ierr = PetscDrawLine(draw, PetscRealPart(coords[2]), PetscRealPart(coords[3]), PetscRealPart(coords[4]), PetscRealPart(coords[5]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       ierr = PetscDrawLine(draw, PetscRealPart(coords[4]), PetscRealPart(coords[5]), PetscRealPart(coords[0]), PetscRealPart(coords[1]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       break;
-    case 4:
+    case DM_POLYTOPE_QUADRILATERAL:
       ierr = PetscDrawLine(draw, PetscRealPart(coords[0]), PetscRealPart(coords[1]), PetscRealPart(coords[2]), PetscRealPart(coords[3]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       ierr = PetscDrawLine(draw, PetscRealPart(coords[2]), PetscRealPart(coords[3]), PetscRealPart(coords[4]), PetscRealPart(coords[5]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       ierr = PetscDrawLine(draw, PetscRealPart(coords[4]), PetscRealPart(coords[5]), PetscRealPart(coords[6]), PetscRealPart(coords[7]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       ierr = PetscDrawLine(draw, PetscRealPart(coords[6]), PetscRealPart(coords[7]), PetscRealPart(coords[0]), PetscRealPart(coords[1]), PETSC_DRAW_BLACK);CHKERRQ(ierr);
       break;
-    default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot draw cells with %D facets", coneSize);
+    default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot draw cells of type %s", DMPolytopeTypes[ct]);
     }
     ierr = DMPlexVecRestoreClosure(dm, coordSection, coordinates, c, &numCoords, &coords);CHKERRQ(ierr);
   }
@@ -1254,20 +1270,25 @@ static PetscErrorCode DMPlexView_Draw(DM dm, PetscViewer viewer)
   PetscFunctionReturn(0);
 }
 
+#if defined(PETSC_HAVE_EXODUSII)
+#include <exodusII.h>
+#endif
+
 PetscErrorCode DMView_Plex(DM dm, PetscViewer viewer)
 {
-  PetscBool      iascii, ishdf5, isvtk, isdraw, flg, isglvis;
+  PetscBool      iascii, ishdf5, isvtk, isdraw, flg, isglvis, isexodus;
   char           name[PETSC_MAX_PATH_LEN];
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
-  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
-  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERVTK,   &isvtk);CHKERRQ(ierr);
-  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERHDF5,  &ishdf5);CHKERRQ(ierr);
-  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERDRAW,  &isdraw);CHKERRQ(ierr);
-  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERGLVIS, &isglvis);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII,    &iascii);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERVTK,      &isvtk);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERHDF5,     &ishdf5);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERDRAW,     &isdraw);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERGLVIS,    &isglvis);CHKERRQ(ierr);
+  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWEREXODUSII, &isexodus);CHKERRQ(ierr);
   if (iascii) {
     PetscViewerFormat format;
     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
@@ -1288,6 +1309,17 @@ PetscErrorCode DMView_Plex(DM dm, PetscViewer viewer)
     ierr = DMPlexView_Draw(dm, viewer);CHKERRQ(ierr);
   } else if (isglvis) {
     ierr = DMPlexView_GLVis(dm, viewer);CHKERRQ(ierr);
+#if defined(PETSC_HAVE_EXODUSII)
+  } else if (isexodus) {
+    int exoid;
+    PetscInt cStart, cEnd, c;
+
+    ierr = DMCreateLabel(dm, "Cell Sets");CHKERRQ(ierr);
+    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+    for (c = cStart; c < cEnd; ++c) {ierr = DMSetLabelValue(dm, "Cell Sets", c, 1);CHKERRQ(ierr);}
+    ierr = PetscViewerExodusIIGetId(viewer, &exoid);CHKERRQ(ierr);
+    ierr = DMPlexView_ExodusII_Internal(dm, exoid, 1);CHKERRQ(ierr);
+#endif
   } else {
     SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMPlex writing", ((PetscObject)viewer)->type_name);
   }
@@ -1842,6 +1874,7 @@ PetscErrorCode DMPlexGetConeRecursive(DM dm, IS points, PetscInt *depth, IS *exp
     arr = newarr;
     n = newn;
   }
+  ierr = ISRestoreIndices(points, &arr0);CHKERRQ(ierr);
   *depth = depth_;
   if (expandedPoints) *expandedPoints = expandedPoints_;
   else {
@@ -2662,7 +2695,7 @@ PetscErrorCode DMCreateSubDM_Plex(DM dm, PetscInt numFields, const PetscInt fiel
 
     (*subdm)->sfMigration = dm->sfMigration;
     ierr = PetscObjectReference((PetscObject) dm->sfMigration);CHKERRQ(ierr);
-    ierr = DMGetLocalSection((*subdm), &section);CHKERRQ(ierr);CHKERRQ(ierr);
+    ierr = DMGetLocalSection((*subdm), &section);CHKERRQ(ierr);
     ierr = PetscSFCreateInverseSF((*subdm)->sfMigration, &sfMigrationInv);CHKERRQ(ierr);
     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) (*subdm)), &sectionSeq);CHKERRQ(ierr);
     ierr = PetscSFDistributeSection(sfMigrationInv, section, NULL, sectionSeq);CHKERRQ(ierr);
@@ -2830,18 +2863,18 @@ static PetscErrorCode DMPlexCreateDimStratum(DM,DMLabel,DMLabel,PetscInt,PetscIn
   if run on a partially interpolated mesh, meaning one that had some edges and faces, but not others. For example, suppose that
   we had a mesh consisting of one triangle (c0) and three vertices (v0, v1, v2), and only one edge is on the boundary so we choose
   to interpolate only that one (e0), so that
-  $  cone(c0) = {e0, v2}
-  $  cone(e0) = {v0, v1}
+$  cone(c0) = {e0, v2}
+$  cone(e0) = {v0, v1}
   If DMPlexStratify() is run on this mesh, it will give depths
-  $  depth 0 = {v0, v1, v2}
-  $  depth 1 = {e0, c0}
+$  depth 0 = {v0, v1, v2}
+$  depth 1 = {e0, c0}
   where the triangle has been given depth 1, instead of 2, because it is reachable from vertex v2.
 
   DMPlexStratify() should be called after all calls to DMPlexSymmetrize()
 
   Level: beginner
 
-.seealso: DMPlexCreate(), DMPlexSymmetrize()
+.seealso: DMPlexCreate(), DMPlexSymmetrize(), DMPlexComputeCellTypes()
 @*/
 PetscErrorCode DMPlexStratify(DM dm)
 {
@@ -2952,6 +2985,112 @@ PetscErrorCode DMPlexStratify(DM dm)
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMPlexComputeCellTypes - Infer the polytope type of every cell using its dimension and cone size.
+
+  Collective on dm
+
+  Input Parameter:
+. dm - The DMPlex object
+
+  DMPlexComputeCellTypes() should be called after all calls to DMPlexSymmetrize() and DMPlexStratify()
+
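+  Example:
+  A minimal usage sketch (illustrative only; dm is assumed to be a DMPlex whose cones have been set, cell any point in
+  its chart, and ct a DMPolytopeType):
+$    ierr = DMPlexSymmetrize(dm);CHKERRQ(ierr);
+$    ierr = DMPlexStratify(dm);CHKERRQ(ierr);
+$    ierr = DMPlexComputeCellTypes(dm);CHKERRQ(ierr);
+$    ierr = DMPlexGetCellType(dm, cell, &ct);CHKERRQ(ierr);
+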
+  Level: beginner
+
+.seealso: DMPlexCreate(), DMPlexSymmetrize(), DMPlexStratify()
+@*/
+PetscErrorCode DMPlexComputeCellTypes(DM dm)
+{
+  DM_Plex       *mesh;
+  DMLabel        label;
+  PetscInt       dim, depth, gcStart, gcEnd, pStart, pEnd, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  mesh = (DM_Plex *) dm->data;
+  ierr = DMCreateLabel(dm, "celltype");CHKERRQ(ierr);
+  ierr = DMPlexGetCellTypeLabel(dm, &label);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
+  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
+  ierr = DMPlexGetGhostCellStratum(dm, &gcStart, &gcEnd);CHKERRQ(ierr);
+  for (p = pStart; p < pEnd; ++p) {
+    DMPolytopeType ct = DM_POLYTOPE_UNKNOWN;
+    PetscInt       pdepth, pheight, coneSize;
+
+    ierr = DMPlexGetPointDepth(dm, p, &pdepth);CHKERRQ(ierr);
+    ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
+    pheight = depth - pdepth;
+    if (depth <= 1) {
+      switch (pdepth) {
+        case 0: ct = DM_POLYTOPE_POINT;break;
+        case 1:
+          switch (coneSize) {
+            case 2: ct = DM_POLYTOPE_SEGMENT;break;
+            case 3: ct = DM_POLYTOPE_TRIANGLE;break;
+            case 4:
+            switch (dim) {
+              case 2: ct = DM_POLYTOPE_QUADRILATERAL;break;
+              case 3: ct = DM_POLYTOPE_TETRAHEDRON;break;
+              default: break;
+            }
+            break;
+            case 6: ct = DM_POLYTOPE_TRI_PRISM_TENSOR;break;
+            case 8: ct = DM_POLYTOPE_HEXAHEDRON;break;
+            default: break;
+          }
+      }
+    } else {
+      if (pdepth == 0) {
+        ct = DM_POLYTOPE_POINT;
+      } else if (pheight == 0) {
+        if ((p >= gcStart) && (p < gcEnd)) {
+          if (coneSize == 1) ct = DM_POLYTOPE_FV_GHOST;
+          else SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Ghost cell %D should have a cone size of 1, not %D", p, coneSize);
+        } else {
+          switch (dim) {
+            case 1:
+              switch (coneSize) {
+                case 2: ct = DM_POLYTOPE_SEGMENT;break;
+                default: break;
+              }
+              break;
+            case 2:
+              switch (coneSize) {
+                case 3: ct = DM_POLYTOPE_TRIANGLE;break;
+                case 4: ct = DM_POLYTOPE_QUADRILATERAL;break;
+                default: break;
+              }
+              break;
+            case 3:
+              switch (coneSize) {
+                case 4: ct = DM_POLYTOPE_TETRAHEDRON;break;
+                case 5: ct = DM_POLYTOPE_TRI_PRISM_TENSOR;break;
+                case 6: ct = DM_POLYTOPE_HEXAHEDRON;break;
+                default: break;
+              }
+              break;
+            default: break;
+          }
+        }
+      } else if (pheight > 0) {
+        switch (coneSize) {
+          case 2: ct = DM_POLYTOPE_SEGMENT;break;
+          case 3: ct = DM_POLYTOPE_TRIANGLE;break;
+          case 4: ct = DM_POLYTOPE_QUADRILATERAL;break;
+          default: break;
+        }
+      }
+    }
+    if (ct == DM_POLYTOPE_UNKNOWN) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Point %D is screwed up", p);
+    ierr = DMLabelSetValue(label, p, ct);CHKERRQ(ierr);
+  }
+  ierr = PetscObjectStateGet((PetscObject) label, &mesh->celltypeState);CHKERRQ(ierr);
+  ierr = PetscObjectViewFromOptions((PetscObject) label, NULL, "-dm_plex_celltypes_view");CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@C
   DMPlexGetJoin - Get an array for the join of the set of points
 
@@ -3578,7 +3717,7 @@ PetscErrorCode DMPlexGetDepthLabel(DM dm, DMLabel *depthLabel)
   This returns maximum of point depths over all points, i.e. maximum value of the label returned by DMPlexGetDepthLabel().
   The point depth is described more in detail in DMPlexSymmetrize().
 
-.seealso: DMPlexGetDepthLabel(), DMPlexGetHeightStratum(), DMPlexGetDepthStratum(), DMPlexSymmetrize()
+.seealso: DMPlexGetDepthLabel(), DMPlexGetHeightStratum(), DMPlexGetDepthStratum(), DMPlexGetPointDepth(), DMPlexGetPointHeight(), DMPlexSymmetrize()
 @*/
 PetscErrorCode DMPlexGetDepth(DM dm, PetscInt *depth)
 {
@@ -3615,7 +3754,7 @@ PetscErrorCode DMPlexGetDepth(DM dm, PetscInt *depth)
 
   Level: developer
 
-.seealso: DMPlexGetHeightStratum(), DMPlexGetDepth()
+.seealso: DMPlexGetHeightStratum(), DMPlexGetDepth(), DMPlexGetPointDepth()
 @*/
 PetscErrorCode DMPlexGetDepthStratum(DM dm, PetscInt stratumValue, PetscInt *start, PetscInt *end)
 {
@@ -3660,7 +3799,7 @@ PetscErrorCode DMPlexGetDepthStratum(DM dm, PetscInt stratumValue, PetscInt *sta
 
   Level: developer
 
-.seealso: DMPlexGetDepthStratum(), DMPlexGetDepth()
+.seealso: DMPlexGetDepthStratum(), DMPlexGetDepth(), DMPlexGetPointHeight()
 @*/
 PetscErrorCode DMPlexGetHeightStratum(DM dm, PetscInt stratumValue, PetscInt *start, PetscInt *end)
 {
@@ -3686,6 +3825,121 @@ PetscErrorCode DMPlexGetHeightStratum(DM dm, PetscInt stratumValue, PetscInt *st
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMPlexGetPointDepth - Get the depth of a given point
+
+  Not Collective
+
+  Input Parameter:
++ dm    - The DMPlex object
+- point - The point
+
+  Output Parameter:
+. depth - The depth of the point
+
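+  Example:
+  An illustrative sketch (vStart, vEnd, and pd are assumed to be declared PetscInt variables); for a fully interpolated
+  mesh the first vertex has depth 0:
+$    ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
+$    ierr = DMPlexGetPointDepth(dm, vStart, &pd);CHKERRQ(ierr);
+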
+  Level: intermediate
+
+.seealso: DMPlexGetCellType(), DMPlexGetDepthLabel(), DMPlexGetDepth(), DMPlexGetPointHeight()
+@*/
+PetscErrorCode DMPlexGetPointDepth(DM dm, PetscInt point, PetscInt *depth)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidIntPointer(depth, 3);
+  ierr = DMLabelGetValue(dm->depthLabel, point, depth);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/*@
+  DMPlexGetPointHeight - Get the height of a given point
+
+  Not Collective
+
+  Input Parameter:
++ dm    - The DMPlex object
+- point - The point
+
+  Output Parameter:
+. height - The height of the point
+
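+  Notes:
+  The height counts down from the maximum depth, so for a DMPlex of depth d
+$    height(point) = d - depth(point)
+  and, in a fully interpolated mesh, cells have height 0 while vertices have height d.
+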
+  Level: intermediate
+
+.seealso: DMPlexGetCellType(), DMPlexGetDepthLabel(), DMPlexGetDepth(), DMPlexGetPointDepth()
+@*/
+PetscErrorCode DMPlexGetPointHeight(DM dm, PetscInt point, PetscInt *height)
+{
+  PetscInt       n, pDepth;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidIntPointer(height, 3);
+  ierr = DMLabelGetNumValues(dm->depthLabel, &n);CHKERRQ(ierr);
+  ierr = DMLabelGetValue(dm->depthLabel, point, &pDepth);CHKERRQ(ierr);
+  *height = n - 1 - pDepth;  /* DAG depth is n-1 */
+  PetscFunctionReturn(0);
+}
+
+/*@
+  DMPlexGetCellTypeLabel - Get the DMLabel recording the polytope type of each cell
+
+  Not Collective
+
+  Input Parameter:
+. dm - The DMPlex object
+
+  Output Parameter:
+. celltypeLabel - The DMLabel recording cell polytope type
+
+  Level: developer
+
+.seealso: DMPlexGetCellType(), DMPlexGetDepthLabel(), DMPlexGetDepth()
+@*/
+PetscErrorCode DMPlexGetCellTypeLabel(DM dm, DMLabel *celltypeLabel)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidPointer(celltypeLabel, 2);
+  if (!dm->celltypeLabel) {ierr = DMPlexComputeCellTypes(dm);CHKERRQ(ierr);}
+  *celltypeLabel = dm->celltypeLabel;
+  PetscFunctionReturn(0);
+}
+
+/*@
+  DMPlexGetCellType - Get the polytope type of a given cell
+
+  Not Collective
+
+  Input Parameter:
++ dm   - The DMPlex object
+- cell - The cell
+
+  Output Parameter:
+. celltype - The polytope type of the cell
+
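+  Example:
+  An illustrative sketch of classifying every cell (c, cStart, cEnd, and ct are assumed to be declared by the caller):
+$    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+$    for (c = cStart; c < cEnd; ++c) {
+$      ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
+$    }
+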
+  Level: intermediate
+
+.seealso: DMPlexGetCellTypeLabel(), DMPlexGetDepthLabel(), DMPlexGetDepth()
+@*/
+PetscErrorCode DMPlexGetCellType(DM dm, PetscInt cell, DMPolytopeType *celltype)
+{
+  DMLabel        label;
+  PetscInt       ct;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidPointer(celltype, 3);
+  ierr = DMPlexGetCellTypeLabel(dm, &label);CHKERRQ(ierr);
+  ierr = DMLabelGetValue(label, cell, &ct);CHKERRQ(ierr);
+  *celltype = (DMPolytopeType) ct;
+  PetscFunctionReturn(0);
+}
+
 PetscErrorCode DMCreateCoordinateDM_Plex(DM dm, DM *cdm)
 {
   PetscSection   section, s;
@@ -3907,7 +4161,7 @@ static PetscErrorCode PetscSectionFieldGetTensorDegree_Private(PetscSection sect
 
   Level: developer
 
-.seealso: DMGetLocalSection(), PetscSectionSetClosurePermutation(), DMSetGlocalSection()
+.seealso: DMGetLocalSection(), PetscSectionSetClosurePermutation(), DMSetGlobalSection()
 @*/
 PetscErrorCode DMPlexSetClosurePermutationTensor(DM dm, PetscInt point, PetscSection section)
 {
@@ -4245,6 +4499,7 @@ PETSC_STATIC_INLINE PetscErrorCode DMPlexVecGetClosure_Depth1_Static(DM dm, Pets
   PetscFunctionReturn(0);
 }
 
+/* Compressed closure does not apply closure permutation */
 PetscErrorCode DMPlexGetCompressedClosure(DM dm, PetscSection section, PetscInt point, PetscInt *numPoints, PetscInt **points, PetscSection *clSec, IS *clPoints, const PetscInt **clp)
 {
   const PetscInt *cla;
@@ -4700,11 +4955,12 @@ PETSC_STATIC_INLINE PetscErrorCode updatePointFieldsBC_private(PetscSection sect
   PetscScalar    *a;
   PetscInt        fdof, foff, fcdof, foffset = *offset;
   const PetscInt *fcdofs; /* The indices of the constrained dofs for field f on this point */
-  PetscInt        cind = 0, ncind = 0, b;
+  PetscInt        Nc, cind = 0, ncind = 0, b;
   PetscBool       ncSet, fcSet;
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
+  ierr = PetscSectionGetFieldComponents(section, f, &Nc);CHKERRQ(ierr);
   ierr = PetscSectionGetFieldDof(section, point, f, &fdof);CHKERRQ(ierr);
   ierr = PetscSectionGetFieldConstraintDof(section, point, f, &fcdof);CHKERRQ(ierr);
   ierr = PetscSectionGetFieldOffset(section, point, f, &foff);CHKERRQ(ierr);
@@ -4717,7 +4973,7 @@ PETSC_STATIC_INLINE PetscErrorCode updatePointFieldsBC_private(PetscSection sect
         if (comps) {
           for (b = 0; b < fdof; b++) {
             ncSet = fcSet = PETSC_FALSE;
-            if ((ncind < Ncc)  && (b == comps[ncind])) {++ncind; ncSet = PETSC_TRUE;}
+            if (b%Nc == comps[ncind]) {ncind = (ncind+1)%Ncc; ncSet = PETSC_TRUE;}
             if ((cind < fcdof) && (b == fcdofs[cind])) {++cind;  fcSet = PETSC_TRUE;}
             if (ncSet && fcSet) {fuse(&a[b], values[clperm[foffset+perm[b]]] * (flip ? flip[perm[b]] : 1.));}
           }
@@ -4733,7 +4989,7 @@ PETSC_STATIC_INLINE PetscErrorCode updatePointFieldsBC_private(PetscSection sect
         if (comps) {
           for (b = 0; b < fdof; b++) {
             ncSet = fcSet = PETSC_FALSE;
-            if ((ncind < Ncc)  && (b == comps[ncind])) {++ncind; ncSet = PETSC_TRUE;}
+            if (b%Nc == comps[ncind]) {ncind = (ncind+1)%Ncc; ncSet = PETSC_TRUE;}
             if ((cind < fcdof) && (b == fcdofs[cind])) {++cind;  fcSet = PETSC_TRUE;}
             if (ncSet && fcSet) {fuse(&a[b], values[clperm[foffset+     b ]] * (flip ? flip[     b ] : 1.));}
           }
@@ -4751,7 +5007,7 @@ PETSC_STATIC_INLINE PetscErrorCode updatePointFieldsBC_private(PetscSection sect
         if (comps) {
           for (b = 0; b < fdof; b++) {
             ncSet = fcSet = PETSC_FALSE;
-            if ((ncind < Ncc)  && (b == comps[ncind])) {++ncind; ncSet = PETSC_TRUE;}
+            if (b%Nc == comps[ncind]) {ncind = (ncind+1)%Ncc; ncSet = PETSC_TRUE;}
             if ((cind < fcdof) && (b == fcdofs[cind])) {++cind;  fcSet = PETSC_TRUE;}
             if (ncSet && fcSet) {fuse(&a[b], values[foffset+perm[b]] * (flip ? flip[perm[b]] : 1.));}
           }
@@ -4767,7 +5023,7 @@ PETSC_STATIC_INLINE PetscErrorCode updatePointFieldsBC_private(PetscSection sect
         if (comps) {
           for (b = 0; b < fdof; b++) {
             ncSet = fcSet = PETSC_FALSE;
-            if ((ncind < Ncc)  && (b == comps[ncind])) {++ncind; ncSet = PETSC_TRUE;}
+            if (b%Nc == comps[ncind]) {ncind = (ncind+1)%Ncc; ncSet = PETSC_TRUE;}
             if ((cind < fcdof) && (b == fcdofs[cind])) {++cind;  fcSet = PETSC_TRUE;}
             if (ncSet && fcSet) {fuse(&a[b], values[foffset+     b ] * (flip ? flip[     b ] : 1.));}
           }
@@ -5014,13 +5270,14 @@ PetscErrorCode DMPlexVecSetClosure(DM dm, PetscSection section, Vec v, PetscInt
   PetscFunctionReturn(0);
 }
 
+/* Unlike DMPlexVecSetClosure(), this uses plex-native closure permutation, not a user-specified permutation such as DMPlexSetClosurePermutationTensor(). */
 PetscErrorCode DMPlexVecSetFieldClosure_Internal(DM dm, PetscSection section, Vec v, PetscBool fieldActive[], PetscInt point, PetscInt Ncc, const PetscInt comps[], const PetscScalar values[], InsertMode mode)
 {
   PetscSection      clSection;
   IS                clPoints;
   PetscScalar       *array;
   PetscInt          *points = NULL;
-  const PetscInt    *clp, *clperm;
+  const PetscInt    *clp;
   PetscInt          numFields, numPoints, p;
   PetscInt          offset = 0, f;
   PetscErrorCode    ierr;
@@ -5032,7 +5289,6 @@ PetscErrorCode DMPlexVecSetFieldClosure_Internal(DM dm, PetscSection section, Ve
   PetscValidHeaderSpecific(v, VEC_CLASSID, 3);
   ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);
   /* Get points */
-  ierr = PetscSectionGetClosureInversePermutation_Internal(section, (PetscObject) dm, NULL, &clperm);CHKERRQ(ierr);
   ierr = DMPlexGetCompressedClosure(dm,section,point,&numPoints,&points,&clSection,&clPoints,&clp);CHKERRQ(ierr);
   /* Get array */
   ierr = VecGetArray(v, &array);CHKERRQ(ierr);
@@ -5056,35 +5312,35 @@ PetscErrorCode DMPlexVecSetFieldClosure_Internal(DM dm, PetscSection section, Ve
         const PetscInt    point = points[2*p];
         const PetscInt    *perm = perms ? perms[p] : NULL;
         const PetscScalar *flip = flips ? flips[p] : NULL;
-        updatePointFields_private(section, point, perm, flip, f, insert, PETSC_FALSE, clperm, values, &offset, array);
+        updatePointFields_private(section, point, perm, flip, f, insert, PETSC_FALSE, NULL, values, &offset, array);
       } break;
     case INSERT_ALL_VALUES:
       for (p = 0; p < numPoints; p++) {
         const PetscInt    point = points[2*p];
         const PetscInt    *perm = perms ? perms[p] : NULL;
         const PetscScalar *flip = flips ? flips[p] : NULL;
-        updatePointFields_private(section, point, perm, flip, f, insert, PETSC_TRUE, clperm, values, &offset, array);
+        updatePointFields_private(section, point, perm, flip, f, insert, PETSC_TRUE, NULL, values, &offset, array);
         } break;
     case INSERT_BC_VALUES:
       for (p = 0; p < numPoints; p++) {
         const PetscInt    point = points[2*p];
         const PetscInt    *perm = perms ? perms[p] : NULL;
         const PetscScalar *flip = flips ? flips[p] : NULL;
-        updatePointFieldsBC_private(section, point, perm, flip, f, Ncc, comps, insert, clperm, values, &offset, array);
+        updatePointFieldsBC_private(section, point, perm, flip, f, Ncc, comps, insert, NULL, values, &offset, array);
       } break;
     case ADD_VALUES:
       for (p = 0; p < numPoints; p++) {
         const PetscInt    point = points[2*p];
         const PetscInt    *perm = perms ? perms[p] : NULL;
         const PetscScalar *flip = flips ? flips[p] : NULL;
-        updatePointFields_private(section, point, perm, flip, f, add, PETSC_FALSE, clperm, values, &offset, array);
+        updatePointFields_private(section, point, perm, flip, f, add, PETSC_FALSE, NULL, values, &offset, array);
       } break;
     case ADD_ALL_VALUES:
       for (p = 0; p < numPoints; p++) {
         const PetscInt    point = points[2*p];
         const PetscInt    *perm = perms ? perms[p] : NULL;
         const PetscScalar *flip = flips ? flips[p] : NULL;
-        updatePointFields_private(section, point, perm, flip, f, add, PETSC_TRUE, clperm, values, &offset, array);
+        updatePointFields_private(section, point, perm, flip, f, add, PETSC_TRUE, NULL, values, &offset, array);
       } break;
     default:
       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insert mode %d", mode);
@@ -5129,6 +5385,7 @@ static PetscErrorCode DMPlexPrintMatSetValues(PetscViewer viewer, Mat A, PetscIn
 
   Input Parameters:
 + section - The section for this data layout
+. islocal - Is the section (and thus indices being requested) local or global?
 . point   - The point contributing dofs with these indices
 . off     - The global offset of this point
 . loff    - The local offset of each field
@@ -5143,7 +5400,7 @@ static PetscErrorCode DMPlexPrintMatSetValues(PetscViewer viewer, Mat A, PetscIn
 
   Note: The indices could be local or global, depending on the value of 'off'.
 */
-PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection section, PetscInt point, PetscInt off, PetscInt *loff, PetscBool setBC, const PetscInt perm[], const PetscInt indperm[], PetscInt indices[])
+PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection section, PetscBool islocal, PetscInt point, PetscInt off, PetscInt *loff, PetscBool setBC, const PetscInt perm[], const PetscInt indperm[], PetscInt indices[])
 {
   PetscInt        dof;   /* The number of unknowns on this point */
   PetscInt        cdof;  /* The number of constraints on this point */
@@ -5152,6 +5409,7 @@ PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection section, PetscInt poi
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
+  if (!islocal && setBC) SETERRQ(PetscObjectComm((PetscObject)section),PETSC_ERR_ARG_INCOMP,"setBC incompatible with global indices; use a local section or disable setBC");
   ierr = PetscSectionGetDof(section, point, &dof);CHKERRQ(ierr);
   ierr = PetscSectionGetConstraintDof(section, point, &cdof);CHKERRQ(ierr);
   if (!cdof || setBC) {
@@ -5172,7 +5430,7 @@ PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection section, PetscInt poi
         indices[ind] = -(off+k+1);
         ++cind;
       } else {
-        indices[ind] = off+k-cind;
+        indices[ind] = off + k - (islocal ? 0 : cind);
       }
     }
   }
@@ -5181,16 +5439,51 @@ PetscErrorCode DMPlexGetIndicesPoint_Internal(PetscSection section, PetscInt poi
 }
 
 /*
-  This version only believes the point offset from the globalSection
-
- . off - The global offset of this point
+ DMPlexGetIndicesPointFields_Internal - gets section indices for a point in its canonical ordering.
+
+ Input Parameters:
++ section - a section (global or local)
+. islocal - PETSC_TRUE if requesting local indices (i.e., section is local); PETSC_FALSE for global
+. point - point within section
+. off - The offset of this point in the (local or global) indexed space; should be consistent with islocal and (usually) with the section
+. foffs - array of length numFields containing the offset in canonical point ordering (the location in indices) of each field
+. setBC - identify constrained (boundary condition) points via involution.
+. perms - perms[f][permsoff][:] is a permutation of dofs within each field
+. permsoff - offset
+- indperm - index permutation
+
+ Output Parameters:
++ foffs - each entry is incremented by the number of (unconstrained if setBC=FALSE) dofs in that field
+- indices - array to hold indices (as defined by section) of each dof associated with point
+
+ Notes:
+ If section is local and setBC=true, there is no distinction between constrained and unconstrained dofs.
+ If section is local and setBC=false, the indices for constrained points are the involution -(i+1) of their position
+ in the local vector.
+
+ If section is global and setBC=false, the indices for constrained points are negative (and their value is not
+ significant).  It is invalid to call with a global section and setBC=true.
+
+ Developer Note:
+ The section is only used for field layout, so islocal is technically a statement about the offset (off).  At some point
+ in the future, global sections may have fields set, in which case we could pass the global section and the offset
+ could be obtained from it instead of being passed explicitly as we do now.
+
+ Example:
+ Suppose a point contains one field with three components whose unconstrained indices are {10, 11, 12}.
+ When the middle component is constrained, we get the array {10, -12, 12} for (islocal=TRUE, setBC=FALSE).
+ Note that -12 is the involution of 11, so the user can involute negative indices to recover local indices.
+ The global vector does not store constrained dofs, so when this function returns global indices, say {110, -112, 111}, the value of -112 is an arbitrary flag that should not be interpreted beyond its sign.
+
+ Level: developer
 */
-PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection section, PetscInt point, PetscInt off, PetscInt foffs[], PetscBool setBC, const PetscInt ***perms, PetscInt permsoff, const PetscInt indperm[], PetscInt indices[])
+PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection section, PetscBool islocal, PetscInt point, PetscInt off, PetscInt foffs[], PetscBool setBC, const PetscInt ***perms, PetscInt permsoff, const PetscInt indperm[], PetscInt indices[])
 {
   PetscInt       numFields, foff, f;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  if (!islocal && setBC) SETERRQ(PetscObjectComm((PetscObject)section),PETSC_ERR_ARG_INCOMP,"setBC incompatible with global indices; use a local section or disable setBC");
   ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);
   for (f = 0, foff = 0; f < numFields; ++f) {
     PetscInt        fdof, cfdof;
@@ -5217,11 +5510,11 @@ PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection section, PetscI
           indices[ind] = -(off+foff+b+1);
           ++cind;
         } else {
-          indices[ind] = off+foff+b-cind;
+          indices[ind] = off + foff + b - (islocal ? 0 : cind);
         }
       }
     }
-    foff     += (setBC ? fdof : (fdof - cfdof));
+    foff     += (setBC || islocal ? fdof : (fdof - cfdof));
     foffs[f] += fdof;
   }
   PetscFunctionReturn(0);
@@ -5231,8 +5524,12 @@ PetscErrorCode DMPlexGetIndicesPointFields_Internal(PetscSection section, PetscI
   This version believes the globalSection offsets for each field, rather than just the point offset
 
  . foffs - The offset into 'indices' for each field, since it is segregated by field
+
+ Notes:
+ The semantics of this function relate to that of setBC=FALSE in DMPlexGetIndicesPointFields_Internal.
+ Since this function uses global indices, setBC=TRUE would be invalid, so no such argument exists.
 */
-PetscErrorCode DMPlexGetIndicesPointFieldsSplit_Internal(PetscSection section, PetscSection globalSection, PetscInt point, PetscInt foffs[], PetscBool setBC, const PetscInt ***perms, PetscInt permsoff, const PetscInt indperm[], PetscInt indices[])
+static PetscErrorCode DMPlexGetIndicesPointFieldsSplit_Internal(PetscSection section, PetscSection globalSection, PetscInt point, PetscInt foffs[], const PetscInt ***perms, PetscInt permsoff, const PetscInt indperm[], PetscInt indices[])
 {
   PetscInt       numFields, foff, f;
   PetscErrorCode ierr;
@@ -5248,7 +5545,7 @@ PetscErrorCode DMPlexGetIndicesPointFieldsSplit_Internal(PetscSection section, P
     ierr = PetscSectionGetFieldDof(section, point, f, &fdof);CHKERRQ(ierr);
     ierr = PetscSectionGetFieldConstraintDof(section, point, f, &cfdof);CHKERRQ(ierr);
     ierr = PetscSectionGetFieldOffset(globalSection, point, f, &foff);CHKERRQ(ierr);
-    if (!cfdof || setBC) {
+    if (!cfdof) {
       for (b = 0; b < fdof; ++b) {
         const PetscInt preind = perm ? foffs[f]+perm[b] : foffs[f]+b;
         const PetscInt ind    = indperm ? indperm[preind] : preind;
@@ -5526,7 +5823,7 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
           fEnd[f+1]   = fStart[f+1];
         }
         ierr = PetscSectionGetOffset(cSec, b, &bOff);CHKERRQ(ierr);
-        ierr = DMPlexGetIndicesPointFields_Internal(cSec, b, bOff, fEnd, PETSC_TRUE, perms, p, NULL, indices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPointFields_Internal(cSec, PETSC_TRUE, b, bOff, fEnd, PETSC_TRUE, perms, p, NULL, indices);CHKERRQ(ierr);
 
         fAnchorStart[0] = 0;
         fAnchorEnd[0]   = 0;
@@ -5544,7 +5841,7 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
           newPoints[2*(newP + q)]     = a;
           newPoints[2*(newP + q) + 1] = 0;
           ierr = PetscSectionGetOffset(section, a, &aOff);CHKERRQ(ierr);
-          ierr = DMPlexGetIndicesPointFields_Internal(section, a, aOff, fAnchorEnd, PETSC_TRUE, NULL, -1, NULL, newIndices);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPointFields_Internal(section, PETSC_TRUE, a, aOff, fAnchorEnd, PETSC_TRUE, NULL, -1, NULL, newIndices);CHKERRQ(ierr);
         }
         newP += bDof;
 
@@ -5578,7 +5875,7 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
         PetscInt bEnd = 0, bAnchorEnd = 0, bOff;
 
         ierr = PetscSectionGetOffset(cSec, b, &bOff);CHKERRQ(ierr);
-        ierr = DMPlexGetIndicesPoint_Internal(cSec, b, bOff, &bEnd, PETSC_TRUE, (perms && perms[0]) ? perms[0][p] : NULL, NULL, indices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPoint_Internal(cSec, PETSC_TRUE, b, bOff, &bEnd, PETSC_TRUE, (perms && perms[0]) ? perms[0][p] : NULL, NULL, indices);CHKERRQ(ierr);
 
         ierr = PetscSectionGetOffset (aSec, b, &bOff);CHKERRQ(ierr);
         for (q = 0; q < bDof; q++) {
@@ -5589,7 +5886,7 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
           newPoints[2*(newP + q)]     = a;
           newPoints[2*(newP + q) + 1] = 0;
           ierr = PetscSectionGetOffset(section, a, &aOff);CHKERRQ(ierr);
-          ierr = DMPlexGetIndicesPoint_Internal(section, a, aOff, &bAnchorEnd, PETSC_TRUE, NULL, NULL, newIndices);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPoint_Internal(section, PETSC_TRUE, a, aOff, &bAnchorEnd, PETSC_TRUE, NULL, NULL, newIndices);CHKERRQ(ierr);
         }
         newP += bDof;
 
@@ -5805,8 +6102,8 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
 
   Input Parameters:
 + dm - The DM
-. section - The section describing the local layout
-. globalSection - The section describing the parallel layout
+. section - The section describing the points (a local section)
+. idxSection - The section on which to obtain indices (may be local or global)
 - point - The mesh point
 
   Output parameters:
@@ -5814,14 +6111,22 @@ PetscErrorCode DMPlexAnchorsModifyMat(DM dm, PetscSection section, PetscInt numP
 . indices - The indices
 - outOffsets - Field offset if not NULL
 
-  Note: Must call DMPlexRestoreClosureIndices() to free allocated memory
+  Notes:
+  Must call DMPlexRestoreClosureIndices() to free allocated memory
+
+  If idxSection is global, any constrained dofs (see DMAddBoundary(), for example) will get negative indices.  The value
+  of those indices is not significant.  If idxSection is local, the constrained dofs will yield the involution -(idx+1)
+  of their index in a local vector.  A caller who does not wish to distinguish those points may recover the nonnegative
+  indices via involution, -(-(idx+1)+1)==idx.  Local indices are provided when idxSection == section, otherwise global
+  indices (with the above semantics) are implied.
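+
+  For example, when idxSection == section (local indices), a caller that needs nonnegative positions regardless of
+  constraints can undo the involution on each entry idx of indices (an illustrative sketch):
+$    pos = (idx < 0) ? -(idx+1) : idx;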
 
   Level: advanced
 
-.seealso DMPlexRestoreClosureIndices(), DMPlexVecGetClosure(), DMPlexMatSetClosure()
+.seealso DMPlexRestoreClosureIndices(), DMPlexVecGetClosure(), DMPlexMatSetClosure(), DMGetLocalSection(), DMGetGlobalSection()
 @*/
-PetscErrorCode DMPlexGetClosureIndices(DM dm, PetscSection section, PetscSection globalSection, PetscInt point, PetscInt *numIndices, PetscInt **indices, PetscInt *outOffsets)
+PetscErrorCode DMPlexGetClosureIndices(DM dm, PetscSection section, PetscSection idxSection, PetscInt point, PetscInt *numIndices, PetscInt **indices, PetscInt *outOffsets)
 {
+  PetscBool       isLocal = (PetscBool)(section == idxSection);
   PetscSection    clSection;
   IS              clPoints;
   const PetscInt *clp, *clperm;
@@ -5829,13 +6134,13 @@ PetscErrorCode DMPlexGetClosureIndices(DM dm, PetscSection section, PetscSection
   PetscInt       *points = NULL, *pointsNew;
   PetscInt        numPoints, numPointsNew;
   PetscInt        offsets[32];
-  PetscInt        Nf, Nind, NindNew, off, globalOff, f, p;
+  PetscInt        Nf, Nind, NindNew, off, idxOff, f, p;
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
   PetscValidHeaderSpecific(section, PETSC_SECTION_CLASSID, 2);
-  PetscValidHeaderSpecific(globalSection, PETSC_SECTION_CLASSID, 3);
+  PetscValidHeaderSpecific(idxSection, PETSC_SECTION_CLASSID, 3);
   if (numIndices) PetscValidPointer(numIndices, 4);
   PetscValidPointer(indices, 5);
   ierr = PetscSectionGetNumFields(section, &Nf);CHKERRQ(ierr);
@@ -5892,15 +6197,15 @@ PetscErrorCode DMPlexGetClosureIndices(DM dm, PetscSection section, PetscSection
       }
     }
     for (p = 0; p < numPoints; p++) {
-      ierr = PetscSectionGetOffset(globalSection, points[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPointFields_Internal(section, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, offsets, PETSC_FALSE, perms, p, clperm, *indices);CHKERRQ(ierr);
+      ierr = PetscSectionGetOffset(idxSection, points[2*p], &idxOff);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(section, isLocal, points[2*p], idxOff < 0 ? -(idxOff+1) : idxOff, offsets, PETSC_FALSE, perms, p, clperm, *indices);CHKERRQ(ierr);
     }
   } else {
     for (p = 0, off = 0; p < numPoints; p++) {
       const PetscInt *perm = perms[0] ? perms[0][p] : NULL;
 
-      ierr = PetscSectionGetOffset(globalSection, points[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPoint_Internal(section, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, clperm, *indices);CHKERRQ(ierr);
+      ierr = PetscSectionGetOffset(idxSection, points[2*p], &idxOff);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(section, isLocal, points[2*p], idxOff < 0 ? -(idxOff+1) : idxOff, &off, PETSC_FALSE, perm, clperm, *indices);CHKERRQ(ierr);
     }
   }
   /* Cleanup points */
@@ -6071,23 +6376,29 @@ PetscErrorCode DMPlexMatSetClosure(DM dm, PetscSection section, PetscSection glo
     ierr = PetscSectionGetUseFieldOffsets(globalSection, &useFieldOffsets);CHKERRQ(ierr);
     if (useFieldOffsets) {
       for (p = 0; p < numPoints; p++) {
-        DMPlexGetIndicesPointFieldsSplit_Internal(section, globalSection, points[2*p], offsets, PETSC_FALSE, perms, p, clperm, indices);
+        ierr = DMPlexGetIndicesPointFieldsSplit_Internal(section, globalSection, points[2*p], offsets, perms, p, clperm, indices);CHKERRQ(ierr);
       }
     } else {
       for (p = 0; p < numPoints; p++) {
         ierr = PetscSectionGetOffset(globalSection, points[2*p], &globalOff);CHKERRQ(ierr);
-        DMPlexGetIndicesPointFields_Internal(section, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, offsets, PETSC_FALSE, perms, p, clperm, indices);
+        /* Note that we pass a local section even though we're using global offsets.  This is because global sections do
+         * not (at the time of this writing) have fields set. They probably should, in which case we would pass the
+         * global section. */
+        ierr = DMPlexGetIndicesPointFields_Internal(section, PETSC_FALSE, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, offsets, PETSC_FALSE, perms, p, clperm, indices);CHKERRQ(ierr);
       }
     }
   } else {
     for (p = 0, off = 0; p < numPoints; p++) {
       const PetscInt *perm = perms[0] ? perms[0][p] : NULL;
       ierr = PetscSectionGetOffset(globalSection, points[2*p], &globalOff);CHKERRQ(ierr);
-      DMPlexGetIndicesPoint_Internal(section, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, clperm, indices);
+      /* Note that we pass a local section even though we're using global offsets.  This is because global sections do
+       * not (at the time of this writing) have fields set. They probably should, in which case we would pass the
+       * global section. */
+      ierr = DMPlexGetIndicesPoint_Internal(section, PETSC_FALSE, points[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, clperm, indices);CHKERRQ(ierr);
     }
   }
   if (mesh->printSetValues) {ierr = DMPlexPrintMatSetValues(PETSC_VIEWER_STDOUT_SELF, A, point, numIndices, indices, 0, NULL, values);CHKERRQ(ierr);}
-  ierr = MatSetValues(A, numIndices, indices, numIndices, indices, values, mode);
+  ierr = MatSetValues(A, numIndices, indices, numIndices, indices, values, mode);CHKERRQ(ierr);
   if (mesh->printFEM > 1) {
     PetscInt i;
     ierr = PetscPrintf(PETSC_COMM_SELF, "  Indices:");CHKERRQ(ierr);
@@ -6128,7 +6439,7 @@ PetscErrorCode DMPlexMatSetClosureRefined(DM dmf, PetscSection fsection, PetscSe
   PetscInt       *fpoints = NULL, *ftotpoints = NULL;
   PetscInt       *cpoints = NULL;
   PetscInt       *findices, *cindices;
-  const PetscInt *fclperm, *cclperm;
+  const PetscInt *fclperm = NULL, *cclperm = NULL; /* Closure permutations cannot work here */
   PetscInt        foffsets[32], coffsets[32];
   CellRefiner     cellRefiner;
   PetscInt        numFields, numSubcells, maxFPoints, numFPoints, numCPoints, numFIndices, numCIndices, dof, off, globalOff, pStart, pEnd, p, q, r, s, f;
@@ -6150,8 +6461,6 @@ PetscErrorCode DMPlexMatSetClosureRefined(DM dmf, PetscSection fsection, PetscSe
   if (numFields > 31) SETERRQ1(PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_OUTOFRANGE, "Number of fields %D limited to 31", numFields);
   ierr = PetscArrayzero(foffsets, 32);CHKERRQ(ierr);
   ierr = PetscArrayzero(coffsets, 32);CHKERRQ(ierr);
-  ierr = PetscSectionGetClosureInversePermutation_Internal(fsection, (PetscObject) dmf, NULL, &fclperm);CHKERRQ(ierr);
-  ierr = PetscSectionGetClosureInversePermutation_Internal(csection, (PetscObject) dmc, NULL, &cclperm);CHKERRQ(ierr);
   /* Column indices */
   ierr = DMPlexGetTransitiveClosure(dmc, point, PETSC_TRUE, &numCPoints, &cpoints);CHKERRQ(ierr);
   maxFPoints = numCPoints;
@@ -6228,11 +6537,11 @@ PetscErrorCode DMPlexMatSetClosureRefined(DM dmf, PetscSection fsection, PetscSe
     }
     for (p = 0; p < numFPoints; p++) {
       ierr = PetscSectionGetOffset(globalFSection, ftotpoints[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPointFields_Internal(fsection, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, foffsets, PETSC_FALSE, permsF, p, fclperm, findices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(fsection, PETSC_FALSE, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, foffsets, PETSC_FALSE, permsF, p, fclperm, findices);CHKERRQ(ierr);
     }
     for (p = 0; p < numCPoints; p++) {
       ierr = PetscSectionGetOffset(globalCSection, cpoints[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPointFields_Internal(csection, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, coffsets, PETSC_FALSE, permsC, p, cclperm, cindices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(csection, PETSC_FALSE, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, coffsets, PETSC_FALSE, permsC, p, cclperm, cindices);CHKERRQ(ierr);
     }
     for (f = 0; f < numFields; f++) {
       ierr = PetscSectionRestoreFieldPointSyms(fsection,f,numFPoints,ftotpoints,&permsF[f],NULL);CHKERRQ(ierr);
@@ -6248,13 +6557,13 @@ PetscErrorCode DMPlexMatSetClosureRefined(DM dmf, PetscSection fsection, PetscSe
       const PetscInt *perm = permsF ? permsF[p] : NULL;
 
       ierr = PetscSectionGetOffset(globalFSection, ftotpoints[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPoint_Internal(fsection, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, fclperm, findices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(fsection, PETSC_FALSE, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, fclperm, findices);CHKERRQ(ierr);
     }
     for (p = 0, off = 0; p < numCPoints; p++) {
       const PetscInt *perm = permsC ? permsC[p] : NULL;
 
       ierr = PetscSectionGetOffset(globalCSection, cpoints[2*p], &globalOff);CHKERRQ(ierr);
-      ierr = DMPlexGetIndicesPoint_Internal(csection, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, cclperm, cindices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(csection, PETSC_FALSE, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, cclperm, cindices);CHKERRQ(ierr);
     }
     ierr = PetscSectionRestorePointSyms(fsection,numFPoints,ftotpoints,&permsF,NULL);CHKERRQ(ierr);
     ierr = PetscSectionRestorePointSyms(csection,numCPoints,cpoints,&permsC,NULL);CHKERRQ(ierr);
@@ -6285,7 +6594,7 @@ PetscErrorCode DMPlexMatGetClosureIndicesRefined(DM dmf, PetscSection fsection,
   PetscInt      *fpoints = NULL, *ftotpoints = NULL;
   PetscInt      *cpoints = NULL;
   PetscInt       foffsets[32], coffsets[32];
-  const PetscInt *fclperm, *cclperm;
+  const PetscInt *fclperm = NULL, *cclperm = NULL; /* Closure permutations cannot work here */
   CellRefiner    cellRefiner;
   PetscInt       numFields, numSubcells, maxFPoints, numFPoints, numCPoints, numFIndices, numCIndices, dof, off, globalOff, pStart, pEnd, p, q, r, s, f;
   PetscErrorCode ierr;
@@ -6305,8 +6614,6 @@ PetscErrorCode DMPlexMatGetClosureIndicesRefined(DM dmf, PetscSection fsection,
   if (numFields > 31) SETERRQ1(PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_OUTOFRANGE, "Number of fields %D limited to 31", numFields);
   ierr = PetscArrayzero(foffsets, 32);CHKERRQ(ierr);
   ierr = PetscArrayzero(coffsets, 32);CHKERRQ(ierr);
-  ierr = PetscSectionGetClosureInversePermutation_Internal(fsection, (PetscObject) dmf, NULL, &fclperm);CHKERRQ(ierr);
-  ierr = PetscSectionGetClosureInversePermutation_Internal(csection, (PetscObject) dmc, NULL, &cclperm);CHKERRQ(ierr);
   /* Column indices */
   ierr = DMPlexGetTransitiveClosure(dmc, point, PETSC_TRUE, &numCPoints, &cpoints);CHKERRQ(ierr);
   maxFPoints = numCPoints;
@@ -6381,11 +6688,11 @@ PetscErrorCode DMPlexMatGetClosureIndicesRefined(DM dmf, PetscSection fsection,
     }
     for (p = 0; p < numFPoints; p++) {
       ierr = PetscSectionGetOffset(globalFSection, ftotpoints[2*p], &globalOff);CHKERRQ(ierr);
-      DMPlexGetIndicesPointFields_Internal(fsection, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, foffsets, PETSC_FALSE, permsF, p, fclperm, findices);
+      ierr = DMPlexGetIndicesPointFields_Internal(fsection, PETSC_FALSE, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, foffsets, PETSC_FALSE, permsF, p, fclperm, findices);CHKERRQ(ierr);
     }
     for (p = 0; p < numCPoints; p++) {
       ierr = PetscSectionGetOffset(globalCSection, cpoints[2*p], &globalOff);CHKERRQ(ierr);
-      DMPlexGetIndicesPointFields_Internal(csection, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, coffsets, PETSC_FALSE, permsC, p, cclperm, cindices);
+      ierr = DMPlexGetIndicesPointFields_Internal(csection, PETSC_FALSE, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, coffsets, PETSC_FALSE, permsC, p, cclperm, cindices);CHKERRQ(ierr);
     }
     for (f = 0; f < numFields; f++) {
       ierr = PetscSectionRestoreFieldPointSyms(fsection,f,numFPoints,ftotpoints,&permsF[f],NULL);CHKERRQ(ierr);
@@ -6401,13 +6708,13 @@ PetscErrorCode DMPlexMatGetClosureIndicesRefined(DM dmf, PetscSection fsection,
       const PetscInt *perm = permsF ? permsF[p] : NULL;
 
       ierr = PetscSectionGetOffset(globalFSection, ftotpoints[2*p], &globalOff);CHKERRQ(ierr);
-      DMPlexGetIndicesPoint_Internal(fsection, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, fclperm, findices);
+      ierr = DMPlexGetIndicesPoint_Internal(fsection, PETSC_FALSE, ftotpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, fclperm, findices);CHKERRQ(ierr);
     }
     for (p = 0, off = 0; p < numCPoints; p++) {
       const PetscInt *perm = permsC ? permsC[p] : NULL;
 
       ierr = PetscSectionGetOffset(globalCSection, cpoints[2*p], &globalOff);CHKERRQ(ierr);
-      DMPlexGetIndicesPoint_Internal(csection, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, cclperm, cindices);
+      ierr = DMPlexGetIndicesPoint_Internal(csection, PETSC_FALSE, cpoints[2*p], globalOff < 0 ? -(globalOff+1) : globalOff, &off, PETSC_FALSE, perm, cclperm, cindices);CHKERRQ(ierr);
     }
     ierr = PetscSectionRestorePointSyms(fsection,numFPoints,ftotpoints,&permsF,NULL);CHKERRQ(ierr);
     ierr = PetscSectionRestorePointSyms(csection,numCPoints,cpoints,&permsC,NULL);CHKERRQ(ierr);
@@ -6460,9 +6767,7 @@ static PetscErrorCode DMPlexCreateDimStratum(DM dm, DMLabel depthLabel, DMLabel
   PetscFunctionBegin;
   ierr = DMLabelGetStratumIS(depthLabel, d, &is);CHKERRQ(ierr);
   ierr = PetscObjectTypeCompare((PetscObject) is, ISSTRIDE, &isStride);CHKERRQ(ierr);
-  if (isStride) {
-    ierr = ISStrideGetInfo(is, &first, &stride);CHKERRQ(ierr);
-  }
+  if (isStride) {ierr = ISStrideGetInfo(is, &first, &stride);CHKERRQ(ierr);}
   if (is && (!isStride || stride != 1)) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM is not stratified: depth %D IS is not contiguous", d);
   ierr = ISCreateStride(PETSC_COMM_SELF, (dMax - first), first, 1, &his);CHKERRQ(ierr);
   ierr = DMLabelSetStratumIS(dimLabel, d, his);CHKERRQ(ierr);
@@ -6642,7 +6947,7 @@ PetscErrorCode DMPlexGetInteriorCellStratum(DM dm, PetscInt *cStartInterior, Pet
 }
 
 /* We can easily have a form that takes an IS instead */
-PetscErrorCode DMPlexCreateNumbering_Internal(DM dm, PetscInt pStart, PetscInt pEnd, PetscInt shift, PetscInt *globalSize, PetscSF sf, IS *numbering)
+PetscErrorCode DMPlexCreateNumbering_Plex(DM dm, PetscInt pStart, PetscInt pEnd, PetscInt shift, PetscInt *globalSize, PetscSF sf, IS *numbering)
 {
   PetscSection   section, globalSection;
   PetscInt      *numbers, p;
@@ -6684,7 +6989,7 @@ PetscErrorCode DMPlexCreateCellNumbering_Internal(DM dm, PetscBool includeHybrid
   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = DMPlexGetHybridBounds(dm, &cMax, NULL, NULL, NULL);CHKERRQ(ierr);
   if (cMax >= 0 && !includeHybrid) cEnd = PetscMin(cEnd, cMax);
-  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, NULL, dm->sf, globalCellNumbers);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, NULL, dm->sf, globalCellNumbers);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -6723,7 +7028,7 @@ PetscErrorCode DMPlexCreateVertexNumbering_Internal(DM dm, PetscBool includeHybr
   ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
   ierr = DMPlexGetHybridBounds(dm, NULL, NULL, NULL, &vMax);CHKERRQ(ierr);
   if (vMax >= 0 && !includeHybrid) vEnd = PetscMin(vEnd, vMax);
-  ierr = DMPlexCreateNumbering_Internal(dm, vStart, vEnd, 0, NULL, dm->sf, globalVertexNumbers);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, vStart, vEnd, 0, NULL, dm->sf, globalVertexNumbers);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -6793,7 +7098,7 @@ PetscErrorCode DMPlexCreatePointNumbering(DM dm, IS *globalPointNumbers)
     PetscInt pStart, pEnd, gsize;
 
     ierr = DMPlexGetDepthStratum(dm, gdepths[d], &pStart, &pEnd);CHKERRQ(ierr);
-    ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, shift, &gsize, dm->sf, &nums[d]);CHKERRQ(ierr);
+    ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, shift, &gsize, dm->sf, &nums[d]);CHKERRQ(ierr);
     shift += gsize;
   }
   ierr = ISConcatenate(PetscObjectComm((PetscObject) dm), depth+1, nums, globalPointNumbers);CHKERRQ(ierr);
@@ -6846,7 +7151,7 @@ PetscErrorCode DMPlexCreateRankField(DM dm, Vec *ranks)
     PetscScalar *lr;
 
     ierr = DMPlexPointGlobalRef(rdm, c, r, &lr);CHKERRQ(ierr);
-    *lr = rank;
+    if (lr) *lr = rank;
   }
   ierr = VecRestoreArray(*ranks, &r);CHKERRQ(ierr);
   ierr = DMDestroy(&rdm);CHKERRQ(ierr);
@@ -6912,11 +7217,14 @@ PetscErrorCode DMPlexCreateLabelField(DM dm, DMLabel label, Vec *val)
   Input Parameter:
 . dm - The DMPlex object
 
-  Note: This is a useful diagnostic when creating meshes programmatically.
+  Notes:
+  This is a useful diagnostic when creating meshes programmatically.
+
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
 
   Level: developer
 
-.seealso: DMCreate(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
+.seealso: DMCreate(), DMSetFromOptions()
 @*/
 PetscErrorCode DMPlexCheckSymmetry(DM dm)
 {
@@ -7004,12 +7312,15 @@ PetscErrorCode DMPlexCheckSymmetry(DM dm)
 + dm - The DMPlex object
 - cellHeight - Normally 0
 
-  Note: This is a useful diagnostic when creating meshes programmatically.
+  Notes:
+  This is a useful diagnostic when creating meshes programmatically.
   Currently applicable only to homogeneous simplex or tensor meshes.
 
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
+
   Level: developer
 
-.seealso: DMCreate(), DMPlexCheckSymmetry(), DMPlexCheckFaces()
+.seealso: DMCreate(), DMSetFromOptions()
 @*/
 PetscErrorCode DMPlexCheckSkeleton(DM dm, PetscInt cellHeight)
 {
@@ -7063,24 +7374,44 @@ PetscErrorCode DMPlexCheckSkeleton(DM dm, PetscInt cellHeight)
 /*@
   DMPlexCheckFaces - Check that the faces of each cell give a vertex order that is consistent with what we expect from the cell type
 
+  Not Collective
+
   Input Parameters:
 + dm - The DMPlex object
 - cellHeight - Normally 0
 
-  Note: This is a useful diagnostic when creating meshes programmatically.
+  Notes:
+  This is a useful diagnostic when creating meshes programmatically.
+  This routine is only relevant for meshes that are fully interpolated across all ranks.
+  It will error out if a partially interpolated mesh is given on some rank.
+  It will do nothing for locally uninterpolated mesh (as there is nothing to check).
+
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
 
   Level: developer
 
-.seealso: DMCreate(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton()
+.seealso: DMCreate(), DMPlexGetVTKCellHeight(), DMSetFromOptions()
 @*/
 PetscErrorCode DMPlexCheckFaces(DM dm, PetscInt cellHeight)
 {
   PetscInt       pMax[4];
   PetscInt       dim, depth, vStart, vEnd, cStart, cEnd, c, h;
   PetscErrorCode ierr;
+  DMPlexInterpolatedFlag interpEnum;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  ierr = DMPlexIsInterpolated(dm, &interpEnum);CHKERRQ(ierr);
+  if (interpEnum == DMPLEX_INTERPOLATED_NONE) PetscFunctionReturn(0);
+  if (interpEnum == DMPLEX_INTERPOLATED_PARTIAL) {
+    PetscMPIInt	rank;
+    MPI_Comm	comm;
+
+    ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
+    ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Mesh is only partially interpolated on rank %d, this is currently not supported", rank);
+  }
+
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
   ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
@@ -7089,10 +7420,12 @@ PetscErrorCode DMPlexCheckFaces(DM dm, PetscInt cellHeight)
     ierr = DMPlexGetHeightStratum(dm, h, &cStart, &cEnd);CHKERRQ(ierr);
     for (c = cStart; c < cEnd; ++c) {
       const PetscInt *cone, *ornt, *faces;
+      DMPolytopeType  ct;
       PetscInt        numFaces, faceSize, coneSize,f;
       PetscInt       *closure = NULL, closureSize, cl, numCorners = 0;
 
       if (pMax[dim-h] >= 0 && c >= pMax[dim-h]) continue;
+      ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
       ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
       ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
       ierr = DMPlexGetConeOrientation(dm, c, &ornt);CHKERRQ(ierr);
@@ -7101,7 +7434,7 @@ PetscErrorCode DMPlexCheckFaces(DM dm, PetscInt cellHeight)
         const PetscInt p = closure[cl];
         if ((p >= vStart) && (p < vEnd)) closure[numCorners++] = p;
       }
-      ierr = DMPlexGetRawFaces_Internal(dm, dim-h, numCorners, closure, &numFaces, &faceSize, &faces);CHKERRQ(ierr);
+      ierr = DMPlexGetRawFaces_Internal(dm, ct, closure, &numFaces, &faceSize, &faces);CHKERRQ(ierr);
       if (coneSize != numFaces) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cell %D has %D faces but should have %D", c, coneSize, numFaces);
       for (f = 0; f < numFaces; ++f) {
         PetscInt *fclosure = NULL, fclosureSize, cl, fnumCorners = 0, v;
@@ -7117,7 +7450,7 @@ PetscErrorCode DMPlexCheckFaces(DM dm, PetscInt cellHeight)
         }
         ierr = DMPlexRestoreTransitiveClosure(dm, cone[f], PETSC_TRUE, &fclosureSize, &fclosure);CHKERRQ(ierr);
       }
-      ierr = DMPlexRestoreFaces_Internal(dm, dim, c, &numFaces, &faceSize, &faces);CHKERRQ(ierr);
+      ierr = DMPlexRestoreFaces_Internal(dm, ct, &numFaces, &faceSize, &faces);CHKERRQ(ierr);
       ierr = DMPlexRestoreTransitiveClosure(dm, c, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
     }
   }
@@ -7130,11 +7463,14 @@ PetscErrorCode DMPlexCheckFaces(DM dm, PetscInt cellHeight)
   Input Parameter:
 . dm - The DMPlex object
 
-  Note: This is a useful diagnostic when creating meshes programmatically.
+  Notes:
+  This is a useful diagnostic when creating meshes programmatically.
+
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
 
   Level: developer
 
-.seealso: DMCreate(), DMCheckSymmetry(), DMCheckSkeleton(), DMCheckFaces()
+.seealso: DMCreate(), DMSetFromOptions()
 @*/
 PetscErrorCode DMPlexCheckGeometry(DM dm)
 {
@@ -7163,26 +7499,6 @@ PetscErrorCode DMPlexCheckGeometry(DM dm)
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexAreAllConePointsInArray_Private(DM dm, PetscInt p, PetscInt npoints, const PetscInt *points, PetscInt *missingPoint)
-{
-  PetscInt i,l,n;
-  const PetscInt *cone;
-  PetscErrorCode ierr;
-
-  PetscFunctionBegin;
-  *missingPoint = -1;
-  ierr = DMPlexGetConeSize(dm, p, &n);CHKERRQ(ierr);
-  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
-  for (i=0; i<n; i++) {
-    ierr = PetscFindInt(cone[i], npoints, points, &l);CHKERRQ(ierr);
-    if (l < 0) {*missingPoint = cone[i]; break;}
-  }
-  PetscFunctionReturn(0);
-}
@@ ... @@ PetscErrorCode DMPlexCheckPointSF(DM dm)
-    if (p >= plo && p < phi) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "point SF contains %d which is a cell",p);
+  ierr = DMPlexGetVTKCellHeight(dm, &cellHeight);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
+  for (l = 0; l < nleaves; ++l) {
+    const PetscInt point = locals[l];
+
+    if (point >= cStart && point < cEnd) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Point SF contains %D which is a cell", point);
   }
 
-  /* 2) if some point is in interface, then all its cone points must be also in interface  */
-  for (i=0; i<nleaves; i++) {
-    p = locals[i];
-    ierr = DMPlexAreAllConePointsInArray_Private(dm, p, nleaves, locals, &missingPoint);CHKERRQ(ierr);
-    if (missingPoint >= 0) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "point SF contains %d but not %d from its cone",p,missingPoint);
+  /* 2) if some point is in interface, then all its cone points must be also in interface (either as leaves or roots) */
+  for (l = 0; l < nleaves; ++l) {
+    const PetscInt  point = locals[l];
+    const PetscInt *cone;
+    PetscInt        coneSize, c, idx;
+
+    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
+    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
+    for (c = 0; c < coneSize; ++c) {
+      if (!rootdegree[cone[c]]) {
+        ierr = PetscFindInt(cone[c], nleaves, locals, &idx);CHKERRQ(ierr);
+        if (idx < 0) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Point SF contains %D but not %D from its cone", point, cone[c]);
+      }
+    }
   }
   PetscFunctionReturn(0);
 }
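
For reference, the diagnostics touched by this patch can also be driven directly from user code rather than through -dm_plex_check_all; a minimal illustrative driver (not part of the patch) is:

    #include <petscdmplex.h>

    int main(int argc, char **argv)
    {
      DM             dm, dmDist;
      PetscErrorCode ierr;

      ierr = PetscInitialize(&argc, &argv, NULL, NULL);if (ierr) return ierr;
      /* Small interpolated box mesh so that all of the checks are applicable */
      ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, 2, PETSC_TRUE, NULL, NULL, NULL, NULL, PETSC_TRUE, &dm);CHKERRQ(ierr);
      ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
      if (dmDist) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmDist;}
      ierr = DMPlexCheckSymmetry(dm);CHKERRQ(ierr);
      ierr = DMPlexCheckSkeleton(dm, 0);CHKERRQ(ierr);
      ierr = DMPlexCheckFaces(dm, 0);CHKERRQ(ierr);       /* errors out on partially interpolated meshes */
      ierr = DMPlexCheckGeometry(dm);CHKERRQ(ierr);
      ierr = DMPlexCheckPointSF(dm);CHKERRQ(ierr);        /* no cells in the point SF, cones present on the interface */
      ierr = DMPlexCheckInterfaceCones(dm);CHKERRQ(ierr); /* renamed from DMPlexCheckConesConformOnInterfaces below */
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      ierr = PetscFinalize();
      return ierr;
    }
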
@@ -7264,11 +7598,14 @@ static void MPIAPI cell_stats_reduce(void *a, void *b, int * len, MPI_Datatype *
 . output    - If true, statistics will be displayed on stdout
 - condLimit - Display all cells above this condition number, or PETSC_DETERMINE for no cell output
 
-  Note: This is mainly intended for debugging/testing purposes.
+  Notes:
+  This is mainly intended for debugging/testing purposes.
+
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
 
   Level: developer
 
-.seealso: DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
+.seealso: DMSetFromOptions()
 @*/
 PetscErrorCode DMPlexCheckCellShape(DM dm, PetscBool output, PetscReal condLimit)
 {
@@ -7914,3 +8251,47 @@ PetscErrorCode DMCreateSubDomainDM_Plex(DM dm, DMLabel label, PetscInt value, IS
   }
   PetscFunctionReturn(0);
 }
+
+/*@
+  DMPlexMonitorThroughput - Report the cell throughput of FE integration
+
+  Input Parameter:
+. dm - The DM
+
+  Level: developer
+
+  Options Database Keys:
+. -dm_plex_monitor_throughput - Activate the monitor
+
+.seealso: DMSetFromOptions(), DMPlexCreate()
+@*/
+PetscErrorCode DMPlexMonitorThroughput(DM dm, void *dummy)
+{
+  PetscStageLog      stageLog;
+  PetscLogEvent      event;
+  PetscLogStage      stage;
+  PetscEventPerfInfo eventInfo;
+  PetscReal          cellRate, flopRate;
+  PetscInt           cStart, cEnd, Nf, N;
+  const char        *name;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+#if defined(PETSC_USE_LOG)
+  ierr = PetscObjectGetName((PetscObject) dm, &name);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
+  ierr = PetscLogGetStageLog(&stageLog);CHKERRQ(ierr);
+  ierr = PetscStageLogGetCurrent(stageLog, &stage);CHKERRQ(ierr);
+  ierr = PetscLogEventGetId("DMPlexResidualFE", &event);CHKERRQ(ierr);
+  ierr = PetscLogEventGetPerfInfo(stage, event, &eventInfo);CHKERRQ(ierr);
+  N        = (cEnd - cStart)*Nf*eventInfo.count;
+  flopRate = eventInfo.flops/eventInfo.time;
+  cellRate = N/eventInfo.time;
+  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "DM (%s) FE Residual Integration: %D integrals %D reps\n  Cell rate: %.2g/s flop rate: %.2g MF/s\n", name ? name : "unknown", N, eventInfo.count, (double) cellRate, (double) (flopRate/1.e6));CHKERRQ(ierr);
+#else
+  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Plex Throughput Monitor is not supported if logging is turned off. Reconfigure using --with-log.");
+#endif
+  PetscFunctionReturn(0);
+}
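
An illustrative way to trigger the same report programmatically (not from the patch; RunSolve is a hypothetical stand-in for whatever performs the FE residual evaluations, e.g. SNESSolve on this DM):

    #include <petscdmplex.h>

    /* Sketch: enable logging, run the solver, then report FE residual-integration throughput.
       The same output is produced automatically when -dm_plex_monitor_throughput is given. */
    static PetscErrorCode SolveAndReport(DM dm, PetscErrorCode (*RunSolve)(DM))
    {
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = PetscLogDefaultBegin();CHKERRQ(ierr); /* the monitor reads the PETSc event logs */
      ierr = RunSolve(dm);CHKERRQ(ierr);           /* assumed to assemble residuals on dm */
      ierr = DMPlexMonitorThroughput(dm, NULL);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
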
diff --git a/src/dm/impls/plex/plexcheckinterface.c b/src/dm/impls/plex/plexcheckinterface.c
index 3083fa07167..383be21d55f 100644
--- a/src/dm/impls/plex/plexcheckinterface.c
+++ b/src/dm/impls/plex/plexcheckinterface.c
@@ -157,22 +157,27 @@ static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(Pet
 }
 
 /*@
-  DMPlexCheckConesConformOnInterfaces - Check that points on inter-partition interfaces have conforming order of cone points.
-    For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point SF containts connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
-    then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.
+  DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.
 
   Input Parameters:
 . dm - The DMPlex object
 
-  Note: This is mainly intended for debugging/testing purposes. Does not check cone orientation, for this purpose use DMPlexCheckFaces().
+  Notes:
+  For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point SF contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
+  then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.
 
-  Developer Note: Interface cones are expanded into vertices and then their coordinates are compared.
+  This is mainly intended for debugging/testing purposes. Does not check cone orientation, for this purpose use DMPlexCheckFaces().
+
+  For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
+
+  Developer Note:
+  Interface cones are expanded into vertices and then their coordinates are compared.
 
   Level: developer
 
-.seealso: DMPlexGetCone(), DMPlexGetConeSize(), DMGetPointSF(), DMGetCoordinates(), DMPlexCheckFaces(), DMPlexCheckPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton()
+.seealso: DMPlexGetCone(), DMPlexGetConeSize(), DMGetPointSF(), DMGetCoordinates(), DMSetFromOptions()
 @*/
-PetscErrorCode DMPlexCheckConesConformOnInterfaces(DM dm)
+PetscErrorCode DMPlexCheckInterfaceCones(DM dm)
 {
   PetscSF             sf;
   PetscInt            nleaves, nranks, nroots;
@@ -233,7 +238,7 @@ PetscErrorCode DMPlexCheckConesConformOnInterfaces(DM dm)
   ierr = PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL);CHKERRQ(ierr);
   if (verbose) {
     PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;
-    ierr = PetscViewerASCIIPrintf(v, "============\nDMPlexCheckConesConformOnInterfaces output\n============\n");CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n");CHKERRQ(ierr);
     ierr = PetscViewerASCIIPushSynchronized(v);CHKERRQ(ierr);
     ierr = PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", myrank);CHKERRQ(ierr);
     for (r=0; r<nranks; r++) {
       ...
     }
diff --git a/src/dm/impls/plex/plexcreate.c b/src/dm/impls/plex/plexcreate.c
@@ ... @@
 #include <petsc/private/dmpleximpl.h>   /*I   "petscdmplex.h"   I*/
 #include 
 
+PetscLogEvent DMPLEX_CreateFromFile, DMPLEX_CreateFromCellList, DMPLEX_CreateFromCellList_Coordinates;
+
 /*@
   DMPlexCreateDoublet - Creates a mesh of two cells of the specified type, optionally with later refinement.
 
@@ -297,7 +299,7 @@ PetscErrorCode DMPlexCreateSquareBoundary(DM dm, const PetscReal lower[], const
 + comm  - The communicator for the DM object
 . lower - The lower left front corner coordinates
 . upper - The upper right back corner coordinates
-- edges - The number of cells in each direction
+- faces - The number of faces in each direction (the same as the number of cells)
 
   Output Parameter:
 . dm  - The DM object
@@ -1001,9 +1003,13 @@ static PetscErrorCode DMPlexCreateBoxMesh_Tensor_Internal(MPI_Comm comm, PetscIn
 
   Options Database Keys:
 + -dm_plex_box_lower  - Specify lower-left-bottom coordinates for the box
-- -dm_plex_box_upper  - Specify upper-right-top coordinates for the box
+. -dm_plex_box_upper  - Specify upper-right-top coordinates for the box
+- -dm_plex_box_faces  - Number of faces in each linear direction
+
+  Notes:
+  The options database keys above take lists of length d in d dimensions.
 
-  Note: Here is the numbering returned for 2 faces in each direction for tensor cells:
+  Here is the numbering returned for 2 faces in each direction for tensor cells:
 $ 10---17---11---18----12
 $  |         |         |
 $  |         |         |
@@ -2235,6 +2241,9 @@ static PetscErrorCode DMPlexSwap_Static(DM dmA, DM dmB)
   depthTmp  = dmA->depthLabel;
   dmA->depthLabel = dmB->depthLabel;
   dmB->depthLabel = depthTmp;
+  depthTmp  = dmA->celltypeLabel;
+  dmA->celltypeLabel = dmB->celltypeLabel;
+  dmB->celltypeLabel = depthTmp;
   tmpI         = dmA->levelup;
   dmA->levelup = dmB->levelup;
   dmB->levelup = tmpI;
@@ -2244,6 +2253,7 @@ static PetscErrorCode DMPlexSwap_Static(DM dmA, DM dmB)
 PetscErrorCode DMSetFromOptions_NonRefinement_Plex(PetscOptionItems *PetscOptionsObject,DM dm)
 {
   DM_Plex       *mesh = (DM_Plex*) dm->data;
+  PetscBool      flg;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
@@ -2252,6 +2262,8 @@ PetscErrorCode DMSetFromOptions_NonRefinement_Plex(PetscOptionItems *PetscOption
   ierr = PetscOptionsBoundedInt("-dm_plex_print_fem", "Debug output level all fem computations", "DMPlexSNESComputeResidualFEM", 0, &mesh->printFEM, NULL,0);CHKERRQ(ierr);
   ierr = PetscOptionsReal("-dm_plex_print_tol", "Tolerance for FEM output", "DMPlexSNESComputeResidualFEM", mesh->printTol, &mesh->printTol, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsBoundedInt("-dm_plex_print_l2", "Debug output level all L2 diff computations", "DMComputeL2Diff", 0, &mesh->printL2, NULL,0);CHKERRQ(ierr);
+  ierr = DMMonitorSetFromOptions(dm, "-dm_plex_monitor_throughput", "Monitor the simulation throughput", "DMPlexMonitorThroughput", DMPlexMonitorThroughput, NULL, &flg);CHKERRQ(ierr);
+  if (flg) {ierr = PetscLogDefaultBegin();CHKERRQ(ierr);}
   /* Point Location */
   ierr = PetscOptionsBool("-dm_plex_hash_location", "Use grid hashing for point location", "DMInterpolate", PETSC_FALSE, &mesh->useHashLocation, NULL);CHKERRQ(ierr);
   /* Partitioning and distribution */
@@ -2263,16 +2275,21 @@ PetscErrorCode DMSetFromOptions_NonRefinement_Plex(PetscOptionItems *PetscOption
   ierr = PetscOptionsBool("-dm_plex_regular_refinement", "Use special nested projection algorithm for regular refinement", "DMPlexSetRegularRefinement", mesh->regularRefinement, &mesh->regularRefinement, NULL);CHKERRQ(ierr);
   /* Checking structure */
   {
-    PetscBool   flg = PETSC_FALSE, flg2 = PETSC_FALSE;
+    PetscBool   flg = PETSC_FALSE, flg2 = PETSC_FALSE, all = PETSC_FALSE;
 
+    ierr = PetscOptionsBool("-dm_plex_check_all", "Perform all checks", NULL, PETSC_FALSE, &all, &flg2);CHKERRQ(ierr);
     ierr = PetscOptionsBool("-dm_plex_check_symmetry", "Check that the adjacency information in the mesh is symmetric", "DMPlexCheckSymmetry", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
-    if (flg && flg2) {ierr = DMPlexCheckSymmetry(dm);CHKERRQ(ierr);}
+    if (all || (flg && flg2)) {ierr = DMPlexCheckSymmetry(dm);CHKERRQ(ierr);}
     ierr = PetscOptionsBool("-dm_plex_check_skeleton", "Check that each cell has the correct number of vertices (only for homogeneous simplex or tensor meshes)", "DMPlexCheckSkeleton", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
-    if (flg && flg2) {ierr = DMPlexCheckSkeleton(dm, 0);CHKERRQ(ierr);}
+    if (all || (flg && flg2)) {ierr = DMPlexCheckSkeleton(dm, 0);CHKERRQ(ierr);}
     ierr = PetscOptionsBool("-dm_plex_check_faces", "Check that the faces of each cell give a vertex order this is consistent with what we expect from the cell type", "DMPlexCheckFaces", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
-    if (flg && flg2) {ierr = DMPlexCheckFaces(dm, 0);CHKERRQ(ierr);}
+    if (all || (flg && flg2)) {ierr = DMPlexCheckFaces(dm, 0);CHKERRQ(ierr);}
     ierr = PetscOptionsBool("-dm_plex_check_geometry", "Check that cells have positive volume", "DMPlexCheckGeometry", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
-    if (flg && flg2) {ierr = DMPlexCheckGeometry(dm);CHKERRQ(ierr);}
+    if (all || (flg && flg2)) {ierr = DMPlexCheckGeometry(dm);CHKERRQ(ierr);}
+    ierr = PetscOptionsBool("-dm_plex_check_pointsf", "Check some necessary conditions for PointSF", "DMPlexCheckPointSF", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
+    if (all || (flg && flg2)) {ierr = DMPlexCheckPointSF(dm);CHKERRQ(ierr);}
+    ierr = PetscOptionsBool("-dm_plex_check_interface_cones", "Check points on inter-partition interfaces have conforming order of cone points", "DMPlexCheckInterfaceCones", PETSC_FALSE, &flg, &flg2);CHKERRQ(ierr);
+    if (all || (flg && flg2)) {ierr = DMPlexCheckInterfaceCones(dm);CHKERRQ(ierr);}
   }
 
   ierr = PetscPartitionerSetFromOptions(mesh->partitioner);CHKERRQ(ierr);
@@ -2547,6 +2564,7 @@ PETSC_EXTERN PetscErrorCode DMCreate_Plex(DM dm)
 
   mesh->regularRefinement   = PETSC_FALSE;
   mesh->depthState          = -1;
+  mesh->celltypeState       = -1;
   mesh->globalVertexNumbers = NULL;
   mesh->globalCellNumbers   = NULL;
   mesh->anchorSection       = NULL;
@@ -2615,6 +2633,7 @@ PetscErrorCode DMPlexBuildFromCellList_Parallel_Internal(DM dm, PetscInt spaceDi
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
+  ierr = PetscLogEventBegin(DMPLEX_CreateFromCellList,dm,0,0,0);CHKERRQ(ierr);
   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
   /* Partition vertices */
@@ -2699,6 +2718,7 @@ PetscErrorCode DMPlexBuildFromCellList_Parallel_Internal(DM dm, PetscInt spaceDi
   /* Fill in the rest of the topology structure */
   ierr = DMPlexSymmetrize(dm);CHKERRQ(ierr);
   ierr = DMPlexStratify(dm);CHKERRQ(ierr);
+  ierr = PetscLogEventEnd(DMPLEX_CreateFromCellList,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -2714,6 +2734,7 @@ PetscErrorCode DMPlexBuildCoordinates_Parallel_Internal(DM dm, PetscInt spaceDim
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  ierr = PetscLogEventBegin(DMPLEX_CreateFromCellList_Coordinates,dm,0,0,0);CHKERRQ(ierr);
   ierr = DMSetCoordinateDim(dm, spaceDim);CHKERRQ(ierr);
   ierr = PetscSFGetGraph(sfVert, &numVertices, &numVerticesAdj, NULL, NULL);CHKERRQ(ierr);
   ierr = DMGetCoordinateSection(dm, &coordSection);CHKERRQ(ierr);
@@ -2757,6 +2778,7 @@ PetscErrorCode DMPlexBuildCoordinates_Parallel_Internal(DM dm, PetscInt spaceDim
   ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
   ierr = DMSetCoordinatesLocal(dm, coordinates);CHKERRQ(ierr);
   ierr = VecDestroy(&coordinates);CHKERRQ(ierr);
+  ierr = PetscLogEventEnd(DMPLEX_CreateFromCellList_Coordinates,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -2843,6 +2865,7 @@ PetscErrorCode DMPlexBuildFromCellList_Internal(DM dm, PetscInt spaceDim, PetscI
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  ierr = PetscLogEventBegin(DMPLEX_CreateFromCellList,dm,0,0,0);CHKERRQ(ierr);
   ierr = DMPlexSetChart(dm, 0, numCells+numVertices);CHKERRQ(ierr);
   for (c = 0; c < numCells; ++c) {
     ierr = DMPlexSetConeSize(dm, c, numCorners);CHKERRQ(ierr);
@@ -2859,6 +2882,7 @@ PetscErrorCode DMPlexBuildFromCellList_Internal(DM dm, PetscInt spaceDim, PetscI
   ierr = DMRestoreWorkArray(dm, numCorners, MPIU_INT, &cone);CHKERRQ(ierr);
   ierr = DMPlexSymmetrize(dm);CHKERRQ(ierr);
   ierr = DMPlexStratify(dm);CHKERRQ(ierr);
+  ierr = PetscLogEventEnd(DMPLEX_CreateFromCellList,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -2875,6 +2899,7 @@ PetscErrorCode DMPlexBuildCoordinates_Internal(DM dm, PetscInt spaceDim, PetscIn
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  ierr = PetscLogEventBegin(DMPLEX_CreateFromCellList_Coordinates,dm,0,0,0);CHKERRQ(ierr);
   ierr = DMSetCoordinateDim(dm, spaceDim);CHKERRQ(ierr);
   ierr = DMGetCoordinateSection(dm, &coordSection);CHKERRQ(ierr);
   ierr = PetscSectionSetNumFields(coordSection, 1);CHKERRQ(ierr);
@@ -2899,6 +2924,7 @@ PetscErrorCode DMPlexBuildCoordinates_Internal(DM dm, PetscInt spaceDim, PetscIn
   ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
   ierr = DMSetCoordinatesLocal(dm, coordinates);CHKERRQ(ierr);
   ierr = VecDestroy(&coordinates);CHKERRQ(ierr);
+  ierr = PetscLogEventEnd(DMPLEX_CreateFromCellList_Coordinates,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -3205,6 +3231,9 @@ PetscErrorCode DMPlexCreateCellVertexFromFile(MPI_Comm comm, const char filename
   Options Database Keys:
 . -dm_plex_create_from_hdf5_xdmf - use the PETSC_VIEWER_HDF5_XDMF format for reading HDF5
 
+  Use -dm_plex_create_ prefix to pass options to the internal PetscViewer, e.g.
+$ -dm_plex_create_viewer_hdf5_collective
+
   Level: beginner
 
 .seealso: DMPlexCreateFromDAG(), DMPlexCreateFromCellList(), DMPlexCreate()
@@ -3230,6 +3259,8 @@ PetscErrorCode DMPlexCreateFromFile(MPI_Comm comm, const char filename[], PetscB
   PetscFunctionBegin;
   PetscValidCharPointer(filename, 2);
   PetscValidPointer(dm, 4);
+  ierr = DMInitializePackage();CHKERRQ(ierr);
+  ierr = PetscLogEventBegin(DMPLEX_CreateFromFile,0,0,0,0);CHKERRQ(ierr);
   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   ierr = PetscStrlen(filename, &len);CHKERRQ(ierr);
   if (!len) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Filename must be a valid path");
@@ -3261,6 +3292,8 @@ PetscErrorCode DMPlexCreateFromFile(MPI_Comm comm, const char filename[], PetscB
     ierr = PetscOptionsGetBool(NULL, NULL, "-dm_plex_create_from_hdf5_xdmf", &load_hdf5_xdmf, NULL);CHKERRQ(ierr);
     ierr = PetscViewerCreate(comm, &viewer);CHKERRQ(ierr);
     ierr = PetscViewerSetType(viewer, PETSCVIEWERHDF5);CHKERRQ(ierr);
+    ierr = PetscViewerSetOptionsPrefix(viewer, "dm_plex_create_");CHKERRQ(ierr);
+    ierr = PetscViewerSetFromOptions(viewer);CHKERRQ(ierr);
     ierr = PetscViewerFileSetMode(viewer, FILE_MODE_READ);CHKERRQ(ierr);
     ierr = PetscViewerFileSetName(viewer, filename);CHKERRQ(ierr);
     ierr = DMCreate(comm, dm);CHKERRQ(ierr);
@@ -3284,6 +3317,7 @@ PetscErrorCode DMPlexCreateFromFile(MPI_Comm comm, const char filename[], PetscB
   } else if (isCV) {
     ierr = DMPlexCreateCellVertexFromFile(comm, filename, interpolate, dm);CHKERRQ(ierr);
   } else SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot load file %s: unrecognized extension", filename);
+  ierr = PetscLogEventEnd(DMPLEX_CreateFromFile,0,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
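
An illustrative loader exercising DMPlexCreateFromFile() and the new -dm_plex_create_ viewer prefix (not part of the patch; "mesh.h5" is a placeholder file name):

    #include <petscdmplex.h>

    int main(int argc, char **argv)
    {
      DM             dm;
      PetscErrorCode ierr;

      ierr = PetscInitialize(&argc, &argv, NULL, NULL);if (ierr) return ierr;
      /* With an .h5 file, options such as -dm_plex_create_viewer_hdf5_collective
         are forwarded to the internal HDF5 viewer created above. */
      ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, "mesh.h5", PETSC_TRUE, &dm);CHKERRQ(ierr);
      ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      ierr = PetscFinalize();
      return ierr;
    }
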
 
diff --git a/src/dm/impls/plex/plexdistribute.c b/src/dm/impls/plex/plexdistribute.c
index d1286028acc..696d0c0e183 100644
--- a/src/dm/impls/plex/plexdistribute.c
+++ b/src/dm/impls/plex/plexdistribute.c
@@ -458,7 +458,7 @@ PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *ro
 . leafSection - The number of processes sharing a given leaf point
 - leafrank    - The rank of each process sharing a leaf point
 
-  Output Parameters:
+  Output Parameter:
 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
 
   Level: developer
@@ -570,7 +570,7 @@ PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection roo
 + dm          - The DM
 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
 
-  Output Parameters:
+  Output Parameter:
 . migrationSF - An SF that maps original points in old locations to points in new locations
 
   Level: developer
@@ -675,7 +675,7 @@ PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF
 /*@
   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
 
-  Input Parameter:
+  Input Parameters:
 + dm          - The DM
 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
 
@@ -1378,7 +1378,7 @@ static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype
 /*@C
   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
 
-  Input Parameter:
+  Input Parameters:
 + dm          - The source DMPlex object
 . migrationSF - The star forest that describes the parallel point remapping
 . ownership   - Flag causing a vote to determine point ownership
@@ -1386,6 +1386,9 @@ static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype
   Output Parameter:
 - pointSF     - The star forest describing the point overlap in the remapped DM
 
+  Notes:
+  The output pointSF is guaranteed to have its array of local indices (leaves) sorted.
+
   Level: developer
 
 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@@ -1485,6 +1488,7 @@ PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownersh
   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
   for (idx = 0, p = 0; p < nleaves; p++) {
     if (leafNodes[p].rank != rank) {
+      /* Note that pointLocal is automatically sorted as it is a sublist of 0, ..., nleaves-1 */
       pointLocal[idx] = p;
       pointRemote[idx] = leafNodes[p];
       idx++;
@@ -1508,7 +1512,7 @@ PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownersh
 
   Collective on dm
 
-  Input Parameter:
+  Input Parameters:
 + dm       - The source DMPlex object
 . sf       - The star forest communication context describing the migration pattern
 
@@ -1587,11 +1591,11 @@ PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
 
   Collective on dm
 
-  Input Parameter:
+  Input Parameters:
 + dm  - The original DMPlex object
 - overlap - The overlap of partitions, 0 is the default
 
-  Output Parameter:
+  Output Parameters:
 + sf - The PetscSF used for point distribution, or NULL if not needed
 - dmParallel - The distributed DMPlex object
 
@@ -1635,7 +1639,7 @@ PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmPara
   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
-  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
+  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
   ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
   {
     /* Convert partition to DMLabel */
@@ -1768,11 +1772,11 @@ PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmPara
 
   Collective on dm
 
-  Input Parameter:
-+ dm  - The non-overlapping distrbuted DMPlex object
+  Input Parameters:
++ dm  - The non-overlapping distributed DMPlex object
 - overlap - The overlap of partitions (the same on all ranks)
 
-  Output Parameter:
+  Output Parameters:
 + sf - The PetscSF used for point distribution
 - dmOverlap - The overlapping distributed DMPlex object, or NULL
 
@@ -1868,7 +1872,7 @@ PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
   Input Parameter:
 . dm - The DM
 
-  Output Parameters:
+  Output Parameter:
 . overlap - The overlap of this DM
 
   Level: intermediate
@@ -1892,7 +1896,7 @@ PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
 
   Collective on dm
 
-  Input Parameters:
+  Input Parameter:
 . dm - the original DMPlex object
 
   Output Parameters:
@@ -1936,7 +1940,7 @@ PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
 
   Collective on dm
 
-  Input Parameters:
+  Input Parameter:
 . dm - the original DMPlex object
 
   Output Parameters:
@@ -2003,3 +2007,40 @@ PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
+
+/*@
+  DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
+
+  Collective
+
+  Input Parameter:
+. dm      - The DM object
+
+  Output Parameter:
+. distributed - Flag whether the DM is distributed
+
+  Level: intermediate
+
+  Notes:
+  This currently finds out whether at least two ranks have any DAG points.
+  This involves MPI_Allreduce() with one integer.
+  The result is currently not stashed so every call to this routine involves this global communication.
+
+.seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
+@*/
+PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
+{
+  PetscInt          pStart, pEnd, count;
+  MPI_Comm          comm;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidPointer(distributed,2);
+  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
+  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
+  count = !!(pEnd - pStart);
+  ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
+  *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
+  PetscFunctionReturn(0);
+}
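
A short sketch of the intended usage of DMPlexIsDistributed() (illustrative, not from the patch):

    #include <petscdmplex.h>

    /* Distribute a mesh only if it is still serial; note the Allreduce inside DMPlexIsDistributed(). */
    static PetscErrorCode DistributeIfNeeded(DM *dm)
    {
      DM             dmDist = NULL;
      PetscBool      distributed;
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = DMPlexIsDistributed(*dm, &distributed);CHKERRQ(ierr);
      if (!distributed) {
        ierr = DMPlexDistribute(*dm, 0, NULL, &dmDist);CHKERRQ(ierr);
        if (dmDist) {ierr = DMDestroy(dm);CHKERRQ(ierr); *dm = dmDist;}
      }
      PetscFunctionReturn(0);
    }
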
diff --git a/src/dm/impls/plex/plexegads.c b/src/dm/impls/plex/plexegads.c
new file mode 100644
index 00000000000..31a6c95a170
--- /dev/null
+++ b/src/dm/impls/plex/plexegads.c
@@ -0,0 +1,85 @@
+#include <petsc/private/dmpleximpl.h> /*I      "petscdmplex.h"   I*/
+
+#ifdef PETSC_HAVE_EGADS
+#include <egads.h>
+#endif
+
+/* We need to understand how to natively parse STEP files. There seems to be only one open source implementation of
+   the STEP parser contained in the OpenCASCADE package. It is enough to make a strong man weep:
+
+     https://github.com/tpaviot/oce/tree/master/src/STEPControl
+
+   The STEP, and inner EXPRESS, formats are ISO standards, so they are documented
+
+     https://stackoverflow.com/questions/26774037/documentation-or-specification-for-step-and-stp-files
+     http://stepmod.sourceforge.net/express_model_spec/
+
+   but again it seems that there has been a deliberate effort at obfuscation, probably to raise the bar for entrants.
+*/
+
+
+/*@
+  DMPlexSnapToGeomModel - Given a coordinate point 'mcoords' on the mesh point 'p', return the closest coordinate point 'gcoords' on the geometry model associated with that point.
+
+  Not collective
+
+  Input Parameters:
++ dm      - The DMPlex object
+. p       - The mesh point
+- mcoords - A coordinate point lying on the mesh point
+
+  Output Parameter:
+. gcoords - The closest coordinate point on the geometry model associated with 'p' to the given point
+
+  Note: Returns the original coordinates if no geometry model is found. Right now the only supported geometry model is EGADS.
+
+  Level: intermediate
+
+.seealso: DMRefine(), DMPlexCreate(), DMPlexSetRefinementUniform()
+@*/
+PetscErrorCode DMPlexSnapToGeomModel(DM dm, PetscInt p, const PetscScalar mcoords[], PetscScalar gcoords[])
+{
+#ifdef PETSC_HAVE_EGADS
+  DMLabel        bodyLabel, faceLabel, edgeLabel;
+  PetscContainer modelObj;
+  PetscInt       bodyID, faceID, edgeID;
+  ego           *bodies;
+  ego            model, geom, body, face, edge;
+  double         point[3], params[3], result[3];
+  int            Nb, oclass, mtype, *senses;
+#endif
+  PetscInt       cdim, d;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
+#ifdef PETSC_HAVE_EGADS
+  ierr = DMGetLabel(dm, "EGADS Body ID", &bodyLabel);CHKERRQ(ierr);
+  ierr = DMGetLabel(dm, "EGADS Face ID", &faceLabel);CHKERRQ(ierr);
+  ierr = DMGetLabel(dm, "EGADS Edge ID", &edgeLabel);CHKERRQ(ierr);
+  if (!bodyLabel || !faceLabel || !edgeLabel) {
+    for (d = 0; d < cdim; ++d) gcoords[d] = mcoords[d];
+    PetscFunctionReturn(0);
+  }
+  ierr = DMLabelGetValue(bodyLabel, p, &bodyID);CHKERRQ(ierr);
+  ierr = DMLabelGetValue(faceLabel, p, &faceID);CHKERRQ(ierr);
+  ierr = DMLabelGetValue(edgeLabel, p, &edgeID);CHKERRQ(ierr);
+  ierr = PetscObjectQuery((PetscObject) dm, "EGADS Model", (PetscObject *) &modelObj);CHKERRQ(ierr);
+  ierr = PetscContainerGetPointer(modelObj, (void **) &model);CHKERRQ(ierr);
+  ierr = EG_getTopology(model, &geom, &oclass, &mtype, NULL, &Nb, &bodies, &senses);CHKERRQ(ierr);
+  if (bodyID >= Nb) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Body %D is not in [0, %d)", bodyID, Nb);
+  body = bodies[bodyID];
+  for (d = 0; d < cdim; ++d) point[d] = mcoords[d];
+  if (edgeID >= 0) {
+    ierr = EG_objectBodyTopo(body, EDGE, edgeID, &edge);CHKERRQ(ierr);
+    ierr = EG_invEvaluate(edge, point, params, result);
+  } else {
+    ierr = EG_objectBodyTopo(body, FACE, faceID, &face);CHKERRQ(ierr);
+    ierr = EG_invEvaluate(face, point, params, result);
+  }
+  for (d = 0; d < cdim; ++d) gcoords[d] = result[d];
+#else
+  for (d = 0; d < cdim; ++d) gcoords[d] = mcoords[d];
+#endif
+  PetscFunctionReturn(0);
+}
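
As an illustration of how DMPlexSnapToGeomModel() is meant to be called (not from the patch; assumes a coordinate dimension of at most 3):

    #include <petscdmplex.h>

    /* Sketch: snap every vertex of 'dm' onto the attached geometry model, if one is present. */
    static PetscErrorCode SnapAllVertices(DM dm)
    {
      PetscSection   cs;
      Vec            coordinates;
      PetscScalar   *coords, gcoords[3];
      PetscInt       vStart, vEnd, v, off, cdim, d;
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
      ierr = DMGetCoordinateSection(dm, &cs);CHKERRQ(ierr);
      ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
      ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
      for (v = vStart; v < vEnd; ++v) {
        ierr = PetscSectionGetOffset(cs, v, &off);CHKERRQ(ierr);
        ierr = DMPlexSnapToGeomModel(dm, v, &coords[off], gcoords);CHKERRQ(ierr);
        for (d = 0; d < cdim; ++d) coords[off+d] = gcoords[d];
      }
      ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
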
diff --git a/src/dm/impls/plex/plexexodusii.c b/src/dm/impls/plex/plexexodusii.c
index 280e7fbcb8e..4c9c599c917 100644
--- a/src/dm/impls/plex/plexexodusii.c
+++ b/src/dm/impls/plex/plexexodusii.c
@@ -6,7 +6,234 @@
 #include <exodusII.h>
 #endif
 
+#include <petsc/private/viewerimpl.h>
+#include <petsc/private/viewerexodusiiimpl.h>
 #if defined(PETSC_HAVE_EXODUSII)
+/*
+  PETSC_VIEWER_EXODUSII_ - Creates an ExodusII PetscViewer shared by all processors in a communicator.
+
+  Collective
+
+  Input Parameter:
+. comm - the MPI communicator to share the ExodusII PetscViewer
+
+  Level: intermediate
+
+  Notes:
+  This viewer is missing Fortran bindings.
+
+  Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return
+  an error code.  The ExodusII PetscViewer is usually used in the form
+$       XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm));
+
+.seealso: PetscViewerExodusIIOpen(), PetscViewerCreate(), PetscViewerDestroy()
+*/
+PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm)
+{
+  PetscViewer    viewer;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer);
+  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
+  ierr = PetscObjectRegisterDestroy((PetscObject) viewer);
+  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
+  PetscFunctionReturn(viewer);
+}
+
+static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+  PetscErrorCode        ierr;
+
+  PetscFunctionBegin;
+  if (exo->filename) {ierr = PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename);CHKERRQ(ierr);}
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscOptionItems *PetscOptionsObject, PetscViewer v)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscOptionsHead(PetscOptionsObject, "ExodusII PetscViewer Options");CHKERRQ(ierr);
+  ierr = PetscOptionsTail();CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer)
+{
+  PetscFunctionBegin;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+  PetscErrorCode        ierr;
+
+  PetscFunctionBegin;
+  if (exo->exoid >= 0) {ierr = ex_close(exo->exoid);CHKERRQ(ierr);}
+  ierr = PetscFree(exo);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetName_C",NULL);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileGetName_C",NULL);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetMode_C",NULL);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerExodusIIGetId",NULL);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[])
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+  PetscMPIInt           rank;
+  int                   CPU_word_size, IO_word_size, EXO_mode;
+  PetscErrorCode        ierr;
+
+  PetscFunctionBegin;
+  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
+  CPU_word_size = sizeof(PetscReal);
+  IO_word_size  = sizeof(PetscReal);
+  if (exo->exoid >= 0) {ex_close(exo->exoid); exo->exoid = -1;}
+  if (exo->filename) {ierr = PetscFree(exo->filename);CHKERRQ(ierr);}
+  ierr = PetscStrallocpy(name, &exo->filename);CHKERRQ(ierr);
+  /* Create or open the file collectively */
+  switch (exo->btype) {
+  case FILE_MODE_READ:
+    EXO_mode = EX_CLOBBER;
+    break;
+  case FILE_MODE_APPEND:
+    EXO_mode = EX_CLOBBER;
+    break;
+  case FILE_MODE_WRITE:
+    EXO_mode = EX_CLOBBER;
+    break;
+  default:
+    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
+  }
+  #if defined(PETSC_USE_64BIT_INDICES)
+  EXO_mode += EX_ALL_INT64_API;
+  #endif
+  exo->exoid = ex_create(name, EXO_mode, &CPU_word_size, &IO_word_size);
+  if (exo->exoid < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", name);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+
+  PetscFunctionBegin;
+  *name = exo->filename;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+
+  PetscFunctionBegin;
+  exo->btype = type;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+
+  PetscFunctionBegin;
+  *type = exo->btype;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid)
+{
+  PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
+
+  PetscFunctionBegin;
+  *exoid = exo->exoid;
+  PetscFunctionReturn(0);
+}
+
+/*@C
+   PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.
+
+   Collective
+
+   Input Parameters:
++  comm - MPI communicator
+.  name - name of file
+-  type - type of file
+$    FILE_MODE_WRITE - create new file for binary output
+$    FILE_MODE_READ - open existing file for binary input
+$    FILE_MODE_APPEND - open existing file for binary output
+
+   Output Parameter:
+.  exo - PetscViewer for ExodusII input/output to use with the specified file
+
+   Level: beginner
+
+   Note:
+   This PetscViewer should be destroyed with PetscViewerDestroy().
+
+
+.seealso: PetscViewerASCIIOpen(), PetscViewerPushFormat(), PetscViewerDestroy(), PetscViewerHDF5SetBaseDimension2(),
+          PetscViewerHDF5SetSPOutput(), PetscViewerHDF5GetBaseDimension2(), VecView(), MatView(), VecLoad(),
+          MatLoad(), PetscFileMode, PetscViewer, PetscViewerSetType(), PetscViewerFileSetMode(), PetscViewerFileSetName()
+@*/
+PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscViewerCreate(comm, exo);CHKERRQ(ierr);
+  ierr = PetscViewerSetType(*exo, PETSCVIEWEREXODUSII);CHKERRQ(ierr);
+  ierr = PetscViewerFileSetMode(*exo, type);CHKERRQ(ierr);
+  ierr = PetscViewerFileSetName(*exo, name);CHKERRQ(ierr);
+  ierr = PetscViewerSetFromOptions(*exo);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/*MC
+   PETSCVIEWEREXODUSII - A viewer that writes to an ExodusII file
+
+
+.seealso:  PetscViewerExodusIIOpen(), PetscViewerStringSPrintf(), PetscViewerSocketOpen(), PetscViewerDrawOpen(), PETSCVIEWERSOCKET,
+           PetscViewerCreate(), PetscViewerASCIIOpen(), PetscViewerBinaryOpen(), PETSCVIEWERBINARY, PETSCVIEWERDRAW, PETSCVIEWERSTRING,
+           PetscViewerMatlabOpen(), VecView(), DMView(), PetscViewerMatlabPutArray(), PETSCVIEWERASCII, PETSCVIEWERMATLAB,
+           PetscViewerFileSetName(), PetscViewerFileSetMode(), PetscViewerFormat, PetscViewerType, PetscViewerSetType()
+
+  Level: beginner
+M*/
+
+PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v)
+{
+  PetscViewer_ExodusII *exo;
+  PetscErrorCode        ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscNewLog(v,&exo);CHKERRQ(ierr);
+
+  v->data                = (void*) exo;
+  v->ops->destroy        = PetscViewerDestroy_ExodusII;
+  v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII;
+  v->ops->setup          = PetscViewerSetUp_ExodusII;
+  v->ops->view           = PetscViewerView_ExodusII;
+  v->ops->flush          = 0;
+  exo->btype             = (PetscFileMode) -1;
+  exo->filename          = 0;
+  exo->exoid             = -1;
+
+  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetName_C",PetscViewerFileSetName_ExodusII);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetName_C",PetscViewerFileGetName_ExodusII);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetMode_C",PetscViewerFileSetMode_ExodusII);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetMode_C",PetscViewerFileGetMode_ExodusII);CHKERRQ(ierr);
+  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscViewerExodusIIGetId_C",PetscViewerExodusIIGetId_ExodusII);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*
   EXOGetVarIndex - Locate a result in an exodus file based on its name
 
@@ -316,10 +543,12 @@ PetscErrorCode DMPlexView_ExodusII_Internal(DM dm, int exoid, PetscInt degree)
   /* --- Coordinates --- */
   ierr = PetscSectionCreate(comm, &section);CHKERRQ(ierr);
   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
-  for (d = 0; d < depth; ++d) {
-    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
-    for (p = pStart; p < pEnd; ++p) {
-      ierr = PetscSectionSetDof(section, p, nodes[0][d] > 0);CHKERRQ(ierr);
+  if (num_cs) {
+    for (d = 0; d < depth; ++d) {
+      ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
+      for (p = pStart; p < pEnd; ++p) {
+        ierr = PetscSectionSetDof(section, p, nodes[0][d] > 0);CHKERRQ(ierr);
+      }
     }
   }
   for (cs = 0; cs < num_cs; ++cs) {
@@ -838,6 +1067,31 @@ PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step)
 }
 #endif
 
+/*@
+  PetscViewerExodusIIGetId - Get the file id of the ExodusII file
+
+  Logically Collective on PetscViewer
+
+  Input Parameter:
+.  viewer - the PetscViewer
+
+  Output Parameter:
+.  exoid - The ExodusII file id
+
+  Level: intermediate
+
+.seealso: PetscViewerFileSetMode(), PetscViewerCreate(), PetscViewerSetType(), PetscViewerBinaryOpen()
+@*/
+PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
+  ierr = PetscTryMethod(viewer, "PetscViewerExodusIIGetId_C",(PetscViewer,int*),(viewer,exoid));CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
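
A hedged usage sketch combining PetscViewerExodusIIOpen() and PetscViewerExodusIIGetId() (illustrative; "out.exo" is a placeholder, and the prototypes are assumed visible from the viewer headers):

    #include <petscviewer.h>

    /* Open an ExodusII viewer, recover the raw exoid, and hand it to ExodusII calls if desired. */
    static PetscErrorCode OpenExodus(MPI_Comm comm)
    {
      PetscViewer    viewer;
      int            exoid = -1;
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = PetscViewerExodusIIOpen(comm, "out.exo", FILE_MODE_WRITE, &viewer);CHKERRQ(ierr);
      ierr = PetscViewerExodusIIGetId(viewer, &exoid);CHKERRQ(ierr);
      /* 'exoid' can now be passed directly to ExodusII library routines */
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
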
+
 /*@C
   DMPlexCreateExodusFromFile - Create a DMPlex mesh from an ExodusII file.
 
diff --git a/src/dm/impls/plex/plexfem.c b/src/dm/impls/plex/plexfem.c
index 1046fcec233..1fc096df7d0 100644
--- a/src/dm/impls/plex/plexfem.c
+++ b/src/dm/impls/plex/plexfem.c
@@ -2000,6 +2000,7 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point
                                                        PetscScalar *fintegral, void *user)
 {
   DM                 plex = NULL, plexA = NULL;
+  DMEnclosureType    encAux;
   PetscDS            prob, probAux = NULL;
   PetscSection       section, sectionAux = NULL;
   Vec                locA = NULL;
@@ -2037,6 +2038,7 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point
     DM dmAux;
 
     ierr = VecGetDM(locA, &dmAux);CHKERRQ(ierr);
+    ierr = DMGetEnclosureRelation(dmAux, dm, &encAux);CHKERRQ(ierr);
     ierr = DMConvert(dmAux, DMPLEX, &plexA);CHKERRQ(ierr);
     ierr = DMGetDS(dmAux, &probAux);CHKERRQ(ierr);
     ierr = PetscDSGetNumFields(probAux, &NfAux);CHKERRQ(ierr);
@@ -2084,7 +2086,7 @@ static PetscErrorCode DMPlexComputeBdIntegral_Internal(DM dm, Vec locX, IS point
         ierr = DMPlexVecRestoreClosure(plex, section, locX, support[0], NULL, &x);CHKERRQ(ierr);
         if (locA) {
           PetscInt subp;
-          ierr = DMPlexGetSubpoint(plexA, support[0], &subp);CHKERRQ(ierr);
+          ierr = DMGetEnclosurePoint(plexA, dm, encAux, support[0], &subp);CHKERRQ(ierr);
           ierr = DMPlexVecGetClosure(plexA, sectionAux, locA, subp, NULL, &x);CHKERRQ(ierr);
           for (i = 0; i < totDimAux; ++i) a[f*totDimAux+i] = x[i];
           ierr = DMPlexVecRestoreClosure(plexA, sectionAux, locA, subp, NULL, &x);CHKERRQ(ierr);
@@ -2229,7 +2231,7 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
 {
   DM_Plex          *mesh  = (DM_Plex *) dmc->data;
   const char       *name  = "Interpolator";
-  PetscDS           prob;
+  PetscDS           cds, rds;
   PetscFE          *feRef;
   PetscFV          *fvRef;
   PetscSection      fsection, fglobalSection;
@@ -2248,14 +2250,15 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
   ierr = DMGetGlobalSection(dmc, &cglobalSection);CHKERRQ(ierr);
   ierr = PetscSectionGetNumFields(fsection, &Nf);CHKERRQ(ierr);
   ierr = DMPlexGetInteriorCellStratum(dmc, &cStart, &cEnd);CHKERRQ(ierr);
-  ierr = DMGetDS(dmf, &prob);CHKERRQ(ierr);
-  ierr = PetscCalloc2(Nf,&feRef,Nf,&fvRef);CHKERRQ(ierr);
+  ierr = DMGetDS(dmc, &cds);CHKERRQ(ierr);
+  ierr = DMGetDS(dmf, &rds);CHKERRQ(ierr);
+  ierr = PetscCalloc2(Nf, &feRef, Nf, &fvRef);CHKERRQ(ierr);
   for (f = 0; f < Nf; ++f) {
     PetscObject  obj;
     PetscClassId id;
     PetscInt     rNb = 0, Nc = 0;
 
-    ierr = PetscDSGetDiscretization(prob, f, &obj);CHKERRQ(ierr);
+    ierr = PetscDSGetDiscretization(rds, f, &obj);CHKERRQ(ierr);
     ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
     if (id == PETSCFE_CLASSID) {
       PetscFE fe = (PetscFE) obj;
@@ -2274,7 +2277,7 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
     }
     rTotDim += rNb;
   }
-  ierr = PetscDSGetTotalDimension(prob, &cTotDim);CHKERRQ(ierr);
+  ierr = PetscDSGetTotalDimension(cds, &cTotDim);CHKERRQ(ierr);
   ierr = PetscMalloc1(rTotDim*cTotDim,&elemMat);CHKERRQ(ierr);
   ierr = PetscArrayzero(elemMat, rTotDim*cTotDim);CHKERRQ(ierr);
   for (fieldI = 0, offsetI = 0; fieldI < Nf; ++fieldI) {
@@ -2308,13 +2311,13 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
     for (fieldJ = 0, offsetJ = 0; fieldJ < Nf; ++fieldJ) {
       PetscObject  obj;
       PetscClassId id;
-      PetscReal   *B;
       PetscInt     NcJ = 0, cpdim = 0, j, qNc;
 
-      ierr = PetscDSGetDiscretization(prob, fieldJ, &obj);CHKERRQ(ierr);
+      ierr = PetscDSGetDiscretization(cds, fieldJ, &obj);CHKERRQ(ierr);
       ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
       if (id == PETSCFE_CLASSID) {
-        PetscFE fe = (PetscFE) obj;
+        PetscFE           fe = (PetscFE) obj;
+        PetscTabulation T  = NULL;
 
         /* Evaluate basis at points */
         ierr = PetscFEGetNumComponents(fe, &NcJ);CHKERRQ(ierr);
@@ -2322,7 +2325,7 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
         /* For now, fields only interpolate themselves */
         if (fieldI == fieldJ) {
           if (Nc != NcJ) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Number of components in fine space field %D does not match coarse field %D", Nc, NcJ);
-          ierr = PetscFEGetTabulation(fe, npoints, points, &B, NULL, NULL);CHKERRQ(ierr);
+          ierr = PetscFECreateTabulation(fe, 1, npoints, points, 0, &T);CHKERRQ(ierr);
           for (i = 0, k = 0; i < fpdim; ++i) {
             ierr = PetscDualSpaceGetFunctional(Qref, i, &f);CHKERRQ(ierr);
             ierr = PetscQuadratureGetData(f, NULL, &qNc, &Np, NULL, &qweights);CHKERRQ(ierr);
@@ -2337,11 +2340,11 @@ PetscErrorCode DMPlexComputeInterpolatorNested(DM dmc, DM dmf, Mat In, void *use
                    Np, p:              Number of quad points in the fine grid functional i
                    k:                  i*Np + p, overall point number for the interpolation
                 */
-                for (c = 0; c < Nc; ++c) elemMat[(offsetI + i)*cTotDim + offsetJ + j] += B[k*cpdim*NcJ+j*Nc+c]*qweights[p*qNc+c];
+                for (c = 0; c < Nc; ++c) elemMat[(offsetI + i)*cTotDim + offsetJ + j] += T->T[0][k*cpdim*NcJ+j*Nc+c]*qweights[p*qNc+c];
               }
             }
           }
-          ierr = PetscFERestoreTabulation(fe, npoints, points, &B, NULL, NULL);CHKERRQ(ierr);CHKERRQ(ierr);
+          ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);
         }
       } else if (id == PETSCFV_CLASSID) {
         PetscFV        fv = (PetscFV) obj;
@@ -2564,12 +2567,13 @@ PetscErrorCode DMPlexComputeInterpolatorGeneral(DM dmc, DM dmf, Mat In, void *us
   ierr = MatSetOption(In, MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
   for (field = 0; field < Nf; ++field) {
-    PetscObject      obj;
-    PetscClassId     id;
-    PetscDualSpace   Q = NULL;
-    PetscQuadrature  f;
-    const PetscReal *qpoints, *qweights;
-    PetscInt         Nc, qNc, Np, fpdim, i, d;
+    PetscObject       obj;
+    PetscClassId      id;
+    PetscDualSpace    Q = NULL;
+    PetscTabulation T = NULL;
+    PetscQuadrature   f;
+    const PetscReal  *qpoints, *qweights;
+    PetscInt          Nc, qNc, Np, fpdim, i, d;
 
     ierr = PetscDSGetDiscretization(prob, field, &obj);CHKERRQ(ierr);
     ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
@@ -2578,6 +2582,7 @@ PetscErrorCode DMPlexComputeInterpolatorGeneral(DM dmc, DM dmf, Mat In, void *us
 
       ierr = PetscFEGetDualSpace(fe, &Q);CHKERRQ(ierr);
       ierr = PetscFEGetNumComponents(fe, &Nc);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(fe, 1, 1, x, 0, &T);CHKERRQ(ierr);
     } else if (id == PETSCFV_CLASSID) {
       PetscFV fv = (PetscFV) obj;
 
@@ -2633,18 +2638,16 @@ PetscErrorCode DMPlexComputeInterpolatorGeneral(DM dmc, DM dmf, Mat In, void *us
           CoordinatesRealToRef(dim, dim, xi0, v0c, invJc, pVReal, x);
 
           if (id == PETSCFE_CLASSID) {
-            PetscFE    fe = (PetscFE) obj;
-            PetscReal *B;
+            PetscFE fe = (PetscFE) obj;
 
             /* Evaluate coarse basis on contained point */
             ierr = PetscFEGetDimension(fe, &cpdim);CHKERRQ(ierr);
-            ierr = PetscFEGetTabulation(fe, 1, x, &B, NULL, NULL);CHKERRQ(ierr);
+            ierr = PetscFEComputeTabulation(fe, 1, x, 0, T);CHKERRQ(ierr);
             ierr = PetscArrayzero(elemMat, cpdim);CHKERRQ(ierr);
             /* Get elemMat entries by multiplying by weight */
             for (j = 0; j < cpdim; ++j) {
-              for (c = 0; c < Nc; ++c) elemMat[j] += B[j*Nc + c]*qweights[ccell*qNc + c];
+              for (c = 0; c < Nc; ++c) elemMat[j] += T->T[0][j*Nc + c]*qweights[ccell*qNc + c];
             }
-            ierr = PetscFERestoreTabulation(fe, 1, x, &B, NULL, NULL);CHKERRQ(ierr);CHKERRQ(ierr);
           } else {
             cpdim = 1;
             for (j = 0; j < cpdim; ++j) {
@@ -2663,6 +2666,7 @@ PetscErrorCode DMPlexComputeInterpolatorGeneral(DM dmc, DM dmf, Mat In, void *us
       }
       ierr = DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, &numFIndices, &findices, NULL);CHKERRQ(ierr);
     }
+    if (id == PETSCFE_CLASSID) {ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);}
   }
   ierr = PetscFree3(v0,J,invJ);CHKERRQ(ierr);
   ierr = PetscFree3(v0c,Jc,invJc);CHKERRQ(ierr);
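The hunks in this file all follow the same migration: the removed PetscFEGetTabulation/PetscFERestoreTabulation pair is replaced by an explicit PetscTabulation object. A minimal sketch of that lifecycle, assuming a PetscFE fe and arrays refpoints[]/newpoints[] of npoints reference coordinates (illustrative names only):

    PetscTabulation T;

    /* Create a tabulation of basis values (K = 0) at npoints reference points */
    ierr = PetscFECreateTabulation(fe, 1, npoints, refpoints, 0, &T);CHKERRQ(ierr);
    /* Basis value for point q, basis function b, component c:
       T->T[0][q*pdim*Nc + b*Nc + c] */
    /* Re-evaluate the same tabulation at another set of npoints points */
    ierr = PetscFEComputeTabulation(fe, npoints, newpoints, 0, T);CHKERRQ(ierr);
    /* The caller owns the created tabulation and must destroy it */
    ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);

By contrast, PetscFEGetCellTabulation() (used for Tfine below) returns a tabulation owned by the PetscFE at its own quadrature, which the caller does not destroy.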
@@ -2804,17 +2808,22 @@ PetscErrorCode DMPlexComputeMassMatrixGeneral(DM dmc, DM dmf, Mat mass, void *us
   ierr = MatSetOption(mass, MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
   for (field = 0; field < Nf; ++field) {
-    PetscObject      obj;
-    PetscClassId     id;
-    PetscQuadrature  quad;
-    PetscReal       *Bfine;
-    const PetscReal *qpoints, *qweights;
-    PetscInt         Nq, Nc, i, d;
+    PetscObject       obj;
+    PetscClassId      id;
+    PetscTabulation T, Tfine;
+    PetscQuadrature   quad;
+    const PetscReal  *qpoints, *qweights;
+    PetscInt          Nq, Nc, i, d;
 
     ierr = PetscDSGetDiscretization(prob, field, &obj);CHKERRQ(ierr);
     ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
-    if (id == PETSCFE_CLASSID) {ierr = PetscFEGetQuadrature((PetscFE) obj, &quad);CHKERRQ(ierr);ierr = PetscFEGetDefaultTabulation((PetscFE) obj, &Bfine, NULL, NULL);CHKERRQ(ierr);}
-    else                       {ierr = PetscFVGetQuadrature((PetscFV) obj, &quad);CHKERRQ(ierr);}
+    if (id == PETSCFE_CLASSID) {
+      ierr = PetscFEGetQuadrature((PetscFE) obj, &quad);CHKERRQ(ierr);
+      ierr = PetscFEGetCellTabulation((PetscFE) obj, &Tfine);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation((PetscFE) obj, 1, 1, x, 0, &T);CHKERRQ(ierr);
+    } else {
+      ierr = PetscFVGetQuadrature((PetscFV) obj, &quad);CHKERRQ(ierr);
+    }
     ierr = PetscQuadratureGetData(quad, NULL, &Nc, &Nq, &qpoints, &qweights);CHKERRQ(ierr);
     /* For each fine grid cell */
     for (cell = cStart; cell < cEnd; ++cell) {
@@ -2858,24 +2867,22 @@ PetscErrorCode DMPlexComputeMassMatrixGeneral(DM dmc, DM dmf, Mat mass, void *us
         CoordinatesRealToRef(dim, dim, xi0, v0c, invJc, pVReal, x);
 
         if (id == PETSCFE_CLASSID) {
-          PetscFE    fe = (PetscFE) obj;
-          PetscReal *B;
+          PetscFE fe = (PetscFE) obj;
 
           /* Evaluate coarse basis on contained point */
           ierr = PetscFEGetDimension(fe, &cpdim);CHKERRQ(ierr);
-          ierr = PetscFEGetTabulation(fe, 1, x, &B, NULL, NULL);CHKERRQ(ierr);
+          ierr = PetscFEComputeTabulation(fe, 1, x, 0, T);CHKERRQ(ierr);
           /* Get elemMat entries by multiplying by weight */
           for (i = 0; i < numFIndices; ++i) {
             ierr = PetscArrayzero(elemMat, cpdim);CHKERRQ(ierr);
             for (j = 0; j < cpdim; ++j) {
-              for (c = 0; c < Nc; ++c) elemMat[j] += B[j*Nc + c]*Bfine[(ccell*numFIndices + i)*Nc + c]*qweights[ccell*Nc + c]*detJ;
+              for (c = 0; c < Nc; ++c) elemMat[j] += T->T[0][j*Nc + c]*Tfine->T[0][(ccell*numFIndices + i)*Nc + c]*qweights[ccell*Nc + c]*detJ;
             }
             /* Update interpolator */
             if (mesh->printFEM > 1) {ierr = DMPrintCellMatrix(cell, name, 1, numCIndices, elemMat);CHKERRQ(ierr);}
             if (numCIndices != cpdim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of element matrix columns %D != %D", numCIndices, cpdim);
             ierr = MatSetValues(mass, 1, &findices[i], numCIndices, cindices, elemMat, ADD_VALUES);CHKERRQ(ierr);
           }
-          ierr = PetscFERestoreTabulation(fe, 1, x, &B, NULL, NULL);CHKERRQ(ierr);CHKERRQ(ierr);
         } else {
           cpdim = 1;
           for (i = 0; i < numFIndices; ++i) {
@@ -2897,6 +2904,7 @@ PetscErrorCode DMPlexComputeMassMatrixGeneral(DM dmc, DM dmf, Mat mass, void *us
       ierr = VecDestroy(&pointVec);CHKERRQ(ierr);
       ierr = DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, &numFIndices, &findices, NULL);CHKERRQ(ierr);
     }
+    if (id == PETSCFE_CLASSID) {ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);}
   }
   ierr = PetscFree3(v0,J,invJ);CHKERRQ(ierr);
   ierr = PetscFree3(v0c,Jc,invJc);CHKERRQ(ierr);
@@ -3096,6 +3104,7 @@ PetscErrorCode DMPlexComputeInjectorFEM(DM dmc, DM dmf, VecScatter *sc, void *us
 PetscErrorCode DMPlexGetCellFields(DM dm, IS cellIS, Vec locX, Vec locX_t, Vec locA, PetscScalar **u, PetscScalar **u_t, PetscScalar **a)
 {
   DM              plex, plexA = NULL;
+  DMEnclosureType encAux;
   PetscSection    section, sectionAux;
   PetscDS         prob;
   const PetscInt *cells;
@@ -3120,6 +3129,7 @@ PetscErrorCode DMPlexGetCellFields(DM dm, IS cellIS, Vec locX, Vec locX_t, Vec l
     PetscDS probAux;
 
     ierr = VecGetDM(locA, &dmAux);CHKERRQ(ierr);
+    ierr = DMGetEnclosureRelation(dmAux, dm, &encAux);CHKERRQ(ierr);
     ierr = DMPlexConvertPlex(dmAux, &plexA, PETSC_FALSE);CHKERRQ(ierr);
     ierr = DMGetLocalSection(dmAux, &sectionAux);CHKERRQ(ierr);
     ierr = DMGetDS(dmAux, &probAux);CHKERRQ(ierr);
@@ -3145,7 +3155,7 @@ PetscErrorCode DMPlexGetCellFields(DM dm, IS cellIS, Vec locX, Vec locX_t, Vec l
     }
     if (locA) {
       PetscInt subcell;
-      ierr = DMPlexGetAuxiliaryPoint(plex, plexA, cell, &subcell);CHKERRQ(ierr);
+      ierr = DMGetEnclosurePoint(plexA, dm, encAux, cell, &subcell);CHKERRQ(ierr);
       ierr = DMPlexVecGetClosure(plexA, sectionAux, locA, subcell, NULL, &x);CHKERRQ(ierr);
       for (i = 0; i < totDimAux; ++i) al[cind*totDimAux+i] = x[i];
       ierr = DMPlexVecRestoreClosure(plexA, sectionAux, locA, subcell, NULL, &x);CHKERRQ(ierr);
diff --git a/src/dm/impls/plex/plexfluent.c b/src/dm/impls/plex/plexfluent.c
index 3e32a995599..8289d61f94b 100644
--- a/src/dm/impls/plex/plexfluent.c
+++ b/src/dm/impls/plex/plexfluent.c
@@ -40,7 +40,7 @@ static PetscErrorCode DMPlexCreateFluent_ReadString(PetscViewer viewer, char *bu
   PetscFunctionBegin;
   do {ierr = PetscViewerRead(viewer, &(buffer[i++]), 1, &ret, PETSC_CHAR);CHKERRQ(ierr);}
   while (ret > 0 && buffer[i-1] != '\0' && buffer[i-1] != delim);
-  buffer[i] = '\0';
+  if (!ret) buffer[i-1] = '\0'; else buffer[i] = '\0';
   PetscFunctionReturn(0);
 }
 
diff --git a/src/dm/impls/plex/plexgenerate.c b/src/dm/impls/plex/plexgenerate.c
index aeb7a688545..cbd3bf912a5 100644
--- a/src/dm/impls/plex/plexgenerate.c
+++ b/src/dm/impls/plex/plexgenerate.c
@@ -137,6 +137,10 @@ struct _n_PetscFunctionList {
   Output Parameter:
 . mesh - The DMPlex object
 
+  Options Database Keys:
++  -dm_plex_generate <name> - The package used to generate the mesh, for example triangle, ctetgen, or tetgen
+-  -dm_plex_generator <name> - The package used to generate the mesh, for example triangle, ctetgen, or tetgen (deprecated)
+
   Level: intermediate
 
 .seealso: DMPlexCreate(), DMRefine()
@@ -148,6 +152,7 @@ PetscErrorCode DMPlexGenerate(DM boundary, const char name[], PetscBool interpol
   PetscBool         flg;
   PetscErrorCode    ierr;
   PetscFunctionList fl;
+  const char*       suggestions;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(boundary, DM_CLASSID, 1);
@@ -155,6 +160,10 @@ PetscErrorCode DMPlexGenerate(DM boundary, const char name[], PetscBool interpol
   ierr = DMGetDimension(boundary, &dim);CHKERRQ(ierr);
   ierr = PetscOptionsGetString(((PetscObject) boundary)->options,((PetscObject) boundary)->prefix, "-dm_plex_generator", genname, 1024, &flg);CHKERRQ(ierr);
   if (flg) name = genname;
+  else {
+    ierr = PetscOptionsGetString(((PetscObject) boundary)->options,((PetscObject) boundary)->prefix, "-dm_plex_generate", genname, 1024, &flg);CHKERRQ(ierr);
+    if (flg) name = genname;
+  }
 
   fl = DMPlexGenerateList;
   if (name) {
@@ -166,7 +175,7 @@ PetscErrorCode DMPlexGenerate(DM boundary, const char name[], PetscBool interpol
       }
       fl = fl->next;
     }
-    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Grid generator %g not registered",name);
+    SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Grid generator %s not registered; you may need to add --download-%s to your ./configure options",name,name);
   } else {
     while (fl) {
       if (boundary->dim == fl->dim) {
@@ -175,7 +184,10 @@ PetscErrorCode DMPlexGenerate(DM boundary, const char name[], PetscBool interpol
       }
       fl = fl->next;
     }
-    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"No grid generator of dimension %D registered",boundary->dim+1);
+    suggestions = "";
+    if (boundary->dim+1 == 2) suggestions = "; you may need to add --download-triangle to your ./configure options";
+    else if (boundary->dim+1 == 3) suggestions = "; you may need to add --download-ctetgen or --download-tetgen to your ./configure options";
+    SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"No grid generator of dimension %D registered%s",boundary->dim+1,suggestions);
   }
   PetscFunctionReturn(0);
 }
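A hedged usage sketch for the generator selection documented above; the boundary DM and generator name are illustrative:

    DM boundary, dm;

    /* ... create the boundary DMPlex in "boundary" ... */
    /* Name the generator explicitly ... */
    ierr = DMPlexGenerate(boundary, "triangle", PETSC_TRUE, &dm);CHKERRQ(ierr);
    /* ... or pass NULL and select it at run time with -dm_plex_generate triangle
       (or the deprecated -dm_plex_generator), provided PETSc was configured with,
       e.g., --download-triangle */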
diff --git a/src/dm/impls/plex/plexgeometry.c b/src/dm/impls/plex/plexgeometry.c
index 829f824d666..6e88bdbb8d7 100644
--- a/src/dm/impls/plex/plexgeometry.c
+++ b/src/dm/impls/plex/plexgeometry.c
@@ -28,12 +28,13 @@
 
   The tolerance is interpreted as the maximum Euclidean (L2) distance of the sought point from the specified coordinates.
 
+  The complexity of this function is currently O(mn), where m is the number of vertices to find and n is the number of vertices in the local mesh. This could probably be improved.
+
 .seealso: DMPlexCreate(), DMGetCoordinates()
 @*/
 PetscErrorCode DMPlexFindVertices(DM dm, PetscInt npoints, const PetscReal coord[], PetscReal eps, PetscInt dagPoints[])
 {
-  PetscInt          c, dim, i, j, ndof, o, p, vStart, vEnd;
-  PetscSection      cs;
+  PetscInt          c, dim, i, j, o, p, vStart, vEnd;
   Vec               allCoordsVec;
   const PetscScalar *allCoords;
   PetscReal         norm;
@@ -42,16 +43,41 @@ PetscErrorCode DMPlexFindVertices(DM dm, PetscInt npoints, const PetscReal coord
   PetscFunctionBegin;
   if (eps < 0) eps = PETSC_SQRT_MACHINE_EPSILON;
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = DMGetCoordinateSection(dm, &cs);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(dm, &allCoordsVec);CHKERRQ(ierr);
   ierr = VecGetArrayRead(allCoordsVec, &allCoords);CHKERRQ(ierr);
   ierr = DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
-  for (i=0,j=0; i < npoints; i++,j+=dim) {
-    dagPoints[i] = -1;
+#if defined(PETSC_USE_DEBUG)
+  /* check coordinate section is consistent with DM dimension */
+  {
+    PetscSection      cs;
+    PetscInt          ndof;
+
+    ierr = DMGetCoordinateSection(dm, &cs);CHKERRQ(ierr);
     for (p = vStart; p < vEnd; p++) {
-      ierr = PetscSectionGetOffset(cs, p, &o);CHKERRQ(ierr);
       ierr = PetscSectionGetDof(cs, p, &ndof);CHKERRQ(ierr);
       if (PetscUnlikely(ndof != dim)) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "point %D: ndof = %D != %D = dim", p, ndof, dim);
+    }
+  }
+#endif
+  if (eps == 0.0) {
+    for (i=0,j=0; i < npoints; i++,j+=dim) {
+      dagPoints[i] = -1;
+      for (p = vStart,o=0; p < vEnd; p++,o+=dim) {
+        for (c = 0; c < dim; c++) {
+          if (coord[j+c] != PetscRealPart(allCoords[o+c])) break;
+        }
+        if (c == dim) {
+          dagPoints[i] = p;
+          break;
+        }
+      }
+    }
+    ierr = VecRestoreArrayRead(allCoordsVec, &allCoords);CHKERRQ(ierr);
+    PetscFunctionReturn(0);
+  }
+  for (i=0,j=0; i < npoints; i++,j+=dim) {
+    dagPoints[i] = -1;
+    for (p = vStart,o=0; p < vEnd; p++,o+=dim) {
       norm = 0.0;
       for (c = 0; c < dim; c++) {
         norm += PetscSqr(coord[j+c] - PetscRealPart(allCoords[o+c]));
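A hedged usage sketch for DMPlexFindVertices(), with illustrative coordinates and tolerance on a 2D mesh:

    const PetscReal coords[4] = {0.0, 0.0, 1.0, 0.5}; /* two query points, dim = 2 */
    PetscInt        verts[2];

    ierr = DMPlexFindVertices(dm, 2, coords, 1.0e-10, verts);CHKERRQ(ierr);
    /* verts[i] is the matching vertex DAG point, or -1 if none lies within eps;
       eps = 0.0 takes the new exact-comparison fast path added above */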
@@ -147,7 +173,7 @@ static PetscErrorCode DMPlexClosestPoint_Simplex_2D_Internal(DM dm, const PetscS
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexLocatePoint_General_2D_Internal(DM dm, const PetscScalar point[], PetscInt c, PetscInt *cell)
+static PetscErrorCode DMPlexLocatePoint_Quad_2D_Internal(DM dm, const PetscScalar point[], PetscInt c, PetscInt *cell)
 {
   PetscSection       coordSection;
   Vec             coordsLocal;
@@ -419,39 +445,21 @@ PetscErrorCode PetscGridHashDestroy(PetscGridHash *box)
 
 PetscErrorCode DMPlexLocatePoint_Internal(DM dm, PetscInt dim, const PetscScalar point[], PetscInt cellStart, PetscInt *cell)
 {
-  PetscInt       coneSize;
+  DMPolytopeType ct;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  switch (dim) {
-  case 2:
-    ierr = DMPlexGetConeSize(dm, cellStart, &coneSize);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 3:
-      ierr = DMPlexLocatePoint_Simplex_2D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);
-      break;
-    case 4:
-      ierr = DMPlexLocatePoint_General_2D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);
-      break;
-    default:
-      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No point location for cell with cone size %D", coneSize);
-    }
-    break;
-  case 3:
-    ierr = DMPlexGetConeSize(dm, cellStart, &coneSize);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 4:
-      ierr = DMPlexLocatePoint_Simplex_3D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);
-      break;
-    case 6:
-      ierr = DMPlexLocatePoint_General_3D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);
-      break;
-    default:
-      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No point location for cell with cone size %D", coneSize);
-    }
-    break;
-  default:
-    SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No point location for mesh dimension %D", dim);
+  ierr = DMPlexGetCellType(dm, cellStart, &ct);CHKERRQ(ierr);
+  switch (ct) {
+    case DM_POLYTOPE_TRIANGLE:
+    ierr = DMPlexLocatePoint_Simplex_2D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);break;
+    case DM_POLYTOPE_QUADRILATERAL:
+    ierr = DMPlexLocatePoint_Quad_2D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);break;
+    case DM_POLYTOPE_TETRAHEDRON:
+    ierr = DMPlexLocatePoint_Simplex_3D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);break;
+    case DM_POLYTOPE_HEXAHEDRON:
+    ierr = DMPlexLocatePoint_General_3D_Internal(dm, point, cellStart, cell);CHKERRQ(ierr);break;
+    default: SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No point location for cell %D with type %s", cellStart, DMPolytopeTypes[ct]);
   }
   PetscFunctionReturn(0);
 }
@@ -461,43 +469,23 @@ PetscErrorCode DMPlexLocatePoint_Internal(DM dm, PetscInt dim, const PetscScalar
 */
 PetscErrorCode DMPlexClosestPoint_Internal(DM dm, PetscInt dim, const PetscScalar point[], PetscInt cell, PetscReal cpoint[])
 {
-  PetscInt       coneSize;
+  DMPolytopeType ct;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  switch (dim) {
-  case 2:
-    ierr = DMPlexGetConeSize(dm, cell, &coneSize);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 3:
-      ierr = DMPlexClosestPoint_Simplex_2D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);
-      break;
+  ierr = DMPlexGetCellType(dm, cell, &ct);CHKERRQ(ierr);
+  switch (ct) {
+    case DM_POLYTOPE_TRIANGLE:
+    ierr = DMPlexClosestPoint_Simplex_2D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);break;
 #if 0
-    case 4:
-      ierr = DMPlexClosestPoint_General_2D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);
-      break;
+    case DM_POLYTOPE_QUADRILATERAL:
+    ierr = DMPlexClosestPoint_General_2D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);break;
+    case DM_POLYTOPE_TETRAHEDRON:
+    ierr = DMPlexClosestPoint_Simplex_3D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);break;
+    case DM_POLYTOPE_HEXAHEDRON:
+    ierr = DMPlexClosestPoint_General_3D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);break;
 #endif
-    default:
-      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No closest point location for cell with cone size %D", coneSize);
-    }
-    break;
-#if 0
-  case 3:
-    ierr = DMPlexGetConeSize(dm, cell, &coneSize);CHKERRQ(ierr);
-    switch (coneSize) {
-    case 4:
-      ierr = DMPlexClosestPoint_Simplex_3D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);
-      break;
-    case 6:
-      ierr = DMPlexClosestPoint_General_3D_Internal(dm, point, cell, cpoint);CHKERRQ(ierr);
-      break;
-    default:
-      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No closest point location for cell with cone size %D", coneSize);
-    }
-    break;
-#endif
-  default:
-    SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No closest point location for mesh dimension %D", dim);
+    default: SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No closest point location for cell %D with type %s", cell, DMPolytopeTypes[ct]);
   }
   PetscFunctionReturn(0);
 }
@@ -1507,6 +1495,7 @@ static PetscErrorCode DMPlexComputeHexahedronGeometry_Internal(DM dm, PetscInt e
 
 static PetscErrorCode DMPlexComputeCellGeometryFEM_Implicit(DM dm, PetscInt cell, PetscQuadrature quad, PetscReal *v, PetscReal *J, PetscReal *invJ, PetscReal *detJ)
 {
+  DMPolytopeType  ct;
   PetscInt        depth, dim, coordDim, coneSize, i;
   PetscInt        Nq = 0;
   const PetscReal *points = NULL;
@@ -1526,55 +1515,33 @@ static PetscErrorCode DMPlexComputeCellGeometryFEM_Implicit(DM dm, PetscInt cell
   ierr = DMGetCoordinateDim(dm, &coordDim);CHKERRQ(ierr);
   if (coordDim > 3) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported coordinate dimension %D > 3", coordDim);
   if (quad) {ierr = PetscQuadratureGetData(quad, NULL, NULL, &Nq, &points, NULL);CHKERRQ(ierr);}
-  switch (dim) {
-  case 0:
+  ierr = DMPlexGetCellType(dm, cell, &ct);CHKERRQ(ierr);
+  switch (ct) {
+    case DM_POLYTOPE_POINT:
     ierr = DMPlexComputePointGeometry_Internal(dm, cell, v, J, invJ, detJ);CHKERRQ(ierr);
     isAffine = PETSC_FALSE;
     break;
-  case 1:
-    if (Nq) {
-      ierr = DMPlexComputeLineGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);
-    } else {
-      ierr = DMPlexComputeLineGeometry_Internal(dm, cell, v, J, invJ, detJ);CHKERRQ(ierr);
-    }
+    case DM_POLYTOPE_SEGMENT:
+    if (Nq) {ierr = DMPlexComputeLineGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);}
+    else    {ierr = DMPlexComputeLineGeometry_Internal(dm, cell, v,  J,  invJ,  detJ);CHKERRQ(ierr);}
     break;
-  case 2:
-    switch (coneSize) {
-    case 3:
-      if (Nq) {
-        ierr = DMPlexComputeTriangleGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);
-      } else {
-        ierr = DMPlexComputeTriangleGeometry_Internal(dm, cell, v, J, invJ, detJ);CHKERRQ(ierr);
-      }
-      break;
-    case 4:
-      ierr = DMPlexComputeRectangleGeometry_Internal(dm, cell, Nq, points, v, J, invJ, detJ);CHKERRQ(ierr);
-      isAffine = PETSC_FALSE;
-      break;
-    default:
-      SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported number of faces %D in cell %D for element geometry computation", coneSize, cell);
-    }
+    case DM_POLYTOPE_TRIANGLE:
+    if (Nq) {ierr = DMPlexComputeTriangleGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);}
+    else    {ierr = DMPlexComputeTriangleGeometry_Internal(dm, cell, v,  J,  invJ,  detJ);CHKERRQ(ierr);}
     break;
-  case 3:
-    switch (coneSize) {
-    case 4:
-      if (Nq) {
-        ierr = DMPlexComputeTetrahedronGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);
-      } else {
-        ierr = DMPlexComputeTetrahedronGeometry_Internal(dm, cell, v, J, invJ, detJ);CHKERRQ(ierr);
-      }
-      break;
-    case 6: /* Faces */
-    case 8: /* Vertices */
-      ierr = DMPlexComputeHexahedronGeometry_Internal(dm, cell, Nq, points, v, J, invJ, detJ);CHKERRQ(ierr);
-      isAffine = PETSC_FALSE;
-      break;
-    default:
-      SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported number of faces %D in cell %D for element geometry computation", coneSize, cell);
-    }
+    case DM_POLYTOPE_QUADRILATERAL:
+    ierr = DMPlexComputeRectangleGeometry_Internal(dm, cell, Nq, points, v, J, invJ, detJ);CHKERRQ(ierr);
+    isAffine = PETSC_FALSE;
     break;
-  default:
-    SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported dimension %D for element geometry computation", dim);
+    case DM_POLYTOPE_TETRAHEDRON:
+    if (Nq) {ierr = DMPlexComputeTetrahedronGeometry_Internal(dm, cell, v0, J0, NULL, &detJ0);CHKERRQ(ierr);}
+    else    {ierr = DMPlexComputeTetrahedronGeometry_Internal(dm, cell, v,  J,  invJ,  detJ);CHKERRQ(ierr);}
+    break;
+    case DM_POLYTOPE_HEXAHEDRON:
+    ierr = DMPlexComputeHexahedronGeometry_Internal(dm, cell, Nq, points, v, J, invJ, detJ);CHKERRQ(ierr);
+    isAffine = PETSC_FALSE;
+    break;
+    default: SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No element geometry for cell %D with type %s", cell, DMPolytopeTypes[ct]);
   }
   if (isAffine && Nq) {
     if (v) {
@@ -1659,14 +1626,14 @@ PetscErrorCode DMPlexComputeCellGeometryAffineFEM(DM dm, PetscInt cell, PetscRea
 
 static PetscErrorCode DMPlexComputeCellGeometryFEM_FE(DM dm, PetscFE fe, PetscInt point, PetscQuadrature quad, PetscReal v[], PetscReal J[], PetscReal invJ[], PetscReal *detJ)
 {
-  PetscQuadrature  feQuad;
-  PetscSection     coordSection;
-  Vec              coordinates;
-  PetscScalar     *coords = NULL;
-  const PetscReal *quadPoints;
-  PetscReal       *basisDer, *basis, detJt;
-  PetscInt         dim, cdim, pdim, qdim, Nq, numCoords, q;
-  PetscErrorCode   ierr;
+  PetscQuadrature   feQuad;
+  PetscSection      coordSection;
+  Vec               coordinates;
+  PetscScalar      *coords = NULL;
+  const PetscReal  *quadPoints;
+  PetscTabulation T;
+  PetscInt          dim, cdim, pdim, qdim, Nq, numCoords, q;
+  PetscErrorCode    ierr;
 
   PetscFunctionBegin;
   ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
@@ -1687,61 +1654,64 @@ static PetscErrorCode DMPlexComputeCellGeometryFEM_FE(DM dm, PetscFE fe, PetscIn
   ierr = PetscFEGetDimension(fe, &pdim);CHKERRQ(ierr);
   ierr = PetscFEGetQuadrature(fe, &feQuad);CHKERRQ(ierr);
   if (feQuad == quad) {
-    ierr = PetscFEGetDefaultTabulation(fe, &basis, J ? &basisDer : NULL, NULL);CHKERRQ(ierr);
+    ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr);
     if (numCoords != pdim*cdim) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "There are %d coordinates for point %d != %d*%d", numCoords, point, pdim, cdim);
   } else {
-    ierr = PetscFEGetTabulation(fe, Nq, quadPoints, &basis, J ? &basisDer : NULL, NULL);CHKERRQ(ierr);
+    ierr = PetscFECreateTabulation(fe, 1, Nq, quadPoints, J ? 1 : 0, &T);CHKERRQ(ierr);
   }
   if (qdim != dim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Point dimension %d != quadrature dimension %d", dim, qdim);
-  if (v) {
-    ierr = PetscArrayzero(v, Nq*cdim);CHKERRQ(ierr);
-    for (q = 0; q < Nq; ++q) {
-      PetscInt i, k;
+  {
+    const PetscReal *basis    = T->T[0];
+    const PetscReal *basisDer = T->T[1];
+    PetscReal        detJt;
 
-      for (k = 0; k < pdim; ++k)
-        for (i = 0; i < cdim; ++i)
-          v[q*cdim + i] += basis[q*pdim + k] * PetscRealPart(coords[k*cdim + i]);
-      ierr = PetscLogFlops(2.0*pdim*cdim);CHKERRQ(ierr);
-    }
-  }
-  if (J) {
-    ierr = PetscArrayzero(J, Nq*cdim*cdim);CHKERRQ(ierr);
-    for (q = 0; q < Nq; ++q) {
-      PetscInt i, j, k, c, r;
+    if (v) {
+      ierr = PetscArrayzero(v, Nq*cdim);CHKERRQ(ierr);
+      for (q = 0; q < Nq; ++q) {
+        PetscInt i, k;
 
-      /* J = dx_i/d\xi_j = sum[k=0,n-1] dN_k/d\xi_j * x_i(k) */
-      for (k = 0; k < pdim; ++k)
-        for (j = 0; j < dim; ++j)
+        for (k = 0; k < pdim; ++k)
           for (i = 0; i < cdim; ++i)
-            J[(q*cdim + i)*cdim + j] += basisDer[(q*pdim + k)*dim + j] * PetscRealPart(coords[k*cdim + i]);
-      ierr = PetscLogFlops(2.0*pdim*dim*cdim);CHKERRQ(ierr);
-      if (cdim > dim) {
-        for (c = dim; c < cdim; ++c)
-          for (r = 0; r < cdim; ++r)
-            J[r*cdim+c] = r == c ? 1.0 : 0.0;
-      }
-      if (!detJ && !invJ) continue;
-      detJt = 0.;
-      switch (cdim) {
-      case 3:
-        DMPlex_Det3D_Internal(&detJt, &J[q*cdim*dim]);
-        if (invJ) {DMPlex_Invert3D_Internal(&invJ[q*cdim*dim], &J[q*cdim*dim], detJt);}
-        break;
-      case 2:
-        DMPlex_Det2D_Internal(&detJt, &J[q*cdim*dim]);
-        if (invJ) {DMPlex_Invert2D_Internal(&invJ[q*cdim*dim], &J[q*cdim*dim], detJt);}
-        break;
-      case 1:
-        detJt = J[q*cdim*dim];
-        if (invJ) invJ[q*cdim*dim] = 1.0/detJt;
+            v[q*cdim + i] += basis[q*pdim + k] * PetscRealPart(coords[k*cdim + i]);
+        ierr = PetscLogFlops(2.0*pdim*cdim);CHKERRQ(ierr);
       }
-      if (detJ) detJ[q] = detJt;
     }
+    if (J) {
+      ierr = PetscArrayzero(J, Nq*cdim*cdim);CHKERRQ(ierr);
+      for (q = 0; q < Nq; ++q) {
+        PetscInt i, j, k, c, r;
+
+        /* J = dx_i/d\xi_j = sum[k=0,n-1] dN_k/d\xi_j * x_i(k) */
+        for (k = 0; k < pdim; ++k)
+          for (j = 0; j < dim; ++j)
+            for (i = 0; i < cdim; ++i)
+              J[(q*cdim + i)*cdim + j] += basisDer[(q*pdim + k)*dim + j] * PetscRealPart(coords[k*cdim + i]);
+        ierr = PetscLogFlops(2.0*pdim*dim*cdim);CHKERRQ(ierr);
+        if (cdim > dim) {
+          for (c = dim; c < cdim; ++c)
+            for (r = 0; r < cdim; ++r)
+              J[r*cdim+c] = r == c ? 1.0 : 0.0;
+        }
+        if (!detJ && !invJ) continue;
+        detJt = 0.;
+        switch (cdim) {
+        case 3:
+          DMPlex_Det3D_Internal(&detJt, &J[q*cdim*dim]);
+          if (invJ) {DMPlex_Invert3D_Internal(&invJ[q*cdim*dim], &J[q*cdim*dim], detJt);}
+          break;
+        case 2:
+          DMPlex_Det2D_Internal(&detJt, &J[q*cdim*dim]);
+          if (invJ) {DMPlex_Invert2D_Internal(&invJ[q*cdim*dim], &J[q*cdim*dim], detJt);}
+          break;
+        case 1:
+          detJt = J[q*cdim*dim];
+          if (invJ) invJ[q*cdim*dim] = 1.0/detJt;
+        }
+        if (detJ) detJ[q] = detJt;
+      }
+    } else if (detJ || invJ) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Need J to compute invJ or detJ");
   }
-  else if (detJ || invJ) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Need J to compute invJ or detJ");
-  if (feQuad != quad) {
-    ierr = PetscFERestoreTabulation(fe, Nq, quadPoints, &basis, J ? &basisDer : NULL, NULL);CHKERRQ(ierr);
-  }
+  if (feQuad != quad) {ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);}
   ierr = DMPlexVecRestoreClosure(dm, coordSection, coordinates, point, &numCoords, &coords);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -1929,7 +1899,7 @@ static PetscErrorCode DMPlexComputeGeometryFVM_3D_Internal(DM dm, PetscInt dim,
   PetscReal       vsum = 0.0, vtmp, coordsTmp[3*3];
   const PetscInt *faces, *facesO;
   PetscBool       isHybrid = PETSC_FALSE;
-  PetscInt        pEndInterior[4], cdepth, numFaces, f, coordSize, numCorners, p, d;
+  PetscInt        pEndInterior[4], cdepth, numFaces, f, coordSize, p, d;
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
@@ -1948,12 +1918,13 @@ static PetscErrorCode DMPlexComputeGeometryFVM_3D_Internal(DM dm, PetscInt dim,
   ierr = DMPlexGetCone(dm, cell, &faces);CHKERRQ(ierr);
   ierr = DMPlexGetConeOrientation(dm, cell, &facesO);CHKERRQ(ierr);
   for (f = 0; f < numFaces; ++f) {
-    PetscBool flip = isHybrid && f == 0 ? PETSC_TRUE : PETSC_FALSE; /* The first hybrid face is reversed */
+    PetscBool      flip = isHybrid && f == 0 ? PETSC_TRUE : PETSC_FALSE; /* The first hybrid face is reversed */
+    DMPolytopeType ct;
 
     ierr = DMPlexVecGetClosure(dm, coordSection, coordinates, faces[f], &coordSize, &coords);CHKERRQ(ierr);
-    numCorners = coordSize/dim;
-    switch (numCorners) {
-    case 3:
+    ierr = DMPlexGetCellType(dm, faces[f], &ct);CHKERRQ(ierr);
+    switch (ct) {
+    case DM_POLYTOPE_TRIANGLE:
       for (d = 0; d < dim; ++d) {
         coordsTmp[0*dim+d] = PetscRealPart(coords[0*dim+d]);
         coordsTmp[1*dim+d] = PetscRealPart(coords[1*dim+d]);
@@ -1968,7 +1939,7 @@ static PetscErrorCode DMPlexComputeGeometryFVM_3D_Internal(DM dm, PetscInt dim,
         }
       }
       break;
-    case 4:
+    case DM_POLYTOPE_QUADRILATERAL:
     {
       PetscInt fv[4] = {0, 1, 2, 3};
 
@@ -2006,7 +1977,7 @@ static PetscErrorCode DMPlexComputeGeometryFVM_3D_Internal(DM dm, PetscInt dim,
       break;
     }
     default:
-      SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle faces with %D vertices", numCorners);
+      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle face %D of type %s", faces[f], DMPolytopeTypes[ct]);
     }
     ierr = DMPlexVecRestoreClosure(dm, coordSection, coordinates, faces[f], &coordSize, &coords);CHKERRQ(ierr);
   }
@@ -2047,9 +2018,7 @@ PetscErrorCode DMPlexComputeCellGeometryFVM(DM dm, PetscInt cell, PetscReal *vol
   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
   if (depth != dim) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Mesh must be interpolated");
-  /* We need to keep a pointer to the depth label */
-  ierr = DMGetLabelValue(dm, "depth", cell, &depth);CHKERRQ(ierr);
-  /* Cone size is now the number of faces */
+  ierr = DMPlexGetPointDepth(dm, cell, &depth);CHKERRQ(ierr);
   switch (depth) {
   case 1:
     ierr = DMPlexComputeGeometryFVM_1D_Internal(dm, dim, cell, vol, centroid, normal);CHKERRQ(ierr);
@@ -3177,6 +3146,7 @@ PetscErrorCode DMPlexRemapGeometry(DM dm, PetscReal time,
                                                 PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]))
 {
   DM             cdm;
+  DMField        cf;
   Vec            lCoords, tmpCoords;
   PetscErrorCode ierr;
 
@@ -3185,7 +3155,11 @@ PetscErrorCode DMPlexRemapGeometry(DM dm, PetscReal time,
   ierr = DMGetCoordinatesLocal(dm, &lCoords);CHKERRQ(ierr);
   ierr = DMGetLocalVector(cdm, &tmpCoords);CHKERRQ(ierr);
   ierr = VecCopy(lCoords, tmpCoords);CHKERRQ(ierr);
+  /* We have to set the coordinate field manually right now since the coordinate DM will not have its own */
+  ierr = DMGetCoordinateField(dm, &cf);CHKERRQ(ierr);
+  cdm->coordinateField = cf;
   ierr = DMProjectFieldLocal(cdm, time, tmpCoords, &func, INSERT_VALUES, lCoords);CHKERRQ(ierr);
+  cdm->coordinateField = NULL;
   ierr = DMRestoreLocalVector(cdm, &tmpCoords);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
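A hedged sketch of a pointwise remap function of the kind DMPlexRemapGeometry() consumes; the name ShearCoords and the shear factor are illustrative, and the argument list follows the standard DMProjectField pointwise signature:

    static void ShearCoords(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                            PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
    {
      PetscInt d;
      for (d = 0; d < dim; ++d) f[d] = u[d]; /* start from the current coordinates */
      f[0] += 0.1*u[1];                      /* shear x by a multiple of y */
    }

    ierr = DMPlexRemapGeometry(dm, 0.0, ShearCoords);CHKERRQ(ierr);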
diff --git a/src/dm/impls/plex/plexhdf5.c b/src/dm/impls/plex/plexhdf5.c
index 33ff613dc1f..c69e196f6ae 100644
--- a/src/dm/impls/plex/plexhdf5.c
+++ b/src/dm/impls/plex/plexhdf5.c
@@ -431,7 +431,7 @@ static PetscErrorCode CreateConesIS_Private(DM dm, PetscInt cStart, PetscInt cEn
     ierr = PetscObjectReference((PetscObject) sfPoint);CHKERRQ(ierr);
   }
   /* Number all vertices */
-  ierr = DMPlexCreateNumbering_Internal(dm, vStart, vEnd+vExtra, 0, NULL, sfPoint, &globalVertexNumbers);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, vStart, vEnd+vExtra, 0, NULL, sfPoint, &globalVertexNumbers);CHKERRQ(ierr);
   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
   /* Create cones */
   ierr = ISGetIndices(globalVertexNumbers, &gvertex);CHKERRQ(ierr);
diff --git a/src/dm/impls/plex/plexindices.c b/src/dm/impls/plex/plexindices.c
index bb725704f6b..34b4cb1d7f3 100644
--- a/src/dm/impls/plex/plexindices.c
+++ b/src/dm/impls/plex/plexindices.c
@@ -7,7 +7,7 @@
 
   Input Parameters:
 + dm - The DM
-- section - The section describing the layout in v, or NULL to use the default section
+- section - The section describing the layout in the local vector, or NULL to use the default section
 
   Note:
   This should greatly improve the performance of the closure operations, at the cost of additional memory.
@@ -65,9 +65,11 @@ PetscErrorCode DMPlexCreateClosureIndex(DM dm, PetscSection section)
       }
     }
     ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &numPoints, &points);CHKERRQ(ierr);
-    if (q*2 != cldof) SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "Invalid size for closure %d should be %d", q*2, cldof);
+    if (q*2 != cldof) SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "Invalid size for closure %D should be %D", q*2, cldof);
   }
   ierr = ISCreateGeneral(PETSC_COMM_SELF, clSize, clPoints, PETSC_OWN_POINTER, &closureIS);CHKERRQ(ierr);
   ierr = PetscSectionSetClosureIndex(section, (PetscObject) dm, closureSection, closureIS);CHKERRQ(ierr);
+  ierr = PetscSectionDestroy(&closureSection);CHKERRQ(ierr);
+  ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
diff --git a/src/dm/impls/plex/plexinterpolate.c b/src/dm/impls/plex/plexinterpolate.c
index cd52f3434e8..792299cdd8c 100644
--- a/src/dm/impls/plex/plexinterpolate.c
+++ b/src/dm/impls/plex/plexinterpolate.c
@@ -19,29 +19,59 @@ typedef struct _PetscHashIJKLKey { PetscInt i, j, k, l; } PetscHashIJKLKey;
 
 PETSC_HASH_MAP(HashIJKL, PetscHashIJKLKey, PetscInt, PetscHashIJKLKeyHash, PetscHashIJKLKeyEqual, -1)
 
+static PetscSFNode _PetscInvalidSFNode = {-1, -1};
+
+typedef struct _PetscHashIJKLRemoteKey { PetscSFNode i, j, k, l; } PetscHashIJKLRemoteKey;
+
+#define PetscHashIJKLRemoteKeyHash(key) \
+  PetscHashCombine(PetscHashCombine(PetscHashInt((key).i.rank + (key).i.index),PetscHashInt((key).j.rank + (key).j.index)), \
+                   PetscHashCombine(PetscHashInt((key).k.rank + (key).k.index),PetscHashInt((key).l.rank + (key).l.index)))
+
+#define PetscHashIJKLRemoteKeyEqual(k1,k2) \
+  (((k1).i.rank==(k2).i.rank) ? ((k1).i.index==(k2).i.index) ? ((k1).j.rank==(k2).j.rank) ? ((k1).j.index==(k2).j.index) ? ((k1).k.rank==(k2).k.rank) ? ((k1).k.index==(k2).k.index) ? ((k1).l.rank==(k2).l.rank) ? ((k1).l.index==(k2).l.index) : 0 : 0 : 0 : 0 : 0 : 0 : 0)
+
+PETSC_HASH_MAP(HashIJKLRemote, PetscHashIJKLRemoteKey, PetscSFNode, PetscHashIJKLRemoteKeyHash, PetscHashIJKLRemoteKeyEqual, _PetscInvalidSFNode)
+
+static PetscErrorCode PetscSortSFNode(PetscInt n, PetscSFNode A[])
+{
+  PetscInt i;
+
+  PetscFunctionBegin;
+  for (i = 1; i < n; ++i) {
+    PetscSFNode x = A[i];
+    PetscInt    j;
+
+    for (j = i-1; j >= 0; --j) {
+      if ((A[j].rank > x.rank) || (A[j].rank == x.rank && A[j].index > x.index)) break;
+      A[j+1] = A[j];
+    }
+    A[j+1] = x;
+  }
+  PetscFunctionReturn(0);
+}
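A hedged sketch of how the sorted remote vertices could form a canonical PetscHashIJKLRemote key, so that the same face hashes identically on every rank; verts and faceSize are illustrative:

    PetscHashIJKLRemoteKey key;
    PetscSFNode            verts[4];
    PetscInt               faceSize = 3, v;

    /* fill verts[0..faceSize-1] with the (rank, index) pairs of the face vertices, then ... */
    ierr = PetscSortSFNode(faceSize, verts);CHKERRQ(ierr);
    for (v = faceSize; v < 4; ++v) verts[v] = _PetscInvalidSFNode; /* pad unused slots */
    key.i = verts[0]; key.j = verts[1]; key.k = verts[2]; key.l = verts[3];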
 
 /*
   DMPlexGetFaces_Internal - Gets groups of vertices that correspond to faces for the given cell
   This assumes that the mesh is not interpolated from the depth of point p to the vertices
 */
-PetscErrorCode DMPlexGetFaces_Internal(DM dm, PetscInt dim, PetscInt p, PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
+PetscErrorCode DMPlexGetFaces_Internal(DM dm, PetscInt p, PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
 {
   const PetscInt *cone = NULL;
-  PetscInt        coneSize;
+  DMPolytopeType  ct;
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
-  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
+  ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
-  ierr = DMPlexGetRawFaces_Internal(dm, dim, coneSize, cone, numFaces, faceSize, faces);CHKERRQ(ierr);
+  ierr = DMPlexGetRawFaces_Internal(dm, ct, cone, numFaces, faceSize, faces);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
 /*
   DMPlexRestoreFaces_Internal - Restores the array
 */
-PetscErrorCode DMPlexRestoreFaces_Internal(DM dm, PetscInt dim, PetscInt p, PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
+PetscErrorCode DMPlexRestoreFaces_Internal(DM dm, PetscInt p, PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
 {
   PetscErrorCode  ierr;
 
@@ -53,7 +83,7 @@ PetscErrorCode DMPlexRestoreFaces_Internal(DM dm, PetscInt dim, PetscInt p, Pets
 /*
   DMPlexGetRawFaces_Internal - Gets groups of vertices that correspond to faces for the given cone
 */
-PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize, const PetscInt cone[], PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
+PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, DMPolytopeType ct, const PetscInt cone[], PetscInt *numFaces, PetscInt *faceSize, const PetscInt *faces[])
 {
   PetscInt       *facesTmp;
   PetscInt        maxConeSize, maxSupportSize;
@@ -61,13 +91,15 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
-  if (faces && coneSize) PetscValidIntPointer(cone,4);
+  if (cone) PetscValidIntPointer(cone, 3);
   ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr);
   if (faces) {ierr = DMGetWorkArray(dm, PetscSqr(PetscMax(maxConeSize, maxSupportSize)), MPIU_INT, &facesTmp);CHKERRQ(ierr);}
-  switch (dim) {
-  case 1:
-    switch (coneSize) {
-    case 2:
+  switch (ct) {
+    case DM_POLYTOPE_POINT:
+      if (numFaces) *numFaces = 0;
+      if (faceSize) *faceSize = 0;
+      break;
+    case DM_POLYTOPE_SEGMENT:
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
         *faces = facesTmp;
@@ -75,13 +107,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
       if (numFaces) *numFaces = 2;
       if (faceSize) *faceSize = 1;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  case 2:
-    switch (coneSize) {
-    case 3:
+    case DM_POLYTOPE_TRIANGLE:
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
         facesTmp[2] = cone[1]; facesTmp[3] = cone[2];
@@ -91,7 +117,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
       if (numFaces) *numFaces = 3;
       if (faceSize) *faceSize = 2;
       break;
-    case 4:
+    case DM_POLYTOPE_QUADRILATERAL:
       /* Vertices follow right hand rule */
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
@@ -103,23 +129,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
       if (numFaces) *numFaces = 4;
       if (faceSize) *faceSize = 2;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  case 3:
-    switch (coneSize) {
-    case 3:
-      if (faces) {
-        facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
-        facesTmp[2] = cone[1]; facesTmp[3] = cone[2];
-        facesTmp[4] = cone[2]; facesTmp[5] = cone[0];
-        *faces = facesTmp;
-      }
-      if (numFaces) *numFaces = 3;
-      if (faceSize) *faceSize = 2;
-      break;
-    case 4:
+    case DM_POLYTOPE_TETRAHEDRON:
       /* Vertices of first face follow right hand rule and normal points away from last vertex */
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1]  = cone[1]; facesTmp[2]  = cone[2];
@@ -131,7 +141,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
       if (numFaces) *numFaces = 4;
       if (faceSize) *faceSize = 3;
       break;
-    case 8:
+    case DM_POLYTOPE_HEXAHEDRON:
       /*  7--------6
          /|       /|
         / |      / |
@@ -155,12 +165,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
       if (numFaces) *numFaces = 6;
       if (faceSize) *faceSize = 4;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  default:
-    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %D not supported", dim);
+    default: SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No face description for cell type %s", DMPolytopeTypes[ct]);
   }
   PetscFunctionReturn(0);
 }
@@ -168,7 +173,7 @@ PetscErrorCode DMPlexGetRawFaces_Internal(DM dm, PetscInt dim, PetscInt coneSize
 /*
   DMPlexGetRawFacesHybrid_Internal - Gets groups of vertices that correspond to faces for the given cone using hybrid ordering (prisms)
 */
-static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, PetscInt dim, PetscInt coneSize, const PetscInt cone[], PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
+static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, DMPolytopeType ct, const PetscInt cone[], PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
 {
   PetscInt       *facesTmp;
   PetscInt        maxConeSize, maxSupportSize;
@@ -176,13 +181,11 @@ static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, PetscInt dim, Pets
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
-  if (faces && coneSize) PetscValidIntPointer(cone,4);
+  if (cone) PetscValidIntPointer(cone, 3);
   ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr);
   if (faces) {ierr = DMGetWorkArray(dm, PetscSqr(PetscMax(maxConeSize, maxSupportSize)), MPIU_INT, &facesTmp);CHKERRQ(ierr);}
-  switch (dim) {
-  case 1:
-    switch (coneSize) {
-    case 2:
+  switch (ct) {
+    case DM_POLYTOPE_SEGMENT:
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
         *faces = facesTmp;
@@ -191,13 +194,8 @@ static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, PetscInt dim, Pets
       if (numFacesNotH) *numFacesNotH = 2;
       if (faceSize)     *faceSize = 1;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  case 2:
-    switch (coneSize) {
-    case 4:
+    case DM_POLYTOPE_QUADRILATERAL:
+    case DM_POLYTOPE_SEG_PRISM_TENSOR:
       if (faces) {
         facesTmp[0] = cone[0]; facesTmp[1] = cone[1];
         facesTmp[2] = cone[2]; facesTmp[3] = cone[3];
@@ -209,13 +207,20 @@ static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, PetscInt dim, Pets
       if (numFacesNotH) *numFacesNotH = 2;
       if (faceSize)     *faceSize = 2;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  case 3:
-    switch (coneSize) {
-    case 6: /* triangular prism */
+    case DM_POLYTOPE_TRI_PRISM:
+      if (faces) {
+        facesTmp[0]  = cone[0]; facesTmp[1]  = cone[2]; facesTmp[2]  = cone[1]; facesTmp[3]  = -1;      /* Bottom */
+        facesTmp[4]  = cone[3]; facesTmp[5]  = cone[4]; facesTmp[6]  = cone[5]; facesTmp[7]  = -1;      /* Top */
+        facesTmp[8]  = cone[0]; facesTmp[9]  = cone[1]; facesTmp[10] = cone[4]; facesTmp[11] = cone[3]; /* Back left */
+        facesTmp[12] = cone[1]; facesTmp[13] = cone[2]; facesTmp[14] = cone[5]; facesTmp[15] = cone[4]; /* Back right */
+        facesTmp[16] = cone[2]; facesTmp[17] = cone[0]; facesTmp[18] = cone[3]; facesTmp[19] = cone[5]; /* Front */
+        *faces = facesTmp;
+      }
+      if (numFaces)     *numFaces = 5;
+      if (numFacesNotH) *numFacesNotH = 2;
+      if (faceSize)     *faceSize = -4;
+      break;
+    case DM_POLYTOPE_TRI_PRISM_TENSOR:
       if (faces) {
         facesTmp[0]  = cone[0]; facesTmp[1]  = cone[1]; facesTmp[2]  = cone[2]; facesTmp[3]  = -1;      /* Bottom */
         facesTmp[4]  = cone[3]; facesTmp[5]  = cone[4]; facesTmp[6]  = cone[5]; facesTmp[7]  = -1;      /* Top */
@@ -228,17 +233,12 @@ static PetscErrorCode DMPlexGetRawFacesHybrid_Internal(DM dm, PetscInt dim, Pets
       if (numFacesNotH) *numFacesNotH = 2;
       if (faceSize)     *faceSize = -4;
       break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cone size %D not supported for dimension %D", coneSize, dim);
-    }
-    break;
-  default:
-    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %D not supported", dim);
+    default: SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No face description for cell type %s", DMPolytopeTypes[ct]);
   }
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexRestoreRawFacesHybrid_Internal(DM dm, PetscInt dim, PetscInt coneSize, const PetscInt cone[], PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
+static PetscErrorCode DMPlexRestoreRawFacesHybrid_Internal(DM dm, DMPolytopeType ct, const PetscInt cone[], PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
 {
   PetscErrorCode  ierr;
 
@@ -247,17 +247,17 @@ static PetscErrorCode DMPlexRestoreRawFacesHybrid_Internal(DM dm, PetscInt dim,
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexGetFacesHybrid_Internal(DM dm, PetscInt dim, PetscInt p, PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
+static PetscErrorCode DMPlexGetFacesHybrid_Internal(DM dm, PetscInt p, PetscInt *numFaces, PetscInt *numFacesNotH, PetscInt *faceSize, const PetscInt *faces[])
 {
   const PetscInt *cone = NULL;
-  PetscInt        coneSize;
+  DMPolytopeType  ct;
   PetscErrorCode  ierr;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
-  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
+  ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
-  ierr = DMPlexGetRawFacesHybrid_Internal(dm, dim, coneSize, cone, numFaces, numFacesNotH, faceSize, faces);CHKERRQ(ierr);
+  ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, numFaces, numFacesNotH, faceSize, faces);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -302,14 +302,16 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
   pMax = pMax < 0 ? pEnd[cellDepth] : pMax;
   if (pMax < pEnd[cellDepth]) {
     const PetscInt *cellFaces, *cone;
+    DMPolytopeType  ct;
     PetscInt        numCellFacesT, faceSize, cf;
 
     /* First get normal cell face size (we now allow hybrid cells to meet normal cells on either hybrid or normal faces */
-    if (pStart[cellDepth] < pMax) {ierr = DMPlexGetFaces_Internal(dm, cellDim, pStart[cellDepth], NULL, &faceSizeAll, NULL);CHKERRQ(ierr);}
+    if (pStart[cellDepth] < pMax) {ierr = DMPlexGetFaces_Internal(dm, pStart[cellDepth], NULL, &faceSizeAll, NULL);CHKERRQ(ierr);}
 
+    ierr = DMPlexGetCellType(dm, pMax, &ct);CHKERRQ(ierr);
     ierr = DMPlexGetConeSize(dm, pMax, &coneSizeH);CHKERRQ(ierr);
     ierr = DMPlexGetCone(dm, pMax, &cone);CHKERRQ(ierr);
-    ierr = DMPlexGetRawFacesHybrid_Internal(dm, cellDim, coneSizeH, cone, &numCellFacesH, &numCellFacesT, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, &numCellFacesH, &numCellFacesT, &faceSize, &cellFaces);CHKERRQ(ierr);
     if (faceSize < 0) {
       PetscInt *sizes, minv, maxv;
 
@@ -343,9 +345,9 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
     } else { /* the size of the faces in hybrid cells is the same */
       faceSizeAll = faceSizeAllH = faceSizeAllT = faceSize;
     }
-    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, cellDim, coneSizeH, cone, &numCellFacesH, &numCellFacesT, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, ct, cone, &numCellFacesH, &numCellFacesT, &faceSize, &cellFaces);CHKERRQ(ierr);
   } else if (pEnd[cellDepth] > pStart[cellDepth]) {
-    ierr = DMPlexGetFaces_Internal(dm, cellDim, pStart[cellDepth], NULL, &faceSizeAll, NULL);CHKERRQ(ierr);
+    ierr = DMPlexGetFaces_Internal(dm, pStart[cellDepth], NULL, &faceSizeAll, NULL);CHKERRQ(ierr);
     faceSizeAllH = faceSizeAllT = faceSizeAll;
   }
   if (faceSizeAll > 4) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Do not support interpolation of meshes with faces of %D vertices", faceSizeAll);
@@ -364,17 +366,19 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
       PetscInt        numCellFaces, faceSize, faceSizeInc, faceSizeCheck, cf;
 
       if (c < pMax) {
-        ierr = DMPlexGetFaces_Internal(dm, cellDim, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexGetFaces_Internal(dm, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
         if (faceSize != faceSizeAll) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent face for cell %D of size %D != %D", c, faceSize, faceSizeAll);
         faceSizeCheck = faceSizeAll;
       } else { /* Hybrid cell */
         const PetscInt *cone;
+        DMPolytopeType  ct;
         PetscInt        numCellFacesN, coneSize;
 
+        ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
         ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
         if (coneSize != coneSizeH) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected hybrid coneSize %D != %D", coneSize, coneSizeH);
         ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
-        ierr = DMPlexGetRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
         if (numCellFaces != numCellFacesH) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected numCellFaces %D != %D for hybrid cell %D", numCellFaces, numCellFacesH, c);
         faceSize = PetscMax(faceSize, -faceSize);
         if (faceSize > 4) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Do not support interpolation of meshes with faces of %D vertices", faceSize);
@@ -410,9 +414,9 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
         }
       }
       if (c < pMax) {
-        ierr = DMPlexRestoreFaces_Internal(dm, cellDim, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexRestoreFaces_Internal(dm, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
       } else {
-        ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, cellDim, coneSizeH, NULL, NULL, NULL, NULL, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, DM_POLYTOPE_UNKNOWN, NULL, NULL, NULL, NULL, &cellFaces);CHKERRQ(ierr);
       }
     }
   }
@@ -421,11 +425,12 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
   /* Second pass for hybrid meshes: number hybrid faces */
   for (c = pMax; c < pEnd[cellDepth]; ++c) {
     const PetscInt *cellFaces, *cone;
-    PetscInt        numCellFaces, numCellFacesN, faceSize, cf, coneSize;
+    DMPolytopeType  ct;
+    PetscInt        numCellFaces, numCellFacesN, faceSize, cf;
 
-    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
+    ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
     ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
-    ierr = DMPlexGetRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
     if (numCellFaces != numCellFacesH) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected hybrid numCellFaces %D != %D", numCellFaces, numCellFacesH);
     faceSize = PetscMax(faceSize, -faceSize);
     for (cf = numCellFacesN; cf < numCellFaces; ++cf) { /* These are the hybrid faces */
@@ -451,7 +456,7 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
       ierr = PetscHashIJKLPut(faceTable, key, &iter, &missing);CHKERRQ(ierr);
       if (missing) {ierr = PetscHashIJKLIterSet(faceTable, iter, face++);CHKERRQ(ierr);}
     }
-    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
   }
   faceH = face - pEnd[faceDepth];
   if (faceH) {
@@ -495,9 +500,9 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
       for (p = pStart[d]; p < pEnd[d]; ++p) {
         /* Number of cell faces may be different from number of cell vertices*/
         if (p < pMax) {
-          ierr = DMPlexGetFaces_Internal(dm, cellDim, p, &coneSize, NULL, NULL);CHKERRQ(ierr);
+          ierr = DMPlexGetFaces_Internal(dm, p, &coneSize, NULL, NULL);CHKERRQ(ierr);
         } else {
-          ierr = DMPlexGetFacesHybrid_Internal(dm, cellDim, p, &coneSize, NULL, NULL, NULL);CHKERRQ(ierr);
+          ierr = DMPlexGetFacesHybrid_Internal(dm, p, &coneSize, NULL, NULL, NULL);CHKERRQ(ierr);
         }
         ierr = DMPlexSetConeSize(idm, p, coneSize);CHKERRQ(ierr);
       }
@@ -533,16 +538,18 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
       PetscInt        numCellFaces, faceSize, faceSizeInc, cf;
 
       if (c < pMax) {
-        ierr = DMPlexGetFaces_Internal(dm, cellDim, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexGetFaces_Internal(dm, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
         if (faceSize != faceSizeAll) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent face for cell %D of size %D != %D", c, faceSize, faceSizeAll);
       } else {
         const PetscInt *cone;
+        DMPolytopeType  ct;
         PetscInt        numCellFacesN, coneSize;
 
+        ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
         ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
         if (coneSize != coneSizeH) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected hybrid coneSize %D != %D", coneSize, coneSizeH);
         ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
-        ierr = DMPlexGetRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
         if (numCellFaces != numCellFacesH) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected numCellFaces %D != %D for hybrid cell %D", numCellFaces, numCellFacesH, c);
         faceSize = PetscMax(faceSize, -faceSize);
         if (faceSize > 4) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Do not support interpolation of meshes with faces of %D vertices", faceSize);
@@ -601,20 +608,22 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
         }
       }
       if (c < pMax) {
-        ierr = DMPlexRestoreFaces_Internal(dm, cellDim, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexRestoreFaces_Internal(dm, c, &numCellFaces, &faceSize, &cellFaces);CHKERRQ(ierr);
       } else {
-        ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, cellDim, coneSizeH, NULL, NULL, NULL, NULL, &cellFaces);CHKERRQ(ierr);
+        ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, DM_POLYTOPE_UNKNOWN, NULL, NULL, NULL, NULL, &cellFaces);CHKERRQ(ierr);
       }
     }
   }
   /* Second pass for hybrid meshes: orient hybrid faces */
   for (c = pMax; c < pEnd[cellDepth]; ++c) {
     const PetscInt *cellFaces, *cone;
-    PetscInt        numCellFaces, numCellFacesN, faceSize, cf, coneSize;
+    DMPolytopeType  ct;
+    PetscInt        numCellFaces, numCellFacesN, faceSize, coneSize, cf;
 
+    ierr = DMPlexGetCellType(dm, c, &ct);CHKERRQ(ierr);
     ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
     ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
-    ierr = DMPlexGetRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
     if (numCellFaces != numCellFacesH) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unexpected hybrid numCellFaces %D != %D", numCellFaces, numCellFacesH);
     faceSize = PetscMax(faceSize, -faceSize);
     for (cf = numCellFacesN; cf < numCellFaces; ++cf) { /* These are the hybrid faces */
@@ -698,7 +707,7 @@ static PetscErrorCode DMPlexInterpolateFaces_Internal(DM dm, PetscInt cellDepth,
         ierr = DMPlexInsertConeOrientation(idm, c, cf, ornt);CHKERRQ(ierr);
       }
     }
-    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, cellDim, coneSize, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreRawFacesHybrid_Internal(dm, ct, cone, &numCellFaces, &numCellFacesN, &faceSize, &cellFaces);CHKERRQ(ierr);
   }
   if (face != pEnd[faceDepth]) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of faces %D should be %D", face-pStart[faceDepth], pEnd[faceDepth]-pStart[faceDepth]);
   ierr = PetscFree2(pStart,pEnd);CHKERRQ(ierr);
@@ -886,27 +895,204 @@ static PetscErrorCode SFNodeArrayViewFromOptions(MPI_Comm comm, const char opt[]
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexMapToLocalPoint(PetscHMapIJ roothash, const PetscInt localPoints[], PetscMPIInt rank, PetscSFNode remotePoint, PetscInt *localPoint)
+static PetscErrorCode DMPlexMapToLocalPoint(DM dm, PetscHMapIJ remotehash, PetscSFNode remotePoint, PetscInt *localPoint)
 {
-  PetscErrorCode ierr;
+  PetscSF         sf;
+  const PetscInt *locals;
+  PetscMPIInt     rank;
+  PetscErrorCode  ierr;
 
   PetscFunctionBegin;
+  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
+  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
+  ierr = PetscSFGetGraph(sf, NULL, NULL, &locals, NULL);CHKERRQ(ierr);
   if (remotePoint.rank == rank) {
     *localPoint = remotePoint.index;
   } else {
     PetscHashIJKey key;
-    PetscInt       root;
+    PetscInt       l;
 
     key.i = remotePoint.index;
     key.j = remotePoint.rank;
-    ierr = PetscHMapIJGet(roothash, key, &root);CHKERRQ(ierr);
-    if (root >= 0) {
-      *localPoint = localPoints[root];
+    ierr = PetscHMapIJGet(remotehash, key, &l);CHKERRQ(ierr);
+    if (l >= 0) {
+      *localPoint = locals[l];
     } else PetscFunctionReturn(1);
   }
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode DMPlexMapToGlobalPoint(DM dm, PetscInt localPoint, PetscSFNode *remotePoint)
+{
+  PetscSF            sf;
+  const PetscInt    *locals, *rootdegree;
+  const PetscSFNode *remotes;
+  PetscInt           Nl, l;
+  PetscMPIInt        rank;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBegin;
+  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
+  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
+  ierr = PetscSFGetGraph(sf, NULL, &Nl, &locals, &remotes);CHKERRQ(ierr);
+  if (Nl < 0) goto owned;
+  ierr = PetscSFComputeDegreeBegin(sf, &rootdegree);CHKERRQ(ierr);
+  ierr = PetscSFComputeDegreeEnd(sf, &rootdegree);CHKERRQ(ierr);
+  if (rootdegree[localPoint]) goto owned;
+  ierr = PetscFindInt(localPoint, Nl, locals, &l);CHKERRQ(ierr);
+  if (l < 0) PetscFunctionReturn(1);
+  *remotePoint = remotes[l];
+  PetscFunctionReturn(0);
+  owned:
+  remotePoint->rank  = rank;
+  remotePoint->index = localPoint;
+  PetscFunctionReturn(0);
+}
+
+
+static PetscErrorCode DMPlexPointIsShared(DM dm, PetscInt p, PetscBool *isShared)
+{
+  PetscSF         sf;
+  const PetscInt *locals, *rootdegree;
+  PetscInt        Nl, idx;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  *isShared = PETSC_FALSE;
+  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
+  ierr = PetscSFGetGraph(sf, NULL, &Nl, &locals, NULL);CHKERRQ(ierr);
+  if (Nl < 0) PetscFunctionReturn(0);
+  ierr = PetscFindInt(p, Nl, locals, &idx);CHKERRQ(ierr);
+  if (idx >= 0) {*isShared = PETSC_TRUE; PetscFunctionReturn(0);}
+  ierr = PetscSFComputeDegreeBegin(sf, &rootdegree);CHKERRQ(ierr);
+  ierr = PetscSFComputeDegreeEnd(sf, &rootdegree);CHKERRQ(ierr);
+  if (rootdegree[p] > 0) *isShared = PETSC_TRUE;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode DMPlexConeIsShared(DM dm, PetscInt p, PetscBool *isShared)
+{
+  const PetscInt *cone;
+  PetscInt        coneSize, c;
+  PetscBool       cShared = PETSC_TRUE;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
+  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
+  for (c = 0; c < coneSize; ++c) {
+    PetscBool pointShared;
+
+    ierr = DMPlexPointIsShared(dm, cone[c], &pointShared);CHKERRQ(ierr);
+    cShared = (PetscBool) (cShared && pointShared);
+  }
+  *isShared = coneSize ? cShared : PETSC_FALSE;
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode DMPlexGetConeMinimum(DM dm, PetscInt p, PetscSFNode *cpmin)
+{
+  const PetscInt *cone;
+  PetscInt        coneSize, c;
+  PetscSFNode     cmin = {PETSC_MAX_INT, PETSC_MAX_INT}, missing = {-1, -1};
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
+  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
+  for (c = 0; c < coneSize; ++c) {
+    PetscSFNode rcp;
+
+    ierr = DMPlexMapToGlobalPoint(dm, cone[c], &rcp);
+    if (ierr) {
+      cmin = missing;
+    } else {
+      cmin = (rcp.rank < cmin.rank) || (rcp.rank == cmin.rank && rcp.index < cmin.index) ? rcp : cmin;
+    }
+  }
+  *cpmin = coneSize ? cmin : missing;
+  PetscFunctionReturn(0);
+}
+
+/*
+  Each shared face has an entry in the candidates array:
+    (-1, coneSize-1), (rank, face), {(global cone point)}
+  where the global cone points omit the point p that we use as the key for the face
+  (a decoding sketch is given after this function)
+*/
+static PetscErrorCode DMPlexAddSharedFace_Private(DM dm, PetscSection candidateSection, PetscSFNode candidates[], PetscHMapIJ faceHash, PetscInt p, PetscBool debug)
+{
+  MPI_Comm        comm;
+  const PetscInt *support;
+  PetscInt        supportSize, s, off = 0, idx = 0, overlap, cellHeight, height;
+  PetscMPIInt     rank;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  ierr = DMPlexGetOverlap(dm, &overlap);CHKERRQ(ierr);
+  ierr = DMPlexGetVTKCellHeight(dm, &cellHeight);CHKERRQ(ierr);
+  ierr = DMPlexGetPointHeight(dm, p, &height);CHKERRQ(ierr);
+  if (!overlap && height <= cellHeight+1) {
+    /* cells can't be shared for non-overlapping meshes */
+    if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Skipping face %D to avoid adding cell to hashmap since this is a non-overlapping mesh\n", rank, p);CHKERRQ(ierr);}
+    PetscFunctionReturn(0);
+  }
+  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
+  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
+  if (candidates) {ierr = PetscSectionGetOffset(candidateSection, p, &off);CHKERRQ(ierr);}
+  for (s = 0; s < supportSize; ++s) {
+    const PetscInt  face = support[s];
+    const PetscInt *cone;
+    PetscSFNode     cpmin={-1,-1}, rp={-1,-1};
+    PetscInt        coneSize, c, f;
+    PetscBool       isShared = PETSC_FALSE;
+    PetscHashIJKey  key;
+
+    /* Only add point once */
+    if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Support face %D\n", rank, face);CHKERRQ(ierr);}
+    key.i = p;
+    key.j = face;
+    ierr = PetscHMapIJGet(faceHash, key, &f);CHKERRQ(ierr);
+    if (f >= 0) continue;
+    ierr = DMPlexConeIsShared(dm, face, &isShared);CHKERRQ(ierr);
+    ierr = DMPlexGetConeMinimum(dm, face, &cpmin);CHKERRQ(ierr);
+    ierr = DMPlexMapToGlobalPoint(dm, p, &rp);CHKERRQ(ierr);
+    if (debug) {
+      ierr = PetscSynchronizedPrintf(comm, "[%d]      Face point %D is shared: %d\n", rank, face, (int) isShared);CHKERRQ(ierr);
+      ierr = PetscSynchronizedPrintf(comm, "[%d]      Global point (%D, %D) Min Cone Point (%D, %D)\n", rank, rp.rank, rp.index, cpmin.rank, cpmin.index);CHKERRQ(ierr);
+    }
+    if (isShared && (rp.rank == cpmin.rank && rp.index == cpmin.index)) {
+      ierr = PetscHMapIJSet(faceHash, key, p);CHKERRQ(ierr);
+      if (candidates) {
+        if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Adding shared face %D at idx %D\n[%d]     ", rank, face, idx, rank);CHKERRQ(ierr);}
+        ierr = DMPlexGetConeSize(dm, face, &coneSize);CHKERRQ(ierr);
+        ierr = DMPlexGetCone(dm, face, &cone);CHKERRQ(ierr);
+        candidates[off+idx].rank    = -1;
+        candidates[off+idx++].index = coneSize-1;
+        candidates[off+idx].rank    = rank;
+        candidates[off+idx++].index = face;
+        for (c = 0; c < coneSize; ++c) {
+          const PetscInt cp = cone[c];
+
+          if (cp == p) continue;
+          ierr = DMPlexMapToGlobalPoint(dm, cp, &candidates[off+idx]);CHKERRQ(ierr);
+          if (debug) {ierr = PetscSynchronizedPrintf(comm, " (%D,%D)", candidates[off+idx].rank, candidates[off+idx].index);CHKERRQ(ierr);}
+          ++idx;
+        }
+        if (debug) {ierr = PetscSynchronizedPrintf(comm, "\n");CHKERRQ(ierr);}
+      } else {
+        /* Add cone size to section */
+        if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Scheduling shared face %D\n", rank, face);CHKERRQ(ierr);}
+        ierr = DMPlexGetConeSize(dm, face, &coneSize);CHKERRQ(ierr);
+        ierr = PetscHMapIJSet(faceHash, key, p);CHKERRQ(ierr);
+        ierr = PetscSectionAddDof(candidateSection, p, coneSize+1);CHKERRQ(ierr);
+      }
+    }
+  }
+  PetscFunctionReturn(0);
+}
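A minimal sketch of how one candidate record with the layout above could be decoded on the receiving side (illustrative only, not part of the patch; off is assumed to be the PetscSection offset of the record):

    const PetscInt     Np    = candidates[off].index + 1;  /* cone size of the face, including the key point p */
    const PetscSFNode  rface = candidates[off+1];          /* (rank, local face number) on the sending process */
    const PetscSFNode *fcone = &candidates[off+2];         /* the remaining Np-1 cone points as global (rank, index) pairs */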
+
 /*@
   DMPlexInterpolatePointSF - Insert interpolated points in the overlap into the PointSF in parallel, following local interpolation
 
@@ -919,7 +1105,7 @@ static PetscErrorCode DMPlexMapToLocalPoint(PetscHMapIJ roothash, const PetscInt
   Output Parameter:
 . pointSF - The SF including interpolated points
 
-  Level: intermediate
+  Level: developer
 
    Note: All debugging for this process can be turned on with the options: -dm_interp_pre_view -petscsf_interp_pre_view -petscsection_interp_candidate_view -petscsection_interp_candidate_remote_view -petscsection_interp_claim_view -dmplex_interp_debug
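    For example, the same tracing can be enabled programmatically before the interpolation call; this is only an illustrative sketch using the options database (any subset of the options above can be set this way):

      ierr = PetscOptionsSetValue(NULL, "-dmplex_interp_debug", NULL);CHKERRQ(ierr);
      ierr = PetscOptionsSetValue(NULL, "-petscsection_interp_candidate_view", NULL);CHKERRQ(ierr);
      ierr = PetscOptionsSetValue(NULL, "-petscsection_interp_claim_view", NULL);CHKERRQ(ierr);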
 
@@ -927,166 +1113,92 @@ static PetscErrorCode DMPlexMapToLocalPoint(PetscHMapIJ roothash, const PetscInt
 @*/
 PetscErrorCode DMPlexInterpolatePointSF(DM dm, PetscSF pointSF)
 {
-  /*
-       Okay, the algorithm is:
-         - Take each point in the overlap (root)
-         - Look at the neighboring points in the overlap (candidates)
-         - Send these candidate points to neighbors
-         - Neighbor checks for edge between root and candidate
-         - If edge is found, it replaces candidate point with edge point
-         - Send back the overwritten candidates (claims)
-         - Original guy checks for edges, different from original candidate, and gets its own edge
-         - This pair is put into SF
-
-       We need a new algorithm that tolerates groups larger than 2.
-         - Take each point in the overlap (root)
-         - Find all collections of points in the overlap which make faces (do early join)
-         - Send collections as candidates (add size as first number)
-           - Make sure to send collection to all owners of all overlap points in collection
-         - Neighbor check for face in collections
-         - If face is found, it replaces candidate point with face point
-         - Send back the overwritten candidates (claims)
-         - Original guy checks for faces, different from original candidate, and gets its own face
-         - This pair is put into SF
-  */
-  PetscHMapI         leafhash;
-  PetscHMapIJ        roothash;
+  MPI_Comm           comm;
+  PetscHMapIJ        remoteHash;
+  PetscHMapI         claimshash;
+  PetscSection       candidateSection, candidateRemoteSection, claimSection;
+  PetscSFNode       *candidates, *candidatesRemote, *claims;
   const PetscInt    *localPoints, *rootdegree;
   const PetscSFNode *remotePoints;
-  PetscSFNode       *candidates, *candidatesRemote, *claims;
-  PetscSection       candidateSection, candidateSectionRemote, claimSection;
-  PetscInt           numLeaves, l, numRoots, r, candidatesSize, candidatesRemoteSize;
-  PetscMPIInt        size, rank;
-  PetscHashIJKey     key;
-  PetscBool          debug = PETSC_FALSE;
+  PetscInt           ov, Nr, r, Nl, l;
+  PetscInt           candidatesSize, candidatesRemoteSize, claimsSize;
+  PetscBool          flg, debug = PETSC_FALSE;
+  PetscMPIInt        rank;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  PetscValidHeaderSpecific(pointSF, PETSCSF_CLASSID, 3);
+  ierr = DMPlexIsDistributed(dm, &flg);CHKERRQ(ierr);
+  if (!flg) PetscFunctionReturn(0);
+  /* Set initial SF so that lower level queries work */
+  ierr = DMSetPointSF(dm, pointSF);CHKERRQ(ierr);
+  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  ierr = DMPlexGetOverlap(dm, &ov);CHKERRQ(ierr);
+  if (ov) SETERRQ(comm, PETSC_ERR_SUP, "Interpolation of overlapped DMPlex not implemented yet");
   ierr = PetscOptionsHasName(NULL, ((PetscObject) dm)->prefix, "-dmplex_interp_debug", &debug);CHKERRQ(ierr);
-  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
-  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
-  ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &localPoints, &remotePoints);CHKERRQ(ierr);
-  if (size < 2 || numRoots < 0) PetscFunctionReturn(0);
-  ierr = DMPlexGetOverlap(dm, &r);CHKERRQ(ierr);
-  if (r) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Interpolation of overlapped DMPlex not implemented yet");
   ierr = PetscObjectViewFromOptions((PetscObject) dm, NULL, "-dm_interp_pre_view");CHKERRQ(ierr);
   ierr = PetscObjectViewFromOptions((PetscObject) pointSF, NULL, "-petscsf_interp_pre_view");CHKERRQ(ierr);
   ierr = PetscLogEventBegin(DMPLEX_InterpolateSF,dm,0,0,0);CHKERRQ(ierr);
-  /* Build hashes of points in the SF for efficient lookup */
-  ierr = PetscHMapICreate(&leafhash);CHKERRQ(ierr);
-  ierr = PetscHMapIJCreate(&roothash);CHKERRQ(ierr);
-  for (l = 0; l < numLeaves; ++l) {
+  /* Step 0: Precalculations */
+  ierr = PetscSFGetGraph(pointSF, &Nr, &Nl, &localPoints, &remotePoints);CHKERRQ(ierr);
+  if (Nr < 0) SETERRQ(comm, PETSC_ERR_ARG_WRONGSTATE, "This DMPlex is distributed but input PointSF has no graph set");
+  ierr = PetscHMapIJCreate(&remoteHash);CHKERRQ(ierr);
+  for (l = 0; l < Nl; ++l) {
+    PetscHashIJKey key;
     key.i = remotePoints[l].index;
     key.j = remotePoints[l].rank;
-    ierr = PetscHMapISet(leafhash, localPoints[l], l);CHKERRQ(ierr);
-    ierr = PetscHMapIJSet(roothash, key, l);CHKERRQ(ierr);
+    ierr = PetscHMapIJSet(remoteHash, key, l);CHKERRQ(ierr);
   }
-  /* Compute root degree to identify shared points */
+  /*   Compute root degree to identify shared points */
   ierr = PetscSFComputeDegreeBegin(pointSF, &rootdegree);CHKERRQ(ierr);
   ierr = PetscSFComputeDegreeEnd(pointSF, &rootdegree);CHKERRQ(ierr);
-  ierr = IntArrayViewFromOptions(PetscObjectComm((PetscObject) dm), "-interp_root_degree_view", "Root degree", "point", "degree", numRoots, rootdegree);CHKERRQ(ierr);
-  /* Build a section / SFNode array of candidate points (face bd points) in the cone(support(leaf)),
-     where each candidate is defined by a set of remote points (roots) for the other points that define the face. */
-  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &candidateSection);CHKERRQ(ierr);
-  ierr = PetscSectionSetChart(candidateSection, 0, numRoots);CHKERRQ(ierr);
+  ierr = IntArrayViewFromOptions(comm, "-interp_root_degree_view", "Root degree", "point", "degree", Nr, rootdegree);CHKERRQ(ierr);
+  /*
+    Algorithm:
+    1) Loop over each leaf point p at depth d in the SF
+    2) Get the set F(p) of faces f in the support of p for which
+       - all cone points of f are shared
+       - p is the cone point with smallest canonical number
+    3) Send F(p) and the cone of each face to the active root point r(p)
+    4) At the root, if at least two faces with a given cone are present, including a local face,
+       mark the face as shared and choose the root face
+    5) Send the root face from the root back to all leaf processes
+    6) Leaf processes add the shared face to the SF
+  */
+  /* Step 1: Construct section+SFNode array
+       The section has entries for all shared faces for which we have a leaf point in the cone
+       The array holds candidate shared faces; each face is referred to by the leaf point */
+  ierr = PetscSectionCreate(comm, &candidateSection);CHKERRQ(ierr);
+  ierr = PetscSectionSetChart(candidateSection, 0, Nr);CHKERRQ(ierr);
   {
-    PetscHMapIJ facehash;
-
-    ierr = PetscHMapIJCreate(&facehash);CHKERRQ(ierr);
-    for (l = 0; l < numLeaves; ++l) {
-      const PetscInt    localPoint = localPoints[l];
-      const PetscInt   *support;
-      PetscInt          supportSize, s;
-
-      if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking local point %D\n", rank, localPoint);CHKERRQ(ierr);}
-      ierr = DMPlexGetSupportSize(dm, localPoint, &supportSize);CHKERRQ(ierr);
-      ierr = DMPlexGetSupport(dm, localPoint, &support);CHKERRQ(ierr);
-      for (s = 0; s < supportSize; ++s) {
-        const PetscInt  face = support[s];
-        const PetscInt *cone;
-        PetscInt        coneSize, c, f, root;
-        PetscBool       isFace = PETSC_TRUE;
-
-        /* Only add face once */
-        if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Support point %D\n", rank, face);CHKERRQ(ierr);}
-        key.i = localPoint;
-        key.j = face;
-        ierr = PetscHMapIJGet(facehash, key, &f);CHKERRQ(ierr);
-        if (f >= 0) continue;
-        ierr = DMPlexGetConeSize(dm, face, &coneSize);CHKERRQ(ierr);
-        ierr = DMPlexGetCone(dm, face, &cone);CHKERRQ(ierr);
-        /* If a cone point does not map to leaves on any proc, then do not put face in SF */
-        for (c = 0; c < coneSize; ++c) {
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]      Cone point %D\n", rank, cone[c]);CHKERRQ(ierr);}
-          ierr = PetscHMapIGet(leafhash, cone[c], &root);CHKERRQ(ierr);
-          if (!rootdegree[cone[c]] && (root < 0)) {isFace = PETSC_FALSE; break;}
-        }
-        if (isFace) {
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Found shared face %D\n", rank, face);CHKERRQ(ierr);}
-          ierr = PetscHMapIJSet(facehash, key, l);CHKERRQ(ierr);
-          ierr = PetscSectionAddDof(candidateSection, localPoint, coneSize);CHKERRQ(ierr);
-        }
-      }
+    PetscHMapIJ faceHash;
+
+    ierr = PetscHMapIJCreate(&faceHash);CHKERRQ(ierr);
+    for (l = 0; l < Nl; ++l) {
+      const PetscInt p = localPoints[l];
+
+      if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]  First pass leaf point %D\n", rank, p);CHKERRQ(ierr);}
+      ierr = DMPlexAddSharedFace_Private(dm, candidateSection, NULL, faceHash, p, debug);CHKERRQ(ierr);
     }
-    if (debug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), NULL);CHKERRQ(ierr);}
-    ierr = PetscHMapIJClear(facehash);CHKERRQ(ierr);
+    ierr = PetscHMapIJClear(faceHash);CHKERRQ(ierr);
     ierr = PetscSectionSetUp(candidateSection);CHKERRQ(ierr);
     ierr = PetscSectionGetStorageSize(candidateSection, &candidatesSize);CHKERRQ(ierr);
     ierr = PetscMalloc1(candidatesSize, &candidates);CHKERRQ(ierr);
-    for (l = 0; l < numLeaves; ++l) {
-      const PetscInt    localPoint = localPoints[l];
-      const PetscInt   *support;
-      PetscInt          supportSize, s, offset, idx = 0;
-
-      if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking local point %D\n", rank, localPoint);CHKERRQ(ierr);}
-      ierr = PetscSectionGetOffset(candidateSection, localPoint, &offset);CHKERRQ(ierr);
-      ierr = DMPlexGetSupportSize(dm, localPoint, &supportSize);CHKERRQ(ierr);
-      ierr = DMPlexGetSupport(dm, localPoint, &support);CHKERRQ(ierr);
-      for (s = 0; s < supportSize; ++s) {
-        const PetscInt  face = support[s];
-        const PetscInt *cone;
-        PetscInt        coneSize, c, f, root;
-        PetscBool       isFace = PETSC_TRUE;
-
-        /* Only add face once */
-        if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Support point %D\n", rank, face);CHKERRQ(ierr);}
-        key.i = localPoint;
-        key.j = face;
-        ierr = PetscHMapIJGet(facehash, key, &f);CHKERRQ(ierr);
-        if (f >= 0) continue;
-        ierr = DMPlexGetConeSize(dm, face, &coneSize);CHKERRQ(ierr);
-        ierr = DMPlexGetCone(dm, face, &cone);CHKERRQ(ierr);
-        /* If a cone point does not map to leaves on any proc, then do not put face in SF */
-        for (c = 0; c < coneSize; ++c) {
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]      Cone point %D\n", rank, cone[c]);CHKERRQ(ierr);}
-          ierr = PetscHMapIGet(leafhash, cone[c], &root);CHKERRQ(ierr);
-          if (!rootdegree[cone[c]] && (root < 0)) {isFace = PETSC_FALSE; break;}
-        }
-        if (isFace) {
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Adding shared face %D at idx %D\n", rank, face, idx);CHKERRQ(ierr);}
-          ierr = PetscHMapIJSet(facehash, key, l);CHKERRQ(ierr);
-          candidates[offset+idx].rank    = -1;
-          candidates[offset+idx++].index = coneSize-1;
-          for (c = 0; c < coneSize; ++c) {
-            if (cone[c] == localPoint) continue;
-            if (rootdegree[cone[c]]) {
-              candidates[offset+idx].rank    = rank;
-              candidates[offset+idx++].index = cone[c];
-            } else {
-              ierr = PetscHMapIGet(leafhash, cone[c], &root);CHKERRQ(ierr);
-              if (root < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot locate local point %D in SF", cone[c]);
-              candidates[offset+idx++] = remotePoints[root];
-            }
-          }
-        }
-      }
+    for (l = 0; l < Nl; ++l) {
+      const PetscInt p = localPoints[l];
+
+      if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]  Second pass leaf point %D\n", rank, p);CHKERRQ(ierr);}
+      ierr = DMPlexAddSharedFace_Private(dm, candidateSection, candidates, faceHash, p, debug);CHKERRQ(ierr);
     }
-    if (debug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), NULL);CHKERRQ(ierr);}
-    ierr = PetscHMapIJDestroy(&facehash);CHKERRQ(ierr);
-    ierr = PetscObjectViewFromOptions((PetscObject) candidateSection, NULL, "-petscsection_interp_candidate_view");CHKERRQ(ierr);
-    ierr = SFNodeArrayViewFromOptions(PetscObjectComm((PetscObject) dm), "-petscsection_interp_candidate_view", "Candidates", NULL, candidatesSize, candidates);CHKERRQ(ierr);
+    ierr = PetscHMapIJDestroy(&faceHash);CHKERRQ(ierr);
+    if (debug) {ierr = PetscSynchronizedFlush(comm, NULL);CHKERRQ(ierr);}
   }
-  /* Gather candidate section / array pair into the root partition via inverse(multi(pointSF)). */
+  ierr = PetscObjectSetName((PetscObject) candidateSection, "Candidate Section");CHKERRQ(ierr);
+  ierr = PetscObjectViewFromOptions((PetscObject) candidateSection, NULL, "-petscsection_interp_candidate_view");CHKERRQ(ierr);
+  ierr = SFNodeArrayViewFromOptions(comm, "-petscsection_interp_candidate_view", "Candidates", NULL, candidatesSize, candidates);CHKERRQ(ierr);
+  /* Step 2: Gather candidate section / array pair into the root partition via inverse(multi(pointSF)). */
   /*   Note that this section is indexed by offsets into leaves, not by point number */
   {
     PetscSF   sfMulti, sfInverse, sfCandidates;
@@ -1094,10 +1206,10 @@ PetscErrorCode DMPlexInterpolatePointSF(DM dm, PetscSF pointSF)
 
     ierr = PetscSFGetMultiSF(pointSF, &sfMulti);CHKERRQ(ierr);
     ierr = PetscSFCreateInverseSF(sfMulti, &sfInverse);CHKERRQ(ierr);
-    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &candidateSectionRemote);CHKERRQ(ierr);
-    ierr = PetscSFDistributeSection(sfInverse, candidateSection, &remoteOffsets, candidateSectionRemote);CHKERRQ(ierr);
-    ierr = PetscSFCreateSectionSF(sfInverse, candidateSection, remoteOffsets, candidateSectionRemote, &sfCandidates);CHKERRQ(ierr);
-    ierr = PetscSectionGetStorageSize(candidateSectionRemote, &candidatesRemoteSize);CHKERRQ(ierr);
+    ierr = PetscSectionCreate(comm, &candidateRemoteSection);CHKERRQ(ierr);
+    ierr = PetscSFDistributeSection(sfInverse, candidateSection, &remoteOffsets, candidateRemoteSection);CHKERRQ(ierr);
+    ierr = PetscSFCreateSectionSF(sfInverse, candidateSection, remoteOffsets, candidateRemoteSection, &sfCandidates);CHKERRQ(ierr);
+    ierr = PetscSectionGetStorageSize(candidateRemoteSection, &candidatesRemoteSize);CHKERRQ(ierr);
     ierr = PetscMalloc1(candidatesRemoteSize, &candidatesRemote);CHKERRQ(ierr);
     ierr = PetscSFBcastBegin(sfCandidates, MPIU_2INT, candidates, candidatesRemote);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sfCandidates, MPIU_2INT, candidates, candidatesRemote);CHKERRQ(ierr);
@@ -1105,128 +1217,219 @@ PetscErrorCode DMPlexInterpolatePointSF(DM dm, PetscSF pointSF)
     ierr = PetscSFDestroy(&sfCandidates);CHKERRQ(ierr);
     ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
 
-    ierr = PetscObjectViewFromOptions((PetscObject) candidateSectionRemote, NULL, "-petscsection_interp_candidate_remote_view");CHKERRQ(ierr);
-    ierr = SFNodeArrayViewFromOptions(PetscObjectComm((PetscObject) dm), "-petscsection_interp_candidate_remote_view", "Remote Candidates", NULL, candidatesRemoteSize, candidatesRemote);CHKERRQ(ierr);
+    ierr = PetscObjectSetName((PetscObject) candidateRemoteSection, "Remote Candidate Section");CHKERRQ(ierr);
+    ierr = PetscObjectViewFromOptions((PetscObject) candidateRemoteSection, NULL, "-petscsection_interp_candidate_remote_view");CHKERRQ(ierr);
+    ierr = SFNodeArrayViewFromOptions(comm, "-petscsection_interp_candidate_remote_view", "Remote Candidates", NULL, candidatesRemoteSize, candidatesRemote);CHKERRQ(ierr);
   }
-  /* */
+  /* Step 3: At the root, if at least two faces with a given cone are present, including a local face, mark the face as shared and choose the root face */
   {
-    PetscInt idx;
+    PetscHashIJKLRemote faceTable;
+    PetscInt            idx, idx2;
+
+    ierr = PetscHashIJKLRemoteCreate(&faceTable);CHKERRQ(ierr);
     /* There is a section point for every leaf attached to a given root point */
-    for (r = 0, idx = 0; r < numRoots; ++r) {
+    for (r = 0, idx = 0, idx2 = 0; r < Nr; ++r) {
       PetscInt deg;
+
       for (deg = 0; deg < rootdegree[r]; ++deg, ++idx) {
         PetscInt offset, dof, d;
 
-        ierr = PetscSectionGetDof(candidateSectionRemote, idx, &dof);CHKERRQ(ierr);
-        ierr = PetscSectionGetOffset(candidateSectionRemote, idx, &offset);CHKERRQ(ierr);
+        ierr = PetscSectionGetDof(candidateRemoteSection, idx, &dof);CHKERRQ(ierr);
+        ierr = PetscSectionGetOffset(candidateRemoteSection, idx, &offset);CHKERRQ(ierr);
+        /* dof may include many faces from the remote process */
         for (d = 0; d < dof; ++d) {
-          const PetscInt  sizeInd   = offset+d;
-          const PetscInt  numPoints = candidatesRemote[sizeInd].index;
-          const PetscInt *join      = NULL;
-          PetscInt        points[1024], p, joinSize;
+          const PetscInt         hidx  = offset+d;
+          const PetscInt         Np    = candidatesRemote[hidx].index+1;
+          const PetscSFNode      rface = candidatesRemote[hidx+1];
+          const PetscSFNode     *fcone = &candidatesRemote[hidx+2];
+          PetscSFNode            fcp0;
+          const PetscSFNode      pmax  = {PETSC_MAX_INT, PETSC_MAX_INT};
+          const PetscInt        *join  = NULL;
+          PetscHashIJKLRemoteKey key;
+          PetscHashIter          iter;
+          PetscBool              missing;
+          PetscInt               points[1024], p, joinSize;
+
+          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking face (%D, %D) at (%D, %D, %D) with cone size %D\n", rank, rface.rank, rface.index, r, idx, d, Np);CHKERRQ(ierr);}
+          if (Np > 4) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle face (%D, %D) at (%D, %D, %D) with %D cone points", rface.rank, rface.index, r, idx, d, Np);
+          fcp0.rank  = rank;
+          fcp0.index = r;
+          d += Np;
+          /* Put remote face in hash table */
+          key.i = fcp0;
+          key.j = fcone[0];
+          key.k = Np > 2 ? fcone[1] : pmax;
+          key.l = Np > 3 ? fcone[2] : pmax;
+          ierr = PetscSortSFNode(Np, (PetscSFNode *) &key);CHKERRQ(ierr);
+          ierr = PetscHashIJKLRemotePut(faceTable, key, &iter, &missing);CHKERRQ(ierr);
+          if (missing) {
+            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Setting remote face (%D, %D)\n", rank, rface.index, rface.rank);CHKERRQ(ierr);}
+            ierr = PetscHashIJKLRemoteIterSet(faceTable, iter, rface);CHKERRQ(ierr);
+          } else {
+            PetscSFNode oface;
 
+            ierr = PetscHashIJKLRemoteIterGet(faceTable, iter, &oface);CHKERRQ(ierr);
+            if ((rface.rank < oface.rank) || (rface.rank == oface.rank && rface.index < oface.index)) {
+              if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Replacing with remote face (%D, %D)\n", rank, rface.index, rface.rank);CHKERRQ(ierr);}
+              ierr = PetscHashIJKLRemoteIterSet(faceTable, iter, rface);CHKERRQ(ierr);
+            }
+          }
+          /* Check for local face */
           points[0] = r;
-          for (p = 0; p < numPoints; ++p) {
-            ierr = DMPlexMapToLocalPoint(roothash, localPoints, rank, candidatesRemote[offset+(++d)], &points[p+1]);
-            if (ierr) {d += numPoints-1 - p; break;} /* We got a point not in our overlap */
-            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking local candidate %D\n", rank, points[p+1]);CHKERRQ(ierr);}
+          for (p = 1; p < Np; ++p) {
+            ierr = DMPlexMapToLocalPoint(dm, remoteHash, fcone[p-1], &points[p]);
+            if (ierr) break; /* We got a point not in our overlap */
+            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking local candidate %D\n", rank, points[p]);CHKERRQ(ierr);}
           }
           if (ierr) continue;
-          ierr = DMPlexGetJoin(dm, numPoints+1, points, &joinSize, &join);CHKERRQ(ierr);
+          ierr = DMPlexGetJoin(dm, Np, points, &joinSize, &join);CHKERRQ(ierr);
           if (joinSize == 1) {
-            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Adding face %D at idx %D\n", rank, join[0], sizeInd);CHKERRQ(ierr);}
-            candidatesRemote[sizeInd].rank  = rank;
-            candidatesRemote[sizeInd].index = join[0];
+            PetscSFNode lface;
+            PetscSFNode oface;
+
+            /* Always replace with local face */
+            lface.rank  = rank;
+            lface.index = join[0];
+            ierr = PetscHashIJKLRemoteIterGet(faceTable, iter, &oface);CHKERRQ(ierr);
+            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Replacing (%D, %D) with local face (%D, %D)\n", rank, oface.index, oface.rank, lface.index, lface.rank);CHKERRQ(ierr);}
+            ierr = PetscHashIJKLRemoteIterSet(faceTable, iter, lface);CHKERRQ(ierr);
           }
-          ierr = DMPlexRestoreJoin(dm, numPoints+1, points, &joinSize, &join);CHKERRQ(ierr);
+          ierr = DMPlexRestoreJoin(dm, Np, points, &joinSize, &join);CHKERRQ(ierr);
+        }
+      }
+      /* Put back faces for this root */
+      for (deg = 0; deg < rootdegree[r]; ++deg, ++idx2) {
+        PetscInt offset, dof, d;
+
+        ierr = PetscSectionGetDof(candidateRemoteSection, idx2, &dof);CHKERRQ(ierr);
+        ierr = PetscSectionGetOffset(candidateRemoteSection, idx2, &offset);CHKERRQ(ierr);
+        /* dof may include many faces from the remote process */
+        for (d = 0; d < dof; ++d) {
+          const PetscInt         hidx  = offset+d;
+          const PetscInt         Np    = candidatesRemote[hidx].index+1;
+          const PetscSFNode     *fcone = &candidatesRemote[hidx+2];
+          PetscSFNode            fcp0;
+          const PetscSFNode      pmax  = {PETSC_MAX_INT, PETSC_MAX_INT};
+          PetscHashIJKLRemoteKey key;
+          PetscHashIter          iter;
+          PetscBool              missing;
+
+          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Entering face at (%D, %D)\n", rank, r, idx2);CHKERRQ(ierr);}
+          if (Np > 4) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot handle faces with %D cone points", Np);
+          fcp0.rank  = rank;
+          fcp0.index = r;
+          d += Np;
+          /* Find remote face in hash table */
+          key.i = fcp0;
+          key.j = fcone[0];
+          key.k = Np > 2 ? fcone[1] : pmax;
+          key.l = Np > 3 ? fcone[2] : pmax;
+          ierr = PetscSortSFNode(Np, (PetscSFNode *) &key);CHKERRQ(ierr);
+          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    key (%D, %D) (%D, %D) (%D, %D) (%D, %D)\n", rank, key.i.rank, key.i.index, key.j.rank, key.j.index, key.k.rank, key.k.index, key.l.rank, key.l.index);CHKERRQ(ierr);}
+          ierr = PetscHashIJKLRemotePut(faceTable, key, &iter, &missing);CHKERRQ(ierr);
+          if (missing) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Root %D Idx %D ought to have an associated face", r, idx2);
+          else        {ierr = PetscHashIJKLRemoteIterGet(faceTable, iter, &candidatesRemote[hidx]);CHKERRQ(ierr);}
         }
       }
     }
     if (debug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), NULL);CHKERRQ(ierr);}
+    ierr = PetscHashIJKLRemoteDestroy(&faceTable);CHKERRQ(ierr);
   }
-  /* Push claims back to receiver via the MultiSF and derive new pointSF mapping on receiver */
+  /* Step 4: Push back owned faces */
   {
-    PetscSF         sfMulti, sfClaims, sfPointNew;
-    PetscSFNode    *remotePointsNew;
-    PetscHMapI      claimshash;
-    PetscInt       *remoteOffsets, *localPointsNew;
-    PetscInt        claimsSize, pStart, pEnd, root, numLocalNew, p, d;
+    PetscSF      sfMulti, sfClaims, sfPointNew;
+    PetscSFNode *remotePointsNew;
+    PetscInt    *remoteOffsets, *localPointsNew;
+    PetscInt     pStart, pEnd, r, NlNew, p;
 
+    /* 4) Push claims back to receiver via the MultiSF and derive new pointSF mapping on receiver */
     ierr = PetscSFGetMultiSF(pointSF, &sfMulti);CHKERRQ(ierr);
-    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &claimSection);CHKERRQ(ierr);
-    ierr = PetscSFDistributeSection(sfMulti, candidateSectionRemote, &remoteOffsets, claimSection);CHKERRQ(ierr);
-    ierr = PetscSFCreateSectionSF(sfMulti, candidateSectionRemote, remoteOffsets, claimSection, &sfClaims);CHKERRQ(ierr);
+    ierr = PetscSectionCreate(comm, &claimSection);CHKERRQ(ierr);
+    ierr = PetscSFDistributeSection(sfMulti, candidateRemoteSection, &remoteOffsets, claimSection);CHKERRQ(ierr);
+    ierr = PetscSFCreateSectionSF(sfMulti, candidateRemoteSection, remoteOffsets, claimSection, &sfClaims);CHKERRQ(ierr);
     ierr = PetscSectionGetStorageSize(claimSection, &claimsSize);CHKERRQ(ierr);
     ierr = PetscMalloc1(claimsSize, &claims);CHKERRQ(ierr);
+    for (p = 0; p < claimsSize; ++p) claims[p].rank = -1;
     ierr = PetscSFBcastBegin(sfClaims, MPIU_2INT, candidatesRemote, claims);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sfClaims, MPIU_2INT, candidatesRemote, claims);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sfClaims);CHKERRQ(ierr);
     ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
+    ierr = PetscObjectSetName((PetscObject) claimSection, "Claim Section");CHKERRQ(ierr);
     ierr = PetscObjectViewFromOptions((PetscObject) claimSection, NULL, "-petscsection_interp_claim_view");CHKERRQ(ierr);
-    ierr = SFNodeArrayViewFromOptions(PetscObjectComm((PetscObject) dm), "-petscsection_interp_claim_view", "Claims", NULL, claimsSize, claims);CHKERRQ(ierr);
-    /* Walk the original section of local supports and add an SF entry for each updated item */
+    ierr = SFNodeArrayViewFromOptions(comm, "-petscsection_interp_claim_view", "Claims", NULL, claimsSize, claims);CHKERRQ(ierr);
+    /* Step 5) Walk the original section of local supports and add an SF entry for each updated item */
+    /* TODO I should not have to do a join here since I already put the face and its cone in the candidate section */
     ierr = PetscHMapICreate(&claimshash);CHKERRQ(ierr);
-    for (p = 0; p < numRoots; ++p) {
-      PetscInt dof, offset;
+    for (r = 0; r < Nr; ++r) {
+      PetscInt dof, off, d;
 
-      if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]  Checking root for claims %D\n", rank, p);CHKERRQ(ierr);}
-      ierr = PetscSectionGetDof(candidateSection, p, &dof);CHKERRQ(ierr);
-      ierr = PetscSectionGetOffset(candidateSection, p, &offset);CHKERRQ(ierr);
+      if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]  Checking root for claims %D\n", rank, r);CHKERRQ(ierr);}
+      ierr = PetscSectionGetDof(candidateSection, r, &dof);CHKERRQ(ierr);
+      ierr = PetscSectionGetOffset(candidateSection, r, &off);CHKERRQ(ierr);
       for (d = 0; d < dof;) {
-        if (claims[offset+d].rank >= 0) {
-          const PetscInt  faceInd   = offset+d;
-          const PetscInt  numPoints = candidates[faceInd].index;
-          const PetscInt *join      = NULL;
+        if (claims[off+d].rank >= 0) {
+          const PetscInt  faceInd = off+d;
+          const PetscInt  Np      = candidates[off+d].index;
+          const PetscInt *join    = NULL;
           PetscInt        joinSize, points[1024], c;
 
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Found claim for remote point (%D, %D)\n", rank, claims[faceInd].rank, claims[faceInd].index);CHKERRQ(ierr);}
-          points[0] = p;
-          if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]      point %D\n", rank, points[0]);CHKERRQ(ierr);}
-          for (c = 0, ++d; c < numPoints; ++c, ++d) {
-            key.i = candidates[offset+d].index;
-            key.j = candidates[offset+d].rank;
-            ierr = PetscHMapIJGet(roothash, key, &root);CHKERRQ(ierr);
-            points[c+1] = localPoints[root];
-            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]      point %D\n", rank, points[c+1]);CHKERRQ(ierr);}
+          if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Found claim for remote point (%D, %D)\n", rank, claims[faceInd].rank, claims[faceInd].index);CHKERRQ(ierr);}
+          points[0] = r;
+          if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]      point %D\n", rank, points[0]);CHKERRQ(ierr);}
+          for (c = 0, d += 2; c < Np; ++c, ++d) {
+            ierr = DMPlexMapToLocalPoint(dm, remoteHash, candidates[off+d], &points[c+1]);CHKERRQ(ierr);
+            if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]      point %D\n", rank, points[c+1]);CHKERRQ(ierr);}
           }
-          ierr = DMPlexGetJoin(dm, numPoints+1, points, &joinSize, &join);CHKERRQ(ierr);
+          ierr = DMPlexGetJoin(dm, Np+1, points, &joinSize, &join);CHKERRQ(ierr);
           if (joinSize == 1) {
-            if (debug) {ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d]    Found local face %D\n", rank, join[0]);CHKERRQ(ierr);}
-            ierr = PetscHMapISet(claimshash, join[0], faceInd);CHKERRQ(ierr);
+            if (claims[faceInd].rank == rank) {
+              if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Ignoring local face %D for non-remote partner\n", rank, join[0]);CHKERRQ(ierr);}
+            } else {
+              if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Found local face %D\n", rank, join[0]);CHKERRQ(ierr);}
+              ierr = PetscHMapISet(claimshash, join[0], faceInd);CHKERRQ(ierr);
+            }
+          } else {
+            if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    Failed to find face\n", rank);CHKERRQ(ierr);}
           }
-          ierr = DMPlexRestoreJoin(dm, numPoints+1, points, &joinSize, &join);CHKERRQ(ierr);
-        } else d += claims[offset+d].index+1;
+          ierr = DMPlexRestoreJoin(dm, Np+1, points, &joinSize, &join);CHKERRQ(ierr);
+        } else {
+          if (debug) {ierr = PetscSynchronizedPrintf(comm, "[%d]    No claim for point %D\n", rank, r);CHKERRQ(ierr);}
+          d += claims[off+d].index+1;
+        }
       }
     }
-    if (debug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), NULL);CHKERRQ(ierr);}
-    /* Create new pointSF from hashed claims */
-    ierr = PetscHMapIGetSize(claimshash, &numLocalNew);CHKERRQ(ierr);
+    if (debug) {ierr = PetscSynchronizedFlush(comm, NULL);CHKERRQ(ierr);}
+    /* Step 6) Create new pointSF from hashed claims */
+    ierr = PetscHMapIGetSize(claimshash, &NlNew);CHKERRQ(ierr);
     ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
-    ierr = PetscMalloc1(numLeaves + numLocalNew, &localPointsNew);CHKERRQ(ierr);
-    ierr = PetscMalloc1(numLeaves + numLocalNew, &remotePointsNew);CHKERRQ(ierr);
-    for (p = 0; p < numLeaves; ++p) {
-      localPointsNew[p] = localPoints[p];
-      remotePointsNew[p].index = remotePoints[p].index;
-      remotePointsNew[p].rank  = remotePoints[p].rank;
+    ierr = PetscMalloc1(Nl + NlNew, &localPointsNew);CHKERRQ(ierr);
+    ierr = PetscMalloc1(Nl + NlNew, &remotePointsNew);CHKERRQ(ierr);
+    for (l = 0; l < Nl; ++l) {
+      localPointsNew[l] = localPoints[l];
+      remotePointsNew[l].index = remotePoints[l].index;
+      remotePointsNew[l].rank  = remotePoints[l].rank;
     }
-    p = numLeaves;
+    p = Nl;
     ierr = PetscHMapIGetKeys(claimshash, &p, localPointsNew);CHKERRQ(ierr);
-    ierr = PetscSortInt(numLocalNew, &localPointsNew[numLeaves]);CHKERRQ(ierr);
-    for (p = numLeaves; p < numLeaves + numLocalNew; ++p) {
-      PetscInt offset;
-      ierr = PetscHMapIGet(claimshash, localPointsNew[p], &offset);CHKERRQ(ierr);
-      remotePointsNew[p] = claims[offset];
+    /* We sort new points, and assume they are numbered after all existing points */
+    ierr = PetscSortInt(NlNew, &localPointsNew[Nl]);CHKERRQ(ierr);
+    for (p = Nl; p < Nl + NlNew; ++p) {
+      PetscInt off;
+      ierr = PetscHMapIGet(claimshash, localPointsNew[p], &off);CHKERRQ(ierr);
+      if (claims[off].rank < 0 || claims[off].index < 0) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid claim for local point %D, (%D, %D)", localPointsNew[p], claims[off].rank, claims[off].index);
+      remotePointsNew[p] = claims[off];
     }
-    ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), &sfPointNew);CHKERRQ(ierr);
-    ierr = PetscSFSetGraph(sfPointNew, pEnd-pStart, numLeaves+numLocalNew, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
+    ierr = PetscSFCreate(comm, &sfPointNew);CHKERRQ(ierr);
+    ierr = PetscSFSetGraph(sfPointNew, pEnd-pStart, Nl+NlNew, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
+    ierr = PetscSFSetUp(sfPointNew);CHKERRQ(ierr);
     ierr = DMSetPointSF(dm, sfPointNew);CHKERRQ(ierr);
+    ierr = PetscObjectViewFromOptions((PetscObject) sfPointNew, NULL, "-petscsf_interp_view");CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sfPointNew);CHKERRQ(ierr);
     ierr = PetscHMapIDestroy(&claimshash);CHKERRQ(ierr);
   }
-  ierr = PetscHMapIDestroy(&leafhash);CHKERRQ(ierr);
-  ierr = PetscHMapIJDestroy(&roothash);CHKERRQ(ierr);
+  ierr = PetscHMapIJDestroy(&remoteHash);CHKERRQ(ierr);
   ierr = PetscSectionDestroy(&candidateSection);CHKERRQ(ierr);
-  ierr = PetscSectionDestroy(&candidateSectionRemote);CHKERRQ(ierr);
+  ierr = PetscSectionDestroy(&candidateRemoteSection);CHKERRQ(ierr);
   ierr = PetscSectionDestroy(&claimSection);CHKERRQ(ierr);
   ierr = PetscFree(candidates);CHKERRQ(ierr);
   ierr = PetscFree(candidatesRemote);CHKERRQ(ierr);
@@ -1252,6 +1455,9 @@ PetscErrorCode DMPlexInterpolatePointSF(DM dm, PetscSF pointSF)
   Notes:
     It does not copy over the coordinates.
 
+  Developer Notes:
+    It sets plex->interpolated = DMPLEX_INTERPOLATED_FULL.
+
 .seealso: DMPlexUninterpolate(), DMPlexCreateFromCellList(), DMPlexCopyCoordinates()
 @*/
 PetscErrorCode DMPlexInterpolate(DM dm, DM *dmInt)
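A minimal usage sketch (illustrative; dm is assumed to be an existing uninterpolated DMPlex, and the coordinates are carried over by hand since, per the Notes above, this routine does not copy them):

    DM idm;

    ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
    ierr = DMPlexCopyCoordinates(dm, idm);CHKERRQ(ierr);
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = idm;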
@@ -1284,7 +1490,12 @@ PetscErrorCode DMPlexInterpolate(DM dm, DM *dmInt)
       if (depth > 0) {
         ierr = DMPlexInterpolateFaces_Internal(odm, 1, idm);CHKERRQ(ierr);
         ierr = DMGetPointSF(odm, &sfPoint);CHKERRQ(ierr);
-        ierr = DMPlexInterpolatePointSF(idm, sfPoint);CHKERRQ(ierr);
+        {
+          /* TODO: We need to systematically fix cases of distributed Plexes with no graph set */
+          PetscInt nroots;
+          ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
+          if (nroots >= 0) {ierr = DMPlexInterpolatePointSF(idm, sfPoint);CHKERRQ(ierr);}
+        }
       }
       if (odm != dm) {ierr = DMDestroy(&odm);CHKERRQ(ierr);}
       odm = idm;
@@ -1306,7 +1517,7 @@ PetscErrorCode DMPlexInterpolate(DM dm, DM *dmInt)
   }
   /* This function makes the mesh fully interpolated on all ranks */
   {
-    DM_Plex *plex = (DM_Plex *) dm->data;
+    DM_Plex *plex = (DM_Plex *) idm->data;
     plex->interpolated = plex->interpolatedCollective = DMPLEX_INTERPOLATED_FULL;
   }
   *dmInt = idm;
@@ -1338,7 +1549,7 @@ PetscErrorCode DMPlexCopyCoordinates(DM dmA, DM dmB)
   PetscSection   coordSectionA, coordSectionB;
   PetscScalar   *coordsA, *coordsB;
   PetscInt       spaceDim, Nf, vStartA, vStartB, vEndA, vEndB, coordSizeB, v, d;
-  PetscInt       cStartA, cEndA, cStartB, cEndB, cS, cE;
+  PetscInt       cStartA, cEndA, cStartB, cEndB, cS, cE, cdim;
   PetscBool      lc = PETSC_FALSE;
   PetscErrorCode ierr;
 
@@ -1346,6 +1557,8 @@ PetscErrorCode DMPlexCopyCoordinates(DM dmA, DM dmB)
   PetscValidHeaderSpecific(dmA, DM_CLASSID, 1);
   PetscValidHeaderSpecific(dmB, DM_CLASSID, 2);
   if (dmA == dmB) PetscFunctionReturn(0);
+  ierr = DMGetCoordinateDim(dmA, &cdim);CHKERRQ(ierr);
+  ierr = DMSetCoordinateDim(dmB, cdim);CHKERRQ(ierr);
   ierr = DMPlexGetDepthStratum(dmA, 0, &vStartA, &vEndA);CHKERRQ(ierr);
   ierr = DMPlexGetDepthStratum(dmB, 0, &vStartB, &vEndB);CHKERRQ(ierr);
   if ((vEndA-vStartA) != (vEndB-vStartB)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "The number of vertices in first DM %d != %d in the second DM", vEndA-vStartA, vEndB-vStartB);
@@ -1450,6 +1663,9 @@ PetscErrorCode DMPlexCopyCoordinates(DM dmA, DM dmB)
   Notes:
     It does not copy over the coordinates.
 
+  Developer Notes:
+    It sets plex->interpolated = DMPLEX_INTERPOLATED_NONE.
+
 .seealso: DMPlexInterpolate(), DMPlexCreateFromCellList(), DMPlexCopyCoordinates()
 @*/
 PetscErrorCode DMPlexUninterpolate(DM dm, DM *dmUnint)
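A minimal usage sketch (illustrative; as for DMPlexInterpolate(), the coordinates must be copied by hand):

    DM udm;

    ierr = DMPlexUninterpolate(dm, &udm);CHKERRQ(ierr);
    ierr = DMPlexCopyCoordinates(dm, udm);CHKERRQ(ierr);
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = udm;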
@@ -1553,7 +1769,7 @@ PetscErrorCode DMPlexUninterpolate(DM dm, DM *dmUnint)
   }
   /* This function makes the mesh fully uninterpolated on all ranks */
   {
-    DM_Plex *plex = (DM_Plex *) dm->data;
+    DM_Plex *plex = (DM_Plex *) udm->data;
     plex->interpolated = plex->interpolatedCollective = DMPLEX_INTERPOLATED_NONE;
   }
   *dmUnint = udm;
@@ -1606,7 +1822,7 @@ static PetscErrorCode DMPlexIsInterpolated_Internal(DM dm, DMPlexInterpolatedFla
 }
 
 /*@
-  DMPlexIsInterpolated - Find out whether this DM is interpolated, i.e. number of strata is equal to dimension.
+  DMPlexIsInterpolated - Find out to what extent the DMPlex is topologically interpolated.
 
   Not Collective
 
@@ -1619,10 +1835,24 @@ static PetscErrorCode DMPlexIsInterpolated_Internal(DM dm, DMPlexInterpolatedFla
   Level: intermediate
 
   Notes:
-  This is NOT collective so the results can be different on different ranks in special cases.
+  Unlike DMPlexIsInterpolatedCollective(), this is NOT collective
+  so the results can be different on different ranks in special cases.
   However, DMPlexInterpolate() guarantees the result is the same on all.
+
   Unlike DMPlexIsInterpolatedCollective(), this cannot return DMPLEX_INTERPOLATED_MIXED.
 
+  Developer Notes:
+  Initially, plex->interpolated = DMPLEX_INTERPOLATED_INVALID.
+
+  If plex->interpolated == DMPLEX_INTERPOLATED_INVALID, DMPlexIsInterpolated_Internal() is called.
+  It checks the actual topology and sets plex->interpolated on each rank separately to one of
+  DMPLEX_INTERPOLATED_NONE, DMPLEX_INTERPOLATED_PARTIAL or DMPLEX_INTERPOLATED_FULL.
+
+  If plex->interpolated != DMPLEX_INTERPOLATED_INVALID, this function just returns plex->interpolated.
+
+  DMPlexInterpolate() sets plex->interpolated = DMPLEX_INTERPOLATED_FULL,
+  and DMPlexUninterpolate() sets plex->interpolated = DMPLEX_INTERPOLATED_NONE.
+
 .seealso: DMPlexInterpolate(), DMPlexIsInterpolatedCollective()
 @*/
 PetscErrorCode DMPlexIsInterpolated(DM dm, DMPlexInterpolatedFlag *interpolated)
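A minimal, rank-local query sketch (illustrative only; it assumes the string array DMPlexInterpolatedFlags[], used in the error message below, is visible in the translation unit):

    DMPlexInterpolatedFlag interp;

    ierr = DMPlexIsInterpolated(dm, &interp);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, "Rank-local interpolation state: %s\n", DMPlexInterpolatedFlags[interp]);CHKERRQ(ierr);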
@@ -1640,7 +1870,7 @@ PetscErrorCode DMPlexIsInterpolated(DM dm, DMPlexInterpolatedFlag *interpolated)
     DMPlexInterpolatedFlag flg;
 
     ierr = DMPlexIsInterpolated_Internal(dm, &flg);CHKERRQ(ierr);
-    if (flg != plex->interpolated) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "stashed DMPlexInterpolatedFlag is inconsistent");
+    if (flg != plex->interpolated) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Stashed DMPlexInterpolatedFlag %s is inconsistent with current %s", DMPlexInterpolatedFlags[plex->interpolated], DMPlexInterpolatedFlags[flg]);
 #endif
   }
   *interpolated = plex->interpolated;
@@ -1648,7 +1878,7 @@ PetscErrorCode DMPlexIsInterpolated(DM dm, DMPlexInterpolatedFlag *interpolated)
 }
 
 /*@
-  DMPlexIsInterpolatedCollective - Find out whether this DM is interpolated, i.e. number of strata is equal to dimension.
+  DMPlexIsInterpolatedCollective - Find out to what extent the DMPlex is topologically interpolated (in a collective manner).
 
   Collective
 
@@ -1661,8 +1891,19 @@ PetscErrorCode DMPlexIsInterpolated(DM dm, DMPlexInterpolatedFlag *interpolated)
   Level: intermediate
 
   Notes:
-  This is collective so the results are always guaranteed to be the same on all ranks.
-  Unlike DMPlexIsInterpolated(), this will return DMPLEX_INTERPOLATED_MIXED if the results of DMPlexIsInterpolated() are different on different ranks.
+  Unlike DMPlexIsInterpolated(), this is collective so the results are guaranteed to be the same on all ranks.
+
+  This function will return DMPLEX_INTERPOLATED_MIXED if the results of DMPlexIsInterpolated() are different on different ranks.
+
+  Developer Notes:
+  Initially, plex->interpolatedCollective = DMPLEX_INTERPOLATED_INVALID.
+
+  If plex->interpolatedCollective == DMPLEX_INTERPOLATED_INVALID, this function calls DMPlexIsInterpolated(), which sets plex->interpolated.
+  MPI_Allreduce() is then called and the collectively consistent flag plex->interpolatedCollective is set and returned:
+  if plex->interpolated varies across ranks, plex->interpolatedCollective is set to DMPLEX_INTERPOLATED_MIXED;
+  otherwise it is set to plex->interpolated.
+
+  If plex->interpolatedCollective != DMPLEX_INTERPOLATED_INVALID, this function just returns plex->interpolatedCollective.
 
 .seealso: DMPlexInterpolate(), DMPlexIsInterpolated()
 @*/
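A sketch of the collective pattern the notes above describe (illustrative only): because the result is guaranteed to be identical on all ranks, it is safe to branch on it before calling collective routines.

    DMPlexInterpolatedFlag interp;

    ierr = DMPlexIsInterpolatedCollective(dm, &interp);CHKERRQ(ierr);
    if (interp != DMPLEX_INTERPOLATED_FULL) { /* every rank takes the same branch */
      DM idm;

      ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = idm;
    }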
diff --git a/src/dm/impls/plex/plexnatural.c b/src/dm/impls/plex/plexnatural.c
index 051e113a3be..84d8b8ff60e 100644
--- a/src/dm/impls/plex/plexnatural.c
+++ b/src/dm/impls/plex/plexnatural.c
@@ -198,17 +198,22 @@ PetscErrorCode DMPlexGlobalToNaturalBegin(DM dm, Vec gv, Vec nv)
 {
   const PetscScalar *inarray;
   PetscScalar       *outarray;
+  PetscMPIInt        size;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
   ierr = PetscLogEventBegin(DMPLEX_GlobalToNaturalBegin,dm,0,0,0);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
   if (dm->sfNatural) {
     ierr = VecGetArray(nv, &outarray);CHKERRQ(ierr);
     ierr = VecGetArrayRead(gv, &inarray);CHKERRQ(ierr);
     ierr = PetscSFBcastBegin(dm->sfNatural, MPIU_SCALAR, (PetscScalar *) inarray, outarray);CHKERRQ(ierr);
     ierr = VecRestoreArrayRead(gv, &inarray);CHKERRQ(ierr);
     ierr = VecRestoreArray(nv, &outarray);CHKERRQ(ierr);
-  } else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
+  } else if (size == 1) {
+    ierr = VecCopy(nv, gv);CHKERRQ(ierr);
+  } else if (dm->useNatural) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "DM global to natural SF not present.\nIf DMPlexDistribute() was called, report to petsc-maint@mcs.anl.gov.\n");
+  else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
   ierr = PetscLogEventEnd(DMPLEX_GlobalToNaturalBegin,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -235,17 +240,21 @@ PetscErrorCode DMPlexGlobalToNaturalEnd(DM dm, Vec gv, Vec nv)
 {
   const PetscScalar *inarray;
   PetscScalar       *outarray;
+  PetscMPIInt        size;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
   ierr = PetscLogEventBegin(DMPLEX_GlobalToNaturalEnd,dm,0,0,0);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
   if (dm->sfNatural) {
     ierr = VecGetArrayRead(gv, &inarray);CHKERRQ(ierr);
-    ierr = VecGetArray(nv, &outarray);CHKERRQ(ierr);CHKERRQ(ierr);
+    ierr = VecGetArray(nv, &outarray);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(dm->sfNatural, MPIU_SCALAR, (PetscScalar *) inarray, outarray);CHKERRQ(ierr);
     ierr = VecRestoreArrayRead(gv, &inarray);CHKERRQ(ierr);
     ierr = VecRestoreArray(nv, &outarray);CHKERRQ(ierr);
-  }
+  } else if (size == 1) {
+  } else if (dm->useNatural) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "DM global to natural SF not present.\nIf DMPlexDistribute() was called, report to petsc-maint@mcs.anl.gov.\n");
+  else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
   ierr = PetscLogEventEnd(DMPLEX_GlobalToNaturalEnd,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -272,10 +281,12 @@ PetscErrorCode DMPlexNaturalToGlobalBegin(DM dm, Vec nv, Vec gv)
 {
   const PetscScalar *inarray;
   PetscScalar       *outarray;
+  PetscMPIInt        size;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
   ierr = PetscLogEventBegin(DMPLEX_NaturalToGlobalBegin,dm,0,0,0);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
   if (dm->sfNatural) {
     /* We only have access to the SF that goes from Global to Natural.
        Instead of inverting dm->sfNatural, we can call PetscSFReduceBegin/End with MPI_Op MPI_SUM.
@@ -286,7 +297,10 @@ PetscErrorCode DMPlexNaturalToGlobalBegin(DM dm, Vec nv, Vec gv)
     ierr = PetscSFReduceBegin(dm->sfNatural, MPIU_SCALAR, (PetscScalar *) inarray, outarray, MPI_SUM);CHKERRQ(ierr);
     ierr = VecRestoreArrayRead(nv, &inarray);CHKERRQ(ierr);
     ierr = VecRestoreArray(gv, &outarray);CHKERRQ(ierr);
-  } else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
+  } else if (size == 1) {
+    ierr = VecCopy(nv, gv);CHKERRQ(ierr);
+  } else if (dm->useNatural) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "DM global to natural SF not present.\nIf DMPlexDistribute() was called, report to petsc-maint@mcs.anl.gov.\n");
+  else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
   ierr = PetscLogEventEnd(DMPLEX_NaturalToGlobalBegin,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -313,17 +327,21 @@ PetscErrorCode DMPlexNaturalToGlobalEnd(DM dm, Vec nv, Vec gv)
 {
   const PetscScalar *inarray;
   PetscScalar       *outarray;
+  PetscMPIInt        size;
   PetscErrorCode     ierr;
 
   PetscFunctionBegin;
   ierr = PetscLogEventBegin(DMPLEX_NaturalToGlobalEnd,dm,0,0,0);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
   if (dm->sfNatural) {
     ierr = VecGetArrayRead(nv, &inarray);CHKERRQ(ierr);
-    ierr = VecGetArray(gv, &outarray);CHKERRQ(ierr);CHKERRQ(ierr);
+    ierr = VecGetArray(gv, &outarray);CHKERRQ(ierr);
     ierr = PetscSFReduceEnd(dm->sfNatural, MPIU_SCALAR, (PetscScalar *) inarray, outarray, MPI_SUM);CHKERRQ(ierr);
     ierr = VecRestoreArrayRead(nv, &inarray);CHKERRQ(ierr);
     ierr = VecRestoreArray(gv, &outarray);CHKERRQ(ierr);
-  }
+  } else if (size == 1) {
+  } else if (dm->useNatural) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "DM global to natural SF not present.\nIf DMPlexDistribute() was called, report to petsc-maint@mcs.anl.gov.\n");
+  else SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM global to natural SF was not created.\nYou must call DMSetUseNatural() before DMPlexDistribute().\n");
   ierr = PetscLogEventEnd(DMPLEX_NaturalToGlobalEnd,dm,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
diff --git a/src/dm/impls/plex/plexpartition.c b/src/dm/impls/plex/plexpartition.c
index ce247ae82dc..69505942a8a 100644
--- a/src/dm/impls/plex/plexpartition.c
+++ b/src/dm/impls/plex/plexpartition.c
@@ -27,6 +27,20 @@ const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                                "  pages   = {71--85},\n"
                                "  year    = {1998}\n}\n";
 
+PetscBool PTScotchPartitionercite = PETSC_FALSE;
+const char PTScotchPartitionerCitation[] =
+  "@article{PTSCOTCH,\n"
+  "  author  = {C. Chevalier and F. Pellegrini},\n"
+  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
+  "  journal = {Parallel Computing},\n"
+  "  volume  = {34},\n"
+  "  number  = {6},\n"
+  "  pages   = {318--331},\n"
+  "  year    = {2008},\n"
+  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
+  "}\n";
+
+
 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
 
 static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
@@ -72,7 +86,7 @@ static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height
   /* Always use FVM adjacency to create partitioner graph */
   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
   ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
-  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
   if (globalNumbering) {
     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
     *globalNumbering = cellNumbering;
@@ -240,7 +254,7 @@ static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height
   PetscSF        sfPoint;
   const PetscInt *rows, *cols, *ii, *jj;
   PetscInt       *idxs,*idxs2;
-  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
+  PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
   PetscMPIInt    rank;
   PetscBool      flg;
   PetscErrorCode ierr;
@@ -269,8 +283,8 @@ static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height
   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
   ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
-  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
-  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
+  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
   if (globalNumbering) {
     ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
   }
@@ -316,7 +330,8 @@ static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height
   ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
   ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
   ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
-  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
+  ierr = DMPlexGetMaxSizes(dm, NULL, &lm);CHKERRQ(ierr);
+  ierr = MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));CHKERRQ(ierr);
   ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);
 
   /* Assemble matrix */
@@ -434,7 +449,7 @@ PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *nu
 /*@C
   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
 
-  Collective
+  Collective on DM
 
   Input Arguments:
 + dm - The DMPlex
@@ -674,7 +689,7 @@ PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*fun
 /*@C
   PetscPartitionerSetType - Builds a particular PetscPartitioner
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameters:
 + part - The PetscPartitioner object
@@ -698,7 +713,6 @@ PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerTy
   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
   if (match) PetscFunctionReturn(0);
 
-  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
 
@@ -729,20 +743,40 @@ PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerTy
 @*/
 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
 {
-  PetscErrorCode ierr;
-
   PetscFunctionBegin;
   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
   PetscValidPointer(name, 2);
-  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
   *name = ((PetscObject) part)->type_name;
   PetscFunctionReturn(0);
 }
 
+/*@C
+   PetscPartitionerViewFromOptions - View from Options
+
+   Collective on PetscPartitioner
+
+   Input Parameters:
++  A - the PetscPartitioner object
+.  obj - Optional object
+-  name - command line option
+
+   Level: intermediate
+.seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
+@*/
+PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
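For context, a one-line usage sketch of the new wrapper; the option name here is hypothetical.

/* Hedged sketch: view the partitioner when the given option is present in the database */
ierr = PetscPartitionerViewFromOptions(part, NULL, "-my_partitioner_view");CHKERRQ(ierr);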
 /*@C
   PetscPartitionerView - Views a PetscPartitioner
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameter:
 + part - the PetscPartitioner object to view
@@ -766,10 +800,9 @@ PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
-    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
-    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
-    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
-    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
+    ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
   }
   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
@@ -797,14 +830,19 @@ static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, co
 /*@
   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameter:
 . part - the PetscPartitioner object to set options for
 
+  Options Database Keys:
++  -petscpartitioner_type  - Sets the PetscPartitioner type; use -help for a list of available types
+.  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
+-  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()
+
   Level: developer
 
-.seealso: PetscPartitionerView()
+.seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
 @*/
 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
 {
@@ -815,7 +853,6 @@ PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
-  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
@@ -824,11 +861,14 @@ PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
   } else if (!((PetscObject) part)->type_name) {
     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
   }
+  ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
   if (part->ops->setfromoptions) {
     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
   }
+  ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
-  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
+  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
   /* process any options handlers added with PetscObjectAddOptionsHandler() */
   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();CHKERRQ(ierr);
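A small hedged sketch of driving the keys documented above programmatically instead of from the command line; the values are chosen only for illustration.

/* Hedged sketch: equivalent to passing the options on the command line */
ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_use_vertex_weights", "false");CHKERRQ(ierr);
ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_view", "ascii");CHKERRQ(ierr);
ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);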
@@ -838,7 +878,7 @@ PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
 /*@C
   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameter:
 . part - the PetscPartitioner object to setup
@@ -860,7 +900,7 @@ PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
 /*@
   PetscPartitionerDestroy - Destroys a PetscPartitioner object
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameter:
 . part - the PetscPartitioner object to destroy
@@ -880,12 +920,113 @@ PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
   ((PetscObject) (*part))->refct = 0;
 
+  ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
+/*@
+  PetscPartitionerPartition - Partition a graph
+
+  Collective on PetscPartitioner
+
+  Input Parameters:
++ part    - The PetscPartitioner
+. nparts  - Number of partitions
+. numVertices - Number of vertices in the local part of the graph
+. start - row pointers for the local part of the graph (CSR style)
+. adjacency - adjacency list (CSR style)
+. vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
+- targetSection - PetscSection describing the absolute weight of each partition (can be NULL)
+
+  Output Parameters:
++ partSection     - The PetscSection giving the division of points by partition
+- partition       - The list of points by partition
+
+  Options Database:
++ -petscpartitioner_view - View the partitioner information
+- -petscpartitioner_view_graph - View the graph we are partitioning
+
+  Notes:
+    The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
+    The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same on all processes; PETSc does not check it.
+
+  Level: developer
+
+.seealso: PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
+@*/
+PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
+  PetscValidLogicalCollectiveInt(part, nparts, 2);
+  if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
+  if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
+  if (numVertices && !part->noGraph) {
+    PetscValidIntPointer(start, 4);
+    PetscValidIntPointer(start + numVertices, 4);
+    if (start[numVertices]) PetscValidIntPointer(adjacency, 5);
+  }
+  if (vertexSection) {
+    PetscInt s,e;
+
+    PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
+    ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
+    if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
+  }
+  if (targetSection) {
+    PetscInt s,e;
+
+    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
+    ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
+    if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
+  }
+  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
+  PetscValidPointer(partition, 9);
+
+  ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
+  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
+  if (nparts == 1) { /* quick */
+    ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
+    ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
+  } else {
+    if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
+    ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
+  }
+  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
+  if (part->viewerGraph) {
+    PetscViewer viewer = part->viewerGraph;
+    PetscBool   isascii;
+    PetscInt    v, i;
+    PetscMPIInt rank;
+
+    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
+    ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
+    if (isascii) {
+      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
+      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
+      for (v = 0; v < numVertices; ++v) {
+        const PetscInt s = start[v];
+        const PetscInt e = start[v+1];
+
+        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
+        for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
+        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
+      }
+      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
+      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
+    }
+  }
+  if (part->viewer) {
+    ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
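A hedged usage sketch of the new graph-level entry point documented above, using a tiny 4-vertex chain graph in CSR form and the simple partitioner; all names are illustrative and vertex/target weight sections are omitted (NULL).

/* Hedged sketch: partition a 4-vertex chain 0-1-2-3 into 2 parts on one process */
PetscPartitioner part;
PetscSection     partSection;
IS               partition;
PetscInt         start[]     = {0, 1, 3, 5, 6};    /* row pointers, length numVertices+1 */
PetscInt         adjacency[] = {1, 0, 2, 1, 3, 2}; /* neighbors of vertices 0,1,2,3 */
PetscErrorCode   ierr;

ierr = PetscPartitionerCreate(PETSC_COMM_SELF, &part);CHKERRQ(ierr);
ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
ierr = PetscSectionCreate(PETSC_COMM_SELF, &partSection);CHKERRQ(ierr);
ierr = PetscPartitionerPartition(part, 2, 4, start, adjacency, NULL, NULL, partSection, &partition);CHKERRQ(ierr);
/* partSection now gives the number of vertices per part; partition lists them grouped by part */
ierr = ISDestroy(&partition);CHKERRQ(ierr);
ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);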
 /*@
   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
 
@@ -918,49 +1059,56 @@ PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
 
   p->edgeCut = 0;
   p->balance = 0.0;
+  p->usevwgt = PETSC_TRUE;
 
   *part = p;
   PetscFunctionReturn(0);
 }
 
 /*@
-  PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
+  PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh
 
-  Collective on dm
+  Collective on PetscPartitioner
 
   Input Parameters:
 + part    - The PetscPartitioner
+. targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)
 - dm      - The mesh DM
 
   Output Parameters:
 + partSection     - The PetscSection giving the division of points by partition
 - partition       - The list of points by partition
 
-  Options Database:
-. -petscpartitioner_view_graph - View the graph we are partitioning
-
-  Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
+  Notes:
+    If the DM has an associated local section, each point to be partitioned will be weighted by the total number of dofs identified
+    by the section in the transitive closure of the point.
 
   Level: developer
 
-.seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
+.seealso: DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
 @*/
-PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
+PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
   PetscMPIInt    size;
+  PetscBool      isplex;
   PetscErrorCode ierr;
+  PetscSection   vertSection = NULL;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
+  if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
   PetscValidPointer(partition, 5);
+  ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
+  if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
   if (size == 1) {
     PetscInt *points;
     PetscInt  cStart, cEnd, c;
 
     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
+    ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
@@ -982,36 +1130,65 @@ PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSect
       PetscInt       p, pStart, pEnd;
 
       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
-      ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
+      ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
     }
-    if (part->viewGraph) {
-      PetscViewer viewer = part->viewerGraph;
-      PetscBool   isascii;
-      PetscInt    v, i;
-      PetscMPIInt rank;
-
-      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
-      ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
-      if (isascii) {
-        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
-        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
-        for (v = 0; v < numVertices; ++v) {
-          const PetscInt s = start[v];
-          const PetscInt e = start[v+1];
-
-          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
-          for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
-          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
+    if (part->usevwgt) {
+      PetscSection   section = dm->localSection, clSection = NULL;
+      IS             clPoints = NULL;
+      const PetscInt *gid,*clIdx;
+      PetscInt       v, p, pStart, pEnd;
+
+      /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
+      /* We do this only if the local section has been set */
+      if (section) {
+        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
+        if (!clSection) {
+          ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
         }
-        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
-        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
+        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
+        ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
       }
+      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
+      ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
+      ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
+      if (globalNumbering) {
+        ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
+      } else gid = NULL;
+      for (p = pStart, v = 0; p < pEnd; ++p) {
+        PetscInt dof = 1;
+
+        /* skip cells in the overlap */
+        if (gid && gid[p-pStart] < 0) continue;
+
+        if (section) {
+          PetscInt cl, clSize, clOff;
+
+          dof  = 0;
+          ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
+          ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
+          for (cl = 0; cl < clSize; cl+=2) {
+            PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */
+
+            ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
+            dof += clDof;
+          }
+        }
+        if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
+        ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
+        v++;
+      }
+      if (globalNumbering) {
+        ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
+      }
+      if (clPoints) {
+        ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
+      }
+      ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
     }
-    if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
-    ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
+    ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
     ierr = PetscFree(start);CHKERRQ(ierr);
     ierr = PetscFree(adjacency);CHKERRQ(ierr);
     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
@@ -1042,7 +1219,7 @@ PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSect
       *partition = newPartition;
     }
   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
-  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
+  ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
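A hedged sketch of the renamed DM-level entry point, assuming an existing DMPlex dm whose local section (if set) supplies the per-cell closure dofs used as weights, as described in the Notes above. The remaining names are illustrative.

/* Hedged sketch: targetSection is NULL, so all parts get equal target weight */
PetscPartitioner part;
PetscSection     partSection;
IS               partition;

ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &partSection);CHKERRQ(ierr);
ierr = PetscPartitionerDMPlexPartition(part, dm, NULL, partSection, &partition);CHKERRQ(ierr);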
 
@@ -1097,7 +1274,7 @@ static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *Pet
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
   PetscInt                np;
@@ -1109,7 +1286,7 @@ static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM
     PetscInt   *sizes, *points, v, p;
     PetscMPIInt rank;
 
-    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
+    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
@@ -1131,7 +1308,7 @@ static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM
     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
   }
-  if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
+  if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
@@ -1158,6 +1335,9 @@ static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
 
   Level: intermediate
 
+  Options Database Keys:
+.  -petscpartitioner_shell_random - Use a random partition
+
 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
 M*/
 
@@ -1179,18 +1359,17 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
 /*@C
   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameters:
 + part   - The PetscPartitioner
 . size   - The number of partitions
-. sizes  - array of size size (or NULL) providing the number of points in each partition
-- points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
+. sizes  - array of length size (or NULL) providing the number of points in each partition
+- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
 
   Level: developer
 
   Notes:
-
     It is safe to free the sizes and points arrays after use in this routine.
 
 .seealso DMPlexDistribute(), PetscPartitionerCreate()
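A hedged sketch of prescribing an explicit 2-way split of 6 points with the shell partitioner; the arrays can be freed afterwards, as the Notes above state, and the variable names are illustrative.

/* Hedged sketch: partition 0 receives points 0..3, partition 1 receives points 4..5 */
PetscInt sizes[2]  = {4, 2};
PetscInt points[6] = {0, 1, 2, 3, 4, 5};

ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
ierr = PetscPartitionerShellSetPartition(part, 2, sizes, points);CHKERRQ(ierr);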
@@ -1223,7 +1402,7 @@ PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt
 /*@
   PetscPartitionerShellSetRandom - Set the flag to use a random partition
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameters:
 + part   - The PetscPartitioner
@@ -1246,7 +1425,7 @@ PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool r
 /*@
   PetscPartitionerShellGetRandom - get the flag to use a random partition
 
-  Collective on part
+  Collective on PetscPartitioner
 
   Input Parameter:
 . part   - The PetscPartitioner
@@ -1298,73 +1477,128 @@ static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscVi
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
   MPI_Comm       comm;
-  PetscInt       np;
+  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
   PetscMPIInt    size;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
   comm = PetscObjectComm((PetscObject)part);
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
-  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
+  if (targetSection) {
+    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
+    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
+    for (np = 0; np < nparts; ++np) {
+      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
+      sumw += tpwgts[np];
+    }
+    if (!sumw) {
+      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
+    } else {
+      PetscInt m,mp;
+      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
+      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
+        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
+        sumw += tpwgts[np];
+      }
+      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
+    }
+  }
+
   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
   if (size == 1) {
-    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
+    if (tpwgts) {
+      for (np = 0; np < nparts; ++np) {
+        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
+      }
+    } else {
+      for (np = 0; np < nparts; ++np) {
+        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
+      }
+    }
   } else {
-    PetscMPIInt rank;
-    PetscInt nvGlobal, *offsets, myFirst, myLast;
-
-    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
-    offsets[0] = 0;
-    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
-    for (np = 2; np <= size; np++) {
-      offsets[np] += offsets[np-1];
-    }
-    nvGlobal = offsets[size];
-    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
-    myFirst = offsets[rank];
-    myLast  = offsets[rank + 1] - 1;
-    ierr = PetscFree(offsets);CHKERRQ(ierr);
-    if (numVertices) {
-      PetscInt firstPart = 0, firstLargePart = 0;
-      PetscInt lastPart = 0, lastLargePart = 0;
-      PetscInt rem = nvGlobal % nparts;
-      PetscInt pSmall = nvGlobal/nparts;
-      PetscInt pBig = nvGlobal/nparts + 1;
-
-
-      if (rem) {
-        firstLargePart = myFirst / pBig;
-        lastLargePart  = myLast  / pBig;
-
-        if (firstLargePart < rem) {
-          firstPart = firstLargePart;
-        } else {
-          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
+    if (tpwgts) {
+      Vec         v;
+      PetscScalar *array;
+      PetscInt    st,j;
+      PetscMPIInt rank;
+
+      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
+      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
+      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
+      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
+      for (np = 0,st = 0; np < nparts; ++np) {
+        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
+          for (j = 0; j < tpwgts[np]; j++) {
+            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
+          }
         }
-        if (lastLargePart < rem) {
-          lastPart = lastLargePart;
+        st += tpwgts[np];
+      }
+      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
+      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
+      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
+      for (j = 0; j < numVertices; ++j) {
+        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
+      }
+      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
+      ierr = VecDestroy(&v);CHKERRQ(ierr);
+    } else {
+      PetscMPIInt rank;
+      PetscInt nvGlobal, *offsets, myFirst, myLast;
+
+      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
+      offsets[0] = 0;
+      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
+      for (np = 2; np <= size; np++) {
+        offsets[np] += offsets[np-1];
+      }
+      nvGlobal = offsets[size];
+      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
+      myFirst = offsets[rank];
+      myLast  = offsets[rank + 1] - 1;
+      ierr = PetscFree(offsets);CHKERRQ(ierr);
+      if (numVertices) {
+        PetscInt firstPart = 0, firstLargePart = 0;
+        PetscInt lastPart = 0, lastLargePart = 0;
+        PetscInt rem = nvGlobal % nparts;
+        PetscInt pSmall = nvGlobal/nparts;
+        PetscInt pBig = nvGlobal/nparts + 1;
+
+        if (rem) {
+          firstLargePart = myFirst / pBig;
+          lastLargePart  = myLast  / pBig;
+
+          if (firstLargePart < rem) {
+            firstPart = firstLargePart;
+          } else {
+            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
+          }
+          if (lastLargePart < rem) {
+            lastPart = lastLargePart;
+          } else {
+            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
+          }
         } else {
-          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
+          firstPart = myFirst / (nvGlobal/nparts);
+          lastPart  = myLast  / (nvGlobal/nparts);
         }
-      } else {
-        firstPart = myFirst / (nvGlobal/nparts);
-        lastPart  = myLast  / (nvGlobal/nparts);
-      }
 
-      for (np = firstPart; np <= lastPart; np++) {
-        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
-        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
+        for (np = firstPart; np <= lastPart; np++) {
+          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
+          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
 
-        PartStart = PetscMax(PartStart,myFirst);
-        PartEnd   = PetscMin(PartEnd,myLast+1);
-        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
+          PartStart = PetscMax(PartStart,myFirst);
+          PartEnd   = PetscMin(PartEnd,myLast+1);
+          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
+        }
       }
     }
   }
-  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
+  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
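As a worked example of the target-weight rescaling above (numbers chosen only for illustration): with numVerticesGlobal = 10 and target dofs {1, 3}, sumw is 4 and the scaled sizes after integer division are {10*1/4, 10*3/4} = {2, 7}; their sum is 9 rather than 10, so the largest entry (part 1) absorbs the remainder, giving final part sizes {2, 8}.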
 
@@ -1429,17 +1663,15 @@ static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscVi
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
   PetscInt       np;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
-  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -1522,7 +1754,7 @@ PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
 extern int FREE_GRAPH;
 #endif
 
-static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
 #if defined(PETSC_HAVE_CHACO)
   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
@@ -1555,7 +1787,7 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
+  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
 #if defined (PETSC_USE_DEBUG)
   {
     int ival,isum;
@@ -1567,9 +1799,7 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM
     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
   }
 #endif
-  if (!numVertices) {
-    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
-    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
+  if (!numVertices) { /* distributed case, return if not holding the graph */
     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
     PetscFunctionReturn(0);
   }
@@ -1578,7 +1808,7 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM
 
   if (global_method == INERTIAL_METHOD) {
     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
-    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
+    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
   }
   mesh_dims[0] = nparts;
   mesh_dims[1] = 1;
@@ -1590,12 +1820,13 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM
   {
     int piperet;
     piperet = pipe(fd_pipe);
-    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
+    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
     fd_stdout = dup(1);
     close(1);
     dup2(fd_pipe[1], 1);
   }
 #endif
+  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                    vmax, ndims, eigtol, seed);
@@ -1613,24 +1844,22 @@ static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM
     close(fd_stdout);
     close(fd_pipe[0]);
     close(fd_pipe[1]);
-    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
+    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
   }
 #else
-  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
+  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
 #endif
   /* Convert to PetscSection+IS */
-  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
   for (v = 0; v < nvtxs; ++v) {
     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
   }
-  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
   for (p = 0, i = 0; p < nparts; ++p) {
     for (v = 0; v < nvtxs; ++v) {
       if (assignment[v] == p) points[i++] = v;
     }
   }
-  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
+  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
   if (global_method == INERTIAL_METHOD) {
     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
@@ -1684,6 +1913,7 @@ static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
   PetscErrorCode             ierr;
 
   PetscFunctionBegin;
+  ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
   ierr = PetscFree(p);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -1735,12 +1965,11 @@ static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *
 #include <parmetis.h>
 #endif
 
-static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
 #if defined(PETSC_HAVE_PARMETIS)
   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
   MPI_Comm       comm;
-  PetscSection   section;
   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
@@ -1754,9 +1983,9 @@ static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part,
   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
   PetscInt       options[64];               /* Options */
-  /* Outputs */
   PetscInt       v, i, *assignment, *points;
-  PetscMPIInt    size, rank, p;
+  PetscMPIInt    p, size, rank;
+  PetscBool      hasempty = PETSC_FALSE;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
@@ -1764,76 +1993,111 @@ static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part,
   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   /* Calculate vertex distribution */
-  ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
+  ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
   vtxdist[0] = 0;
   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
   for (p = 2; p <= size; ++p) {
+    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
     vtxdist[p] += vtxdist[p-1];
   }
+  /* null graph */
+  if (vtxdist[size] == 0) {
+    ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
+    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
+    PetscFunctionReturn(0);
+  }
   /* Calculate partition weights */
-  for (p = 0; p < nparts; ++p) {
-    tpwgts[p] = 1.0/nparts;
+  if (targetSection) {
+    PetscInt p;
+    real_t   sumt = 0.0;
+
+    for (p = 0; p < nparts; ++p) {
+      PetscInt tpd;
+
+      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
+      sumt += tpd;
+      tpwgts[p] = tpd;
+    }
+    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
+      for (p = 0, sumt = 0.0; p < nparts; ++p) {
+        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
+        sumt += tpwgts[p];
+      }
+      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
+      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
+      tpwgts[nparts - 1] = 1. - sumt;
+    }
+  } else {
+    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
   }
   ubvec[0] = pm->imbalanceRatio;
-  /* Weight cells by dofs on cell by default */
-  ierr = DMGetLocalSection(dm, §ion);CHKERRQ(ierr);
-  for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
-  if (section) {
-    PetscInt cStart, cEnd, dof;
-
-    /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
-    /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
-    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
-    for (v = cStart; v < cStart + numVertices; ++v) {
-      ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
-      vwgt[v-cStart] = PetscMax(dof, 1);
+
+  /* Weight cells */
+  if (vertSection) {
+    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
+    for (v = 0; v < nvtxs; ++v) {
+      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
     }
+    wgtflag |= 2; /* have weights on graph vertices */
   }
-  wgtflag |= 2; /* have weights on graph vertices */
 
-  if (nparts == 1) {
-    ierr = PetscArrayzero(assignment, nvtxs);CHKERRQ(ierr);
+  for (p = 0; !vtxdist[p+1] && p < size; ++p);
+  if (vtxdist[p+1] == vtxdist[size]) {
+    if (rank == p) {
+      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
+      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
+      options[METIS_OPTION_SEED]   = pm->randomSeed;
+      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
+      if (metis_ptype == 1) {
+        PetscStackPush("METIS_PartGraphRecursive");
+        ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
+        PetscStackPop;
+        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
+      } else {
+        /*
+         It would be nice to activate the two options below, but they would need some actual testing.
+         - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
+         - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
+        */
+        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
+        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
+        PetscStackPush("METIS_PartGraphKway");
+        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
+        PetscStackPop;
+        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
+      }
+    }
   } else {
-    for (p = 0; !vtxdist[p+1] && p < size; ++p);
-    if (vtxdist[p+1] == vtxdist[size]) {
-      if (rank == p) {
-        ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
-        options[METIS_OPTION_DBGLVL] = pm->debugFlag;
-        options[METIS_OPTION_SEED]   = pm->randomSeed;
-        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
-        if (metis_ptype == 1) {
-          PetscStackPush("METIS_PartGraphRecursive");
-          ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
-          PetscStackPop;
-          if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
-        } else {
-          /*
-           It would be nice to activate the two options below, but they would need some actual testing.
-           - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
-           - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
-          */
-          /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
-          /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
-          PetscStackPush("METIS_PartGraphKway");
-          ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
-          PetscStackPop;
-          if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
+    MPI_Comm pcomm;
+
+    options[0] = 1; /*use options */
+    options[1] = pm->debugFlag;
+    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
+
+    if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
+      PetscInt cnt;
+
+      ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
+      for (p=0,cnt=0;p<size;p++) {
+        if (vtxdist[p+1] != vtxdist[p]) {
+          vtxdist[cnt+1] = vtxdist[p+1];
+          cnt++;
+        }
+      }
-      options[0] = 1; /*use options */
-      options[1] = pm->debugFlag;
-      options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
+    } else pcomm = pm->pcomm;
+    if (nvtxs) {
       PetscStackPush("ParMETIS_V3_PartKway");
-      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
+      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
       PetscStackPop;
       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
     }
+    if (hasempty) {
+      ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
+    }
   }
+
   /* Convert to PetscSection+IS */
-  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
-  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
   for (p = 0, i = 0; p < nparts; ++p) {
     for (v = 0; v < nvtxs; ++v) {
@@ -1842,7 +2106,8 @@ static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part,
   }
   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
-  ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
+  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
+  ierr = PetscFree(vwgt);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 #else
   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
@@ -1861,10 +2126,18 @@ static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
 }
 
 /*MC
-  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
+  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
 
   Level: intermediate
 
+  Options Database Keys:
++  -petscpartitioner_parmetis_type  - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
+.  -petscpartitioner_parmetis_imbalance_ratio  - Load imbalance ratio limit
+.  -petscpartitioner_parmetis_debug  - Debugging flag passed to ParMETIS/METIS routines
+-  -petscpartitioner_parmetis_seed  - Random seed
+
+  Notes: When the graph resides on a single process, this partitioner calls METIS rather than ParMETIS.
+
 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
 M*/
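A hedged sketch of requesting an uneven (3:1) two-way split from ParMETIS via the new targetSection argument; part, dm, partSection and partition are assumed to exist as in the earlier examples, and the weights follow the absolute-weight convention documented for PetscPartitionerPartition().

/* Hedged sketch: the target weights must be identical on every process */
PetscSection targetSection;

ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
ierr = PetscSectionCreate(PETSC_COMM_SELF, &targetSection);CHKERRQ(ierr);
ierr = PetscSectionSetChart(targetSection, 0, 2);CHKERRQ(ierr);
ierr = PetscSectionSetDof(targetSection, 0, 3);CHKERRQ(ierr);
ierr = PetscSectionSetDof(targetSection, 1, 1);CHKERRQ(ierr);
ierr = PetscSectionSetUp(targetSection);CHKERRQ(ierr);
ierr = PetscPartitionerDMPlexPartition(part, dm, targetSection, partSection, &partition);CHKERRQ(ierr);
ierr = PetscSectionDestroy(&targetSection);CHKERRQ(ierr);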
 
@@ -1878,6 +2151,7 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner par
   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
   part->data = p;
 
+  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
   p->ptype          = 0;
   p->imbalanceRatio = 1.05;
   p->debugFlag      = 0;
@@ -1888,19 +2162,6 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner par
   PetscFunctionReturn(0);
 }
 
-PetscBool PTScotchPartitionercite = PETSC_FALSE;
-const char PTScotchPartitionerCitation[] =
-  "@article{PTSCOTCH,\n"
-  "  author  = {C. Chevalier and F. Pellegrini},\n"
-  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
-  "  journal = {Parallel Computing},\n"
-  "  volume  = {34},\n"
-  "  number  = {6},\n"
-  "  pages   = {318--331},\n"
-  "  year    = {2008},\n"
-  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
-  "}\n";
-
 #if defined(PETSC_HAVE_PTSCOTCH)
 
 EXTERN_C_BEGIN
@@ -1924,9 +2185,8 @@ static int PTScotch_Strategy(PetscInt strategy)
   }
 }
 
-
 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
-                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
+                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
 {
   SCOTCH_Graph   grafdat;
   SCOTCH_Strat   stradat;
@@ -1941,6 +2201,7 @@ static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalan
   PetscFunctionBegin;
   {
     PetscBool flg = PETSC_TRUE;
+    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
     if (!flg) velotab = NULL;
   }
@@ -1948,17 +2209,22 @@ static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalan
   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
-#if defined(PETSC_USE_DEBUG)
-  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
-#endif
-  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
+  if (tpart) {
+    SCOTCH_Arch archdat;
+    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
+    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
+    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
+    SCOTCH_archExit(&archdat);
+  } else {
+    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
+  }
   SCOTCH_stratExit(&stradat);
   SCOTCH_graphExit(&grafdat);
   PetscFunctionReturn(0);
 }
 
 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
-                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
+                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
 {
   PetscMPIInt     procglbnbr;
   PetscMPIInt     proclocnum;
@@ -1977,6 +2243,7 @@ static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalan
   PetscFunctionBegin;
   {
     PetscBool flg = PETSC_TRUE;
+    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
     if (!flg) veloloctab = NULL;
   }
@@ -1987,13 +2254,14 @@ static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalan
 
   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
-#if defined(PETSC_USE_DEBUG)
-  ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
-#endif
   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
-  ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
+  if (tpart) { /* target partition weights */
+    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
+  } else {
+    ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
+  }
   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
 
   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
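To show how the target architecture weights introduced above can be reached from user code, a hedged sketch follows. It assumes the PetscPartitionerDMPlexPartition() entry point referenced later in this patch takes (partitioner, dm, targetSection, partSection, partition); the helper name and the 2:1 weights are placeholders.

    #include <petscdmplex.h>

    /* Illustrative sketch: request a weighted split across two parts via a target
       PetscSection; with PT-Scotch these weights end up in SCOTCH_archCmpltw(). */
    static PetscErrorCode PartitionWithTargetWeights(DM dm, IS *partition)
    {
      PetscPartitioner part;
      PetscSection     target, partSection;
      PetscErrorCode   ierr;

      PetscFunctionBegin;
      ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &target);CHKERRQ(ierr);
      ierr = PetscSectionSetChart(target, 0, 2);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(target, 0, 2);CHKERRQ(ierr); /* part 0 should get ~2/3 of the cells */
      ierr = PetscSectionSetDof(target, 1, 1);CHKERRQ(ierr); /* part 1 should get ~1/3 of the cells */
      ierr = PetscSectionSetUp(target);CHKERRQ(ierr);
      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);CHKERRQ(ierr);
      ierr = PetscPartitionerDMPlexPartition(part, dm, target, partSection, partition);CHKERRQ(ierr); /* assumed signature */
      ierr = PetscSectionDestroy(&target);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }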
@@ -2012,6 +2280,7 @@ static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
   PetscErrorCode             ierr;
 
   PetscFunctionBegin;
+  ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
   ierr = PetscFree(p);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -2023,8 +2292,8 @@ static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part,
 
   PetscFunctionBegin;
   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
-  ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
-  ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
+  ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -2058,71 +2327,99 @@ static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
+static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 {
 #if defined(PETSC_HAVE_PTSCOTCH)
-  MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
-  PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
-  PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
-  PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
-  PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
-  PetscInt      *vwgt       = NULL;        /* Vertex weights */
-  PetscInt      *adjwgt     = NULL;        /* Edge weights */
+  MPI_Comm       comm;
+  PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
+  PetscInt       *vtxdist;              /* Distribution of vertices across processes */
+  PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
+  PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
+  PetscInt       *vwgt   = NULL;        /* Vertex weights */
+  PetscInt       *adjwgt = NULL;        /* Edge weights */
   PetscInt       v, i, *assignment, *points;
   PetscMPIInt    size, rank, p;
+  PetscBool      hasempty = PETSC_FALSE;
+  PetscInt       *tpwgts = NULL;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
+  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
-
   /* Calculate vertex distribution */
   vtxdist[0] = 0;
   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
   for (p = 2; p <= size; ++p) {
+    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
     vtxdist[p] += vtxdist[p-1];
   }
+  /* null graph */
+  if (vtxdist[size] == 0) {
+    ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
+    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
+    PetscFunctionReturn(0);
+  }
 
-  if (nparts == 1) {
-    ierr = PetscArrayzero(assignment, nvtxs);CHKERRQ(ierr);
-  } else { /* Weight cells by dofs on cell by default */
-    PetscSection section;
-
-    /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
-    /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
-    ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
-    for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
-    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
-    if (section) {
-      PetscInt vStart, vEnd, dof;
-      ierr = DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);CHKERRQ(ierr);
-      for (v = vStart; v < vStart + numVertices; ++v) {
-        ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
-        vwgt[v-vStart] = PetscMax(dof, 1);
-      }
+  /* Calculate vertex weights */
+  if (vertSection) {
+    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
+    for (v = 0; v < nvtxs; ++v) {
+      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
     }
-    {
-      PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
-      int                       strat = PTScotch_Strategy(pts->strategy);
-      double                    imbal = (double)pts->imbalance;
-
-      for (p = 0; !vtxdist[p+1] && p < size; ++p);
-      if (vtxdist[p+1] == vtxdist[size]) {
-        if (rank == p) {
-          ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
+  }
+
+  /* Calculate partition weights */
+  if (targetSection) {
+    PetscInt sumw;
+
+    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
+    for (p = 0, sumw = 0; p < nparts; ++p) {
+      ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
+      sumw += tpwgts[p];
+    }
+    if (!sumw) {
+      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
+    }
+  }
+
+  {
+    PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
+    int                       strat = PTScotch_Strategy(pts->strategy);
+    double                    imbal = (double)pts->imbalance;
+
+    for (p = 0; !vtxdist[p+1] && p < size; ++p);
+    if (vtxdist[p+1] == vtxdist[size]) {
+      if (rank == p) {
+        ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
+      }
+    } else {
+      PetscInt cnt;
+      MPI_Comm pcomm;
+
+      if (hasempty) {
+        ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
+        for (p=0,cnt=0;p<size;p++) {
+          if (vtxdist[p+1] != vtxdist[p]) {
+            vtxdist[cnt+1] = vtxdist[p+1];
+            cnt++;
+          }
+        }
+      } else pcomm = pts->pcomm;
+      if (nvtxs) {
+        ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
+      }
+      if (hasempty) {
+        ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
       }
     }
-    ierr = PetscFree(vwgt);CHKERRQ(ierr);
   }
+  ierr = PetscFree(vwgt);CHKERRQ(ierr);
+  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
 
   /* Convert to PetscSection+IS */
-  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
-  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
   for (p = 0, i = 0; p < nparts; ++p) {
     for (v = 0; v < nvtxs; ++v) {
@@ -2155,6 +2452,12 @@ static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
 
   Level: intermediate
 
+  Options Database Keys:
++  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
+-  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
+
+  Notes: When the graph is on a single process, this partitioner uses Scotch rather than PT-Scotch
+
 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
 M*/
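A companion sketch for the PT-Scotch options documented above (not part of the patch; the preset values are arbitrary and could equally be passed on the command line):

    #include <petscdmplex.h>

    /* Illustrative helper: select PT-Scotch and preset the documented options. */
    static PetscErrorCode UsePTScotchPartitioner(DM dm)
    {
      PetscPartitioner part;
      PetscErrorCode   ierr;

      PetscFunctionBegin;
      ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_strategy", "quality");CHKERRQ(ierr);
      ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_imbalance", "0.05");CHKERRQ(ierr);
      ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
      ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
      ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }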
 
@@ -2168,6 +2471,7 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner par
   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
   part->data = p;
 
+  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
   p->strategy  = 0;
   p->imbalance = 0.01;
 
@@ -2176,7 +2480,6 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner par
   PetscFunctionReturn(0);
 }
 
-
 /*@
   DMPlexGetPartitioner - Get the mesh partitioner
 
@@ -2192,7 +2495,7 @@ PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner par
 
   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
 
-.seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
+.seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
 @*/
 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
 {
@@ -2208,7 +2511,7 @@ PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
 /*@
   DMPlexSetPartitioner - Set the mesh partitioner
 
-  logically collective on dm
+  logically collective on DM
 
   Input Parameters:
 + dm - The DM
@@ -2540,10 +2843,10 @@ PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
   Input Parameters:
 + dm        - The DM
 . rootLabel - DMLabel assinging ranks to local roots
-. processSF - A star forest mapping into the local index on each remote rank
+- processSF - A star forest mapping into the local index on each remote rank
 
   Output Parameter:
-- leafLabel - DMLabel assinging ranks to remote roots
+. leafLabel - DMLabel assigning ranks to remote roots
 
   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
@@ -2678,7 +2981,7 @@ PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF proc
 . label - DMLabel assinging ranks to remote roots
 
   Output Parameter:
-- sf    - The star forest communication context encapsulating the defined mapping
+. sf    - The star forest communication context encapsulating the defined mapping
 
   Note: The incoming label is a receiver mapping of remote points to their parent rank.
 
@@ -2763,11 +3066,11 @@ PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
 
   Input parameters:
-  + dm                - The DMPlex object.
-  + n                 - The number of points.
-  + pointsToRewrite   - The points in the SF whose ownership will change.
-  + targetOwners      - New owner for each element in pointsToRewrite.
-  + degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
++ dm                - The DMPlex object.
+. n                 - The number of points.
+. pointsToRewrite   - The points in the SF whose ownership will change.
+. targetOwners      - New owner for each element in pointsToRewrite.
+- degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
 
   Level: developer
 
@@ -2945,13 +3248,13 @@ static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt
   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
 
   Input parameters:
-  + dm               - The DMPlex object.
-  + entityDepth      - depth of the entity to balance (0 -> balance vertices).
-  + useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
-  + parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
++ dm               - The DMPlex object.
+. entityDepth      - depth of the entity to balance (0 -> balance vertices).
+. useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
+- parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
 
   Output parameters:
-  + success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
+. success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
 
   Level: intermediate
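The corrected parameter list above maps onto a call like the following minimal sketch (helper name illustrative); the retry follows the advice given for the success output.

    #include <petscdmplex.h>

    /* Illustrative helper: rebalance shared vertices (entityDepth = 0), retrying
       with an initial guess and parallel ParMETIS if the first attempt fails. */
    static PetscErrorCode RebalanceVertices(DM dm)
    {
      PetscBool      success;
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = DMPlexRebalanceSharedPoints(dm, 0, PETSC_FALSE, PETSC_FALSE, &success);CHKERRQ(ierr);
      if (!success) {
        ierr = DMPlexRebalanceSharedPoints(dm, 0, PETSC_TRUE, PETSC_TRUE, &success);CHKERRQ(ierr);
      }
      PetscFunctionReturn(0);
    }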
 
@@ -3062,6 +3365,7 @@ PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBoo
   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
 
   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
+  for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}

diff --git a/src/dm/impls/plex/plexproject.c b/src/dm/impls/plex/plexproject.c
--- a/src/dm/impls/plex/plexproject.c
+++ b/src/dm/impls/plex/plexproject.c
 
-static PetscErrorCode DMProjectPoint_Func_Private(DM dm, PetscDS prob, PetscReal time, PetscFEGeom *fegeom, PetscFVCellGeom *fvgeom, PetscBool isFE[], PetscDualSpace sp[],
+/*
+  DMProjectPoint_Func_Private - Interpolate the given function in the output basis on the given point
+
+  Input Parameters:
++ dm     - The output DM
+. ds     - The output DS
+. dmIn   - The input DM
+. dsIn   - The input DS
+. time   - The time for this evaluation
+. fegeom - The FE geometry for this point
+. fvgeom - The FV geometry for this point
+. isFE   - Flag indicating whether each output field has an FE discretization
+. sp     - The output PetscDualSpace for each field
+. funcs  - The evaluation function for each field
+- ctxs   - The user context for each field
+
+  Output Parameter:
+. values - The value for each dual basis vector in the output dual space
+
+  Level: developer
+
+.seealso: DMProjectPoint_Field_Private()
+*/
+static PetscErrorCode DMProjectPoint_Func_Private(DM dm, PetscDS ds, DM dmIn, PetscDS dsIn, PetscReal time, PetscFEGeom *fegeom, PetscFVCellGeom *fvgeom, PetscBool isFE[], PetscDualSpace sp[],
                                                   PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *), void **ctxs,
                                                   PetscScalar values[])
 {
-  PetscInt       coordDim, Nf, *Nc, f, totDim, spDim, d, v, tp;
+  PetscInt       coordDim, Nf, *Nc, f, spDim, d, v, tp;
   PetscBool      isAffine, transform;
   PetscErrorCode ierr;
 
   PetscFunctionBeginHot;
-  ierr = DMGetCoordinateDim(dm,&coordDim);CHKERRQ(ierr);
-  ierr = DMHasBasisTransform(dm, &transform);CHKERRQ(ierr);
-  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(prob, &Nc);CHKERRQ(ierr);
-  ierr = PetscDSGetTotalDimension(prob, &totDim);CHKERRQ(ierr);
+  ierr = DMGetCoordinateDim(dmIn, &coordDim);CHKERRQ(ierr);
+  ierr = DMHasBasisTransform(dmIn, &transform);CHKERRQ(ierr);
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   /* Get values for closure */
   isAffine = fegeom->isAffine;
   for (f = 0, v = 0, tp = 0; f < Nf; ++f) {
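DMProjectPoint_Func_Private() is internal; user code reaches it through the public projection interfaces. A minimal sketch of driving it through DMProjectFunctionLocal() follows (the constant-valued function and helper name are placeholders):

    #include <petscdmplex.h>

    /* Placeholder pointwise function: set every component to 1. */
    static PetscErrorCode one(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
    {
      PetscInt c;
      for (c = 0; c < Nc; ++c) u[c] = 1.0;
      return 0;
    }

    /* Illustrative helper: project the function above into a local vector of dm,
       which ends up calling DMProjectPoint_Func_Private() on each mesh point. */
    static PetscErrorCode ProjectOne(DM dm, Vec localX)
    {
      PetscErrorCode (*funcs[1])(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *) = {one};
      void           *ctxs[1] = {NULL};
      PetscErrorCode  ierr;

      PetscFunctionBegin;
      ierr = DMProjectFunctionLocal(dm, 0.0, funcs, ctxs, INSERT_ALL_VALUES, localX);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }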
@@ -46,7 +68,7 @@ static PetscErrorCode DMProjectPoint_Func_Private(DM dm, PetscDS prob, PetscReal
           } else {
             v0 = &fegeom->v[tp*coordDim];
           }
-          if (transform) {ierr = DMPlexBasisTransformApplyReal_Internal(dm, v0, PETSC_TRUE, coordDim, v0, x, dm->transformCtx);CHKERRQ(ierr); v0 = x;}
+          if (transform) {ierr = DMPlexBasisTransformApplyReal_Internal(dmIn, v0, PETSC_TRUE, coordDim, v0, x, dm->transformCtx);CHKERRQ(ierr); v0 = x;}
           ierr = (*funcs[f])(coordDim, time, v0, Nc[f], &pointEval[Nc[f]*q], ctx);CHKERRQ(ierr);
         }
         /* Transform point evaluations pointEval[q,c] */
@@ -67,8 +89,38 @@ static PetscErrorCode DMProjectPoint_Func_Private(DM dm, PetscDS prob, PetscReal
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS prob, DM dmAux, PetscDS probAux, PetscReal time, Vec localU, Vec localA, PetscFEGeom *cgeom, PetscDualSpace sp[], PetscInt p, PetscInt Ncc, const PetscInt comps[],
-                                                   PetscReal **basisTab, PetscReal **basisDerTab, PetscReal **basisTabAux, PetscReal **basisDerTabAux,
+/*
+  DMProjectPoint_Field_Private - Interpolate a function of the given field, in the input basis, using the output basis on the given point
+
+  Input Parameters:
++ dm             - The output DM
+. ds             - The output DS
+. dmIn           - The input DM
+. dsIn           - The input DS
+. dmAux          - The auxiliary DM, which is always for the input space
+. dsAux          - The auxiliary DS, which is always for the input space
+. time           - The time for this evaluation
+. localU         - The local solution
+. localA         - The local auxiliary fields
+. cgeom          - The FE geometry for this point
+. sp             - The output PetscDualSpace for each field
+. p              - The point in the output DM
+. T              - Input basis and derivatives for each field tabulated on the quadrature points
+. TAux           - Auxiliary basis and derivatives for each aux field tabulated on the quadrature points
+. funcs          - The evaluation function for each field
+- ctxs           - The user context for each field
+
+  Output Parameter:
+. values         - The value for each dual basis vector in the output dual space
+
+  Note: Not supported for FV
+
+  Level: developer
+
+.seealso: DMProjectPoint_Func_Private()
+*/
+static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS ds, DM dmIn, DMEnclosureType encIn, PetscDS dsIn, DM dmAux, DMEnclosureType encAux, PetscDS dsAux, PetscReal time, Vec localU, Vec localA, PetscFEGeom *cgeom, PetscDualSpace sp[], PetscInt p,
+                                                   PetscTabulation *T, PetscTabulation *TAux,
                                                    void (**funcs)(PetscInt, PetscInt, PetscInt,
                                                                   const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                                                                   const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
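Similarly, DMProjectPoint_Field_Private() is reached through DMProjectFieldLocal(); a hedged sketch with a trivial pointwise function that copies the first input field (names are placeholders, and the pointwise signature should be checked against the PETSc headers):

    #include <petscdmplex.h>

    /* Placeholder pointwise function: copy the components of input field 0,
       assuming the output field has the same number of components. */
    static void copy_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                       const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                       const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                       PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
    {
      PetscInt c;
      for (c = 0; c < uOff[1] - uOff[0]; ++c) f[c] = u[uOff[0] + c];
    }

    /* Illustrative helper: project a function of the current solution localU into
       localX; here the input and output DM coincide, while the changes above also
       allow localU to live on a related (enclosing or enclosed) DM. */
    static PetscErrorCode CopyField(DM dm, Vec localU, Vec localX)
    {
      void (*funcs[1])(PetscInt, PetscInt, PetscInt,
                       const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                       const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                       PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]) = {copy_u};
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = DMProjectFieldLocal(dm, 0.0, localU, funcs, INSERT_VALUES, localX);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }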
@@ -81,37 +133,35 @@ static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS prob, DM dmAux
   PetscScalar       *coefficients_t = NULL, *coefficientsAux_t = NULL;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nc;
   PetscFEGeom        fegeom;
   const PetscInt     dE = cgeom->dimEmbed;
-  PetscInt           dimAux = 0, numConstants, Nf, NfAux = 0, f, spDim, d, v, tp = 0;
+  PetscInt           numConstants, Nf, NfIn, NfAux = 0, f, spDim, d, v, inp, tp = 0;
   PetscBool          isAffine, transform;
   PetscErrorCode     ierr;
 
   PetscFunctionBeginHot;
-  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(prob, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(prob, &Nc);CHKERRQ(ierr);
-  ierr = PetscDSGetComponentOffsets(prob, &uOff);CHKERRQ(ierr);
-  ierr = PetscDSGetComponentDerivativeOffsets(prob, &uOff_x);CHKERRQ(ierr);
-  ierr = PetscDSGetEvaluationArrays(prob, &u, &bc /*&u_t*/, &u_x);CHKERRQ(ierr);
-  ierr = PetscDSGetWorkspace(prob, &x, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
-  ierr = PetscDSGetConstants(prob, &numConstants, &constants);CHKERRQ(ierr);
-  ierr = DMHasBasisTransform(dm, &transform);CHKERRQ(ierr);
-  ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
-  ierr = DMPlexVecGetClosure(dm, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
+  ierr = PetscDSGetNumFields(dsIn, &NfIn);CHKERRQ(ierr);
+  ierr = PetscDSGetComponentOffsets(dsIn, &uOff);CHKERRQ(ierr);
+  ierr = PetscDSGetComponentDerivativeOffsets(dsIn, &uOff_x);CHKERRQ(ierr);
+  ierr = PetscDSGetEvaluationArrays(dsIn, &u, &bc /*&u_t*/, &u_x);CHKERRQ(ierr);
+  ierr = PetscDSGetWorkspace(dsIn, &x, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscDSGetConstants(dsIn, &numConstants, &constants);CHKERRQ(ierr);
+  ierr = DMHasBasisTransform(dmIn, &transform);CHKERRQ(ierr);
+  ierr = DMGetLocalSection(dmIn, &section);CHKERRQ(ierr);
+  ierr = DMGetEnclosurePoint(dmIn, dm, encIn, p, &inp);CHKERRQ(ierr);
+  ierr = DMPlexVecGetClosure(dmIn, section, localU, inp, NULL, &coefficients);CHKERRQ(ierr);
   if (dmAux) {
     PetscInt subp;
 
-    ierr = DMPlexGetAuxiliaryPoint(dm, dmAux, p, &subp);CHKERRQ(ierr);
-    ierr = PetscDSGetSpatialDimension(probAux, &dimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetNumFields(probAux, &NfAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(probAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(probAux, &NcAux);CHKERRQ(ierr);
+    ierr = DMGetEnclosurePoint(dmAux, dm, encAux, p, &subp);CHKERRQ(ierr);
+    ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = DMGetLocalSection(dmAux, &sectionAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponentOffsets(probAux, &aOff);CHKERRQ(ierr);
-    ierr = PetscDSGetComponentDerivativeOffsets(probAux, &aOff_x);CHKERRQ(ierr);
-    ierr = PetscDSGetEvaluationArrays(probAux, &a, NULL /*&a_t*/, &a_x);CHKERRQ(ierr);
+    ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
+    ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
+    ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL /*&a_t*/, &a_x);CHKERRQ(ierr);
     ierr = DMPlexVecGetClosure(dmAux, sectionAux, localA, subp, NULL, &coefficientsAux);CHKERRQ(ierr);
   }
   /* Get values for closure */
@@ -142,29 +192,29 @@ static PetscErrorCode DMProjectPoint_Field_Private(DM dm, PetscDS prob, DM dmAux
     ierr = DMGetWorkArray(dm,numPoints*Nc[f],MPIU_SCALAR,&pointEval);CHKERRQ(ierr);
     for (q = 0; q < numPoints; ++q, ++tp) {
       if (isAffine) {
-        CoordinatesRefToReal(dE, dim, fegeom.xi, cgeom->v, fegeom.J, &points[q*dim], x);
+        CoordinatesRefToReal(dE, cgeom->dim, fegeom.xi, cgeom->v, fegeom.J, &points[q*dim], x);
       } else {
         fegeom.v    = &cgeom->v[tp*dE];
         fegeom.J    = &cgeom->J[tp*dE*dE];
         fegeom.invJ = &cgeom->invJ[tp*dE*dE];
         fegeom.detJ = &cgeom->detJ[tp];
       }
-      ierr = PetscFEEvaluateFieldJets_Internal(prob, dim, Nf, Nb, Nc, tp, basisTab, basisDerTab, &fegeom, coefficients, coefficients_t, u, u_x, u_t);CHKERRQ(ierr);
-      if (probAux) {ierr = PetscFEEvaluateFieldJets_Internal(probAux, dimAux, NfAux, NbAux, NcAux, tp, basisTabAux, basisDerTabAux, &fegeom, coefficientsAux, coefficientsAux_t, a, a_x, a_t);CHKERRQ(ierr);}
-      if (transform) {ierr = DMPlexBasisTransformApplyReal_Internal(dm, fegeom.v, PETSC_TRUE, dE, fegeom.v, fegeom.v, dm->transformCtx);CHKERRQ(ierr);}
-      (*funcs[f])(dE, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, a_t, a_x, time, fegeom.v, numConstants, constants, &pointEval[Nc[f]*q]);
+      ierr = PetscFEEvaluateFieldJets_Internal(dsIn, NfIn, 0, tp, T, &fegeom, coefficients, coefficients_t, u, u_x, u_t);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 0, tp, TAux, &fegeom, coefficientsAux, coefficientsAux_t, a, a_x, a_t);CHKERRQ(ierr);}
+      if (transform) {ierr = DMPlexBasisTransformApplyReal_Internal(dmIn, fegeom.v, PETSC_TRUE, dE, fegeom.v, fegeom.v, dm->transformCtx);CHKERRQ(ierr);}
+      (*funcs[f])(dE, NfIn, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, a_t, a_x, time, fegeom.v, numConstants, constants, &pointEval[Nc[f]*q]);
     }
     ierr = PetscDualSpaceApplyAll(sp[f], pointEval, &values[v]);CHKERRQ(ierr);
     ierr = DMRestoreWorkArray(dm,numPoints*Nc[f],MPIU_SCALAR,&pointEval);CHKERRQ(ierr);
     v += spDim;
   }
-  ierr = DMPlexVecRestoreClosure(dm, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
+  ierr = DMPlexVecRestoreClosure(dmIn, section, localU, inp, NULL, &coefficients);CHKERRQ(ierr);
   if (dmAux) {ierr = DMPlexVecRestoreClosure(dmAux, sectionAux, localA, p, NULL, &coefficientsAux);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS prob, DM dmAux, PetscDS probAux, PetscReal time, Vec localU, Vec localA, PetscFEGeom *fgeom, PetscDualSpace sp[], PetscInt p, PetscInt Ncc, const PetscInt comps[],
-                                                     PetscReal **basisTab, PetscReal **basisDerTab, PetscReal **basisTabAux, PetscReal **basisDerTabAux,
+static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS ds, DM dmIn, PetscDS dsIn, DM dmAux, DMEnclosureType encAux, PetscDS dsAux, PetscReal time, Vec localU, Vec localA, PetscFEGeom *fgeom, PetscDualSpace sp[], PetscInt p,
+                                                     PetscTabulation *T, PetscTabulation *TAux,
                                                      void (**funcs)(PetscInt, PetscInt, PetscInt,
                                                                     const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                                                                     const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
@@ -177,39 +227,33 @@ static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS prob, DM dmA
   PetscScalar       *coefficients_t = NULL, *coefficientsAux_t = NULL;
   const PetscScalar *constants;
   PetscReal         *x;
-  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nb, *Nc, *NbAux = NULL, *NcAux = NULL;
+  PetscInt          *uOff, *uOff_x, *aOff = NULL, *aOff_x = NULL, *Nc;
   PetscFEGeom        fegeom, cgeom;
   const PetscInt     dE = fgeom->dimEmbed;
-  PetscInt           dimAux = 0, numConstants, Nf, NfAux = 0, f, spDim, d, v, tp = 0;
+  PetscInt           numConstants, Nf, NfAux = 0, f, spDim, d, v, tp = 0;
   PetscBool          isAffine;
   PetscErrorCode     ierr;
 
   PetscFunctionBeginHot;
-  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
-  ierr = PetscDSGetDimensions(prob, &Nb);CHKERRQ(ierr);
-  ierr = PetscDSGetComponents(prob, &Nc);CHKERRQ(ierr);
-  ierr = PetscDSGetComponentOffsets(prob, &uOff);CHKERRQ(ierr);
-  ierr = PetscDSGetComponentDerivativeOffsets(prob, &uOff_x);CHKERRQ(ierr);
-  ierr = PetscDSGetEvaluationArrays(prob, &u, &bc /*&u_t*/, &u_x);CHKERRQ(ierr);
-  ierr = PetscDSGetWorkspace(prob, &x, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
-  ierr = PetscDSGetConstants(prob, &numConstants, &constants);CHKERRQ(ierr);
+  if (dm != dmIn) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Not yet upgraded to use different input DM");
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
+  ierr = PetscDSGetComponentOffsets(ds, &uOff);CHKERRQ(ierr);
+  ierr = PetscDSGetComponentDerivativeOffsets(ds, &uOff_x);CHKERRQ(ierr);
+  ierr = PetscDSGetEvaluationArrays(ds, &u, &bc /*&u_t*/, &u_x);CHKERRQ(ierr);
+  ierr = PetscDSGetWorkspace(ds, &x, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscDSGetConstants(ds, &numConstants, &constants);CHKERRQ(ierr);
   ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
-  ierr = DMPlexVecGetClosure(dm, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
+  ierr = DMPlexVecGetClosure(dmIn, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
   if (dmAux) {
-    DMLabel  spmap;
-    PetscInt subp = p;
-
-    /* If dm is a submesh, do not get subpoint */
-    ierr = DMPlexGetSubpointMap(dm, &spmap);CHKERRQ(ierr);
-    if (!spmap) {ierr = DMPlexGetSubpoint(dmAux, p, &subp);CHKERRQ(ierr);}
-    ierr = PetscDSGetSpatialDimension(probAux, &dimAux);CHKERRQ(ierr);
-    ierr = PetscDSGetNumFields(probAux, &NfAux);CHKERRQ(ierr);
-    ierr = PetscDSGetDimensions(probAux, &NbAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponents(probAux, &NcAux);CHKERRQ(ierr);
+    PetscInt subp;
+
+    ierr = DMGetEnclosurePoint(dmAux, dm, encAux, p, &subp);CHKERRQ(ierr);
+    ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
     ierr = DMGetLocalSection(dmAux, &sectionAux);CHKERRQ(ierr);
-    ierr = PetscDSGetComponentOffsets(probAux, &aOff);CHKERRQ(ierr);
-    ierr = PetscDSGetComponentDerivativeOffsets(probAux, &aOff_x);CHKERRQ(ierr);
-    ierr = PetscDSGetEvaluationArrays(probAux, &a, NULL /*&a_t*/, &a_x);CHKERRQ(ierr);
+    ierr = PetscDSGetComponentOffsets(dsAux, &aOff);CHKERRQ(ierr);
+    ierr = PetscDSGetComponentDerivativeOffsets(dsAux, &aOff_x);CHKERRQ(ierr);
+    ierr = PetscDSGetEvaluationArrays(dsAux, &a, NULL /*&a_t*/, &a_x);CHKERRQ(ierr);
     ierr = DMPlexVecGetClosure(dmAux, sectionAux, localA, subp, NULL, &coefficientsAux);CHKERRQ(ierr);
   }
   /* Get values for closure */
@@ -218,6 +262,8 @@ static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS prob, DM dmA
   fegeom.J  = 0;
   fegeom.v  = 0;
   fegeom.xi = 0;
+  cgeom.dim      = fgeom->dim;
+  cgeom.dimEmbed = fgeom->dimEmbed;
   if (isAffine) {
     fegeom.v    = x;
     fegeom.xi   = fgeom->xi;
@@ -249,7 +295,7 @@ static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS prob, DM dmA
     ierr = DMGetWorkArray(dm,numPoints*Nc[f],MPIU_SCALAR,&pointEval);CHKERRQ(ierr);
     for (q = 0; q < numPoints; ++q, ++tp) {
       if (isAffine) {
-        CoordinatesRefToReal(dE, dim, fegeom.xi, fgeom->v, fegeom.J, &points[q*dim], x);
+        CoordinatesRefToReal(dE, fgeom->dim, fegeom.xi, fgeom->v, fegeom.J, &points[q*dim], x);
       } else {
         fegeom.v    = &fgeom->v[tp*dE];
         fegeom.J    = &fgeom->J[tp*dE*dE];
@@ -261,22 +307,22 @@ static PetscErrorCode DMProjectPoint_BdField_Private(DM dm, PetscDS prob, DM dmA
         cgeom.invJ  = &fgeom->suppInvJ[0][tp*dE*dE];
         cgeom.detJ  = &fgeom->suppDetJ[0][tp];
       }
-      ierr = PetscFEEvaluateFieldJets_Internal(prob, dim, Nf, Nb, Nc, tp, basisTab, basisDerTab, &cgeom, coefficients, coefficients_t, u, u_x, u_t);CHKERRQ(ierr);
-      if (probAux) {ierr = PetscFEEvaluateFieldJets_Internal(probAux, dimAux, NfAux, NbAux, NcAux, tp, basisTabAux, basisDerTabAux, &cgeom, coefficientsAux, coefficientsAux_t, a, a_x, a_t);CHKERRQ(ierr);}
+      /* TODO We should use cgeom here, instead of fegeom, however the geometry coming in through fgeom does not have the support cell geometry */
+      ierr = PetscFEEvaluateFieldJets_Internal(ds, Nf, 0, tp, T, &cgeom, coefficients, coefficients_t, u, u_x, u_t);CHKERRQ(ierr);
+      if (dsAux) {ierr = PetscFEEvaluateFieldJets_Internal(dsAux, NfAux, 0, tp, TAux, &cgeom, coefficientsAux, coefficientsAux_t, a, a_x, a_t);CHKERRQ(ierr);}
       (*funcs[f])(dE, Nf, NfAux, uOff, uOff_x, u, u_t, u_x, aOff, aOff_x, a, a_t, a_x, time, fegeom.v, fegeom.n, numConstants, constants, &pointEval[Nc[f]*q]);
     }
     ierr = PetscDualSpaceApplyAll(sp[f], pointEval, &values[v]);CHKERRQ(ierr);
     ierr = DMRestoreWorkArray(dm,numPoints*Nc[f],MPIU_SCALAR,&pointEval);CHKERRQ(ierr);
     v += spDim;
   }
-  ierr = DMPlexVecRestoreClosure(dm, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
+  ierr = DMPlexVecRestoreClosure(dmIn, section, localU, p, NULL, &coefficients);CHKERRQ(ierr);
   if (dmAux) {ierr = DMPlexVecRestoreClosure(dmAux, sectionAux, localA, p, NULL, &coefficientsAux);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMProjectPoint_Private(DM dm, PetscDS prob, PetscFEGeom *fegeom, DM dmAux, PetscDS probAux, PetscInt effectiveHeight, PetscReal time, Vec localU, Vec localA, PetscBool hasFE, PetscBool hasFV, PetscBool isFE[],
-                                             PetscDualSpace sp[], PetscInt p, PetscInt Ncc, const PetscInt comps[],
-                                             PetscReal **basisTab, PetscReal **basisDerTab, PetscReal **basisTabAux, PetscReal **basisDerTabAux,
+static PetscErrorCode DMProjectPoint_Private(DM dm, PetscDS ds, DM dmIn, DMEnclosureType encIn, PetscDS dsIn, DM dmAux, DMEnclosureType encAux, PetscDS dsAux, PetscFEGeom *fegeom, PetscInt effectiveHeight, PetscReal time, Vec localU, Vec localA, PetscBool hasFE, PetscBool hasFV, PetscBool isFE[],
+                                             PetscDualSpace sp[], PetscInt p, PetscTabulation *T, PetscTabulation *TAux,
                                              DMBoundaryConditionType type, void (**funcs)(void), void **ctxs, PetscBool fieldActive[], PetscScalar values[])
 {
   PetscFVCellGeom fvgeom;
@@ -290,18 +336,16 @@ static PetscErrorCode DMProjectPoint_Private(DM dm, PetscDS prob, PetscFEGeom *f
   switch (type) {
   case DM_BC_ESSENTIAL:
   case DM_BC_NATURAL:
-    ierr = DMProjectPoint_Func_Private(dm, prob, time, fegeom, &fvgeom, isFE, sp, (PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *)) funcs, ctxs, values);CHKERRQ(ierr);break;
+    ierr = DMProjectPoint_Func_Private(dm, ds, dmIn, dsIn, time, fegeom, &fvgeom, isFE, sp, (PetscErrorCode (**)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *)) funcs, ctxs, values);CHKERRQ(ierr);break;
   case DM_BC_ESSENTIAL_FIELD:
   case DM_BC_NATURAL_FIELD:
-    ierr = DMProjectPoint_Field_Private(dm, prob, dmAux, probAux, time, localU, localA, fegeom, sp, p, Ncc, comps,
-                                        basisTab, basisDerTab, basisTabAux, basisDerTabAux,
+    ierr = DMProjectPoint_Field_Private(dm, ds, dmIn, encIn, dsIn, dmAux, encAux, dsAux, time, localU, localA, fegeom, sp, p, T, TAux,
                                         (void (**)(PetscInt, PetscInt, PetscInt,
                                                    const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                                                    const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                                                    PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[])) funcs, ctxs, values);CHKERRQ(ierr);break;
   case DM_BC_ESSENTIAL_BD_FIELD:
-    ierr = DMProjectPoint_BdField_Private(dm, prob, dmAux, probAux, time, localU, localA, fegeom, sp, p, Ncc, comps,
-                                          basisTab, basisDerTab, basisTabAux, basisDerTabAux,
+    ierr = DMProjectPoint_BdField_Private(dm, ds, dmIn, dsIn, dmAux, encAux, dsAux, time, localU, localA, fegeom, sp, p, T, TAux,
                                           (void (**)(PetscInt, PetscInt, PetscInt,
                                                      const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
                                                      const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
@@ -352,36 +396,42 @@ static PetscErrorCode PetscDualSpaceGetAllPointsUnion(PetscInt Nf, PetscDualSpac
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMGetFirstLabelEntry_Private(DM dm, DMLabel label, PetscInt numIds, const PetscInt ids[], PetscInt height, PetscInt *lStart, PetscDS *prob)
+static PetscErrorCode DMGetFirstLabelEntry_Private(DM dm, DM odm, DMLabel label, PetscInt numIds, const PetscInt ids[], PetscInt height, PetscInt *lStart, PetscDS *ds)
 {
-  DMLabel        depthLabel;
-  PetscInt       dim, cdepth, ls = -1, i;
-  PetscErrorCode ierr;
+  DM              plex;
+  DMEnclosureType enc;
+  DMLabel         depthLabel;
+  PetscInt        dim, cdepth, ls = -1, i;
+  PetscErrorCode  ierr;
 
   PetscFunctionBegin;
   if (lStart) *lStart = -1;
   if (!label) PetscFunctionReturn(0);
+  ierr = DMGetEnclosureRelation(dm, odm, &enc);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
+  ierr = DMConvert(dm, DMPLEX, &plex);CHKERRQ(ierr);
+  ierr = DMPlexGetDepthLabel(plex, &depthLabel);CHKERRQ(ierr);
   cdepth = dim - height;
   for (i = 0; i < numIds; ++i) {
     IS              pointIS;
     const PetscInt *points;
-    PetscInt        pdepth;
+    PetscInt        pdepth, point;
 
     ierr = DMLabelGetStratumIS(label, ids[i], &pointIS);CHKERRQ(ierr);
     if (!pointIS) continue; /* No points with that id on this process */
     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
-    ierr = DMLabelGetValue(depthLabel, points[0], &pdepth);CHKERRQ(ierr);
+    ierr = DMGetEnclosurePoint(dm, odm, enc, points[0], &point);CHKERRQ(ierr);
+    ierr = DMLabelGetValue(depthLabel, point, &pdepth);CHKERRQ(ierr);
     if (pdepth == cdepth) {
-      ls = points[0];
-      if (prob) {ierr = DMGetCellDS(dm, ls, prob);CHKERRQ(ierr);}
+      ls = point;
+      if (ds) {ierr = DMGetCellDS(dm, ls, ds);CHKERRQ(ierr);}
     }
     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
     if (ls >= 0) break;
   }
   if (lStart) *lStart = ls;
+  ierr = DMDestroy(&plex);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -421,67 +471,80 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
                                                   DMBoundaryConditionType type, void (**funcs)(void), void **ctxs,
                                                   InsertMode mode, Vec localX)
 {
-  DM              dmAux = NULL, tdm;
-  PetscDS         prob = NULL, probAux = NULL;
-  Vec             localA = NULL, tv;
-  PetscSection    section;
-  PetscDualSpace *sp, *cellsp;
-  PetscReal     **basisTab = NULL, **basisDerTab = NULL, **basisTabAux = NULL, **basisDerTabAux = NULL;
-  PetscInt       *Nc;
-  PetscInt        dim, dimEmbed, depth, minHeight, maxHeight, h, Nf, NfAux = 0, f;
-  PetscBool      *isFE, hasFE = PETSC_FALSE, hasFV = PETSC_FALSE, auxBd = PETSC_FALSE, transform;
-  DMField         coordField;
-  DMLabel         depthLabel;
-  PetscQuadrature allPoints = NULL;
-  PetscErrorCode  ierr;
+  DM                 plex, dmIn, plexIn, dmAux = NULL, plexAux = NULL, tdm;
+  DMEnclosureType    encIn, encAux;
+  PetscDS            ds = NULL, dsIn = NULL, dsAux = NULL;
+  Vec                localA = NULL, tv;
+  PetscSection       section;
+  PetscDualSpace    *sp, *cellsp;
+  PetscTabulation *T = NULL, *TAux = NULL;
+  PetscInt          *Nc;
+  PetscInt           dim, dimEmbed, depth, minHeight, maxHeight, h, Nf, NfIn, NfAux = 0, f;
+  PetscBool         *isFE, hasFE = PETSC_FALSE, hasFV = PETSC_FALSE, auxBd = PETSC_FALSE, transform;
+  DMField            coordField;
+  DMLabel            depthLabel;
+  PetscQuadrature    allPoints = NULL;
+  PetscErrorCode     ierr;
 
   PetscFunctionBegin;
+  if (localU) {ierr = VecGetDM(localU, &dmIn);CHKERRQ(ierr);}
+  else        {dmIn = dm;}
   ierr = PetscObjectQuery((PetscObject) dm, "dmAux", (PetscObject *) &dmAux);CHKERRQ(ierr);
   ierr = PetscObjectQuery((PetscObject) dm, "A", (PetscObject *) &localA);CHKERRQ(ierr);
+  ierr = DMConvert(dm, DMPLEX, &plex);CHKERRQ(ierr);
+  ierr = DMConvert(dmIn, DMPLEX, &plexIn);CHKERRQ(ierr);
+  ierr = DMGetEnclosureRelation(dmIn, dm, &encIn);CHKERRQ(ierr);
+  ierr = DMGetEnclosureRelation(dmAux, dm, &encAux);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = DMPlexGetVTKCellHeight(dm, &minHeight);CHKERRQ(ierr);
+  ierr = DMPlexGetVTKCellHeight(plex, &minHeight);CHKERRQ(ierr);
   ierr = DMGetBasisTransformDM_Internal(dm, &tdm);CHKERRQ(ierr);
   ierr = DMGetBasisTransformVec_Internal(dm, &tv);CHKERRQ(ierr);
   ierr = DMHasBasisTransform(dm, &transform);CHKERRQ(ierr);
   /* Auxiliary information can only be used with interpolation of field functions */
-  if (type == DM_BC_ESSENTIAL_FIELD || type == DM_BC_NATURAL_FIELD) {
-    if (dmAux && !localA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing localA vector");
-    if (!minHeight && dmAux) {
-      DMLabel spmap;
-
-      /* If dmAux is a surface, then force the projection to take place over a surface */
-      ierr = DMPlexGetSubpointMap(dmAux, &spmap);CHKERRQ(ierr);
-      if (spmap) {
-        ierr = DMPlexGetVTKCellHeight(dmAux, &minHeight);CHKERRQ(ierr);
-        auxBd = minHeight ? PETSC_TRUE : PETSC_FALSE;
+  if (dmAux) {
+    ierr = DMConvert(dmAux, DMPLEX, &plexAux);CHKERRQ(ierr);
+    if (type == DM_BC_ESSENTIAL_FIELD || type == DM_BC_ESSENTIAL_BD_FIELD || type == DM_BC_NATURAL_FIELD) {
+      if (!localA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing localA vector");
+      if (!minHeight) {
+        DMLabel spmap;
+
+        /* If dmAux is a surface, then force the projection to take place over a surface */
+        ierr = DMPlexGetSubpointMap(plexAux, &spmap);CHKERRQ(ierr);
+        if (spmap) {
+          ierr = DMPlexGetVTKCellHeight(plexAux, &minHeight);CHKERRQ(ierr);
+          auxBd = minHeight ? PETSC_TRUE : PETSC_FALSE;
+        }
       }
     }
   }
-  ierr = DMPlexGetDepth(dm,&depth);CHKERRQ(ierr);
-  ierr = DMPlexGetDepthLabel(dm,&depthLabel);CHKERRQ(ierr);
-  ierr = DMPlexGetMaxProjectionHeight(dm, &maxHeight);CHKERRQ(ierr);
+  ierr = DMPlexGetDepth(plex, &depth);CHKERRQ(ierr);
+  ierr = DMPlexGetDepthLabel(plex, &depthLabel);CHKERRQ(ierr);
+  ierr = DMPlexGetMaxProjectionHeight(plex, &maxHeight);CHKERRQ(ierr);
   maxHeight = PetscMax(maxHeight, minHeight);
   if (maxHeight < 0 || maxHeight > dim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Maximum projection height %D not in [0, %D)", maxHeight, dim);
-  ierr = DMGetFirstLabelEntry_Private(dm, label, numIds, ids, 0, NULL, &prob);CHKERRQ(ierr);
-  if (!prob) {ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);}
-  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
+  ierr = DMGetFirstLabelEntry_Private(dm, dm, label, numIds, ids, 0, NULL, &ds);CHKERRQ(ierr);
+  if (!ds) {ierr = DMGetDS(dm, &ds);CHKERRQ(ierr);}
+  ierr = DMGetFirstLabelEntry_Private(dmIn, dm, label, numIds, ids, 0, NULL, &dsIn);CHKERRQ(ierr);
+  if (!dsIn) {ierr = DMGetDS(dmIn, &dsIn);CHKERRQ(ierr);}
+  ierr = PetscDSGetNumFields(ds, &Nf);CHKERRQ(ierr);
+  ierr = PetscDSGetNumFields(dsIn, &NfIn);CHKERRQ(ierr);
   ierr = DMGetCoordinateDim(dm, &dimEmbed);CHKERRQ(ierr);
   ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
   if (dmAux) {
-    ierr = DMGetDS(dmAux, &probAux);CHKERRQ(ierr);
-    ierr = PetscDSGetNumFields(probAux, &NfAux);CHKERRQ(ierr);
+    ierr = DMGetDS(dmAux, &dsAux);CHKERRQ(ierr);
+    ierr = PetscDSGetNumFields(dsAux, &NfAux);CHKERRQ(ierr);
   }
-  ierr = PetscDSGetComponents(prob, &Nc);CHKERRQ(ierr);
+  ierr = PetscDSGetComponents(ds, &Nc);CHKERRQ(ierr);
   ierr = PetscMalloc2(Nf, &isFE, Nf, &sp);CHKERRQ(ierr);
   if (maxHeight > 0) {ierr = PetscMalloc1(Nf, &cellsp);CHKERRQ(ierr);}
   else               {cellsp = sp;}
-  if (localU && localU != localX) {ierr = DMPlexInsertBoundaryValues(dm, PETSC_TRUE, localU, time, NULL, NULL, NULL);CHKERRQ(ierr);}
+  if (localU && localU != localX) {ierr = DMPlexInsertBoundaryValues(plex, PETSC_TRUE, localU, time, NULL, NULL, NULL);CHKERRQ(ierr);}
   /* Get cell dual spaces */
   for (f = 0; f < Nf; ++f) {
     PetscObject  obj;
     PetscClassId id;
 
-    ierr = PetscDSGetDiscretization(prob, f, &obj);CHKERRQ(ierr);
+    ierr = PetscDSGetDiscretization(ds, f, &obj);CHKERRQ(ierr);
     ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
     if (id == PETSCFE_CLASSID) {
       PetscFE fe = (PetscFE) obj;
@@ -501,6 +564,7 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
   if (type == DM_BC_ESSENTIAL_FIELD || type == DM_BC_NATURAL_FIELD) {
     PetscInt         effectiveHeight = auxBd ? minHeight : 0;
     PetscFE          fem, subfem;
+    PetscBool        isfe;
     const PetscReal *points;
     PetscInt         numPoints;
 
@@ -511,33 +575,37 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
     }
     ierr = PetscDualSpaceGetAllPointsUnion(Nf,sp,dim-effectiveHeight,funcs,&allPoints);CHKERRQ(ierr);
     ierr = PetscQuadratureGetData(allPoints,NULL,NULL,&numPoints,&points,NULL);CHKERRQ(ierr);
-    ierr = PetscMalloc4(Nf, &basisTab, Nf, &basisDerTab, NfAux, &basisTabAux, NfAux, &basisDerTabAux);CHKERRQ(ierr);
-    for (f = 0; f < Nf; ++f) {
-      if (!isFE[f]) continue;
-      ierr = PetscDSGetDiscretization(prob, f, (PetscObject *) &fem);CHKERRQ(ierr);
+    ierr = PetscMalloc2(NfIn, &T, NfAux, &TAux);CHKERRQ(ierr);
+    for (f = 0; f < NfIn; ++f) {
+      ierr = PetscDSIsFE_Internal(dsIn, f, &isfe);CHKERRQ(ierr);
+      if (!isfe) continue;
+      ierr = PetscDSGetDiscretization(dsIn, f, (PetscObject *) &fem);CHKERRQ(ierr);
       if (!effectiveHeight) {subfem = fem;}
       else                  {ierr = PetscFEGetHeightSubspace(fem, effectiveHeight, &subfem);CHKERRQ(ierr);}
-      ierr = PetscFEGetTabulation(subfem, numPoints, points, &basisTab[f], &basisDerTab[f], NULL);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(subfem, 1, numPoints, points, 1, &T[f]);CHKERRQ(ierr);
     }
     for (f = 0; f < NfAux; ++f) {
-      ierr = PetscDSGetDiscretization(probAux, f, (PetscObject *) &fem);CHKERRQ(ierr);
+      ierr = PetscDSIsFE_Internal(dsAux, f, &isfe);CHKERRQ(ierr);
+      if (!isfe) continue;
+      ierr = PetscDSGetDiscretization(dsAux, f, (PetscObject *) &fem);CHKERRQ(ierr);
       if (!effectiveHeight || auxBd) {subfem = fem;}
       else                           {ierr = PetscFEGetHeightSubspace(fem, effectiveHeight, &subfem);CHKERRQ(ierr);}
-      ierr = PetscFEGetTabulation(subfem, numPoints, points, &basisTabAux[f], &basisDerTabAux[f], NULL);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation(subfem, 1, numPoints, points, 1, &TAux[f]);CHKERRQ(ierr);
     }
   }
   /* Note: We make no attempt to optimize for height. Higher height things just overwrite the lower height results. */
   for (h = minHeight; h <= maxHeight; h++) {
     PetscInt     effectiveHeight = h - (auxBd ? 0 : minHeight);
-    PetscDS      probEff         = prob;
+    PetscDS      dsEff         = ds;
     PetscScalar *values;
     PetscBool   *fieldActive;
     PetscInt     maxDegree;
     PetscInt     pStart, pEnd, p, lStart, spDim, totDim, numValues;
     IS           heightIS;
 
-    ierr = DMPlexGetHeightStratum(dm, h, &pStart, &pEnd);CHKERRQ(ierr);
-    ierr = DMGetFirstLabelEntry_Private(dm, label, numIds, ids, h, &lStart, NULL);CHKERRQ(ierr);
+    /* Note we assume that dm and dmIn share the same topology */
+    ierr = DMPlexGetHeightStratum(plex, h, &pStart, &pEnd);CHKERRQ(ierr);
+    ierr = DMGetFirstLabelEntry_Private(dm, dm, label, numIds, ids, h, &lStart, NULL);CHKERRQ(ierr);
     ierr = DMLabelGetStratumIS(depthLabel, depth - h, &heightIS);CHKERRQ(ierr);
     if (!h) {
       PetscInt cEndInterior;
@@ -561,13 +629,13 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
       ierr = PetscDualSpaceGetDimension(sp[f], &spDim);CHKERRQ(ierr);
       totDim += spDim;
     }
-    ierr = DMPlexVecGetClosure(dm, section, localX, lStart < 0 ? pStart : lStart, &numValues, NULL);CHKERRQ(ierr);
+    ierr = DMPlexVecGetClosure(plex, section, localX, lStart < 0 ? pStart : lStart, &numValues, NULL);CHKERRQ(ierr);
     if (numValues != totDim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "The section point closure size %d != dual space dimension %d", numValues, totDim);
     if (!totDim) {
       ierr = ISDestroy(&heightIS);CHKERRQ(ierr);
       continue;
     }
-    if (effectiveHeight) {ierr = PetscDSGetHeightSubspace(prob, effectiveHeight, &probEff);CHKERRQ(ierr);}
+    if (effectiveHeight) {ierr = PetscDSGetHeightSubspace(ds, effectiveHeight, &dsEff);CHKERRQ(ierr);}
     /* Loop over points at this height */
     ierr = DMGetWorkArray(dm, numValues, MPIU_SCALAR, &values);CHKERRQ(ierr);
     ierr = DMGetWorkArray(dm, Nf, MPI_INT, &fieldActive);CHKERRQ(ierr);
@@ -601,21 +669,21 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
             ierr = PetscDualSpaceGetAllPointsUnion(Nf,sp,dim-h,funcs,&quad);CHKERRQ(ierr);
           }
         }
-        ierr = DMFieldCreateFEGeom(coordField,isectIS,quad,PETSC_FALSE,&fegeom);CHKERRQ(ierr);
+        ierr = DMFieldCreateFEGeom(coordField,isectIS,quad,(effectiveHeight && h == minHeight)?PETSC_TRUE:PETSC_FALSE,&fegeom);CHKERRQ(ierr);
         for (p = 0; p < n; ++p) {
           const PetscInt  point = points[p];
 
           ierr = PetscArrayzero(values, numValues);CHKERRQ(ierr);
           ierr = PetscFEGeomGetChunk(fegeom,p,p+1,&chunkgeom);CHKERRQ(ierr);
-          ierr = DMProjectPoint_Private(dm, probEff, chunkgeom, dmAux, probAux, effectiveHeight, time, localU, localA, hasFE, hasFV, isFE, sp, point, Ncc, comps, basisTab, basisDerTab, basisTabAux, basisDerTabAux, type, funcs, ctxs, fieldActive, values);
+          ierr = DMProjectPoint_Private(dm, dsEff, plexIn, encIn, dsIn, plexAux, encAux, dsAux, chunkgeom, effectiveHeight, time, localU, localA, hasFE, hasFV, isFE, sp, point, T, TAux, type, funcs, ctxs, fieldActive, values);
           if (ierr) {
             PetscErrorCode ierr2;
             ierr2 = DMRestoreWorkArray(dm, numValues, MPIU_SCALAR, &values);CHKERRQ(ierr2);
             ierr2 = DMRestoreWorkArray(dm, Nf, MPI_INT, &fieldActive);CHKERRQ(ierr2);
             CHKERRQ(ierr);
           }
-          if (transform) {ierr = DMPlexBasisTransformPoint_Internal(dm, tdm, tv, point, fieldActive, PETSC_FALSE, values);CHKERRQ(ierr);}
-          ierr = DMPlexVecSetFieldClosure_Internal(dm, section, localX, fieldActive, point, Ncc, comps, values, mode);CHKERRQ(ierr);
+          if (transform) {ierr = DMPlexBasisTransformPoint_Internal(plex, tdm, tv, point, fieldActive, PETSC_FALSE, values);CHKERRQ(ierr);}
+          ierr = DMPlexVecSetFieldClosure_Internal(plex, section, localX, fieldActive, point, Ncc, comps, values, mode);CHKERRQ(ierr);
         }
         ierr = PetscFEGeomRestoreChunk(fegeom,p,p+1,&chunkgeom);CHKERRQ(ierr);
         ierr = PetscFEGeomDestroy(&fegeom);CHKERRQ(ierr);
@@ -641,19 +709,19 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
           ierr = PetscDualSpaceGetAllPointsUnion(Nf,sp,dim-h,funcs,&quad);CHKERRQ(ierr);
         }
       }
-      ierr = DMFieldCreateFEGeom(coordField,pointIS,quad,PETSC_FALSE,&fegeom);CHKERRQ(ierr);
+      ierr = DMFieldCreateFEGeom(coordField,pointIS,quad,(effectiveHeight && h == minHeight)?PETSC_TRUE:PETSC_FALSE,&fegeom);CHKERRQ(ierr);
       for (p = pStart; p < pEnd; ++p) {
         ierr = PetscArrayzero(values, numValues);CHKERRQ(ierr);
         ierr = PetscFEGeomGetChunk(fegeom,p-pStart,p-pStart+1,&chunkgeom);CHKERRQ(ierr);
-        ierr = DMProjectPoint_Private(dm, probEff, chunkgeom, dmAux, probAux, effectiveHeight, time, localU, localA, hasFE, hasFV, isFE, sp, p, Ncc, comps, basisTab, basisDerTab, basisTabAux, basisDerTabAux, type, funcs, ctxs, fieldActive, values);
+        ierr = DMProjectPoint_Private(dm, dsEff, plexIn, encIn, dsIn, plexAux, encAux, dsAux, chunkgeom, effectiveHeight, time, localU, localA, hasFE, hasFV, isFE, sp, p, T, TAux, type, funcs, ctxs, fieldActive, values);
         if (ierr) {
           PetscErrorCode ierr2;
           ierr2 = DMRestoreWorkArray(dm, numValues, MPIU_SCALAR, &values);CHKERRQ(ierr2);
           ierr2 = DMRestoreWorkArray(dm, Nf, MPI_INT, &fieldActive);CHKERRQ(ierr2);
           CHKERRQ(ierr);
         }
-        if (transform) {ierr = DMPlexBasisTransformPoint_Internal(dm, tdm, tv, p, fieldActive, PETSC_FALSE, values);CHKERRQ(ierr);}
-        ierr = DMPlexVecSetFieldClosure_Internal(dm, section, localX, fieldActive, p, Ncc, comps, values, mode);CHKERRQ(ierr);
+        if (transform) {ierr = DMPlexBasisTransformPoint_Internal(plex, tdm, tv, p, fieldActive, PETSC_FALSE, values);CHKERRQ(ierr);}
+        ierr = DMPlexVecSetFieldClosure_Internal(plex, section, localX, fieldActive, p, Ncc, comps, values, mode);CHKERRQ(ierr);
       }
       ierr = PetscFEGeomRestoreChunk(fegeom,p-pStart,pStart-p+1,&chunkgeom);CHKERRQ(ierr);
       ierr = PetscFEGeomDestroy(&fegeom);CHKERRQ(ierr);
@@ -666,27 +734,32 @@ static PetscErrorCode DMProjectLocal_Generic_Plex(DM dm, PetscReal time, Vec loc
   }
   /* Cleanup */
   if (type == DM_BC_ESSENTIAL_FIELD || type == DM_BC_NATURAL_FIELD) {
-    PetscInt effectiveHeight = auxBd ? minHeight : 0;
-    PetscFE  fem, subfem;
+    PetscInt  effectiveHeight = auxBd ? minHeight : 0;
+    PetscFE   fem, subfem;
+    PetscBool isfe;
 
-    for (f = 0; f < Nf; ++f) {
-      if (!isFE[f]) continue;
-      ierr = PetscDSGetDiscretization(prob, f, (PetscObject *) &fem);CHKERRQ(ierr);
+    for (f = 0; f < NfIn; ++f) {
+      ierr = PetscDSIsFE_Internal(dsIn, f, &isfe);CHKERRQ(ierr);
+      ierr = PetscDSGetDiscretization(dsIn, f, (PetscObject *) &fem);CHKERRQ(ierr);
       if (!effectiveHeight) {subfem = fem;}
       else                  {ierr = PetscFEGetHeightSubspace(fem, effectiveHeight, &subfem);CHKERRQ(ierr);}
-      ierr = PetscFERestoreTabulation(subfem, 0, NULL, &basisTab[f], &basisDerTab[f], NULL);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&T[f]);CHKERRQ(ierr);
     }
     for (f = 0; f < NfAux; ++f) {
-      ierr = PetscDSGetDiscretization(probAux, f, (PetscObject *) &fem);CHKERRQ(ierr);
+      ierr = PetscDSIsFE_Internal(dsAux, f, &isfe);CHKERRQ(ierr);
+      ierr = PetscDSGetDiscretization(dsAux, f, (PetscObject *) &fem);CHKERRQ(ierr);
       if (!effectiveHeight || auxBd) {subfem = fem;}
       else                           {ierr = PetscFEGetHeightSubspace(fem, effectiveHeight, &subfem);CHKERRQ(ierr);}
-      ierr = PetscFERestoreTabulation(subfem, 0, NULL, &basisTabAux[f], &basisDerTabAux[f], NULL);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&TAux[f]);CHKERRQ(ierr);
     }
-    ierr = PetscFree4(basisTab, basisDerTab, basisTabAux, basisDerTabAux);CHKERRQ(ierr);
+    ierr = PetscFree2(T, TAux);CHKERRQ(ierr);
   }
   ierr = PetscQuadratureDestroy(&allPoints);CHKERRQ(ierr);
   ierr = PetscFree2(isFE, sp);CHKERRQ(ierr);
   if (maxHeight > 0) {ierr = PetscFree(cellsp);CHKERRQ(ierr);}
+  ierr = DMDestroy(&plex);CHKERRQ(ierr);
+  ierr = DMDestroy(&plexIn);CHKERRQ(ierr);
+  if (dmAux) {ierr = DMDestroy(&plexAux);CHKERRQ(ierr);}
   PetscFunctionReturn(0);
 }
 
diff --git a/src/dm/impls/plex/plexrefine.c b/src/dm/impls/plex/plexrefine.c
index 385bdbf20c8..7270cbdfdcc 100644
--- a/src/dm/impls/plex/plexrefine.c
+++ b/src/dm/impls/plex/plexrefine.c
@@ -247,11 +247,10 @@ PetscErrorCode CellRefinerGetAffineTransforms_Internal(CellRefiner refiner, Pets
      |         |         |       |         |         |
      0---------0---------3       4---------0---------5
      */
-    break;
     dim = 3;
     if (numSubcells) *numSubcells = 8;
     if (v0) {
-      ierr = PetscMalloc3(4*dim,&v,4*dim*dim,&j,4*dim*dim,&invj);CHKERRQ(ierr);
+      ierr = PetscMalloc3(8*dim,&v,8*dim*dim,&j,8*dim*dim,&invj);CHKERRQ(ierr);
       /* A */
       v[0+0] = -1.0; v[0+1] = -1.0; v[0+2] = -1.0;
       j[0+0] =  0.5; j[0+1] =  0.0; j[0+2] =  0.0;
@@ -297,6 +296,7 @@ PetscErrorCode CellRefinerGetAffineTransforms_Internal(CellRefiner refiner, Pets
         DMPlex_Invert3D_Internal(&invj[s*dim*dim], &j[s*dim*dim], detJ);
       }
     }
+    break;
   default:
     SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown cell refiner %s", CellRefiners[refiner]);
   }
@@ -333,6 +333,9 @@ PetscErrorCode CellRefinerInCellTest_Internal(CellRefiner refiner, const PetscRe
   case REFINER_HEX_2D:
     for (d = 0; d < 2; ++d) if ((point[d] < -1.00000000001) || (point[d] > 1.000000000001)) {*inside = PETSC_FALSE; break;}
     break;
+  case REFINER_HEX_3D:
+    for (d = 0; d < 3; d++) if (PetscAbsReal(point[d]) > 1 + PETSC_SMALL) {*inside = PETSC_FALSE; break;}
+    break;
   default:
     SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown cell refiner %s", CellRefiners[refiner]);
   }
@@ -531,7 +534,7 @@ static PetscErrorCode DMLabelSetStratumBounds(DMLabel label, PetscInt value, Pet
 static PetscErrorCode CellRefinerSetConeSizes(CellRefiner refiner, DM dm, PetscInt depthSize[], DM rdm)
 {
   PetscInt       depth, cStart, cStartNew, cEnd, cEndNew, cMax, c, vStart, vStartNew, vEnd, vEndNew, vMax, v, fStart, fStartNew, fEnd, fEndNew, fMax, f, eStart, eStartNew, eEnd, eEndNew, eMax, e, r;
-  DMLabel        depthLabel;
+  DMLabel        depthLabel, celltypeLabel;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
@@ -1727,6 +1730,12 @@ static PetscErrorCode CellRefinerSetConeSizes(CellRefiner refiner, DM dm, PetscI
   default:
     SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown cell refiner %s", CellRefiners[refiner]);
   }
+  {
+    DM_Plex *plex = (DM_Plex *) rdm->data;
+
+    ierr = DMPlexGetCellTypeLabel(rdm, &celltypeLabel);CHKERRQ(ierr);
+    ierr = PetscObjectStateGet((PetscObject) celltypeLabel, &plex->celltypeState);CHKERRQ(ierr);
+  }
   PetscFunctionReturn(0);
 }
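
The hunk above caches the object state of the refined mesh's "celltype" label in the DM_Plex struct. A minimal sketch of the same pattern, assuming a hypothetical helper name CheckCellTypeState and that the cached state came from an earlier PetscObjectStateGet() call:

#include <petscdmplex.h>

/* Hypothetical helper: report whether the "celltype" label changed since cachedState was recorded. */
static PetscErrorCode CheckCellTypeState(DM dm, PetscObjectState cachedState, PetscBool *changed)
{
  DMLabel          ctLabel;
  PetscObjectState state;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetCellTypeLabel(dm, &ctLabel);CHKERRQ(ierr);
  ierr = PetscObjectStateGet((PetscObject) ctLabel, &state);CHKERRQ(ierr);
  *changed = (state != cachedState) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
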
 
@@ -3488,7 +3497,7 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
     break;
   case REFINER_SIMPLEX_3D:
     /* All cells have 4 faces: Tet face order is prescribed in DMPlexGetFaces_Internal() */
-    ierr = DMPlexGetRawFaces_Internal(dm, 3, 4, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFaces_Internal(dm, DM_POLYTOPE_TETRAHEDRON, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
     for (c = cStart; c < cEnd; ++c) {
       const PetscInt  newp = cStartNew + (c - cStart)*8;
       const PetscInt *cone, *ornt;
@@ -4130,12 +4139,12 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
 #endif
     }
     ierr = PetscFree(supportRef);CHKERRQ(ierr);
-    ierr = DMPlexRestoreFaces_Internal(dm, 3, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreFaces_Internal(dm, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
     break;
   case REFINER_HYBRID_SIMPLEX_3D:
     ierr = DMPlexGetHybridBounds(rdm, &cMaxNew, &fMaxNew, &eMaxNew, NULL);CHKERRQ(ierr);
     /* Interior cells have 4 faces: Tet face order is prescribed in DMPlexGetFaces_Internal() */
-    ierr = DMPlexGetRawFaces_Internal(dm, 3, 4, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFaces_Internal(dm, DM_POLYTOPE_TETRAHEDRON, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
     for (c = cStart; c < cMax; ++c) {
       const PetscInt  newp = cStartNew + (c - cStart)*8;
       const PetscInt *cone, *ornt;
@@ -5018,10 +5027,10 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
 #endif
     }
     ierr = PetscFree(supportRef);CHKERRQ(ierr);
-    ierr = DMPlexRestoreFaces_Internal(dm, 3, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreFaces_Internal(dm, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
     break;
   case REFINER_SIMPLEX_TO_HEX_3D:
-    ierr = DMPlexGetRawFaces_Internal(dm, 3, 4, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFaces_Internal(dm, DM_POLYTOPE_TETRAHEDRON, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
     /* All cells have 6 faces */
     for (c = cStart; c < cEnd; ++c) {
       const PetscInt  newp = cStartNew + (c - cStart)*4;
@@ -5583,7 +5592,7 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
 #endif
     }
     ierr = PetscFree(supportRef);CHKERRQ(ierr);
-    ierr = DMPlexRestoreFaces_Internal(dm, 3, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreFaces_Internal(dm, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
     break;
   case REFINER_HYBRID_SIMPLEX_TO_HEX_3D:
     if (cMax < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No cell maximum specified in hybrid mesh");
@@ -5592,7 +5601,7 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
     fMax = PetscMin(fEnd, fMax);
     if (eMax < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No face maximum specified in hybrid mesh");
     eMax = PetscMin(eEnd, eMax);
-    ierr = DMPlexGetRawFaces_Internal(dm, 3, 4, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexGetRawFaces_Internal(dm, DM_POLYTOPE_TETRAHEDRON, cellInd, NULL, NULL, &faces);CHKERRQ(ierr);
     /* All cells have 6 faces */
     for (c = cStart; c < cMax; ++c) {
       const PetscInt  newp = cStartNew + (c - cStart)*4;
@@ -6580,7 +6589,7 @@ static PetscErrorCode CellRefinerSetCones(CellRefiner refiner, DM dm, PetscInt d
 #endif
     }
     ierr = PetscFree(supportRef);CHKERRQ(ierr);
-    ierr = DMPlexRestoreFaces_Internal(dm, 3, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
+    ierr = DMPlexRestoreFaces_Internal(dm, cStart, NULL, NULL, &faces);CHKERRQ(ierr);
     break;
   case REFINER_HEX_3D:
     /*
@@ -8755,6 +8764,7 @@ static PetscErrorCode CellRefinerSetCoordinates(CellRefiner refiner, DM dm, Pets
         ierr = DMLocalizeCoordinate_Internal(dm, spaceDim, &coords[offA], &coords[offB], &coordsNew[offnew]);CHKERRQ(ierr);
         for (d = 0; d < spaceDim; ++d) {
           coordsNew[offnew+d] = 0.5*(coords[offA+d] + coordsNew[offnew+d]);
         }
+        ierr = DMPlexSnapToGeomModel(dm, e, &coordsNew[offnew], &coordsNew[offnew]);CHKERRQ(ierr);
       } else {
         for (d = 0; d < spaceDim; ++d) coordsNew[offnew+d] = PETSC_MIN_REAL;
@@ -9683,7 +9693,7 @@ static PetscErrorCode CellRefinerCreateLabels(CellRefiner refiner, DM dm, PetscI
   for (l = 0; l < numLabels; ++l) {
     DMLabel         label, labelNew;
     const char     *lname;
-    PetscBool       isDepth;
+    PetscBool       isDepth, isCellType;
     IS              valueIS;
     const PetscInt *values;
     PetscInt        defVal;
@@ -9692,6 +9702,8 @@ static PetscErrorCode CellRefinerCreateLabels(CellRefiner refiner, DM dm, PetscI
     ierr = DMGetLabelName(dm, l, &lname);CHKERRQ(ierr);
     ierr = PetscStrcmp(lname, "depth", &isDepth);CHKERRQ(ierr);
     if (isDepth) continue;
+    ierr = PetscStrcmp(lname, "celltype", &isCellType);CHKERRQ(ierr);
+    if (isCellType) continue;
     ierr = DMCreateLabel(rdm, lname);CHKERRQ(ierr);
     ierr = DMGetLabel(dm, lname, &label);CHKERRQ(ierr);
     ierr = DMGetLabel(rdm, lname, &labelNew);CHKERRQ(ierr);
@@ -10423,59 +10435,31 @@ PetscErrorCode DMPlexGetRefinementFunction(DM dm, PetscErrorCode (**refinementFu
 
 PetscErrorCode DMPlexGetCellRefiner_Internal(DM dm, CellRefiner *cellRefiner)
 {
-  PetscInt       dim, cStart, cEnd, coneSize, cMax, fMax;
+  DMPolytopeType ct;
+  PetscInt       dim, cStart, cEnd, cMax, fMax;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
   ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
   if (cEnd <= cStart) {*cellRefiner = REFINER_NOOP; PetscFunctionReturn(0);}
-  ierr = DMPlexGetConeSize(dm, cStart, &coneSize);CHKERRQ(ierr);
   ierr = DMPlexGetHybridBounds(dm, &cMax, &fMax, NULL, NULL);CHKERRQ(ierr);
-  switch (dim) {
-  case 1:
-    switch (coneSize) {
-    case 2:
-      *cellRefiner = REFINER_SIMPLEX_1D;
-      break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown coneSize %D in dimension %D for cell refiner", coneSize, dim);
-    }
-    break;
-  case 2:
-    switch (coneSize) {
-    case 3:
-      if (cMax >= 0) *cellRefiner = REFINER_HYBRID_SIMPLEX_2D;
-      else *cellRefiner = REFINER_SIMPLEX_2D;
-      break;
-    case 4:
-      if (cMax >= 0 && fMax >= 0) *cellRefiner = REFINER_HYBRID_HEX_2D;
-      else *cellRefiner = REFINER_HEX_2D;
-      break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown coneSize %D in dimension %D for cell refiner", coneSize, dim);
-    }
-    break;
-  case 3:
-    switch (coneSize) {
-    case 4:
-      if (cMax >= 0) *cellRefiner = REFINER_HYBRID_SIMPLEX_3D;
-      else *cellRefiner = REFINER_SIMPLEX_3D;
-      break;
-    case 5:
-      if (cMax == 0) *cellRefiner = REFINER_HYBRID_SIMPLEX_3D;
-      else SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown coneSize %D in dimension %D for cell refiner", coneSize, dim);
-      break;
-    case 6:
-      if (cMax >= 0) *cellRefiner = REFINER_HYBRID_HEX_3D;
-      else *cellRefiner = REFINER_HEX_3D;
-      break;
-    default:
-      SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown coneSize %D in dimension %D for cell refiner", coneSize, dim);
-    }
-    break;
-  default:
-    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown dimension %D for cell refiner", dim);
+  /* TODO Must tag hybrid cells with correct cell types */
+  ierr = DMPlexGetCellType(dm, cStart, &ct);CHKERRQ(ierr);
+  switch (ct) {
+    case DM_POLYTOPE_SEGMENT:       *cellRefiner = REFINER_SIMPLEX_1D; break;
+    case DM_POLYTOPE_TRIANGLE:      if (cMax >= 0) {*cellRefiner = REFINER_HYBRID_SIMPLEX_2D; break;}
+                                    else           {*cellRefiner = REFINER_SIMPLEX_2D; break;}
+    case DM_POLYTOPE_QUADRILATERAL: if (cMax >= 0) {*cellRefiner = REFINER_HYBRID_HEX_2D; break;} /* Why did this have fMax >= 0 ??? */
+                                    else           {*cellRefiner = REFINER_HEX_2D; break;}
+    case DM_POLYTOPE_TETRAHEDRON:   if (cMax >= 0) {*cellRefiner = REFINER_HYBRID_SIMPLEX_3D; break;}
+                                    else           {*cellRefiner = REFINER_SIMPLEX_3D; break;}
+    case DM_POLYTOPE_HEXAHEDRON:    if (cMax >= 0) {*cellRefiner = REFINER_HYBRID_HEX_3D; break;}
+                                    else           {*cellRefiner = REFINER_HEX_3D; break;}
+    case DM_POLYTOPE_SEG_PRISM_TENSOR:  *cellRefiner = REFINER_HYBRID_SIMPLEX_2D; break;
+    case DM_POLYTOPE_TRI_PRISM_TENSOR:  *cellRefiner = REFINER_HYBRID_SIMPLEX_3D; break;
+    case DM_POLYTOPE_QUAD_PRISM_TENSOR: *cellRefiner = REFINER_HYBRID_HEX_3D; break;
+    default: SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No cell refiner for cell %D with type %s", cStart, DMPolytopeTypes[ct]);
   }
   PetscFunctionReturn(0);
 }
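
DMPlexGetCellRefiner_Internal() now keys off the polytope type of the first cell rather than its cone size. A rough sketch of the same dispatch pattern; the helper name and the printed strings are illustrative only:

#include <petscdmplex.h>

/* Illustrative helper: classify a mesh by the polytope type of its first cell. */
static PetscErrorCode ReportFirstCellType(DM dm)
{
  DMPolytopeType ct;
  PetscInt       cStart, cEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
  if (cEnd <= cStart) PetscFunctionReturn(0);
  ierr = DMPlexGetCellType(dm, cStart, &ct);CHKERRQ(ierr);
  switch (ct) {
    case DM_POLYTOPE_TRIANGLE:
    case DM_POLYTOPE_TETRAHEDRON:
      ierr = PetscPrintf(PETSC_COMM_SELF, "simplex mesh: %s\n", DMPolytopeTypes[ct]);CHKERRQ(ierr); break;
    case DM_POLYTOPE_QUADRILATERAL:
    case DM_POLYTOPE_HEXAHEDRON:
      ierr = PetscPrintf(PETSC_COMM_SELF, "tensor-cell mesh: %s\n", DMPolytopeTypes[ct]);CHKERRQ(ierr); break;
    default:
      ierr = PetscPrintf(PETSC_COMM_SELF, "other cell type: %s\n", DMPolytopeTypes[ct]);CHKERRQ(ierr); break;
  }
  PetscFunctionReturn(0);
}
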
diff --git a/src/dm/impls/plex/plexreorder.c b/src/dm/impls/plex/plexreorder.c
index ad6f75a33aa..d85d32ce646 100644
--- a/src/dm/impls/plex/plexreorder.c
+++ b/src/dm/impls/plex/plexreorder.c
@@ -180,44 +180,6 @@ PetscErrorCode DMPlexPermute(DM dm, IS perm, DM *pdm)
   /* Ignore ltogmap, ltogmapb */
   /* Ignore sf, sectionSF */
   /* Ignore globalVertexNumbers, globalCellNumbers */
-  /* Remap coordinates */
-  {
-    DM              cdm, cdmNew;
-    PetscSection    csection, csectionNew;
-    Vec             coordinates, coordinatesNew;
-    PetscScalar    *coords, *coordsNew;
-    const PetscInt *pperm;
-    PetscInt        pStart, pEnd, p;
-    const char     *name;
-
-    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
-    ierr = DMGetLocalSection(cdm, &csection);CHKERRQ(ierr);
-    ierr = PetscSectionPermute(csection, perm, &csectionNew);CHKERRQ(ierr);
-    ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
-    ierr = VecDuplicate(coordinates, &coordinatesNew);CHKERRQ(ierr);
-    ierr = PetscObjectGetName((PetscObject)coordinates,&name);CHKERRQ(ierr);
-    ierr = PetscObjectSetName((PetscObject)coordinatesNew,name);CHKERRQ(ierr);
-    ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
-    ierr = VecGetArray(coordinatesNew, &coordsNew);CHKERRQ(ierr);
-    ierr = PetscSectionGetChart(csectionNew, &pStart, &pEnd);CHKERRQ(ierr);
-    ierr = ISGetIndices(perm, &pperm);CHKERRQ(ierr);
-    for (p = pStart; p < pEnd; ++p) {
-      PetscInt dof, off, offNew, d;
-
-      ierr = PetscSectionGetDof(csectionNew, p, &dof);CHKERRQ(ierr);
-      ierr = PetscSectionGetOffset(csection, p, &off);CHKERRQ(ierr);
-      ierr = PetscSectionGetOffset(csectionNew, pperm[p], &offNew);CHKERRQ(ierr);
-      for (d = 0; d < dof; ++d) coordsNew[offNew+d] = coords[off+d];
-    }
-    ierr = ISRestoreIndices(perm, &pperm);CHKERRQ(ierr);
-    ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
-    ierr = VecRestoreArray(coordinatesNew, &coordsNew);CHKERRQ(ierr);
-    ierr = DMGetCoordinateDM(*pdm, &cdmNew);CHKERRQ(ierr);
-    ierr = DMSetLocalSection(cdmNew, csectionNew);CHKERRQ(ierr);
-    ierr = DMSetCoordinatesLocal(*pdm, coordinatesNew);CHKERRQ(ierr);
-    ierr = PetscSectionDestroy(&csectionNew);CHKERRQ(ierr);
-    ierr = VecDestroy(&coordinatesNew);CHKERRQ(ierr);
-  }
   /* Reorder labels */
   {
     PetscInt numLabels, l;
@@ -230,6 +192,7 @@ PetscErrorCode DMPlexPermute(DM dm, IS perm, DM *pdm)
       ierr = DMAddLabel(*pdm, labelNew);CHKERRQ(ierr);
       ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
     }
+    ierr = DMGetLabel(*pdm, "depth", &(*pdm)->depthLabel);CHKERRQ(ierr);
     if (plex->subpointMap) {ierr = DMLabelPermute(plex->subpointMap, perm, &plexNew->subpointMap);CHKERRQ(ierr);}
   }
   /* Reorder topology */
@@ -275,7 +238,44 @@ PetscErrorCode DMPlexPermute(DM dm, IS perm, DM *pdm)
     }
     ierr = ISRestoreIndices(perm, &pperm);CHKERRQ(ierr);
   }
-  ierr = DMCopyDisc(dm, *pdm);CHKERRQ(ierr);
+  /* Remap coordinates */
+  {
+    DM              cdm, cdmNew;
+    PetscSection    csection, csectionNew;
+    Vec             coordinates, coordinatesNew;
+    PetscScalar    *coords, *coordsNew;
+    const PetscInt *pperm;
+    PetscInt        pStart, pEnd, p;
+    const char     *name;
+
+    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
+    ierr = DMGetLocalSection(cdm, &csection);CHKERRQ(ierr);
+    ierr = PetscSectionPermute(csection, perm, &csectionNew);CHKERRQ(ierr);
+    ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
+    ierr = VecDuplicate(coordinates, &coordinatesNew);CHKERRQ(ierr);
+    ierr = PetscObjectGetName((PetscObject)coordinates,&name);CHKERRQ(ierr);
+    ierr = PetscObjectSetName((PetscObject)coordinatesNew,name);CHKERRQ(ierr);
+    ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
+    ierr = VecGetArray(coordinatesNew, &coordsNew);CHKERRQ(ierr);
+    ierr = PetscSectionGetChart(csectionNew, &pStart, &pEnd);CHKERRQ(ierr);
+    ierr = ISGetIndices(perm, &pperm);CHKERRQ(ierr);
+    for (p = pStart; p < pEnd; ++p) {
+      PetscInt dof, off, offNew, d;
+
+      ierr = PetscSectionGetDof(csectionNew, p, &dof);CHKERRQ(ierr);
+      ierr = PetscSectionGetOffset(csection, p, &off);CHKERRQ(ierr);
+      ierr = PetscSectionGetOffset(csectionNew, pperm[p], &offNew);CHKERRQ(ierr);
+      for (d = 0; d < dof; ++d) coordsNew[offNew+d] = coords[off+d];
+    }
+    ierr = ISRestoreIndices(perm, &pperm);CHKERRQ(ierr);
+    ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
+    ierr = VecRestoreArray(coordinatesNew, &coordsNew);CHKERRQ(ierr);
+    ierr = DMGetCoordinateDM(*pdm, &cdmNew);CHKERRQ(ierr);
+    ierr = DMSetLocalSection(cdmNew, csectionNew);CHKERRQ(ierr);
+    ierr = DMSetCoordinatesLocal(*pdm, coordinatesNew);CHKERRQ(ierr);
+    ierr = PetscSectionDestroy(&csectionNew);CHKERRQ(ierr);
+    ierr = VecDestroy(&coordinatesNew);CHKERRQ(ierr);
+  }
   (*pdm)->setupcalled = PETSC_TRUE;
   PetscFunctionReturn(0);
 }
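
The reordering above moves the coordinate remap after the topology reorder and records the depth label on the permuted DM. For context, a typical caller pairs DMPlexPermute() with an ordering IS; the sketch below assumes DMPlexGetOrdering(dm, MATORDERINGRCM, NULL, &perm) is the companion call, which is how this path is usually driven:

#include <petscdmplex.h>
#include <petscmat.h>

/* Sketch: produce an RCM-permuted copy of a Plex mesh. */
static PetscErrorCode PermuteRCM(DM dm, DM *pdm)
{
  IS             perm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetOrdering(dm, MATORDERINGRCM, NULL, &perm);CHKERRQ(ierr);
  ierr = DMPlexPermute(dm, perm, pdm);CHKERRQ(ierr);
  ierr = ISDestroy(&perm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
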
diff --git a/src/dm/impls/plex/plexsection.c b/src/dm/impls/plex/plexsection.c
index b0068a3e24a..6cc0f4ba9b3 100644
--- a/src/dm/impls/plex/plexsection.c
+++ b/src/dm/impls/plex/plexsection.c
@@ -99,8 +99,8 @@ static PetscErrorCode DMPlexCreateSectionDof(DM dm, DMLabel label[],const PetscI
 
     ierr = DMGetField(dm, f, NULL, &obj);CHKERRQ(ierr);
     ierr = PetscObjectGetClassId(obj, &id);CHKERRQ(ierr);
-    if (id == PETSCFE_CLASSID)      {isFE[f] = PETSC_TRUE;}
-    else if (id == PETSCFV_CLASSID) {isFE[f] = PETSC_FALSE;}
+    /* User is allowed to put a "placeholder" field in (c.f. DMCreateDS) */
+    isFE[f] = id == PETSCFE_CLASSID ? PETSC_TRUE : PETSC_FALSE;
   }
 
   ierr = PetscMalloc1(depth+1, &pMax);CHKERRQ(ierr);
@@ -167,10 +167,13 @@ static PetscErrorCode DMPlexCreateSectionBCDof(DM dm, PetscInt numBC, const Pets
     PetscInt        field = 0;
     const PetscInt *comp;
     const PetscInt *idx;
-    PetscInt        Nc = -1, n, i;
+    PetscInt        Nc = 0, cNc = -1, n, i;
 
-    if (Nf) field = bcField[bc];
-    if (bcComps && bcComps[bc]) {ierr = ISGetLocalSize(bcComps[bc], &Nc);CHKERRQ(ierr);}
+    if (Nf) {
+      field = bcField[bc];
+      ierr = PetscSectionGetFieldComponents(section, field, &Nc);CHKERRQ(ierr);
+    }
+    if (bcComps && bcComps[bc]) {ierr = ISGetLocalSize(bcComps[bc], &cNc);CHKERRQ(ierr);}
     if (bcComps && bcComps[bc]) {ierr = ISGetIndices(bcComps[bc], &comp);CHKERRQ(ierr);}
     ierr = ISGetLocalSize(bcPoints[bc], &n);CHKERRQ(ierr);
     ierr = ISGetIndices(bcPoints[bc], &idx);CHKERRQ(ierr);
@@ -183,9 +186,17 @@ static PetscErrorCode DMPlexCreateSectionBCDof(DM dm, PetscInt numBC, const Pets
       } else {
         ierr = PetscSectionGetDof(section, p, &numConst);CHKERRQ(ierr);
       }
-      /* If Nc < 0, constrain every dof on the point */
-      /* TODO: Matt, this only works if there is one node on the point.  We need to handle numDofs > NumComponents */
-      if (Nc > 0) numConst = PetscMin(numConst, Nc);
+      /* If cNc <= 0, constrain every dof on the point */
+      if (cNc > 0) {
+        /* We assume that a point may have multiple "nodes", which are collections of Nc dofs,
+           and that those dofs are numbered n*Nc+c */
+        if (Nf) {
+          if (numConst % Nc) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Point %D has %D dof which is not divisible by %D field components", p, numConst, Nc);
+          numConst = (numConst/Nc) * cNc;
+        } else {
+          numConst = PetscMin(numConst, cNc);
+        }
+      }
       if (Nf) {ierr = PetscSectionAddFieldConstraintDof(section, p, field, numConst);CHKERRQ(ierr);}
       ierr = PetscSectionAddConstraintDof(section, p, numConst);CHKERRQ(ierr);
     }
@@ -238,27 +249,31 @@ static PetscErrorCode DMPlexCreateSectionBCIndicesField(DM dm, PetscInt numBC,co
   for (bc = 0; bc < numBC; ++bc) {
     const PetscInt  field = bcField[bc];
     const PetscInt *comp, *idx;
-    PetscInt        Nc = -1, n, i;
+    PetscInt        Nc, cNc = -1, n, i;
 
-    if (bcComps && bcComps[bc]) {ierr = ISGetLocalSize(bcComps[bc], &Nc);CHKERRQ(ierr);}
+    ierr = PetscSectionGetFieldComponents(section, field, &Nc);CHKERRQ(ierr);
+    if (bcComps && bcComps[bc]) {ierr = ISGetLocalSize(bcComps[bc], &cNc);CHKERRQ(ierr);}
     if (bcComps && bcComps[bc]) {ierr = ISGetIndices(bcComps[bc], &comp);CHKERRQ(ierr);}
     ierr = ISGetLocalSize(bcPoints[bc], &n);CHKERRQ(ierr);
     ierr = ISGetIndices(bcPoints[bc], &idx);CHKERRQ(ierr);
     for (i = 0; i < n; ++i) {
       const PetscInt  p = idx[i];
       const PetscInt *find;
-      PetscInt        fdof, fcdof, c;
+      PetscInt        fdof, fcdof, c, j;
 
       ierr = PetscSectionGetFieldDof(section, p, field, &fdof);CHKERRQ(ierr);
       if (!fdof) continue;
-      if (Nc < 0) {
+      if (cNc < 0) {
         for (d = 0; d < fdof; ++d) indices[d] = d;
         fcdof = fdof;
       } else {
+        /* We assume that a point may have multiple "nodes", which are collections of Nc dofs,
+           and that those dofs are numbered n*Nc+c */
         ierr = PetscSectionGetFieldConstraintDof(section, p, field, &fcdof);CHKERRQ(ierr);
         ierr = PetscSectionGetFieldConstraintIndices(section, p, field, &find);CHKERRQ(ierr);
+        /* Get indices constrained by previous bcs */
         for (d = 0; d < fcdof; ++d) {if (find[d] < 0) break; indices[d] = find[d];}
-        for (c = 0; c < Nc; ++c) indices[d++] = comp[c];
+        for (j = 0; j < fdof/Nc; ++j) for (c = 0; c < cNc; ++c) indices[d++] = j*Nc + comp[c];
         ierr = PetscSortRemoveDupsInt(&d, indices);CHKERRQ(ierr);
         for (c = d; c < fcdof; ++c) indices[c] = -1;
         fcdof = d;
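
The loop above relies on the node-blocked numbering stated in the comment: a point carrying fdof dofs is treated as fdof/Nc nodes of Nc components each, so constraining components comp[0..cNc-1] selects the indices j*Nc + comp[c]. A tiny standalone check of that arithmetic, with values chosen only for illustration:

#include <stdio.h>

int main(void)
{
  const int Nc      = 3;       /* components per node          */
  const int fdof    = 6;       /* dofs on the point => 2 nodes */
  const int cNc     = 2;       /* number of constrained components */
  const int comp[2] = {0, 2};  /* which components are fixed   */
  int       j, c;

  for (j = 0; j < fdof/Nc; ++j)
    for (c = 0; c < cNc; ++c)
      printf("%d ", j*Nc + comp[c]);  /* prints: 0 2 3 5 */
  printf("\n");
  return 0;
}
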
@@ -343,9 +358,9 @@ static PetscErrorCode DMPlexCreateSectionBCIndices(DM dm, PetscSection section)
 
   Input Parameters:
 + dm        - The DMPlex object
+. label     - The label indicating the mesh support of each field, or NULL for the whole mesh
 . numComp   - An array of size numFields that holds the number of components for each field
 . numDof    - An array of size numFields*(dim+1) which holds the number of dof for each field on a mesh piece of dimension d
-. label     - The label indicating the mesh support of each field, or NULL for the whole mesh
 . numBC     - The number of boundary conditions
 . bcField   - An array of size numBC giving the field number for each boundary condition
 . bcComps   - [Optional] An array of size numBC giving an IS holding the field components to which each boundary condition applies
@@ -363,9 +378,6 @@ static PetscErrorCode DMPlexCreateSectionBCIndices(DM dm, PetscSection section)
 
   Level: developer
 
-  Fortran Notes:
-  A Fortran 90 version is available as DMPlexCreateSectionF90()
-
   TODO: How is this related to DMCreateLocalSection()
 
 .seealso: DMPlexCreate(), PetscSectionCreate(), PetscSectionSetPermutation()
diff --git a/src/dm/impls/plex/plexsubmesh.c b/src/dm/impls/plex/plexsubmesh.c
index 2ea4e800269..b7bae96d46d 100644
--- a/src/dm/impls/plex/plexsubmesh.c
+++ b/src/dm/impls/plex/plexsubmesh.c
@@ -172,7 +172,7 @@ PetscErrorCode DMPlexLabelComplete(DM dm, DMLabel label)
 }
 
 /*@
-  DMPlexLabelAddCells - Starting with a label marking faces on a surface, we add a cell for each face
+  DMPlexLabelAddCells - Starting with a label marking points on a surface, we add a cell for each point
 
   Input Parameters:
 + dm - The DM
@@ -185,7 +185,7 @@ PetscErrorCode DMPlexLabelComplete(DM dm, DMLabel label)
 
   Note: The cells allow FEM boundary conditions to be applied using the cell geometry
 
-.seealso: DMPlexLabelComplete(), DMPlexLabelCohesiveComplete()
+.seealso: DMPlexLabelAddFaceCells(), DMPlexLabelComplete(), DMPlexLabelCohesiveComplete()
 @*/
 PetscErrorCode DMPlexLabelAddCells(DM dm, DMLabel label)
 {
@@ -209,12 +209,12 @@ PetscErrorCode DMPlexLabelAddCells(DM dm, DMLabel label)
     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
     for (p = 0; p < numPoints; ++p) {
       PetscInt *closure = NULL;
-      PetscInt  closureSize, point, cl;
+      PetscInt  closureSize, cl;
 
       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_FALSE, &closureSize, &closure);CHKERRQ(ierr);
       for (cl = closureSize-1; cl > 0; --cl) {
-        point = closure[cl*2];
-        if ((point >= cStart) && (point < cEnd)) {ierr = DMLabelSetValue(label, point, values[v]);CHKERRQ(ierr); break;}
+        const PetscInt cell = closure[cl*2];
+        if ((cell >= cStart) && (cell < cEnd)) {ierr = DMLabelSetValue(label, cell, values[v]);CHKERRQ(ierr); break;}
       }
       ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_FALSE, &closureSize, &closure);CHKERRQ(ierr);
     }
@@ -226,6 +226,64 @@ PetscErrorCode DMPlexLabelAddCells(DM dm, DMLabel label)
   PetscFunctionReturn(0);
 }
 
+/*@
+  DMPlexLabelAddFaceCells - Starting with a label marking faces on a surface, we add a cell for each face
+
+  Input Parameters:
++ dm - The DM
+- label - A DMLabel marking the surface points
+
+  Output Parameter:
+. label - A DMLabel incorporating cells
+
+  Level: developer
+
+  Note: The cells allow FEM boundary conditions to be applied using the cell geometry
+
+.seealso: DMPlexLabelAddCells(), DMPlexLabelComplete(), DMPlexLabelCohesiveComplete()
+@*/
+PetscErrorCode DMPlexLabelAddFaceCells(DM dm, DMLabel label)
+{
+  IS              valueIS;
+  const PetscInt *values;
+  PetscInt        numValues, v, cStart, cEnd, fStart, fEnd;
+  PetscErrorCode  ierr;
+
+  PetscFunctionBegin;
+  ierr = DMPlexGetInteriorCellStratum(dm, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
+  ierr = DMLabelGetNumValues(label, &numValues);CHKERRQ(ierr);
+  ierr = DMLabelGetValueIS(label, &valueIS);CHKERRQ(ierr);
+  ierr = ISGetIndices(valueIS, &values);CHKERRQ(ierr);
+  for (v = 0; v < numValues; ++v) {
+    IS              pointIS;
+    const PetscInt *points;
+    PetscInt        numPoints, p;
+
+    ierr = DMLabelGetStratumSize(label, values[v], &numPoints);CHKERRQ(ierr);
+    ierr = DMLabelGetStratumIS(label, values[v], &pointIS);CHKERRQ(ierr);
+    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
+    for (p = 0; p < numPoints; ++p) {
+      const PetscInt face = points[p];
+      PetscInt      *closure = NULL;
+      PetscInt       closureSize, cl;
+
+      if ((face < fStart) || (face >= fEnd)) continue;
+      ierr = DMPlexGetTransitiveClosure(dm, face, PETSC_FALSE, &closureSize, &closure);CHKERRQ(ierr);
+      for (cl = closureSize-1; cl > 0; --cl) {
+        const PetscInt cell = closure[cl*2];
+        if ((cell >= cStart) && (cell < cEnd)) {ierr = DMLabelSetValue(label, cell, values[v]);CHKERRQ(ierr); break;}
+      }
+      ierr = DMPlexRestoreTransitiveClosure(dm, face, PETSC_FALSE, &closureSize, &closure);CHKERRQ(ierr);
+    }
+    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
+    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
+  }
+  ierr = ISRestoreIndices(valueIS, &values);CHKERRQ(ierr);
+  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
 /*@
   DMPlexLabelClearCells - Remove cells from a label
 
@@ -238,7 +296,7 @@ PetscErrorCode DMPlexLabelAddCells(DM dm, DMLabel label)
 
   Level: developer
 
-  Note: This undoes DMPlexLabelAddCells()
+  Note: This undoes DMPlexLabelAddCells() or DMPlexLabelAddFaceCells()
 
 .seealso: DMPlexLabelComplete(), DMPlexLabelCohesiveComplete(), DMPlexLabelAddCells()
 @*/
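
A short usage sketch for the new DMPlexLabelAddFaceCells(): mark exterior faces, then pull in the supporting cell of each marked face so FEM boundary conditions can use cell geometry. The label name "boundary" and the marker value 1 are arbitrary choices here:

#include <petscdmplex.h>

static PetscErrorCode MarkBoundaryWithCells(DM dm)
{
  DMLabel        label;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMCreateLabel(dm, "boundary");CHKERRQ(ierr);
  ierr = DMGetLabel(dm, "boundary", &label);CHKERRQ(ierr);
  ierr = DMPlexMarkBoundaryFaces(dm, 1, label);CHKERRQ(ierr);
  ierr = DMPlexLabelAddFaceCells(dm, label);CHKERRQ(ierr); /* add one cell per marked face */
  PetscFunctionReturn(0);
}
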
@@ -726,6 +784,7 @@ static PetscErrorCode DMPlexConstructGhostCells_Internal(DM dm, DMLabel label, P
       if ((faces[f] >= fStart) && (faces[f] < fEnd)) ++numBdFaces;
     }
     Ng += numBdFaces;
+    ierr = ISRestoreIndices(faceIS, &faces);CHKERRQ(ierr);
     ierr = ISDestroy(&faceIS);CHKERRQ(ierr);
   }
   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
@@ -3684,83 +3743,125 @@ PetscErrorCode DMPlexCreateSubpointIS(DM dm, IS *subpointIS)
 }
 
 /*@
-  DMPlexGetSubpoint - Return the subpoint corresponding to a point in the original mesh. If the DM
-                      is not a submesh, just return the input point.
-
-  Note collective
+  DMGetEnclosureRelation - Get the relationship between dmA and dmB
 
   Input Parameters:
-+ dm - The submesh DM
-- p  - The point in the original, from which the submesh was created
++ dmA - The first DM
+- dmB - The second DM
 
   Output Parameter:
-. subp - The point in the submesh
+. rel - The relation of dmA to dmB
 
-  Level: developer
+  Level: intermediate
 
-.seealso: DMPlexCreateSubmesh(), DMPlexGetSubpointMap(), DMPlexCreateSubpointIS()
+.seealso: DMPlexGetEnclosurePoint()
 @*/
-PetscErrorCode DMPlexGetSubpoint(DM dm, PetscInt p, PetscInt *subp)
+PetscErrorCode DMGetEnclosureRelation(DM dmA, DM dmB, DMEnclosureType *rel)
 {
+  DM             plexA, plexB, sdm;
   DMLabel        spmap;
+  PetscInt       pStartA, pEndA, pStartB, pEndB, NpA, NpB;
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  *subp = p;
-  ierr = DMPlexGetSubpointMap(dm, &spmap);CHKERRQ(ierr);
-  if (spmap) {
-    IS              subpointIS;
-    const PetscInt *subpoints;
-    PetscInt        numSubpoints;
-
-    /* TODO Cache the IS, making it look like an index */
-    ierr = DMPlexCreateSubpointIS(dm, &subpointIS);CHKERRQ(ierr);
-    ierr = ISGetLocalSize(subpointIS, &numSubpoints);CHKERRQ(ierr);
-    ierr = ISGetIndices(subpointIS, &subpoints);CHKERRQ(ierr);
-    ierr = PetscFindInt(p, numSubpoints, subpoints, subp);CHKERRQ(ierr);
-    if (*subp < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d not found in submesh", p);
-    ierr = ISRestoreIndices(subpointIS, &subpoints);CHKERRQ(ierr);
-    ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
+  PetscValidPointer(rel, 3);
+  *rel = DM_ENC_NONE;
+  if (!dmA || !dmB) PetscFunctionReturn(0);
+  PetscValidHeaderSpecific(dmA, DM_CLASSID, 1);
+  PetscValidHeaderSpecific(dmB, DM_CLASSID, 1);
+  if (dmA == dmB) {*rel = DM_ENC_EQUALITY; PetscFunctionReturn(0);}
+  ierr = DMConvert(dmA, DMPLEX, &plexA);CHKERRQ(ierr);
+  ierr = DMConvert(dmB, DMPLEX, &plexB);CHKERRQ(ierr);
+  ierr = DMPlexGetChart(plexA, &pStartA, &pEndA);CHKERRQ(ierr);
+  ierr = DMPlexGetChart(plexB, &pStartB, &pEndB);CHKERRQ(ierr);
+  /* Assumption 1: subDMs have smaller charts than the DMs that they originate from
+    - The degenerate case of a subdomain which includes all of the domain on some process can be treated as equality */
+  if ((pStartA == pStartB) && (pEndA == pEndB)) {
+    *rel = DM_ENC_EQUALITY;
+    goto end;
+  }
+  NpA = pEndA - pStartA;
+  NpB = pEndB - pStartB;
+  if (NpA == NpB) goto end;
+  sdm = NpA > NpB ? plexB : plexA; /* The other is the original, enclosing dm */
+  ierr = DMPlexGetSubpointMap(sdm, &spmap);CHKERRQ(ierr);
+  if (!spmap) goto end;
+  /* TODO Check that the space mapped to by subpointMap is the same size as dm */
+  if (NpA > NpB) {
+    *rel = DM_ENC_SUPERMESH;
+  } else {
+    *rel = DM_ENC_SUBMESH;
   }
+  end:
+  ierr = DMDestroy(&plexA);CHKERRQ(ierr);
+  ierr = DMDestroy(&plexB);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
 /*@
-  DMPlexGetAuxiliaryPoint - For a given point in the DM, return the matching point in the auxiliary DM.
-
-  Note collective
+  DMGetEnclosurePoint - Get the point pA in dmA which corresponds to the point pB in dmB
 
   Input Parameters:
-+ dm    - The DM
-. dmAux - The related auxiliary DM
-- p     - The point in the original DM
++ dmA   - The first DM
+. dmB   - The second DM
+. etype - The type of enclosure relation that dmA has to dmB
+- pB    - A point of dmB
 
   Output Parameter:
-. subp - The point in the auxiliary DM
+. pA    - The corresponding point of dmA
 
-  Notes: If the DM is a submesh, we assume the dmAux is as well and just return the point. If only dmAux is a submesh,
-  then we map the point back to the original space.
+  Level: intermediate
 
-  Level: developer
-
-.seealso: DMPlexCreateSubmesh(), DMPlexGetSubpointMap(), DMPlexCreateSubpointIS()
+.seealso: DMGetEnclosureRelation()
 @*/
-PetscErrorCode DMPlexGetAuxiliaryPoint(DM dm, DM dmAux, PetscInt p, PetscInt *subp)
+PetscErrorCode DMGetEnclosurePoint(DM dmA, DM dmB, DMEnclosureType etype, PetscInt pB, PetscInt *pA)
 {
-  DMLabel        spmap;
-  PetscErrorCode ierr;
+  DM              sdm;
+  DMLabel         spmap;
+  IS              subpointIS;
+  const PetscInt *subpoints;
+  PetscInt        numSubpoints;
+  PetscErrorCode  ierr;
 
   PetscFunctionBegin;
-  *subp = p;
-  /* If dm is a submesh, do not get subpoint */
-  ierr = DMPlexGetSubpointMap(dm, &spmap);CHKERRQ(ierr);
-  if (dmAux && !spmap) {
-    PetscInt h;
-
-    ierr = DMPlexGetVTKCellHeight(dmAux, &h);CHKERRQ(ierr);
-    ierr = DMPlexGetSubpointMap(dmAux, &spmap);CHKERRQ(ierr);
-    if (spmap && !h) {ierr = DMLabelGetValue(spmap, p, subp);CHKERRQ(ierr);}
-    else             {ierr = DMPlexGetSubpoint(dmAux, p, subp);CHKERRQ(ierr);}
+  /* TODO Cache the IS, making it look like an index */
+  switch (etype) {
+    case DM_ENC_SUPERMESH:
+    sdm  = dmB;
+    ierr = DMPlexGetSubpointMap(sdm, &spmap);CHKERRQ(ierr);
+    ierr = DMPlexCreateSubpointIS(sdm, &subpointIS);CHKERRQ(ierr);
+    ierr = ISGetIndices(subpointIS, &subpoints);CHKERRQ(ierr);
+    *pA  = subpoints[pB];
+    ierr = ISRestoreIndices(subpointIS, &subpoints);CHKERRQ(ierr);
+    ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
+    break;
+    case DM_ENC_SUBMESH:
+    sdm  = dmA;
+    ierr = DMPlexGetSubpointMap(sdm, &spmap);CHKERRQ(ierr);
+    ierr = DMPlexCreateSubpointIS(sdm, &subpointIS);CHKERRQ(ierr);
+    ierr = ISGetLocalSize(subpointIS, &numSubpoints);CHKERRQ(ierr);
+    ierr = ISGetIndices(subpointIS, &subpoints);CHKERRQ(ierr);
+    ierr = PetscFindInt(pB, numSubpoints, subpoints, pA);CHKERRQ(ierr);
+    if (*pA < 0) {
+      ierr = DMViewFromOptions(dmA, NULL, "-dm_enc_A_view");CHKERRQ(ierr);
+      ierr = DMViewFromOptions(dmB, NULL, "-dm_enc_B_view");CHKERRQ(ierr);
+      SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %D not found in submesh", pB);
+    }
+    ierr = ISRestoreIndices(subpointIS, &subpoints);CHKERRQ(ierr);
+    ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
+    break;
+    case DM_ENC_EQUALITY:
+    case DM_ENC_NONE:
+    *pA = pB; break;
+    case DM_ENC_UNKNOWN:
+    {
+      DMEnclosureType enc;
+
+      ierr = DMGetEnclosureRelation(dmA, dmB, &enc);CHKERRQ(ierr);
+      ierr = DMGetEnclosurePoint(dmA, dmB, enc, pB, pA);CHKERRQ(ierr);
+    }
+    break;
+    default: SETERRQ1(PetscObjectComm((PetscObject) dmA), PETSC_ERR_ARG_OUTOFRANGE, "Invalid enclosure type %d", (int) etype);
   }
   PetscFunctionReturn(0);
 }
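
The old DMPlexGetSubpoint()/DMPlexGetAuxiliaryPoint() pair is replaced by the symmetric enclosure API above. A minimal sketch of translating a submesh point into the enclosing mesh's numbering, assuming dm is the supermesh of dmSub; passing DM_ENC_UNKNOWN instead lets DMGetEnclosurePoint() compute the relation itself, as the switch above shows:

#include <petscdmplex.h>

static PetscErrorCode SubToSuperPoint(DM dm, DM dmSub, PetscInt pSub, PetscInt *p)
{
  DMEnclosureType enc;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMGetEnclosureRelation(dm, dmSub, &enc);CHKERRQ(ierr);      /* expected: DM_ENC_SUPERMESH */
  ierr = DMGetEnclosurePoint(dm, dmSub, enc, pSub, p);CHKERRQ(ierr); /* p is dm's point for dmSub's pSub */
  PetscFunctionReturn(0);
}
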
diff --git a/src/dm/impls/plex/plextree.c b/src/dm/impls/plex/plextree.c
index 573e8f3da21..f9072457ddd 100644
--- a/src/dm/impls/plex/plextree.c
+++ b/src/dm/impls/plex/plextree.c
@@ -2491,7 +2491,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
             for (cl = 0; cl < numPoints; cl++) {
               PetscInt globalOff, c = points[2*cl];
               ierr = PetscSectionGetOffset(globalCoarse, c, &globalOff);CHKERRQ(ierr);
-              DMPlexGetIndicesPointFields_Internal(localCoarse, c, globalOff < 0 ? -(globalOff+1) : globalOff, newOffsets, PETSC_FALSE, perms, cl, NULL, pInd);
+              ierr = DMPlexGetIndicesPointFields_Internal(localCoarse, PETSC_FALSE, c, globalOff < 0 ? -(globalOff+1) : globalOff, newOffsets, PETSC_FALSE, perms, cl, NULL, pInd);CHKERRQ(ierr);
             }
           } else {
             for (cl = 0; cl < numPoints; cl++) {
@@ -2499,7 +2499,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
               const PetscInt *perm = perms[0] ? perms[0][cl] : NULL;
 
               ierr = PetscSectionGetOffset(globalCoarse, c, &globalOff);CHKERRQ(ierr);
-              DMPlexGetIndicesPoint_Internal(localCoarse, c, globalOff < 0 ? -(globalOff+1) : globalOff, newOffsets, PETSC_FALSE, perm, NULL, pInd);
+              ierr = DMPlexGetIndicesPoint_Internal(localCoarse, PETSC_FALSE, c, globalOff < 0 ? -(globalOff+1) : globalOff, newOffsets, PETSC_FALSE, perm, NULL, pInd);CHKERRQ(ierr);
             }
           }
           for (f = 0; f < maxFields; f++) {
@@ -2538,19 +2538,19 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
             newOffsets[f + 1]    += newOffsets[f];
             newOffsetsCopy[f + 1] = newOffsets[f + 1];
           }
-          ierr = DMPlexGetIndicesPointFields_Internal(cSec,p,cOff,offsetsCopy,PETSC_TRUE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPointFields_Internal(cSec,PETSC_TRUE,p,cOff,offsetsCopy,PETSC_TRUE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
           for (a = 0; a < aDof; a++) {
             PetscInt anchor = anchors[a + aOff], lOff;
             ierr = PetscSectionGetOffset(localCoarse,anchor,&lOff);CHKERRQ(ierr);
-            ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,anchor,lOff,newOffsetsCopy,PETSC_TRUE,NULL,-1, NULL,colIndices);CHKERRQ(ierr);
+            ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_TRUE,anchor,lOff,newOffsetsCopy,PETSC_TRUE,NULL,-1, NULL,colIndices);CHKERRQ(ierr);
           }
         }
         else {
-          ierr = DMPlexGetIndicesPoint_Internal(cSec,p,cOff,offsetsCopy,PETSC_TRUE,NULL, NULL,rowIndices);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPoint_Internal(cSec,PETSC_TRUE,p,cOff,offsetsCopy,PETSC_TRUE,NULL, NULL,rowIndices);CHKERRQ(ierr);
           for (a = 0; a < aDof; a++) {
             PetscInt anchor = anchors[a + aOff], lOff;
             ierr = PetscSectionGetOffset(localCoarse,anchor,&lOff);CHKERRQ(ierr);
-            ierr = DMPlexGetIndicesPoint_Internal(localCoarse,anchor,lOff,newOffsetsCopy,PETSC_TRUE,NULL, NULL,colIndices);CHKERRQ(ierr);
+            ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_TRUE,anchor,lOff,newOffsetsCopy,PETSC_TRUE,NULL, NULL,colIndices);CHKERRQ(ierr);
           }
         }
         if (numFields) {
@@ -2568,7 +2568,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
             PetscInt anchor = anchors[a + aOff];
             PetscInt gOff;
             ierr = PetscSectionGetOffset(globalCoarse,anchor,&gOff);CHKERRQ(ierr);
-            ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,anchor,gOff < 0 ? -(gOff + 1) : gOff,newOffsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
+            ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_FALSE,anchor,gOff < 0 ? -(gOff + 1) : gOff,newOffsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
           }
         }
         else {
@@ -2578,7 +2578,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
             PetscInt anchor = anchors[a + aOff];
             PetscInt gOff;
             ierr = PetscSectionGetOffset(globalCoarse,anchor,&gOff);CHKERRQ(ierr);
-            ierr = DMPlexGetIndicesPoint_Internal(localCoarse,anchor,gOff < 0 ? -(gOff + 1) : gOff,newOffsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
+            ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_FALSE,anchor,gOff < 0 ? -(gOff + 1) : gOff,newOffsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
           }
         }
         ierr = DMRestoreWorkArray(coarse,numColIndices,MPIU_INT,&colIndices);CHKERRQ(ierr);
@@ -2598,9 +2598,9 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
             pInd[numColIndices + f]             = offsets[f+1];
             pInd[numColIndices + numFields + f] = offsets[f+1];
           }
-          ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
         } else {
-          ierr = DMPlexGetIndicesPoint_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
         }
       }
     }
@@ -2693,7 +2693,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
           numD[f] = 0;
           numO[f] = 0;
         }
-        ierr = DMPlexGetIndicesPointFields_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPointFields_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
         for (f = 0; f < numFields; f++) {
           PetscInt colOffset    = newOffsets[f];
           PetscInt numFieldCols = newOffsets[f + 1] - newOffsets[f];
@@ -2711,7 +2711,7 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
         }
       }
       else {
-        ierr = DMPlexGetIndicesPoint_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPoint_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,rowIndices);CHKERRQ(ierr);
         numD[0] = 0;
         numO[0] = 0;
         for (i = 0; i < numColIndices; i++) {
@@ -2871,10 +2871,10 @@ PetscErrorCode DMPlexComputeInterpolatorTree(DM coarse, DM fine, PetscSF coarseT
           rowOffsets[f + 1]  = pInd[numColIndices + f];
           newOffsets[f + 1]  = pInd[numColIndices + numFields + f];
         }
-        ierr = DMPlexGetIndicesPointFields_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPointFields_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,rowIndices);CHKERRQ(ierr);
       }
       else {
-        ierr = DMPlexGetIndicesPoint_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPoint_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,rowIndices);CHKERRQ(ierr);
       }
       ierr = PetscSectionGetDof(leafMatricesSec,p,&matSize);CHKERRQ(ierr);
       if (!matSize) { /* incoming matrix is identity */
@@ -3228,19 +3228,19 @@ PetscErrorCode DMPlexComputeInjectorReferenceTree(DM refTree, Mat *inj)
           PetscInt        numClosure;
           PetscInt        iCell = pperms ? pperms[i] : i;
           PetscInt        parentCellShapeDof = cellShapeOff + iCell;
-          PetscReal       *Bparent;
+          PetscTabulation Tparent;
 
           ierr = PetscDualSpaceGetFunctional(dsp,parentCellShapeDof,&q);CHKERRQ(ierr);
           ierr = PetscQuadratureGetData(q,&dim,&thisNc,&numPoints,&points,&weights);CHKERRQ(ierr);
           if (thisNc != Nc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Functional dim %D does not match basis dim %D\n",thisNc,Nc);
-          ierr = PetscFEGetTabulation(fe,numPoints,points,&Bparent,NULL,NULL);CHKERRQ(ierr); /* I'm expecting a nodal basis: weights[:]' * Bparent[:,cellShapeDof] = 1. */
+          ierr = PetscFECreateTabulation(fe,1,numPoints,points,0,&Tparent);CHKERRQ(ierr); /* I'm expecting a nodal basis: weights[:]' * Bparent[:,cellShapeDof] = 1. */
           for (j = 0; j < numPoints; j++) {
             PetscInt          childCell = -1;
             PetscReal         *parentValAtPoint;
             const PetscReal   xi0[3] = {-1.,-1.,-1.};
             const PetscReal   *pointReal = &points[dim * j];
             const PetscScalar *point;
-            PetscReal         *Bchild;
+            PetscTabulation Tchild;
             PetscInt          childCellShapeOff, pointMatOff;
 #if defined(PETSC_USE_COMPLEX)
             PetscInt          d;
@@ -3253,7 +3253,7 @@ PetscErrorCode DMPlexComputeInjectorReferenceTree(DM refTree, Mat *inj)
             point = pointReal;
 #endif
 
-            parentValAtPoint = &Bparent[(fSize * j + parentCellShapeDof) * Nc];
+            parentValAtPoint = &Tparent->T[0][(fSize * j + parentCellShapeDof) * Nc];
 
             for (k = 0; k < numChildren; k++) { /* locate the point in a child's star cell*/
               PetscInt child = children[k];
@@ -3277,7 +3277,7 @@ PetscErrorCode DMPlexComputeInjectorReferenceTree(DM refTree, Mat *inj)
             CoordinatesRefToReal(dim, dim, xi0, v0parent, Jparent, pointReal, vtmp);
             CoordinatesRealToRef(dim, dim, xi0, v0, invJ, vtmp, pointRef);
 
-            ierr = PetscFEGetTabulation(fe,1,pointRef,&Bchild,NULL,NULL);CHKERRQ(ierr);
+            ierr = PetscFECreateTabulation(fe,1,1,pointRef,0,&Tchild);CHKERRQ(ierr);
             ierr = DMPlexGetTransitiveClosure(refTree,childCell,PETSC_TRUE,&numClosure,&closure);CHKERRQ(ierr);
             for (k = 0, pointMatOff = 0; k < numChildren; k++) { /* point is located in cell => child dofs support at point are in closure of cell */
               PetscInt child = children[k], childDepth, childDof, childO = PETSC_MIN_INT;
@@ -3309,7 +3309,7 @@ PetscErrorCode DMPlexComputeInjectorReferenceTree(DM refTree, Mat *inj)
                 PetscReal   *childValAtPoint;
                 PetscReal   val = 0.;
 
-                childValAtPoint = &Bchild[childCellDof * Nc];
+                childValAtPoint = &Tchild->T[0][childCellDof * Nc];
                 for (m = 0; m < Nc; m++) {
                   val += weights[j * Nc + m] * parentValAtPoint[m] * childValAtPoint[m];
                 }
@@ -3319,9 +3319,9 @@ PetscErrorCode DMPlexComputeInjectorReferenceTree(DM refTree, Mat *inj)
               pointMatOff += childDof;
             }
             ierr = DMPlexRestoreTransitiveClosure(refTree,childCell,PETSC_TRUE,&numClosure,&closure);CHKERRQ(ierr);
-            ierr = PetscFERestoreTabulation(fe,1,pointRef,&Bchild,NULL,NULL);CHKERRQ(ierr);
+            ierr = PetscTabulationDestroy(&Tchild);CHKERRQ(ierr);
           }
-          ierr = PetscFERestoreTabulation(fe,numPoints,points,&Bparent,NULL,NULL);CHKERRQ(ierr);
+          ierr = PetscTabulationDestroy(&Tparent);CHKERRQ(ierr);
         }
       }
       else { /* just the volume-weighted averages of the children */
@@ -3558,9 +3558,9 @@ static PetscErrorCode DMPlexTransferInjectorTree(DM coarse, DM fine, PetscSF coa
             ierr = PetscSectionGetFieldDof(localFine,p,f,&fDof);CHKERRQ(ierr);
             offsets[f + 1] = fDof + offsets[f];
           }
-          ierr = DMPlexGetIndicesPointFields_Internal(localFine,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPointFields_Internal(localFine,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL,-1, NULL,pInd);CHKERRQ(ierr);
         } else {
-          ierr = DMPlexGetIndicesPoint_Internal(localFine,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
+          ierr = DMPlexGetIndicesPoint_Internal(localFine,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsets,PETSC_FALSE,NULL, NULL,pInd);CHKERRQ(ierr);
         }
         if (gatheredValues) {ierr = VecGetValues(fineVec,dof,pInd,pVal);CHKERRQ(ierr);}
       }
@@ -3794,9 +3794,9 @@ PetscErrorCode DMPlexComputeInjectorTree(DM coarse, DM fine, PetscSF coarseToFin
         ierr = PetscSectionGetFieldDof(localCoarse,p,f,&fDof);CHKERRQ(ierr);
         rowOffsets[f + 1] = offsetsCopy[f + 1] = fDof + rowOffsets[f];
       }
-      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,parentIndices);CHKERRQ(ierr);
     } else {
-      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,parentIndices);CHKERRQ(ierr);
       rowOffsets[1] = offsetsCopy[0];
     }
 
@@ -3902,9 +3902,9 @@ PetscErrorCode DMPlexComputeInjectorTree(DM coarse, DM fine, PetscSF coarseToFin
         ierr = PetscSectionGetFieldDof(localCoarse,p,f,&fDof);CHKERRQ(ierr);
         rowOffsets[f + 1] = offsetsCopy[f + 1] = fDof + rowOffsets[f];
       }
-      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1, NULL,parentIndices);CHKERRQ(ierr);
     } else {
-      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL, NULL,parentIndices);CHKERRQ(ierr);
       rowOffsets[1] = offsetsCopy[0];
     }
 
@@ -4239,14 +4239,14 @@ static PetscErrorCode DMPlexTransferVecTree_Interpolate(DM coarse, Vec vecCoarse
           /* TODO: closure indices */
           newOffsets[f + 1]     = newOffsets[f] + ((childId == -1) ? rowDof : refPointFieldN[childId - pRefStart][f]);
         }
-        ierr = DMPlexGetIndicesPointFields_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1,NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPointFields_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL,-1,NULL,rowIndices);CHKERRQ(ierr);
       }
       else {
         offsets[0]    = 0;
         offsets[1]    = lDof;
         newOffsets[0] = 0;
         newOffsets[1] = (childId == -1) ? lDof : refPointFieldN[childId - pRefStart][0];
-        ierr = DMPlexGetIndicesPoint_Internal(localFine,p,gOff,offsetsCopy,PETSC_FALSE,NULL,NULL,rowIndices);CHKERRQ(ierr);
+        ierr = DMPlexGetIndicesPoint_Internal(localFine,PETSC_FALSE,p,gOff,offsetsCopy,PETSC_FALSE,NULL,NULL,rowIndices);CHKERRQ(ierr);
       }
       if (childId == -1) { /* no child interpolation: one nnz per */
         ierr = VecSetValues(vecFine,numValues,rowIndices,pVal,INSERT_VALUES);CHKERRQ(ierr);
@@ -4384,9 +4384,9 @@ static PetscErrorCode DMPlexTransferVecTree_Inject(DM fine, Vec vecFine, DM coar
         ierr = PetscSectionGetFieldDof(localCoarse,p,f,&fDof);CHKERRQ(ierr);
         rowOffsets[f + 1] = offsetsCopy[f + 1] = fDof + rowOffsets[f];
       }
-      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1,NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPointFields_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,-1,NULL,parentIndices);CHKERRQ(ierr);
     } else {
-      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,NULL,parentIndices);CHKERRQ(ierr);
+      ierr = DMPlexGetIndicesPoint_Internal(localCoarse,PETSC_FALSE,p,gOff < 0 ? -(gOff + 1) : gOff,offsetsCopy,PETSC_FALSE,NULL,NULL,parentIndices);CHKERRQ(ierr);
       rowOffsets[1] = offsetsCopy[0];
     }
 
@@ -4496,7 +4496,7 @@ static PetscErrorCode DMPlexTransferVecTree_Inject(DM fine, Vec vecFine, DM coar
 
   Level: developer
 
-.seealso(): DMPlexSetReferenceTree(), DMPlexGetReferenceTree(), PetscFVGetComputeGradients()
+.seealso: DMPlexSetReferenceTree(), DMPlexGetReferenceTree(), PetscFVGetComputeGradients()
 @*/
 PetscErrorCode DMPlexTransferVecTree(DM dmIn, Vec vecIn, DM dmOut, Vec vecOut, PetscSF sfRefine, PetscSF sfCoarsen, PetscInt *cidsRefine, PetscInt *cidsCoarsen, PetscBool useBCs, PetscReal time)
 {
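
The plextree.c hunks above replace the borrowed arrays from PetscFEGetTabulation()/PetscFERestoreTabulation() with owned PetscTabulation objects. A minimal lifecycle sketch, assuming only basis values are needed (K = 0) and using an illustrative evaluation point:

#include <petscfe.h>

static PetscErrorCode TabulateAtOrigin(PetscFE fe)
{
  PetscTabulation T;
  const PetscReal point[3] = {0.0, 0.0, 0.0}; /* one reference-cell point */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscFECreateTabulation(fe, 1, 1, point, 0, &T);CHKERRQ(ierr);
  /* T->T[0] holds the basis values, indexed as ((point*nBasis + basis)*Nc + comp) */
  ierr = PetscTabulationDestroy(&T);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
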
diff --git a/src/dm/impls/plex/plexvtk.c b/src/dm/impls/plex/plexvtk.c
index 75288f1ce73..d5c8455f303 100644
--- a/src/dm/impls/plex/plexvtk.c
+++ b/src/dm/impls/plex/plexvtk.c
@@ -271,7 +271,15 @@ static PetscErrorCode DMPlexVTKWritePartition_ASCII(DM dm, FILE *fp)
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, PetscSection globalSection, Vec v, FILE *fp, PetscInt enforceDof, PetscInt precision, PetscReal scale)
+#if defined(PETSC_USE_REAL_DOUBLE) || defined(PETSC_USE_REAL___FLOAT128)
+typedef double PetscVTKReal;
+#elif defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL___FP16)
+typedef float PetscVTKReal;
+#else
+typedef PetscReal PetscVTKReal;
+#endif
+
+static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, PetscSection globalSection, Vec v, FILE *fp, PetscInt enforceDof, PetscInt precision, PetscReal scale, PetscInt imag)
 {
   MPI_Comm           comm;
   const MPI_Datatype mpiType = MPIU_SCALAR;
@@ -321,9 +329,8 @@ static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, P
   enforceDof = PetscMax(enforceDof, maxDof);
   ierr = VecGetArray(v, &array);CHKERRQ(ierr);
   if (!rank) {
-#if defined(PETSC_USE_REAL___FLOAT128)
-    double dval;
-#endif
+    PetscVTKReal dval;
+    PetscScalar  val;
     char formatString[8];
 
     ierr = PetscSNPrintf(formatString, 8, "%%.%de", precision);CHKERRQ(ierr);
@@ -350,12 +357,9 @@ static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, P
           if (d > 0) {
             ierr = PetscFPrintf(comm, fp, " ");CHKERRQ(ierr);
           }
-#if defined(PETSC_USE_REAL___FLOAT128)
-          dval = (double)PetscRealPart(array[off+d])*scale;
+          val = array[off+d];
+          dval = (PetscVTKReal) ((imag ? PetscImaginaryPart(val) : PetscRealPart(val)) * scale);
           ierr = PetscFPrintf(comm, fp, formatString, dval);CHKERRQ(ierr);
-#else
-          ierr = PetscFPrintf(comm, fp, formatString, PetscRealPart(array[off+d])*scale);CHKERRQ(ierr);
-#endif
         }
         for (d = dof; d < enforceDof; d++) {
           ierr = PetscFPrintf(comm, fp, " 0.0");CHKERRQ(ierr);
@@ -376,12 +380,9 @@ static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, P
           if (d > 0) {
             ierr = PetscFPrintf(comm, fp, " ");CHKERRQ(ierr);
           }
-#if defined(PETSC_USE_REAL___FLOAT128)
-          dval = PetscRealPart(remoteValues[p*maxDof+d])*scale;
+          val = remoteValues[p*maxDof+d];
+          dval = (PetscVTKReal) ((imag ? PetscImaginaryPart(val) : PetscRealPart(val)) * scale);
           ierr = PetscFPrintf(comm, fp, formatString, dval);CHKERRQ(ierr);
-#else
-          ierr = PetscFPrintf(comm, fp, formatString, PetscRealPart(remoteValues[p*maxDof+d])*scale);CHKERRQ(ierr);
-#endif
         }
         for (d = maxDof; d < enforceDof; ++d) {
           ierr = PetscFPrintf(comm, fp, " 0.0");CHKERRQ(ierr);
@@ -427,7 +428,7 @@ static PetscErrorCode DMPlexVTKWriteSection_ASCII(DM dm, PetscSection section, P
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode DMPlexVTKWriteField_ASCII(DM dm, PetscSection section, PetscSection globalSection, Vec field, const char name[], FILE *fp, PetscInt enforceDof, PetscInt precision, PetscReal scale)
+static PetscErrorCode DMPlexVTKWriteField_ASCII(DM dm, PetscSection section, PetscSection globalSection, Vec field, const char name[], FILE *fp, PetscInt enforceDof, PetscInt precision, PetscReal scale, PetscBool nameComplex, PetscInt imag)
 {
   MPI_Comm       comm;
   PetscInt       numDof = 0, maxDof;
@@ -445,12 +446,20 @@ static PetscErrorCode DMPlexVTKWriteField_ASCII(DM dm, PetscSection section, Pet
   ierr = MPIU_Allreduce(&numDof, &maxDof, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
   if (!name) name = "Unknown";
   if (maxDof == 3) {
-    ierr = PetscFPrintf(comm, fp, "VECTORS %s double\n", name);CHKERRQ(ierr);
+    if (nameComplex) {
+      ierr = PetscFPrintf(comm, fp, "VECTORS %s.%s double\n", name, imag ? "Im" : "Re");CHKERRQ(ierr);
+    } else {
+      ierr = PetscFPrintf(comm, fp, "VECTORS %s double\n", name);CHKERRQ(ierr);
+    }
   } else {
-    ierr = PetscFPrintf(comm, fp, "SCALARS %s double %D\n", name, maxDof);CHKERRQ(ierr);
+    if (nameComplex) {
+      ierr = PetscFPrintf(comm, fp, "SCALARS %s.%s double %D\n", name, imag ? "Im" : "Re", maxDof);CHKERRQ(ierr);
+    } else {
+      ierr = PetscFPrintf(comm, fp, "SCALARS %s double %D\n", name, maxDof);CHKERRQ(ierr);
+    }
     ierr = PetscFPrintf(comm, fp, "LOOKUP_TABLE default\n");CHKERRQ(ierr);
   }
-  ierr = DMPlexVTKWriteSection_ASCII(dm, section, globalSection, field, fp, enforceDof, precision, scale);CHKERRQ(ierr);
+  ierr = DMPlexVTKWriteSection_ASCII(dm, section, globalSection, field, fp, enforceDof, precision, scale, imag);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -464,11 +473,18 @@ static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer)
   PetscLayout              vLayout;
   Vec                      coordinates;
   PetscReal                lengthScale;
-  PetscInt                 vMax, totVertices, totCells = 0;
-  PetscBool                hasPoint = PETSC_FALSE, hasCell = PETSC_FALSE, writePartition = PETSC_FALSE, localized;
+  PetscInt                 vMax, totVertices, totCells = 0, loops_per_scalar, l;
+  PetscBool                hasPoint = PETSC_FALSE, hasCell = PETSC_FALSE, writePartition = PETSC_FALSE, localized, writeComplex;
   PetscErrorCode           ierr;
 
   PetscFunctionBegin;
+#if defined(PETSC_USE_COMPLEX)
+  loops_per_scalar = 2;
+  writeComplex = PETSC_TRUE;
+#else
+  loops_per_scalar = 1;
+  writeComplex = PETSC_FALSE;
+#endif
   ierr = DMGetCoordinatesLocalized(dm,&localized);CHKERRQ(ierr);
   if (localized) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"VTK output with localized coordinates not yet supported");
   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
@@ -503,7 +519,7 @@ static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer)
   }
   ierr = PetscLayoutGetSize(vLayout, &totVertices);CHKERRQ(ierr);
   ierr = PetscFPrintf(comm, fp, "POINTS %D double\n", totVertices);CHKERRQ(ierr);
-  ierr = DMPlexVTKWriteSection_ASCII(dm, coordSection, globalCoordSection, coordinates, fp, 3, PETSC_DETERMINE, lengthScale);CHKERRQ(ierr);
+  ierr = DMPlexVTKWriteSection_ASCII(dm, coordSection, globalCoordSection, coordinates, fp, 3, PETSC_DETERMINE, lengthScale, 0);CHKERRQ(ierr);
   /* Cells */
   ierr = DMPlexVTKWriteCells_ASCII(dm, fp, &totCells);CHKERRQ(ierr);
   /* Vertex fields */
@@ -515,67 +531,85 @@ static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer)
     ierr = PetscFPrintf(comm, fp, "POINT_DATA %D\n", totVertices);CHKERRQ(ierr);
     for (link = vtk->link; link; link = link->next) {
       Vec          X = (Vec) link->vec;
-      DM           dmX;
-      PetscSection section, globalSection, newSection = NULL;
+      PetscSection section = NULL, globalSection, newSection = NULL;
+      char         namebuf[256];
       const char   *name;
       PetscInt     enforceDof = PETSC_DETERMINE;
 
       if ((link->ft != PETSC_VTK_POINT_FIELD) && (link->ft != PETSC_VTK_POINT_VECTOR_FIELD)) continue;
       if (link->ft == PETSC_VTK_POINT_VECTOR_FIELD) enforceDof = 3;
       ierr = PetscObjectGetName(link->vec, &name);CHKERRQ(ierr);
-      ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
-      if (dmX) {
-        DMLabel  subpointMap, subpointMapX;
-        PetscInt dim, dimX, pStart, pEnd, qStart, qEnd;
-
-        ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);
-        /* Here is where we check whether dmX is a submesh of dm */
-        ierr = DMGetDimension(dm,  &dim);CHKERRQ(ierr);
-        ierr = DMGetDimension(dmX, &dimX);CHKERRQ(ierr);
-        ierr = DMPlexGetChart(dm,  &pStart, &pEnd);CHKERRQ(ierr);
-        ierr = DMPlexGetChart(dmX, &qStart, &qEnd);CHKERRQ(ierr);
-        ierr = DMPlexGetSubpointMap(dm,  &subpointMap);CHKERRQ(ierr);
-        ierr = DMPlexGetSubpointMap(dmX, &subpointMapX);CHKERRQ(ierr);
-        if (((dim != dimX) || ((pEnd-pStart) < (qEnd-qStart))) && subpointMap && !subpointMapX) {
-          const PetscInt *ind = NULL;
-          IS              subpointIS;
-          PetscInt        n = 0, q;
-
-          ierr = PetscSectionGetChart(section, &qStart, &qEnd);CHKERRQ(ierr);
-          ierr = DMPlexCreateSubpointIS(dm, &subpointIS);CHKERRQ(ierr);
-          if (subpointIS) {
-            ierr = ISGetLocalSize(subpointIS, &n);CHKERRQ(ierr);
-            ierr = ISGetIndices(subpointIS, &ind);CHKERRQ(ierr);
-          }
-          ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
-          ierr = PetscSectionSetChart(newSection, pStart, pEnd);CHKERRQ(ierr);
-          for (q = qStart; q < qEnd; ++q) {
-            PetscInt dof, off, p;
-
-            ierr = PetscSectionGetDof(section, q, &dof);CHKERRQ(ierr);
-            if (dof) {
-              ierr = PetscFindInt(q, n, ind, &p);CHKERRQ(ierr);
-              if (p >= pStart) {
-                ierr = PetscSectionSetDof(newSection, p, dof);CHKERRQ(ierr);
-                ierr = PetscSectionGetOffset(section, q, &off);CHKERRQ(ierr);
-                ierr = PetscSectionSetOffset(newSection, p, off);CHKERRQ(ierr);
+      ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+      if (!section) {
+        DM           dmX;
+
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (dmX) {
+          DMLabel  subpointMap, subpointMapX;
+          PetscInt dim, dimX, pStart, pEnd, qStart, qEnd;
+
+          ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);
+          /* Here is where we check whether dmX is a submesh of dm */
+          ierr = DMGetDimension(dm,  &dim);CHKERRQ(ierr);
+          ierr = DMGetDimension(dmX, &dimX);CHKERRQ(ierr);
+          ierr = DMPlexGetChart(dm,  &pStart, &pEnd);CHKERRQ(ierr);
+          ierr = DMPlexGetChart(dmX, &qStart, &qEnd);CHKERRQ(ierr);
+          ierr = DMPlexGetSubpointMap(dm,  &subpointMap);CHKERRQ(ierr);
+          ierr = DMPlexGetSubpointMap(dmX, &subpointMapX);CHKERRQ(ierr);
+          if (((dim != dimX) || ((pEnd-pStart) < (qEnd-qStart))) && subpointMap && !subpointMapX) {
+            const PetscInt *ind = NULL;
+            IS              subpointIS;
+            PetscInt        n = 0, q;
+
+            ierr = PetscSectionGetChart(section, &qStart, &qEnd);CHKERRQ(ierr);
+            ierr = DMPlexCreateSubpointIS(dm, &subpointIS);CHKERRQ(ierr);
+            if (subpointIS) {
+              ierr = ISGetLocalSize(subpointIS, &n);CHKERRQ(ierr);
+              ierr = ISGetIndices(subpointIS, &ind);CHKERRQ(ierr);
+            }
+            ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
+            ierr = PetscSectionSetChart(newSection, pStart, pEnd);CHKERRQ(ierr);
+            for (q = qStart; q < qEnd; ++q) {
+              PetscInt dof, off, p;
+
+              ierr = PetscSectionGetDof(section, q, &dof);CHKERRQ(ierr);
+              if (dof) {
+                ierr = PetscFindInt(q, n, ind, &p);CHKERRQ(ierr);
+                if (p >= pStart) {
+                  ierr = PetscSectionSetDof(newSection, p, dof);CHKERRQ(ierr);
+                  ierr = PetscSectionGetOffset(section, q, &off);CHKERRQ(ierr);
+                  ierr = PetscSectionSetOffset(newSection, p, off);CHKERRQ(ierr);
+                }
               }
             }
+            if (subpointIS) {
+              ierr = ISRestoreIndices(subpointIS, &ind);CHKERRQ(ierr);
+              ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
+            }
+            /* No need to setup section */
+            section = newSection;
           }
-          if (subpointIS) {
-            ierr = ISRestoreIndices(subpointIS, &ind);CHKERRQ(ierr);
-            ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
-          }
-          /* No need to setup section */
-          section = newSection;
+        }
+      }
+      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it and could not create one from VecGetDM()", name);
+      if (link->field >= 0) {
+        const char *fieldname;
+
+        ierr = PetscSectionGetFieldName(section, link->field, &fieldname);CHKERRQ(ierr);
+        ierr = PetscSectionGetField(section, link->field, &section);CHKERRQ(ierr);
+        if (fieldname) {
+          ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s%s", name, fieldname);CHKERRQ(ierr);
+        } else {
+          ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s%D", name, link->field);CHKERRQ(ierr);
         }
       } else {
-        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
-        if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
+        ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s", name);CHKERRQ(ierr);
       }
-      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
+      ierr = PetscViewerVTKSanitizeName_Internal(namebuf, sizeof(namebuf));CHKERRQ(ierr);
       ierr = PetscSectionCreateGlobalSection(section, dm->sf, PETSC_FALSE, PETSC_FALSE, &globalSection);CHKERRQ(ierr);
-      ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, name, fp, enforceDof, PETSC_DETERMINE, 1.0);CHKERRQ(ierr);
+      for (l = 0; l < loops_per_scalar; l++) {
+        ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, namebuf, fp, enforceDof, PETSC_DETERMINE, 1.0, writeComplex, l);CHKERRQ(ierr);
+      }
       ierr = PetscSectionDestroy(&globalSection);CHKERRQ(ierr);
       if (newSection) {ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);}
     }
@@ -586,27 +620,42 @@ static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer)
     ierr = PetscFPrintf(comm, fp, "CELL_DATA %D\n", totCells);CHKERRQ(ierr);
     for (link = vtk->link; link; link = link->next) {
       Vec          X = (Vec) link->vec;
-      DM           dmX;
-      PetscSection section, globalSection;
-      const char   *name;
+      PetscSection section = NULL, globalSection;
+      const char   *name = "";
+      char         namebuf[256];
       PetscInt     enforceDof = PETSC_DETERMINE;
 
       if ((link->ft != PETSC_VTK_CELL_FIELD) && (link->ft != PETSC_VTK_CELL_VECTOR_FIELD)) continue;
       if (link->ft == PETSC_VTK_CELL_VECTOR_FIELD) enforceDof = 3;
       ierr = PetscObjectGetName(link->vec, &name);CHKERRQ(ierr);
-      ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
-      if (dmX) {
-        ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);
-      } else {
-        PetscContainer c;
+      ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+      if (!section) {
+        DM           dmX;
 
-        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &c);CHKERRQ(ierr);
-        if (!c) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
-        ierr = PetscContainerGetPointer(c, (void**) &section);CHKERRQ(ierr);
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (dmX) {
+          ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);
+        }
+      }
+      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it and could not create one from VecGetDM()", name);
+      if (link->field >= 0) {
+        const char *fieldname;
+
+        ierr = PetscSectionGetFieldName(section, link->field, &fieldname);CHKERRQ(ierr);
+        ierr = PetscSectionGetField(section, link->field, &section);CHKERRQ(ierr);
+        if (fieldname) {
+          ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s%s", name, fieldname);CHKERRQ(ierr);
+        } else {
+          ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s%D", name, link->field);CHKERRQ(ierr);
+        }
+      } else {
+        ierr = PetscSNPrintf(namebuf, sizeof(namebuf), "%s", name);CHKERRQ(ierr);
       }
-      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
+      ierr = PetscViewerVTKSanitizeName_Internal(namebuf, sizeof(namebuf));CHKERRQ(ierr);
       ierr = PetscSectionCreateGlobalSection(section, dm->sf, PETSC_FALSE, PETSC_FALSE, &globalSection);CHKERRQ(ierr);
-      ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, name, fp, enforceDof, PETSC_DETERMINE, 1.0);CHKERRQ(ierr);
+      for (l = 0; l < loops_per_scalar; l++) {
+        ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, namebuf, fp, enforceDof, PETSC_DETERMINE, 1.0, writeComplex, l);CHKERRQ(ierr);
+      }
       ierr = PetscSectionDestroy(&globalSection);CHKERRQ(ierr);
     }
     if (writePartition) {
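A minimal sketch of the complex-output pattern used by the ASCII writer above (not part of the patch itself; the helper name, the flat value array, and the toy arguments are assumptions): in complex builds each field is emitted twice, with ".Re"/".Im" appended to its name, while real builds keep the single pass.

#include <petscsys.h>

static PetscErrorCode WriteScalarsMaybeComplex(FILE *fp, const char *name, const PetscScalar *vals, PetscInt n)
{
  PetscInt       loops_per_scalar = 1, l, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_COMPLEX)
  loops_per_scalar = 2;                                    /* one pass for Re, one for Im */
#endif
  for (l = 0; l < loops_per_scalar; ++l) {
    if (loops_per_scalar == 2) {
      ierr = PetscFPrintf(PETSC_COMM_SELF, fp, "SCALARS %s.%s double 1\n", name, l ? "Im" : "Re");CHKERRQ(ierr);
    } else {
      ierr = PetscFPrintf(PETSC_COMM_SELF, fp, "SCALARS %s double 1\n", name);CHKERRQ(ierr);
    }
    ierr = PetscFPrintf(PETSC_COMM_SELF, fp, "LOOKUP_TABLE default\n");CHKERRQ(ierr);
    for (i = 0; i < n; ++i) {
      /* second pass writes the imaginary parts; in real builds this branch is never taken */
      ierr = PetscFPrintf(PETSC_COMM_SELF, fp, "%g\n", (double)(l ? PetscImaginaryPart(vals[i]) : PetscRealPart(vals[i])));CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}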
diff --git a/src/dm/impls/plex/plexvtu.c b/src/dm/impls/plex/plexvtu.c
index 0bfd9e10f29..8c4ce4fc4b6 100644
--- a/src/dm/impls/plex/plexvtu.c
+++ b/src/dm/impls/plex/plexvtu.c
@@ -7,12 +7,20 @@ typedef struct {
   PetscInt nconn;               /* number of entries in cell->vertex connectivity array */
 } PieceInfo;
 
-#if defined(PETSC_USE_REAL_SINGLE)
+#if defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL___FP16)
+/* output in float if single or half precision in memory */
 static const char precision[] = "Float32";
-#elif defined(PETSC_USE_REAL_DOUBLE)
+typedef float PetscVTUReal;
+#define MPIU_VTUREAL MPI_FLOAT
+#elif defined(PETSC_USE_REAL_DOUBLE) || defined(PETSC_USE_REAL___FLOAT128)
+/* output in double if double or quad precision in memory */
 static const char precision[] = "Float64";
+typedef double PetscVTUReal;
+#define MPIU_VTUREAL MPI_DOUBLE
 #else
 static const char precision[] = "UnknownPrecision";
+typedef PetscReal PetscVTUReal;
+#define MPIU_VTUREAL MPIU_REAL
 #endif
 
 static PetscErrorCode TransferWrite(PetscViewer viewer,FILE *fp,PetscMPIInt srank,PetscMPIInt root,const void *send,void *recv,PetscMPIInt count,MPI_Datatype mpidatatype,PetscMPIInt tag)
@@ -130,11 +138,14 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
   PieceInfo                piece,*gpiece = NULL;
   void                     *buffer = NULL;
   const char               *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
+  PetscInt                 loops_per_scalar;
 
   PetscFunctionBegin;
   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
 #if defined(PETSC_USE_COMPLEX)
-  SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Complex values not supported");
+  loops_per_scalar = 2;
+#else
+  loops_per_scalar = 1;
 #endif
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
@@ -201,7 +212,7 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
       /* Coordinate positions */
       ierr     = PetscFPrintf(PETSC_COMM_SELF,fp,"      \n");CHKERRQ(ierr);
       ierr     = PetscFPrintf(PETSC_COMM_SELF,fp,"        \n",precision,boffset);CHKERRQ(ierr);
-      boffset += gpiece[r].nvertices*3*sizeof(PetscScalar) + sizeof(int);
+      boffset += gpiece[r].nvertices*3*sizeof(PetscVTUReal) + sizeof(int);
       ierr     = PetscFPrintf(PETSC_COMM_SELF,fp,"      \n");CHKERRQ(ierr);
       /* Cell connectivity */
       ierr     = PetscFPrintf(PETSC_COMM_SELF,fp,"      \n");CHKERRQ(ierr);
@@ -222,47 +233,96 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
       /* all the vectors */
       for (link=vtk->link; link; link=link->next) {
         Vec        X = (Vec)link->vec;
+        DM         dmX = NULL;
         PetscInt   bs,nfields,field;
         const char *vecname = "";
+        PetscSection section;
         if ((link->ft != PETSC_VTK_CELL_FIELD) && (link->ft != PETSC_VTK_CELL_VECTOR_FIELD)) continue;
         if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
           ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
         }
-        ierr = PetscSectionGetDof(dm->localSection,cStart,&bs);CHKERRQ(ierr);
-        ierr = PetscSectionGetNumFields(dm->localSection,&nfields);CHKERRQ(ierr);
-        for (field=0,i=0; field<(nfields?nfields:1); field++) {
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (!dmX) dmX = dm;
+        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,cStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
+        for (i=0; field<(nfields?nfields:1); field++) {
           PetscInt     fbs,j;
           PetscFV      fv = NULL;
           PetscObject  f;
           PetscClassId fClass;
           const char *fieldname = NULL;
           char       buf[256];
+          PetscBool    vector;
           if (nfields) {        /* We have user-defined fields/components */
-            ierr = PetscSectionGetFieldDof(dm->localSection,cStart,field,&fbs);CHKERRQ(ierr);
-            ierr = PetscSectionGetFieldName(dm->localSection,field,&fieldname);CHKERRQ(ierr);
+            ierr = PetscSectionGetFieldDof(section,cStart,field,&fbs);CHKERRQ(ierr);
+            ierr = PetscSectionGetFieldName(section,field,&fieldname);CHKERRQ(ierr);
           } else fbs = bs;      /* Say we have one field with 'bs' components */
-          ierr = DMGetField(dm,field,NULL,&f);CHKERRQ(ierr);
+          ierr = DMGetField(dmX,field,NULL,&f);CHKERRQ(ierr);
           ierr = PetscObjectGetClassId(f,&fClass);CHKERRQ(ierr);
           if (fClass == PETSCFV_CLASSID) {
             fv = (PetscFV) f;
           }
-          if (!fieldname) {
+          if (nfields && !fieldname) {
             ierr = PetscSNPrintf(buf,sizeof(buf),"CellField%D",field);CHKERRQ(ierr);
             fieldname = buf;
           }
-          for (j=0; j\n",precision,vecname,fieldname,compName,boffset);CHKERRQ(ierr);
+          vector = PETSC_FALSE;
+          if (link->ft == PETSC_VTK_CELL_VECTOR_FIELD) {
+            vector = PETSC_TRUE;
+            if (fbs > 3) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_SIZ,"Cell vector fields can have at most 3 components, %D given\n", fbs);
+            for (j = 0; j < fbs; j++) {
+              const char *compName = NULL;
+              if (fv) {
+                ierr = PetscFVGetComponentName(fv,j,&compName);CHKERRQ(ierr);
+                if (compName) break;
+              }
             }
-            else {
-              ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,j,boffset);CHKERRQ(ierr);
+            if (j < fbs) vector = PETSC_FALSE;
+          }
+          if (vector) {
+#if defined(PETSC_USE_COMPLEX)
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].ncells*3*sizeof(PetscVTUReal) + sizeof(int);
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].ncells*3*sizeof(PetscVTUReal) + sizeof(int);
+#else
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].ncells*3*sizeof(PetscVTUReal) + sizeof(int);
+#endif
+            i+=fbs;
+          } else {
+            for (j=0; j 1) {
+                ierr = PetscSNPrintf(finalname,255,"%s%s.%D",vecname,fieldname,j);CHKERRQ(ierr);
+              } else {
+                ierr = PetscSNPrintf(finalname,255,"%s%s",vecname,fieldname);CHKERRQ(ierr);
+              }
+#if defined(PETSC_USE_COMPLEX)
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].ncells*sizeof(PetscVTUReal) + sizeof(int);
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].ncells*sizeof(PetscVTUReal) + sizeof(int);
+#else
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].ncells*sizeof(PetscVTUReal) + sizeof(int);
+#endif
+              i++;
             }
-            boffset += gpiece[r].ncells*sizeof(PetscScalar) + sizeof(int);
-            i++;
           }
         }
         if (i != bs) SETERRQ2(comm,PETSC_ERR_PLIB,"Total number of field components %D != block size %D",i,bs);
@@ -275,34 +335,70 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
       ierr = PetscFPrintf(PETSC_COMM_SELF,fp,"      \n");CHKERRQ(ierr);
       for (link=vtk->link; link; link=link->next) {
         Vec        X = (Vec)link->vec;
+        DM         dmX;
         PetscInt   bs,nfields,field;
         const char *vecname = "";
+        PetscSection section;
         if ((link->ft != PETSC_VTK_POINT_FIELD) && (link->ft != PETSC_VTK_POINT_VECTOR_FIELD)) continue;
         if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
           ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
         }
-        ierr = PetscSectionGetDof(dm->localSection,vStart,&bs);CHKERRQ(ierr);
-        ierr = PetscSectionGetNumFields(dm->localSection,&nfields);CHKERRQ(ierr);
-        for (field=0,i=0; field<(nfields?nfields:1); field++) {
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (!dmX) dmX = dm;
+        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,vStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
+        for (i=0; field<(nfields?nfields:1); field++) {
           PetscInt   fbs,j;
           const char *fieldname = NULL;
           char       buf[256];
           if (nfields) {        /* We have user-defined fields/components */
-            ierr = PetscSectionGetFieldDof(dm->localSection,vStart,field,&fbs);CHKERRQ(ierr);
-            ierr = PetscSectionGetFieldName(dm->localSection,field,&fieldname);CHKERRQ(ierr);
+            ierr = PetscSectionGetFieldDof(section,vStart,field,&fbs);CHKERRQ(ierr);
+            ierr = PetscSectionGetFieldName(section,field,&fieldname);CHKERRQ(ierr);
           } else fbs = bs;      /* Say we have one field with 'bs' components */
-          if (!fieldname) {
+          if (nfields && !fieldname) {
             ierr = PetscSNPrintf(buf,sizeof(buf),"PointField%D",field);CHKERRQ(ierr);
             fieldname = buf;
           }
-          for (j=0; j\n",precision,vecname,fieldname,j,boffset);CHKERRQ(ierr);
-            boffset += gpiece[r].nvertices*sizeof(PetscScalar) + sizeof(int);
+          if (link->ft == PETSC_VTK_POINT_VECTOR_FIELD) {
+            if (fbs > 3) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_SIZ,"Point vector fields can have at most 3 components, %D given\n", fbs);
+#if defined(PETSC_USE_COMPLEX)
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].nvertices*3*sizeof(PetscVTUReal) + sizeof(int);
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].nvertices*3*sizeof(PetscVTUReal) + sizeof(int);
+#else
+            ierr = PetscFPrintf(comm,fp,"        \n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
+            boffset += gpiece[r].nvertices*3*sizeof(PetscVTUReal) + sizeof(int);
+#endif
+          } else {
+            for (j=0; j 1) {
+                ierr = PetscSNPrintf(finalname,255,"%s%s.%D",vecname,fieldname,j);CHKERRQ(ierr);
+              } else {
+                ierr = PetscSNPrintf(finalname,255,"%s%s",vecname,fieldname);CHKERRQ(ierr);
+              }
+#if defined(PETSC_USE_COMPLEX)
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].nvertices*sizeof(PetscVTUReal) + sizeof(int);
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].nvertices*sizeof(PetscVTUReal) + sizeof(int);
+#else
+              ierr = PetscFPrintf(comm,fp,"        \n",precision,finalname,boffset);CHKERRQ(ierr);
+              boffset += gpiece[r].nvertices*sizeof(PetscVTUReal) + sizeof(int);
+#endif
+            }
           }
         }
       }
       ierr = PetscFPrintf(PETSC_COMM_SELF,fp,"      \n");CHKERRQ(ierr);
-
       ierr = PetscFPrintf(PETSC_COMM_SELF,fp,"    \n");CHKERRQ(ierr);
     }
   }
@@ -314,8 +410,8 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
   if (!rank) {
     PetscInt maxsize = 0;
     for (r=0; r= vStart) && (closure[v] < vEnd)) {
                     ierr = PetscSectionGetOffset(coordSection, closure[v], &off);CHKERRQ(ierr);
                     if (dimEmbed != 3) {
-                      y[cnt*3+0] = x[off+0];
-                      y[cnt*3+1] = (dimEmbed > 1) ? x[off+1] : 0.0;
-                      y[cnt*3+2] = 0.0;
+                      y[cnt*3+0] = (PetscVTUReal) PetscRealPart(x[off+0]);
+                      y[cnt*3+1] = (PetscVTUReal) ((dimEmbed > 1) ? PetscRealPart(x[off+1]) : 0.0);
+                      y[cnt*3+2] = (PetscVTUReal) 0.0;
                     } else {
-                      y[cnt*3+0] = x[off+0];
-                      y[cnt*3+1] = x[off+1];
-                      y[cnt*3+2] = x[off+2];
+                      y[cnt*3+0] = (PetscVTUReal) PetscRealPart(x[off+0]);
+                      y[cnt*3+1] = (PetscVTUReal) PetscRealPart(x[off+1]);
+                      y[cnt*3+2] = (PetscVTUReal) PetscRealPart(x[off+2]);
                     }
                     cnt++;
                   }
@@ -363,14 +465,14 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
                 ierr = PetscSectionGetOffset(coordSection, c, &off);CHKERRQ(ierr);
                 if (dimEmbed != 3) {
                   for (i=0; i 1) ? x[off + i*dimEmbed + 1] : 0.0;
-                    y[cnt*3+2] = 0.0;
+                    y[cnt*3+0] = (PetscVTUReal) PetscRealPart(x[off + i*dimEmbed + 0]);
+                    y[cnt*3+1] = (PetscVTUReal) ((dimEmbed > 1) ? PetscRealPart(x[off + i*dimEmbed + 1]) : 0.0);
+                    y[cnt*3+2] = (PetscVTUReal) 0.0;
                     cnt++;
                   }
                 } else {
                   for (i=0; i 1) ? x[i*dimEmbed+1] : 0;
-              y[i*3+2] = 0.0;
+              y[i*3+0] = (PetscVTUReal) PetscRealPart(x[i*dimEmbed+0]);
+              y[i*3+1] = (PetscVTUReal) ((dimEmbed > 1) ? PetscRealPart(x[i*dimEmbed+1]) : 0.);
+              y[i*3+2] = (PetscVTUReal) ((dimEmbed > 2) ? PetscRealPart(x[i*dimEmbed+2]) : 0.);
             }
           }
         }
         nsend = piece.nvertices*3;
-        ierr  = TransferWrite(viewer,fp,r,0,y ? y : x,buffer,nsend,MPIU_SCALAR,tag);CHKERRQ(ierr);
+        ierr  = TransferWrite(viewer,fp,r,0,copy ? (const void *) y : (const void *) x,buffer,nsend,MPIU_VTUREAL,tag);CHKERRQ(ierr);
         ierr  = PetscFree(y);CHKERRQ(ierr);
         ierr  = VecRestoreArrayRead(coords,&x);CHKERRQ(ierr);
       }
@@ -409,95 +511,336 @@ PetscErrorCode DMPlexVTKWriteAll_VTU(DM dm,PetscViewer viewer)
       /* Cell data */
       for (link=vtk->link; link; link=link->next) {
         Vec               X = (Vec)link->vec;
+        DM                dmX;
         const PetscScalar *x;
-        PetscScalar       *y;
-        PetscInt          bs;
+        PetscVTUReal      *y;
+        PetscInt          bs, nfields, field;
+        PetscSection      section = NULL;
+
         if ((link->ft != PETSC_VTK_CELL_FIELD) && (link->ft != PETSC_VTK_CELL_VECTOR_FIELD)) continue;
-        ierr = PetscSectionGetDof(dm->localSection,cStart,&bs);CHKERRQ(ierr);
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (!dmX) dmX = dm;
+        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,cStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
         ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
-        ierr = PetscMalloc1(piece.ncells,&y);CHKERRQ(ierr);
-        for (i=0; ift == PETSC_VTK_CELL_VECTOR_FIELD) {
+            vector = PETSC_TRUE;
+            for (j = 0; j < fbs; j++) {
+              const char *compName = NULL;
+              if (fv) {
+                ierr = PetscFVGetComponentName(fv,j,&compName);CHKERRQ(ierr);
+                if (compName) break;
+              }
+            }
+            if (j < fbs) vector = PETSC_FALSE;
+          }
+          if (vector) {
+            PetscInt cnt, l;
+            for (l = 0; l < loops_per_scalar; l++) {
+              for (c=cStart,cnt=0; clink; link; link=link->next) {
         Vec               X = (Vec)link->vec;
+        DM                dmX;
         const PetscScalar *x;
-        PetscScalar       *y;
-        PetscInt          bs;
+        PetscVTUReal      *y;
+        PetscInt          bs, nfields, field;
+        PetscSection      section = NULL;
+
         if ((link->ft != PETSC_VTK_POINT_FIELD) && (link->ft != PETSC_VTK_POINT_VECTOR_FIELD)) continue;
-        ierr = PetscSectionGetDof(dm->localSection,vStart,&bs);CHKERRQ(ierr);
+        ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
+        if (!dmX) dmX = dm;
+        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,vStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
         ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
-        ierr = PetscMalloc1(piece.nvertices,&y);CHKERRQ(ierr);
-        for (i=0; ift == PETSC_VTK_POINT_VECTOR_FIELD) {
+            PetscInt cnt, l;
+            for (l = 0; l < loops_per_scalar; l++) {
+              if (!localized) {
+                for (v=vStart,cnt=0; v= vStart) && (closure[v] < vEnd)) {
+                      PetscInt    voff;
+                      const PetscScalar *xpoint;
+
+                      if (nfields) {
+                        ierr = PetscSectionGetFieldOffset(section,closure[v],field,&voff);CHKERRQ(ierr);
+                      } else {
+                        ierr = PetscSectionGetOffset(section,closure[v],&voff);CHKERRQ(ierr);
+                      }
+                      xpoint         = &x[voff];
+                      for (j = 0; j < fbs; j++) {
+                        y[cnt + off++] = (PetscVTUReal) (l ? PetscImaginaryPart(xpoint[j]) : PetscRealPart(xpoint[j]));
+                      }
+                      for (; j < 3; j++) y[cnt + off++] = 0.;
+                    }
+                  }
+                  cnt += off;
+                  ierr = DMPlexRestoreTransitiveClosure(dmX, c, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
+                }
+              }
+              if (cnt != piece.nvertices*3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Count does not match");
+              ierr = TransferWrite(viewer,fp,r,0,y,buffer,piece.nvertices*3,MPIU_VTUREAL,tag);CHKERRQ(ierr);
             }
           } else {
-            for (c=cStart,cnt=0; c= vStart) && (closure[v] < vEnd)) {
-                  PetscScalar *xpoint;
-
-                  ierr = DMPlexPointLocalRead(dm,closure[v],x,&xpoint);CHKERRQ(ierr);
-                  y[cnt + off++] = xpoint[i];
+            for (i=0; i= vStart) && (closure[v] < vEnd)) {
+                        PetscInt    voff;
+                        const PetscScalar *xpoint;
+
+                        if (nfields) {
+                          ierr           = PetscSectionGetFieldOffset(section,closure[v],field,&voff);CHKERRQ(ierr);
+                        } else {
+                          ierr           = PetscSectionGetOffset(section,closure[v],&voff);CHKERRQ(ierr);
+                        }
+                        xpoint         = &x[voff];
+                        y[cnt + off++] = (l ? PetscImaginaryPart(xpoint[i]) : PetscRealPart(xpoint[i]));
+                      }
+                    }
+                    cnt += off;
+                    ierr = DMPlexRestoreTransitiveClosure(dmX, c, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
+                  }
                 }
+                if (cnt != piece.nvertices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Count does not match");
+                ierr = TransferWrite(viewer,fp,r,0,y,buffer,piece.nvertices,MPIU_VTUREAL,tag);CHKERRQ(ierr);
               }
-              cnt += off;
-              ierr = DMPlexRestoreTransitiveClosure(dm, c, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
             }
           }
-          if (cnt != piece.nvertices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Count does not match");
-          ierr = TransferWrite(viewer,fp,r,0,y,buffer,piece.nvertices,MPIU_SCALAR,tag);CHKERRQ(ierr);
         }
         ierr = PetscFree(y);CHKERRQ(ierr);
         ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
       }
     } else if (!rank) {
-      ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].nvertices*3,MPIU_SCALAR,tag);CHKERRQ(ierr); /* positions */
+      PetscInt l;
+
+      ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].nvertices*3,MPIU_VTUREAL,tag);CHKERRQ(ierr); /* positions */
       ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].nconn,MPI_INT,tag);CHKERRQ(ierr); /* connectivity */
       ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].ncells,MPI_INT,tag);CHKERRQ(ierr); /* offsets */
       ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].ncells,MPI_CHAR,tag);CHKERRQ(ierr); /* types */
       ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].ncells,MPI_INT,tag);CHKERRQ(ierr); /* owner rank (cells) */
       /* all cell data */
       for (link=vtk->link; link; link=link->next) {
-        PetscInt bs;
+        Vec               X = (Vec)link->vec;
+        PetscInt bs, nfields, field;
+        DM           dmX;
+        PetscSection section = NULL;
+
         if ((link->ft != PETSC_VTK_CELL_FIELD) && (link->ft != PETSC_VTK_CELL_VECTOR_FIELD)) continue;
-        ierr = PetscSectionGetDof(dm->localSection,cStart,&bs);CHKERRQ(ierr);
-        for (i=0; ivec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,cStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
+        for (i=0; field<(nfields?nfields:1); field++) {
+          PetscInt     fbs,j;
+          PetscFV      fv = NULL;
+          PetscObject  f;
+          PetscClassId fClass;
+          PetscBool    vector;
+          if (nfields) {        /* We have user-defined fields/components */
+            ierr = PetscSectionGetFieldDof(section,cStart,field,&fbs);CHKERRQ(ierr);
+          } else fbs = bs;      /* Say we have one field with 'bs' components */
+          ierr = DMGetField(dmX,field,NULL,&f);CHKERRQ(ierr);
+          ierr = PetscObjectGetClassId(f,&fClass);CHKERRQ(ierr);
+          if (fClass == PETSCFV_CLASSID) {
+            fv = (PetscFV) f;
+          }
+          vector = PETSC_FALSE;
+          if (link->ft == PETSC_VTK_CELL_VECTOR_FIELD) {
+            vector = PETSC_TRUE;
+            for (j = 0; j < fbs; j++) {
+              const char *compName = NULL;
+              if (fv) {
+                ierr = PetscFVGetComponentName(fv,j,&compName);CHKERRQ(ierr);
+                if (compName) break;
+              }
+            }
+            if (j < fbs) vector = PETSC_FALSE;
+          }
+          if (vector) {
+            for (l = 0; l < loops_per_scalar; l++) {
+              ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].ncells*3,MPIU_VTUREAL,tag);CHKERRQ(ierr);
+            }
+          } else {
+            for (i=0; ilink; link; link=link->next) {
-        PetscInt bs;
+        Vec               X = (Vec)link->vec;
+        DM                dmX;
+        PetscInt bs, nfields, field;
+        PetscSection section = NULL;
+
         if ((link->ft != PETSC_VTK_POINT_FIELD) && (link->ft != PETSC_VTK_POINT_VECTOR_FIELD)) continue;
-        ierr = PetscSectionGetDof(dm->localSection,vStart,&bs);CHKERRQ(ierr);
-        for (i=0; ivec, "section", (PetscObject*) &section);CHKERRQ(ierr);
+        if (!section) {ierr = DMGetLocalSection(dmX, &section);CHKERRQ(ierr);}
+        ierr = PetscSectionGetDof(section,vStart,&bs);CHKERRQ(ierr);
+        ierr = PetscSectionGetNumFields(section,&nfields);CHKERRQ(ierr);
+        field = 0;
+        if (link->field >= 0) {
+          field = link->field;
+          nfields = field + 1;
+        }
+        for (i=0; field<(nfields?nfields:1); field++) {
+          PetscInt   fbs;
+          if (nfields) {        /* We have user-defined fields/components */
+            ierr = PetscSectionGetFieldDof(section,vStart,field,&fbs);CHKERRQ(ierr);
+          } else fbs = bs;      /* Say we have one field with 'bs' components */
+          if (link->ft == PETSC_VTK_POINT_VECTOR_FIELD) {
+            for (l = 0; l < loops_per_scalar; l++) {
+              ierr = TransferWrite(viewer,fp,r,0,NULL,buffer,gpiece[r].nvertices*3,MPIU_VTUREAL,tag);CHKERRQ(ierr);
+            }
+          } else {
+            for (i=0; idata;
-  PetscInt       i,*globals;
-
   PetscFunctionBegin;
-  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
-  for (i=0; i<red->N; i++) globals[i] = i;
-  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -339,12 +332,19 @@ static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscI
   DM_Redundant   *red = (DM_Redundant*)dm->data;
   PetscErrorCode ierr;
   PetscMPIInt    myrank;
+  PetscInt       i,*globals;
 
   PetscFunctionBegin;
   ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
   red->rank = rank;
   red->N    = N;
   red->n    = (myrank == rank) ? N : 0;
+
+  /* mapping is set up here */
+  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
+  for (i=0; i<red->N; i++) globals[i] = i;
+  ierr = ISLocalToGlobalMappingDestroy(&dm->ltogmap);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
diff --git a/src/dm/impls/shell/dmshell.c b/src/dm/impls/shell/dmshell.c
index 2f82fe38eb0..9e9c1fe9bf5 100644
--- a/src/dm/impls/shell/dmshell.c
+++ b/src/dm/impls/shell/dmshell.c
@@ -206,12 +206,15 @@ static PetscErrorCode DMCreateMatrix_Shell(DM dm,Mat *J)
     }
   }
   if (((PetscObject)A)->refct < 2) { /* We have an exclusive reference so we can give it out */
+    PetscBool f;
+
     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
-    ierr = MatZeroEntries(A);CHKERRQ(ierr);
+    /* MATSHELL does not implement MATOP_ZERO_ENTRIES */
+    ierr = MatHasOperation(A,MATOP_ZERO_ENTRIES,&f);CHKERRQ(ierr);
+    if (f) { ierr = MatZeroEntries(A);CHKERRQ(ierr); }
     *J   = A;
-  } else {                      /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
+  } else { /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
     ierr = MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,J);CHKERRQ(ierr);
-    ierr = MatZeroEntries(*J);CHKERRQ(ierr);
   }
   PetscFunctionReturn(0);
 }
@@ -232,7 +235,7 @@ PetscErrorCode DMCreateGlobalVector_Shell(DM dm,Vec *gvec)
     ierr  = PetscObjectReference((PetscObject)X);CHKERRQ(ierr);
     ierr  = VecZeroEntries(X);CHKERRQ(ierr);
     *gvec = X;
-  } else {                      /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
+  } else { /* Need to create a copy */
     ierr = VecDuplicate(X,gvec);CHKERRQ(ierr);
     ierr = VecZeroEntries(*gvec);CHKERRQ(ierr);
   }
@@ -256,7 +259,7 @@ PetscErrorCode DMCreateLocalVector_Shell(DM dm,Vec *gvec)
     ierr  = PetscObjectReference((PetscObject)X);CHKERRQ(ierr);
     ierr  = VecZeroEntries(X);CHKERRQ(ierr);
     *gvec = X;
-  } else {                      /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
+  } else { /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
     ierr = VecDuplicate(X,gvec);CHKERRQ(ierr);
     ierr = VecZeroEntries(*gvec);CHKERRQ(ierr);
   }
@@ -365,7 +368,6 @@ PetscErrorCode DMShellSetMatrix(DM dm,Mat J)
 @*/
 PetscErrorCode DMShellSetCreateMatrix(DM dm,PetscErrorCode (*func)(DM,Mat*))
 {
-
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
   dm->ops->creatematrix = func;
@@ -430,7 +432,6 @@ PetscErrorCode DMShellSetGlobalVector(DM dm,Vec X)
 @*/
 PetscErrorCode DMShellSetCreateGlobalVector(DM dm,PetscErrorCode (*func)(DM,Vec*))
 {
-
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
   dm->ops->createglobalvector = func;
@@ -462,7 +463,7 @@ PetscErrorCode DMShellSetLocalVector(DM dm,Vec X)
   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
   ierr = PetscObjectTypeCompare((PetscObject)dm,DMSHELL,&isshell);CHKERRQ(ierr);
   if (!isshell) PetscFunctionReturn(0);
-  ierr           = VecGetDM(X,&vdm);CHKERRQ(ierr);
+  ierr = VecGetDM(X,&vdm);CHKERRQ(ierr);
   /*
       if the vector proposed as the new base global vector for the DM is a DM vector associated
       with the same DM then the current base global vector for the DM is ok and if we replace it with the new one
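A minimal sketch of the guard used in DMCreateMatrix_Shell above (illustrative only; the helper name is an assumption): MatZeroEntries() is only called when the matrix type actually implements MATOP_ZERO_ENTRIES, which MATSHELL matrices may not.

#include <petscmat.h>

static PetscErrorCode ZeroIfSupported(Mat A)
{
  PetscBool      has;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatHasOperation(A, MATOP_ZERO_ENTRIES, &has);CHKERRQ(ierr); /* e.g. PETSC_FALSE for MATSHELL */
  if (has) { ierr = MatZeroEntries(A);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}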
diff --git a/src/dm/impls/stag/examples/tests/ex10.c b/src/dm/impls/stag/examples/tests/ex10.c
index 8d227967781..ba0b2cd118e 100644
--- a/src/dm/impls/stag/examples/tests/ex10.c
+++ b/src/dm/impls/stag/examples/tests/ex10.c
@@ -35,8 +35,8 @@ int main(int argc,char **argv)
   ierr = DMGlobalToLocalEnd(dm,vec,INSERT_VALUES,vecLocal1);CHKERRQ(ierr);
 
   ierr = DMStagGetCorners(dm,&startx,&starty,NULL,&nx,&ny,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOFRead(dm,vecLocal1,&a1);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOF(dm,vecLocal2,&a2);CHKERRQ(ierr);
+  ierr = DMStagVecGetArrayRead(dm,vecLocal1,&a1);CHKERRQ(ierr);
+  ierr = DMStagVecGetArray(dm,vecLocal2,&a2);CHKERRQ(ierr);
   for (j=starty; j 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Non-periodic check implemented assuming stencilWidth = 1");
       for (j=starty; j 0) {
     ierr = DMStagGetLocationSlot(dm,DMSTAG_LEFT,0,&idxLeft);CHKERRQ(ierr);
   }
@@ -162,13 +162,13 @@ static PetscErrorCode Test2_1d(DM dm)
       }
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dm,vecLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dm,vecLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dm,&vecGlobal);CHKERRQ(ierr);
   ierr = DMLocalToGlobal(dm,vecLocal,INSERT_VALUES,vecGlobal);CHKERRQ(ierr);
   ierr = VecDuplicate(vecLocal,&vecLocalCheck);CHKERRQ(ierr);
   ierr = VecSet(vecLocalCheck,-1.0);CHKERRQ(ierr);
   ierr = DMGlobalToLocal(dm,vecGlobal,INSERT_VALUES,vecLocalCheck);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOFRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArrayRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
   for (i=startx; i 0) {
     ierr = DMStagGetLocationSlot(dm,DMSTAG_DOWN_LEFT,0,&idxDownLeft);CHKERRQ(ierr);
   }
@@ -246,13 +246,13 @@ static PetscErrorCode Test2_2d(DM dm)
       }
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dm,vecLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dm,vecLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dm,&vecGlobal);CHKERRQ(ierr);
   ierr = DMLocalToGlobal(dm,vecLocal,INSERT_VALUES,vecGlobal);CHKERRQ(ierr);
   ierr = VecDuplicate(vecLocal,&vecLocalCheck);CHKERRQ(ierr);
   ierr = VecSet(vecLocalCheck,-1.0);CHKERRQ(ierr);
   ierr = DMGlobalToLocal(dm,vecGlobal,INSERT_VALUES,vecLocalCheck);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOFRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArrayRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
   for (j=starty; j 0) {
     ierr = DMStagGetLocationSlot(dm,DMSTAG_BACK_DOWN_LEFT,0,&idxBackDownLeft);CHKERRQ(ierr);
   }
@@ -389,13 +389,13 @@ static PetscErrorCode Test2_3d(DM dm)
       }
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dm,vecLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dm,vecLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dm,&vecGlobal);CHKERRQ(ierr);
   ierr = DMLocalToGlobal(dm,vecLocal,INSERT_VALUES,vecGlobal);CHKERRQ(ierr);
   ierr = VecDuplicate(vecLocal,&vecLocalCheck);CHKERRQ(ierr);
   ierr = VecSet(vecLocalCheck,-1.0);CHKERRQ(ierr);
   ierr = DMGlobalToLocal(dm,vecGlobal,INSERT_VALUES,vecLocalCheck);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOFRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArrayRead(dm,vecLocalCheck,&arr);CHKERRQ(ierr);
   for (k=startz; k
+#include 
+
+int main(int argc,char **argv)
+{
+  PetscErrorCode  ierr;
+  DM              dm,dm2;
+  PetscInt        dim;
+  PetscBool       flg,setSizes;
+
+  /* Create a DMStag object */
+  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
+  ierr = PetscOptionsGetInt(NULL,NULL,"-dim",&dim,&flg);CHKERRQ(ierr);
+  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Supply -dim option with value 1, 2, or 3\n");
+  setSizes = PETSC_FALSE;
+  ierr = PetscOptionsGetBool(NULL,NULL,"-setsizes",&setSizes,NULL);CHKERRQ(ierr);
+  if (setSizes) {
+    PetscMPIInt size;
+    PetscInt lx[4] = {2,3},   ranksx = 2, mx = 5;
+    PetscInt ly[3] = {3,8,2}, ranksy = 3, my = 13;
+    PetscInt lz[2] = {2,4},   ranksz = 2, mz = 6;
+
+    ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
+    switch (dim) {
+      case 1:
+        if (size != ranksx) SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must run on %D ranks with -dim 1 -setsizes",ranksx);
+        ierr = DMStagCreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,mx,1,1,DMSTAG_STENCIL_BOX,1,lx,&dm);CHKERRQ(ierr);
+        break;
+      case 2:
+        if (size != ranksx * ranksy) SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must run on %D ranks with -dim 2 -setsizes",ranksx * ranksy);
+        ierr = DMStagCreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,mx,my,ranksx,ranksy,1,1,1,DMSTAG_STENCIL_BOX,1,lx,ly,&dm);CHKERRQ(ierr);
+        break;
+      case 3:
+        if (size != ranksx * ranksy * ranksz) SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must run on %D ranks with -dim 3 -setsizes", ranksx * ranksy * ranksz);
+        ierr = DMStagCreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,mx,my,mz,ranksx,ranksy,ranksz,1,1,1,1,DMSTAG_STENCIL_BOX,1,lx,ly,lz,&dm);CHKERRQ(ierr);
+        break;
+      default:
+        SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_SUP,"No support for dimension %D",dim);
+    }
+  } else {
+    if (dim == 1) {
+      ierr = DMStagCreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,2,2,3,DMSTAG_STENCIL_BOX,1,NULL,&dm);CHKERRQ(ierr);
+    } else if (dim == 2) {
+      ierr = DMStagCreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,2,2,PETSC_DECIDE,PETSC_DECIDE,2,3,4,DMSTAG_STENCIL_BOX,1,NULL,NULL,&dm);CHKERRQ(ierr);
+    } else if (dim == 3) {
+      ierr = DMStagCreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,2,2,2,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,2,3,4,5,DMSTAG_STENCIL_BOX,1,NULL,NULL,NULL,&dm);CHKERRQ(ierr);
+    } else {
+      ierr = PetscPrintf(PETSC_COMM_WORLD,"Supply -dim option with value 1, 2, or 3\n");CHKERRQ(ierr);
+      return 1;
+    }
+  }
+  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
+  ierr = DMSetUp(dm);CHKERRQ(ierr);
+  ierr = DMView(dm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+
+  /* Create a cloned DMStag object */
+  ierr = DMClone(dm,&dm2);CHKERRQ(ierr);
+  ierr = DMView(dm2,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
+  ierr = DMDestroy(&dm2);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+/*TEST
+
+   test:
+      suffix: 1
+      nsize: 1
+      args: -dim 1
+
+   test:
+      suffix: 2
+      nsize: 4
+      args: -dim 2
+
+   test:
+      suffix: 3
+      nsize: 6
+      args: -dim 3 -stag_grid_x 3 -stag_grid_y 2 -stag_grid_z 1
+
+   test:
+      suffix: 4
+      nsize: 2
+      args: -dim 1 -setsizes
+
+   test:
+      suffix: 5
+      nsize: 6
+      args: -dim 2 -setsizes
+
+   test:
+      suffix: 6
+      nsize: 12
+      args: -dim 3 -setsizes
+
+TEST*/
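A minimal sketch of the renamed DMStag accessors exercised by the updated tests (illustrative only; the helper name and the assumption of a 2d, already set-up DMSTAG with an element dof are assumptions): DMStagVecGetArray()/DMStagVecRestoreArray() replace the DOF-suffixed variants, and the trailing array index is a slot obtained from DMStagGetLocationSlot().

#include <petscdmstag.h>

static PetscErrorCode FillElementDof0(DM dm, Vec vecLocal)
{
  PetscScalar    ***arr;                 /* 2d DMStag: indexed arr[j][i][slot] */
  PetscInt       startx, starty, nx, ny, i, j, slot;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMStagGetCorners(dm, &startx, &starty, NULL, &nx, &ny, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
  ierr = DMStagGetLocationSlot(dm, DMSTAG_ELEMENT, 0, &slot);CHKERRQ(ierr);
  ierr = DMStagVecGetArray(dm, vecLocal, &arr);CHKERRQ(ierr);   /* formerly DMStagVecGetArrayDOF() */
  for (j = starty; j < starty + ny; ++j) {
    for (i = startx; i < startx + nx; ++i) arr[j][i][slot] = 1.0;
  }
  ierr = DMStagVecRestoreArray(dm, vecLocal, &arr);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}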
diff --git a/src/dm/impls/stag/examples/tests/ex23.c b/src/dm/impls/stag/examples/tests/ex23.c
new file mode 100644
index 00000000000..c1ce2383615
--- /dev/null
+++ b/src/dm/impls/stag/examples/tests/ex23.c
@@ -0,0 +1,95 @@
+static char help[] = "Test modifying DMStag coordinates, when represented as a product of 1d coordinate arrays\n\n";
+
+#include <petscdm.h>
+#include <petscdmstag.h>
+
+int main(int argc,char **argv)
+{
+  PetscErrorCode ierr;
+  DM             dm,cdm;
+  PetscInt       ex,ey,ez,n[3],start[3],nExtra[3],iNext,iPrev,iCenter,d,round;
+  PetscScalar    **cArrX,**cArrY,**cArrZ;
+
+  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
+
+  ierr = DMStagCreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_GHOSTED,DM_BOUNDARY_PERIODIC,4,3,2,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,1,1,DMSTAG_STENCIL_BOX,2,NULL,NULL,NULL,&dm);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
+  ierr = DMSetUp(dm);CHKERRQ(ierr);
+  ierr = DMStagSetUniformCoordinatesProduct(dm,-1.0,0.0,-2.0,0.0,-3.0,0.0);CHKERRQ(ierr);
+
+  ierr = DMStagGetCorners(dm,&start[0],&start[1],&start[2],&n[0],&n[1],&n[2],&nExtra[0],&nExtra[1],&nExtra[2]);CHKERRQ(ierr);
+
+  for (round=1; round<=2; ++round) {
+    ierr = DMStagGetProductCoordinateArrays(dm,&cArrX,&cArrY,&cArrZ);CHKERRQ(ierr);
+    ierr = DMStagGetProductCoordinateLocationSlot(dm,DMSTAG_LEFT,&iPrev);CHKERRQ(ierr);
+    ierr = DMStagGetProductCoordinateLocationSlot(dm,DMSTAG_RIGHT,&iNext);CHKERRQ(ierr);
+    ierr = DMStagGetProductCoordinateLocationSlot(dm,DMSTAG_ELEMENT,&iCenter);CHKERRQ(ierr);
+    if (round == 1){
+      /* On first round, do a stretching operation */
+      for (ex=start[0]; ex
+#include 
+#include 
+
+/* Shorter, more convenient names for DMStagStencilLocation entries */
+#define BACK_DOWN_LEFT   DMSTAG_BACK_DOWN_LEFT
+#define BACK_DOWN        DMSTAG_BACK_DOWN
+#define BACK_DOWN_RIGHT  DMSTAG_BACK_DOWN_RIGHT
+#define BACK_LEFT        DMSTAG_BACK_LEFT
+#define BACK             DMSTAG_BACK
+#define BACK_RIGHT       DMSTAG_BACK_RIGHT
+#define BACK_UP_LEFT     DMSTAG_BACK_UP_LEFT
+#define BACK_UP          DMSTAG_BACK_UP
+#define BACK_UP_RIGHT    DMSTAG_BACK_UP_RIGHT
+#define DOWN_LEFT        DMSTAG_DOWN_LEFT
+#define DOWN             DMSTAG_DOWN
+#define DOWN_RIGHT       DMSTAG_DOWN_RIGHT
+#define LEFT             DMSTAG_LEFT
+#define ELEMENT          DMSTAG_ELEMENT
+#define RIGHT            DMSTAG_RIGHT
+#define UP_LEFT          DMSTAG_UP_LEFT
+#define UP               DMSTAG_UP
+#define UP_RIGHT         DMSTAG_UP_RIGHT
+#define FRONT_DOWN_LEFT  DMSTAG_FRONT_DOWN_LEFT
+#define FRONT_DOWN       DMSTAG_FRONT_DOWN
+#define FRONT_DOWN_RIGHT DMSTAG_FRONT_DOWN_RIGHT
+#define FRONT_LEFT       DMSTAG_FRONT_LEFT
+#define FRONT            DMSTAG_FRONT
+#define FRONT_RIGHT      DMSTAG_FRONT_RIGHT
+#define FRONT_UP_LEFT    DMSTAG_FRONT_UP_LEFT
+#define FRONT_UP         DMSTAG_FRONT_UP
+#define FRONT_UP_RIGHT   DMSTAG_FRONT_UP_RIGHT
+
+static PetscErrorCode CreateMat(DM,Mat*);
+static PetscErrorCode CheckMat(DM,Mat);
+
+int main(int argc,char **argv)
+{
+  PetscErrorCode ierr;
+  DM             dmSol;
+  Mat            A;
+
+  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
+  {
+    const PetscInt dof0 = 0, dof1 = 0,dof2 = 1, dof3 = 1; /* 1 dof on each face and element center */
+    const PetscInt stencilWidth = 1;
+    ierr = DMStagCreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,4,5,6,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof0,dof1,dof2,dof3,DMSTAG_STENCIL_BOX,stencilWidth,NULL,NULL,NULL,&dmSol);CHKERRQ(ierr);
+    ierr = DMSetFromOptions(dmSol);CHKERRQ(ierr);
+    ierr = DMSetUp(dmSol);CHKERRQ(ierr);
+    ierr = DMStagSetUniformCoordinatesExplicit(dmSol,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
+  }
+  ierr = CreateMat(dmSol,&A);CHKERRQ(ierr);
+  ierr = CheckMat(dmSol,A);CHKERRQ(ierr);
+  ierr = MatDestroy(&A);CHKERRQ(ierr);
+  ierr = DMDestroy(&dmSol);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+static PetscErrorCode CreateMat(DM dmSol,Mat *pA)
+{
+  PetscErrorCode    ierr;
+  Vec               coordLocal;
+  Mat               A;
+  PetscInt          startx,starty,startz,N[3],nx,ny,nz,ex,ey,ez,d;
+  PetscInt          icp[3],icux[3],icuy[3],icuz[3],icux_right[3],icuy_up[3],icuz_front[3];
+  PetscBool         isLastRankx,isLastRanky,isLastRankz,isFirstRankx,isFirstRanky,isFirstRankz;
+  PetscReal         hx,hy,hz;
+  DM                dmCoord;
+  PetscScalar       ****arrCoord;
+
+  PetscFunctionBeginUser;
+  ierr = DMCreateMatrix(dmSol,pA);CHKERRQ(ierr);
+  A = *pA;
+  ierr = DMStagGetCorners(dmSol,&startx,&starty,&startz,&nx,&ny,&nz,NULL,NULL,NULL);CHKERRQ(ierr);
+  ierr = DMStagGetGlobalSizes(dmSol,&N[0],&N[1],&N[2]);CHKERRQ(ierr);
+  if (N[0] < 2 || N[1] < 2 || N[2] < 2) SETERRQ(PetscObjectComm((PetscObject)dmSol),PETSC_ERR_ARG_SIZ,"This example requires at least two elements in each dimension");
+  ierr = DMStagGetIsLastRank(dmSol,&isLastRankx,&isLastRanky,&isLastRankz);CHKERRQ(ierr);
+  ierr = DMStagGetIsFirstRank(dmSol,&isFirstRankx,&isFirstRanky,&isFirstRankz);CHKERRQ(ierr);
+  hx = 1.0/N[0]; hy = 1.0/N[1]; hz = 1.0/N[2];
+  ierr = DMGetCoordinateDM(dmSol,&dmCoord);CHKERRQ(ierr);
+  ierr = DMGetCoordinatesLocal(dmSol,&coordLocal);CHKERRQ(ierr);
+  ierr = DMStagVecGetArrayRead(dmCoord,coordLocal,&arrCoord);CHKERRQ(ierr);
+  for (d=0; d<3; ++d) {
+    ierr = DMStagGetLocationSlot(dmCoord,ELEMENT,d,&icp[d]       );CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,LEFT,   d,&icux[d]      );CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,DOWN,   d,&icuy[d]      );CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,BACK,   d,&icuz[d]      );CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,RIGHT,  d,&icux_right[d]);CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,UP,     d,&icuy_up[d]   );CHKERRQ(ierr);
+    ierr = DMStagGetLocationSlot(dmCoord,FRONT,  d,&icuz_front[d]);CHKERRQ(ierr);
+  }
+
+  for (ez = startz; ez 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Check implemented assuming stencilWidth = 1");
     for (k=startz; kdmStokes,0,0,2,0,&dmVelAvg);CHKERRQ(ierr); /* 2 dof per element */
   ierr = DMSetUp(dmVelAvg);CHKERRQ(ierr);
   ierr = DMStagSetUniformCoordinatesExplicit(dmVelAvg,0.0,ctx->xmax,0.0,ctx->ymax,0.0,0.0);CHKERRQ(ierr);
diff --git a/src/dm/impls/stag/examples/tutorials/output/ex3_2.out b/src/dm/impls/stag/examples/tutorials/output/ex3_2.out
index 477f50a8017..ead69ad0b60 100644
--- a/src/dm/impls/stag/examples/tutorials/output/ex3_2.out
+++ b/src/dm/impls/stag/examples/tutorials/output/ex3_2.out
@@ -1,40 +1,18 @@
   0 KSP Residual norm 29.7322 
-  1 KSP Residual norm 29.7154 
-  2 KSP Residual norm 29.599 
-  3 KSP Residual norm 28.9951 
-  4 KSP Residual norm 20.4734 
-  5 KSP Residual norm 20.455 
-  6 KSP Residual norm 11.8524 
-  7 KSP Residual norm 11.8522 
-  8 KSP Residual norm 10.8716 
-  9 KSP Residual norm 9.40732 
- 10 KSP Residual norm 5.82129 
- 11 KSP Residual norm 4.70239 
- 12 KSP Residual norm 3.4366 
- 13 KSP Residual norm 1.88612 
- 14 KSP Residual norm 1.13645 
- 15 KSP Residual norm 1.00455 
- 16 KSP Residual norm 0.459665 
- 17 KSP Residual norm 0.20327 
- 18 KSP Residual norm 0.180225 
- 19 KSP Residual norm 0.0931185 
- 20 KSP Residual norm 0.0576615 
- 21 KSP Residual norm 0.0490717 
- 22 KSP Residual norm 0.0392816 
- 23 KSP Residual norm 0.0153601 
- 24 KSP Residual norm 0.0121437 
- 25 KSP Residual norm 0.0097283 
- 26 KSP Residual norm 0.00282707 
- 27 KSP Residual norm 0.00203073 
- 28 KSP Residual norm 0.00164238 
- 29 KSP Residual norm 0.00058169 
- 30 KSP Residual norm 0.000425327 
- 31 KSP Residual norm 0.000425324 
- 32 KSP Residual norm 0.000424161 
- 33 KSP Residual norm 0.000392647 
- 34 KSP Residual norm 0.000384362 
- 35 KSP Residual norm 0.000346703 
- 36 KSP Residual norm 0.000188895 
-Linear solve converged due to CONVERGED_RTOL iterations 36
-Error (abs): 3.79597
-Error (rel): 0.208148
+  1 KSP Residual norm 19.0519 
+  2 KSP Residual norm 18.0607 
+  3 KSP Residual norm 11.1347 
+  4 KSP Residual norm 0.976072 
+  5 KSP Residual norm 0.823254 
+  6 KSP Residual norm 0.808854 
+  7 KSP Residual norm 0.0598007 
+  8 KSP Residual norm 0.0530177 
+  9 KSP Residual norm 0.0423075 
+ 10 KSP Residual norm 0.00428706 
+ 11 KSP Residual norm 0.00392479 
+ 12 KSP Residual norm 0.00258912 
+ 13 KSP Residual norm 0.000326356 
+ 14 KSP Residual norm 0.000294056 
+Linear solve converged due to CONVERGED_RTOL iterations 14
+Error (abs): 0.539402
+Error (rel): 0.0295775
diff --git a/src/dm/impls/stag/makefile b/src/dm/impls/stag/makefile
index 78f38fe8b5f..6a9d1bf2a9c 100644
--- a/src/dm/impls/stag/makefile
+++ b/src/dm/impls/stag/makefile
@@ -3,7 +3,7 @@ ALL: lib
 CPPFLAGS =
 CFLAGS   =
 FFLAGS   =
-SOURCEC  = stag.c stag1d.c stag2d.c stag3d.c stagda.c stagstencil.c stagutils.c
+SOURCEC  = stag.c stag1d.c stag2d.c stag3d.c stagda.c stagintern.c stagstencil.c stagutils.c
 SOURCEF  =
 SOURCEH  = ../../../../include/petscdmstag.h ../../../../include/petsc/private/dmstagimpl.h
 DIRS     = examples
diff --git a/src/dm/impls/stag/stag.c b/src/dm/impls/stag/stag.c
index 7b74b6c3e0a..af840fbb741 100644
--- a/src/dm/impls/stag/stag.c
+++ b/src/dm/impls/stag/stag.c
@@ -2,11 +2,25 @@
    Implementation of DMStag, defining dimension-independent functions in the
    DM API. stag1d.c, stag2d.c, and stag3d.c may include dimension-specific
    implementations of DM API functions, and other files here contain additional
-   DMStag-specific API functions (and internal functions).
+   DMStag-specific API functions, as well as internal functions.
 */
 #include <petsc/private/dmstagimpl.h>
 #include 
 
+static PetscErrorCode DMClone_Stag(DM dm,DM *newdm)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  /* Destroy the DM created by generic logic in DMClone() */
+  if (*newdm) {
+    ierr = DMDestroy(newdm);CHKERRQ(ierr);
+  }
+  ierr = DMStagDuplicateWithoutSetup(dm,PetscObjectComm((PetscObject)dm),newdm);CHKERRQ(ierr);
+  ierr = DMSetUp(*newdm);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
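With this hook in place, DMClone() on a DMSTAG returns a DM that is already set up. A minimal usage sketch, assuming dm is a set-up DMSTAG and the usual PetscErrorCode ierr is in scope (names are illustrative):

  DM dmNew;
  ierr = DMClone(dm,&dmNew);CHKERRQ(ierr); /* dispatches to DMClone_Stag(), so dmNew is set up */
  /* ... use dmNew ... */
  ierr = DMDestroy(&dmNew);CHKERRQ(ierr);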
+
 static PetscErrorCode DMDestroy_Stag(DM dm)
 {
   PetscErrorCode ierr;
@@ -444,6 +458,9 @@ PETSC_EXTERN PetscErrorCode DMCreate_Stag(DM dm)
   stag->coordinateDMType                              = NULL;
 
   ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
+#if defined(PETSC_USE_DEBUG)
+  if (dim != 1 && dim != 2 && dim != 3) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"DMSetDimension() must be called to set a dimension with value 1, 2, or 3");
+#endif
 
   ierr = PetscMemzero(dm->ops,sizeof(*(dm->ops)));CHKERRQ(ierr);
   dm->ops->createcoordinatedm  = DMCreateCoordinateDM_Stag;
@@ -464,6 +481,7 @@ PETSC_EXTERN PetscErrorCode DMCreate_Stag(DM dm)
     case 3: dm->ops->setup     = DMSetUp_Stag_3d; break;
     default : SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"Unsupported dimension %d",dim);
   }
+  dm->ops->clone               = DMClone_Stag;
   dm->ops->view                = DMView_Stag;
   dm->ops->getcompatibility    = DMGetCompatibility_Stag;
   PetscFunctionReturn(0);
diff --git a/src/dm/impls/stag/stag1d.c b/src/dm/impls/stag/stag1d.c
index 4762c11d0b9..61afbf8477f 100644
--- a/src/dm/impls/stag/stag1d.c
+++ b/src/dm/impls/stag/stag1d.c
@@ -39,25 +39,13 @@
 PETSC_EXTERN PetscErrorCode DMStagCreate1d(MPI_Comm comm,DMBoundaryType bndx,PetscInt M,PetscInt dof0,PetscInt dof1,DMStagStencilType stencilType,PetscInt stencilWidth,const PetscInt lx[],DM* dm)
 {
   PetscErrorCode ierr;
-  DM_Stag        *stag;
   PetscMPIInt    size;
 
   PetscFunctionBegin;
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
-  ierr = DMSetDimension(*dm,1);CHKERRQ(ierr); /* Must precede DMSetType */
-  ierr = DMSetType(*dm,DMSTAG);CHKERRQ(ierr);
-  stag = (DM_Stag*)(*dm)->data;
-
-  /* Global sizes and flags (derived quantities set in DMSetUp_Stag) */
-  stag->boundaryType[0] = bndx;
-  stag->N[0]            = M;
-  stag->nRanks[0]       = size;
-  stag->stencilType     = stencilType;
-  stag->stencilWidth    = stencilWidth;
-  ierr = DMStagSetDOF(*dm,dof0,dof1,0,0);CHKERRQ(ierr);
-  ierr = DMStagSetOwnershipRanges(*dm,lx,NULL,NULL);CHKERRQ(ierr);
-
+  ierr = DMSetDimension(*dm,1);CHKERRQ(ierr);
+  ierr = DMStagInitialize(bndx,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,M,0,0,size,0,0,dof0,dof1,0,0,stencilType,stencilWidth,lx,NULL,NULL,*dm);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
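For reference, a minimal sketch of the 1d constructor, which is now a thin wrapper over DMStagInitialize(); the sizes and dof counts below are illustrative, and ierr is assumed in scope:

  DM dm;
  ierr = DMStagCreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,10,1,1,DMSTAG_STENCIL_BOX,1,NULL,&dm);CHKERRQ(ierr);
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  ierr = DMSetUp(dm);CHKERRQ(ierr);
  /* ... */
  ierr = DMDestroy(&dm);CHKERRQ(ierr);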
 
@@ -80,7 +68,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_1d(DM dm,PetscRe
   }
   ierr = DMGetLocalVector(dmCoord,&coordLocal);CHKERRQ(ierr);
 
-  ierr = DMStagVecGetArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   if (stagCoord->dof[0]) {
     ierr = DMStagGetLocationSlot(dmCoord,DMSTAG_LEFT,0,&ileft);CHKERRQ(ierr);
   }
@@ -102,7 +90,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_1d(DM dm,PetscRe
         arr[ind][ielement] = min + ((PetscReal)ind + off) * h;
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dmCoord,&coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalBegin(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalEnd(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
diff --git a/src/dm/impls/stag/stag2d.c b/src/dm/impls/stag/stag2d.c
index e1bb268c943..d98a101cd21 100644
--- a/src/dm/impls/stag/stag2d.c
+++ b/src/dm/impls/stag/stag2d.c
@@ -40,29 +40,14 @@
 
 .seealso: DMSTAG, DMStagCreate1d(), DMStagCreate3d(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateLocalVector(), DMLocalToGlobalBegin(), DMDACreate2d()
 @*/
-PETSC_EXTERN PetscErrorCode DMStagCreate2d(MPI_Comm comm, DMBoundaryType bndx ,DMBoundaryType bndy, PetscInt M,PetscInt N, PetscInt m,PetscInt n, PetscInt dof0,PetscInt dof1,PetscInt dof2,DMStagStencilType stencilType,PetscInt stencilWidth,const PetscInt lx[],const PetscInt ly[],DM* dm)
+PETSC_EXTERN PetscErrorCode DMStagCreate2d(MPI_Comm comm, DMBoundaryType bndx,DMBoundaryType bndy, PetscInt M,PetscInt N, PetscInt m,PetscInt n, PetscInt dof0, PetscInt dof1, PetscInt dof2, DMStagStencilType stencilType, PetscInt stencilWidth, const PetscInt lx[], const PetscInt ly[],DM* dm)
 {
   PetscErrorCode ierr;
-  DM_Stag        *stag;
 
   PetscFunctionBegin;
   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
-  ierr = DMSetDimension(*dm,2);CHKERRQ(ierr); /* Must precede DMSetType */
-  ierr = DMSetType(*dm,DMSTAG);CHKERRQ(ierr);
-  stag = (DM_Stag*)(*dm)->data;
-
-  /* Global sizes and flags (derived quantities set in DMSetUp_Stag) */
-  stag->boundaryType[0] = bndx;
-  stag->boundaryType[1] = bndy;
-  stag->N[0]            = M;
-  stag->N[1]            = N;
-  stag->nRanks[0]       = m; /* Adjusted later in DMSetUp_Stag */
-  stag->nRanks[1]       = n; /* Adjusted later in DMSetUp_Stag */
-  stag->stencilType     = stencilType;
-  stag->stencilWidth    = stencilWidth;
-  ierr = DMStagSetDOF(*dm,dof0,dof1,dof2,0);CHKERRQ(ierr);
-  ierr = DMStagSetOwnershipRanges(*dm,lx,ly,NULL);CHKERRQ(ierr);
-
+  ierr = DMSetDimension(*dm,2);CHKERRQ(ierr);
+  ierr = DMStagInitialize(bndx,bndy,DM_BOUNDARY_NONE,M,N,0,m,n,0,dof0,dof1,dof2,0,stencilType,stencilWidth,lx,ly,NULL,*dm);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -85,7 +70,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_2d(DM dm,PetscRe
   }
   ierr = DMGetLocalVector(dmCoord,&coordLocal);CHKERRQ(ierr);
 
-  ierr = DMStagVecGetArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   if (stagCoord->dof[0]) {
     ierr = DMStagGetLocationSlot(dmCoord,DMSTAG_DOWN_LEFT,0,&idownleft);CHKERRQ(ierr);
   }
@@ -130,7 +115,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_2d(DM dm,PetscRe
       }
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dmCoord,&coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalBegin(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalEnd(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
diff --git a/src/dm/impls/stag/stag3d.c b/src/dm/impls/stag/stag3d.c
index cfa61eb1736..0c39fff2b8c 100644
--- a/src/dm/impls/stag/stag3d.c
+++ b/src/dm/impls/stag/stag3d.c
@@ -47,27 +47,11 @@
 PETSC_EXTERN PetscErrorCode DMStagCreate3d(MPI_Comm comm,DMBoundaryType bndx,DMBoundaryType bndy,DMBoundaryType bndz,PetscInt M,PetscInt N,PetscInt P,PetscInt m,PetscInt n,PetscInt p,PetscInt dof0,PetscInt dof1,PetscInt dof2,PetscInt dof3,DMStagStencilType stencilType,PetscInt stencilWidth,const PetscInt lx[],const PetscInt ly[],const PetscInt lz[],DM* dm)
 {
   PetscErrorCode ierr;
-  DM_Stag        *stag;
 
   PetscFunctionBegin;
   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
-  ierr = DMSetDimension(*dm,3);CHKERRQ(ierr); /* Must precede DMSetType */
-  ierr = DMSetType(*dm,DMSTAG);CHKERRQ(ierr);
-  stag = (DM_Stag*)(*dm)->data;
   ierr = DMSetDimension(*dm,3);CHKERRQ(ierr);
-  stag->boundaryType[0] = bndx;
-  stag->boundaryType[1] = bndy;
-  stag->boundaryType[2] = bndz;
-  stag->N[0]            = M;
-  stag->N[1]            = N;
-  stag->N[2]            = P;
-  stag->nRanks[0]       = m; /* Adjusted later in DMSetUp_Stag */
-  stag->nRanks[1]       = n; /* Adjusted later in DMSetUp_Stag */
-  stag->nRanks[2]       = p; /* Adjusted later in DMSetUp_Stag */
-  stag->stencilType     = stencilType;
-  stag->stencilWidth    = stencilWidth;
-  ierr = DMStagSetDOF(*dm,dof0,dof1,dof2,dof3);CHKERRQ(ierr);
-  ierr = DMStagSetOwnershipRanges(*dm,lx,ly,lz);CHKERRQ(ierr);
+  ierr = DMStagInitialize(bndx,bndy,bndz,M,N,P,m,n,p,dof0,dof1,dof2,dof3,stencilType,stencilWidth,lx,ly,lz,*dm);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -89,7 +73,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_3d(DM dm,PetscRe
     if (stagCoord->dof[s] !=0 && stagCoord->dof[s] != 3) SETERRQ2(PetscObjectComm((PetscObject)dm),PETSC_ERR_PLIB,"Coordinate DM in 3 dimensions must have 0 or 3 dof on each stratum, but stratum %d has %d dof",s,stagCoord->dof[s]);
   }
   ierr = DMGetLocalVector(dmCoord,&coordLocal);CHKERRQ(ierr);
-  ierr = DMStagVecGetArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecGetArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   if (stagCoord->dof[0]) {
     ierr = DMStagGetLocationSlot(dmCoord,DMSTAG_BACK_DOWN_LEFT,0,&ibackdownleft);CHKERRQ(ierr);
   }
@@ -166,7 +150,7 @@ PETSC_INTERN PetscErrorCode DMStagSetUniformCoordinatesExplicit_3d(DM dm,PetscRe
       }
     }
   }
-  ierr = DMStagVecRestoreArrayDOF(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
+  ierr = DMStagVecRestoreArray(dmCoord,coordLocal,&arr);CHKERRQ(ierr);
   ierr = DMCreateGlobalVector(dmCoord,&coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalBegin(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
   ierr = DMLocalToGlobalEnd(dmCoord,coordLocal,INSERT_VALUES,coord);CHKERRQ(ierr);
diff --git a/src/dm/impls/stag/stagda.c b/src/dm/impls/stag/stagda.c
index 4cfc4190014..9ccca5723dc 100644
--- a/src/dm/impls/stag/stagda.c
+++ b/src/dm/impls/stag/stagda.c
@@ -323,18 +323,18 @@ static PetscErrorCode DMStagTransferCoordinatesToDMDA(DM dmstag,DMStagStencilLoc
       PetscInt slot;
       PetscScalar **cArrStag;
       ierr = DMStagGetLocationSlot(dmstagCoord,loc,0,&slot);CHKERRQ(ierr);
-      ierr = DMStagVecGetArrayDOFRead(dmstagCoord,stagCoord,&cArrStag);CHKERRQ(ierr);
+      ierr = DMStagVecGetArrayRead(dmstagCoord,stagCoord,&cArrStag);CHKERRQ(ierr);
       for (ex=start[0]; ex
+
+/* Note: this is an internal function but we provide a man page in case it's made public */
+/*@C
+  DMStagDuplicateWithoutSetup - duplicate a DMStag object without setting it up
+
+  Collective
+
+  Input Parameters:
++ dm - The original DM object
+- comm - the MPI communicator for the new DM (MPI_COMM_NULL to use the same communicator as dm)
+
+  Output Parameter:
+. newdm  - The new DM object
+
+  Developer Notes:
+  Copies over all of the state for a DMStag object, except that which is
+  populated during DMSetUp().  This function is used within (all) other
+  functions that require an un-setup clone, which is common when duplicating,
+  coarsening, refining, or creating compatible DMs with different fields.  For
+  this reason it also accepts an MPI communicator as an argument (though note
+  that at the time of this writing, implementations of DMCoarsen and DMRefine
+  don't usually seem to respect their "comm" arguments). This function could be
+  pushed up to the general DM API (and perhaps given a different name).
+
+  Level: developer
+
+  .seealso: DMClone(), DMStagCreateCompatibleDMStag(), DMCoarsen(), DMRefine()
+@*/
+PetscErrorCode DMStagDuplicateWithoutSetup(DM dm, MPI_Comm comm, DM *newdm)
+{
+  PetscErrorCode  ierr;
+  DM_Stag * const stag  = (DM_Stag*)dm->data;
+  DM_Stag         *newstag;
+  PetscInt        dim;
+  MPI_Comm        newcomm;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecificType(dm,DM_CLASSID,1,DMSTAG);
+  newcomm = (comm == MPI_COMM_NULL) ? PetscObjectComm((PetscObject)dm) : comm;
+  ierr = DMCreate(newcomm,newdm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
+  ierr = DMSetDimension(*newdm,dim);CHKERRQ(ierr);
+
+  /* Call routine to define all data required for setup */
+  ierr = DMStagInitialize(stag->boundaryType[0],stag->boundaryType[1],stag->boundaryType[2],stag->N[0],stag->N[1],stag->N[2],stag->nRanks[0],stag->nRanks[1],stag->nRanks[2],stag->dof[0],stag->dof[1],stag->dof[2],stag->dof[3],stag->stencilType,stag->stencilWidth,stag->l[0],stag->l[1],stag->l[2],*newdm);CHKERRQ(ierr);
+
+  /* Copy all data unrelated to setup */
+  newstag = (DM_Stag*)(*newdm)->data;
+  ierr = PetscStrallocpy(stag->coordinateDMType,(char**)&newstag->coordinateDMType);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
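A minimal sketch of the intended internal pattern (compare DMStagCreateCompatibleDMStag() later in this patch): duplicate, adjust any pre-setup state, then set up. The stencil-width change below is purely illustrative:

  DM newdm;
  ierr = DMStagDuplicateWithoutSetup(dm,PetscObjectComm((PetscObject)dm),&newdm);CHKERRQ(ierr);
  ierr = DMStagSetStencilWidth(newdm,2);CHKERRQ(ierr); /* any modification allowed before DMSetUp() */
  ierr = DMSetUp(newdm);CHKERRQ(ierr);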
+
+/* Populate the data which is set after DMCreate_Stag() is called and later used by DMSetUp_Stag(),
+   such as the grid dimensions and dof information. Arguments corresponding to dimensions higher
+   than the dimension of the DM are ignored. */
+PetscErrorCode DMStagInitialize(DMBoundaryType bndx,DMBoundaryType bndy,DMBoundaryType bndz,PetscInt M,PetscInt N,PetscInt P,PetscInt m,PetscInt n,PetscInt p,PetscInt dof0,PetscInt dof1,PetscInt dof2,PetscInt dof3,DMStagStencilType stencilType,PetscInt stencilWidth,const PetscInt lx[],const PetscInt ly[],const PetscInt lz[],DM dm)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = DMSetType(dm,DMSTAG);CHKERRQ(ierr);
+  ierr = DMStagSetBoundaryTypes(dm,bndx,bndy,bndz);CHKERRQ(ierr);
+  ierr = DMStagSetGlobalSizes(dm,M,N,P);CHKERRQ(ierr);
+  ierr = DMStagSetNumRanks(dm,m,n,p);CHKERRQ(ierr);
+  ierr = DMStagSetStencilType(dm,stencilType);CHKERRQ(ierr);
+  ierr = DMStagSetStencilWidth(dm,stencilWidth);CHKERRQ(ierr);
+  ierr = DMStagSetDOF(dm,dof0,dof1,dof2,dof3);CHKERRQ(ierr);
+  ierr = DMStagSetOwnershipRanges(dm,lx,ly,lz);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
diff --git a/src/dm/impls/stag/stagstencil.c b/src/dm/impls/stag/stagstencil.c
index ff43190a062..f9455a837d3 100644
--- a/src/dm/impls/stag/stagstencil.c
+++ b/src/dm/impls/stag/stagstencil.c
@@ -160,6 +160,44 @@ static PetscErrorCode DMStagStencilToIndexLocal(DM dm,PetscInt n,const DMStagSte
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMStagMatGetValuesStencil - retrieve local matrix entries using grid indexing
+
+  Not Collective
+
+  Input Parameters:
++ dm - the DMStag object
+. mat - the matrix
+. nRow - number of rows
+. posRow - grid locations (including components) of rows
+. nCol - number of columns
+- posCol - grid locations (including components) of columns
+
+  Output Parameter:
+. val - logically two-dimensional array of values
+
+  Level: advanced
+
+.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecGetValuesStencil(), DMStagVecSetValuesStencil(), DMStagMatSetValuesStencil(), MatSetValuesStencil(), MatAssemblyBegin(), MatAssemblyEnd(), DMCreateMatrix()
+@*/
+PetscErrorCode DMStagMatGetValuesStencil(DM dm,Mat mat,PetscInt nRow,const DMStagStencil *posRow,PetscInt nCol,const DMStagStencil *posCol,PetscScalar *val)
+{
+  PetscErrorCode ierr;
+  PetscInt       dim;
+  PetscInt       *ir,*ic;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
+  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
+  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
+  ierr = PetscMalloc2(nRow,&ir,nCol,&ic);CHKERRQ(ierr);
+  ierr = DMStagStencilToIndexLocal(dm,nRow,posRow,ir);CHKERRQ(ierr);
+  ierr = DMStagStencilToIndexLocal(dm,nCol,posCol,ic);CHKERRQ(ierr);
+  ierr = MatGetValuesLocal(mat,nRow,ir,nCol,ic,val);CHKERRQ(ierr);
+  ierr = PetscFree2(ir,ic);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
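A minimal usage sketch, assuming a 2d DMStag, a matrix A created with DMCreateMatrix(dm) and already assembled, and ierr in scope; the location and indices are illustrative:

  DMStagStencil row,col;
  PetscScalar   v;
  row.i = 1; row.j = 2; row.c = 0; row.loc = DMSTAG_DOWN; /* e.g. a dof on a bottom face */
  col = row;
  ierr = DMStagMatGetValuesStencil(dm,A,1,&row,1,&col,&v);CHKERRQ(ierr);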
+
 /*@C
   DMStagMatSetValuesStencil - insert or add matrix entries using grid indexing
 
@@ -180,7 +218,7 @@ static PetscErrorCode DMStagStencilToIndexLocal(DM dm,PetscInt n,const DMStagSte
 
   Level: intermediate
 
-.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecGetValuesStencil(), DMStagVecSetValuesStencil(), MatSetValuesStencil(), MatAssemblyBegin(), MatAssemblyEnd(), DMCreateMatrix()
+.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecGetValuesStencil(), DMStagVecSetValuesStencil(), DMStagMatGetValuesStencil(), MatSetValuesStencil(), MatAssemblyBegin(), MatAssemblyEnd(), DMCreateMatrix()
 @*/
 PetscErrorCode DMStagMatSetValuesStencil(DM dm,Mat mat,PetscInt nRow,const DMStagStencil *posRow,PetscInt nCol,const DMStagStencil *posCol,const PetscScalar *val,InsertMode insertMode)
 {
@@ -218,11 +256,11 @@ PetscErrorCode DMStagMatSetValuesStencil(DM dm,Mat mat,PetscInt nRow,const DMSta
   Accepts stencils which refer to global element numbers, but
   only allows access to entries in the local representation (including ghosts).
 
-  This approach is not as efficient as setting values directly with DMStagVecGetArrayDOF(), which is recommended for matrix free operators.
+  This approach is not as efficient as setting values directly with DMStagVecGetArray(), which is recommended for matrix free operators.
 
   Level: advanced
 
-.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecSetValuesStencil(), DMStagMatSetValuesStencil(), DMStagVecGetArrayDOF()
+.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecSetValuesStencil(), DMStagMatSetValuesStencil(), DMStagVecGetArray()
 @*/
 PetscErrorCode DMStagVecGetValuesStencil(DM dm, Vec vec,PetscInt n,const DMStagStencil *pos,PetscScalar *val)
 {
@@ -263,12 +301,12 @@ PetscErrorCode DMStagVecGetValuesStencil(DM dm, Vec vec,PetscInt n,const DMStagS
   Notes:
   The vector is expected to be a global vector compatible with the DM (usually obtained by DMGetGlobalVector() or DMCreateGlobalVector()).
 
-  This approach is not as efficient as setting values directly with DMStagVecGetArrayDOF(), which is recommended for matrix-free operators. 
+  This approach is not as efficient as setting values directly with DMStagVecGetArray(), which is recommended for matrix-free operators. 
   For assembling systems, where overhead may be less important than convenience, this routine could be helpful in assembling a righthand side and a matrix (using DMStagMatSetValuesStencil()).
 
   Level: advanced
 
-.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecGetValuesStencil(), DMStagMatSetValuesStencil(), DMCreateGlobalVector(), DMGetLocalVector(), DMStagVecGetArrayDOF()
+.seealso: DMSTAG, DMStagStencil, DMStagStencilLocation, DMStagVecGetValuesStencil(), DMStagMatSetValuesStencil(), DMCreateGlobalVector(), DMGetLocalVector(), DMStagVecGetArray()
 @*/
 PetscErrorCode DMStagVecSetValuesStencil(DM dm,Vec vec,PetscInt n,const DMStagStencil *pos,const PetscScalar *val,InsertMode insertMode)
 {
diff --git a/src/dm/impls/stag/stagutils.c b/src/dm/impls/stag/stagutils.c
index aa8c095b6c0..de8f83c151c 100644
--- a/src/dm/impls/stag/stagutils.c
+++ b/src/dm/impls/stag/stagutils.c
@@ -31,27 +31,7 @@ PetscErrorCode DMStagGetBoundaryTypes(DM dm,DMBoundaryType *boundaryTypeX,DMBoun
   PetscFunctionReturn(0);
 }
 
-/*@C
-  DMStagGet1dCoordinateArraysDOFRead - extract 1D coordinate arrays
-
-  Logically Collective
-
-  A high-level helper function to quickly extract raw 1D local coordinate arrays.
-  Checks that the coordinate DM is a DMProduct or 1D DMStags, with the same number of dof.
-  Check on the number of dof and dimension ensures that the elementwise data
-  is the same for each, so the same indexing can be used on the arrays.
-
-  Input Parameter:
-. dm - the DMStag object
-
-  Output Parameters:
-. arrX,arrY,arrX - local 1D coordinate arrays
-
-  Level: intermediate
-
-.seealso: DMSTAG, DMPRODUCT, DMStagSetUniformCoordinates(), DMStagSetUniformCoordinatesProduct(), DMStagGet1dCoordinateLocationSlot()
-@*/
-PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM dm,void* arrX,void* arrY,void* arrZ)
+static PetscErrorCode DMStagGetProductCoordinateArrays_Private(DM dm,void* arrX,void* arrY,void* arrZ,PetscBool read)
 {
   PetscErrorCode ierr;
   PetscInt       dim,d,dofCheck[DMSTAG_MAX_STRATA],s;
@@ -78,7 +58,8 @@ PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM dm,void* arrX,void* arrY,vo
     DMType    dmType;
     PetscBool isStag;
     PetscInt  dof[DMSTAG_MAX_STRATA],subDim;
-    Vec       coord1d;
+    Vec       coord1d_local;
+
     ierr = DMProductGetDM(dmCoord,d,&subDM);CHKERRQ(ierr);
     if (!subDM) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"Coordinate DM is missing sub DM %D",d);
     ierr = DMGetDimension(subDM,&subDim);CHKERRQ(ierr);
@@ -94,20 +75,86 @@ PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM dm,void* arrX,void* arrY,vo
         if (dofCheck[s] != dof[s]) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"Coordinate sub-DMs have different dofs");
       }
     }
-    ierr = DMGetCoordinatesLocal(subDM,&coord1d);CHKERRQ(ierr);
-    ierr = DMStagVecGetArrayDOFRead(subDM,coord1d,arr[d]);CHKERRQ(ierr);
+    ierr = DMGetCoordinatesLocal(subDM,&coord1d_local);CHKERRQ(ierr);
+    if (read) {
+      ierr = DMStagVecGetArrayRead(subDM,coord1d_local,arr[d]);CHKERRQ(ierr);
+    } else {
+      ierr = DMStagVecGetArray(subDM,coord1d_local,arr[d]);CHKERRQ(ierr);
+    }
   }
   PetscFunctionReturn(0);
 }
 
 /*@C
-  DMStagGet1dCoordinateLocationSlot - get slot for use with local 1D coordinate arrays
+  DMStagGetProductCoordinateArrays - extract local product coordinate arrays, one per dimension
+
+  Logically Collective
+
+  A high-level helper function to quickly extract local coordinate arrays.
+
+  Note that 2-dimensional arrays are returned. See
+  DMStagVecGetArray(), which is called internally to produce these arrays
+  representing coordinates on elements and vertices (element boundaries)
+  for a 1-dimensional DMStag in each coordinate direction.
 
-  High-level helper function to get slot ids for 1D coordinate DMs.
-  For use with DMStagGetIDCoordinateArraysDOFRead() and related functions.
+  One should use DMStagGetProductCoordinateLocationSlot() to determine appropriate
+  indices for the second dimension in these returned arrays. This function
+  checks that the coordinate array is a suitable product of 1-dimensional
+  DMStag objects.
+
+  Input Parameter:
+. dm - the DMStag object
+
+  Output Parameters:
+. arrX,arrY,arrZ - local 1D coordinate arrays
+
+  Level: intermediate
+
+.seealso: DMSTAG, DMPRODUCT, DMStagGetProductCoordinateArraysRead(), DMStagSetUniformCoordinates(), DMStagSetUniformCoordinatesProduct(), DMStagGetProductCoordinateLocationSlot()
+@*/
+PetscErrorCode DMStagGetProductCoordinateArrays(DM dm,void* arrX,void* arrY,void* arrZ)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = DMStagGetProductCoordinateArrays_Private(dm,arrX,arrY,arrZ,PETSC_FALSE);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/*@C
+  DMStagGetProductCoordinateArraysRead - extract product coordinate arrays, read-only
+
+  Logically Collective
+
+  See the man page for DMStagGetProductCoordinateArrays() for more information.
+
+  Input Parameter:
+. dm - the DMStag object
+
+  Output Parameters:
+. arrX,arrY,arrZ - local 1D coordinate arrays
+
+  Level: intermediate
+
+.seealso: DMSTAG, DMPRODUCT, DMStagGetProductCoordinateArrays(), DMStagSetUniformCoordinates(), DMStagSetUniformCoordinatesProduct(), DMStagGetProductCoordinateLocationSlot()
+@*/
+PetscErrorCode DMStagGetProductCoordinateArraysRead(DM dm,void* arrX,void* arrY,void* arrZ)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  ierr = DMStagGetProductCoordinateArrays_Private(dm,arrX,arrY,arrZ,PETSC_TRUE);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
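A minimal sketch for a 2d DMStag whose coordinates were set with DMStagSetUniformCoordinatesProduct(), so that the coordinate DM is a DMPRODUCT of 1d DMStags; names are illustrative:

  PetscScalar **cArrX,**cArrY;
  PetscInt      iElement;
  ierr = DMStagGetProductCoordinateArrays(dm,&cArrX,&cArrY,NULL);CHKERRQ(ierr);
  ierr = DMStagGetProductCoordinateLocationSlot(dm,DMSTAG_ELEMENT,&iElement);CHKERRQ(ierr);
  /* the x-coordinate of the center of element i is cArrX[i][iElement] */
  ierr = DMStagRestoreProductCoordinateArrays(dm,&cArrX,&cArrY,NULL);CHKERRQ(ierr);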
+
+/*@C
+  DMStagGetProductCoordinateLocationSlot - get slot for use with local product coordinate arrays
 
   Not Collective
 
+  High-level helper function to get slot indices for 1D coordinate DMs,
+  for use with DMStagGetProductCoordinateArrays() and related functions.
+
   Input Parameters:
 + dm - the DMStag object
 - loc - the grid location
@@ -121,9 +168,9 @@ PetscErrorCode DMStagGet1dCoordinateArraysDOFRead(DM dm,void* arrX,void* arrY,vo
 
   Level: intermediate
 
-.seealso: DMSTAG, DMPRODUCT, DMStagGet1dCoordinateArraysDOFRead(), DMStagSetUniformCoordinates()
+.seealso: DMSTAG, DMPRODUCT, DMStagGetProductCoordinateArrays(), DMStagGetProductCoordinateArraysRead(), DMStagSetUniformCoordinates()
 @*/
-PETSC_EXTERN PetscErrorCode DMStagGet1dCoordinateLocationSlot(DM dm,DMStagStencilLocation loc,PetscInt *slot)
+PETSC_EXTERN PetscErrorCode DMStagGetProductCoordinateLocationSlot(DM dm,DMStagStencilLocation loc,PetscInt *slot)
 {
   PetscErrorCode ierr;
   DM             dmCoord;
@@ -414,7 +461,7 @@ PetscErrorCode DMStagGetLocalSizes(DM dm,PetscInt* m,PetscInt* n,PetscInt* p)
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagGetGlobalSizes(), DMStagGetLocalSize(), DMStagSetNumRank(), DMDAGetInfo()
+.seealso: DMSTAG, DMStagGetGlobalSizes(), DMStagGetLocalSize(), DMStagSetNumRanks(), DMDAGetInfo()
 @*/
 PetscErrorCode DMStagGetNumRanks(DM dm,PetscInt *nRanks0,PetscInt *nRanks1,PetscInt *nRanks2)
 {
@@ -567,24 +614,11 @@ PetscErrorCode DMStagGetOwnershipRanges(DM dm,const PetscInt *lx[],const PetscIn
 PetscErrorCode DMStagCreateCompatibleDMStag(DM dm,PetscInt dof0,PetscInt dof1,PetscInt dof2,PetscInt dof3,DM *newdm)
 {
   PetscErrorCode  ierr;
-  const DM_Stag * const stag = (DM_Stag*)dm->data;
-  PetscInt        dim;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecificType(dm,DM_CLASSID,1,DMSTAG);
-  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
-  switch (dim) {
-    case 1:
-      ierr = DMStagCreate1d(PetscObjectComm((PetscObject)dm),stag->boundaryType[0],stag->N[0],dof0,dof1,stag->stencilType,stag->stencilWidth,NULL,newdm);CHKERRQ(ierr);
-      break;
-    case 2:
-      ierr = DMStagCreate2d(PetscObjectComm((PetscObject)dm),stag->boundaryType[0],stag->boundaryType[1],stag->N[0],stag->N[1],stag->nRanks[0],stag->nRanks[1],dof0,dof1,dof2,stag->stencilType,stag->stencilWidth,NULL,NULL,newdm);CHKERRQ(ierr);
-      break;
-    case 3:
-      ierr = DMStagCreate3d(PetscObjectComm((PetscObject)dm),stag->boundaryType[0],stag->boundaryType[1],stag->boundaryType[2],stag->N[0],stag->N[1],stag->N[2],stag->nRanks[0],stag->nRanks[1],stag->nRanks[2],dof0,dof1,dof2,dof3,stag->stencilType,stag->stencilWidth,NULL,NULL,NULL,newdm);CHKERRQ(ierr);
-      break;
-    default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"Unsupported dimension %D",dim);
-  }
+  ierr = DMStagDuplicateWithoutSetup(dm,PetscObjectComm((PetscObject)dm),newdm);CHKERRQ(ierr);
+  ierr = DMStagSetDOF(*newdm,dof0,dof1,dof2,dof3);CHKERRQ(ierr);
   ierr = DMSetUp(*newdm);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
@@ -603,13 +637,13 @@ PetscErrorCode DMStagCreateCompatibleDMStag(DM dm,PetscInt dof0,PetscInt dof1,Pe
 . slot - index to use
 
   Notes:
-  Provides an appropriate index to use with DMStagVecGetArrayDOF() and friends.
+  Provides an appropriate index to use with DMStagVecGetArray() and friends.
   This is required so that the user doesn't need to know about the ordering of
   dof associated with each local element.
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagVecGetArrayDOF(), DMStagVecGetArrayDOFRead(), DMStagGetDOF(), DMStagGetEntriesPerElement()
+.seealso: DMSTAG, DMStagVecGetArray(), DMStagVecGetArrayRead(), DMStagGetDOF(), DMStagGetEntriesPerElement()
 @*/
 PetscErrorCode DMStagGetLocationSlot(DM dm,DMStagStencilLocation loc,PetscInt c,PetscInt *slot)
 {
@@ -801,8 +835,36 @@ PetscErrorCode DMStagPopulateLocalToGlobalInjective(DM dm)
   PetscFunctionReturn(0);
 }
 
+static PetscErrorCode DMStagRestoreProductCoordinateArrays_Private(DM dm,void *arrX,void *arrY,void *arrZ,PetscBool read)
+{
+  PetscErrorCode  ierr;
+  PetscInt        dim,d;
+  void*           arr[DMSTAG_MAX_DIM];
+  DM              dmCoord;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
+  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
+  if (dim > DMSTAG_MAX_DIM) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not implemented for %D dimensions",dim);
+  arr[0] = arrX; arr[1] = arrY; arr[2] = arrZ;
+  ierr = DMGetCoordinateDM(dm,&dmCoord);CHKERRQ(ierr);
+  for (d=0; dglobal scatter to populate global coordinates from the local coordinates. Thus, it may be required to explicitly perform these operations in some situations, as in the following partial example:
+
+$   ierr = DMGetCoordinateDM(dm,&cdm);CHKERRQ(ierr);
+$   for (d=0; d<3; ++d) {
+$     DM  subdm;
+$     Vec coor,coor_local;
+
+$     ierr = DMProductGetDM(cdm,d,&subdm);CHKERRQ(ierr);
+$     ierr = DMGetCoordinates(subdm,&coor);CHKERRQ(ierr);
+$     ierr = DMGetCoordinatesLocal(subdm,&coor_local);CHKERRQ(ierr);
+$     ierr = DMLocalToGlobal(subdm,coor_local,INSERT_VALUES,coor);CHKERRQ(ierr);
+$     ierr = PetscPrintf(PETSC_COMM_WORLD,"Coordinates dim %D:\n",d);CHKERRQ(ierr);
+$     ierr = VecView(coor,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
+$   }
+
+.seealso: DMSTAG, DMStagGetProductCoordinateArrays(), DMStagGetProductCoordinateArraysRead()
 @*/
-PetscErrorCode DMStagRestore1dCoordinateArraysDOFRead(DM dm,void *arrX,void *arrY,void *arrZ)
+PetscErrorCode DMStagRestoreProductCoordinateArrays(DM dm,void *arrX,void *arrY,void *arrZ)
 {
   PetscErrorCode  ierr;
-  PetscInt        dim,d;
-  void*           arr[DMSTAG_MAX_DIM];
-  DM              dmCoord;
 
   PetscFunctionBegin;
-  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
-  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
-  if (dim > DMSTAG_MAX_DIM) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not implemented for %D dimensions",dim);
-  arr[0] = arrX; arr[1] = arrY; arr[2] = arrZ;
-  ierr = DMGetCoordinateDM(dm,&dmCoord);CHKERRQ(ierr);
-  for (d=0; d 1) PetscValidLogicalCollectiveEnum(dm,boundaryType1,3);
+  if (dim > 2) PetscValidLogicalCollectiveEnum(dm,boundaryType2,4);
   if (dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
-  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
   if (boundaryType0           ) stag->boundaryType[0] = boundaryType0;
   if (boundaryType1 && dim > 1) stag->boundaryType[1] = boundaryType1;
   if (boundaryType2 && dim > 2) stag->boundaryType[2] = boundaryType2;
@@ -969,12 +1057,12 @@ PetscErrorCode DMStagSetNumRanks(DM dm,PetscInt nRanks0,PetscInt nRanks1,PetscIn
   PetscValidLogicalCollectiveInt(dm,nRanks2,4);
   if (dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
   ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
-  if (nRanks0 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in X direction cannot be less than 1");
-  if (dim > 1 && nRanks1 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in Y direction cannot be less than 1");
-  if (dim > 2 && nRanks2 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in Z direction cannot be less than 1");
+  if (nRanks0 != PETSC_DECIDE && nRanks0 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in X direction cannot be less than 1");
+  if (dim > 1 && nRanks1 != PETSC_DECIDE && nRanks1 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in Y direction cannot be less than 1");
+  if (dim > 2 && nRanks2 != PETSC_DECIDE && nRanks2 < 1) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"number of ranks in Z direction cannot be less than 1");
   if (nRanks0) stag->nRanks[0] = nRanks0;
-  if (nRanks1) stag->nRanks[1] = nRanks1;
-  if (nRanks2) stag->nRanks[2] = nRanks2;
+  if (dim > 1 && nRanks1) stag->nRanks[1] = nRanks1;
+  if (dim > 2 && nRanks2) stag->nRanks[2] = nRanks2;
   PetscFunctionReturn(0);
 }
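With the check relaxed as above, PETSC_DECIDE may now be passed explicitly before DMSetUp(); a one-line sketch, assuming dm is a DMSTAG that has not yet been set up:

  ierr = DMStagSetNumRanks(dm,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);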
 
@@ -1085,13 +1173,14 @@ PetscErrorCode DMStagSetOwnershipRanges(DM dm,PetscInt const *lx,PetscInt const
   PetscErrorCode  ierr;
   DM_Stag * const stag = (DM_Stag*)dm->data;
   const PetscInt  *lin[3];
-  PetscInt        d;
+  PetscInt        d,dim;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecificType(dm,DM_CLASSID,1,DMSTAG);
   if (dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
   lin[0] = lx; lin[1] = ly; lin[2] = lz;
-  for (d=0; d<3; ++d) {
+  ierr = DMGetDimension(dm,&dim);CHKERRQ(ierr);
+  for (d=0; d<dim; ++d) {
     if (lin[d]) {
       if (stag->nRanks[d] < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of ranks");
       if (!stag->l[d]) {
@@ -1267,10 +1356,28 @@ PetscErrorCode DMStagSetUniformCoordinatesProduct(DM dm,PetscReal xmin,PetscReal
 }
 
 /*@C
-  DMStagVecGetArrayDOF - get access to raw local array
+  DMStagVecGetArray - get access to local array
 
   Logically Collective
 
+  This function returns a (dim+1)-dimensional array for a dim-dimensional
+  DMStag.
+
+  The first 1-3 dimensions indicate an element in the global
+  numbering, using the standard C ordering.
+
+  The final dimension in this array corresponds to a degree
+  of freedom with respect to this element, for example corresponding to
+  the element or one of its neighboring faces, edges, or vertices.
+
+  For example, for a 3D DMStag, indexing is array[k][j][i][idx], where k is the
+  index in the z-direction, j is the index in the y-direction, and i is the
+  index in the x-direction.
+
+  "idx" is obtained with DMStagGetLocationSlot(), since the correct offset
+  into the (dim+1)-dimensional C array depends on the grid size and the number
+  of dof stored at each location.
+
   Input Parameters:
 + dm - the DMStag object
 - vec - the Vec object
@@ -1279,14 +1386,13 @@ PetscErrorCode DMStagSetUniformCoordinatesProduct(DM dm,PetscReal xmin,PetscReal
 . array - the array
 
   Notes:
-  Indexing is array[k][j][i][idx].
-  Obtain idx with DMStagGetLocationSlot().
+  DMStagVecRestoreArray() must be called once the caller is finished with the array
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagVecGetArrayDOFRead(), DMStagGetLocationSlot(), DMGetLocalVector(), DMCreateLocalVector(), DMGetGlobalVector(), DMCreateGlobalVector(), DMDAVecGetArrayDOF()
+.seealso: DMSTAG, DMStagVecGetArrayRead(), DMStagGetLocationSlot(), DMGetLocalVector(), DMCreateLocalVector(), DMGetGlobalVector(), DMCreateGlobalVector(), DMDAVecGetArray(), DMDAVecGetArrayDOF()
 @*/
-PetscErrorCode DMStagVecGetArrayDOF(DM dm,Vec vec,void *array)
+PetscErrorCode DMStagVecGetArray(DM dm,Vec vec,void *array)
 {
   PetscErrorCode  ierr;
   DM_Stag * const stag = (DM_Stag*)dm->data;
@@ -1315,26 +1421,27 @@ PetscErrorCode DMStagVecGetArrayDOF(DM dm,Vec vec,void *array)
 }
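A minimal sketch of the array[k][j][i][idx] indexing described above, for a 3d DMStag and a local vector vecLocal obtained with DMGetLocalVector(); the element dof written here is illustrative:

  PetscScalar ****arr;
  PetscInt     iE,startx,starty,startz,nx,ny,nz,i,j,k;
  ierr = DMStagGetCorners(dm,&startx,&starty,&startz,&nx,&ny,&nz,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMStagGetLocationSlot(dm,DMSTAG_ELEMENT,0,&iE);CHKERRQ(ierr);
  ierr = DMStagVecGetArray(dm,vecLocal,&arr);CHKERRQ(ierr);
  for (k=startz; k<startz+nz; ++k)
    for (j=starty; j<starty+ny; ++j)
      for (i=startx; i<startx+nx; ++i)
        arr[k][j][i][iE] = 0.0;
  ierr = DMStagVecRestoreArray(dm,vecLocal,&arr);CHKERRQ(ierr);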
 
 /*@C
-  DMStagVecGetArrayDOFRead - get read-only access to raw local array
+  DMStagVecGetArrayRead - get read-only access to a local array
 
   Logically Collective
 
+  See the man page for DMStagVecGetArray() for more information.
+
   Input Parameters:
 + dm - the DMStag object
 - vec - the Vec object
 
   Output Parameters:
-. array - read-only the array
+. array - the read-only array
 
   Notes:
-  Indexing is array[k][j][i][idx].
-  Obtain idx with DMStagGetLocationSlot()
+  DMStagVecRestoreArrayRead() must be called once the caller is finished with the array
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagVecGetArrayDOFRead(), DMStagGetLocationSlot(), DMGetLocalVector(), DMCreateLocalVector(), DMGetGlobalVector(), DMCreateGlobalVector(), DMDAVecGetArrayDOFRead()
+.seealso: DMSTAG, DMStagVecGetArray(), DMStagGetLocationSlot(), DMGetLocalVector(), DMCreateLocalVector(), DMGetGlobalVector(), DMCreateGlobalVector(), DMDAVecGetArrayRead(), DMDAVecGetArrayDOFRead()
 @*/
-PetscErrorCode DMStagVecGetArrayDOFRead(DM dm,Vec vec,void *array)
+PetscErrorCode DMStagVecGetArrayRead(DM dm,Vec vec,void *array)
 {
   PetscErrorCode  ierr;
   DM_Stag * const stag = (DM_Stag*)dm->data;
@@ -1363,7 +1470,7 @@ PetscErrorCode DMStagVecGetArrayDOFRead(DM dm,Vec vec,void *array)
 }
 
 /*@C
-  DMStagVecRestoreArrayDOF - restore read-only access to a raw array
+  DMStagVecRestoreArray - restore access to a raw array
 
   Logically Collective
 
@@ -1376,9 +1483,9 @@ PetscErrorCode DMStagVecGetArrayDOFRead(DM dm,Vec vec,void *array)
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagVecGetArrayDOF(), DMDAVecRestoreArrayDOFRead()
+.seealso: DMSTAG, DMStagVecGetArray(), DMDAVecRestoreArray(), DMDAVecRestoreArrayDOF()
 @*/
-PetscErrorCode DMStagVecRestoreArrayDOF(DM dm,Vec vec,void *array)
+PetscErrorCode DMStagVecRestoreArray(DM dm,Vec vec,void *array)
 {
   PetscErrorCode  ierr;
   DM_Stag * const stag = (DM_Stag*)dm->data;
@@ -1407,7 +1514,7 @@ PetscErrorCode DMStagVecRestoreArrayDOF(DM dm,Vec vec,void *array)
 }
 
 /*@C
-  DMStagVecRestoreArrayDOFRead - restore read-only access to a raw array
+  DMStagVecRestoreArrayRead - restore read-only access to a raw array
 
   Logically Collective
 
@@ -1420,9 +1527,9 @@ PetscErrorCode DMStagVecRestoreArrayDOF(DM dm,Vec vec,void *array)
 
   Level: beginner
 
-.seealso: DMSTAG, DMStagVecGetArrayDOFRead(), DMDAVecRestoreArrayDOFRead()
+.seealso: DMSTAG, DMStagVecGetArrayRead(), DMDAVecRestoreArrayRead(), DMDAVecRestoreArrayDOFRead()
 @*/
-PetscErrorCode DMStagVecRestoreArrayDOFRead(DM dm,Vec vec,void *array)
+PetscErrorCode DMStagVecRestoreArrayRead(DM dm,Vec vec,void *array)
 {
   PetscErrorCode  ierr;
   DM_Stag * const stag = (DM_Stag*)dm->data;
diff --git a/src/dm/impls/swarm/examples/tests/ex1.c b/src/dm/impls/swarm/examples/tests/ex1.c
index a175d16b6d4..96bf25f25b6 100644
--- a/src/dm/impls/swarm/examples/tests/ex1.c
+++ b/src/dm/impls/swarm/examples/tests/ex1.c
@@ -199,18 +199,22 @@ int main (int argc, char * argv[]) {
   test:
     suffix: proj_0
     requires: pragmatic
+    TODO: broken
     args: -dim 2 -nbrVerEdge 3 -dm_plex_separate_marker 0 -dm_view -sw_view -petscspace_degree 1 -petscfe_default_quadrature_order 1 -pc_type lu
   test:
     suffix: proj_1
     requires: pragmatic
+    TODO: broken
     args: -dim 2 -simplex 0 -nbrVerEdge 3 -dm_plex_separate_marker 0 -dm_view -sw_view -petscspace_degree 1 -petscfe_default_quadrature_order 1 -pc_type lu
   test:
     suffix: proj_2
     requires: pragmatic
+    TODO: broken
     args: -dim 3 -nbrVerEdge 3 -dm_view -sw_view -petscspace_degree 1 -petscfe_default_quadrature_order 1 -pc_type lu
   test:
     suffix: proj_3
     requires: pragmatic
+    TODO: broken
     args: -dim 2 -simplex 0 -nbrVerEdge 3 -dm_plex_separate_marker 0 -dm_view -sw_view -petscspace_degree 1 -petscfe_default_quadrature_order 1 -pc_type lu
 
 TEST*/
diff --git a/src/dm/impls/swarm/examples/tests/ex4.c b/src/dm/impls/swarm/examples/tests/ex4.c
index 5ebf52f160f..4ae90043ec5 100644
--- a/src/dm/impls/swarm/examples/tests/ex4.c
+++ b/src/dm/impls/swarm/examples/tests/ex4.c
@@ -1,189 +1,64 @@
-static char help[] = "Example of simple hamiltonian system with particles and a basic symplectic integrator\n";
+static char help[] = "Example of a simple Hamiltonian system (harmonic oscillator) with particles and a basic symplectic integrator\n";
 
 #include 
-#include 
+#include <petsc/private/dmpleximpl.h>  /* For norm */
+#include <petsc/private/petscfeimpl.h> /* For CoordinatesRefToReal() */
 #include 
-#include 
-#include 
-#include 
 #include 
+
 typedef struct {
-  PetscInt       dim;                              /* The topological mesh dimension */
-  PetscInt       nts;                              /* print the energy at each nts time steps */
-  PetscBool      simplex;                          /* Flag for simplices or tensor cells */
-  PetscBool      monitor;                          /* Flag for use of the TS monitor */
-  char           meshFilename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */
-  PetscInt       faces;                            /* Number of faces per edge if unit square/cube generated */
-  PetscReal      domain_lo[3], domain_hi[3];       /* Lower left and upper right mesh corners */
-  PetscReal omega;                                 /* Oscillation value omega */
-  DMBoundaryType boundary[3];                      /* The domain boundary type, e.g. periodic */
-  PetscInt       particlesPerCell;                 /* The number of partices per cell */
-  PetscReal      particleRelDx;                    /* Relative particle position perturbation compared to average cell diameter h */
-  PetscReal      meshRelDx;                        /* Relative vertex position perturbation compared to average cell diameter h */
-  PetscInt       k;                                /* Mode number for test function */
-  PetscReal      momentTol;                        /* Tolerance for checking moment conservation */
-  PetscErrorCode (*func)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
+  PetscInt  dim;                          /* The topological mesh dimension */
+  PetscBool simplex;                      /* Flag for simplices or tensor cells */
+  char      filename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */
+  PetscReal omega;                        /* Oscillation frequency omega */
+  PetscInt  particlesPerCell;             /* The number of particles per cell */
+  PetscReal momentTol;                    /* Tolerance for checking moment conservation */
+  PetscBool monitor;                      /* Flag for using the TS monitor */
+  PetscBool error;                        /* Flag for printing the error */
+  PetscInt  ostep;                        /* Print the energy every ostep time steps */
 } AppCtx;
 
-/* const char *const ex2FunctionTypes[] = {"linear","x2_x4","sin","ex2FunctionTypes","EX2_FUNCTION_",0}; */
-static PetscErrorCode linear(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx  *ctx = (AppCtx *) a_ctx;
-  PetscInt d;
-
-  u[0] = 0.0;
-  for (d = 0; d < dim; ++d) u[0] += x[d]/(ctx->domain_hi[d] - ctx->domain_lo[d]);
-  return 0;
-}
-
-static PetscErrorCode x2_x4(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx  *ctx = (AppCtx *) a_ctx;
-  PetscInt d;
-
-  u[0] = 1;
-  for (d = 0; d < dim; ++d) u[0] *= PetscSqr(x[d])*PetscSqr(ctx->domain_hi[d]) - PetscPowRealInt(x[d], 4);
-  return 0;
-}
-
-static PetscErrorCode sinx(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx *ctx = (AppCtx *) a_ctx;
-
-  u[0] = sin(2*PETSC_PI*ctx->k*x[0]/(ctx->domain_hi[0] - ctx->domain_lo[0]));
-  return 0;
-}
-
-
-
 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 {
-  PetscInt       ii, bd;
-  char           fstring[PETSC_MAX_PATH_LEN] = "linear";
-  PetscBool      flag;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
   options->dim              = 2;
   options->simplex          = PETSC_TRUE;
   options->monitor          = PETSC_FALSE;
-  options->faces            = 1;
-  options->domain_lo[0]     = 0.0;
-  options->domain_lo[1]     = 0.0;
-  options->domain_lo[2]     = 0.0;
-  options->domain_hi[0]     = 1.0;
-  options->domain_hi[1]     = 1.0;
-  options->domain_hi[2]     = 1.0;
-  options->boundary[0]      = DM_BOUNDARY_NONE; /* PERIODIC (plotting does not work in parallel, moments not conserved) */
-  options->boundary[1]      = DM_BOUNDARY_NONE;
-  options->boundary[2]      = DM_BOUNDARY_NONE;
+  options->error            = PETSC_FALSE;
   options->particlesPerCell = 1;
-  options->k                = 1;
-  options->particleRelDx    = 1.e-20;
-  options->meshRelDx        = 1.e-20;
-  options->momentTol        = 100.*PETSC_MACHINE_EPSILON;
-  options->omega            = 64.;
-  options->nts              = 100;
-  
-  ierr = PetscOptionsBegin(comm, "", "L2 Projection Options", "DMPLEX");CHKERRQ(ierr);
-  
-  ierr = PetscStrcpy(options->meshFilename, "");CHKERRQ(ierr);
-
-  ierr = PetscOptionsInt("-next_output","time steps for next output point","<100>",options->nts,&options->nts,PETSC_NULL);CHKERRQ(ierr); 
-  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex2.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
-  
-  ierr = PetscOptionsBool("-monitor", "To use the TS monitor or not", "ex4.c", options->monitor, &options->monitor, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex2.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
-  
-  ierr = PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex2.c", options->meshFilename, options->meshFilename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-faces", "Number of faces per edge if unit square/cube generated", "ex2.c", options->faces, &options->faces, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-k", "Mode number of test", "ex2.c", options->k, &options->k, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-particlesPerCell", "Number of particles per cell", "ex2.c", options->particlesPerCell, &options->particlesPerCell, NULL);CHKERRQ(ierr);
-
-  ierr = PetscOptionsReal("-omega","parameter","<64>",options->omega,&options->omega,PETSC_NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsReal("-particle_perturbation", "Relative perturbation of particles (0,1)", "ex2.c", options->particleRelDx, &options->particleRelDx, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsReal("-mesh_perturbation", "Relative perturbation of mesh points (0,1)", "ex2.c", options->meshRelDx, &options->meshRelDx, NULL);CHKERRQ(ierr);
-  ii = options->dim;
-  ierr = PetscOptionsRealArray("-domain_hi", "Domain size", "ex2.c", options->domain_hi, &ii, NULL);CHKERRQ(ierr);
-  ii = options->dim;
-  ierr = PetscOptionsRealArray("-domain_lo", "Domain size", "ex2.c", options->domain_lo, &ii, NULL);CHKERRQ(ierr);
-  bd = options->boundary[0];
-  ierr = PetscOptionsEList("-x_boundary", "The x-boundary", "ex2.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[0]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[0] = (DMBoundaryType) bd;
-  bd = options->boundary[1];
-  ierr = PetscOptionsEList("-y_boundary", "The y-boundary", "ex2.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[1]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[1] = (DMBoundaryType) bd;
-  bd = options->boundary[2];
-  ierr = PetscOptionsEList("-z_boundary", "The z-boundary", "ex2.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[2]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[2] = (DMBoundaryType) bd;
-  ierr = PetscOptionsString("-function", "Name of test function", "ex2.c", fstring, fstring, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
-  ierr = PetscStrcmp(fstring, "linear", &flag);CHKERRQ(ierr);
-  if (flag) {
-    options->func = linear;
-  } else {
-    ierr = PetscStrcmp(fstring, "sin", &flag);CHKERRQ(ierr);
-    if (flag) {
-      options->func = sinx;
-    } else {
-      ierr = PetscStrcmp(fstring, "x2_x4", &flag);CHKERRQ(ierr);
-      options->func = x2_x4;
-      if (!flag) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown function %s",fstring);
-    }
-  }
+  options->momentTol        = 100.0*PETSC_MACHINE_EPSILON;
+  options->omega            = 64.0;
+  options->ostep            = 100;
+
+  ierr = PetscStrcpy(options->filename, "");CHKERRQ(ierr);
+
+  ierr = PetscOptionsBegin(comm, "", "Harmonic Oscillator Options", "DMPLEX");CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-output_step", "Number of time steps between output", "ex4.c", options->ostep, &options->ostep, PETSC_NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex4.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-monitor", "Flag to use the TS monitor", "ex4.c", options->monitor, &options->monitor, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-error", "Flag to print the error", "ex4.c", options->error, &options->error, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex4.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex4.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-particles_per_cell", "Number of particles per cell", "ex4.c", options->particlesPerCell, &options->particlesPerCell, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsReal("-omega", "Oscillator frequency", "ex4.c", options->omega, &options->omega, PETSC_NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();CHKERRQ(ierr);
 
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PerturbVertices(DM dm, AppCtx *user)
-{
-  PetscRandom    rnd;
-  PetscReal      interval = user->meshRelDx;
-  Vec            coordinates;
-  PetscScalar   *coords;
-  PetscReal      hh[3];
-  PetscInt       d, cdim, N, p, bs;
-  PetscErrorCode ierr;
-
-  PetscFunctionBeginUser;
-  for (d = 0; d < user->dim; ++d) hh[d] = (user->domain_hi[d] - user->domain_lo[d])/user->faces;
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rnd);CHKERRQ(ierr);
-  ierr = PetscRandomSetInterval(rnd, -interval, interval);CHKERRQ(ierr);
-  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
-  ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
-  ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(coordinates, &N);CHKERRQ(ierr);
-  ierr = VecGetBlockSize(coordinates, &bs);CHKERRQ(ierr);
-  if (bs != cdim) SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_SIZ, "Coordinate vector has wrong block size %D != %D", bs, cdim);
-  ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
-  for (p = 0; p < N; p += cdim) {
-    PetscScalar *coord = &coords[p], value;
-
-    for (d = 0; d < cdim; ++d) {
-      ierr = PetscRandomGetValue(rnd, &value);CHKERRQ(ierr);
-      coord[d] = PetscMax(user->domain_lo[d], PetscMin(user->domain_hi[d], coord[d] + value*hh[d]));
-    }
-  }
-  ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
-  PetscFunctionReturn(0);
-}
-
-
 static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
 {
   PetscBool      flg;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
-  ierr = PetscStrcmp(user->meshFilename, "", &flg);CHKERRQ(ierr);
+  ierr = PetscStrcmp(user->filename, "", &flg);CHKERRQ(ierr);
   if (flg) {
-    PetscInt faces[3];
-
-    faces[0] = user->faces; faces[1] = user->faces; faces[2] = user->faces;
-    ierr = DMPlexCreateBoxMesh(comm, user->dim, user->simplex, faces, user->domain_lo, user->domain_hi, user->boundary, PETSC_TRUE, dm);CHKERRQ(ierr);
+    ierr = DMPlexCreateBoxMesh(comm, user->dim, user->simplex, NULL, NULL, NULL, NULL, PETSC_TRUE, dm);CHKERRQ(ierr);
   } else {
-    ierr = DMPlexCreateFromFile(comm, user->meshFilename, PETSC_TRUE, dm);CHKERRQ(ierr);
+    ierr = DMPlexCreateFromFile(comm, user->filename, PETSC_TRUE, dm);CHKERRQ(ierr);
     ierr = DMGetDimension(*dm, &user->dim);CHKERRQ(ierr);
   }
   {
@@ -197,276 +72,341 @@ static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
   }
   ierr = DMLocalizeCoordinates(*dm);CHKERRQ(ierr); /* needed for periodic */
   ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
-  ierr = PerturbVertices(*dm, user);CHKERRQ(ierr);
   ierr = PetscObjectSetName((PetscObject) *dm, "Mesh");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user)
+static PetscErrorCode SetInitialCoordinates(DM dmSw)
 {
-  PetscRandom    rnd, rndp;
-  PetscReal      interval = user->particleRelDx;
-  PetscScalar    value, *vals;
-  PetscReal     *centroid, *coords, *xi0, *v0, *J, *invJ, detJ, *initialConditions;
-  PetscInt      *cellid;
-  PetscInt       Ncell, Np = user->particlesPerCell, p, c, dim, d;
+  DM             dm;
+  AppCtx        *user;
+  PetscRandom    rnd;
+  PetscBool      simplex;
+  PetscReal     *centroid, *coords, *xi0, *v0, *J, *invJ, detJ;
+  PetscInt       dim, d, cStart, cEnd, c, Np, p;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
-  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = DMCreate(PetscObjectComm((PetscObject) dm), sw);CHKERRQ(ierr);
-  ierr = DMSetType(*sw, DMSWARM);CHKERRQ(ierr);
-  ierr = DMSetDimension(*sw, dim);CHKERRQ(ierr);
-
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rnd);CHKERRQ(ierr);
+  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dmSw), &rnd);CHKERRQ(ierr);
   ierr = PetscRandomSetInterval(rnd, -1.0, 1.0);CHKERRQ(ierr);
   ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rndp);CHKERRQ(ierr);
-  ierr = PetscRandomSetInterval(rndp, -interval, interval);CHKERRQ(ierr);
-  ierr = PetscRandomSetFromOptions(rndp);CHKERRQ(ierr);
-
-  ierr = DMSwarmSetType(*sw, DMSWARM_PIC);CHKERRQ(ierr);
-  ierr = DMSwarmSetCellDM(*sw, dm);CHKERRQ(ierr);
-  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "w_q", 1, PETSC_SCALAR);CHKERRQ(ierr);
-  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", 2, PETSC_REAL);CHKERRQ(ierr);
-  ierr = DMSwarmFinalizeFieldRegister(*sw);CHKERRQ(ierr);
-  ierr = DMPlexGetHeightStratum(dm, 0, NULL, &Ncell);CHKERRQ(ierr);
-  ierr = DMSwarmSetLocalSizes(*sw, Ncell * Np, 0);CHKERRQ(ierr);
-  ierr = DMSetFromOptions(*sw);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, "w_q", NULL, NULL, (void **) &vals);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, "kinematics", NULL, NULL, (void **) &initialConditions);CHKERRQ(ierr);
 
+  ierr = DMGetApplicationContext(dmSw, (void **) &user);CHKERRQ(ierr);
+  simplex = user->simplex;
+  Np   = user->particlesPerCell;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = PetscMalloc5(dim, &centroid, dim, &xi0, dim, &v0, dim*dim, &J, dim*dim, &invJ);CHKERRQ(ierr);
-  for (c = 0; c < Ncell; ++c) {
+  for (d = 0; d < dim; ++d) xi0[d] = -1.0;
+  ierr = DMSwarmGetField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
     if (Np == 1) {
       ierr = DMPlexComputeCellGeometryFVM(dm, c, NULL, centroid, NULL);CHKERRQ(ierr);
-      cellid[c] = c;
       for (d = 0; d < dim; ++d) coords[c*dim+d] = centroid[d];
     } else {
       ierr = DMPlexComputeCellGeometryFEM(dm, c, NULL, v0, J, invJ, &detJ);CHKERRQ(ierr); /* affine */
-      for (d = 0; d < dim; ++d) xi0[d] = -1.0;
       for (p = 0; p < Np; ++p) {
         const PetscInt n   = c*Np + p;
         PetscReal      sum = 0.0, refcoords[3];
 
-        cellid[n] = c;
-        for (d = 0; d < dim; ++d) {ierr = PetscRandomGetValue(rnd, &value);CHKERRQ(ierr); refcoords[d] = PetscRealPart(value); sum += refcoords[d];}
-        if (user->simplex && sum > 0.0) for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim)*sum;
+        for (d = 0; d < dim; ++d) {
+          ierr = PetscRandomGetValueReal(rnd, &refcoords[d]);CHKERRQ(ierr);
+          sum += refcoords[d];
+        }
+        if (simplex && sum > 0.0) for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim)*sum;
         CoordinatesRefToReal(dim, dim, xi0, v0, J, refcoords, &coords[n*dim]);
       }
     }
   }
+  ierr = DMSwarmRestoreField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
   ierr = PetscFree5(centroid, xi0, v0, J, invJ);CHKERRQ(ierr);
-  for (c = 0; c < Ncell; ++c) {
+  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode SetInitialConditions(DM dmSw, Vec u)
+{
+  DM             dm;
+  AppCtx        *user;
+  PetscReal     *coords;
+  PetscScalar   *initialConditions;
+  PetscInt       dim, cStart, cEnd, c, Np, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetApplicationContext(dmSw, (void **) &user);CHKERRQ(ierr);
+  Np   = user->particlesPerCell;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMSwarmGetField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  ierr = VecGetArray(u, &initialConditions);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
     for (p = 0; p < Np; ++p) {
       const PetscInt n = c*Np + p;
-      
-      for (d = 0; d < dim; ++d) {ierr = PetscRandomGetValue(rndp, &value);CHKERRQ(ierr); coords[n*dim+d] += PetscRealPart(value);}
-      user->func(dim, 0.0, &coords[n*dim], 1, &vals[c], user);
+
+      initialConditions[n*2+0] = DMPlex_NormD_Internal(dim, &coords[n*dim]);
+      initialConditions[n*2+1] = 0.0;
     }
   }
-  for (p = 0; p < Np*Ncell; ++p) {
-    initialConditions[p*2+0] = p+0.2; 
-    initialConditions[p*2+1] = 0.0;
+  ierr = VecRestoreArray(u, &initialConditions);CHKERRQ(ierr);
+  ierr = DMSwarmRestoreField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user)
+{
+  PetscInt      *cellid;
+  PetscInt       dim, cStart, cEnd, c, Np = user->particlesPerCell, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMCreate(PetscObjectComm((PetscObject) dm), sw);CHKERRQ(ierr);
+  ierr = DMSetType(*sw, DMSWARM);CHKERRQ(ierr);
+  ierr = DMSetDimension(*sw, dim);CHKERRQ(ierr);
+
+  ierr = DMSwarmSetType(*sw, DMSWARM_PIC);CHKERRQ(ierr);
+  ierr = DMSwarmSetCellDM(*sw, dm);CHKERRQ(ierr);
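+  /* The "kinematics" field stores the scalar position x and velocity v of each particle */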
+  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", 2, PETSC_REAL);CHKERRQ(ierr);
+  ierr = DMSwarmFinalizeFieldRegister(*sw);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMSwarmSetLocalSizes(*sw, (cEnd - cStart) * Np, 0);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(*sw);CHKERRQ(ierr);
+  ierr = DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
+    for (p = 0; p < Np; ++p) {
+      const PetscInt n = c*Np + p;
+
+      cellid[n] = c;
+    }
   }
-  ierr = DMSwarmRestoreField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
   ierr = DMSwarmRestoreField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
-  ierr = DMSwarmRestoreField(*sw, "w_q", NULL, NULL, (void **) &vals);CHKERRQ(ierr);
-  ierr = DMSwarmRestoreField(*sw, "kinematics", NULL, NULL, (void **) &initialConditions);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rndp);CHKERRQ(ierr);
   ierr = PetscObjectSetName((PetscObject) *sw, "Particles");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*sw, NULL, "-sw_view");CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
 /* Create particle RHS Functions */
-static PetscErrorCode RHSFunction1(TS ts,PetscReal t,Vec V,Vec Xres,void *ctx)
+static PetscErrorCode RHSFunction1(TS ts, PetscReal t, Vec V, Vec Xres, void *ctx)
 {
   const PetscScalar *v;
   PetscScalar       *xres;
-  PetscInt          Np, p;
-  PetscErrorCode    ierr;
+  PetscInt           Np, p;
+  PetscErrorCode     ierr;
 
   PetscFunctionBeginUser;
-  ierr = VecGetArray(Xres,&xres);CHKERRQ(ierr);
-  ierr = VecGetArrayRead(V,&v);CHKERRQ(ierr);
+  ierr = VecGetArray(Xres, &xres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(V, &v);CHKERRQ(ierr);
   ierr = VecGetLocalSize(Xres, &Np);CHKERRQ(ierr);
-
   for (p = 0; p < Np; ++p) {
      xres[p] = v[p];
   }
-
-  ierr = VecRestoreArrayRead(V,&v);CHKERRQ(ierr);
-  ierr = VecRestoreArray(Xres,&xres);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(V, &v);CHKERRQ(ierr);
+  ierr = VecRestoreArray(Xres, &xres);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-
-static PetscErrorCode RHSFunction2(TS ts,PetscReal t,Vec X,Vec Vres,void *ctx)
+static PetscErrorCode RHSFunction2(TS ts, PetscReal t, Vec X, Vec Vres, void *ctx)
 {
-  AppCtx*           user = (AppCtx*)ctx;
+  AppCtx            *user = (AppCtx *)ctx;
   const PetscScalar *x;
-  PetscInt          Np, p;
   PetscScalar       *vres;
-  PetscErrorCode    ierr; 
+  PetscInt           Np, p;
+  PetscErrorCode     ierr;
 
   PetscFunctionBeginUser;
-  ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
-  ierr = VecGetArray(Vres,&vres);CHKERRQ(ierr);
+  ierr = VecGetArray(Vres, &vres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(X, &x);CHKERRQ(ierr);
   ierr = VecGetLocalSize(Vres, &Np);CHKERRQ(ierr);
-
-  for(p = 0; p < Np; ++p){
-    vres[p] = -user->omega*user->omega*x[p];
+  for (p = 0; p < Np; ++p) {
+    vres[p] = -PetscSqr(user->omega)*x[p];
   }
-
-  ierr = VecRestoreArray(Vres,&vres);CHKERRQ(ierr);
-  ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(X, &x);CHKERRQ(ierr);
+  ierr = VecRestoreArray(Vres, &vres);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode RHSFunctionParticles(TS ts,PetscReal t,Vec U,Vec R,void *ctx)
+static PetscErrorCode RHSFunctionParticles(TS ts, PetscReal t, Vec U, Vec R, void *ctx)
 {
-  DM                dm;
-  AppCtx*           user = (AppCtx*)ctx;
+  AppCtx            *user = (AppCtx *) ctx;
+  DM                 dm;
   const PetscScalar *u;
   PetscScalar       *r;
-  PetscInt          Np, p;
-  PetscErrorCode    ierr;
+  PetscInt           Np, p;
+  PetscErrorCode     ierr;
 
   PetscFunctionBeginUser;
-  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
-  ierr = VecGetArrayRead(U,&u);CHKERRQ(ierr);
-  ierr = VecGetArray(R,&r);CHKERRQ(ierr);
   ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = VecGetArray(R, &r);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
   Np  /= 2;
-
-  for(p = 0; p < Np; ++p){
+  for (p = 0; p < Np; ++p) {
     r[p*2+0] = u[p*2+1];
-    r[p*2+1] = -user->omega*user->omega*u[p*2+0];
+    r[p*2+1] = -PetscSqr(user->omega)*u[p*2+0];
   }
-  
-  ierr = VecRestoreArrayRead(U,&u);CHKERRQ(ierr);
-  ierr = VecRestoreArray(R,&r);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(R, &r);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal t,Vec U,void *ctx)
+static PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal t, Vec U, void *ctx)
 {
-  PetscErrorCode    ierr;
+  AppCtx            *user  = (AppCtx *) ctx;
+  const PetscReal    omega = user->omega;
   const PetscScalar *u;
-  PetscReal         dt;
-  PetscInt          p, Np;
-  AppCtx*           user = (AppCtx*)ctx;
+  MPI_Comm           comm;
+  PetscReal          dt;
+  PetscInt           Np, p;
+  PetscErrorCode     ierr;
 
   PetscFunctionBeginUser;
-  if (step%user->nts == 0) {
-    ierr = TSGetTimeStep(ts,&dt);CHKERRQ(ierr);
-    ierr = VecGetArrayRead(U,&u);CHKERRQ(ierr);
+  if (step%user->ostep == 0) {
+    ierr = PetscObjectGetComm((PetscObject) ts, &comm);CHKERRQ(ierr);
+    if (!step) {ierr = PetscPrintf(comm, "Time     Step Part     Energy Mod Energy\n");CHKERRQ(ierr);}
+    ierr = TSGetTimeStep(ts, &dt);CHKERRQ(ierr);
+    ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
     ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
     Np /= 2;
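+    /* E is the oscillator energy 0.5*(v^2 + omega^2 x^2); mE subtracts the O(dt) cross term 0.5*omega^2*dt*x*v */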
+    for (p = 0; p < Np; ++p) {
+      const PetscReal x  = PetscRealPart(u[p*2+0]);
+      const PetscReal v  = PetscRealPart(u[p*2+1]);
+      const PetscReal E  = 0.5*(v*v + PetscSqr(omega)*x*x);
+      const PetscReal mE = 0.5*(v*v + PetscSqr(omega)*x*x - PetscSqr(omega)*dt*x*v);
 
-    for(p = 0; p < Np; ++p){
-      ierr = PetscPrintf(PETSC_COMM_WORLD,"Energy: %g\n",(double) (u[p*2+1]*u[p*2+1]+user->omega*user->omega*u[p*2]*u[p*2])/2.);CHKERRQ(ierr);
-      ierr = PetscPrintf(PETSC_COMM_WORLD,"Modified energy: %g\n", (double) (u[p*2+1]*u[p*2+1]+user->omega*user->omega*u[p*2]*u[p*2]-user->omega*user->omega*dt*u[p*2]*u[p*2+1])/2.);CHKERRQ(ierr);
+      ierr = PetscPrintf(comm, "%.6lf %4D %4D %10.4lf %10.4lf\n", t, step, p, (double) E, (double) mE);CHKERRQ(ierr);
     }
+    ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
 
-    ierr = VecRestoreArrayRead(U,&u);CHKERRQ(ierr);
+static PetscErrorCode InitializeSolve(TS ts, Vec u)
+{
+  DM             dm;
+  AppCtx        *user;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(dm, (void **) &user);CHKERRQ(ierr);
+  ierr = SetInitialCoordinates(dm);CHKERRQ(ierr);
+  ierr = SetInitialConditions(dm, u);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode ComputeError(TS ts, Vec U, Vec E)
+{
+  MPI_Comm           comm;
+  DM                 sdm;
+  AppCtx            *user;
+  const PetscScalar *u, *coords;
+  PetscScalar       *e;
+  PetscReal          t, omega;
+  PetscInt           dim, Np, p;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscObjectGetComm((PetscObject) ts, &comm);CHKERRQ(ierr);
+  ierr = TSGetDM(ts, &sdm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(sdm, (void **) &user);CHKERRQ(ierr);
+  omega = user->omega;
+  ierr = DMGetDimension(sdm, &dim);CHKERRQ(ierr);
+  ierr = TSGetSolveTime(ts, &t);CHKERRQ(ierr);
+  ierr = VecGetArray(E, &e);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
+  Np  /= 2;
+  ierr = DMSwarmGetField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
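+  /* Exact solution: x(t) = x0 cos(omega t), v(t) = -x0 omega sin(omega t), with x0 the initial distance from the origin */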
+  for (p = 0; p < Np; ++p) {
+    const PetscReal x  = PetscRealPart(u[p*2+0]);
+    const PetscReal v  = PetscRealPart(u[p*2+1]);
+    const PetscReal x0 = DMPlex_NormD_Internal(dim, &coords[p*dim]);
+    const PetscReal ex =  x0*PetscCosReal(omega*t);
+    const PetscReal ev = -x0*omega*PetscSinReal(omega*t);
+
+    if (user->error) {ierr = PetscPrintf(comm, "p%D error [%.2g %.2g] sol [%.6lf %.6lf] exact [%.6lf %.6lf] energy/exact energy %g / %g\n", p, (double) PetscAbsReal(x-ex), (double) PetscAbsReal(v-ev), (double) x, (double) v, (double) ex, (double) ev, 0.5*(v*v + PetscSqr(omega)*x*x), (double) 0.5*PetscSqr(omega*x0));}
+    e[p*2+0] = x - ex;
+    e[p*2+1] = v - ev;
   }
+  ierr = DMSwarmRestoreField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(E, &e);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
 int main(int argc,char **argv)
 {
-  TS              ts;            /* nonlinear solver */
-  IS              is1,is2;
-  PetscReal       ftime   = 0.1;
-  PetscInt        locsize, p, Np;
-  Vec             f;              /* swarm vector */
-  MPI_Comm        comm;
-  DM              dm, sw;
-  const PetscReal *fkin;
-  AppCtx          user;
-
-  PetscErrorCode  ierr;  
-
-  
-  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
-  comm = PETSC_COMM_WORLD;
+  TS             ts;     /* nonlinear solver */
+  DM             dm, sw; /* Mesh and particle managers */
+  Vec            u;      /* swarm vector */
+  IS             is1, is2;
+  PetscInt       n;
+  MPI_Comm       comm;
+  AppCtx         user;
+  PetscErrorCode ierr;
 
+  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
+  comm = PETSC_COMM_WORLD;
   ierr = ProcessOptions(comm, &user);CHKERRQ(ierr);
-  
-  /* Create dm and particles */
+
   ierr = CreateMesh(comm, &dm, &user);CHKERRQ(ierr);
   ierr = CreateParticles(dm, &sw, &user);CHKERRQ(ierr);
-  ierr = DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &f);CHKERRQ(ierr);
-  
-  ierr = VecGetLocalSize(f, &locsize);CHKERRQ(ierr);
-
-  ierr = ISCreateStride(comm, locsize/2, 0, 2,&is1);CHKERRQ(ierr);
-  ierr = ISCreateStride(comm, locsize/2, 1, 2,&is2);CHKERRQ(ierr);
-  
-  ierr = TSCreate(comm,&ts);CHKERRQ(ierr);
-  ierr = TSSetType(ts,TSBASICSYMPLECTIC);CHKERRQ(ierr);
-  ierr = TSSetDM(ts, dm);CHKERRQ(ierr);
-  ierr = TSRHSSplitSetIS(ts,"position",is1);CHKERRQ(ierr);
-  ierr = TSRHSSplitSetIS(ts,"momentum",is2);CHKERRQ(ierr);
-  
-  ierr = TSRHSSplitSetRHSFunction(ts,"position",NULL,RHSFunction1,&user);CHKERRQ(ierr);
-  ierr = TSRHSSplitSetRHSFunction(ts,"momentum",NULL,RHSFunction2,&user);CHKERRQ(ierr);
-  
-  ierr = TSSetRHSFunction(ts,NULL,RHSFunctionParticles,&user);CHKERRQ(ierr);
-
-  ierr = TSSetMaxTime(ts,ftime);CHKERRQ(ierr);
-  ierr = TSSetTimeStep(ts,0.00001);CHKERRQ(ierr);
-  ierr = TSSetMaxSteps(ts,100);CHKERRQ(ierr);
-  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
-  
-  if (user.monitor) {
-    ierr = TSMonitorSet(ts,Monitor,&user,NULL);CHKERRQ(ierr);
-  }
-  
-  ierr = TSSetTime(ts,0.0);CHKERRQ(ierr);
-  ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
-
-  ierr = TSSolve(ts,f);CHKERRQ(ierr);
-  ierr = TSGetSolveTime(ts,&ftime);CHKERRQ(ierr);
-  
-  ierr = VecGetLocalSize(f, &Np);CHKERRQ(ierr);
-  Np /= 2;
+  ierr = DMSetApplicationContext(sw, &user);CHKERRQ(ierr);
+
+  ierr = TSCreate(comm, &ts);CHKERRQ(ierr);
+  ierr = TSSetType(ts, TSBASICSYMPLECTIC);CHKERRQ(ierr);
+  ierr = TSSetDM(ts, sw);CHKERRQ(ierr);
+  ierr = TSSetMaxTime(ts, 0.1);CHKERRQ(ierr);
+  ierr = TSSetTimeStep(ts, 0.00001);CHKERRQ(ierr);
+  ierr = TSSetMaxSteps(ts, 100);CHKERRQ(ierr);
+  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
+  if (user.monitor) {ierr = TSMonitorSet(ts, Monitor, &user, NULL);CHKERRQ(ierr);}
+  ierr = TSSetRHSFunction(ts, NULL, RHSFunctionParticles, &user);CHKERRQ(ierr);
+
+  ierr = DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(u, &n);CHKERRQ(ierr);
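+  /* Even entries of u are positions, odd entries are velocities */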
+  ierr = ISCreateStride(comm, n/2, 0, 2, &is1);CHKERRQ(ierr);
+  ierr = ISCreateStride(comm, n/2, 1, 2, &is2);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetIS(ts, "position", is1);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetIS(ts, "momentum", is2);CHKERRQ(ierr);
+  ierr = ISDestroy(&is1);CHKERRQ(ierr);
+  ierr = ISDestroy(&is2);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetRHSFunction(ts, "position", NULL, RHSFunction1, &user);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetRHSFunction(ts, "momentum", NULL, RHSFunction2, &user);CHKERRQ(ierr);
 
-  ierr = VecGetArrayRead(f,&fkin);CHKERRQ(ierr);
-  for (p = 0; p < Np; ++p) {
+  ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
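+  /* Callbacks used by -ts_convergence_estimate to generate initial conditions and evaluate the exact error */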
+  ierr = TSSetComputeInitialCondition(ts, InitializeSolve);CHKERRQ(ierr);
+  ierr = TSSetComputeExactError(ts, ComputeError);CHKERRQ(ierr);
+  ierr = TSComputeInitialCondition(ts, u);CHKERRQ(ierr);
+  ierr = TSSolve(ts, u);CHKERRQ(ierr);
+  ierr = DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
 
-    ierr = PetscPrintf(comm,"The particle solution for (xp, vp) at time %.6lf is [%g %g]\n",(double)ftime,fkin[p*2],fkin[p*2+1]);CHKERRQ(ierr);
-    ierr = PetscPrintf(comm,"The exact solution for (xp, vp) at time %.6lf is [%g %g]\n",(double)ftime,(double) (p+0.2)*PetscCosReal(user.omega*ftime),(double) -(p+0.2)*user.omega*PetscSinReal(user.omega*ftime));CHKERRQ(ierr);
-    ierr = PetscPrintf(comm,"Exact Particle Energy: %g\n", (double) .5*64*64*(p+0.2)*(p+0.2));
-  
-  }
-  ierr = VecRestoreArrayRead(f,&fkin);CHKERRQ(ierr);
-  
-  ierr = DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &f);CHKERRQ(ierr);
   ierr = TSDestroy(&ts);CHKERRQ(ierr);
-  ierr = ISDestroy(&is1);CHKERRQ(ierr);
-  ierr = ISDestroy(&is2);CHKERRQ(ierr);
-  ierr = DMDestroy(&dm);CHKERRQ(ierr);
   ierr = DMDestroy(&sw);CHKERRQ(ierr);
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
   ierr = PetscFinalize();
   return ierr;
 }
 
-
 /*TEST
 
    build:
      requires: triangle !single !complex
    test:
-     args: -dim 2 -faces 1 -particlesPerCell 1 -dm_view -sw_view -ts_basicsymplectic_type 1 -monitor
+     suffix: 1
+     args: -dm_plex_box_faces 1,1 -ts_basicsymplectic_type 1 -ts_convergence_estimate -convest_num_refine 2 -dm_view -sw_view -monitor -output_step 50 -error
    test:
      suffix: 2
-     args: -dim 2 -faces 1 -particlesPerCell 1 -dm_view -sw_view -ts_basicsymplectic_type 2 -monitor
+     args: -dm_plex_box_faces 1,1 -ts_basicsymplectic_type 2 -ts_convergence_estimate -convest_num_refine 2 -dm_view -sw_view -monitor -output_step 50 -error
+   test:
+     suffix: 3
+     args: -dm_plex_box_faces 1,1 -ts_basicsymplectic_type 3 -ts_convergence_estimate -convest_num_refine 2 -ts_dt 0.0001 -dm_view -sw_view -monitor -output_step 50 -error
+   test:
+     suffix: 4
+     args: -dm_plex_box_faces 1,1 -ts_basicsymplectic_type 4 -ts_convergence_estimate -convest_num_refine 2 -ts_dt 0.0001 -dm_view -sw_view -monitor -output_step 50 -error
 
 TEST*/
diff --git a/src/dm/impls/swarm/examples/tests/ex5.c b/src/dm/impls/swarm/examples/tests/ex5.c
index 7156232b2dd..b2ee9daf0d7 100644
--- a/src/dm/impls/swarm/examples/tests/ex5.c
+++ b/src/dm/impls/swarm/examples/tests/ex5.c
@@ -1,189 +1,61 @@
 static char help[] = "Vlasov example of particles orbiting around a central massive point.\n";
 
 #include 
-#include 
+#include <petsc/private/dmpleximpl.h>  /* For norm */
+#include <petsc/private/petscfeimpl.h> /* For CoordinatesRefToReal() */
 #include 
-#include 
-#include 
-#include 
 #include 
-#include 
+
 typedef struct {
-  PetscInt       dim;                              /* The topological mesh dimension */
-  PetscInt       nts;                              /* print the energy at each nts time steps */
-  PetscBool      simplex;                          /* Flag for simplices or tensor cells */
-  PetscBool      monitor;                          /* Flag for use of the TS monitor */
-  char           meshFilename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */
-  PetscInt       faces;                            /* Number of faces per edge if unit square/cube generated */
-  PetscReal      domain_lo[3], domain_hi[3];       /* Lower left and upper right mesh corners */
-  PetscReal omega;                                 /* Oscillation value omega */
-  DMBoundaryType boundary[3];                      /* The domain boundary type, e.g. periodic */
-  PetscInt       particlesPerCell;                 /* The number of partices per cell */
-  PetscReal      particleRelDx;                    /* Relative particle position perturbation compared to average cell diameter h */
-  PetscReal      meshRelDx;                        /* Relative vertex position perturbation compared to average cell diameter h */
-  PetscInt       k;                                /* Mode number for test function */
-  PetscReal      momentTol;                        /* Tolerance for checking moment conservation */
-  PetscErrorCode (*func)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
+  PetscInt  dim;                          /* The topological mesh dimension */
+  PetscBool simplex;                      /* Flag for simplices or tensor cells */
+  char      filename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */
+  PetscInt  particlesPerCell;             /* The number of particles per cell */
+  PetscReal momentTol;                    /* Tolerance for checking moment conservation */
+  PetscBool monitor;                      /* Flag for using the TS monitor */
+  PetscBool error;                        /* Flag for printing the error */
+  PetscInt  ostep;                        /* print the energy at each ostep time steps */
 } AppCtx;
 
-static PetscErrorCode linear(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx  *ctx = (AppCtx *) a_ctx;
-  PetscInt d;
-
-  u[0] = 0.0;
-  for (d = 0; d < dim; ++d) u[0] += x[d]/(ctx->domain_hi[d] - ctx->domain_lo[d]);
-  return 0;
-}
-
-static PetscErrorCode x2_x4(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx  *ctx = (AppCtx *) a_ctx;
-  PetscInt d;
-
-  u[0] = 1;
-  for (d = 0; d < dim; ++d) u[0] *= PetscSqr(x[d])*PetscSqr(ctx->domain_hi[d]) - PetscPowRealInt(x[d], 4);
-  return 0;
-}
-
-static PetscErrorCode sinx(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *a_ctx)
-{
-  AppCtx *ctx = (AppCtx *) a_ctx;
-
-  u[0] = sin(2*PETSC_PI*ctx->k*x[0]/(ctx->domain_hi[0] - ctx->domain_lo[0]));
-  return 0;
-}
-
-
-
 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 {
-  PetscInt       ii, bd;
-  char           fstring[PETSC_MAX_PATH_LEN] = "linear";
-  PetscBool      flag;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
   options->dim              = 2;
   options->simplex          = PETSC_TRUE;
-  options->monitor          = PETSC_TRUE;
-  options->faces            = 1;
-  options->domain_lo[0]     = 0.0;
-  options->domain_lo[1]     = 0.0;
-  options->domain_lo[2]     = 0.0;
-  options->domain_hi[0]     = 1.0;
-  options->domain_hi[1]     = 1.0;
-  options->domain_hi[2]     = 1.0;
-  options->boundary[0]      = DM_BOUNDARY_NONE; /* PERIODIC (plotting does not work in parallel, moments not conserved) */
-  options->boundary[1]      = DM_BOUNDARY_NONE;
-  options->boundary[2]      = DM_BOUNDARY_NONE;
+  options->monitor          = PETSC_FALSE;
+  options->error            = PETSC_FALSE;
   options->particlesPerCell = 1;
-  options->k                = 1;
-  options->particleRelDx    = 1.e-20;
-  options->meshRelDx        = 1.e-20;
-  options->momentTol        = 100.*PETSC_MACHINE_EPSILON;
-  options->omega            = 64.;
-  options->nts              = 100;
-  
-  ierr = PetscOptionsBegin(comm, "", "L2 Projection Options", "DMPLEX");CHKERRQ(ierr);
-  
-  ierr = PetscStrcpy(options->meshFilename, "");CHKERRQ(ierr);
-
-  ierr = PetscOptionsInt("-next_output","time steps for next output point","<100>",options->nts,&options->nts,PETSC_NULL);CHKERRQ(ierr); 
-  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex5.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
-  
-  ierr = PetscOptionsBool("-monitor", "To use the TS monitor or not", "ex5.c", options->monitor, &options->monitor, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex5.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
-  
-  ierr = PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex5.c", options->meshFilename, options->meshFilename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-faces", "Number of faces per edge if unit square/cube generated", "ex5.c", options->faces, &options->faces, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-k", "Mode number of test", "ex5.c", options->k, &options->k, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsInt("-particlesPerCell", "Number of particles per cell", "ex5.c", options->particlesPerCell, &options->particlesPerCell, NULL);CHKERRQ(ierr);
-
-  ierr = PetscOptionsReal("-omega","parameter","<64>",options->omega,&options->omega,PETSC_NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsReal("-particle_perturbation", "Relative perturbation of particles (0,1)", "ex5.c", options->particleRelDx, &options->particleRelDx, NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsReal("-mesh_perturbation", "Relative perturbation of mesh points (0,1)", "ex5.c", options->meshRelDx, &options->meshRelDx, NULL);CHKERRQ(ierr);
-  ii = options->dim;
-  ierr = PetscOptionsRealArray("-domain_hi", "Domain size", "ex5.c", options->domain_hi, &ii, NULL);CHKERRQ(ierr);
-  ii = options->dim;
-  ierr = PetscOptionsRealArray("-domain_lo", "Domain size", "ex5.c", options->domain_lo, &ii, NULL);CHKERRQ(ierr);
-  bd = options->boundary[0];
-  ierr = PetscOptionsEList("-x_boundary", "The x-boundary", "ex5.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[0]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[0] = (DMBoundaryType) bd;
-  bd = options->boundary[1];
-  ierr = PetscOptionsEList("-y_boundary", "The y-boundary", "ex5.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[1]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[1] = (DMBoundaryType) bd;
-  bd = options->boundary[2];
-  ierr = PetscOptionsEList("-z_boundary", "The z-boundary", "ex5.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->boundary[2]], &bd, NULL);CHKERRQ(ierr);
-  options->boundary[2] = (DMBoundaryType) bd;
-  ierr = PetscOptionsString("-function", "Name of test function", "ex5.c", fstring, fstring, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
-  ierr = PetscStrcmp(fstring, "linear", &flag);CHKERRQ(ierr);
-  if (flag) {
-    options->func = linear;
-  } else {
-    ierr = PetscStrcmp(fstring, "sin", &flag);CHKERRQ(ierr);
-    if (flag) {
-      options->func = sinx;
-    } else {
-      ierr = PetscStrcmp(fstring, "x2_x4", &flag);CHKERRQ(ierr);
-      options->func = x2_x4;
-      if (!flag) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown function %s",fstring);
-    }
-  }
+  options->momentTol        = 100.0*PETSC_MACHINE_EPSILON;
+  options->ostep            = 100;
+
+  ierr = PetscStrcpy(options->filename, "");CHKERRQ(ierr);
+
+  ierr = PetscOptionsBegin(comm, "", "Vlasov Options", "DMPLEX");CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-output_step", "Number of time steps between output", "ex4.c", options->ostep, &options->ostep, PETSC_NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex4.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-monitor", "Flag to use the TS monitor", "ex4.c", options->monitor, &options->monitor, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-error", "Flag to print the error", "ex4.c", options->error, &options->error, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex4.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex4.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-particles_per_cell", "Number of particles per cell", "ex4.c", options->particlesPerCell, &options->particlesPerCell, NULL);CHKERRQ(ierr);
   ierr = PetscOptionsEnd();CHKERRQ(ierr);
 
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode PerturbVertices(DM dm, AppCtx *user)
-{
-  PetscRandom    rnd;
-  PetscReal      interval = user->meshRelDx;
-  Vec            coordinates;
-  PetscScalar   *coords;
-  PetscReal      hh[3];
-  PetscInt       d, cdim, N, p, bs;
-  PetscErrorCode ierr;
-
-  PetscFunctionBeginUser;
-  for (d = 0; d < user->dim; ++d) hh[d] = (user->domain_hi[d] - user->domain_lo[d])/user->faces;
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rnd);CHKERRQ(ierr);
-  ierr = PetscRandomSetInterval(rnd, -interval, interval);CHKERRQ(ierr);
-  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
-  ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
-  ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(coordinates, &N);CHKERRQ(ierr);
-  ierr = VecGetBlockSize(coordinates, &bs);CHKERRQ(ierr);
-  if (bs != cdim) SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_SIZ, "Coordinate vector has wrong block size %D != %D", bs, cdim);
-  ierr = VecGetArray(coordinates, &coords);CHKERRQ(ierr);
-  for (p = 0; p < N; p += cdim) {
-    PetscScalar *coord = &coords[p], value;
-
-    for (d = 0; d < cdim; ++d) {
-      ierr = PetscRandomGetValue(rnd, &value);CHKERRQ(ierr);
-      coord[d] = PetscMax(user->domain_lo[d], PetscMin(user->domain_hi[d], coord[d] + value*hh[d]));
-    }
-  }
-  ierr = VecRestoreArray(coordinates, &coords);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
-  PetscFunctionReturn(0);
-}
-
-
 static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
 {
   PetscBool      flg;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
-  ierr = PetscStrcmp(user->meshFilename, "", &flg);CHKERRQ(ierr);
+  ierr = PetscStrcmp(user->filename, "", &flg);CHKERRQ(ierr);
   if (flg) {
-    PetscInt faces[3];
-
-    faces[0] = user->faces; faces[1] = user->faces; faces[2] = user->faces;
-    ierr = DMPlexCreateBoxMesh(comm, user->dim, user->simplex, faces, user->domain_lo, user->domain_hi, user->boundary, PETSC_TRUE, dm);CHKERRQ(ierr);
+    ierr = DMPlexCreateBoxMesh(comm, user->dim, user->simplex, NULL, NULL, NULL, NULL, PETSC_TRUE, dm);CHKERRQ(ierr);
   } else {
-    ierr = DMPlexCreateFromFile(comm, user->meshFilename, PETSC_TRUE, dm);CHKERRQ(ierr);
+    ierr = DMPlexCreateFromFile(comm, user->filename, PETSC_TRUE, dm);CHKERRQ(ierr);
     ierr = DMGetDimension(*dm, &user->dim);CHKERRQ(ierr);
   }
   {
@@ -197,274 +69,336 @@ static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
   }
   ierr = DMLocalizeCoordinates(*dm);CHKERRQ(ierr); /* needed for periodic */
   ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
-  ierr = PerturbVertices(*dm, user);CHKERRQ(ierr);
   ierr = PetscObjectSetName((PetscObject) *dm, "Mesh");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user)
+static PetscErrorCode SetInitialCoordinates(DM dmSw)
 {
-  PetscRandom    rnd, rndp;
-  PetscReal      interval = user->particleRelDx;
-  PetscScalar    value, *vals;
-  PetscReal     *centroid, *coords, *xi0, *v0, *J, *invJ, detJ, *initialConditions;
-  PetscInt      *cellid;
-  PetscInt       Ncell, Np = user->particlesPerCell, p, c, dim, d;
+  DM             dm;
+  AppCtx        *user;
+  PetscRandom    rnd;
+  PetscBool      simplex;
+  PetscReal     *centroid, *coords, *xi0, *v0, *J, *invJ, detJ;
+  PetscInt       dim, d, cStart, cEnd, c, Np, p;
   PetscErrorCode ierr;
 
   PetscFunctionBeginUser;
-  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  ierr = DMCreate(PetscObjectComm((PetscObject) dm), sw);CHKERRQ(ierr);
-  ierr = DMSetType(*sw, DMSWARM);CHKERRQ(ierr);
-  
-  ierr = DMSetDimension(*sw, dim);CHKERRQ(ierr);
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rnd);CHKERRQ(ierr);
+  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dmSw), &rnd);CHKERRQ(ierr);
   ierr = PetscRandomSetInterval(rnd, -1.0, 1.0);CHKERRQ(ierr);
   ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
-  ierr = PetscRandomCreate(PetscObjectComm((PetscObject) dm), &rndp);CHKERRQ(ierr);
-  ierr = PetscRandomSetInterval(rndp, -interval, interval);CHKERRQ(ierr);
-  ierr = PetscRandomSetFromOptions(rndp);CHKERRQ(ierr);
-
-  ierr = DMSwarmSetType(*sw, DMSWARM_PIC);CHKERRQ(ierr);
-  ierr = DMSwarmSetCellDM(*sw, dm);CHKERRQ(ierr);
-  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "w_q", 1, PETSC_SCALAR);CHKERRQ(ierr);
-  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", 2*dim, PETSC_REAL);CHKERRQ(ierr);
-  ierr = DMSwarmFinalizeFieldRegister(*sw);CHKERRQ(ierr);
-  ierr = DMPlexGetHeightStratum(dm, 0, NULL, &Ncell);CHKERRQ(ierr);
-  ierr = DMSwarmSetLocalSizes(*sw, Ncell * Np, 0);CHKERRQ(ierr);
-  ierr = DMSetFromOptions(*sw);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, "w_q", NULL, NULL, (void **) &vals);CHKERRQ(ierr);
-  ierr = DMSwarmGetField(*sw, "kinematics", NULL, NULL, (void **) &initialConditions);CHKERRQ(ierr);
 
+  ierr = DMGetApplicationContext(dmSw, (void **) &user);CHKERRQ(ierr);
+  simplex = user->simplex;
+  Np   = user->particlesPerCell;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
   ierr = PetscMalloc5(dim, &centroid, dim, &xi0, dim, &v0, dim*dim, &J, dim*dim, &invJ);CHKERRQ(ierr);
-  for (c = 0; c < Ncell; ++c) {
+  for (d = 0; d < dim; ++d) xi0[d] = -1.0;
+  ierr = DMSwarmGetField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
     if (Np == 1) {
       ierr = DMPlexComputeCellGeometryFVM(dm, c, NULL, centroid, NULL);CHKERRQ(ierr);
-      cellid[c] = c;
       for (d = 0; d < dim; ++d) coords[c*dim+d] = centroid[d];
     } else {
       ierr = DMPlexComputeCellGeometryFEM(dm, c, NULL, v0, J, invJ, &detJ);CHKERRQ(ierr); /* affine */
-      for (d = 0; d < dim; ++d) xi0[d] = -1.0;
       for (p = 0; p < Np; ++p) {
         const PetscInt n   = c*Np + p;
         PetscReal      sum = 0.0, refcoords[3];
 
-        cellid[n] = c;
-        for (d = 0; d < dim; ++d) {ierr = PetscRandomGetValue(rnd, &value);CHKERRQ(ierr); refcoords[d] = PetscRealPart(value); sum += refcoords[d];}
-        if (user->simplex && sum > 0.0) for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim)*sum;
+        for (d = 0; d < dim; ++d) {
+          ierr = PetscRandomGetValueReal(rnd, &refcoords[d]);CHKERRQ(ierr);
+          sum += refcoords[d];
+        }
+        if (simplex && sum > 0.0) for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim)*sum;
         CoordinatesRefToReal(dim, dim, xi0, v0, J, refcoords, &coords[n*dim]);
       }
     }
   }
+  ierr = DMSwarmRestoreField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
   ierr = PetscFree5(centroid, xi0, v0, J, invJ);CHKERRQ(ierr);
-  for (c = 0; c < Ncell; ++c) {
+  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode SetInitialConditions(DM dmSw, Vec u)
+{
+  DM             dm;
+  AppCtx        *user;
+  PetscScalar   *initialConditions;
+  PetscInt       dim, cStart, cEnd, c, Np, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetApplicationContext(dmSw, (void **) &user);CHKERRQ(ierr);
+  Np   = user->particlesPerCell;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = VecGetArray(u, &initialConditions);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
     for (p = 0; p < Np; ++p) {
       const PetscInt n = c*Np + p;
-      
-      for (d = 0; d < dim; ++d) {ierr = PetscRandomGetValue(rndp, &value);CHKERRQ(ierr); coords[n*dim+d] += PetscRealPart(value);}
-      user->func(dim, 0.0, &coords[n*dim], 1, &vals[c], user);
+
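+      /* Circular orbit: start on the x-axis at radius r = n+1 with speed sqrt(1000/r), so that v^2/r = 1000/r^2 */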
+      initialConditions[(n*2 + 0)*dim + 0] = n+1;
+      initialConditions[(n*2 + 0)*dim + 1] = 0;
+      initialConditions[(n*2 + 1)*dim + 0] = 0;
+      initialConditions[(n*2 + 1)*dim + 1] = PetscSqrtReal(1000./(n+1.));
     }
   }
-  
-  /* Set initial conditions for multiple particles in an orbiting system  in format xp, yp, vxp, vyp */
-  for (p = 0; p < Np*Ncell; ++p) {
-    initialConditions[p*2*dim+0*dim+0] = p+1;
-    initialConditions[p*2*dim+0*dim+1] = 0;
-    initialConditions[p*2*dim+1*dim+0] = 0;
-    initialConditions[p*2*dim+1*dim+1] = PetscSqrtReal(1000./(p+1.));
-  }
+  ierr = VecRestoreArray(u, &initialConditions);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user)
+{
+  PetscInt      *cellid;
+  PetscInt       dim, cStart, cEnd, c, Np = user->particlesPerCell, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMCreate(PetscObjectComm((PetscObject) dm), sw);CHKERRQ(ierr);
+  ierr = DMSetType(*sw, DMSWARM);CHKERRQ(ierr);
+  ierr = DMSetDimension(*sw, dim);CHKERRQ(ierr);
 
-  ierr = DMSwarmRestoreField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  ierr = DMSwarmSetType(*sw, DMSWARM_PIC);CHKERRQ(ierr);
+  ierr = DMSwarmSetCellDM(*sw, dm);CHKERRQ(ierr);
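+  /* The "kinematics" field stores dim position components followed by dim velocity components per particle */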
+  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", 2*dim, PETSC_REAL);CHKERRQ(ierr);
+  ierr = DMSwarmFinalizeFieldRegister(*sw);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = DMSwarmSetLocalSizes(*sw, (cEnd - cStart) * Np, 0);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(*sw);CHKERRQ(ierr);
+  ierr = DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
+  for (c = cStart; c < cEnd; ++c) {
+    for (p = 0; p < Np; ++p) {
+      const PetscInt n = c*Np + p;
+
+      cellid[n] = c;
+    }
+  }
   ierr = DMSwarmRestoreField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
-  ierr = DMSwarmRestoreField(*sw, "w_q", NULL, NULL, (void **) &vals);CHKERRQ(ierr);
-  ierr = DMSwarmRestoreField(*sw, "kinematics", NULL, NULL, (void **) &initialConditions);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);
-  ierr = PetscRandomDestroy(&rndp);CHKERRQ(ierr);
   ierr = PetscObjectSetName((PetscObject) *sw, "Particles");CHKERRQ(ierr);
   ierr = DMViewFromOptions(*sw, NULL, "-sw_view");CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
 /* Create particle RHS Functions for gravity with G = 1 for simplification */
-static PetscErrorCode RHSFunction1(TS ts,PetscReal t,Vec V,Vec Posres,void *ctx)
+static PetscErrorCode RHSFunction1(TS ts, PetscReal t, Vec V, Vec Xres, void *ctx)
 {
+  DM                dm;
   const PetscScalar *v;
-  PetscScalar       *posres;
+  PetscScalar       *xres;
   PetscInt          Np, p, dim, d;
-  DM                dm;
   PetscErrorCode    ierr;
 
   PetscFunctionBeginUser;
-  ierr = VecGetLocalSize(Posres, &Np);CHKERRQ(ierr);
-  ierr = VecGetArray(Posres,&posres);CHKERRQ(ierr);
-  ierr = VecGetArrayRead(V,&v);CHKERRQ(ierr);
   ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(Xres, &Np);CHKERRQ(ierr);
   Np  /= dim;
-
+  ierr = VecGetArray(Xres, &xres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(V, &v);CHKERRQ(ierr);
   for (p = 0; p < Np; ++p) {
-     for(d = 0; d < dim; ++d){
-       posres[p*dim+d] = v[p*dim+d];
+     for (d = 0; d < dim; ++d) {
+       xres[p*dim+d] = v[p*dim+d];
      }
   }
-
-  ierr = VecRestoreArrayRead(V,&v);CHKERRQ(ierr);
-  ierr = VecRestoreArray(Posres,&posres);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(V, &v);CHKERRQ(ierr);
+  ierr = VecRestoreArray(Xres, &xres);CHKERRQ(ierr);
   PetscFunctionReturn(0);
-
 }
 
-static PetscErrorCode RHSFunction2(TS ts,PetscReal t,Vec X,Vec Vres,void *ctx)
+static PetscErrorCode RHSFunction2(TS ts, PetscReal t, Vec X, Vec Vres, void *ctx)
 {
+  DM                dm;
   const PetscScalar *x;
-  PetscScalar       rsqr, *vres;
+  PetscScalar       *vres;
   PetscInt          Np, p, dim, d;
-  DM                dm;
   PetscErrorCode    ierr;
 
 
   PetscFunctionBeginUser;
-  ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(Vres, &Np);CHKERRQ(ierr);
-  ierr = VecGetArray(Vres,&vres);CHKERRQ(ierr);
-
   ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  Np/=dim;
-  
-  for(p = 0; p < Np; ++p){
-    rsqr = 0;
-    for(d = 0; d < dim; ++d) rsqr += PetscSqr(x[p*dim+d]);
-    for(d=0; d< dim; ++d){
-       vres[p*dim+d] = (1000./(p+1.))*(-x[p*dim+d])/rsqr;
+  ierr = VecGetLocalSize(Vres, &Np);CHKERRQ(ierr);
+  Np  /= dim;
+  ierr = VecGetArray(Vres, &vres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(X, &x);CHKERRQ(ierr);
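+  /* Acceleration points toward the origin; rsqr holds |x|, so the magnitude is (1000/(p+1))/|x| */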
+  for (p = 0; p < Np; ++p) {
+    const PetscScalar rsqr = DMPlex_NormD_Internal(dim, &x[p*dim]);
+
+    for (d = 0; d < dim; ++d) {
+      vres[p*dim+d] = -(1000./(p+1.)) * x[p*dim+d]/PetscSqr(rsqr);
     }
   }
-
-  ierr = VecRestoreArray(Vres,&vres);CHKERRQ(ierr);
-  ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
+  ierr = VecRestoreArray(Vres, &vres);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(X, &x);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-static PetscErrorCode RHSFunctionParticles(TS ts,PetscReal t,Vec U,Vec R,void *ctx)
+static PetscErrorCode RHSFunctionParticles(TS ts, PetscReal t, Vec U, Vec R, void *ctx)
 {
+  DM                dm;
   const PetscScalar *u;
-  PetscScalar       *r, rsqr; 
+  PetscScalar       *r;
   PetscInt          Np, p, dim, d;
-  DM                dm;
   PetscErrorCode    ierr;
 
   PetscFunctionBeginUser;
-  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
-  ierr = VecGetArrayRead(U,&u);CHKERRQ(ierr);
-  ierr = VecGetArray(R,&r);CHKERRQ(ierr);
-
   ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
-  Np /= 2*dim;
-  
-  for( p = 0; p < Np; ++p){
-    rsqr = 0;
-    for(d=0; d < dim; ++d) rsqr += PetscSqr(u[p*2*dim+d]);
-    for(d=0; d < dim; ++d){
-        r[p*2*dim+d] = u[p*2*dim+d+2];
-        r[p*2*dim+d+2] = (-1.*1000./(1.+p))*u[p*2*dim+d]/rsqr;
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
+  Np  /= 2*dim;
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetArray(R, &r);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscScalar rsqr = DMPlex_NormD_Internal(dim, &u[p*2*dim]);
+
+    for (d = 0; d < dim; ++d) {
+        r[p*2*dim+d]   = u[p*2*dim+d+2];
+        r[p*2*dim+d+2] = -(1000./(1.+p)) * u[p*2*dim+d]/PetscSqr(rsqr);
     }
   }
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(R, &r);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
 
-  ierr = VecRestoreArrayRead(U,&u);CHKERRQ(ierr);
-  ierr = VecRestoreArray(R,&r);CHKERRQ(ierr);
+static PetscErrorCode InitializeSolve(TS ts, Vec u)
+{
+  DM             dm;
+  AppCtx        *user;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(dm, (void **) &user);CHKERRQ(ierr);
+  ierr = SetInitialCoordinates(dm);CHKERRQ(ierr);
+  ierr = SetInitialConditions(dm, u);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode ComputeError(TS ts, Vec U, Vec E)
+{
+  MPI_Comm           comm;
+  DM                 sdm;
+  AppCtx            *user;
+  const PetscScalar *u, *coords;
+  PetscScalar       *e;
+  PetscReal          t;
+  PetscInt           dim, Np, p;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscObjectGetComm((PetscObject) ts, &comm);CHKERRQ(ierr);
+  ierr = TSGetDM(ts, &sdm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(sdm, (void **) &user);CHKERRQ(ierr);
+  ierr = DMGetDimension(sdm, &dim);CHKERRQ(ierr);
+  ierr = TSGetSolveTime(ts, &t);CHKERRQ(ierr);
+  ierr = VecGetArray(E, &e);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
+  Np  /= 2*dim;
+  ierr = DMSwarmGetField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscScalar *x     = &u[(p*2+0)*dim];
+    const PetscScalar *v     = &u[(p*2+1)*dim];
+    const PetscReal    x0    = p+1.;
+    const PetscReal    omega = PetscSqrtReal(1000./(p+1.))/x0;
+    const PetscReal    xe[3] = { x0*PetscCosReal(omega*t),       x0*PetscSinReal(omega*t),       0.0};
+    const PetscReal    ve[3] = {-x0*omega*PetscSinReal(omega*t), x0*omega*PetscCosReal(omega*t), 0.0};
+    PetscInt           d;
+
+    for (d = 0; d < dim; ++d) {
+      e[(p*2+0)*dim+d] = x[d] - xe[d];
+      e[(p*2+1)*dim+d] = v[d] - ve[d];
+    }
+    if (user->error) {ierr = PetscPrintf(comm, "p%D error [%.2g %.2g] sol [(%.6lf %.6lf) (%.6lf %.6lf)] exact [(%.6lf %.6lf) (%.6lf %.6lf)] energy/exact energy %g / %g\n", p, (double) DMPlex_NormD_Internal(dim, &e[(p*2+0)*dim]), (double) DMPlex_NormD_Internal(dim, &e[(p*2+1)*dim]), (double) x[0], (double) x[1], (double) v[0], (double) v[1], (double) xe[0], (double) xe[1], (double) ve[0], (double) ve[1], 0.5*DMPlex_NormD_Internal(dim, v), (double) (0.5*(1000./(p+1))));}
+  }
+  ierr = DMSwarmRestoreField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(E, &e);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-int main(int argc,char **argv)
+int main(int argc, char **argv)
 {
-  TSConvergedReason reason;
+  TS                 ts;
+  TSConvergedReason  reason;
+  DM                 dm, sw;
+  Vec                u;
+  IS                 is1, is2;
+  PetscInt          *idx1, *idx2;
+  MPI_Comm           comm;
+  AppCtx             user;
   const PetscScalar *endVals;
-  PetscReal         ftime   = .1, vx, vy;
-  PetscInt          locSize, p, d, dim, Np, steps, *idx1, *idx2;
-  Vec               f;             
-  TS                ts;            
-  IS                is1,is2;
-  DM                dm, sw;
-  AppCtx            user;
-  MPI_Comm          comm;
-  PetscErrorCode    ierr;  
-
-  
-  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
-  comm = PETSC_COMM_WORLD;
+  PetscReal          ftime   = .1;
+  PetscInt           locSize, dim, d, Np, p, steps;
+  PetscErrorCode     ierr;
 
+  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
+  comm = PETSC_COMM_WORLD;
   ierr = ProcessOptions(comm, &user);CHKERRQ(ierr);
-  
-  /* Create dm and particles */
+
   ierr = CreateMesh(comm, &dm, &user);CHKERRQ(ierr);
   ierr = CreateParticles(dm, &sw, &user);CHKERRQ(ierr);
+  ierr = DMSetApplicationContext(sw, &user);CHKERRQ(ierr);
 
-  /* Get vector containing initial conditions from swarm */
-  ierr = DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &f);CHKERRQ(ierr);
-  
-  /* Allocate for IS Strides that will contain x, y and vx, vy */
+  ierr = TSCreate(comm, &ts);CHKERRQ(ierr);
+  ierr = TSSetType(ts, TSBASICSYMPLECTIC);CHKERRQ(ierr);
+  ierr = TSSetDM(ts, sw);CHKERRQ(ierr);
+  ierr = TSSetMaxTime(ts, ftime);CHKERRQ(ierr);
+  ierr = TSSetTimeStep(ts, 0.0001);CHKERRQ(ierr);
+  ierr = TSSetMaxSteps(ts, 10);CHKERRQ(ierr);
+  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
+  ierr = TSSetTime(ts, 0.0);CHKERRQ(ierr);
+  ierr = TSSetRHSFunction(ts, NULL, RHSFunctionParticles, &user);CHKERRQ(ierr);
+
+  ierr = DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
   ierr = DMGetDimension(sw, &dim);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(f, &locSize);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(u, &locSize);CHKERRQ(ierr);
+  Np   = locSize/(2*dim);
   ierr = PetscMalloc1(locSize/2, &idx1);CHKERRQ(ierr);
   ierr = PetscMalloc1(locSize/2, &idx2);CHKERRQ(ierr);
-  Np = locSize/(2*dim);
-
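+  /* idx1 selects the position block (p*2+0)*dim, idx2 the velocity block (p*2+1)*dim, for each particle p */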
   for (p = 0; p < Np; ++p) {
     for (d = 0; d < dim; ++d) {
       idx1[p*dim+d] = (p*2+0)*dim + d;
       idx2[p*dim+d] = (p*2+1)*dim + d;
     }
   }
-  
-  
   ierr = ISCreateGeneral(comm, locSize/2, idx1, PETSC_OWN_POINTER, &is1);CHKERRQ(ierr);
   ierr = ISCreateGeneral(comm, locSize/2, idx2, PETSC_OWN_POINTER, &is2);CHKERRQ(ierr);
-  
-  ierr = TSCreate(comm,&ts);CHKERRQ(ierr);
-  
-  /* DM needs to be set before splits so it propogates to sub TSs */
-  ierr = TSSetDM(ts, sw);CHKERRQ(ierr);
-  ierr = TSSetType(ts,TSBASICSYMPLECTIC);CHKERRQ(ierr);
-
-  ierr = TSRHSSplitSetIS(ts,"position",is1);CHKERRQ(ierr);
-  ierr = TSRHSSplitSetIS(ts,"momentum",is2);CHKERRQ(ierr);
-
+  ierr = TSRHSSplitSetIS(ts, "position", is1);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetIS(ts, "momentum", is2);CHKERRQ(ierr);
+  ierr = ISDestroy(&is1);CHKERRQ(ierr);
+  ierr = ISDestroy(&is2);CHKERRQ(ierr);
   ierr = TSRHSSplitSetRHSFunction(ts,"position",NULL,RHSFunction1,&user);CHKERRQ(ierr);
   ierr = TSRHSSplitSetRHSFunction(ts,"momentum",NULL,RHSFunction2,&user);CHKERRQ(ierr);
 
-  ierr = TSSetRHSFunction(ts,NULL,RHSFunctionParticles,&user);CHKERRQ(ierr);
-
-  ierr = TSSetMaxTime(ts,ftime);CHKERRQ(ierr);
-  ierr = TSSetTimeStep(ts,0.0001);CHKERRQ(ierr);
-  ierr = TSSetMaxSteps(ts,10);CHKERRQ(ierr);
-  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
-  ierr = TSSetTime(ts,0.0);CHKERRQ(ierr);
   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
-
-  ierr = TSSolve(ts,f);CHKERRQ(ierr);
-  ierr = TSGetSolveTime(ts,&ftime);CHKERRQ(ierr);
+  ierr = TSSetComputeInitialCondition(ts, InitializeSolve);CHKERRQ(ierr);
+  ierr = TSSetComputeExactError(ts, ComputeError);CHKERRQ(ierr);
+  ierr = TSComputeInitialCondition(ts, u);CHKERRQ(ierr);
+  ierr = VecViewFromOptions(u, NULL, "-init_view");CHKERRQ(ierr);
+  ierr = TSSolve(ts, u);CHKERRQ(ierr);
+  ierr = TSGetSolveTime(ts, &ftime);CHKERRQ(ierr);
   ierr = TSGetConvergedReason(ts, &reason);CHKERRQ(ierr);
-  ierr = TSGetStepNumber(ts,&steps);CHKERRQ(ierr);
-  ierr = PetscPrintf(comm,"%s at time %g after %D steps\n",TSConvergedReasons[reason],(double)ftime,steps);CHKERRQ(ierr);
-  
-  ierr = VecGetArrayRead(f, &endVals);CHKERRQ(ierr);
-  for(p = 0; p < Np; ++p){
-    vx = endVals[p*2*dim+2];
-    vy = endVals[p*2*dim+3];
-    ierr = PetscPrintf(comm, "Particle %D initial Energy: %g  Final Energy: %g\n", p,(double) (0.5*(1000./(p+1))),(double ) (0.5*(PetscSqr(vx) + PetscSqr(vy))));CHKERRQ(ierr);
-  } 
-  
-  ierr = VecRestoreArrayRead(f, &endVals);CHKERRQ(ierr);
-  ierr = DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &f);CHKERRQ(ierr);
+  ierr = TSGetStepNumber(ts, &steps);CHKERRQ(ierr);
+  ierr = PetscPrintf(comm,"%s at time %g after %D steps\n", TSConvergedReasons[reason], (double) ftime, steps);CHKERRQ(ierr);
+
+  ierr = VecGetArrayRead(u, &endVals);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscReal norm = DMPlex_NormD_Internal(dim, &endVals[(p*2 + 1)*dim]);
+    ierr = PetscPrintf(comm, "Particle %D initial Energy: %g  Final Energy: %g\n", p, (double) (0.5*(1000./(p+1))), (double) 0.5*PetscSqr(norm));CHKERRQ(ierr);
+  }
+  ierr = VecRestoreArrayRead(u, &endVals);CHKERRQ(ierr);
+  ierr = DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
   ierr = TSDestroy(&ts);CHKERRQ(ierr);
-  ierr = ISDestroy(&is1);CHKERRQ(ierr);
-  ierr = ISDestroy(&is2);CHKERRQ(ierr);
-  ierr = DMDestroy(&dm);CHKERRQ(ierr);
   ierr = DMDestroy(&sw);CHKERRQ(ierr);
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
   ierr = PetscFinalize();
   return ierr;
 }
@@ -476,12 +410,12 @@ int main(int argc,char **argv)
      requires: triangle !single !complex
    test:
      suffix: bsi1
-     args: -dim 2 -faces 1 -particlesPerCell 1 -dm_view -sw_view -ts_basicsymplectic_type 1 -ts_monitor_sp_swarm
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -ts_basicsymplectic_type 1 -ts_max_time 0.1 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
    test:
      suffix: bsi2
-     args: -dim 2 -faces 1 -particlesPerCell 1 -dm_view -sw_view -ts_basicsymplectic_type 2 -ts_monitor_sp_swarm
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -ts_basicsymplectic_type 2 -ts_max_time 0.1 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
    test:
-     suffix: euler 
-     args: -dim 2 -faces 1 -particlesPerCell 1 -dm_view -sw_view -ts_type euler -ts_monitor_sp_swarm
+     suffix: euler
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -ts_type euler -ts_max_time 0.1 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
 
 TEST*/
diff --git a/src/dm/impls/swarm/examples/tests/ex6.c b/src/dm/impls/swarm/examples/tests/ex6.c
new file mode 100644
index 00000000000..32a22888ea2
--- /dev/null
+++ b/src/dm/impls/swarm/examples/tests/ex6.c
@@ -0,0 +1,451 @@
+static char help[] = "Vlasov example of many particles orbiting around a several massive points.\n";
+
+#include 
+#include <petsc/private/dmpleximpl.h>  /* For norm */
+#include <petsc/private/petscfeimpl.h> /* For CoordinatesRefToReal() */
+#include 
+#include 
+
+typedef struct {
+  PetscInt  dim;                          /* The topological mesh dimension */
+  PetscBool simplex;                      /* Flag for simplices or tensor cells */
+  char      filename[PETSC_MAX_PATH_LEN]; /* Name of the mesh filename if any */
+  PetscInt  particlesPerCircle;           /* The number of particles per circle */
+  PetscReal momentTol;                    /* Tolerance for checking moment conservation */
+  PetscBool monitor;                      /* Flag for using the TS monitor */
+  PetscBool error;                        /* Flag for printing the error */
+  PetscInt  ostep;                        /* print the energy at each ostep time steps */
+  PetscReal center[6];                    /* Centers of the two orbits */
+  PetscReal radius[2];                    /* Radii of the two orbits */
+} AppCtx;
+
+static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  options->dim                = 2;
+  options->simplex            = PETSC_TRUE;
+  options->monitor            = PETSC_FALSE;
+  options->error              = PETSC_FALSE;
+  options->particlesPerCircle = 1;
+  options->momentTol          = 100.0*PETSC_MACHINE_EPSILON;
+  options->ostep              = 100;
+
+  ierr = PetscStrcpy(options->filename, "");CHKERRQ(ierr);
+
+  ierr = PetscOptionsBegin(comm, "", "Vlasov Options", "DMPLEX");CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-output_step", "Number of time steps between output", "ex4.c", options->ostep, &options->ostep, PETSC_NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex4.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-monitor", "Flag to use the TS monitor", "ex4.c", options->monitor, &options->monitor, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-error", "Flag to print the error", "ex4.c", options->error, &options->error, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsBool("-simplex", "The flag for simplices or tensor cells", "ex4.c", options->simplex, &options->simplex, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsString("-mesh", "Name of the mesh filename if any", "ex4.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsInt("-particles_per_circle", "Number of particles per circle", "ex4.c", options->particlesPerCircle, &options->particlesPerCircle, NULL);CHKERRQ(ierr);
+  ierr = PetscOptionsEnd();CHKERRQ(ierr);
+
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
+{
+  PetscBool      flg;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscStrcmp(user->filename, "", &flg);CHKERRQ(ierr);
+  if (flg) {
+    ierr = DMPlexCreateBoxMesh(comm, user->dim, user->simplex, NULL, NULL, NULL, NULL, PETSC_TRUE, dm);CHKERRQ(ierr);
+  } else {
+    ierr = DMPlexCreateFromFile(comm, user->filename, PETSC_TRUE, dm);CHKERRQ(ierr);
+    ierr = DMGetDimension(*dm, &user->dim);CHKERRQ(ierr);
+  }
+  {
+    DM distributedMesh = NULL;
+
+    ierr = DMPlexDistribute(*dm, 0, NULL, &distributedMesh);CHKERRQ(ierr);
+    if (distributedMesh) {
+      ierr = DMDestroy(dm);CHKERRQ(ierr);
+      *dm  = distributedMesh;
+    }
+  }
+  ierr = DMLocalizeCoordinates(*dm);CHKERRQ(ierr); /* needed for periodic */
+  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) *dm, "Mesh");CHKERRQ(ierr);
+  ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
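+/* Analytic circular orbit of radius radius[c] about center[c]; particle p on circle c is phase-shifted by 2*pi*p/Np */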
+static PetscErrorCode orbit(AppCtx *ctx, PetscInt c, PetscInt p, PetscReal t, PetscReal x[], PetscReal v[])
+{
+  const PetscInt  Np    = ctx->particlesPerCircle;
+  const PetscReal r     = ctx->radius[c];
+  const PetscReal omega = PetscSqrtReal(1000./r)/r;
+  const PetscReal t0    = (2.*PETSC_PI*p)/(Np*omega);
+  const PetscInt  dim   = 2;
+
+  PetscFunctionBeginUser;
+  if (x) {
+    x[0] = r*PetscCosReal(omega*(t + t0)) + ctx->center[c*dim + 0];
+    x[1] = r*PetscSinReal(omega*(t + t0)) + ctx->center[c*dim + 1];
+  }
+  if (v) {
+    v[0] = -r*omega*PetscSinReal(omega*(t + t0));
+    v[1] =  r*omega*PetscCosReal(omega*(t + t0));
+  }
+  PetscFunctionReturn(0);
+}
+
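+/* Linear restoring force toward the circle's center using the same omega as orbit(), so the circular orbits satisfy the equations of motion exactly */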
+static PetscErrorCode force(AppCtx *ctx, PetscInt c, const PetscReal x[], PetscReal force[])
+{
+  const PetscReal r     = ctx->radius[c];
+  const PetscReal omega = PetscSqrtReal(1000./r)/r;
+  const PetscInt  dim   = 2;
+  PetscInt        d;
+
+  PetscFunctionBeginUser;
+  for (d = 0; d < dim; ++d) force[d] = -PetscSqr(omega)*(x[d] - ctx->center[c*dim + d]);
+  PetscFunctionReturn(0);
+}
+
+static PetscReal energy(AppCtx *ctx, PetscInt c)
+{
+  const PetscReal r     = ctx->radius[c];
+  const PetscReal omega = PetscSqrtReal(1000./r)/r;
+
+  return 0.5 * omega * r;
+}
+
+static PetscErrorCode SetInitialCoordinates(DM dmSw)
+{
+  DM                 dm;
+  AppCtx            *ctx;
+  Vec                coordinates;
+  PetscSF            cellSF = NULL;
+  PetscReal         *coords;
+  PetscInt          *cellid;
+  const PetscInt    *found;
+  const PetscSFNode *cells;
+  PetscInt           dim, d, c, Np, p;
+  PetscMPIInt        rank;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetApplicationContext(dmSw, (void **) &ctx);CHKERRQ(ierr);
+  Np   = ctx->particlesPerCircle;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMSwarmGetField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  for (c = 0; c < 2; ++c) {
+    for (d = 0; d < dim; ++d) ctx->center[c*dim+d] = (!c && !d) ? 3.0 : 0.0;
+    ctx->radius[c] = 3.*c+1.;
+    for (p = 0; p < Np; ++p) {
+      const PetscInt n = c*Np + p;
+
+      ierr = orbit(ctx, c, p, 0.0, &coords[n*dim], NULL);CHKERRQ(ierr);
+    }
+  }
+  ierr = DMSwarmRestoreField(dmSw, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+
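+  /* Locate the cell owning each particle and record it in the swarm's cellid field */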
+  ierr = DMSwarmCreateGlobalVectorFromField(dmSw, DMSwarmPICField_coor, &coordinates);CHKERRQ(ierr);
+  ierr = DMLocatePoints(dm, coordinates, DM_POINTLOCATION_NONE, &cellSF);CHKERRQ(ierr);
+  ierr = DMSwarmDestroyGlobalVectorFromField(dmSw, DMSwarmPICField_coor, &coordinates);CHKERRQ(ierr);
+
+  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dmSw), &rank);CHKERRQ(ierr);
+  ierr = DMSwarmGetField(dmSw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
+  ierr = PetscSFGetGraph(cellSF, NULL, &Np, &found, &cells);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscInt part = found ? found[p] : p;
+
+    if (cells[p].rank != rank) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Point %D not found in the mesh", part);
+    cellid[part] = cells[p].index;
+  }
+  ierr = DMSwarmRestoreField(dmSw, DMSwarmPICField_cellid, NULL, NULL, (void **) &cellid);CHKERRQ(ierr);
+  ierr = PetscSFDestroy(&cellSF);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+static PetscErrorCode SetInitialConditions(DM dmSw, Vec u)
+{
+  DM             dm;
+  AppCtx        *ctx;
+  PetscScalar   *initialConditions;
+  PetscInt       dim, cStart, cEnd, c, Np, p;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetApplicationContext(dmSw, (void **) &ctx);CHKERRQ(ierr);
+  Np   = ctx->particlesPerCircle;
+  ierr = DMSwarmGetCellDM(dmSw, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
+  ierr = VecGetArray(u, &initialConditions);CHKERRQ(ierr);
+  for (c = 0; c < 2; ++c) {
+    for (p = 0; p < Np; ++p) {
+      const PetscInt n = c*Np + p;
+
+      ierr = orbit(ctx, c, p, 0.0, &initialConditions[(n*2 + 0)*dim], &initialConditions[(n*2 + 1)*dim]);CHKERRQ(ierr);
+    }
+  }
+  ierr = VecRestoreArray(u, &initialConditions);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
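+/* Create a DMSWARM_PIC swarm over the mesh with 2*Np particles and a "kinematics" field holding position and velocity per particle */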
+static PetscErrorCode CreateParticles(DM dm, DM *sw, AppCtx *user)
+{
+  PetscInt       dim, Np = user->particlesPerCircle;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = DMCreate(PetscObjectComm((PetscObject) dm), sw);CHKERRQ(ierr);
+  ierr = DMSetType(*sw, DMSWARM);CHKERRQ(ierr);
+  ierr = DMSetDimension(*sw, dim);CHKERRQ(ierr);
+
+  ierr = DMSwarmSetType(*sw, DMSWARM_PIC);CHKERRQ(ierr);
+  ierr = DMSwarmSetCellDM(*sw, dm);CHKERRQ(ierr);
+  ierr = DMSwarmRegisterPetscDatatypeField(*sw, "kinematics", 2*dim, PETSC_REAL);CHKERRQ(ierr);
+  ierr = DMSwarmFinalizeFieldRegister(*sw);CHKERRQ(ierr);
+  ierr = DMSwarmSetLocalSizes(*sw, 2 * Np, 0);CHKERRQ(ierr);
+  ierr = DMSetFromOptions(*sw);CHKERRQ(ierr);
+  ierr = PetscObjectSetName((PetscObject) *sw, "Particles");CHKERRQ(ierr);
+  ierr = DMViewFromOptions(*sw, NULL, "-sw_view");CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+/* Create particle RHS Functions for gravity with G = 1 for simplification */
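+/* Position residual for the "position" split: dx/dt = v */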
+static PetscErrorCode RHSFunction1(TS ts, PetscReal t, Vec V, Vec Xres, void *ctx)
+{
+  DM                dm;
+  const PetscScalar *v;
+  PetscScalar       *xres;
+  PetscInt          Np, p, dim, d;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(Xres, &Np);CHKERRQ(ierr);
+  Np  /= dim;
+  ierr = VecGetArray(Xres, &xres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(V, &v);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    for (d = 0; d < dim; ++d) xres[p*dim+d] = v[p*dim+d];
+  }
+  ierr = VecRestoreArrayRead(V, &v);CHKERRQ(ierr);
+  ierr = VecRestoreArray(Xres, &xres);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
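+/* Velocity residual for the "momentum" split: dv/dt = force(x) */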
+static PetscErrorCode RHSFunction2(TS ts, PetscReal t, Vec X, Vec Vres, void *user)
+{
+  AppCtx           *ctx = (AppCtx *) user;
+  DM                dm;
+  const PetscScalar *x;
+  PetscScalar       *vres;
+  PetscInt          Np, p, dim;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(Vres, &Np);CHKERRQ(ierr);
+  Np  /= dim;
+  ierr = VecGetArray(Vres, &vres);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(X, &x);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscInt c = p / ctx->particlesPerCircle;
+
+    ierr = force(ctx, c, &x[p*dim], &vres[p*dim]);CHKERRQ(ierr);
+  }
+  ierr = VecRestoreArray(Vres, &vres);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(X, &x);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
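+/* Unsplit residual for the full (x, v) system, set with TSSetRHSFunction() so non-split integrators such as -ts_type euler also work */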
+static PetscErrorCode RHSFunctionParticles(TS ts, PetscReal t, Vec U, Vec R, void *user)
+{
+  AppCtx           *ctx = (AppCtx *) user;
+  DM                dm;
+  const PetscScalar *u;
+  PetscScalar       *r;
+  PetscInt          Np, p, dim, d;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
+  Np  /= 2*dim;
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetArray(R, &r);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    const PetscInt c = p / ctx->particlesPerCircle;
+
+    for (d = 0; d < dim; ++d) r[(p*2 + 0)*dim + d] = u[(p*2 + 1)*dim + d];
+    ierr = force(ctx, c, &u[(p*2 + 0)*dim], &r[(p*2 + 1)*dim]);CHKERRQ(ierr);
+  }
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(R, &r);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
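+/* TSSetComputeInitialCondition() callback: set particle coordinates and the initial (x, v) state from the analytic orbit */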
+static PetscErrorCode InitializeSolve(TS ts, Vec u)
+{
+  DM             dm;
+  AppCtx        *user;
+  PetscErrorCode ierr;
+
+  PetscFunctionBeginUser;
+  ierr = TSGetDM(ts, &dm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(dm, (void **) &user);CHKERRQ(ierr);
+  ierr = SetInitialCoordinates(dm);CHKERRQ(ierr);
+  ierr = SetInitialConditions(dm, u);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
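+/* TSSetComputeExactError() callback: compare the computed (x, v) against the analytic orbit at the solve time */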
+static PetscErrorCode ComputeError(TS ts, Vec U, Vec E)
+{
+  MPI_Comm           comm;
+  DM                 sdm;
+  AppCtx            *ctx;
+  const PetscScalar *u, *coords;
+  PetscScalar       *e;
+  PetscReal          t;
+  PetscInt           dim, Np, p, c;
+  PetscErrorCode     ierr;
+
+  PetscFunctionBeginUser;
+  ierr = PetscObjectGetComm((PetscObject) ts, &comm);CHKERRQ(ierr);
+  ierr = TSGetDM(ts, &sdm);CHKERRQ(ierr);
+  ierr = DMGetApplicationContext(sdm, (void **) &ctx);CHKERRQ(ierr);
+  ierr = DMGetDimension(sdm, &dim);CHKERRQ(ierr);
+  ierr = TSGetSolveTime(ts, &t);CHKERRQ(ierr);
+  ierr = VecGetArray(E, &e);CHKERRQ(ierr);
+  ierr = VecGetArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(U, &Np);CHKERRQ(ierr);
+  Np  /= 2*dim*2;
+  ierr = DMSwarmGetField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  for (c = 0; c < 2; ++c) {
+    for (p = 0; p < Np; ++p) {
+      const PetscInt     n = c*Np + p;
+      const PetscScalar *x = &u[(n*2+0)*dim];
+      const PetscScalar *v = &u[(n*2+1)*dim];
+      PetscReal          xe[3], ve[3];
+      PetscInt           d;
+
+      ierr = orbit(ctx, c, p, t, xe, ve);CHKERRQ(ierr);
+      for (d = 0; d < dim; ++d) {
+        e[(p*2+0)*dim+d] = x[d] - xe[d];
+        e[(p*2+1)*dim+d] = v[d] - ve[d];
+      }
+      if (ctx->error) {ierr = PetscPrintf(comm, "p%D error [%.2g %.2g] sol [(%.6lf %.6lf) (%.6lf %.6lf)] exact [(%.6lf %.6lf) (%.6lf %.6lf)] energy/exact energy %g / %g\n", p, (double) DMPlex_NormD_Internal(dim, &e[(p*2+0)*dim]), (double) DMPlex_NormD_Internal(dim, &e[(p*2+1)*dim]), (double) x[0], (double) x[1], (double) v[0], (double) v[1], (double) xe[0], (double) xe[1], (double) ve[0], (double) ve[1], 0.5*DMPlex_NormD_Internal(dim, v), (double) energy(ctx, c));}
+    }
+  }
+  ierr = DMSwarmRestoreField(sdm, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
+  ierr = VecRestoreArrayRead(U, &u);CHKERRQ(ierr);
+  ierr = VecRestoreArray(E, &e);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+int main(int argc, char **argv)
+{
+  TS                 ts;
+  TSConvergedReason  reason;
+  DM                 dm, sw;
+  Vec                u;
+  IS                 is1, is2;
+  PetscInt          *idx1, *idx2;
+  MPI_Comm           comm;
+  AppCtx             user;
+  const PetscScalar *endVals;
+  PetscReal          ftime   = .1;
+  PetscInt           locSize, dim, d, Np, p, c, steps;
+  PetscErrorCode     ierr;
+
+  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
+  comm = PETSC_COMM_WORLD;
+  ierr = ProcessOptions(comm, &user);CHKERRQ(ierr);
+
+  ierr = CreateMesh(comm, &dm, &user);CHKERRQ(ierr);
+  ierr = CreateParticles(dm, &sw, &user);CHKERRQ(ierr);
+  ierr = DMSetApplicationContext(sw, &user);CHKERRQ(ierr);
+
+  ierr = TSCreate(comm, &ts);CHKERRQ(ierr);
+  ierr = TSSetType(ts, TSBASICSYMPLECTIC);CHKERRQ(ierr);
+  ierr = TSSetDM(ts, sw);CHKERRQ(ierr);
+  ierr = TSSetMaxTime(ts, ftime);CHKERRQ(ierr);
+  ierr = TSSetTimeStep(ts, 0.0001);CHKERRQ(ierr);
+  ierr = TSSetMaxSteps(ts, 10);CHKERRQ(ierr);
+  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
+  ierr = TSSetTime(ts, 0.0);CHKERRQ(ierr);
+  ierr = TSSetRHSFunction(ts, NULL, RHSFunctionParticles, &user);CHKERRQ(ierr);
+
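+  /* The solution vector is backed by the swarm "kinematics" field, which interleaves position and velocity for each particle */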
+  ierr = DMSwarmCreateGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
+  ierr = DMGetDimension(sw, &dim);CHKERRQ(ierr);
+  ierr = VecGetLocalSize(u, &locSize);CHKERRQ(ierr);
+  Np   = locSize/(2*dim);
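+  /* Build index sets for the RHS splits: block 2n of the kinematics vector holds positions, block 2n+1 holds velocities */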
+  ierr = PetscMalloc1(locSize/2, &idx1);CHKERRQ(ierr);
+  ierr = PetscMalloc1(locSize/2, &idx2);CHKERRQ(ierr);
+  for (p = 0; p < Np; ++p) {
+    for (d = 0; d < dim; ++d) {
+      idx1[p*dim+d] = (p*2+0)*dim + d;
+      idx2[p*dim+d] = (p*2+1)*dim + d;
+    }
+  }
+  ierr = ISCreateGeneral(comm, locSize/2, idx1, PETSC_OWN_POINTER, &is1);CHKERRQ(ierr);
+  ierr = ISCreateGeneral(comm, locSize/2, idx2, PETSC_OWN_POINTER, &is2);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetIS(ts, "position", is1);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetIS(ts, "momentum", is2);CHKERRQ(ierr);
+  ierr = ISDestroy(&is1);CHKERRQ(ierr);
+  ierr = ISDestroy(&is2);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetRHSFunction(ts, "position", NULL, RHSFunction1, &user);CHKERRQ(ierr);
+  ierr = TSRHSSplitSetRHSFunction(ts, "momentum", NULL, RHSFunction2, &user);CHKERRQ(ierr);
+
+  ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
+  ierr = TSSetComputeInitialCondition(ts, InitializeSolve);CHKERRQ(ierr);
+  ierr = TSSetComputeExactError(ts, ComputeError);CHKERRQ(ierr);
+  ierr = TSComputeInitialCondition(ts, u);CHKERRQ(ierr);
+  ierr = VecViewFromOptions(u, NULL, "-init_view");CHKERRQ(ierr);
+  ierr = TSSolve(ts, u);CHKERRQ(ierr);
+  ierr = TSGetSolveTime(ts, &ftime);CHKERRQ(ierr);
+  ierr = TSGetConvergedReason(ts, &reason);CHKERRQ(ierr);
+  ierr = TSGetStepNumber(ts, &steps);CHKERRQ(ierr);
+  ierr = PetscPrintf(comm, "%s at time %g after %D steps\n", TSConvergedReasons[reason], (double) ftime, steps);CHKERRQ(ierr);
+
+  ierr = VecGetArrayRead(u, &endVals);CHKERRQ(ierr);
+  for (c = 0; c < 2; ++c) {
+    for (p = 0; p < Np/2; ++p) {
+      const PetscInt  n    = c*(Np/2) + p;
+      const PetscReal norm = DMPlex_NormD_Internal(dim, &endVals[(n*2 + 1)*dim]);
+      ierr = PetscPrintf(comm, "Particle %D initial Energy: %g  Final Energy: %g\n", p, (double) (0.5*(1000./(3*c+1.))), (double) (0.5*PetscSqr(norm)));CHKERRQ(ierr);
+    }
+  }
+  ierr = VecRestoreArrayRead(u, &endVals);CHKERRQ(ierr);
+  ierr = DMSwarmDestroyGlobalVectorFromField(sw, "kinematics", &u);CHKERRQ(ierr);
+  ierr = TSDestroy(&ts);CHKERRQ(ierr);
+  ierr = DMDestroy(&sw);CHKERRQ(ierr);
+  ierr = DMDestroy(&dm);CHKERRQ(ierr);
+  ierr = PetscFinalize();
+  return ierr;
+}
+
+
+/*TEST
+
+   build:
+     requires: triangle !single !complex
+   test:
+     suffix: bsi1
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -particles_per_circle 5 -ts_basicsymplectic_type 1 -ts_max_time 0.1 -ts_dt 0.001 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
+   test:
+     suffix: bsi2
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -particles_per_circle 5 -ts_basicsymplectic_type 2 -ts_max_time 0.1 -ts_dt 0.001 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
+   test:
+     suffix: euler
+     args: -dim 2 -dm_plex_box_faces 1,1 -dm_view -sw_view -particles_per_circle 5 -ts_type euler -ts_max_time 0.1 -ts_dt 0.001 -ts_monitor_sp_swarm -ts_convergence_estimate -convest_num_refine 2
+
+TEST*/
diff --git a/src/dm/impls/swarm/examples/tests/output/ex1_proj_0.out b/src/dm/impls/swarm/examples/tests/output/ex1_proj_0.out
index 254a2c81757..2b7316c224a 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex1_proj_0.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex1_proj_0.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Mesh 1 MPI processes
   type: plex
 Mesh in 2 dimensions:
diff --git a/src/dm/impls/swarm/examples/tests/output/ex1_proj_1.out b/src/dm/impls/swarm/examples/tests/output/ex1_proj_1.out
index ce9090aba56..43a9561c639 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex1_proj_1.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex1_proj_1.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Mesh 1 MPI processes
   type: plex
 Mesh in 2 dimensions:
diff --git a/src/dm/impls/swarm/examples/tests/output/ex1_proj_2.out b/src/dm/impls/swarm/examples/tests/output/ex1_proj_2.out
index 92b17c2b625..5a2f8c547bf 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex1_proj_2.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex1_proj_2.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Mesh 1 MPI processes
   type: plex
 Mesh in 3 dimensions:
diff --git a/src/dm/impls/swarm/examples/tests/output/ex1_proj_3.out b/src/dm/impls/swarm/examples/tests/output/ex1_proj_3.out
index ce9090aba56..43a9561c639 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex1_proj_3.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex1_proj_3.out
@@ -1,5 +1,3 @@
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Mesh 1 MPI processes
   type: plex
 Mesh in 2 dimensions:
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_0.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_0.out
index 37024489fe5..9154e72eac0 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_0.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_0.out
@@ -8,8 +8,6 @@ Labels:
   Face Sets: 1 strata with value/size (1 (4))
   marker: 1 strata with value/size (1 (8))
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_0.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_0.out
index 446fd0ad832..bcd81926df6 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_0.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_0.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   Face Sets: 4 strata with value/size (4 (1), 2 (1), 1 (1), 3 (1))
   depth: 3 strata with value/size (0 (4), 1 (4), 2 (1))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces.out
index b2e946c03d0..5a3d9f435fd 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   Face Sets: 4 strata with value/size (4 (2), 2 (2), 1 (2), 3 (2))
   depth: 3 strata with value/size (0 (9), 1 (12), 2 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_2.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_2.out
index 2858147f367..8624ffd569e 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_2.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_2.out
@@ -8,8 +8,6 @@ Labels:
   depth: 3 strata with value/size (0 (9), 1 (12), 2 (4))
   Face Sets: 4 strata with value/size (4 (2), 2 (2), 1 (2), 3 (2))
   marker: 1 strata with value/size (1 (16))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_3.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_3.out
index 2858147f367..8624ffd569e 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_3.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_2_faces_quad_order_3.out
@@ -8,8 +8,6 @@ Labels:
   depth: 3 strata with value/size (0 (9), 1 (12), 2 (4))
   Face Sets: 4 strata with value/size (4 (2), 2 (2), 1 (2), 3 (2))
   marker: 1 strata with value/size (1 (16))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_5P.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_5P.out
index 52d7262c12f..3936e12d012 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_5P.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_quad_5P.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   Face Sets: 4 strata with value/size (4 (1), 2 (1), 1 (1), 3 (1))
   depth: 3 strata with value/size (0 (4), 1 (4), 2 (1))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_0.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_0.out
index 4c099f225cc..0491b4b13d2 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_0.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_0.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces.out
index d9e89b8fbea..a01353f5dfb 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
   Face Sets: 1 strata with value/size (1 (8))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces_quad_order_3.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces_quad_order_3.out
index 36e738d8045..26e822658fa 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces_quad_order_3.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_2_faces_quad_order_3.out
@@ -8,8 +8,6 @@ Labels:
   Face Sets: 1 strata with value/size (1 (8))
   marker: 1 strata with value/size (1 (16))
   depth: 3 strata with value/size (0 (9), 1 (16), 2 (8))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d.out
index 73644aa6c52..2401a03e6b8 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 6
 Labels:
   depth: 4 strata with value/size (0 (8), 1 (19), 2 (18), 3 (6))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_2_faces.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_2_faces.out
index 34f55d6cf01..e7550fefe7b 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_2_faces.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_2_faces.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 48
 Labels:
   depth: 4 strata with value/size (0 (27), 1 (98), 2 (120), 3 (48))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_5P.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_5P.out
index 91320e8683a..b47818cb38b 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_5P.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_5P.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 6
 Labels:
   depth: 4 strata with value/size (0 (8), 1 (19), 2 (18), 3 (6))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx.out
index eb6433f8efc..c3a4350bb92 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 6
 Labels:
   depth: 4 strata with value/size (0 (8), 1 (19), 2 (18), 3 (6))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_2_faces.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_2_faces.out
index 1a849346998..b63266e9cd2 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_2_faces.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_2_faces.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 48
 Labels:
   depth: 4 strata with value/size (0 (27), 1 (98), 2 (120), 3 (48))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P.out
index 928822879e5..f7220f79465 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 6
 Labels:
   depth: 4 strata with value/size (0 (8), 1 (19), 2 (18), 3 (6))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P_2_faces.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P_2_faces.out
index 81f208f7601..7db3347e97c 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P_2_faces.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_3d_mdx_5P_2_faces.out
@@ -7,8 +7,6 @@ Mesh in 3 dimensions:
   3-cells: 48
 Labels:
   depth: 4 strata with value/size (0 (27), 1 (98), 2 (120), 3 (48))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_5P.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_5P.out
index 1336dac1d71..8a4c76466fe 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_5P.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_5P.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx.out
index bb614258420..10f779dc2bc 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx_5P.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx_5P.out
index dabc5b4fb42..aa637ee4d30 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx_5P.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_mdx_5P.out
@@ -7,8 +7,6 @@ Mesh in 2 dimensions:
 Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_quad_order_3.out b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_quad_order_3.out
index 30d7a3e1b38..835cba03ec7 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_quad_order_3.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex2_proj_tri_quad_order_3.out
@@ -8,8 +8,6 @@ Labels:
   Face Sets: 1 strata with value/size (1 (4))
   marker: 1 strata with value/size (1 (8))
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_1.out b/src/dm/impls/swarm/examples/tests/output/ex4_1.out
index 46535fa2b75..c85998fead2 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex4_1.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_1.out
@@ -8,47 +8,79 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
-DMSwarmDataBucketView: 
-  L                  = 2 
-  buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
     [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
+                            blocksize        = 1
+                            atomic size      = 8
     [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
+                            blocksize        = 1
+                            atomic size      = 4
     [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
+                            atomic size/item = 8
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
-  Total mem. usage                           = 1.12e-04 (MB) (collective)
-Energy: 81.92
-Modified energy: 81.92
-Energy: 2949.12
-Modified energy: 2949.12
-Energy: 81.9167
-Modified energy: 81.92
-Energy: 2949.
-Modified energy: 2949.12
-The particle solution for (xp, vp) at time 0.001000 is [0.199586 -0.818641]
-The exact solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-Exact Particle Energy: 81.92
-The particle solution for (xp, vp) at time 0.001000 is [1.19752 -4.91185]
-The exact solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-Exact Particle Energy: 2949.12
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1018   455.1111
+0.000500   50    1  1820.4072  1820.4444
+0.001000  100    0   455.0925   455.1111
+0.001000  100    1  1820.3701  1820.4444
+p0 error [9.6e-06 1.3e-07] sol [0.470430 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.093 / 455.111
+p1 error [1.9e-05 2.6e-07] sol [0.940860 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.37 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000250   50    0   455.1088   455.1111
+0.000250   50    1  1820.4351  1820.4444
+0.000500  100    0   455.1065   455.1111
+0.000500  100    1  1820.4258  1820.4444
+0.000750  150    0   455.1041   455.1111
+0.000750  150    1  1820.4165  1820.4444
+0.001000  200    0   455.1018   455.1111
+0.001000  200    1  1820.4073  1820.4444
+p0 error [4.8e-06 3.3e-08] sol [0.470435 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.102 / 455.111
+p1 error [9.6e-06 6.6e-08] sol [0.940869 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.41 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000125   50    0   455.1105   455.1111
+0.000125   50    1  1820.4421  1820.4444
+0.000250  100    0   455.1099   455.1111
+0.000250  100    1  1820.4398  1820.4444
+0.000375  150    0   455.1094   455.1111
+0.000375  150    1  1820.4375  1820.4444
+0.000500  200    0   455.1088   455.1111
+0.000500  200    1  1820.4351  1820.4444
+0.000625  250    0   455.1082   455.1111
+0.000625  250    1  1820.4328  1820.4444
+0.000750  300    0   455.1076   455.1111
+0.000750  300    1  1820.4305  1820.4444
+0.000875  350    0   455.1070   455.1111
+0.000875  350    1  1820.4282  1820.4444
+0.001000  400    0   455.1065   455.1111
+0.001000  400    1  1820.4259  1820.4444
+p0 error [2.4e-06 8.2e-09] sol [0.470437 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.106 / 455.111
+p1 error [4.8e-06 1.6e-08] sol [0.940874 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.43 / 1820.44
+L_2 convergence rate: 1.0
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1018   455.1111
+0.000500   50    1  1820.4072  1820.4444
+0.001000  100    0   455.0925   455.1111
+0.001000  100    1  1820.3701  1820.4444
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_1_alt.out b/src/dm/impls/swarm/examples/tests/output/ex4_1_alt.out
index c904b8bea05..9191333b08e 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex4_1_alt.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_1_alt.out
@@ -8,47 +8,79 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
-DMSwarmDataBucketView: 
-  L                  = 2 
-  buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
     [  0]     DMSwarm_pid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
+                            blocksize        = 1
+                            atomic size      = 4
     [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
+                            blocksize        = 1
+                            atomic size      = 4
     [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
+                            atomic size/item = 8
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
+                            atomic size/item = 8
   Total mem. usage                           = 1.04e-04 (MB) (collective)
-Energy: 81.92
-Modified energy: 81.92
-Energy: 2949.12
-Modified energy: 2949.12
-Energy: 81.9167
-Modified energy: 81.92
-Energy: 2949.
-Modified energy: 2949.12
-The particle solution for (xp, vp) at time 0.001000 is [0.199586 -0.818641]
-The exact solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-Exact Particle Energy: 81.92
-The particle solution for (xp, vp) at time 0.001000 is [1.19752 -4.91185]
-The exact solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-Exact Particle Energy: 2949.12
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1018   455.1111
+0.000500   50    1  1820.4072  1820.4444
+0.001000  100    0   455.0925   455.1111
+0.001000  100    1  1820.3701  1820.4444
+p0 error [9.6e-06 1.3e-07] sol [0.470430 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.093 / 455.111
+p1 error [1.9e-05 2.6e-07] sol [0.940860 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.37 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000250   50    0   455.1088   455.1111
+0.000250   50    1  1820.4351  1820.4444
+0.000500  100    0   455.1065   455.1111
+0.000500  100    1  1820.4258  1820.4444
+0.000750  150    0   455.1041   455.1111
+0.000750  150    1  1820.4165  1820.4444
+0.001000  200    0   455.1018   455.1111
+0.001000  200    1  1820.4073  1820.4444
+p0 error [4.8e-06 3.3e-08] sol [0.470435 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.102 / 455.111
+p1 error [9.6e-06 6.6e-08] sol [0.940869 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.41 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000125   50    0   455.1105   455.1111
+0.000125   50    1  1820.4421  1820.4444
+0.000250  100    0   455.1099   455.1111
+0.000250  100    1  1820.4398  1820.4444
+0.000375  150    0   455.1094   455.1111
+0.000375  150    1  1820.4375  1820.4444
+0.000500  200    0   455.1088   455.1111
+0.000500  200    1  1820.4351  1820.4444
+0.000625  250    0   455.1082   455.1111
+0.000625  250    1  1820.4328  1820.4444
+0.000750  300    0   455.1076   455.1111
+0.000750  300    1  1820.4305  1820.4444
+0.000875  350    0   455.1070   455.1111
+0.000875  350    1  1820.4282  1820.4444
+0.001000  400    0   455.1065   455.1111
+0.001000  400    1  1820.4259  1820.4444
+p0 error [2.4e-06 8.2e-09] sol [0.470437 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.106 / 455.111
+p1 error [4.8e-06 1.6e-08] sol [0.940874 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.43 / 1820.44
+L_2 convergence rate: 1.0
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1018   455.1111
+0.000500   50    1  1820.4072  1820.4444
+0.001000  100    0   455.0925   455.1111
+0.001000  100    1  1820.3701  1820.4444
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_2.out b/src/dm/impls/swarm/examples/tests/output/ex4_2.out
index 8ee1c53b100..b6fb47e14ec 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex4_2.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_2.out
@@ -8,47 +8,79 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
-DMSwarmDataBucketView: 
-  L                  = 2 
-  buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
     [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
+                            blocksize        = 1
+                            atomic size      = 8
     [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
+                            blocksize        = 1
+                            atomic size      = 4
     [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
+                            atomic size/item = 8
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
                             atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
-  Total mem. usage                           = 1.12e-04 (MB) (collective)
-Energy: 81.92
-Modified energy: 81.92
-Energy: 2949.12
-Modified energy: 2949.12
-Energy: 81.92
-Modified energy: 81.9233
-Energy: 2949.12
-Modified energy: 2949.24
-The particle solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-The exact solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-Exact Particle Energy: 81.92
-The particle solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-The exact solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-Exact Particle Energy: 2949.12
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1111   455.1204
+0.000500   50    1  1820.4444  1820.4817
+0.001000  100    0   455.1111   455.1297
+0.001000  100    1  1820.4444  1820.5188
+p0 error [3.3e-11 1.3e-07] sol [0.470439 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.111 / 455.111
+p1 error [6.6e-11 2.6e-07] sol [0.940879 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000250   50    0   455.1111   455.1134
+0.000250   50    1  1820.4444  1820.4538
+0.000500  100    0   455.1111   455.1158
+0.000500  100    1  1820.4444  1820.4631
+0.000750  150    0   455.1111   455.1181
+0.000750  150    1  1820.4444  1820.4724
+0.001000  200    0   455.1111   455.1204
+0.001000  200    1  1820.4444  1820.4816
+p0 error [8.2e-12 3.3e-08] sol [0.470439 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.111 / 455.111
+p1 error [1.6e-11 6.6e-08] sol [0.940879 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000125   50    0   455.1111   455.1117
+0.000125   50    1  1820.4444  1820.4468
+0.000250  100    0   455.1111   455.1123
+0.000250  100    1  1820.4444  1820.4491
+0.000375  150    0   455.1111   455.1129
+0.000375  150    1  1820.4444  1820.4514
+0.000500  200    0   455.1111   455.1134
+0.000500  200    1  1820.4444  1820.4538
+0.000625  250    0   455.1111   455.1140
+0.000625  250    1  1820.4444  1820.4561
+0.000750  300    0   455.1111   455.1146
+0.000750  300    1  1820.4444  1820.4584
+0.000875  350    0   455.1111   455.1152
+0.000875  350    1  1820.4444  1820.4607
+0.001000  400    0   455.1111   455.1158
+0.001000  400    1  1820.4444  1820.4630
+p0 error [2.1e-12 8.2e-09] sol [0.470439 -1.929555] exact [0.470439 -1.929555] energy/exact energy 455.111 / 455.111
+p1 error [4.1e-12 1.6e-08] sol [0.940879 -3.859110] exact [0.940879 -3.859110] energy/exact energy 1820.44 / 1820.44
+L_2 convergence rate: 2.0
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.000500   50    0   455.1111   455.1204
+0.000500   50    1  1820.4444  1820.4817
+0.001000  100    0   455.1111   455.1297
+0.001000  100    1  1820.4444  1820.5188
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_2_alt.out b/src/dm/impls/swarm/examples/tests/output/ex4_2_alt.out
deleted file mode 100644
index d5bc16779bd..00000000000
--- a/src/dm/impls/swarm/examples/tests/output/ex4_2_alt.out
+++ /dev/null
@@ -1,54 +0,0 @@
-DM Object: Mesh 1 MPI processes
-  type: plex
-Mesh in 2 dimensions:
-  0-cells: 4
-  1-cells: 5
-  2-cells: 2
-Labels:
-  depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
-  marker: 1 strata with value/size (1 (8))
-  Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
-DM Object: Particles 1 MPI processes
-  type: swarm
-DMSwarmDataBucketView: 
-  L                  = 2 
-  buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
-    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
-                            atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
-    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
-                            blocksize        = 2 
-                            atomic size      = 16 [full block, bs=2]
-                            atomic size/item = 8 
-  Total mem. usage                           = 1.04e-04 (MB) (collective)
-Energy: 81.92
-Modified energy: 81.92
-Energy: 2949.12
-Modified energy: 2949.12
-Energy: 81.92
-Modified energy: 81.9233
-Energy: 2949.12
-Modified energy: 2949.24
-The particle solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-The exact solution for (xp, vp) at time 0.001000 is [0.199591 -0.818641]
-Exact Particle Energy: 81.92
-The particle solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-The exact solution for (xp, vp) at time 0.001000 is [1.19754 -4.91185]
-Exact Particle Energy: 2949.12
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_3.out b/src/dm/impls/swarm/examples/tests/output/ex4_3.out
new file mode 100644
index 00000000000..3f83f5e380e
--- /dev/null
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_3.out
@@ -0,0 +1,86 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 4
+  1-cells: 5
+  2-cells: 2
+Labels:
+  depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
+  marker: 1 strata with value/size (1 (8))
+  Face Sets: 1 strata with value/size (1 (4))
+DM Object: Particles 1 MPI processes
+  type: swarm
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
+    [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 8
+    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4445  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4445  1826.0253
+p0 error [2e-09 1.7e-10] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [4.1e-09 3.3e-10] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.002500   50    0   455.1111   455.3402
+0.002500   50    1  1820.4444  1821.3607
+0.005000  100    0   455.1111   455.5460
+0.005000  100    1  1820.4444  1822.1839
+0.007500  150    0   455.1111   455.7076
+0.007500  150    1  1820.4444  1822.8305
+0.010000  200    0   455.1111   455.8087
+0.010000  200    1  1820.4444  1823.2349
+p0 error [2.6e-10 1e-11] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [5.1e-10 2.1e-11] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.001250   50    0   455.1111   455.1691
+0.001250   50    1  1820.4444  1820.6765
+0.002500  100    0   455.1111   455.2256
+0.002500  100    1  1820.4444  1820.9026
+0.003750  150    0   455.1111   455.2792
+0.003750  150    1  1820.4444  1821.1170
+0.005000  200    0   455.1111   455.3285
+0.005000  200    1  1820.4444  1821.3142
+0.006250  250    0   455.1111   455.3723
+0.006250  250    1  1820.4444  1821.4892
+0.007500  300    0   455.1111   455.4094
+0.007500  300    1  1820.4444  1821.6375
+0.008750  350    0   455.1111   455.4388
+0.008750  350    1  1820.4444  1821.7553
+0.010000  400    0   455.1111   455.4599
+0.010000  400    1  1820.4444  1821.8397
+p0 error [3.2e-11 7.9e-13] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [6.4e-11 1.6e-12] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+L_2 convergence rate: 3.0
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4445  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4445  1826.0253
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_3_alt.out b/src/dm/impls/swarm/examples/tests/output/ex4_3_alt.out
new file mode 100644
index 00000000000..07a6fea3feb
--- /dev/null
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_3_alt.out
@@ -0,0 +1,86 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 4
+  1-cells: 5
+  2-cells: 2
+Labels:
+  depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
+  marker: 1 strata with value/size (1 (8))
+  Face Sets: 1 strata with value/size (1 (4))
+DM Object: Particles 1 MPI processes
+  type: swarm
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
+    [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4445  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4445  1826.0253
+p0 error [2e-09 1.7e-10] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [4.1e-09 3.3e-10] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.002500   50    0   455.1111   455.3402
+0.002500   50    1  1820.4444  1821.3607
+0.005000  100    0   455.1111   455.5460
+0.005000  100    1  1820.4444  1822.1839
+0.007500  150    0   455.1111   455.7076
+0.007500  150    1  1820.4444  1822.8305
+0.010000  200    0   455.1111   455.8087
+0.010000  200    1  1820.4444  1823.2349
+p0 error [2.6e-10 1e-11] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [5.1e-10 2.1e-11] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.001250   50    0   455.1111   455.1691
+0.001250   50    1  1820.4444  1820.6765
+0.002500  100    0   455.1111   455.2256
+0.002500  100    1  1820.4444  1820.9026
+0.003750  150    0   455.1111   455.2792
+0.003750  150    1  1820.4444  1821.1170
+0.005000  200    0   455.1111   455.3285
+0.005000  200    1  1820.4444  1821.3142
+0.006250  250    0   455.1111   455.3723
+0.006250  250    1  1820.4444  1821.4892
+0.007500  300    0   455.1111   455.4094
+0.007500  300    1  1820.4444  1821.6375
+0.008750  350    0   455.1111   455.4388
+0.008750  350    1  1820.4444  1821.7553
+0.010000  400    0   455.1111   455.4599
+0.010000  400    1  1820.4444  1821.8397
+p0 error [3.2e-11 7.9e-13] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [6.4e-11 1.6e-12] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+L_2 convergence rate: 3.0
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4445  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4445  1826.0253
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_4.out b/src/dm/impls/swarm/examples/tests/output/ex4_4.out
new file mode 100644
index 00000000000..81104ed637d
--- /dev/null
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_4.out
@@ -0,0 +1,86 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 4
+  1-cells: 5
+  2-cells: 2
+Labels:
+  depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
+  marker: 1 strata with value/size (1 (8))
+  Face Sets: 1 strata with value/size (1 (4))
+DM Object: Particles 1 MPI processes
+  type: swarm
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
+    [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 8
+    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4444  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4444  1826.0253
+p0 error [2e-11 5.7e-10] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [4e-11 1.1e-09] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.002500   50    0   455.1111   455.3402
+0.002500   50    1  1820.4444  1821.3607
+0.005000  100    0   455.1111   455.5460
+0.005000  100    1  1820.4444  1822.1839
+0.007500  150    0   455.1111   455.7076
+0.007500  150    1  1820.4444  1822.8305
+0.010000  200    0   455.1111   455.8087
+0.010000  200    1  1820.4444  1823.2349
+p0 error [1.2e-12 3.5e-11] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [2.5e-12 7.1e-11] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.001250   50    0   455.1111   455.1691
+0.001250   50    1  1820.4444  1820.6765
+0.002500  100    0   455.1111   455.2256
+0.002500  100    1  1820.4444  1820.9026
+0.003750  150    0   455.1111   455.2792
+0.003750  150    1  1820.4444  1821.1170
+0.005000  200    0   455.1111   455.3285
+0.005000  200    1  1820.4444  1821.3142
+0.006250  250    0   455.1111   455.3723
+0.006250  250    1  1820.4444  1821.4892
+0.007500  300    0   455.1111   455.4094
+0.007500  300    1  1820.4444  1821.6375
+0.008750  350    0   455.1111   455.4388
+0.008750  350    1  1820.4444  1821.7553
+0.010000  400    0   455.1111   455.4599
+0.010000  400    1  1820.4444  1821.8397
+p0 error [8.1e-14 2.4e-12] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [1.6e-13 4.9e-12] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+L_2 convergence rate: 3.9
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4444  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4444  1826.0253
diff --git a/src/dm/impls/swarm/examples/tests/output/ex4_4_alt.out b/src/dm/impls/swarm/examples/tests/output/ex4_4_alt.out
new file mode 100644
index 00000000000..4b0d1d8f0b3
--- /dev/null
+++ b/src/dm/impls/swarm/examples/tests/output/ex4_4_alt.out
@@ -0,0 +1,86 @@
+DM Object: Mesh 1 MPI processes
+  type: plex
+Mesh in 2 dimensions:
+  0-cells: 4
+  1-cells: 5
+  2-cells: 2
+Labels:
+  depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
+  marker: 1 strata with value/size (1 (8))
+  Face Sets: 1 strata with value/size (1 (4))
+DM Object: Particles 1 MPI processes
+  type: swarm
+DMSwarmDataBucketView:
+  L                  = 2
+  buffer             = 0
+  allocated          = 2
+  nfields registered = 5
+    [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            blocksize        = 1
+                            atomic size      = 4
+    [  4]      kinematics : Mem. usage       = 3.20e-05 (MB) [rank0]
+                            blocksize        = 2
+                            atomic size      = 16 [full block, bs=2]
+                            atomic size/item = 8
+  Total mem. usage                           = 9.60e-05 (MB) (collective)
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4444  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4444  1826.0253
+p0 error [2e-11 5.7e-10] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [4e-11 1.1e-09] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.002500   50    0   455.1111   455.3402
+0.002500   50    1  1820.4444  1821.3607
+0.005000  100    0   455.1111   455.5460
+0.005000  100    1  1820.4444  1822.1839
+0.007500  150    0   455.1111   455.7076
+0.007500  150    1  1820.4444  1822.8305
+0.010000  200    0   455.1111   455.8087
+0.010000  200    1  1820.4444  1823.2349
+p0 error [1.2e-12 3.5e-11] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [2.5e-12 7.1e-11] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.001250   50    0   455.1111   455.1691
+0.001250   50    1  1820.4444  1820.6765
+0.002500  100    0   455.1111   455.2256
+0.002500  100    1  1820.4444  1820.9026
+0.003750  150    0   455.1111   455.2792
+0.003750  150    1  1820.4444  1821.1170
+0.005000  200    0   455.1111   455.3285
+0.005000  200    1  1820.4444  1821.3142
+0.006250  250    0   455.1111   455.3723
+0.006250  250    1  1820.4444  1821.4892
+0.007500  300    0   455.1111   455.4094
+0.007500  300    1  1820.4444  1821.6375
+0.008750  350    0   455.1111   455.4388
+0.008750  350    1  1820.4444  1821.7553
+0.010000  400    0   455.1111   455.4599
+0.010000  400    1  1820.4444  1821.8397
+p0 error [8.1e-14 2.4e-12] sol [0.378112 -18.017320] exact [0.378112 -18.017320] energy/exact energy 455.111 / 455.111
+p1 error [1.6e-13 4.9e-12] sol [0.756223 -36.034641] exact [0.756223 -36.034641] energy/exact energy 1820.44 / 1820.44
+L_2 convergence rate: 3.9
+Time     Step Part     Energy Mod Energy
+0.000000    0    0   455.1111   455.1111
+0.000000    0    1  1820.4444  1820.4444
+0.005000   50    0   455.1111   455.9808
+0.005000   50    1  1820.4444  1823.9234
+0.010000  100    0   455.1111   456.5063
+0.010000  100    1  1820.4444  1826.0253
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_bsi1.out b/src/dm/impls/swarm/examples/tests/output/ex5_bsi1.out
index b61815468df..9917184a931 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_bsi1.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex5_bsi1.out
@@ -8,15 +8,13 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
   L                  = 2 
   buffer             = 0 
   allocated          = 2 
-  nfields registered = 6 
+  nfields registered = 5 
     [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 8 
@@ -30,14 +28,12 @@ DMSwarmDataBucketView:
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.44e-04 (MB) (collective)
+  Total mem. usage                           = 1.28e-04 (MB) (collective)
+L_2 convergence rate: 1.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.05
 Particle 1 initial Energy: 250.  Final Energy: 250.003
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_bsi2.out b/src/dm/impls/swarm/examples/tests/output/ex5_bsi2.out
index f3e938321cf..a630637d848 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_bsi2.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex5_bsi2.out
@@ -8,15 +8,13 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
   L                  = 2 
   buffer             = 0 
   allocated          = 2 
-  nfields registered = 6 
+  nfields registered = 5 
     [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 8 
@@ -30,14 +28,12 @@ DMSwarmDataBucketView:
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.44e-04 (MB) (collective)
+  Total mem. usage                           = 1.28e-04 (MB) (collective)
+L_2 convergence rate: 2.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.
 Particle 1 initial Energy: 250.  Final Energy: 250.
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_euler.out b/src/dm/impls/swarm/examples/tests/output/ex5_euler.out
index b61815468df..9917184a931 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_euler.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex5_euler.out
@@ -8,15 +8,13 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
   L                  = 2 
   buffer             = 0 
   allocated          = 2 
-  nfields registered = 6 
+  nfields registered = 5 
     [  0]     DMSwarm_pid : Mem. usage       = 1.60e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 8 
@@ -30,14 +28,12 @@ DMSwarmDataBucketView:
     [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.44e-04 (MB) (collective)
+  Total mem. usage                           = 1.28e-04 (MB) (collective)
+L_2 convergence rate: 1.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.05
 Particle 1 initial Energy: 250.  Final Energy: 250.003
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_bsi1_alt.out b/src/dm/impls/swarm/examples/tests/output/ex6_bsi1.out
similarity index 53%
rename from src/dm/impls/swarm/examples/tests/output/ex5_bsi1_alt.out
rename to src/dm/impls/swarm/examples/tests/output/ex6_bsi1.out
index a69bd76e6d8..aad330a6bd5 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_bsi1_alt.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex6_bsi1.out
@@ -8,36 +8,40 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
-  L                  = 2 
+  L                  = 10 
   buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
-    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-06 (MB) [rank0]
+  allocated          = 10 
+  nfields registered = 5 
+    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-05 (MB) [rank0]
                             blocksize        = 1 
-                            atomic size      = 4 
-    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            atomic size      = 8 
+    [  1]    DMSwarm_rank : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+    [  2] DMSwarmPIC_coor : Mem. usage       = 1.60e-04 (MB) [rank0]
                             blocksize        = 2 
                             atomic size      = 16 [full block, bs=2]
                             atomic size/item = 8 
-    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+    [  3]  DMSwarm_cellid : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 3.20e-04 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.36e-04 (MB) (collective)
+  Total mem. usage                           = 6.40e-04 (MB) (collective)
+L_2 convergence rate: 1.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.05
-Particle 1 initial Energy: 250.  Final Energy: 250.003
+Particle 1 initial Energy: 500.  Final Energy: 500.05
+Particle 2 initial Energy: 500.  Final Energy: 500.05
+Particle 3 initial Energy: 500.  Final Energy: 500.05
+Particle 4 initial Energy: 500.  Final Energy: 500.05
+Particle 0 initial Energy: 125.  Final Energy: 125.
+Particle 1 initial Energy: 125.  Final Energy: 125.
+Particle 2 initial Energy: 125.  Final Energy: 125.
+Particle 3 initial Energy: 125.  Final Energy: 125.
+Particle 4 initial Energy: 125.  Final Energy: 125.
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_bsi2_alt.out b/src/dm/impls/swarm/examples/tests/output/ex6_bsi2.out
similarity index 54%
rename from src/dm/impls/swarm/examples/tests/output/ex5_bsi2_alt.out
rename to src/dm/impls/swarm/examples/tests/output/ex6_bsi2.out
index baa5f4a3817..4786667d0fb 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_bsi2_alt.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex6_bsi2.out
@@ -8,36 +8,40 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
-  L                  = 2 
+  L                  = 10 
   buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
-    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-06 (MB) [rank0]
+  allocated          = 10 
+  nfields registered = 5 
+    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-05 (MB) [rank0]
                             blocksize        = 1 
-                            atomic size      = 4 
-    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            atomic size      = 8 
+    [  1]    DMSwarm_rank : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+    [  2] DMSwarmPIC_coor : Mem. usage       = 1.60e-04 (MB) [rank0]
                             blocksize        = 2 
                             atomic size      = 16 [full block, bs=2]
                             atomic size/item = 8 
-    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+    [  3]  DMSwarm_cellid : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 3.20e-04 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.36e-04 (MB) (collective)
+  Total mem. usage                           = 6.40e-04 (MB) (collective)
+L_2 convergence rate: 2.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.
-Particle 1 initial Energy: 250.  Final Energy: 250.
+Particle 1 initial Energy: 500.  Final Energy: 500.
+Particle 2 initial Energy: 500.  Final Energy: 500.
+Particle 3 initial Energy: 500.  Final Energy: 500.
+Particle 4 initial Energy: 500.  Final Energy: 500.
+Particle 0 initial Energy: 125.  Final Energy: 125.
+Particle 1 initial Energy: 125.  Final Energy: 125.
+Particle 2 initial Energy: 125.  Final Energy: 125.
+Particle 3 initial Energy: 125.  Final Energy: 125.
+Particle 4 initial Energy: 125.  Final Energy: 125.
diff --git a/src/dm/impls/swarm/examples/tests/output/ex5_euler_alt.out b/src/dm/impls/swarm/examples/tests/output/ex6_euler.out
similarity index 53%
rename from src/dm/impls/swarm/examples/tests/output/ex5_euler_alt.out
rename to src/dm/impls/swarm/examples/tests/output/ex6_euler.out
index a69bd76e6d8..aad330a6bd5 100644
--- a/src/dm/impls/swarm/examples/tests/output/ex5_euler_alt.out
+++ b/src/dm/impls/swarm/examples/tests/output/ex6_euler.out
@@ -8,36 +8,40 @@ Labels:
   depth: 3 strata with value/size (0 (4), 1 (5), 2 (2))
   marker: 1 strata with value/size (1 (8))
   Face Sets: 1 strata with value/size (1 (4))
-  DMSWARM_PIC: Using method CellDM->LocatePoints
-  DMSWARM_PIC: Using method CellDM->GetNeigbors
 DM Object: Particles 1 MPI processes
   type: swarm
 DMSwarmDataBucketView: 
-  L                  = 2 
+  L                  = 10 
   buffer             = 0 
-  allocated          = 2 
-  nfields registered = 6 
-    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-06 (MB) [rank0]
+  allocated          = 10 
+  nfields registered = 5 
+    [  0]     DMSwarm_pid : Mem. usage       = 8.00e-05 (MB) [rank0]
                             blocksize        = 1 
-                            atomic size      = 4 
-    [  1]    DMSwarm_rank : Mem. usage       = 8.00e-06 (MB) [rank0]
+                            atomic size      = 8 
+    [  1]    DMSwarm_rank : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  2] DMSwarmPIC_coor : Mem. usage       = 3.20e-05 (MB) [rank0]
+    [  2] DMSwarmPIC_coor : Mem. usage       = 1.60e-04 (MB) [rank0]
                             blocksize        = 2 
                             atomic size      = 16 [full block, bs=2]
                             atomic size/item = 8 
-    [  3]  DMSwarm_cellid : Mem. usage       = 8.00e-06 (MB) [rank0]
+    [  3]  DMSwarm_cellid : Mem. usage       = 4.00e-05 (MB) [rank0]
                             blocksize        = 1 
                             atomic size      = 4 
-    [  4]             w_q : Mem. usage       = 1.60e-05 (MB) [rank0]
-                            blocksize        = 1 
-                            atomic size      = 8 
-    [  5]      kinematics : Mem. usage       = 6.40e-05 (MB) [rank0]
+    [  4]      kinematics : Mem. usage       = 3.20e-04 (MB) [rank0]
                             blocksize        = 4 
                             atomic size      = 32 [full block, bs=4]
                             atomic size/item = 8 
-  Total mem. usage                           = 1.36e-04 (MB) (collective)
+  Total mem. usage                           = 6.40e-04 (MB) (collective)
+L_2 convergence rate: 1.0
 CONVERGED_ITS at time 0.001 after 10 steps
 Particle 0 initial Energy: 500.  Final Energy: 500.05
-Particle 1 initial Energy: 250.  Final Energy: 250.003
+Particle 1 initial Energy: 500.  Final Energy: 500.05
+Particle 2 initial Energy: 500.  Final Energy: 500.05
+Particle 3 initial Energy: 500.  Final Energy: 500.05
+Particle 4 initial Energy: 500.  Final Energy: 500.05
+Particle 0 initial Energy: 125.  Final Energy: 125.
+Particle 1 initial Energy: 125.  Final Energy: 125.
+Particle 2 initial Energy: 125.  Final Energy: 125.
+Particle 3 initial Energy: 125.  Final Energy: 125.
+Particle 4 initial Energy: 125.  Final Energy: 125.
diff --git a/src/dm/impls/swarm/swarm.c b/src/dm/impls/swarm/swarm.c
index 02b340ee1ff..0e1ea8281a7 100644
--- a/src/dm/impls/swarm/swarm.c
+++ b/src/dm/impls/swarm/swarm.c
@@ -34,9 +34,9 @@ const char DMSwarmPICField_cellid[] = "DMSwarm_cellid";
    Level: beginner
 
    Notes:
- 
+
    The field with name fieldname must be defined as having a data type of PetscScalar.
- 
+
    This function must be called prior to calling DMCreateLocalVector(), DMCreateGlobalVector().
    Mutiple calls to DMSwarmVectorDefineField() are permitted.
 
@@ -227,7 +227,7 @@ static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass,
 
   ierr = PetscCalloc2(locRows, &dnz, locRows, &onz);CHKERRQ(ierr);
   ierr = PetscHSetIJCreate(&ht);CHKERRQ(ierr);
-  
+
   ierr = PetscSynchronizedFlush(comm, NULL);CHKERRQ(ierr);
   /* count non-zeros */
   ierr = DMSwarmSortGetAccess(dmc);CHKERRQ(ierr);
@@ -269,9 +269,10 @@ static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass,
   ierr = PetscFree2(dnz, onz);CHKERRQ(ierr);
   ierr = PetscMalloc3(maxC*totDim, &elemMat, maxC, &rowIDXs, maxC*dim, &xi);CHKERRQ(ierr);
   for (field = 0; field < Nf; ++field) {
-    PetscObject     obj;
-    PetscReal      *Bcoarse, *coords;
-    PetscInt        Nc, i;
+    PetscTabulation Tcoarse;
+    PetscObject       obj;
+    PetscReal        *coords;
+    PetscInt          Nc, i;
 
     ierr = PetscDSGetDiscretization(prob, field, &obj);CHKERRQ(ierr);
     ierr = PetscFEGetNumComponents((PetscFE) obj, &Nc);CHKERRQ(ierr);
@@ -289,14 +290,14 @@ static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass,
       for (p = 0; p < numCIndices; ++p) {
         CoordinatesRealToRef(dim, dim, v0ref, v0, invJ, &coords[cindices[p]*dim], &xi[p*dim]);
       }
-      ierr = PetscFEGetTabulation((PetscFE) obj, numCIndices, xi, &Bcoarse, NULL, NULL);CHKERRQ(ierr);
+      ierr = PetscFECreateTabulation((PetscFE) obj, 1, numCIndices, xi, 0, &Tcoarse);CHKERRQ(ierr);
       /* Get elemMat entries by multiplying by weight */
       ierr = PetscArrayzero(elemMat, numCIndices*totDim);CHKERRQ(ierr);
       for (i = 0; i < numFIndices; ++i) {
         for (p = 0; p < numCIndices; ++p) {
           for (c = 0; c < Nc; ++c) {
             /* B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c */
-            elemMat[p*numFIndices+i] += Bcoarse[(p*numFIndices + i)*Nc + c]*(useDeltaFunction ? 1.0 : detJ);
+            elemMat[p*numFIndices+i] += Tcoarse->T[0][(p*numFIndices + i)*Nc + c]*(useDeltaFunction ? 1.0 : detJ);
           }
         }
       }
@@ -305,7 +306,7 @@ static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass,
       ierr = MatSetValues(mass, numCIndices, rowIDXs, numFIndices, findices, elemMat, ADD_VALUES);CHKERRQ(ierr);
       ierr = PetscFree(cindices);CHKERRQ(ierr);
       ierr = DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, &numFIndices, &findices, NULL);CHKERRQ(ierr);
-      ierr = PetscFERestoreTabulation((PetscFE) obj, numCIndices, xi, &Bcoarse, NULL, NULL);CHKERRQ(ierr);
+      ierr = PetscTabulationDestroy(&Tcoarse);CHKERRQ(ierr);
     }
     ierr = DMSwarmRestoreField(dmc, DMSwarmPICField_coor, NULL, NULL, (void **) &coords);CHKERRQ(ierr);
   }
@@ -483,7 +484,7 @@ PETSC_EXTERN PetscErrorCode DMSwarmInitializeFieldRegister(DM dm)
   PetscFunctionBegin;
   if (!swarm->field_registration_initialized) {
     swarm->field_registration_initialized = PETSC_TRUE;
-    ierr = DMSwarmRegisterPetscDatatypeField(dm,DMSwarmField_pid,1,PETSC_LONG);CHKERRQ(ierr); /* unique identifer */
+    ierr = DMSwarmRegisterPetscDatatypeField(dm,DMSwarmField_pid,1,PETSC_INT64);CHKERRQ(ierr); /* unique identifier */
     ierr = DMSwarmRegisterPetscDatatypeField(dm,DMSwarmField_rank,1,PETSC_INT);CHKERRQ(ierr); /* used for communication */
   }
   PetscFunctionReturn(0);
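Since the unique-id field is now registered with PETSC_INT64 instead of PETSC_LONG, code that touches the raw field data should treat it as 64-bit on every platform. A hedged sketch of such an access; the loop body is purely illustrative:

  #include <petscdmswarm.h>

  /* Sketch: read the per-particle unique ids from a DMSwarm.
     The "DMSwarm_pid" field is registered with PETSC_INT64, so the raw
     pointer is treated as PetscInt64*. */
  static PetscErrorCode PrintParticleIDs(DM sw)
  {
    PetscInt64    *pid;
    PetscInt       n, p;
    PetscErrorCode ierr;

    PetscFunctionBegin;
    ierr = DMSwarmGetLocalSize(sw, &n);CHKERRQ(ierr);
    ierr = DMSwarmGetField(sw, "DMSwarm_pid", NULL, NULL, (void **) &pid);CHKERRQ(ierr);
    for (p = 0; p < n; ++p) {
      /* cast only for printing with %D; the stored value is 64-bit */
      ierr = PetscPrintf(PETSC_COMM_SELF, "particle %D has id %D\n", p, (PetscInt) pid[p]);CHKERRQ(ierr);
    }
    ierr = DMSwarmRestoreField(sw, "DMSwarm_pid", NULL, NULL, (void **) &pid);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }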
@@ -941,23 +942,23 @@ PETSC_EXTERN PetscErrorCode DMSwarmRemovePointAtIndex(DM dm,PetscInt idx)
 
 /*@C
    DMSwarmCopyPoint - Copy point pj to point pi in the DMSwarm
- 
+
    Not collective
- 
+
    Input parameters:
 +  dm - a DMSwarm
 .  pi - the index of the point to copy
 -  pj - the point index where the copy should be located
- 
+
  Level: beginner
- 
+
 .seealso: DMSwarmRemovePoint()
 @*/
 PETSC_EXTERN PetscErrorCode DMSwarmCopyPoint(DM dm,PetscInt pi,PetscInt pj)
 {
   DM_Swarm       *swarm = (DM_Swarm*)dm->data;
   PetscErrorCode ierr;
-  
+
   PetscFunctionBegin;
   if (!swarm->issetup) {ierr = DMSetUp(dm);CHKERRQ(ierr);}
   ierr = DMSwarmDataBucketCopyPoint(swarm->db,pi,swarm->db,pj);CHKERRQ(ierr);
@@ -1163,16 +1164,16 @@ PetscErrorCode DMSetup_Swarm(DM dm)
 
     if (swarm->dmcell->ops->locatepointssubdomain) {
       /* check methods exists for exact ownership identificiation */
-      ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"  DMSWARM_PIC: Using method CellDM->ops->LocatePointsSubdomain\n");CHKERRQ(ierr);
+      ierr = PetscInfo(dm, "DMSWARM_PIC: Using method CellDM->ops->LocatePointsSubdomain\n");CHKERRQ(ierr);
       swarm->migrate_type = DMSWARM_MIGRATE_DMCELLEXACT;
     } else {
       /* check methods exist for point location AND rank neighbor identification */
       if (swarm->dmcell->ops->locatepoints) {
-        ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"  DMSWARM_PIC: Using method CellDM->LocatePoints\n");CHKERRQ(ierr);
+        ierr = PetscInfo(dm, "DMSWARM_PIC: Using method CellDM->LocatePoints\n");CHKERRQ(ierr);
       } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_USER,"DMSWARM_PIC requires the method CellDM->ops->locatepoints be defined");
 
       if (swarm->dmcell->ops->getneighbors) {
-        ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"  DMSWARM_PIC: Using method CellDM->GetNeigbors\n");CHKERRQ(ierr);
+        ierr = PetscInfo(dm, "DMSWARM_PIC: Using method CellDM->GetNeigbors\n");CHKERRQ(ierr);
       } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_USER,"DMSWARM_PIC requires the method CellDM->ops->getneighbors be defined");
 
       swarm->migrate_type = DMSWARM_MIGRATE_DMCELLNSCATTER;
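Because these messages now go through PetscInfo() rather than PetscPrintf(), they are emitted only when informational output is enabled, for example by running a test with the -info option (the executable name here is illustrative):

  ./ex5 -info

which is why the "DMSWARM_PIC: Using method ..." lines no longer appear in the expected test outputs updated above.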
@@ -1318,9 +1319,9 @@ PetscErrorCode DMView_Swarm(DM dm, PetscViewer viewer)
  If the local size of the DMSwarm does not match the local size of the global vector
  when DMSwarmDestroyGlobalVectorFromField() is called, an error is thrown.
 
- Additional high-level support is provided for Particle-In-Cell methods. 
+ Additional high-level support is provided for Particle-In-Cell methods.
  Please refer to the man page for DMSwarmSetType().
- 
+
  Level: beginner
 
 .seealso: DMType, DMCreate(), DMSetType()
diff --git a/src/dm/impls/swarm/swarmpic_plex.c b/src/dm/impls/swarm/swarmpic_plex.c
index 5943f89accb..d35e3130be3 100644
--- a/src/dm/impls/swarm/swarmpic_plex.c
+++ b/src/dm/impls/swarm/swarmpic_plex.c
@@ -188,7 +188,7 @@ PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_PLEX_SubDivide(DM dm,DM dm
   PetscErrorCode ierr;
   PetscInt dim,nfaces,nbasis;
   PetscInt q,npoints_q,e,nel,pcnt,ps,pe,d,k,r;
-  PetscReal *B;
+  PetscTabulation T;
   Vec coorlocal;
   PetscSection coordSection;
   PetscScalar *elcoor = NULL;
@@ -219,7 +219,7 @@ PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_PLEX_SubDivide(DM dm,DM dm
   ierr = PetscFEGetQuadrature(fe,&quadrature);CHKERRQ(ierr);
   ierr = PetscQuadratureGetData(quadrature, NULL, NULL, &npoints_q, &xiq, NULL);CHKERRQ(ierr);
   ierr = PetscFEGetDimension(fe,&nbasis);CHKERRQ(ierr);
-  ierr = PetscFEGetDefaultTabulation(fe, &B, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr);
 
   /* 0->cell, 1->edge, 2->vert */
   ierr = DMPlexGetHeightStratum(dmc,0,&ps,&pe);CHKERRQ(ierr);
@@ -240,7 +240,7 @@ PetscErrorCode private_DMSwarmInsertPointsUsingCellDM_PLEX_SubDivide(DM dm,DM dm
       for (d=0; d<dim; d++) {
         swarm_coor[dim*pcnt+d] = 0.0;
         for (k=0; k<nbasis; k++) {
-          swarm_coor[dim*pcnt+d] += B[q*nbasis + k] * PetscRealPart(elcoor[dim*k+d]);
+          swarm_coor[dim*pcnt+d] += T->T[0][q*nbasis + k] * PetscRealPart(elcoor[dim*k+d]);
         }
       }
       swarm_cellid[pcnt] = e;
@@ -663,7 +663,8 @@ PetscErrorCode private_DMSwarmSetPointCoordinatesCellwise_PLEX(DM dm,DM dmc,Pets
   PetscInt dim,nfaces,ps,pe,p,d,nbasis,pcnt,e,k,nel;
   PetscFE fe;
   PetscQuadrature quadrature;
-  PetscReal *B,*xiq;
+  PetscTabulation T;
+  PetscReal *xiq;
   Vec coorlocal;
   PetscSection coordSection;
   PetscScalar *elcoor = NULL;
@@ -720,7 +721,7 @@ PetscErrorCode private_DMSwarmSetPointCoordinatesCellwise_PLEX(DM dm,DM dmc,Pets
   ierr = private_PetscFECreateDefault_scalar_pk1(dmc, dim, is_simplex, 0, &fe);CHKERRQ(ierr);
   ierr = PetscFESetQuadrature(fe,quadrature);CHKERRQ(ierr);
   ierr = PetscFEGetDimension(fe,&nbasis);CHKERRQ(ierr);
-  ierr = PetscFEGetDefaultTabulation(fe, &B, NULL, NULL);CHKERRQ(ierr);
+  ierr = PetscFEGetCellTabulation(fe, &T);CHKERRQ(ierr);
 
   /* for each cell, interpolate coordaintes and insert the interpolated points coordinates into swarm */
   /* 0->cell, 1->edge, 2->vert */
@@ -742,7 +743,7 @@ PetscErrorCode private_DMSwarmSetPointCoordinatesCellwise_PLEX(DM dm,DM dmc,Pets
       for (d=0; d<dim; d++) {
         swarm_coor[dim*pcnt+d] = 0.0;
         for (k=0; k<nbasis; k++) {
-          swarm_coor[dim*pcnt+d] += B[p*nbasis + k] * PetscRealPart(elcoor[dim*k+d]);
+          swarm_coor[dim*pcnt+d] += T->T[0][p*nbasis + k] * PetscRealPart(elcoor[dim*k+d]);
         }
       }
       swarm_cellid[pcnt] = e;
diff --git a/src/dm/interface/dlregisdmdm.c b/src/dm/interface/dlregisdmdm.c
index a1870137914..27436980d1e 100644
--- a/src/dm/interface/dlregisdmdm.c
+++ b/src/dm/interface/dlregisdmdm.c
@@ -58,6 +58,7 @@ PetscErrorCode  DMInitializePackage(void)
   ierr = PetscClassIdRegister("Distributed Mesh",&DM_CLASSID);CHKERRQ(ierr);
   ierr = PetscClassIdRegister("DM Label",&DMLABEL_CLASSID);CHKERRQ(ierr);
   ierr = PetscClassIdRegister("GraphPartitioner",&PETSCPARTITIONER_CLASSID);CHKERRQ(ierr);
+  ierr = PetscClassIdRegister("Quadrature",&PETSCQUADRATURE_CLASSID);CHKERRQ(ierr);
 
 #if defined(PETSC_HAVE_HYPRE)
   ierr = MatRegister(MATHYPRESTRUCT, MatCreate_HYPREStruct);CHKERRQ(ierr);
@@ -77,7 +78,12 @@ PetscErrorCode  DMInitializePackage(void)
   ierr = PetscLogEventRegister("DMCreateRestrict",       DM_CLASSID,&DM_CreateRestriction);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMCreateInject",         DM_CLASSID,&DM_CreateInjection);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMCreateMat",            DM_CLASSID,&DM_CreateMatrix);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("DMLoad",                 DM_CLASSID,&DM_Load);CHKERRQ(ierr);
 
+  ierr = PetscLogEventRegister("DMPlexCrFrCeLi",         DM_CLASSID,&DMPLEX_CreateFromCellList);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("DMPlexCrFrCeLiCo",       DM_CLASSID,&DMPLEX_CreateFromCellList_Coordinates);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("DMPlexCreateGmsh",       DM_CLASSID,&DMPLEX_CreateGmsh);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("DMPlexCrFromFile",       DM_CLASSID,&DMPLEX_CreateFromFile);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("Mesh Partition",         DM_CLASSID,&DMPLEX_Partition);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("Mesh Migration",         DM_CLASSID,&DMPLEX_Migrate);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMPlexPartSelf",         DM_CLASSID,&DMPLEX_PartSelf);CHKERRQ(ierr);
@@ -106,7 +112,6 @@ PetscErrorCode  DMInitializePackage(void)
   ierr = PetscLogEventRegister("DMPlexInterpFE",         DM_CLASSID,&DMPLEX_InterpolatorFEM);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMPlexInjectorFE",       DM_CLASSID,&DMPLEX_InjectorFEM);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMPlexIntegralFEM",      DM_CLASSID,&DMPLEX_IntegralFEM);CHKERRQ(ierr);
-  ierr = PetscLogEventRegister("DMPlexCreateGmsh",       DM_CLASSID,&DMPLEX_CreateGmsh);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("DMPlexRebalance",        DM_CLASSID,&DMPLEX_RebalanceSharedPoints);CHKERRQ(ierr);
 
   ierr = PetscLogEventRegister("DMSwarmMigrate",         DM_CLASSID,&DMSWARM_Migrate);CHKERRQ(ierr);
diff --git a/src/dm/interface/dm.c b/src/dm/interface/dm.c
index 260f8073d2d..5b399491e86 100644
--- a/src/dm/interface/dm.c
+++ b/src/dm/interface/dm.c
@@ -8,11 +8,11 @@
 
 PetscClassId  DM_CLASSID;
 PetscClassId  DMLABEL_CLASSID;
-PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal, DM_LocatePoints, DM_Coarsen, DM_Refine, DM_CreateInterpolation, DM_CreateRestriction, DM_CreateInjection, DM_CreateMatrix;
+PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal, DM_LocatePoints, DM_Coarsen, DM_Refine, DM_CreateInterpolation, DM_CreateRestriction, DM_CreateInjection, DM_CreateMatrix, DM_Load;
 
 const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DMBoundaryType","DM_BOUNDARY_",0};
 const char *const DMBoundaryConditionTypes[] = {"INVALID","ESSENTIAL","NATURAL","INVALID","INVALID","ESSENTIAL_FIELD","NATURAL_FIELD","INVALID","INVALID","INVALID","NATURAL_RIEMANN","DMBoundaryConditionType","DM_BC_",0};
-
+const char *const DMPolytopeTypes[] = {"point", "segment", "triangle", "quadrilateral", "segment tensor prism", "tetrahedron", "hexahedron", "triangular prism", "triangular tensor prism", "quadrilateral tensor prism", "unknown", "DMPolytopeType", "DM_POLYTOPE_", 0};
 /*@
   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
 
@@ -55,6 +55,7 @@ PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
   v->adjacency[0]             = PETSC_FALSE;
   v->adjacency[1]             = PETSC_TRUE;
   v->depthLabel               = NULL;
+  v->celltypeLabel            = NULL;
   v->localSection             = NULL;
   v->globalSection            = NULL;
   v->defaultConstraintSection = NULL;
@@ -98,11 +99,14 @@ PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
 
   Level: beginner
 
-  Notes: For some DM this is a shallow clone, the result of which may share (referent counted) information with its parent. For example,
-         DMClone() applied to a DMPLEX object will result in a new DMPLEX that shares the topology with the original DMPLEX. It does
-         share the PetscSection of the original DM
+  Notes:
+  For some DM implementations this is a shallow clone, the result of which may share (reference counted) information with its parent. For example,
+  DMClone() applied to a DMPLEX object will result in a new DMPLEX that shares the topology with the original DMPLEX. It does not
+  share the PetscSection of the original DM.
+
+  The clone is considered set up iff the original is.
 
-.seealso: DMDestry(), DMCreate(), DMSetType(), DMSetLocalSection(), DMSetGlobalSection()
+.seealso: DMDestroy(), DMCreate(), DMSetType(), DMSetLocalSection(), DMSetGlobalSection()
 
 @*/
 PetscErrorCode DMClone(DM dm, DM *newdm)
@@ -579,7 +583,8 @@ PetscErrorCode DMDestroyLabelLinkList_Internal(DM dm)
   while (next) {
     DMLabelLink tmp = next->next;
 
-    if (next->label == dm->depthLabel) dm->depthLabel = NULL;
+    if (next->label == dm->depthLabel)    dm->depthLabel    = NULL;
+    if (next->label == dm->celltypeLabel) dm->celltypeLabel = NULL;
     ierr = DMLabelDestroy(&next->label);CHKERRQ(ierr);
     ierr = PetscFree(next);CHKERRQ(ierr);
     next = tmp;
@@ -764,6 +769,7 @@ PetscErrorCode  DMDestroy(DM *dm)
   if ((*dm)->ops->destroy) {
     ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
   }
+  ierr = DMMonitorCancel(*dm);CHKERRQ(ierr);
   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
   PetscFunctionReturn(0);
@@ -810,9 +816,19 @@ PetscErrorCode  DMSetUp(DM dm)
 .   -dm_mat_type   - type of matrix to create inside DM
 -   -dm_is_coloring_type - 
 
-    Level: developer
+    DMPLEX Specific Checks
++   -dm_plex_check_symmetry        - Check that the adjacency information in the mesh is symmetric - DMPlexCheckSymmetry()
+.   -dm_plex_check_skeleton        - Check that each cell has the correct number of vertices (only for homogeneous simplex or tensor meshes) - DMPlexCheckSkeleton()
+.   -dm_plex_check_faces           - Check that the faces of each cell give a vertex order that is consistent with what we expect from the cell type - DMPlexCheckFaces()
+.   -dm_plex_check_geometry        - Check that cells have positive volume - DMPlexCheckGeometry()
+.   -dm_plex_check_pointsf         - Check some necessary conditions for PointSF - DMPlexCheckPointSF()
+.   -dm_plex_check_interface_cones - Check points on inter-partition interfaces have conforming order of cone points - DMPlexCheckInterfaceCones()
+-   -dm_plex_check_all             - Perform all the checks above
 
-.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
+    Level: intermediate
+
+.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(),
+    DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces(), DMPlexCheckGeometry(), DMPlexCheckPointSF(), DMPlexCheckInterfaceCones()
 
 @*/
 PetscErrorCode DMSetFromOptions(DM dm)
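These checks are only consulted when DMSetFromOptions() is actually called on the mesh, so a typical driver looks like the following sketch (the box-mesh call and its dimensions are illustrative, not required by the options):

  #include <petscdmplex.h>

  /* Sketch: create a mesh and let the command line drive the consistency checks,
     e.g. run with -dm_plex_check_all or -dm_plex_check_symmetry. */
  static PetscErrorCode CreateCheckedMesh(MPI_Comm comm, DM *dm)
  {
    PetscErrorCode ierr;

    PetscFunctionBegin;
    ierr = DMPlexCreateBoxMesh(comm, 2, PETSC_TRUE, NULL, NULL, NULL, NULL, PETSC_TRUE, dm);CHKERRQ(ierr);
    ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);            /* runs the -dm_plex_check_* options, if given */
    ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }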
@@ -846,6 +862,29 @@ PetscErrorCode DMSetFromOptions(DM dm)
   PetscFunctionReturn(0);
 }
 
+/*@C
+   DMViewFromOptions - Views a DM based on a value in the options database
+
+   Collective on DM
+
+   Input Parameters:
++  dm - the DM object
+.  obj - Optional object that provides the options prefix used for the option name
+-  name - command line option
+
+   Level: intermediate
+
+.seealso:  DM, DMView(), PetscObjectViewFromOptions(), DMCreate()
+@*/
+PetscErrorCode  DMViewFromOptions(DM dm,PetscObject obj,const char name[])
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
+  ierr = PetscObjectViewFromOptions((PetscObject)dm,obj,name);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
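Since DMViewFromOptions() simply forwards to PetscObjectViewFromOptions(), a typical call site is

  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);

after which the user selects the output at run time, for example with -dm_view, -dm_view ::ascii_info_detail, or -dm_view hdf5:mesh.h5 (the option name "-dm_view" is the conventional choice, not mandated by the function).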
 /*@C
     DMView - Views a DM
 
@@ -1087,7 +1126,7 @@ PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
 
 
-.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen(), DMCreateRestriction()
+.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen(), DMCreateRestriction(), DMCreateInterpolationScale()
 
 @*/
 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
@@ -1105,6 +1144,38 @@ PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
   PetscFunctionReturn(0);
 }
 
+/*@
+    DMCreateInterpolationScale - Forms L = 1/(R*1) such that diag(L)*R preserves scale and is thus suitable for state (versus residual) restriction.
+
+  Input Parameters:
++      dac - DM that defines a coarse mesh
+.      daf - DM that defines a fine mesh
+-      mat - the restriction (or interpolation operator) from fine to coarse
+
+  Output Parameter:
+.    scale - the diagonal scaling vector, L = 1/(R*1)
+
+  Level: developer
+
+.seealso: DMCreateInterpolation()
+
+@*/
+PetscErrorCode  DMCreateInterpolationScale(DM dac,DM daf,Mat mat,Vec *scale)
+{
+  PetscErrorCode ierr;
+  Vec            fine;
+  PetscScalar    one = 1.0;
+
+  PetscFunctionBegin;
+  ierr = DMCreateGlobalVector(daf,&fine);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(dac,scale);CHKERRQ(ierr);
+  ierr = VecSet(fine,one);CHKERRQ(ierr);
+  ierr = MatRestrict(mat,fine,*scale);CHKERRQ(ierr);
+  ierr = VecDestroy(&fine);CHKERRQ(ierr);
+  ierr = VecReciprocal(*scale);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
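A hedged sketch of how DMCreateInterpolationScale() combines with MatRestrict() so that restricting a state vector preserves its scale; the helper name and argument list are illustrative:

  #include <petscdm.h>

  /* Sketch: restrict a fine-grid state vector to the coarse grid and rescale so
     that a constant function is reproduced exactly (state restriction). */
  static PetscErrorCode RestrictState(DM dac, DM daf, Vec xfine, Vec *xcoarse)
  {
    Mat            R;
    Vec            scale;
    PetscErrorCode ierr;

    PetscFunctionBegin;
    ierr = DMCreateInterpolation(dac, daf, &R, NULL);CHKERRQ(ierr);
    ierr = DMCreateInterpolationScale(dac, daf, R, &scale);CHKERRQ(ierr); /* scale = 1/(R*1), cf. the man page above */
    ierr = DMCreateGlobalVector(dac, xcoarse);CHKERRQ(ierr);
    ierr = MatRestrict(R, xfine, *xcoarse);CHKERRQ(ierr);
    ierr = VecPointwiseMult(*xcoarse, *xcoarse, scale);CHKERRQ(ierr);
    ierr = VecDestroy(&scale);CHKERRQ(ierr);
    ierr = MatDestroy(&R);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }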
 /*@
     DMCreateRestriction - Gets restriction matrix between two DM objects
 
@@ -1220,9 +1291,15 @@ PetscErrorCode DMCreateMassMatrix(DM dm1, DM dm2, Mat *mat)
     Output Parameter:
 .   coloring - the coloring
 
+    Notes:
+       Coloring of matrices can be computed directly from the sparse matrix nonzero structure, via the MatColoring object, or from the mesh the
+       matrix comes from. In general, using the mesh produces a better coloring (fewer colors).
+
+       This produces a coloring with distance 2 (see MatColoringSetDistance()), which can be used for efficiently computing Jacobians with MatFDColoringCreate().
+
     Level: developer
 
-.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
+.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType(), MatColoring, MatFDColoringCreate()
 
 @*/
 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
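To make the distance-2 remark concrete, a sketch of the usual pairing of DMCreateColoring() with MatFDColoringCreate(); the residual callback FormFunction is assumed to exist and is not defined here:

  #include <petscdm.h>
  #include <petscmat.h>

  /* Sketch: color the Jacobian nonzero pattern from the mesh and use it for
     finite-difference Jacobian assembly. */
  static PetscErrorCode SetupFDJacobian(DM dm, Mat J, PetscErrorCode (*FormFunction)(void), void *ctx, MatFDColoring *fdcoloring)
  {
    ISColoring     coloring;
    PetscErrorCode ierr;

    PetscFunctionBegin;
    ierr = DMCreateColoring(dm, IS_COLORING_GLOBAL, &coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(J, coloring, fdcoloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(*fdcoloring, FormFunction, ctx);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(*fdcoloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetUp(J, coloring, *fdcoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }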
@@ -3715,6 +3792,7 @@ PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
   ierr = PetscViewerCheckReadable(viewer);CHKERRQ(ierr);
   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
+  ierr = PetscLogEventBegin(DM_Load,viewer,0,0,0);CHKERRQ(ierr);
   if (isbinary) {
     PetscInt classid;
     char     type[256];
@@ -3727,6 +3805,7 @@ PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
   } else if (ishdf5) {
     if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);}
   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
+  ierr = PetscLogEventEnd(DM_Load,viewer,0,0,0);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
@@ -7139,6 +7218,8 @@ PetscErrorCode DMAddLabel(DM dm, DMLabel label)
   ierr = PetscObjectReference((PetscObject)label);CHKERRQ(ierr);
   ierr = PetscStrcmp(lname, "depth", &flg);CHKERRQ(ierr);
   if (flg) dm->depthLabel = label;
+  ierr = PetscStrcmp(lname, "celltype", &flg);CHKERRQ(ierr);
+  if (flg) dm->celltypeLabel = label;
   PetscFunctionReturn(0);
 }
 
@@ -7187,6 +7268,8 @@ PetscErrorCode DMRemoveLabel(DM dm, const char name[], DMLabel *label)
       *pnext = link->next; /* Remove from list */
       ierr = PetscStrcmp(name, "depth", &hasLabel);CHKERRQ(ierr);
       if (hasLabel) dm->depthLabel = NULL;
+      ierr = PetscStrcmp(name, "celltype", &hasLabel);CHKERRQ(ierr);
+      if (hasLabel) dm->celltypeLabel = NULL;
       if (label) *label = link->label;
       else       {ierr = DMLabelDestroy(&link->label);CHKERRQ(ierr);}
       ierr = PetscFree(link);CHKERRQ(ierr);
@@ -7232,6 +7315,7 @@ PetscErrorCode DMRemoveLabelBySelf(DM dm, DMLabel *label, PetscBool failNotFound
       hasLabel = PETSC_TRUE;
       *pnext = link->next; /* Remove from list */
       if (*label == dm->depthLabel) dm->depthLabel = NULL;
+      if (*label == dm->celltypeLabel) dm->celltypeLabel = NULL;
       if (((PetscObject) link->label)->refct < 2) *label = NULL; /* nullify if exclusive reference */
       ierr = DMLabelDestroy(&link->label);CHKERRQ(ierr);
       ierr = PetscFree(link);CHKERRQ(ierr);
@@ -7322,7 +7406,7 @@ PetscErrorCode DMSetLabelOutput(DM dm, const char name[], PetscBool output)
 + dmA - The DM object with initial labels
 . dmB - The DM object with copied labels
 . mode - Copy labels by pointers (PETSC_OWN_POINTER) or duplicate them (PETSC_COPY_VALUES)
-- all  - Copy all labels including "depth" and "dim" (PETSC_TRUE) which are otherwise ignored (PETSC_FALSE)
+- all  - Copy all labels including "depth", "dim", and "celltype" (PETSC_TRUE) which are otherwise ignored (PETSC_FALSE)
 
   Level: intermediate
 
@@ -7353,8 +7437,11 @@ PetscErrorCode DMCopyLabels(DM dmA, DM dmB, PetscCopyMode mode, PetscBool all)
       if (flg) continue;
       ierr = PetscStrcmp(name, "dim", &flg);CHKERRQ(ierr);
       if (flg) continue;
+      ierr = PetscStrcmp(name, "celltype", &flg);CHKERRQ(ierr);
+      if (flg) continue;
     } else {
-      dmB->depthLabel = dmA->depthLabel;
+      dmB->depthLabel    = dmA->depthLabel;
+      dmB->celltypeLabel = dmA->celltypeLabel;
     }
     if (mode==PETSC_COPY_VALUES) {
       ierr = DMLabelDuplicate(label, &labelNew);CHKERRQ(ierr);
@@ -7642,7 +7729,9 @@ PetscErrorCode DMIsBoundaryPoint(DM dm, PetscInt point, PetscBool *isBd)
 }
 
 /*@C
-  DMProjectFunction - This projects the given function into the function space provided.
+  DMProjectFunction - This projects the given function into the function space provided, putting the coefficients in a global vector.
+
+  Collective on DM
 
   Input Parameters:
 + dm      - The DM
@@ -7665,7 +7754,7 @@ PetscErrorCode DMIsBoundaryPoint(DM dm, PetscInt point, PetscBool *isBd)
 
   Level: developer
 
-.seealso: DMComputeL2Diff()
+.seealso: DMProjectFunctionLocal(), DMProjectFunctionLabel(), DMComputeL2Diff()
 @*/
 PetscErrorCode DMProjectFunction(DM dm, PetscReal time, PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *), void **ctxs, InsertMode mode, Vec X)
 {
@@ -7682,6 +7771,34 @@ PetscErrorCode DMProjectFunction(DM dm, PetscReal time, PetscErrorCode (**funcs)
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMProjectFunctionLocal - This projects the given function into the function space provided, putting the coefficients in a local vector.
+
+  Not collective
+
+  Input Parameters:
++ dm      - The DM
+. time    - The time
+. funcs   - The coordinate functions to evaluate, one per field
+. ctxs    - Optional array of contexts to pass to each coordinate function.  ctxs itself may be null.
+- mode    - The insertion mode for values
+
+  Output Parameter:
+. localX - The output local vector
+
+   Calling sequence of func:
+$    func(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx);
+
++  dim - The spatial dimension
+.  x   - The coordinates
+.  Nf  - The number of fields
+.  u   - The output field values
+-  ctx - optional user-defined function context
+
+  Level: developer
+
+.seealso: DMProjectFunction(), DMProjectFunctionLabel(), DMComputeL2Diff()
+@*/
 PetscErrorCode DMProjectFunctionLocal(DM dm, PetscReal time, PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *), void **ctxs, InsertMode mode, Vec localX)
 {
   PetscErrorCode ierr;
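As an illustration of the calling sequence documented above, a coordinate function for DMProjectFunction()/DMProjectFunctionLocal() might look like the following sketch; the Gaussian profile is purely illustrative:

  #include <petscdm.h>

  /* Sketch: a coordinate function matching the documented calling sequence.
     Fills all Nf output values of u at the point x with a Gaussian bump. */
  static PetscErrorCode gaussian(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
  {
    PetscReal r2 = 0.0;
    PetscInt  d, c;

    for (d = 0; d < dim; ++d) r2 += x[d]*x[d];
    for (c = 0; c < Nf; ++c) u[c] = PetscExpReal(-r2);
    return 0;
  }

  /* Usage (one function per field, no contexts):
       PetscErrorCode (*funcs[1])(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *) = {gaussian};
       ierr = DMProjectFunction(dm, 0.0, funcs, NULL, INSERT_VALUES, X);CHKERRQ(ierr);
  */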
@@ -7694,6 +7811,35 @@ PetscErrorCode DMProjectFunctionLocal(DM dm, PetscReal time, PetscErrorCode (**f
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMProjectFunctionLabel - This projects the given function into the function space provided, putting the coefficients in a global vector, setting values only for points in the given label.
+
+  Collective on DM
+
+  Input Parameters:
++ dm      - The DM
+. time    - The time
+. label   - The DMLabel selecting the portion of the mesh for projection
+. numIds  - The number of label ids to use
+. ids     - The label ids to use for marking
+. Nc      - The number of components to set in the output, or PETSC_DETERMINE for all components
+. comps   - The components to set in the output, or NULL for all components
+. funcs   - The coordinate functions to evaluate, one per field
+. ctxs    - Optional array of contexts to pass to each coordinate function.  ctxs itself may be null.
+- mode    - The insertion mode for values
+
+  Output Parameter:
+. X - The output global vector
+
+   Calling sequence of func:
+$    func(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx);
+
++  dim - The spatial dimension
+.  x   - The coordinates
+.  Nf  - The number of fields
+.  u   - The output field values
+-  ctx - optional user-defined function context
+
+  Level: developer
+
+.seealso: DMProjectFunction(), DMProjectFunctionLocal(), DMProjectFunctionLabelLocal(), DMComputeL2Diff()
+@*/
 PetscErrorCode DMProjectFunctionLabel(DM dm, PetscReal time, DMLabel label, PetscInt numIds, const PetscInt ids[], PetscInt Nc, const PetscInt comps[], PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *), void **ctxs, InsertMode mode, Vec X)
 {
   Vec            localX;
@@ -7709,6 +7855,35 @@ PetscErrorCode DMProjectFunctionLabel(DM dm, PetscReal time, DMLabel label, Pets
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMProjectFunctionLabelLocal - This projects the given function into the function space provided, putting the coefficients in a local vector, setting values only for points in the given label.
+
+  Not collective
+
+  Input Parameters:
++ dm      - The DM
+. time    - The time
+. label   - The DMLabel selecting the portion of the mesh for projection
+. numIds  - The number of label ids to use
+. ids     - The label ids to use for marking
+. Nc      - The number of components to set in the output, or PETSC_DETERMINE for all components
+. comps   - The components to set in the output, or NULL for all components
+. funcs   - The coordinate functions to evaluate, one per field
+. ctxs    - Optional array of contexts to pass to each coordinate function.  ctxs itself may be null.
+- mode    - The insertion mode for values
+
+  Output Parameter:
+. localX - The output local vector
+
+   Calling sequence of func:
+$    func(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx);
+
++  dim - The spatial dimension
+.  x   - The coordinates
+.  Nf  - The number of fields
+.  u   - The output field values
+-  ctx - optional user-defined function context
+
+  Level: developer
+
+.seealso: DMProjectFunction(), DMProjectFunctionLocal(), DMProjectFunctionLabel(), DMComputeL2Diff()
+@*/
 PetscErrorCode DMProjectFunctionLabelLocal(DM dm, PetscReal time, DMLabel label, PetscInt numIds, const PetscInt ids[], PetscInt Nc, const PetscInt comps[], PetscErrorCode (**funcs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *), void **ctxs, InsertMode mode, Vec localX)
 {
   PetscErrorCode ierr;
@@ -7721,6 +7896,55 @@ PetscErrorCode DMProjectFunctionLabelLocal(DM dm, PetscReal time, DMLabel label,
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMProjectFieldLocal - This projects the given function of the input fields into the function space provided, putting the coefficients in a local vector.
+
+  Not collective
+
+  Input Parameters:
++ dm      - The DM
+. time    - The time
+. localU  - The input field vector
+. funcs   - The functions to evaluate, one per field
+- mode    - The insertion mode for values
+
+  Output Parameter:
+. localX  - The output vector
+
+   Calling sequence of func:
+$    func(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+$         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+$         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+$         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[]);
+
++  dim          - The spatial dimension
+.  Nf           - The number of input fields
+.  NfAux        - The number of input auxiliary fields
+.  uOff         - The offset of each field in u[]
+.  uOff_x       - The offset of each field in u_x[]
+.  u            - The field values at this point in space
+.  u_t          - The field time derivative at this point in space (or NULL)
+.  u_x          - The field derivatives at this point in space
+.  aOff         - The offset of each auxiliary field in a[]
+.  aOff_x       - The offset of each auxiliary field in a_x[]
+.  a            - The auxiliary field values at this point in space
+.  a_t          - The auxiliary field time derivative at this point in space (or NULL)
+.  a_x          - The auxiliary field derivatives at this point in space
+.  t            - The current time
+.  x            - The coordinates of this point
+.  numConstants - The number of constants
+.  constants    - The value of each constant
+-  f            - The value of the function at this point in space
+
+  Note: There are three different DMs that potentially interact in this function. The output DM, dm, specifies the layout of the values calculated by funcs.
+  The input DM, attached to localU, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
+  a subdomain. You can also output a different number of fields than the input, with different discretizations. Lastly, the auxiliary DM, attached to the
+  auxiliary field vector, which is itself attached to dm, can also be different. It can have a different topology, number of fields, and discretizations.
+
+  Level: intermediate
+
+.seealso: DMProjectField(), DMProjectFieldLabelLocal(), DMProjectFunction(), DMComputeL2Diff()
+@*/
 PetscErrorCode DMProjectFieldLocal(DM dm, PetscReal time, Vec localU,
                                    void (**funcs)(PetscInt, PetscInt, PetscInt,
                                                   const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
@@ -7739,6 +7963,60 @@ PetscErrorCode DMProjectFieldLocal(DM dm, PetscReal time, Vec localU,
   PetscFunctionReturn(0);
 }
 
+/*@C
+  DMProjectFieldLabelLocal - This projects the given function of the input fields into the function space provided, putting the coefficients in a local vector, calculating only over the portion of the domain specified by the label.
+
+  Not collective
+
+  Input Parameters:
++ dm      - The DM
+. time    - The time
+. label   - The DMLabel marking the portion of the domain to output
+. numIds  - The number of label ids to use
+. ids     - The label ids to use for marking
+. Nc      - The number of components to set in the output, or PETSC_DETERMINE for all components
+. comps   - The components to set in the output, or NULL for all components
+. localU  - The input field vector
+. funcs   - The functions to evaluate, one per field
+- mode    - The insertion mode for values
+
+  Output Parameter:
+. localX  - The output vector
+
+   Calling sequence of func:
+$    func(PetscInt dim, PetscInt Nf, PetscInt NfAux,
+$         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
+$         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
+$         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[]);
+
++  dim          - The spatial dimension
+.  Nf           - The number of input fields
+.  NfAux        - The number of input auxiliary fields
+.  uOff         - The offset of each field in u[]
+.  uOff_x       - The offset of each field in u_x[]
+.  u            - The field values at this point in space
+.  u_t          - The field time derivative at this point in space (or NULL)
+.  u_x          - The field derivatives at this point in space
+.  aOff         - The offset of each auxiliary field in a[]
+.  aOff_x       - The offset of each auxiliary field in a_x[]
+.  a            - The auxiliary field values at this point in space
+.  a_t          - The auxiliary field time derivative at this point in space (or NULL)
+.  a_x          - The auxiliary field derivatives at this point in space
+.  t            - The current time
+.  x            - The coordinates of this point
+.  numConstants - The number of constants
+.  constants    - The value of each constant
+-  f            - The value of the function at this point in space
+
+  Note: There are three different DMs that potentially interact in this function. The output DM, dm, specifies the layout of the values calculated by funcs.
+  The input DM, attached to localU, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
+  a subdomain. You can also output a different number of fields than the input, with different discretizations. Lastly, the auxiliary DM, attached to the
+  auxiliary field vector, which is itself attached to dm, can also be different. It can have a different topology, number of fields, and discretizations.
+
+  Level: intermediate
+
+.seealso: DMProjectField(), DMProjectFieldLocal(), DMProjectFunction(), DMComputeL2Diff()
+@*/
 PetscErrorCode DMProjectFieldLabelLocal(DM dm, PetscReal time, DMLabel label, PetscInt numIds, const PetscInt ids[], PetscInt Nc, const PetscInt comps[], Vec localU,
                                         void (**funcs)(PetscInt, PetscInt, PetscInt,
                                                        const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[],
@@ -8118,3 +8396,156 @@ PetscErrorCode DMGetCompatibility(DM dm,DM dm2,PetscBool *compatible,PetscBool *
   }
   PetscFunctionReturn(0);
 }
+
+/*@C
+  DMMonitorSet - Sets an ADDITIONAL function that is to be used after a solve to monitor discretization performance.
+
+  Logically Collective on DM
+
+  Input Parameters:
++ dm - the DM
+. f - the monitor function
+. mctx - [optional] user-defined context for private data for the monitor routine (use NULL if no context is desired)
+- monitordestroy - [optional] routine that frees monitor context (may be NULL)
+
+  Options Database Key:
+. -dm_monitor_cancel - cancels all monitors that have been hardwired into a code by calls to DMMonitorSet(), but
+                       does not cancel those set via the options database.
+
+  Notes:
+  Several different monitoring routines may be set by calling
+  DMMonitorSet() multiple times; all will be called in the
+  order in which they were set.
+
+  Fortran Notes:
+  Only a single monitor function can be set for each DM object
+
+  Level: intermediate
+
+.seealso: DMMonitorCancel()
+@*/
+PetscErrorCode DMMonitorSet(DM dm, PetscErrorCode (*f)(DM, void *), void *mctx, PetscErrorCode (*monitordestroy)(void**))
+{
+  PetscInt       m;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  for (m = 0; m < dm->numbermonitors; ++m) {
+    PetscBool identical;
+
+    ierr = PetscMonitorCompare((PetscErrorCode (*)(void)) f, mctx, monitordestroy, (PetscErrorCode (*)(void)) dm->monitor[m], dm->monitorcontext[m], dm->monitordestroy[m], &identical);CHKERRQ(ierr);
+    if (identical) PetscFunctionReturn(0);
+  }
+  if (dm->numbermonitors >= MAXDMMONITORS) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many monitors set");
+  dm->monitor[dm->numbermonitors]          = f;
+  dm->monitordestroy[dm->numbermonitors]   = monitordestroy;
+  dm->monitorcontext[dm->numbermonitors++] = (void *) mctx;
+  PetscFunctionReturn(0);
+}
+
+/*@
+  DMMonitorCancel - Clears all the monitor functions for a DM object.
+
+  Logically Collective on DM
+
+  Input Parameter:
+. dm - the DM
+
+  Options Database Key:
+. -dm_monitor_cancel - cancels all monitors that have been hardwired
+  into a code by calls to DMMonitorSet(), but does not cancel those
+  set via the options database
+
+  Notes:
+  There is no way to clear one specific monitor from a DM object.
+
+  Level: intermediate
+
+.seealso: DMMonitorSet()
+@*/
+PetscErrorCode DMMonitorCancel(DM dm)
+{
+  PetscErrorCode ierr;
+  PetscInt       m;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  for (m = 0; m < dm->numbermonitors; ++m) {
+    if (dm->monitordestroy[m]) {ierr = (*dm->monitordestroy[m])(&dm->monitorcontext[m]);CHKERRQ(ierr);}
+  }
+  dm->numbermonitors = 0;
+  PetscFunctionReturn(0);
+}
+
+/*@C
+  DMMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
+
+  Collective on DM
+
+  Input Parameters:
++ dm   - DM object you wish to monitor
+. name - the monitor type one is seeking
+. help - message indicating what monitoring is done
+. manual - manual page for the monitor
+. monitor - the monitor function
+- monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the DM or PetscViewer objects
+
+  Output Parameter:
+. flg - Flag set if the monitor was created
+
+  Level: developer
+
+.seealso: PetscOptionsGetViewer(), PetscOptionsGetReal(), PetscOptionsHasName(), PetscOptionsGetString(),
+          PetscOptionsGetIntArray(), PetscOptionsGetRealArray(), PetscOptionsBool()
+          PetscOptionsInt(), PetscOptionsString(), PetscOptionsReal(), PetscOptionsBool(),
+          PetscOptionsName(), PetscOptionsBegin(), PetscOptionsEnd(), PetscOptionsHead(),
+          PetscOptionsStringArray(),PetscOptionsRealArray(), PetscOptionsScalar(),
+          PetscOptionsBoolGroupBegin(), PetscOptionsBoolGroup(), PetscOptionsBoolGroupEnd(),
+          PetscOptionsFList(), PetscOptionsEList()
+@*/
+PetscErrorCode DMMonitorSetFromOptions(DM dm, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(DM, void *), PetscErrorCode (*monitorsetup)(DM, PetscViewerAndFormat *), PetscBool *flg)
+{
+  PetscViewer       viewer;
+  PetscViewerFormat format;
+  PetscErrorCode    ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) dm), ((PetscObject) dm)->options, ((PetscObject) dm)->prefix, name, &viewer, &format, flg);CHKERRQ(ierr);
+  if (*flg) {
+    PetscViewerAndFormat *vf;
+
+    ierr = PetscViewerAndFormatCreate(viewer, format, &vf);CHKERRQ(ierr);
+    ierr = PetscObjectDereference((PetscObject) viewer);CHKERRQ(ierr);
+    if (monitorsetup) {ierr = (*monitorsetup)(dm, vf);CHKERRQ(ierr);}
+    ierr = DMMonitorSet(dm,(PetscErrorCode (*)(DM, void *)) monitor, vf, (PetscErrorCode (*)(void **)) PetscViewerAndFormatDestroy);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
+
+/*@
+   DMMonitor - runs the user provided monitor routines, if they exist
+
+   Collective on DM
+
+   Input Parameter:
+.  dm - The DM
+
+   Level: developer
+
+.seealso: DMMonitorSet()
+@*/
+PetscErrorCode DMMonitor(DM dm)
+{
+  PetscInt       m;
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  if (!dm) PetscFunctionReturn(0);
+  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
+  for (m = 0; m < dm->numbermonitors; ++m) {
+    ierr = (*dm->monitor[m])(dm, dm->monitorcontext[m]);CHKERRQ(ierr);
+  }
+  PetscFunctionReturn(0);
+}
diff --git a/src/dm/interface/dmregall.c b/src/dm/interface/dmregall.c
index 2688d9eef18..4d0fe773b2e 100644
--- a/src/dm/interface/dmregall.c
+++ b/src/dm/interface/dmregall.c
@@ -46,25 +46,27 @@ PetscErrorCode  DMRegisterAll(void)
   if (DMRegisterAllCalled) PetscFunctionReturn(0);
   DMRegisterAllCalled = PETSC_TRUE;
 
-  ierr = DMRegister(DMDA,         DMCreate_DA);CHKERRQ(ierr);
-  ierr = DMRegister(DMCOMPOSITE,  DMCreate_Composite);CHKERRQ(ierr);
-  ierr = DMRegister(DMSLICED,     DMCreate_Sliced);CHKERRQ(ierr);
-  ierr = DMRegister(DMSHELL,      DMCreate_Shell);CHKERRQ(ierr);
-  ierr = DMRegister(DMREDUNDANT,  DMCreate_Redundant);CHKERRQ(ierr);
-  ierr = DMRegister(DMPLEX,       DMCreate_Plex);CHKERRQ(ierr);
-  ierr = DMRegister(DMPATCH,      DMCreate_Patch);CHKERRQ(ierr);
-  ierr = DMRegister(DMSWARM,      DMCreate_Swarm);CHKERRQ(ierr);
+  ierr = DMRegister(DMDA,       DMCreate_DA);CHKERRQ(ierr);
+  ierr = DMRegister(DMCOMPOSITE,DMCreate_Composite);CHKERRQ(ierr);
+  ierr = DMRegister(DMSLICED,   DMCreate_Sliced);CHKERRQ(ierr);
+  ierr = DMRegister(DMSHELL,    DMCreate_Shell);CHKERRQ(ierr);
+  ierr = DMRegister(DMREDUNDANT,DMCreate_Redundant);CHKERRQ(ierr);
+  ierr = DMRegister(DMPLEX,     DMCreate_Plex);CHKERRQ(ierr);
+  ierr = DMRegister(DMPATCH,    DMCreate_Patch);CHKERRQ(ierr);
+  ierr = DMRegister(DMSWARM,    DMCreate_Swarm);CHKERRQ(ierr);
 #if defined(PETSC_HAVE_MOAB)
-  ierr = DMRegister(DMMOAB,       DMCreate_Moab);CHKERRQ(ierr);
+  ierr = DMRegister(DMMOAB,     DMCreate_Moab);CHKERRQ(ierr);
 #endif
-  ierr = DMRegister(DMNETWORK,    DMCreate_Network);CHKERRQ(ierr);
-  ierr = DMRegister(DMFOREST,     DMCreate_Forest);CHKERRQ(ierr);
+  ierr = DMRegister(DMNETWORK,  DMCreate_Network);CHKERRQ(ierr);
+  ierr = DMRegister(DMFOREST,   DMCreate_Forest);CHKERRQ(ierr);
 #if defined(PETSC_HAVE_P4EST)
-  ierr = DMRegister(DMP4EST,      DMCreate_p4est);CHKERRQ(ierr);
-  ierr = DMRegister(DMP8EST,      DMCreate_p8est);CHKERRQ(ierr);
+  ierr = DMRegister(DMP4EST,    DMCreate_p4est);CHKERRQ(ierr);
+  ierr = DMRegister(DMP8EST,    DMCreate_p8est);CHKERRQ(ierr);
 #endif
-  ierr = DMRegister(DMPRODUCT,    DMCreate_Product);CHKERRQ(ierr);
-  ierr = DMRegister(DMSTAG,       DMCreate_Stag);CHKERRQ(ierr);
+  ierr = DMRegister(DMPRODUCT,  DMCreate_Product);CHKERRQ(ierr);
+  ierr = DMRegister(DMSTAG,     DMCreate_Stag);CHKERRQ(ierr);
+
+  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
diff --git a/src/dm/interface/ftn-custom/zdmf.c b/src/dm/interface/ftn-custom/zdmf.c
index 6961eff5177..7b213f22619 100644
--- a/src/dm/interface/ftn-custom/zdmf.c
+++ b/src/dm/interface/ftn-custom/zdmf.c
@@ -24,6 +24,7 @@
 #define dmgetstratumis_              DMGETSTRATUMIS
 #define dmsetstratumis_              DMSETSTRATUMIS
 #define dmremovelabel_               DMREMOVELABEL
+#define dmviewfromoptions_           DMVIEWFROMOPTIONS
 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
 #define dmview_                      dmview
 #define dmsetoptionsprefix_          dmsetoptionsprefix
@@ -46,6 +47,7 @@
 #define dmgetstratumis_              dmgetstratumis
 #define dmsetstratumis_              dmsetstratumis
 #define dmremovelabel_               dmremovelabel
+#define dmviewfromoptions_           dmviewfromoptions
 #endif
 
 PETSC_EXTERN void PETSC_STDCALL dmgetmattype_(DM *mm,char* name PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
@@ -233,3 +235,12 @@ PETSC_EXTERN void PETSC_STDCALL dmremovelabel_(DM *dm, char* name PETSC_MIXED_LE
   *ierr = DMRemoveLabel(*dm, lname, label);if (*ierr) return;
   FREECHAR(name, lname);
 }
+
+PETSC_EXTERN void PETSC_STDCALL dmviewfromoptions_(DM *dm,PetscObject obj,char* type PETSC_MIXED_LEN(len),PetscErrorCode *ierr PETSC_END_LEN(len))
+{
+  char *t;
+
+  FIXCHAR(type,len,t);
+  *ierr = DMViewFromOptions(*dm,obj,t);if (*ierr) return;
+  FREECHAR(type,t);
+}
diff --git a/src/dm/label/dmlabel.c b/src/dm/label/dmlabel.c
index 9045fa1e97a..32b6ebf090e 100644
--- a/src/dm/label/dmlabel.c
+++ b/src/dm/label/dmlabel.c
@@ -86,7 +86,12 @@ static PetscErrorCode DMLabelMakeValid_Private(DMLabel label, PetscInt v)
       ierr = PetscBTSet(label->bt, point - label->pStart);CHKERRQ(ierr);
     }
   }
-  ierr = ISCreateGeneral(PETSC_COMM_SELF, label->stratumSizes[v], pointArray, PETSC_OWN_POINTER, &is);CHKERRQ(ierr);
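+  /* If the points form a contiguous range, a stride IS avoids storing every index */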
+  if (label->stratumSizes[v] > 0 && pointArray[label->stratumSizes[v]-1] == pointArray[0] + label->stratumSizes[v]-1) {
+    ierr = ISCreateStride(PETSC_COMM_SELF, label->stratumSizes[v], pointArray[0], 1, &is);CHKERRQ(ierr);
+    ierr = PetscFree(pointArray);CHKERRQ(ierr);
+  } else {
+    ierr = ISCreateGeneral(PETSC_COMM_SELF, label->stratumSizes[v], pointArray, PETSC_OWN_POINTER, &is);CHKERRQ(ierr);
+  }
   ierr = PetscObjectSetName((PetscObject) is, "indices");CHKERRQ(ierr);
   label->points[v]  = is;
   label->validIS[v] = PETSC_TRUE;
diff --git a/src/docs/tao_tex/manual/cover.tex b/src/docs/tao_tex/manual/cover.tex
index d1252bf129d..e4473c45605 100644
--- a/src/docs/tao_tex/manual/cover.tex
+++ b/src/docs/tao_tex/manual/cover.tex
@@ -41,7 +41,7 @@
 
 \vspace*{0.5in}
 \noindent Prepared by \\
-{\bf Alp Dener \\ Adam Denchfield \\ Todd Munson \\ Jason Sarich \\ Stefan Wild \\ Steven Benson \\ Lois Curfman McInnes}\\
+{\bf Alp Dener \\ Adam Denchfield \\ Hansol Suh \\ Todd Munson \\ Jason Sarich \\ Stefan Wild \\ Steven Benson \\ Lois Curfman McInnes}\\
 
 \vspace*{30pt}
 \noindent September 2019
diff --git a/src/docs/tao_tex/manual/part2b.tex b/src/docs/tao_tex/manual/part2b.tex
index 73d0153a0de..d7cf2793175 100644
--- a/src/docs/tao_tex/manual/part2b.tex
+++ b/src/docs/tao_tex/manual/part2b.tex
@@ -1395,3 +1395,61 @@ \subsection{Interior-Point Newton's Method}\label{sec:bqpip}
 This method also requires the solution of systems of linear equations,
 whose solver can be accessed and modified 
 with the command {\tt Tao\-Get\-KSP()}.
+
+\section{Constrained Solvers}
+
+Constrained solvers solve optimization problems of the form
+\[
+\begin{array}{ll}
+\displaystyle \min_{x} & f(x) \\
+\mbox{subject to} & g(x) = c
+\end{array}
+\]
+where $g_i(x) = c_i$ for $i = 1,\ldots,n$ are the equality constraints.
+
+\subsection{Alternating Direction Method of Multipliers}
+
+ADMM is an algorithm intended to blend the decomposability
+of dual ascent with the superior convergence properties of the
+method of multipliers~\cite{boyd}.
+The algorithm solves problems of the form
+\[
+\begin{array}{ll}
+\displaystyle \min_{x} & f(x) + g(z) \\ 
+\mbox{subject to} & Ax + Bz = c
+\end{array}
+\]
+where $x \in \Re^n$, $z \in \Re^m$, $A \in \Re^{p \times n}$, $B \in \Re^{p \times m}$, and $c \in \Re^p$.
+Essentially, ADMM is a wrapper over two TAO solvers, one for $f(x)$ and one for $g(z)$.
+With the method of multipliers, one can form the augmented Lagrangian
+\begin{equation}
+L_{\rho}(x,z,y) = f(x) + g(z) + y^T(Ax+Bz-c) + (\rho/2)||Ax+Bz-c||_2^2
+\end{equation}
+Then, ADMM consists of the iterations
+\begin{equation}
+x^{k+1} := \text{argmin}_x\, L_{\rho}(x,z^k,y^k)
+\end{equation}
+\begin{equation}
+z^{k+1} := \text{argmin}_z\, L_{\rho}(x^{k+1},z,y^k)
+\end{equation}
+\begin{equation}
+y^{k+1} := y^k + \rho(Ax^{k+1}+Bz^{k+1}-c)
+\end{equation}
+In certain formulations of ADMM, the update for $z^{k+1}$ may have a closed-form solution.
+Currently ADMM provides one default implementation for $z^{k+1}$, which is soft thresholding.
+It can be selected with either {\tt TaoADMMSetRegularizerType\_ADMM()} or
+\texttt{-tao\_admm\_regularizer\_type}.
+The user can also set the spectral penalty value, $\rho$, with either {\tt TaoADMMSetSpectralPenalty()} or
+\texttt{-tao\_admm\_spectral\_penalty}.
+Currently, the user can use
+\begin{itemize}
+\item {\tt TaoADMMSetMisfitObjectiveAndGradientRoutine()}
+\item {\tt TaoADMMSetRegularizerObjectiveAndGradientRoutine()}
+\item {\tt TaoADMMSetMisfitHessianRoutine()}
+\item {\tt TaoADMMSetRegularizerHessianRoutine()}
+\end{itemize}
+Any other combination of routines is currently not supported. Hessian matrices can be either constant or non-constant,
+which can be indicated via {\tt TaoADMMSetMisfitHessianChangeStatus()} and {\tt TaoADMMSetRegularizerHessianChangeStatus()}.
+Also, in certain cases the Hessian of the augmented Lagrangian may become nearly singular, depending on
+$\rho$, which may itself change depending on the \texttt{-tao\_admm\_dual\_update} setting.
+This issue can be prevented with {\tt TaoADMMSetMinimumSpectralPenalty()}.
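+
+As an illustration of a closed-form $z$ update (a standard ADMM special case, not a TAO-specific statement), consider the regularizer $g(z) = \lambda\|z\|_1$ with the constraint $x - z = 0$, that is, $A = I$, $B = -I$, and $c = 0$. The $z$ update then reduces to elementwise soft thresholding,
+\begin{equation}
+z^{k+1} := S_{\lambda/\rho}\left(x^{k+1} + y^k/\rho\right), \qquad S_{\kappa}(v) = \mathrm{sign}(v)\,\max(|v|-\kappa,0),
+\end{equation}
+which is the kind of update the default soft-threshold implementation provides.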
diff --git a/src/docs/tao_tex/tao.bib b/src/docs/tao_tex/tao.bib
index 68c9b5e0531..a2d3a4397e0 100644
--- a/src/docs/tao_tex/tao.bib
+++ b/src/docs/tao_tex/tao.bib
@@ -544,6 +544,17 @@ @TechReport(
     Number = "ANL-95/48"
     )
 
+@Article{boyd,
+  title = "Distributed optimization and statistical learning via the alternating direction method of multipliers",
+  author= "Boyd, Stephen and Parikh, Neal and Chu, Eric and Peleato, Borja and Eckstein, Jonathan and others",
+  journal= "Foundations and Trends{\textregistered} in Machine learning",
+  volume= "3",
+  number= "1",
+  pages= "1-122",
+  year= "2011",
+  publisher= "Now Publishers, Inc."
+}
+
 
 
 
diff --git a/src/docs/tex/manual/developers.tex b/src/docs/tex/manual/developers.tex
index ca837a9e70c..ff589c142b9 100644
--- a/src/docs/tex/manual/developers.tex
+++ b/src/docs/tex/manual/developers.tex
@@ -2170,7 +2170,7 @@ \section{Using the Test Harness for Regression Testing}
 # Approximate time (not incl. build time): 429 sec
 #
 # To rerun failed tests:
-#     /opt/local/bin/gmake -f gmakefile test search='ts_tutorials-ex11_adv_2d_quad_0 sys_classes_viewer_tests-ex4_4 ts_tutorials-ex11_adv_2d_quad'
+#     /opt/local/bin/gmake -f gmakefile test test-fail=1
 \end{outputlisting}
 This output indicates that three tests have failed and should be investigated.
 \item Rerun one of the failed tests, with additional verbosity.
@@ -2238,6 +2238,172 @@ \subsection{Additional Tips}
 \begin{bashlisting}
 config/report_tests.py -t 5
 \end{bashlisting}
+
+\subsection{Querying the Tests}
+
+PETSc has thousands of tests.  As can be imagined, managing such a large test
+suite can be challenging.  To enable exploring the test system more thoroughly,
+there exists the script 
+\begin{bashlisting}
+config/query_tests.py
+\end{bashlisting}
+This script by default generates a listing of files suitable for invocation by
+\trl{gmakefile.test}.  An example invocation would be:
+\begin{bashlisting}
+config/query_tests.py requires '*MPI_PROCESS_SHARED_MEMORY*'
+\end{bashlisting}
+The first argument is the field and can be any field that is part of the normal
+test language specification.
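+For instance, an analogous (purely illustrative) query on a different field, here assuming the \trl{args} field, could look like \trl{config/query_tests.py args '*ksp_monitor*'}.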
+
+By default, the search is based on a given configure specification as
+encapsulated by specifying \trl{PETSC_ARCH}.  Technically speaking,
+\trl{gmakegentest.py} writes out a Python pickle file of the dictionary
+structure used by \trl{gmakegentest.py}.  With the \trl{--use-source} argument
+to this script, the entire set of test cases can be searched.
+
+More interestingly, \trl{query_tests.py} is meant to be used from IPython to
+enable rich, interactive exploration of the data structures.  See the
+\trl{do_query} method in \trl{query_tests.py} for more details.  Here is an
+example session:
+\begin{bashlisting}
+In [1]: import sys, os; sys.path.append('config')
+
+In [2]: import query_tests
+
+In [3]: dataDict=query_tests.walktree('src/vec') # Do only vec tests
+
+In [4]: srcdir=os.path.join(os.path.abspath(os.curdir),'src') # Need for correct labels
+
+In [5]: invDict=query_tests.get_inverse_dictionary(dataDict,'requires',srcdir)
+
+In [6]: results=query_tests.query(invDict,'cuda')
+
+In [7]: print(results)
+['vec_vec_tests-ex22_cuda', 'vec_vec_tests-ex22_cuda_sf', 'vec_vec_tests-ex23_cuda', 'vec_vec_tests-ex24_cuda', 'vec_vec_tests-ex28_2_cuda', 'vec_vec_tests-ex28_cuda', 'vec_vec_tests-ex34_cuda', 'vec_vec_tests-ex38_cuda', 'vec_vec_tests-ex4_cuda', 'vec_vec_tests-ex4_cuda2', 'vec_vec_tests-ex43_cuda', 'vec_vec_tests-ex44_cuda', 'vec_vec_tutorials-ex1_2_cuda', 'vec_vec_tutorials-ex1_cuda']
+\end{bashlisting}
+As can be seen, with the dictionaries available within ipython, a rich querying
+of the data within the source files can be performed.
+
+As always, to see the full usage of the python script \trl{query_tests.py}, use
+the help flag:  \trl{query_tests.py --help}.
+
+%----------------------------------------------------------------------
+\cleardoublepage
+\chapter{Makefiles}
+\label{ch_makefiles}
+
+This chapter describes the design of the PETSc makefiles, which are
+key to managing code portability across a wide variety of UNIX and Windows systems.
+
+
+
+\section{Makefile Commands} \label{sec_common}
+
+The directory
+\trl{${PETSC_DIR}/lib/petsc/conf}
+contains virtually all
+makefile commands and customizations to enable portability across
+different architectures.  Most makefile commands for maintaining the
+PETSc system are defined in the file
+\trl{${PETSC_DIR}/lib/petsc/conf/rules}.
+These commands, which process all appropriate files within the
+directory of execution, include
+\begin{itemize}
+\item \trl{libs} - Update the PETSc libraries
+\item \trl{clean} - Remove garbage files as well as libraries 
+\end{itemize}
+
+The \trl{tree} command enables the user to execute a particular action
+within a directory and all of its subdirectories.  The action is specified
+by \trl{ACTION=[action]}, where \trl{action} is one of the basic commands
+listed above. 
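+For example, an invocation along the lines of
+\begin{bashlisting}
+make ACTION=libs tree
+\end{bashlisting}
+executed from a library source directory (this is an illustrative sketch, not an exhaustive description of the target) would update the library using the source in that directory and all of its subdirectories.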
+
+\section{Customized Makefiles}
+\label{sec_custom}
+
+The directory \trl{${PETSC_DIR}/} contains a subdirectory for each
+architecture that holds machine-specific information, enabling the
+portability of our makefile system; these directories are
+\trl{${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf}.
+Each architecture directory contains
+two makefiles:
+\begin{itemize}
+\item \trl{petscvariables} - definitions of the compilers, linkers, etc.
+\item \trl{petscrules} - some build rules specific to this machine.
+\end{itemize}
+These files are generated automatically when you run \trl{./configure}.
+
+These are included by the architecture-independent makefiles located in
+\trl{${PETSC_DIR}/lib/petsc/conf}.
+
+\section{PETSc Flags}
+\label{sec_makeflags}
+
+PETSc has several flags that determine how the source code will be
+compiled.  The default flags for particular versions are specified in
+\trl{${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h}.
+The flags include
+\begin{itemize}
+\item \trl{PETSC_USE_DEBUG} - The PETSc debugging options are activated. We
+      recommend always using this except when doing performance analysis or production runs. \findex{PETSC_USE_DEBUG}
+\item \trl{PETSC_USE_COMPLEX} - The version with scalars represented
+      as complex numbers is used. \findex{PETSC_USE_COMPLEX}
+\end{itemize}
+
+\section{Sample Makefiles}
+
+
+Some additional variables that can be used in the makefile are defined
+as follows:
+\begin{tightitemize}
+\item \lstinline{CFLAGS, FFLAGS} - user-specified additional options for the C compiler and
+        Fortran compiler.
+\item \lstinline{CPPFLAGS, FPPFLAGS} - user-specified additional flags for the C preprocessor
+        and Fortran preprocessor.
+\item \lstinline{CLINKER, FLINKER} - the C and Fortran linkers.
+\item \lstinline{RM} - the remove command for deleting files.
+\end{tightitemize}
+
+Figure \ref{fig_make3} contains a makefile that maintains a PETSc
+library.  
+\begin{figure}[H]
+\begin{makelisting}
+ALL: lib
+
+CFLAGS   =
+SOURCEC  = sp1wd.c spinver.c spnd.c spqmd.c sprcm.c
+SOURCEF  = degree.F  fnroot.F genqmd.F qmdqt.F rcm.F fn1wd.F gen1wd.F \
+          genrcm.F qmdrch.F rootls.F fndsep.F gennd.F qmdmrg.F qmdupd.F
+SOURCEH  =
+LIBBASE  = libpetscmat
+MANSEC   = Mat
+
+include ${PETSC_DIR}/lib/petsc/conf/variables
+include ${PETSC_DIR}/lib/petsc/conf/rules
+\end{makelisting}
+\caption{Sample PETSc Makefile for Library Maintenance}
+\label{fig_make3}
+\end{figure}
+
+The library's name is \trl{libpetscmat.a}, and the source files being added
+to it are indicated by \lstinline{SOURCEC} (for C files) and \lstinline{SOURCEF} (for
+Fortran files).
+
+The variable \lstinline{MANSEC} indicates that any manual pages generated
+from this source should be included in the \lstinline{Mat} section.
+
+\section{Limitations}
+
+This approach to portable makefiles has some minor limitations, including
+the following:
+\begin{itemize}
+\item Each makefile must be called ``makefile''.
+\item Each makefile can maintain at most one archive library.
+\end{itemize}
+
+%------------------------------------------------------------------
+
+
 \bibliographystyle{plain}
 \bibliography{../petsc,../petscapp}
 
diff --git a/src/docs/tex/manual/part1.tex b/src/docs/tex/manual/part1.tex
index 9a075ba9989..22020ef220f 100644
--- a/src/docs/tex/manual/part1.tex
+++ b/src/docs/tex/manual/part1.tex
@@ -1,4 +1,3 @@
-
 % --------------------------------------------------------------------
 %
 %                            PART 1
@@ -172,7 +171,7 @@ \section{Suggested Reading}
 such as vectors, matrices, index sets, linear and nonlinear
 solvers, and graphics.  Part III describes a variety of useful
 information, including profiling, the options database, viewers, error
-handling, makefiles, and some details of
+handling, and some details of
 PETSc design.
 
 \nocite{efficient}
@@ -535,6 +534,8 @@ \subsection*{Error Checking}
 \begin{figure}[H]
   {
     \begin{outputlisting}
+ $ cd $PETSC_DIR/src/ksp/ksp/examples/tutorials
+ $ make ex3
  $ mpiexec -n 1 ./ex3 -m 100000
  [0]PETSC ERROR: --------------------- Error Message --------------------------------
  [0]PETSC ERROR: Out of memory. This could be due to allocating
@@ -625,8 +626,8 @@ \subsection*{Compiling and Running Programs}
 
 Figure~\ref{fig_exrun} illustrates compiling and running a PETSc program
 using MPICH on an OS X laptop.  Note that different machines will have
-compilation commands as determined by the configuration process.  See Chapter \ref{ch_makefiles}
-for a discussion about compiling PETSc programs.
+compilation commands as determined by the configuration process.  See Section \ref{application}
+for a discussion about how to compile your PETSc programs.
 Users who are experiencing difficulties linking PETSc programs should
 refer to the FAQ on the PETSc website
 \href{https://www.mcs.anl.gov/petsc}{https://www.mcs.anl.gov/petsc} or
@@ -635,7 +636,8 @@ \subsection*{Compiling and Running Programs}
 \begin{figure}[H]
 {
  \begin{outputlisting}
-$ make ex2
+ $ cd $PETSC_DIR/src/ksp/ksp/examples/tutorials
+ $ make ex2
 /Users/patrick/petsc/arch-darwin-double-debug/bin/mpicc -o ex2.o -c -Wall -Wwrite-strings -Wno-strict-aliasing -Wno-unknown-pragmas -Qunused-arguments -fvisibility=hidden -g3   -I/Users/patrick/petsc/include -I/Users/patrick/petsc/arch-darwin-double-debug/include -I/opt/X11/include -I/opt/local/include    `pwd`/ex2.c
 /Users/patrick/petsc/arch-darwin-double-debug/bin/mpicc -Wl,-multiply_defined,suppress -Wl,-multiply_defined -Wl,suppress -Wl,-commons,use_dylibs -Wl,-search_paths_first -Wl,-multiply_defined,suppress -Wl,-multiply_defined -Wl,suppress -Wl,-commons,use_dylibs -Wl,-search_paths_first    -Wall -Wwrite-strings -Wno-strict-aliasing -Wno-unknown-pragmas -Qunused-arguments -fvisibility=hidden -g3  -o ex2 ex2.o  -Wl,-rpath,/Users/patrick/petsc/arch-darwin-double-debug/lib -L/Users/patrick/petsc/arch-darwin-double-debug/lib  -lpetsc -Wl,-rpath,/Users/patrick/petsc/arch-darwin-double-debug/lib -lf2clapack -lf2cblas -Wl,-rpath,/opt/X11/lib -L/opt/X11/lib -lX11 -lssl -lcrypto -Wl,-rpath,/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/clang/7.0.2/lib/darwin -L/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/clang/7.0.2/lib/darwin -lmpifort -lgfortran -Wl,-rpath,/opt/local/lib/gcc5/gcc/x86_64-apple-darwin14/5.3.0 -L/opt/local/lib/gcc5/gcc/x86_64-apple-darwin14/5.3.0 -Wl,-rpath,/opt/local/lib/gcc5 -L/opt/local/lib/gcc5 -lgfortran -lgcc_ext.10.5 -lquadmath -lm -lclang_rt.osx -lmpicxx -lc++ -Wl,-rpath,/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.2/lib/darwin -L/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.2/lib/darwin -lclang_rt.osx -Wl,-rpath,/Users/patrick/petsc/arch-darwin-double-debug/lib -L/Users/patrick/petsc/arch-darwin-double-debug/lib -ldl -lmpi -lpmpi -lSystem -Wl,-rpath,/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.2/lib/darwin -L/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.2/lib/darwin -lclang_rt.osx -ldl
 /bin/rm -f ex2.o
@@ -718,7 +720,8 @@ \subsection*{Compiling and Running Programs}
 \label{fig_exprof}
 \end{figure}
 
-\subsection*{Writing Application Codes with PETSc}
+\section{Writing Application Codes with PETSc}
+\label{application}
 
 The examples throughout the library demonstrate the software usage
 and can serve as templates for developing
@@ -727,7 +730,7 @@ \subsection*{Writing Application Codes with PETSc}
 \trl{${PETSC_DIR}/src//examples/tutorials}
 where \trl{}
 denotes any of the PETSc libraries (listed in the following
-section), such as \trl{SNES} or \trl{KSP}.
+section), such as \trl{SNES}, \trl{KSP}, or \trl{TS}.
 The manual pages located at \trl{${PETSC_DIR}/docs/index.htm} or 
 \href{https://www.mcs.anl.gov/petsc/documentation/}{https://www.mcs.anl.gov/petsc/documentation/}
 provide links (organized by both routine names and concepts) to the tutorial examples.
@@ -736,14 +739,27 @@ \subsection*{Writing Application Codes with PETSc}
 following procedure:
 \begin{tightenumerate}
 \item Install and test PETSc according to the instructions at the PETSc web site.
-\item Copy one of the many PETSc examples in the directory
-      that corresponds to the class of problem of interest (e.g.,
-      for linear solvers, see \trl{${PETSC_DIR}/src/ksp/ksp/examples/tutorials}).
-\item Copy the corresponding makefile within the example directory;
-      compile and run the example program.
+\item Make a working directory for your source code: for example, \trl{mkdir $HOME/application}
+\item Change to that working directory; for example, \trl{cd $HOME/application}
+\item Copy one of the examples in the directory
+      that corresponds to the class of problem of interest into your working directory, for example, \trl{cp $PETSC_DIR/src/snes/examples/tutorials/ex19.c ex19.c}
+\item Copy \$PETSC\_DIR/share/petsc/Makefile.user to your working directory, for example, \trl{cp $PETSC_DIR/share/petsc/Makefile.user Makefile}
+\item Compile and run the example program, for example, \trl{make ex19; ./ex19}
 \item Use the example program as a starting point for developing a custom code.
 \end{tightenumerate}
 
+We highly recommend against the following approach, since it requires changing your makefile for each new configuration/computing system.
+However, if you do not wish to include any PETSc utilities in your makefile,
+you can use the following commands in the PETSc root directory to get the information
+needed by your makefile:
+\begin{bashlisting}
+make getlinklibs getincludedirs getcflags getcxxflags getfortranflags getccompiler getfortrancompiler getcxxcompiler
+\end{bashlisting}
+All the libraries listed need to be linked into your executable and the
+include directories and flags need to be passed to the compiler.
+Usually this is done with \lstinline{CFLAGS=} and
+\lstinline{FFLAGS=} in your makefile.
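+
+As a rough sketch of this manual approach (illustrative only; it assumes these targets print nothing but the requested information, and the application file names are hypothetical), one might capture the output and use it directly:
+\begin{bashlisting}
+cd $PETSC_DIR
+PCC=$(make -s getccompiler)
+PETSC_INCLUDES=$(make -s getincludedirs)
+PETSC_CFLAGS=$(make -s getcflags)
+PETSC_LIBS=$(make -s getlinklibs)
+cd $HOME/application
+$PCC $PETSC_CFLAGS $PETSC_INCLUDES -c app.c
+$PCC -o app app.o $PETSC_LIBS
+\end{bashlisting}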
+
 %---------------------------------------------------------------------
 
 \section{Citing PETSc}
diff --git a/src/docs/tex/manual/part2.tex b/src/docs/tex/manual/part2.tex
index a5629af2b4f..fad280271d1 100644
--- a/src/docs/tex/manual/part2.tex
+++ b/src/docs/tex/manual/part2.tex
@@ -660,8 +660,8 @@ \subsection{Local/Global Vectors and Scatters}
 a local vector with correct ghost point values.
 This scatter may be done with the commands
 \begin{lstlisting}
-DMDALocalToLocalBegin(DM da,Vec l1,InsertMode iora,Vec l2);
-DMDALocalToLocalEnd(DM da,Vec l1,InsertMode iora,Vec l2);
+DMLocalToLocalBegin(DM da,Vec l1,InsertMode iora,Vec l2);
+DMLocalToLocalEnd(DM da,Vec l1,InsertMode iora,Vec l2);
 \end{lstlisting}
 Since both local vectors, \lstinline{l1} and \lstinline{l2},
 must be compatible with the distributed array, \lstinline{da}, they should be
@@ -2533,10 +2533,9 @@ \subsection{Understanding the Operator's Spectrum}
 drawing scatter plots of the eigenvalues. \findex{PetscDrawSP*()}
 
 The eigenvalues may also be computed and displayed graphically with the options
-data base commands \trl{-ksp_plot_eigenvalues} and \trl{-ksp_plot_eigenvalues_explicitly}.  \findex{-ksp_plot_eigenvalues} \findex{-ksp_plot_eigenvalues_explicitly}
+data base commands \trl{-ksp_view_eigenvalues draw} and \trl{-ksp_view_eigenvalues_explicitly draw}.  \findex{-ksp_view_eigenvalues} \findex{-ksp_view_eigenvalues_explicitly}
 Or they can be dumped to the screen in ASCII text via
-\trl{-ksp_compute_eigenvalues} and \trl{-ksp_compute_eigenvalues_explicitly}.
-\findex{-ksp_compute_eigenvalues} \findex{-ksp_compute_eigenvalues_explicitly}
+\trl{-ksp_view_eigenvalues} and \trl{-ksp_view_eigenvalues_explicitly}.
 \sindex{eigenvalues} \sindex{spectrum} \sindex{Arnoldi} \sindex{Lanczos}
 
 \subsection{Other KSP Options}
@@ -2952,25 +2951,39 @@ \subsection{Algebraic Multigrid (AMG) Preconditioners}
 For instance, \lstinline{MatSetValuesBlocked} works with AIJ matrices.
 
 GAMG provides unsmoothed aggregation (\trl{-pc_gamg_agg_nsmooths 0}) and smoothed aggregation (\trl{-pc_gamg_agg_nsmooths 1} or \lstinline{PCGAMGSetNSmooths(pc,1)}).
-Smoothed aggregation is recommended for symmetric positive definite
-systems.
+Smoothed aggregation (SA) is recommended for symmetric positive
+definite systems.
 Unsmoothed aggregation can be useful for asymmetric
 problems and problems where highest eigen estimates are problematic.
-If poor convergence rates are observed using the smoothed version one can test unsmoothed aggregation.
-The parameters for the eigen estimator can be set with the prefix \trl{gamg_est}.
-For example CG is a much better KSP type than the default GMRES if your problem is symmetric positive definite; one can specify this with \trl{-gamg_est_ksp_type cg}, prepending any solver prefix that has been added to the solver.
+If poor convergence rates are observed using the smoothed version one
+can test unsmoothed aggregation.
+
+{\bf Eigenvalue estimates:}
+The parameters for the KSP eigen estimator, used for SA, can be set with \trl{-pc_gamg_esteig_ksp_max_it} and \trl{-pc_gamg_esteig_ksp_type}.
+For example, CG generally converges to the highest eigenvalue faster than GMRES (the default
+for KSP) if your problem is symmetric positive definite.
+One can specify CG with \trl{-pc_gamg_esteig_ksp_type cg}.
+The default for \trl{-pc_gamg_esteig_ksp_max_it} is 10, which we have
+found is pretty safe with a (default) safety factor of 1.1.
+One can specify the range of real eigenvalues, in the same way that
+one can for Chebyshev KSP solvers (smoothers), with
+\trl{-pc_gamg_eigenvalues }.
+GAMG sets the MG smoother type to chebyshev by default.
+By default, GAMG uses its eigen estimate, if it has one, for Chebyshev
+smoothers if the smoother uses Jacobi preconditioning.
+This can be overridden with \trl{-pc_gamg_use_sa_esteig  }.
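+For example, an illustrative (not prescriptive) combination of these options is \trl{-pc_type gamg -pc_gamg_agg_nsmooths 1 -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10}.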
 
 AMG methods requires knowledge of the number of degrees of
 freedom per vertex, the default is one (a scalar problem).
 Vector problems like elasticity should set the block size of the matrix appropriately with \trl{-mat_block_size bs}  or \lstinline{MatSetBlockSize(mat,bs)}.
 Equations must be ordered in ``vertex-major'' ordering (e.g., $x_1,y_1,z_1,x_2,y_2,...$).
 
-Smoothed aggregation requires an explicit representation of the (near) null space of the operator for optimal performance.
+{\bf Near null space:} Smoothed aggregation requires an explicit representation of the (near) null space of the operator for optimal performance.
 One can provide an orthonormal set of null space vectors with \lstinline{MatSetNearNullSpace()}.
 The vector of all ones is the default, for each variable given by the block size (e.g., the translational rigid  body modes).
 For elasticity, where rotational rigid body modes are required to complete the near null space you can use \lstinline{MatNullSpaceCreateRigidBody()} to create the null space vectors and then \lstinline{MatSetNearNullSpace()}.
 
-The GAMG framework provides for reducing the number of active processes on coarse grids to reduce communication costs when there is not enough parallelism to keep relative communication costs down.
+{\bf Coarse grid data model:} The GAMG framework provides for reducing the number of active processes on coarse grids to reduce communication costs when there is not enough parallelism to keep relative communication costs down.
 Most AMG solver reduce to just one active process on the coarsest grid (the PETSc MG framework also supports redundantly solving the coarse grid on all processes to potentially reduce communication  costs), although this forcing to one process can be overridden if one wishes to use a parallel coarse grid solver.
 GAMG generalizes this by reducing the active number of processes on other coarse grids as well.
 GAMG will select the number of active processors by fitting the desired number of equation per process (set with \trl{-pc_gamg_process_eq_limit <50>,}) at each level given that size of each level.
@@ -2979,9 +2992,9 @@ \subsection{Algebraic Multigrid (AMG) Preconditioners}
 As mentioned multigrid generally coarsens the problem until it is small enough to be solved with an exact solver (eg, LU or SVD) in a relatively small time.
 GAMG will stop coarsening when the number of equation on a grid falls below at threshold give by \trl{-pc_gamg_coarse_eq_limit <50>,}.
 
-There are several options to provide parameters to the coarsening algorithm and parallel data layout.
+{\bf Coarse grid parameters:} There are several options to provide parameters to the coarsening algorithm and parallel data layout.
 Run a code that uses GAMG with \trl{-help} to get full listing of GAMG parameters with short parameter descriptions.
-The rate of coarsening is critical in AMG performance -- too slow of coarsening will result in an overly expensive solver per iteration and too fast coarsening will result in decrease in the convergence rate. \trl{-pc_gamg_threshold <0>} and  \trl{-pc_gamg_square_graph <1>,} are the primary parameters that control coarsening rates, which is very important for AMG perforamance.
+The rate of coarsening is critical in AMG performance -- too slow of coarsening will result in an overly expensive solver per iteration and too fast coarsening will result in decrease in the convergence rate. \trl{-pc_gamg_threshold <0>} and  \trl{-pc_gamg_square_graph <1>,} are the primary parameters that control coarsening rates, which is very important for AMG performance.
 A greedy maximal independent set (MIS) algorithm is used in coarsening.
 Squaring the graph implements so called MIS-2, the root vertex in an aggregate is more than two edges away from another root vertex, instead of more than one in MIS.
 The threshold parameter sets a normalized threshold for which edges are removed from the MIS graph, thereby coarsening slower.
@@ -3004,8 +3017,8 @@ \subsection{Algebraic Multigrid (AMG) Preconditioners}
 particularly well suited to jumps in coefficients but it is not a
 perfect solution), highly incompressible elasticity, not to mention
 ill-posed problems, and many others. For Grad-Div and Curl-Curl operators,
-you may want to try the Auxilary Maxwell Space (AMS, \trl{-pc_type hypre -pc_hypre_set_type ams})
-or the Auxilary Divergence Space (ADS, \trl{-pc_type hypre -pc_hypre_set_type ads}) solvers.
+you may want to try the Auxiliary-space Maxwell Solver (AMS, \trl{-pc_type hypre -pc_hypre_type ams})
+or the Auxiliary-space Divergence Solver (ADS, \trl{-pc_type hypre -pc_hypre_type ads}) solvers.
 These solvers need some additional information on the underlying mesh;
 specifically, AMS needs the discrete gradient operator, which can be specified via
 \lstinline{PCHYPRESetDiscreteGradient()}. In addition to the discrete gradient, ADS also needs the
@@ -3063,7 +3076,7 @@ \subsection{Algebraic Multigrid (AMG) Preconditioners}
 Likewise if your \lstinline{MatPtAP} time is small and your convergence rate is not ideal then you could decrease the coarsening rate.
 
 PETSc's AMG solver is constructed as a framework for developers to
-easily add AMG capabilities, like a new AMG methods or anAMG component like a matrix
+easily add AMG capabilities, like a new AMG methods or an AMG component like a matrix
 triple product. Contact us directly if you are interested in contributing.
 
 %----------------------------------------------------------------------
@@ -3337,7 +3350,7 @@ \subsection{Multigrid Preconditioners} \sindex{multigrid} \label{sec_mg}
 PCMGGetCoarseSolve(PC pc,KSP *ksp);
 \end{lstlisting}
 and set the appropriate options in \lstinline{ksp}. Similarly, the
-smoothers are setcontrolled by first calling
+smoothers are controlled by first calling
 \begin{lstlisting}
 PCMGGetSmoother(PC pc,PetscInt level,KSP *ksp);
 \end{lstlisting}
@@ -4409,7 +4422,7 @@ \subsection{Checking Accuracy of Derivatives}
 to compare the matrices at several points.  Although not exhaustive,
 this test will generally catch obvious problems.  One can compare the
 elements of the two matrices by using the option \trl{
--snes_test_display} \findex{-snes_test_jacobian_display -snes_test_jacobian_display_threshold}, which causes the two
+-snes_test_jacobian_view} \findex{-snes_test_jacobian_view}, which causes the two
 matrices to be printed to the screen.  \sindex{Jacobian, testing}
 
 Another means for verifying the correctness of a code for Jacobian
@@ -6099,7 +6112,7 @@ \section{Evaluating Residuals} \sindex{Residual Evaluation}
 
   PetscSectionGetDof(section, points[p], &dof);
   PetscSectionGetOffset(section, points[p], &off);
-  for=(d = 0; d <= dof; ++d) {
+  for (d = 0; d <= dof; ++d) {
     myfunc(a[off+d]);
   }
 }
@@ -6146,98 +6159,128 @@ \subsection{Application flow}
 The general flow of an application code using \lstinline{DMNetwork} is as follows:
 
 \begin{enumerate}
-  \item Create a network object and a ``component'' library:
+  \item Create a network object
 \begin{lstlisting}
-DMNetworkCreate(MPI_Comm, DM*);
+DMNetworkCreate(MPI_Comm comm, DM *dm);
 \end{lstlisting}
-  creates an empty network object. A ``component'' is specific application data at a node/edge of the network required for its residual evaluation. For example, components could be resistor, inductor data for circuit applications, edge weights for graph problems, generator/transmission line data for power grids. Components are registered by calling
+  \item Create components and register them with the network. A ``component'' is specific application data at a vertex/edge of the network required for its residual evaluation. For example, components could be resistor, inductor data for circuit applications, edge weights for graph problems, generator/transmission line data for power grids. Components are registered by calling
 \begin{lstlisting}
-DMNetworkRegisterComponent(DM,const char* name, PetscInt size, PetscInt* compkey);
+DMNetworkRegisterComponent(DM dm, const char *name, size_t size, PetscInt *compkey);
 \end{lstlisting}
-  Here, \lstinline{name} is the component name,\lstinline{size} is the size of component data type, and \lstinline{compkey} is an integer key that can be used for
-  setting/getting the component at a node or an edge.
-  \item Set network size (number of nodes, edges), edge connectivity.
-  \item Set the bare layout (graph) of the network
+  Here, \lstinline{name} is the component name, \lstinline{size} is the size of the component data type, and \lstinline{compkey} is an integer key that can be used for setting/getting the component at a vertex or an edge. DMNetwork currently allows up to 16 components to be registered for a network.
+
+  \item A DMNetwork can consist of one or more {\em physical} subnetworks. When multiple physical subnetworks are used, one can (optionally) provide {\em coupling information between subnetworks}, which consists only of edges connecting the vertices of the physical subnetworks. The topological sizes of the network are set by calling
 \begin{lstlisting}
-DMNetworkLayoutSetUp(DM dm);
+DMNetworkSetSizes(DM dm, PetscInt Nsubnet, PetscInt nV[], PetscInt nE[], PetscInt NsubnetCouple, PetscInt nec[]);
 \end{lstlisting}
+Here, \lstinline{Nsubnet} is the number of subnetworks, \lstinline{nV} and \lstinline{nE} are the numbers of vertices and edges for each subnetwork, \lstinline{NsubnetCouple} is the number of pairs of subnetworks that are coupled, and \lstinline{nec} is the number of edges coupling each subnetwork pair. DMNetwork assumes coupling between the subnetworks through coupling edges. For a single network, set \lstinline{Nsubnet} = 1, \lstinline{NsubnetCouple} = 0, and \lstinline{nec} = NULL. Note that the coupling between subnetworks is still an experimental feature and under development.
+  \item The next step is to set up the connectivity for the network. This is done by specifying the connectivity within each subnetwork (\lstinline{edgelist}) and between subnetworks (\lstinline{edgelistCouple}).
 \begin{lstlisting}
-DMNetworkSetSizes(DM dm, PetscInt nnodes, PetscInt nedges, PetscInt Nnodes, PetscInt Nedges);
+DMNetworkSetEdgeList(DM dm, PetscInt *edgelist[], PetscInt *edgelistCouple[]);
 \end{lstlisting}
+Each element of \lstinline{edgelist} is an integer array of size 2*nE[i] containing the edge connectivity for the i-th subnetwork. Each element in \lstinline{edgelistCouple} has four entries: \{from subnetwork number (net.id), from subnetwork vertex number (vertex.id), to subnetwork number (net.id), to subnetwork vertex number (vertex.id)\}.
+
+As an example, consider a network comprising 2 subnetworks that are coupled. The topological information for the network is as follows: \\
+subnetwork 0: v0 --- v1 --- v2 --- v3 \\
+subnetwork 1: v1 --- v2 --- v0 \\
+coupling between subnetworks: subnetwork 1: v2 --- subnetwork 0: v0  \\
+The \lstinline{edgelist} and \lstinline{edgelistCouple} for this network are \\
+edgelist[0] = \{0,1,1,2,2,3\} \\
+edgelist[1] = \{1,2,2,0\} \\
+edgelistCouple[0] = \{1,2,0,0\}.
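+A short code sketch that sets up subnetwork 0 above as a standalone, uncoupled network is shown after this list.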
+
+\item The next step is to have DMNetwork create a bare layout (graph) of the network by calling
 \begin{lstlisting}
-DMNetworkSetEdgeList(DM dm, int edgeconns[]);
+DMNetworkLayoutSetUp(DM dm);
 \end{lstlisting}
-  \item Set components and number of variables for nodes/edges.
+
+\item After completing the previous steps, the network graph is set up, but no physics is associated yet. This is done by adding the components and setting the number of variables for the vertices and edges.
+
+A component is added to a vertex/edge by calling
 \begin{lstlisting}
-DMNetworkAddComponent(DM dm, PetscInt p,PetscInt component,void* component);
+DMNetworkAddComponent(DM dm, PetscInt p, PetscInt compkey, void* compdata);
 \end{lstlisting}
-  Multiple components can be added at a node/edge.
+where \lstinline{p} is the network vertex/edge point in the range obtained by either DMNetworkGetEdgeRange or DMNetworkGetVertexRange, \lstinline{compkey} is the component key returned when registering the component (DMNetworkRegisterComponent), and \lstinline{compdata} holds the data for the component. DMNetwork supports setting multiple components (max. 36) at a vertex/edge.
+
+DMNetwork currently assumes the component data to be stored in a contiguous chunk of memory. As such, it does not do any packing/unpacking before/after the component data gets distributed. Any such serialization (packing/unpacking) should be done by the application.
+ 
+The number of variables at each vertex/edge is set by
 \begin{lstlisting}
-DMNetworkSetNumVariables(DM dm,PetscInt p,PetscInt nvar);
-\end{lstlisting}
+DMNetworkSetNumVariables(DM dm, PetscInt p, PetscInt nvar);
+\end{lstlisting} or
 \begin{lstlisting}
-DMNetworkAddNumVariables(DM dm,PetscInt p,PetscInt nvar);
+DMNetworkAddNumVariables(DM dm, PetscInt p, PetscInt nvar);
 \end{lstlisting}
-  \item Signal network ready to be distributed.
+
+\item Set up network internal data structures.
+  %Signal network ready to be distributed.
 \begin{lstlisting}
 DMSetUp(DM dm);
 \end{lstlisting}
-  \item Distribute the network (also moves components attached with nodes/edges)
+  \item Distribute the network (also moves components attached to vertices/edges) to multiple processors.
 \begin{lstlisting}
-DMNetworkDistribute(DM oldDM, const char partitioner[], PetscInt overlap,DM *distDM);
+DMNetworkDistribute(DM dm, const char partitioner[], PetscInt overlap, DM *distDM);
+\end{lstlisting}
+\item Associate the \lstinline{DM} with a PETSc solver:
+\begin{lstlisting}
+KSPSetDM(KSP ksp, DM dm) or SNESSetDM(SNES snes, DM dm) or TSSetDM(TS ts, DM dm).
 \end{lstlisting}
-\item Hook up the \lstinline{DM} with the solver: \lstinline{KSPSetDM()}, \lstinline{SNESSetDM()}, \lstinline{TSSetDM()}
 \end{enumerate}
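+
+The following is a minimal sketch of the topology-related steps above, using subnetwork 0 of the earlier example as a single uncoupled network (error checking is omitted, and passing NULL for the partitioner is assumed to select the default):
+\begin{lstlisting}
+DM       dm, dmdist;
+PetscInt nV[1] = {4}, nE[1] = {3};     /* 4 vertices, 3 edges */
+PetscInt edges[6] = {0,1, 1,2, 2,3};   /* v0 -- v1 -- v2 -- v3 */
+PetscInt *edgelist[1];
+
+edgelist[0] = edges;
+DMNetworkCreate(PETSC_COMM_WORLD, &dm);
+DMNetworkSetSizes(dm, 1, nV, nE, 0, NULL);
+DMNetworkSetEdgeList(dm, edgelist, NULL);
+DMNetworkLayoutSetUp(dm);
+/* ... register components, add them to vertices/edges, and set the number of variables ... */
+DMSetUp(dm);
+DMNetworkDistribute(dm, NULL, 0, &dmdist);
+\end{lstlisting}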
 
 \subsection{Utility functions}
-  \lstinline{DMNetwork} provides functions for obtaining iterators for nodes/edges, checking the ``ghost''
-  status of a node (vertex), and retrieving local/global indices of node/edge variables for inserting
-  elements in vectors/matrices.
+  \lstinline{DMNetwork} provides several utility functions for operations on the network. The most commonly used functions are:
+obtaining iterators for vertices/edges,
+\begin{lstlisting}
+DMNetworkGetEdgeRange(DM dm, PetscInt *eStart, PetscInt *eEnd);
+\end{lstlisting}
 \begin{lstlisting}
-DMNetworkGetEdgeRange(DM dm,PetscInt *eStart,PetscInt *eEnd);
+DMNetworkGetVertexRange(DM dm, PetscInt *vStart, PetscInt *vEnd);
 \end{lstlisting}
 \begin{lstlisting}
-DMNetworkGetVertexRange(DM dm,PetscInt *vStart, PetscInt *vEnd);
+DMNetworkGetSubnetworkInfo(DM dm, PetscInt netid, PetscInt *nv, PetscInt *ne, const PetscInt **vtx, const PetscInt **edge);
 \end{lstlisting}
+Checking the ``ghost'' status of a vertex,
 \begin{lstlisting}
-DMNetworkIsGhostVertex(DM dm,PetscInt p,PetscBool *isghost);
+DMNetworkIsGhostVertex(DM dm, PetscInt p, PetscBool *isghost);
 \end{lstlisting}
+and retrieving local/global indices of vertex/edge variables for inserting
+  elements in vectors/matrices.
 \begin{lstlisting}
-DMNetworkGetVariableOffset(DM dm,PetscInt p,PetscInt *offset);
+DMNetworkGetVariableOffset(DM dm, PetscInt p, PetscInt *offset);
 \end{lstlisting}
 \begin{lstlisting}
-DMNetworkGetVariableGlobalOffset(DM dm,PetscInt p,PetscInt *offsetg);
+DMNetworkGetVariableGlobalOffset(DM dm, PetscInt p, PetscInt *offsetg);
 \end{lstlisting}
-In network applications, one frequently needs to find the supporting edges for a node or
-the connecting nodes covering an edge. These can be obtained by the following two routines.
+In network applications, one frequently needs to find the supporting edges for a vertex or
+the connecting vertices covering an edge. These can be obtained by the following two routines.
 \begin{lstlisting}
-DMNetworkGetConnectedVertices(DM dm,PetscInt edge,const PetscInt *vertices[]);
+DMNetworkGetConnectedVertices(DM dm, PetscInt edge, const PetscInt *vertices[]);
 \end{lstlisting}
 \begin{lstlisting}
-DMNetworkGetSupportingEdges(DM dm,PetscInt vertex,PetscInt *nedges,const PetscInt *edges[]) ;
+DMNetworkGetSupportingEdges(DM dm, PetscInt vertex, PetscInt *nedges, const PetscInt *edges[]);
 \end{lstlisting}
 
 \subsection{Retrieving components}
-The components set at nodes/edges be accessed by
+The components set at a vertex/edge can be accessed by
 \begin{lstlisting}
-DMNetworkGetComponent(DM dm,PetscInt p, PetscInt compnum, PetscInt *compkey, void** component);
+DMNetworkGetComponent(DM dm, PetscInt p, PetscInt compnum, PetscInt *compkey, void** component);
 \end{lstlisting}
-\lstinline{compkey} is the key set by \lstinline{DMNetworkRegisterComponent}. An example of accessing and retrieving the components at nodes is:
+\lstinline{compkey} is the key set by \lstinline{DMNetworkRegisterComponent}. An example of accessing and retrieving the components at vertices is:
 
 \begin{lstlisting}
 PetscInt Start, End, numcomps,key,v,compnum;
 void *component;
 
 DMNetworkGetVertexRange(dm, &Start, &End);
-for(v=Start; v  < End; v++) {
+for (v=Start; v  < End; v++) {
   DMNetworkGetNumComponents(dm,v, &numcomps);
-  for(compnum=0; compnum < numcomps;compnum++) {
+  for (compnum=0; compnum < numcomps;compnum++) {
     DMNetworkGetComponent(dm,v,compnum, &key, &component);
     compdata = (UserCompDataType)(component);
   }
 }
 \end{lstlisting}
-The above example does not explicitly make use the component key. It is used when different component types are set at different nodes. In this case, the {compkey} is the used to differentiate the component type.
+The above example does not explicitly make use of the component key. It is used when different component types are set at different vertices. In this case, \lstinline{compkey} is used to differentiate the component type.
 
 % --------------------------------------------------------------------
 %                            PART 3
@@ -9066,268 +9109,6 @@ \subsection{Base}
 
 After configure tests have been run, various kinds of output can be generated.A \#define statement can be added to the configure header using \trl{addDefine()}, and \trl{addTypedef()} and \trl{addPrototype()} also put information in this header file. Using \trl{addMakeMacro()} and \trl{addMakeRule()} will add make macros and rules to the output makefiles specified in the framework. In addition we provide \trl{addSubstitution()} and \trl{addArgumentSubstitution()} to mimic the bahvior of Autoconf if necessary. The object may define a \trl{headerPrefix} member, which will be appended, followed by an underscore, to every define which is output from it. Similarly, a \trl{substPrefix} can be defined which applies to every substitution from the object. Typedefs and function prototypes are placed in a separate header in order to accommodate languages such as Fortran whose preprocessor can sometimes fail at these statements.
 
-%----------------------------------------------------------------------
-\cleardoublepage
-\chapter{Makefiles}
-\label{ch_makefiles}
-
-This chapter describes the design of the PETSc makefiles, which are
-key to managing code portability across a wide variety of UNIX and Windows systems.
-
-\section{Makefile System}
-
-To make a program named \trl{ex1}, one may use the command
-\begin{bashlisting}
-make PETSC_ARCH=arch ex1
-\end{bashlisting}
-which will compile the
-example and automatically link the appropriate libraries.  The
-architecture, \trl{arch}, is one of \trl{solaris, rs6000, IRIX,
-hpux, arch-darwin-double-debug}, etc., as determined during the PETSc configuration process.
-Note that when using command line options with \trl{make} (as illustrated above),
-one must {\em not} place spaces on either side of the ``\trl{=}'' signs.
-The variable \trl{PETSC_ARCH} can also be set as an environment
-variable.
-
-\subsection{Makefile Commands} \label{sec_common}
-
-The directory
-\trl{${PETSC_DIR}/lib/petsc/conf}
-contains virtually all
-makefile commands and customizations to enable portability across
-different architectures.  Most makefile commands for maintaining the
-PETSc system are defined in the file
-\trl{${PETSC_DIR}/lib/petsc/conf/rules}
-These commands, which process all appropriate files within the
-directory of execution, include
-\begin{itemize}
-\item \trl{lib} - Update the PETSc libraries based on the source code
-      in the directory.
-\item \trl{libfast} - Update the libraries faster.  Since
-      \trl{libfast} recompiles all source files in the directory at once,
-      rather than individually, this command saves time when many files
-      must be compiled.
-\item \trl{clean} - Remove garbage files.
-\end{itemize}
-
-The \trl{tree} command enables the user to execute a particular action
-within a directory and all of its subdirectories.  The action is specified
-by \trl{ACTION=[action]}, where \trl{action} is one of the basic commands
-listed above. For example, if the command
-\trl{make ACTION=lib tree}
-were executed from the directory
-\trl{${PETSC_DIR}/src/ksp/ksp}
-the debugging library for all Krylov subspace solvers would be built.
-
-\subsection{Customized Makefiles}
-\label{sec_custom}
-
-The directory \trl{${PETSC_DIR}/} contains a subdirectory for each
-architecture that contains machine-specific information, enabling the
-portability of our makefile system, these are
-\trl{${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf}
- Each architecture directory contains
-two makefiles:
-\begin{itemize}
-\item \trl{petscvariables} - definitions of the compilers, linkers, etc.
-\item \trl{petscrules} - some build rules specific to this machine.
-\end{itemize}
-These files are generated automatically when you run \trl{./configure}.
-
-These are included by the architecture-independent makefiles located in
-\trl{${PETSC_DIR}/lib/petsc/conf}.
-
-\section{PETSc Flags}
-\label{sec_makeflags}
-
-PETSc has several flags that determine how the source code will be
-compiled.  The default flags for particular versions are specified in
-\trl{${PETSC_DIR}/${PETSC_ARCH}/include/petscconf.h}.
-The flags include
-\begin{itemize}
-\item \trl{PETSC_USE_DEBUG} - The PETSc debugging options are activated. We
-      recommend always using this except when doing performance analysis or production runs. \findex{PETSC_USE_DEBUG}
-\item \trl{PETSC_USE_COMPLEX} - The version with scalars represented
-      as complex numbers is used. \findex{PETSC_USE_COMPLEX}
-\item \trl{PETSC_USE_LOG} - Various monitoring statistics on floating-point operations
-      and message-passing activity are kept. \findex{PETSC_USE_LOG}
-\end{itemize}
-
-\subsection{Sample Makefiles}
-
-Maintaining portable PETSc makefiles is very simple.
-
-A first example, shown in Figure \ref{fig_make1}, is a ``minimum'' makefile for maintaining
-a single program that uses the PETSc libraries.
-The most important lines in this makefile are the lines starting with \trl{include}:
-\begin{makelisting}
-include ${PETSC_DIR}/lib/petsc/conf/variables
-include ${PETSC_DIR}/lib/petsc/conf/rules
-\end{makelisting}
-These lines include other makefiles that provide the needed definitions
-and rules for the particular base PETSc installation (specified by
-\trl{PETSC_DIR}) and architecture (specified by
-\trl{PETSC_ARCH}).  (See \ref{sec_running} for information on
-setting these environment variables.)  As listed in the sample
-makefile, the appropriate \trl{include} files are already fully
-specified; the user should {\em not} alter these statements
-within the makefile.
-
-\begin{figure}[H]
-\begin{makelisting}
-   ALL: ex2
-   CFLAGS     =
-   FFLAGS     =
-   CPPFLAGS   =
-   FPPFLAGS   =
-   CLEANFILES = ex2
-
-   include ${PETSC_DIR}/lib/petsc/conf/variables
-   include ${PETSC_DIR}/lib/petsc/conf/rules
-
-   ex2: ex2.o
-           ${CLINKER} -o ex2 ex2.o  ${PETSC_LIB}
-           ${RM} ex2.o
-\end{makelisting}
-\caption{Sample PETSc Makefile for a Single Program}
-\label{fig_make1}
-\end{figure}
-
-Users who wish to manage the compile process themselves
-and {\em not} use the rules PETSc uses for compiling programs
-should include only \trl{variables}. That is, use something like
-the makefile shown in Figure \ref{fig_make1var}.
-
-\begin{figure}[H]
-\begin{makelisting}
-   ALL: ex2
-   CFLAGS   = ${PETSC_CC_INCLUDES}
-   FFLAGS   = ${PETSC_FC_INCLUDES}
-
-   include ${PETSC_DIR}/lib/petsc/conf/variables
-
-   ex2: ex2.o
-           mylinkercommand -o ex2 ex2.o  ${PETSC_LIB}
-\end{makelisting}
-\caption{Sample PETSc Makefile that does {\bf not} use PETSc's rules for compiling}
-\label{fig_make1var}
-\end{figure}
-
-The variables \trl{PETSC_CC_INCLUDES}, \trl{PETSC_FC_INCLUDES}
-and \trl{PETSC_LIB} are defined by the included \trl{${PETSC_DIR}/lib/petsc/conf/variables} file.
-
-If you do not wish to include any PETSc makefiles in your makefile,
-you can use the following commands in the PETSc root directory to get the information
-needed by your makefile:
-\begin{bashlisting}
-make getlinklibs getincludedirs getpetscflags
-\end{bashlisting}
-All the libraries listed need to be linked into your executable and the
-include directories and flags need to be passed to the compiler.
-Usually this is done with \lstinline{CFLAGS=} and
-\lstinline{FFLAGS=} in your makefile.
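-
-A minimal sketch of this workflow, run from the PETSc root directory:
-\begin{bashlisting}
-cd ${PETSC_DIR}
-make getincludedirs   # paste the output into your compiler flags (CFLAGS/FFLAGS)
-make getpetscflags    # paste the output into your compiler flags as well
-make getlinklibs      # paste the output into your link line
-\end{bashlisting}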
-
-\findex{PETSC_LIB}
-Note that the variable \trl{PETSC_LIB} (as listed on the link
-line in the above makefile) specifies {\em all} of the various PETSc
-libraries in the appropriate order for correct linking.  Users who
-employ only a specific PETSc library can use alternative variables
-such as \trl{PETSC_SYS_LIB}, \trl{PETSC_VEC_LIB},
-\trl{PETSC_MAT_LIB}, \trl{PETSC_DM_LIB},
-\trl{PETSC_KSP_LIB}, \trl{PETSC_SNES_LIB} or
-\trl{PETSC_TS_LIB}.
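-
-For instance, a link rule for a code that uses only the KSP solvers (and the
-components they depend on) might look like the following sketch, where
-\trl{ex5} is simply a placeholder target name:
-\begin{makelisting}
-   ex5: ex5.o
-           ${CLINKER} -o ex5 ex5.o  ${PETSC_KSP_LIB}
-           ${RM} ex5.o
-\end{makelisting}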
-
-The second sample makefile, given in Figure~\ref{fig_make2},
-controls the generation of several example programs.
-
-\begin{figure}[H]
-\begin{makelisting}
-   CFLAGS   =
-   FFLAGS   =
-   CPPFLAGS =
-   FPPFLAGS =
-
-   include ${PETSC_DIR}/lib/petsc/conf/variables
-   include ${PETSC_DIR}/lib/petsc/conf/rules
-
-   ex1: ex1.o
-        -${CLINKER} -o ex1 ex1.o  ${PETSC_LIB}
-        ${RM} ex1.o
-   ex2: ex2.o
-        -${CLINKER} -o ex2 ex2.o  ${PETSC_LIB}
-        ${RM} ex2.o
-   ex3: ex3.o
-        -${FLINKER} -o ex3 ex3.o  ${PETSC_LIB}
-        ${RM} ex3.o
-   ex4: ex4.o
-        -${CLINKER} -o ex4 ex4.o  ${PETSC_LIB}
-        ${RM} ex4.o
-
-   runex1:
-        -@${MPIEXEC} ex1
-   runex2:
-        -@${MPIEXEC} -n 2 ./ex2 -mat_seqdense -options_left
-   runex3:
-        -@${MPIEXEC} ex3 -v -log_view
-   runex4:
-        -@${MPIEXEC} -n 4 ./ex4 -trdump
-\end{makelisting}
-\caption{Sample PETSc Makefile for Several Example Programs}
-\label{fig_make2}
-\end{figure}
-
-Again, the most important lines in this makefile are the \lstinline{include}
-lines that include the files defining all of the macro variables.
-Some additional variables that can be used in the makefile are defined
-as follows:
-\begin{tightitemize}
-\item \lstinline{CFLAGS, FFLAGS} - user-specified additional options for the C compiler and
-        Fortran compiler.
-\item \lstinline{CPPFLAGS, FPPFLAGS} - user-specified additional flags for the C preprocessor
-        and Fortran preprocessor.
-\item \lstinline{CLINKER, FLINKER} - the C and Fortran linkers.
-\item \lstinline{RM} - the remove command for deleting files.
-\end{tightitemize}
-
-Figure \ref{fig_make3} contains a makefile that maintains a PETSc
-library.  Although most users do not need to understand or deal with such
-makefiles, they are also easy to use.
-\begin{figure}[H]
-\begin{makelisting}
-ALL: lib
-
-CFLAGS   =
-SOURCEC  = sp1wd.c spinver.c spnd.c spqmd.c sprcm.c
-SOURCEF  = degree.F  fnroot.F genqmd.F qmdqt.F rcm.F fn1wd.F gen1wd.F \
-          genrcm.F qmdrch.F rootls.F fndsep.F gennd.F qmdmrg.F qmdupd.F
-SOURCEH  =
-LIBBASE  = libpetscmat
-MANSEC   = Mat
-
-include ${PETSC_DIR}/lib/petsc/conf/variables
-include ${PETSC_DIR}/lib/petsc/conf/rules
-\end{makelisting}
-\caption{Sample PETSc Makefile for Library Maintenance}
-\label{fig_make3}
-\end{figure}
-
-The library's name is \trl{libpetscmat.a}, and the source files being added
-to it are indicated by \lstinline{SOURCEC} (for C files) and \lstinline{SOURCEF} (for
-Fortran files).
-
-The variable \lstinline{MANSEC} indicates that any manual pages generated
-from this source should be included in the \lstinline{Mat} section.
-
-\section{Limitations}
-
-This approach to portable makefiles has some minor limitations, including
-the following:
-\begin{itemize}
-\item Each makefile must be called ``makefile''.
-\item Each makefile can maintain at most one archive library.
-\end{itemize}
-
-%------------------------------------------------------------------
 
 \cleardoublepage
 \chapter{Unimportant and Advanced Features of Matrices and Solvers}
@@ -9725,6 +9506,8 @@ \subsection{Understanding test output and more information}
 make -f gmakefile test searchin='tutorials'              # Run all tutorials
 make -f gmakefile test search='ts%' searchin='tutorials' # Run all TS tutorials
 make -f gmakefile test argsearch='cuda'                  # Run examples with cuda in arguments
+make -f gmakefile test test-fail='1'
+make -f gmakefile test query='requires' queryval='*MPI_PROCESS_SHARED_MEMORY*'
 \end{bashlisting}
 
 It is useful before invoking the tests to see what targets will
diff --git a/src/docs/tex/petsc.bib b/src/docs/tex/petsc.bib
index fd9e9e7f2d3..54c7c12d160 100644
--- a/src/docs/tex/petsc.bib
+++ b/src/docs/tex/petsc.bib
@@ -4837,7 +4837,7 @@ @Misc{chombo:web
 }
 
 @article{lanzkron96,
-  author = "P. J. Kanzkron and D. J. Rose and J. T. Wilkes",
+  author = "P. J. Lanzkron and D. J. Rose and J. T. Wilkes",
   title = "An Analysis of Approximate Nonlinear Elimination",
   journal = "SIAM J. Sci. Comput.",
   volume = 17,
@@ -9974,6 +9974,73 @@ @article{GreenbaumPtakStrakos1996
   publisher = {SIAM}
 }
 
+@article{NangiaPatankarBhalla2019,
+  title   = {A {DLM} immersed boundary method based wave-structure interaction solver for high density ratio multiphase flows},
+  author  = {Nishant Nangia and Neelesh A Patankar and Amneet Pal Singh Bhalla},
+  journal = {Journal of Computational Physics},
+  volume  = {398},
+  pages   = {108804},
+  year    = {2019}
+}
+@article{BhallaNangiaDafnakisBraccoMattiazzo2019,
+  title   = {Simulating water-entry/exit problems using {Eulerian}-{Lagrangian} and fully-{Eulerian} fictitious domain methods within the open-source {IBAMR} library},
+  author  = {Amneet Pal Singh Bhalla and Nishant Nangia and Panagiotis Dafnakis and Giovanni Bracco and Giuliana Mattiazzo},
+  journal = {Applied Ocean Research},
+  note    = {In press},
+  year    = {2019}
+}
+@article{BhallaGriffithPatankarDonev2013,
+  title   = {A minimally-resolved immersed boundary model for reaction-diffusion problems},
+  author  = {Amneet Pal Singh Bhalla and Boyce E. Griffith and Neelesh A. Patankar and Alexander Donev},
+  journal = {Journal of Chemical Physics},
+  volume  = {139},
+  number  = {21},
+  pages   = {214112},
+  year    = {2013}
+}
+@article{BhallaBaleGriffithPatankar2014,
+  title   = {Fully resolved immersed electrohydrodynamics for particle motion, electrolocation, and self-propulsion},
+  author  = {Amneet Pal Singh Bhalla and R. Bale and Boyce E. Griffith and Neelesh A. Patankar},
+  journal = {Journal of Computational Physics},
+  volume  = {256},
+  pages   = {88--108},
+  year    = {2014}
+}
+@article{UsabiagaKallemovDelmotteBhallaGriffithDonev2016,
+  title   = {Hydrodynamics of suspensions of passive and active rigid particles: a rigid multiblob approach},
+  author  = {F. Balboa Usabiaga and B. Kallemov and B. Delmotte and Amneet Pal Singh Bhalla and Boyce E. Griffith and Alexander Donev},
+  journal = {Communications in Applied Mathematics and Computational Science},
+  volume  = {11},
+  number  = {2},
+  pages   = {217--296},
+  year    = {2016}
+}
+@article{KouBhallaGriffithPandolfinoKahrilasPatankar2015,
+  title   = {A fully resolved active musculo-mechanical model for esophageal transport},
+  author  = {W. Kou and Amneet Pal Singh Bhalla and Boyce E. Griffith and J.E. Pandolfino and P.J. Kahrilas and Neelesh A. Patankar},
+  journal = {Journal of Computational Physics},
+  volume  = {298},
+  pages   = {446--465},
+  year    = {2015}
+}
+@article{BaleNevelnBhallaMacIverPatankar2015,
+  title   = {Convergent evolution of mechanically optimal locomotion in aquatic invertebrates and vertebrates},
+  author  = {R. Bale and I. D. Neveln and Amneet Pal Singh Bhalla and M.A. MacIver and Neelesh A. Patankar},
+  journal = {PLoS Biology},
+  volume  = {13},
+  number  = {4},
+  pages   = {1--22},
+  year    = {2015}
+}
+@article{BaleHaoBhallaPatankar2014,
+  title   = {Energy efficiency and allometry of movement of swimming and flying animals},
+  author  = {Rahul Bale and Max Hao and Amneet Pal Singh Bhalla and Neelesh A Patankar},
+  journal = {Proceedings of the National Academy of Sciences},
+  volume  = {111},
+  number  = {21},
+  pages   = {7517--7521},
+  year    = {2014}
+}
 @article{BhallaBaleGriffithPatankar2013,
   title   = {A unified mathematical framework and an adaptive numerical method for fluid--structure interaction with rigid, deforming, and elastic bodies},
   author  = {Amneet Pal Singh Bhalla and Rahul Bale and Boyce E Griffith and Neelesh A Patankar},
@@ -10649,13 +10716,25 @@ @article{FabienKnepleyRiviere2019
   year    = {2019}
 }
 @article{FabienKnepleyRiviere2019B,
-  title   = {Families of Interior Penalty Hybridizable discontinuous Galerkin methods for second order elliptic problems^},
+  title   = {Families of Interior Penalty Hybridizable discontinuous Galerkin methods for second order elliptic problems},
   author  = {Maurice S. Fabien and Matthew G. Knepley and B\'eatrice M. Rivier\'e},
   journal = {Journal of Numerical Mathematics},
   url     = {https://doi.org/10.1515/jnma-2019-0027},
   doi     = {10.1515/jnma-2019-0027},
   year    = {2019}
 }
+@article{Fabien2019,
+  title   = {A {GPU}-Accelerated Hybridizable Discontinuous {Galerkin} Method for Linear Elasticity},
+  author  = {Maurice S. Fabien},
+  journal = {Communications in Computational Physics},
+  volume  = {27},
+  number  = {2},
+  pages   = {513--545},
+  issn    = {1991-7120},
+  doi     = {https://doi.org/10.4208/cicp.OA-2018-0235},
+  url     = {http://global-sci.org/intro/article_detail/cicp/13457.html},
+  year    = {2019},
+}
 @article{ThompsonRiviereKnepley2018,
   title   = {An Implicit Discontinuous Galerkin Method For Modeling Acute Edema and Resuscitation In The Small Intestine},
   author  = {Travis Thompson and B\'eatrice M. Rivier\'e and Matthew G. Knepley},
diff --git a/src/docs/tex/petscapp.bib b/src/docs/tex/petscapp.bib
index 2525a388b70..864ba1f0291 100644
--- a/src/docs/tex/petscapp.bib
+++ b/src/docs/tex/petscapp.bib
@@ -68,8 +68,7 @@ @techreport{tspaper
   number      = {ANL/MCS-P5061-0114},
   institution = {ANL},
   month       = {January},
-  year        = {2014},
-  note        = {Submitted to {TOMS}}
+  year        = {2014}
 }
 @TechReport{bb2008,
   author = "Tomasz Blachowicz and Bartlomiej Baron",
@@ -2811,10 +2810,14 @@ @InProceedings{texas98
 % LiteralHTML:  

CFD

% LiteralHTML: @article{FarrellMitchellWechsung2018, - title = {An augmented {Lagrangian} preconditioner for the {3D} stationary incompressible {Navier-Stokes} equations at high {Reynolds} number}, - author = {Patrick E Farrell and Lawrence Mitchell and Florian Wechsung}, - eprint = {1810.03315}, - year = {2018} + title = {An augmented {Lagrangian} preconditioner for the {3D} stationary incompressible {Navier-Stokes} equations at high {Reynolds} number}, + author = {Patrick E Farrell and Lawrence Mitchell and Florian Wechsung}, + journal = {SIAM Journal on Scientific Computing}, + volume = {41}, + number = {5}, + eprint = {1810.03315}, + pages = {A3073-A3096}, + year = {2019} } @article{HeVuikKlaij2017, title = {Block-preconditioners for the incompressible {Navier}--{Stokes} equations discretized by a finite volume method}, @@ -5446,14 +5449,13 @@ @Unpublished{fe-software-web-page @Misc{petsc-web-page, author = {Satish Balay and Shrirang Abhyankar and Mark~F. Adams and Jed Brown and Peter Brune and Kris Buschelman and Lisandro Dalcin and Alp Dener and Victor Eijkhout and William~D. Gropp - and Dinesh Kaushik and Matthew~G. Knepley and Dave~A. May and Lois Curfman McInnes + and Dmitry Karpeyev and Dinesh Kaushik and Matthew~G. Knepley and Dave~A. May and Lois Curfman McInnes and Richard Tran Mills and Todd Munson and Karl Rupp and Patrick Sanan and Barry~F. Smith and Stefano Zampini and Hong Zhang and Hong Zhang}, title = {{PETS}c {W}eb page}, - key = {Balay}, - url = {http://www.mcs.anl.gov/petsc}, - howpublished = {\url{http://www.mcs.anl.gov/petsc}}, - year = {2017} + url = {https://www.mcs.anl.gov/petsc}, + howpublished = {\url{https://www.mcs.anl.gov/petsc}}, + year = {2019} } @Unpublished{petsc-debian-package, author = "Adam C. Powell, IV", @@ -5824,13 +5826,13 @@ @InProceedings{efficient @TechReport{petsc-user-ref, author = {Satish Balay and Shrirang Abhyankar and Mark~F. Adams and Jed Brown and Peter Brune and Kris Buschelman and Lisandro Dalcin and Alp Dener and Victor Eijkhout and William~D. Gropp - and Dinesh Kaushik and Matthew~G. Knepley and Dave~A. May and Lois Curfman McInnes + and Dmitry Karpeyev and Dinesh Kaushik and Matthew~G. Knepley and Dave~A. May and Lois Curfman McInnes and Richard Tran Mills and Todd Munson and Karl Rupp and Patrick Sanan and Barry~F. Smith and Stefano Zampini and Hong Zhang and Hong Zhang}, - title = {{PETS}c Users Manual: Revision 3.12}, + title = {{PETS}c Users Manual}, institution = {Argonne National Laboratory}, year = 2019, - Number = {ANL-95/11 - Rev 3.12} + number = {ANL-95/11 - Revision 3.12} } % url = {http://www.mcs.anl.gov/petsc} @TechReport{petsc-developers, @@ -5986,6 +5988,12 @@ @techreport{AD-PETSC-TM % LiteralHTML: % LiteralHTML:

Algorithm design and analysis

% LiteralHTML: +@phdthesis{KlotzThesis2019, + author = {Thomas S. Klotz}, + title = {Numerical Analysis of Nonlinear Boundary Integral Equations Arising in Molecular Biology}, + school = {Rice University}, + year = {2019} +} @article{KlotzBardhanKnepley2018, title = {Efficient Evaluation of Ellipsoidal Harmonics for Potential Modeling}, author = {Thomas S. Klotz and Jaydeep Bardhan and Matthew G. Knepley}, diff --git a/src/docs/website/developers/index.html b/src/docs/website/developers/index.html index 8d18f743894..328d0dd56f1 100644 --- a/src/docs/website/developers/index.html +++ b/src/docs/website/developers/index.html @@ -120,7 +120,8 @@

Building documentation:

  • Sowing: - a text processing tool developed by Bill Gropp. This produces the PETSc manual pages see sowing documentation
  • + a text processing tool developed by Bill Gropp. + This produces the PETSc manual pages, see sowing documentation
  • C2html: diff --git a/src/docs/website/dmnetwork/case_studies.html b/src/docs/website/dmnetwork/case_studies.html new file mode 100644 index 00000000000..fb4f00c97ef --- /dev/null +++ b/src/docs/website/dmnetwork/case_studies.html @@ -0,0 +1,86 @@ + + + + + + PETSc DMNetwork: Case Studies + + + + + + + +
    + + + +
    + +
    +

    Case Study 1: Power Flow Simulation

    + +

    + A contingency analysis done on the U.S. power + grid system using an AC power flow model developed using DMNetwork. + The input file was obtained from the MatPower package + (matpower.org). The total numbers of buses, + generators, loads, and branches are 82,000, + 13,419, 37,755, and 104,121, respectively. The total number of unknowns solved + is around half a million. The scaling results show that doubling the number + of compute nodes doubled the speedup. Increasing the number of cores from 128 to 2048 + provided a speedup of twelve. + Poster +

    +
    + +
    + +
    + +
    +

    Case Study 2: River Flow Simulation

    +

    + A river flow simulation done on the U.S. river + networks using DMNetwork. The input file was obtained from the NHDPlus dataset + (horizon-systems.com). + The total numbers of reaches and junctions are 3,098,638 and 3,036,092, respectively. + The total number of unknowns solved is around half a billion. + Increasing the number of cores from 1,024 to 65,536 provided a speedup of 35. + Poster +

    +
    + +
    + +
    + +
    + +
    + + + diff --git a/src/docs/website/dmnetwork/documents/Abhyankar-etal-2019.pdf b/src/docs/website/dmnetwork/documents/Abhyankar-etal-2019.pdf new file mode 100644 index 00000000000..3cab53984ec Binary files /dev/null and b/src/docs/website/dmnetwork/documents/Abhyankar-etal-2019.pdf differ diff --git a/src/docs/website/dmnetwork/documents/Application1_Betrie_etal-2019_poster.pdf b/src/docs/website/dmnetwork/documents/Application1_Betrie_etal-2019_poster.pdf new file mode 100644 index 00000000000..f87fab2190f Binary files /dev/null and b/src/docs/website/dmnetwork/documents/Application1_Betrie_etal-2019_poster.pdf differ diff --git a/src/docs/website/dmnetwork/documents/Application2_Betrie_etal_2018_slide.pdf b/src/docs/website/dmnetwork/documents/Application2_Betrie_etal_2018_slide.pdf new file mode 100644 index 00000000000..b188100f347 Binary files /dev/null and b/src/docs/website/dmnetwork/documents/Application2_Betrie_etal_2018_slide.pdf differ diff --git a/src/docs/website/dmnetwork/documents/Application4_Werner-etal-2019_paper.pdf b/src/docs/website/dmnetwork/documents/Application4_Werner-etal-2019_paper.pdf new file mode 100644 index 00000000000..82febe219bf Binary files /dev/null and b/src/docs/website/dmnetwork/documents/Application4_Werner-etal-2019_paper.pdf differ diff --git a/src/docs/website/dmnetwork/documents/Application5_Rinaldo-etal-2018_paper.pdf b/src/docs/website/dmnetwork/documents/Application5_Rinaldo-etal-2018_paper.pdf new file mode 100644 index 00000000000..da160e85166 Binary files /dev/null and b/src/docs/website/dmnetwork/documents/Application5_Rinaldo-etal-2018_paper.pdf differ diff --git a/src/docs/website/dmnetwork/images/dmnetwork.png b/src/docs/website/dmnetwork/images/dmnetwork.png new file mode 100644 index 00000000000..208d3a38d0b Binary files /dev/null and b/src/docs/website/dmnetwork/images/dmnetwork.png differ diff --git a/src/docs/website/dmnetwork/images/network.jpg b/src/docs/website/dmnetwork/images/network.jpg new file mode 100644 index 00000000000..d49241ec778 Binary files /dev/null and b/src/docs/website/dmnetwork/images/network.jpg differ diff --git a/src/docs/website/dmnetwork/images/power.png b/src/docs/website/dmnetwork/images/power.png new file mode 100644 index 00000000000..8d5fe9abc54 Binary files /dev/null and b/src/docs/website/dmnetwork/images/power.png differ diff --git a/src/docs/website/dmnetwork/images/river.png b/src/docs/website/dmnetwork/images/river.png new file mode 100644 index 00000000000..53a46fc7c6b Binary files /dev/null and b/src/docs/website/dmnetwork/images/river.png differ diff --git a/src/docs/website/dmnetwork/index.html b/src/docs/website/dmnetwork/index.html new file mode 100644 index 00000000000..ab6fb7f2567 --- /dev/null +++ b/src/docs/website/dmnetwork/index.html @@ -0,0 +1,73 @@ + + + + + + PETSc: DMNetwork + + + + + + + +
    + + + +
    + +

    + PETSc DMNetwork provides a powerful, flexible, and scalable framework for simulation of multiphysics + phenomena over large-scale networked systems such as power, water, gas, transportation, + and telecommunication. It eases the application development cycle by providing the necessary infrastructure through simple abstractions to define and query the network components. +

    + +
    + +
    + +

    + PETSc DMNetwork provides data and topology management, parallelization for multiphysics systems over a network, and hierarchical and composable solvers to exploit the problem structure. The key features of DMNetwork include creating the network layout, + partitioning for efficient parallelism, parallel data movement, utility routines for extracting + connectivity information, and linear, nonlinear, + and time-stepping solvers. The steps for using DMNetwork are illustrated in the figure below and include: + (i) create the network graph, (ii) add physics components and variables to the network elements (edges and vertices), + (iii) distribute the populated network to multiple processors, + and (iv) decompose the domains and associate them with their respective linear, nonlinear, and time-stepping solvers. +

    + +
    + +
    + +
    + +
    + + + diff --git a/src/docs/website/dmnetwork/publications.html b/src/docs/website/dmnetwork/publications.html new file mode 100644 index 00000000000..72c8d862753 --- /dev/null +++ b/src/docs/website/dmnetwork/publications.html @@ -0,0 +1,66 @@ + + + + + + PETSc: Publications + + + + + + + +
    + + + +
    + +
    +

    Overview Materials:

    +
      +
    • Abhyankar S., Betrie G., Maldonado D, McInnes L., Smith B., Zhang H. (2019). PETSc DMNetwork: A Library for Scalable Network PDE-Based Multiphysics Simulation. Transactions on Mathematical Software, accepted. General
    • +
        +
    + +
    +

    Applications:

    +
      +
  • Betrie G., Smith B., Zhang H. (2019). A Scalable Multiphysics Modeling Package For Critical Networked Infrastructures Using PETSc DMNetwork. Argonne Postdoctoral Research and Career Symposium, Lemont, IL, 7 November 2019. Application: Critical Infrastructures
    • +
  • Betrie G., Zhang H., Smith B., Yan E. (2018). A scalable river network simulator for extreme scale computers using the PETSc library. AGU Fall Meeting, Washington, D.C., 10-14 December 2018. Application: River Flow Simulation
    • +
  • Werner A., Duwadi K., Stegmeier N., Hansen T., Kimn J. (2019). Parallel Implementation of AC Optimal Power Flow and Time Constrained Optimal Power Flow using High Performance Computing. In IEEE 9th Annual Computing and Communication Workshop and Conference. Application: Optimal power flow simulation
    • +
  • Rinaldo S., Ceresoli A., Lahaye D., Merlo M., Cvetkovic M., Vitiello S., Fulli G. (2018). Distributing Load Flow Computations Across System Operators Boundaries Using the Newton-Krylov-Schwarz Algorithm Implemented in PETSc. Application: Power flow simulation
    • +
        +
    + +
    + +
    + + + diff --git a/src/docs/website/dmnetwork/tutorials.html b/src/docs/website/dmnetwork/tutorials.html new file mode 100644 index 00000000000..1b610336446 --- /dev/null +++ b/src/docs/website/dmnetwork/tutorials.html @@ -0,0 +1,156 @@ + + + + + + DMNetwork: Hands on + + + + + + + +
    + + + +
    + +
    +

    Example 1: Electric Circuit

    +

    + This example demonstrates the simulation of a linear electric + circuit problem using the DMNetwork interface. Further details:

    + + +

    DO THE FOLLOWING:

    + +
      +
    • Compile ex1.c +
      +            cd petsc/src/ksp/ksp/examples/tutorials/network
      +            make ex1
      +          
      +
    • +
    • Run a 1 processor example and view solution at edges and vertices +
      +            mpiexec -n 1 ./ex1
      +          
      +
    • +
    • Run a 1 processor example with a convergence reason +
      +            mpiexec -n 1 ./ex1 -ksp_converged_reason
      +          
      +
    • +
    • Run with 2 processors with a partitioning option +
      +            mpiexec -n 2 ./ex1  -petscpartitioner_type simple
      +          
      +
    • +
    +
    + +
    +

    Example 2: AC Power Flow

    +

    + This example demonstrates simulation of a nonlinear power flow + in a grid network using the DMNetwork interface. Further details:

    + + +

    DO THE FOLLOWING:

    + +
      +
    • Compile power.c +
      +            cd petsc/src/snes/examples/tutorials/network/power
      +            make power
      +          
      +
    • +
    • Run a 1 processor example and view solution at vertices +
      +            mpiexec -n 1 ./power
      +          
      +
    • +
    • Run with 2 processors with edge and vertex visualization +
      +            mpiexec -n 2 ./power  -dm_view
      +          
      +
    • +
    +
    + +
    +

    Example 3: Water Flow in pipes

    +

    + This example demonstrates simulation of a transient water flow + in a pipe network using the DMNetwork interface. Further details:

    + + +

    DO THE FOLLOWING:

    + +
      +
    • Compile pipes1.c +
      +            cd petsc/src/ts/examples/tutorials/network/wash
      +            make  pipes1
      +          
      +
    • +
    • Run with 2 processors with a partitioning option +
      +            mpiexec -n 2 ./pipes1 -ts_monitor -case 1 -ts_max_steps 1 -petscpartitioner_type
      +            simple -options_left no -viewX
      +          
      +
    • +
    • Run with 3 processors with a different case and more time-stepping options +
      +            mpiexec -n 3 ./pipes1  -ts_monitor -case 2 -ts_max_steps 10 -petscpartitioner_type
      +            simple -options_left no -viewX
      +          
      +
    • +
    +
    + + + +
    + +
    + + + diff --git a/src/docs/website/documentation/changes/21.html b/src/docs/website/documentation/changes/21.html index ff54c6340d9..9bbdd5b3a60 100644 --- a/src/docs/website/documentation/changes/21.html +++ b/src/docs/website/documentation/changes/21.html @@ -208,7 +208,7 @@

    General:

  • Added manual pages for PETSc objects, enums etc.
  • - Added html version of all source code and examples, accessable from + Added html version of all source code and examples, accessible from docs/index.html and the manual pages.
diff --git a/src/docs/website/documentation/changes/311.html b/src/docs/website/documentation/changes/311.html index 850de3dbd40..d8d9d14341f 100644 --- a/src/docs/website/documentation/changes/311.html +++ b/src/docs/website/documentation/changes/311.html @@ -34,6 +34,7 @@

Documentation: Changes: 3.11

Changes
  • dev
  • +
  • 3.12
  • 3.11
  • 3.10
  • 3.9
  • diff --git a/src/docs/website/documentation/changes/312.html b/src/docs/website/documentation/changes/312.html index fd77da69b8f..930c04a54f5 100644 --- a/src/docs/website/documentation/changes/312.html +++ b/src/docs/website/documentation/changes/312.html @@ -3,7 +3,7 @@ - PETSc: Documentation: Changes: 3.11 + PETSc: Documentation: Changes: 3.12 @@ -12,7 +12,7 @@

    PETSc


    @@ -131,6 +131,7 @@

    General:

Configure/Build:

    +
  • C++ dialect is now auto-detected (C++14 first and then C++11). One can disable this check using --with-cxx-dialect=0, or force it to only check for C++11 using --with-cxx-dialect=C++11
  • New option --download-hpddm to enable PCHPDDM and KSPHPDDM

IS:

@@ -236,6 +237,7 @@

PetscViewer:

SYS:

  • Added PetscCheckDupsInt() to check if a PetscInt array has dups.
  • +
  • Added an argument to PetscBinaryRead() and PetscBinarySynchronizedRead() to return the number of items read.

AO:

Sieve:

diff --git a/src/docs/website/documentation/changes/dev.html b/src/docs/website/documentation/changes/dev.html index f1c8c848d6f..26cfa2af72f 100644 --- a/src/docs/website/documentation/changes/dev.html +++ b/src/docs/website/documentation/changes/dev.html @@ -102,32 +102,91 @@

Documentation: Changes: Development

CHANGES in the PETSc Development Version

+

General:

+
    +
  • Change PetscLayoutFindOwner() and PetscLayoutFindOwnerIndex() to return a PetscMPIInt instead of a PetscInt
  • +

Configure/Build:

IS:

+
    +
  • Add ISSetInfo(): set local and global properties of an IS (whether the IS is sorted, unique, a permutation, an interval, or identity).
  • +
  • Add ISGetInfo(): query local and global properties of an IS.
  • +

PetscDraw:

+

PetscSF:

+
    +
  • Fix few bugs in PETSCSFWINDOW when using PETSCSF_WINDOW_SYNC_LOCK or PETSCSF_WINDOW_SYNC_ACTIVE synchronization types.
  • +
  • Add window reusage for PETSCSFWINDOW and support for different creation flavor types. See PetscSFWindowFlavorType man page for details.
  • +

PF:

Vec:

+
    +
  • VecPinToCPU() is deprecated in favor of VecBindToCPU().
  • +

VecScatter:

PetscSection:

+

PetscPartitioner:

+
    +
  • PetscPartitionerPartition() no longer takes a DM as input. Vertex weights can be specified through PetscSection.
  • +
  • Add support for target partition weights for PETSCPARTITIONERSIMPLE, PETSCPARTITIONERPARMETIS and PETSCPARTITIONERPTSCOTCH.
  • +

Mat:

+
    +
  • Improve the performance of MatConvert_AIJ_BAIJ by preallocating the Mat before dispatching to MatConvert_Basic
  • +
  • Change the behavior of MatConvert_AIJ_SBAIJ for block size greater than one, the block structure is now preserved (even for the diagonal entries)
  • +
  • Fix various bugs related with matrix conversions from Hermitian SBAIJ matrices with complex numbers
  • +
  • Add MatPropagateSymmetryOptions to propagate symmetry information from one matrix to another
  • +
  • Fix a bug in MATSUPERLU_DIST interface when MATMPIAIJ with commsize 1 is used
  • +
  • Add MATCHOLMOD support for block solves
  • +
  • --download-suitesparse now uses the official SuiteSparse repository
  • +
  • MatPinToCPU() is deprecated in favor of MatBindToCPU().
  • +
  • Fix MatAXPY for MATSHELL
  • +
  • MatAXPY(Y,0.0,X,DIFFERENT_NONZERO_PATTERN) no longer modifies the nonzero pattern of Y to include that of X
  • +

PC:

+
    +
  • Change the default behavior of PCASM and PCGASM to not automatically switch to PCASMType BASIC if the matrices are symmetric
  • +
  • Change the default behavior of PCCHOLESKY to use nested dissection ordering for AIJ matrix
  • +

KSP:

+
    +
  • Add KSPHPDDMGetDeflationSpace and KSPHPDDMSetDeflationSpace for recycling Krylov methods in KSPHPDDM
  • +

SNES:

+
    +
  • -snes_test_jacobian_display and -snes_test_jacobian_display_threshold are deprecated. -snes_test_jacobian accepts an optional threshold parameter (since v3.10) and -snes_test_jacobian_view should be used in favor of -snes_test_jacobian_display.
  • +

SNESLineSearch:

TS:

DM/DA:

    -
  • DMCopyLabels(): add two additional input flags.
  • +
  • DMCopyLabels(): Add two additional input flags
  • +
  • DMPlexDistribute(): fixed bug associated with vertex partition weights and overlapped meshes
  • +
  • DMPlexDistribute(): vertex partition weights are now computed by summing all the local section dofs in the transitive closure of each cell
  • +
  • Add PetscTabulation to hold function tabulation data
  • +
  • Add DMEnclosureType to describe relations between meshes
  • +
  • Add DMGetEnclosureRelation() and DMGetEnclosurePoint() to discover relations between meshes.
  • +
  • Add DMPolytopeType to describe different cell constructions

DMPlex:

    +
  • The cell type is now stored in a label, so it can be queried. This is useful for interpolation, which needs extra data, and for geometric calculations.
  • Add DMPlexOrientCell() which orients a single cell
  • Add DMPlexCompareOrientations() which compares the cone point order of a single cell with the given reference cone
  • Add DMPlexIsInterpolated() which finds out whether the plex is interpolated
  • Add DMPlexIsInterpolatedCollective() which finds out whether the plex is interpolated on all ranks
  • +
  • Add DMPlexIsDistributed() which finds out whether the plex is distributed
+

DT:

+
    +
  • All tabulated data is now stored in PetscTabulation structures. This changes interfaces in PetscDS, PetscFE, and PetscFV
  • +
  • DefaultTabulation has been renamed to CellTabulation everywhere.
  • +

PetscViewer:

+
    +
  • Add "fieldnum" argument to PetscViewerVTKAddField() to indicate only one field of the vector should be viewed
  • +

SYS:

AO:

Sieve:

diff --git a/src/docs/website/documentation/faq.html b/src/docs/website/documentation/faq.html index 6cf5f6a2f51..28f145621d8 100644 --- a/src/docs/website/documentation/faq.html +++ b/src/docs/website/documentation/faq.html @@ -116,12 +116,13 @@

Usage

  • How do I collect all the values from a parallel PETSc vector into a vector on the zeroth (or any particular) processor?
  • How can I read in or write out a sparse matrix in Matrix Market, Harwell-Boeing, SLAPC or other ASCII format?
  • Does TSSetFromOptions(), SNESSetFromOptions() or KSPSetFromOptions() reset all the parameters I set or how come TS/SNES/KSPSetXXX() don't seem to work?
  • -
  • Can I use my own makefiles or rules for compiling code, rather than PETSc's?
  • +
  • How do I compile and link my own PETSc application codes and can I use my own makefiles or rules for compiling code, rather than PETSc's?
  • Can I use CMake to build my own project that depends on PETSc?
  • How can I put carriage returns in PetscPrintf() statements from Fortran?
  • How can I implement callbacks using C++ class methods?
  • Everyone knows that when you code Newton's method you should compute the function and its Jacobian at the same time. How can one do this in PETSc?
  • How can I use Newton's method Jacobian free? Can I difference a different function than provided with SNESSetFunction()?
  • +
  • Computing the Jacobian or preconditioner is time consuming, is there any way to compute it less often?
  • How can I determine the condition number of a matrix?
  • How can I compute the inverse of a PETSc matrix?
  • How can I compute a Schur complement: Kbb - Kba *inverse(Kaa)*Kab?
  • @@ -133,6 +134,9 @@

    Usage

  • I would like to compute a custom norm for KSP to use as a convergence test or for monitoring?
  • If I have a sequential program can I use a parallel direct solver?
  • TS or SNES produces infeasible (out of domain) solutions or states, how can I prevent this?
  • +
  • Can PETSc work with Hermitian matrices?
  • +
  • How do I efficiently assemble a bunch of very similar matrices (say in a time loop)?
  • +
  • Can one resize or change the size of PETSc matrices or vectors?
  • Execution

    @@ -502,11 +506,6 @@

    How do I begin using PETSc if the software has a version. -
  • - Begin by copying one of the many PETSc examples (in, for example, - petsc/src/ksp/examples/tutorials) and its corresponding makefile. -
  • -
  • See the introductory section of the PETSc users manual for tips on documentation. @@ -941,9 +940,10 @@

    Does TSSetFromOptions(), SNESSetFromOptions() or KS

  • -

    Can I use my own makefiles or rules for compiling code, instead of using PETSc's?

    +

    How do I compile and link my own PETSc application codes and can I use my own makefiles or rules for compiling code, rather than PETSc's?

    - Yes, see the section of the users manual called Makefiles + See the section of the users manual called Writing Application Codes with PETSc. This provides a + simple makefile that can be used to compile user code. You are free to modify this makefile or completely replace it with your own makefile.

    Can I use CMake to build my own project that depends on PETSc?

    @@ -1001,7 +1001,38 @@

    Everyone knows that when you code Newton's method function.

    -

    How can I use Newton's method Jacobian free? Can I difference a different function than provided +

    Computing the Jacobian or preconditioner is time consuming, is there any way to compute it less often?

    + + PETSc has a variety of ways of lagging the computation of the Jacobian or the preconditioner. They are documented in the manual page and users manual. + +
      +
    1. + -snes_lag_jacobian - (SNESSetLagJacobian) how often Jacobian is rebuilt (use -1 to never rebuild, use -2 to rebuild the next time requested and then never again). +
    2. +
    3. + -snes_lag_jacobian_persists - forces lagging of Jacobian through multiple SNES solves (SNESSetLagJacobianPersists), same as passing -2 to -snes_lag_jacobian. By default, each new SNES solve normally triggers a recomputation of the Jacobian +
    4. +
    5. + These are often (but does not need to be) used in combination with -snes_mf_operator which applies the fresh Jacobian matrix free for every matrix-vector product. + Otherwise the out-of-date matrix vector product, computed with the lagged Jacobian will be used. +
    6. +
    + +
      +
    1. + -snes_lag_preconditioner - (SNESSetLagPreconditioner) how often the preconditioner is rebuilt. Note: if you are lagging the Jacobian the system will know the + the matrix has not changed and will not recompute the (same) preconditioner. +
    2. +
    3. + -snes_lag_preconditioner_persists - Preconditioner lags through multiple SNES solves +
    4. +
    + By using KSPMonitorSet() and/or SNESMonitorSet() one can provide code that monitors the convergence rate and automatically triggers an update of the Jacobian or preconditioner based on decreasing convergence of the iterative method. For example, if the number of SNES iterations doubles, one might trigger a new computation of the Jacobian. + + Experimentation is the only general-purpose way to determine which approach is best for your problem. It is also important to experiment on your true problem at the scale you will be solving it, since the performance benefits depend on the exact problem and problem size. +
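    + As a rough sketch (./app is a placeholder for your own executable), these options might be combined as: mpiexec -n 2 ./app -snes_mf_operator -snes_lag_jacobian 5 -snes_lag_preconditioner 5 +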

    How can I use Newton's method Jacobian free? Can I difference a different function than provided with SNESSetFunction()?

    @@ -1266,7 +1297,7 @@

    TS or SNES produces infeasible (out of domain) solution and SNES call SNESSetFunctionDomainError() when the solver passes an infeasible (out of domain) solution or state to your routines.

    - +

    If it occurs for DAEs, it is important to ensure the algebraic constraints are well satisfied, which can prevent "breakdown" later. Thus, one can try using a tight tolerance for SNES(), using a direct @@ -1274,6 +1305,35 @@

    TS or SNES produces infeasible (out of domain) solution tolerances for adaptive time stepping).

    + +

    Can PETSc work with Hermitian matrices?

    + +

    PETSc's support of Hermitian matrices is very limited. Many operations and solvers work for symmetric (MATSBAIJ) matrices and operations on transpose + matrices but there is little direct support for Hermitian matrices and Hermitian transpose (complex conjugate transpose) operations. There is KSPSolveTranspose() for + solving the transpose of a linear system but no KSPSolveHermitian(). Relevant functions include: +

    + +
      +
    1. + MatIsHermitian(), MatIsHermitianKnown(), MatIsStructurallySymmetric(), MatSetOption(Mat,MAT_SYMMETRIC or MAT_HERMITIAN, PetscBool), MatIsSymmetricKnown(), MatIsSymmetric() +
    2. +
    3. + MatMultHermitianTranspose(), MatMultHermitianTransposeAdd() (very limited support) +
    4. +
    5. + MatCreateNormalHermitian(), MatCreateHermitianTranspose() +
    6. +
    + +

    How can I assemble a bunch of similar matrices?

    + +

    You can first add the values common to all the matrices, then use MatStoreValues() to stash the common values. + Each iteration you call MatRetrieveValues(), then set the unique values in a matrix and assemble.

    + +

    Can one resize or change the size of PETSc matrices or vectors?

    + + No, once the vector or matrix sizes have been set and the matrices or vectors are fully usable, one cannot change the size of the matrices or vectors or the number of processors they live on. One may create new vectors and copy, for example using VecScatterCreate(), the old values from the previous vector. +

    Execution

    @@ -1369,7 +1429,7 @@

    Where do I get the input matrices for the examples?< Some examples use ${DATAFILESPATH}/matrices/medium and other files. These test matrices in PETSc binary format can be found at - https://bitbucket.org/petsc/petsc/datafiles + https://gitlab.com/petsc/datafiles

    When I dump some matrices and vectors to binary, I seem to be generating some empty files with .info extensions. What's the deal with these?

    @@ -1901,7 +1961,7 @@

    What does Corrupt argument or Caught signal or SEQV
    • Recommend building PETSc with --download-mpich --with-debugging [debugging is enabled by default]
    • Compile application code with this build of PETSc
    • -
    • run with valgrind using: ${PETSC_DIR}/lib/petsc/bin/petscmpiexec -valgrind -n NPROC PETSCPROGRAMNAME -malloc off PROGRAMOPTIONS
    • +
    • run with valgrind using: ${PETSC_DIR}/lib/petsc/bin/petscmpiexec -valgrind -n NPROC PETSCPROGRAMNAME PROGRAMOPTIONS
    • or invoke valgrind directly with: mpiexec -n NPROC valgrind --tool=memcheck -q --num-callers=20 --log-file=valgrind.log.%p PETSCPROGRAMNAME -malloc off PROGRAMOPTIONS
    diff --git a/src/docs/website/documentation/installation.html b/src/docs/website/documentation/installation.html index 61e125e613f..47516ceb2c4 100644 --- a/src/docs/website/documentation/installation.html +++ b/src/docs/website/documentation/installation.html @@ -1188,10 +1188,14 @@

    Installing on Large Scale DOE Systems

  • It is best not to use built-in modules for external packages (except blas/lapack) because they are often buggy. Most external packages can be built using the --download option with the intel or Gnu environment but not cray
  • You can use config/examples/arch-cray-xc40-knl-opt.py as a template for running configure but it is outdated
  • - +
  • When using the Intel module you may need to use --download-sowing-cc=icc --download-sowing-cxx=icpc -download-sowing-cpp="icc -E" --download-sowing-cxxpp="icpc -E" since the GNU compilers may not work as they access Intel files
  • + +
  • To get an interactive node use qsub -A CSC250STMS07 -n 1 -t 60 -q debug-flat-quad -I
  • +
  • To run on interactive node using two MPI ranks use aprun -n 2 ./program options
  • - + +
  • OLCF - Oak Ridge National Laboratory - Summit machine - NVIDIA GPUs and IBM Power PC processors - + diff --git a/src/docs/website/documentation/linearsolvertable.html b/src/docs/website/documentation/linearsolvertable.html index 6cb9470d19d..0651a109ae9 100644 --- a/src/docs/website/documentation/linearsolvertable.html +++ b/src/docs/website/documentation/linearsolvertable.html @@ -205,6 +205,18 @@

    Requests and contributions welcome

    ILU dt + seqaij + +SuperLU Sequential ILU solver (LBNL). + + + X + + + + + + aij pilut/hypre (LLNL) Deprecated. Use euclid/hypre instead X @@ -399,7 +411,7 @@

    Requests and contributions welcome

    aij - SuperLU (BNL) + SuperLU (LBNL) SuperLU Sequential LU solver / SuperLU_DIST Parallel LU solver @@ -484,7 +496,7 @@

    Requests and contributions welcome

    dense - --- + Elemental X X @@ -535,7 +547,7 @@

    Requests and contributions welcome

    dense - --- + Elemental X X diff --git a/src/docs/website/download/index.html b/src/docs/website/download/index.html index aeb10c2c64d..6b80bd48f0b 100644 --- a/src/docs/website/download/index.html +++ b/src/docs/website/download/index.html @@ -62,13 +62,13 @@

    PETSc Release Version 3.12

  • or from this compact tarball - includes current patches, no documentation (all documentation may be accessed on line)
  • or, the same as above, but with with documentation (users manual, manual pages etc). We suggest using the online documentation and only recommend using this download if you have no or very slow internet access.
  • diff --git a/src/docs/website/index.html b/src/docs/website/index.html index e116f858163..22a03e7694a 100644 --- a/src/docs/website/index.html +++ b/src/docs/website/index.html @@ -51,6 +51,14 @@

    Portable, Extensible Toolkit for
    Scientific Computation<
  • Miscellaneous
  • External Software
  • Developers Site
  • +
  • + DMNetwork + +