pax_global_header00006660000000000000000000000064141114176430014514gustar00rootroot0000000000000052 comment=35d705001bdd42b859ac7f32f124cbf9a41d9b3b dolfinx-0.3.0/000077500000000000000000000000001411141764300131575ustar00rootroot00000000000000dolfinx-0.3.0/.circleci/000077500000000000000000000000001411141764300150125ustar00rootroot00000000000000dolfinx-0.3.0/.circleci/config.yml000066400000000000000000000117731411141764300170130ustar00rootroot00000000000000version: 2.1 install-python-components: &install-python-components name: Install FEniCS Python components command: | git clone https://github.com/FEniCS/basix.git --branch main --single-branch cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -B build-dir -S ./basix cmake --build build-dir --parallel 3 cmake --install build-dir pip3 install ./basix/python pip3 install git+https://github.com/FEniCS/ufl.git pip3 install git+https://github.com/FEniCS/ffcx.git flake8-python-code: &flake8-python-code name: Flake8 checks on Python code command: | cd python/ python3 -m flake8 dolfinx python3 -m flake8 dolfinx_utils python3 -m flake8 demo python3 -m flake8 test build-documentation-cpp: &build-documentation-cpp name: Build documentation (C++) command: cd cpp/doc && doxygen configure-cpp: &configure-cpp name: Configure (C++) command: mkdir -p build && cd build && cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer ../cpp/ build-install-cpp: &build-install-cpp name: Build and install (C++) command: cd build && ninja -j3 install unit-tests-cpp: &unit-tests-cpp name: Build and run C++ unit tests (serial and MPI) command: | cd build/test/unit cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer . ninja -j3 ctest --output-on-failure -R unittests mpirun -np 3 ctest --output-on-failure -R unittests regression-tests-cpp: ®ression-tests-cpp name: Build and run C++ regressions tests (serial) command: | cd build/demo cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer . 
ninja -j3 ctest -j3 -R demo -R serial regression-tests-cpp-mpi: ®ression-tests-cpp-mpi name: Run C++ regression tests (MPI) command: | cd build/demo cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer . ninja -j3 ctest --verbose -R demo -R mpi_3 build-python-interface: &build-python-interface name: Build Python/pybind11 interface command: | cd python pip3 -v install --global-option build --global-option --debug . --user build-documentation-python: &build-documentation-python name: Build documentation (Python) command: | cd python/demo && python3 ./generate-demo-files.py cd ../doc && make html demos-python: &demos-python name: Run demos (Python, serial) command: | mkdir -p ~/junit cd python/demo python3 ./generate-demo-files.py python3 -m pytest -n=4 -v -m serial --durations=10 --junitxml=~/junit/demo-results.xml test.py demos-python-mpi: &demos-python-mpi name: Run demos (Python, MPI) command: | cd python/demo python3 ./generate-demo-files.py python3 -m pytest -n=2 -v -m mpi test.py --num-proc=3 set-jit-defaults: &set-jit-defaults name: Set default DOLFINx JIT parameters command: | mkdir -p ~/.config/dolfinx echo '{ "cffi_extra_compile_args" : ["-g0", "-O0" ] }' > ~/.config/dolfinx/dolfinx_jit_parameters.json unit-tests-python: &unit-tests-python name: Run unit tests (Python, serial) command: | mkdir -p ~/junit cd python/test/unit python3 -m pytest -n=4 --durations=50 --junitxml=~/junit/test-results.xml . unit-tests-python-mpi: &unit-tests-python-mpi name: Run unit tests (Python, MPI) command: | cd python/test/unit mpirun -np 3 python3 -m pytest . 
jobs: build-real: docker: - image: fenicsproject/test-env:latest-mpich environment: DEBIAN_FRONTEND: "noninteractive" PETSC_ARCH: "linux-gnu-real-32" steps: - checkout - run: *install-python-components - run: *flake8-python-code - run: *build-documentation-cpp - run: *configure-cpp - run: *build-install-cpp - run: *unit-tests-cpp - run: *regression-tests-cpp - run: *regression-tests-cpp-mpi - run: *build-python-interface - run: *build-documentation-python - run: *demos-python - run: *demos-python-mpi - run: *set-jit-defaults - run: *unit-tests-python - run: *unit-tests-python-mpi - persist_to_workspace: root: . paths: - python/doc/build/html/ - cpp/doc/html/ - store_test_results: path: ~/junit - store_artifacts: path: ~/junit build-complex: docker: - image: fenicsproject/test-env:latest-mpich environment: DEBIAN_FRONTEND: "noninteractive" PETSC_ARCH: "linux-gnu-complex-32" steps: - checkout - run: *install-python-components - run: *flake8-python-code - run: *build-documentation-cpp - run: *configure-cpp - run: *build-install-cpp - run: *unit-tests-cpp - run: *regression-tests-cpp - run: *regression-tests-cpp-mpi - run: *build-python-interface - run: *build-documentation-python - run: *demos-python - run: *demos-python-mpi - run: *set-jit-defaults - run: *unit-tests-python - run: *unit-tests-python-mpi - store_test_results: path: ~/junit - store_artifacts: path: ~/junit workflows: version: 2 build: jobs: - build-real - build-complex dolfinx-0.3.0/.clang-format000066400000000000000000000053651411141764300155430ustar00rootroot00000000000000--- Language: Cpp # BasedOnStyle: LLVM AccessModifierOffset: -2 AlignAfterOpenBracket: Align AlignConsecutiveAssignments: false AlignConsecutiveDeclarations: false AlignEscapedNewlinesLeft: false AlignOperands: true AlignTrailingComments: true AllowAllParametersOfDeclarationOnNextLine: true AllowShortBlocksOnASingleLine: false AllowShortCaseLabelsOnASingleLine: false AllowShortFunctionsOnASingleLine: All 
AllowShortIfStatementsOnASingleLine: false AllowShortLoopsOnASingleLine: false AlwaysBreakAfterDefinitionReturnType: None AlwaysBreakAfterReturnType: None AlwaysBreakBeforeMultilineStrings: false AlwaysBreakTemplateDeclarations: true BinPackArguments: true BinPackParameters: true BraceWrapping: AfterClass: false AfterControlStatement: false AfterEnum: false AfterFunction: false AfterNamespace: false AfterObjCDeclaration: false AfterStruct: false AfterUnion: false BeforeCatch: false BeforeElse: false IndentBraces: false BreakBeforeBinaryOperators: All BreakBeforeBraces: Allman BreakBeforeTernaryOperators: true BreakConstructorInitializersBeforeComma: false BreakAfterJavaFieldAnnotations: false BreakStringLiterals: true ColumnLimit: 80 CommentPragmas: '^ IWYU pragma:' ConstructorInitializerAllOnOneLineOrOnePerLine: false ConstructorInitializerIndentWidth: 4 ContinuationIndentWidth: 4 Cpp11BracedListStyle: true DerivePointerAlignment: false DisableFormat: false ExperimentalAutoDetectBinPacking: false ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] IncludeCategories: - Regex: '^"(llvm|llvm-c|clang|clang-c)/' Priority: 2 - Regex: '^(<|"(gtest|isl|json)/)' Priority: 3 - Regex: '.*' Priority: 1 IncludeIsMainRegex: '$' IndentCaseLabels: false IndentWidth: 2 IndentWrappedFunctionNames: false JavaScriptQuotes: Leave JavaScriptWrapImports: true KeepEmptyLinesAtTheStartOfBlocks: true MacroBlockBegin: '' MacroBlockEnd: '' MaxEmptyLinesToKeep: 1 NamespaceIndentation: None ObjCBlockIndentWidth: 2 ObjCSpaceAfterProperty: false ObjCSpaceBeforeProtocolList: true PenaltyBreakBeforeFirstCallParameter: 19 PenaltyBreakComment: 300 PenaltyBreakFirstLessLess: 120 PenaltyBreakString: 1000 PenaltyExcessCharacter: 1000000 PenaltyReturnTypeOnItsOwnLine: 60 PointerAlignment: Left ReflowComments: true SortIncludes: true SpaceAfterCStyleCast: false SpaceAfterTemplateKeyword: true SpaceBeforeAssignmentOperators: true SpaceBeforeParens: ControlStatements SpaceInEmptyParentheses: false 
SpacesBeforeTrailingComments: 1 SpacesInAngles: false SpacesInContainerLiterals: true SpacesInCStyleCastParentheses: false SpacesInParentheses: false SpacesInSquareBrackets: false Standard: Cpp11 TabWidth: 8 UseTab: Never ... dolfinx-0.3.0/.dockerignore000066400000000000000000000000101411141764300156220ustar00rootroot00000000000000**/.git dolfinx-0.3.0/.github/000077500000000000000000000000001411141764300145175ustar00rootroot00000000000000dolfinx-0.3.0/.github/workflows/000077500000000000000000000000001411141764300165545ustar00rootroot00000000000000dolfinx-0.3.0/.github/workflows/ccpp.yml000066400000000000000000000141551411141764300202320ustar00rootroot00000000000000name: DOLFINx CI on: push: branches: - "**" tags: - "v*" pull_request: branches: - main jobs: build: if: "!(contains(github.event.head_commit.message, '[ci skip]') || contains(github.event.head_commit.message, '[skip ci]'))" runs-on: ubuntu-20.04 container: fenicsproject/test-env:latest-openmpi env: CC: clang-12 CXX: clang++-12 PETSC_ARCH: linux-gnu-${{ matrix.petsc_arch }}-${{ matrix.petsc_int_type }} OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 OMPI_MCA_rmaps_base_oversubscribe: 1 OMPI_MCA_plm: isolated OMPI_MCA_btl_vader_single_copy_mechanism: none OMPI_MCA_mpi_yield_when_idle: 1 OMPI_MCA_hwloc_base_binding_policy: none strategy: matrix: petsc_arch: [real, complex] petsc_int_type: [32, 64] steps: - uses: actions/checkout@v2 - name: Get Basix and install uses: actions/checkout@v2 with: path: ./basix repository: FEniCS/basix ref: version-0.3.0 - name: Install FEniCS Python components run: | cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -B build-dir -S ./basix cmake --build build-dir cmake --install build-dir python3 -m pip install ./basix/python python3 -m pip install git+https://github.com/FEniCS/ufl.git python3 -m pip install git+https://github.com/FEniCS/ffcx.git@version-0.3.0 - name: Flake8 checks run: | cd python/ python3 -m flake8 dolfinx python3 -m flake8 dolfinx_utils python3 -m 
flake8 demo python3 -m flake8 test - name: Configure C++ run: cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build -S cpp/ - name: Build and install C++ library run: | cmake --build build cmake --install build - name: Build C++ interface documentation run: | export DOLFINX_VERSION=`cmake -L build | grep DOXYGEN_DOLFINX_VERSION | cut -f2 -d "="` echo $DOLFINX_VERSION cd cpp/doc doxygen - name: Build and run C++ unit tests (serial and MPI) run: | cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build/test/unit/ -S build/test/unit/ cmake --build build/test/unit cd build/test/unit ctest -V --output-on-failure -R unittests mpiexec -np 2 ctest --output-on-failure -R unittests - name: Build and run C++ regression tests (serial and MPI (np=2)) run: | cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build/demo/ -S build/demo/ cmake --build build/demo cd build/demo ctest -V -R demo -R serial ctest -V -R demo -R mpi_2 - name: Build Python interface run: python3 -m pip -v install --global-option build --global-option --debug python/ - name: Build Python interface documentation run: | cd python/demo && python3 ./generate-demo-files.py cd ../doc && make html - name: Set default DOLFINx JIT parameters run: | mkdir -p ~/.config/dolfinx echo '{ "cffi_extra_compile_args": ["-g0", "-O0" ] }' > ~/.config/dolfinx/dolfinx_jit_parameters.json - name: Run demos (Python, serial) run: python3 -m pytest -v -n=2 -m serial --durations=10 python/demo/test.py - name: Run demos (Python, MPI (np=2)) run: python3 -m pytest -v -m mpi --num-proc=2 python/demo/test.py - name: Run Python unit tests (serial) run: python3 -m pytest -v -n=auto --durations=50 python/test/unit/ - name: Run Python unit tests (MPI, np=2) run: mpirun -np 2 python3 -m pytest python/test/unit/ - name: Upload C++ documentation artifact uses: actions/upload-artifact@v2 with: name: doc-cpp-${{ matrix.petsc_arch }}-${{ matrix.petsc_int_type }} path: | cpp/doc/html/ retention-days: 2 if-no-files-found: error - name: Upload Python 
documentation artifact uses: actions/upload-artifact@v2 with: name: doc-python-${{ matrix.petsc_arch }}-${{ matrix.petsc_int_type }} path: | python/doc/build/html/ retention-days: 2 if-no-files-found: error - name: Checkout FEniCS/docs if: ${{ github.repository == 'FEniCS/dolfinx' && ( github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') ) && runner.os == 'Linux' }} uses: actions/checkout@v2 with: repository: "FEniCS/docs" path: "docs" ssh-key: "${{ secrets.SSH_GITHUB_DOCS_PRIVATE_KEY }}" - name: Set version name if: ${{ github.repository == 'FEniCS/dolfinx' && ( github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') ) && runner.os == 'Linux' }} run: | echo "VERSION_NAME=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - name: Copy documentation into repository if: ${{ github.repository == 'FEniCS/dolfinx' && ( github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') ) && runner.os == 'Linux' }} run: | cd docs git rm -r --ignore-unmatch dolfinx/${{ env.VERSION_NAME }}/cpp git rm -r --ignore-unmatch dolfinx/${{ env.VERSION_NAME }}/python mkdir -p dolfinx/${{ env.VERSION_NAME }}/cpp mkdir -p dolfinx/${{ env.VERSION_NAME }}/python cp -r ../cpp/doc/html/* dolfinx/${{ env.VERSION_NAME }}/cpp cp -r ../python/doc/build/html/* dolfinx/${{ env.VERSION_NAME }}/python - name: Commit and push documentation to FEniCS/docs if: ${{ github.repository == 'FEniCS/dolfinx' && ( github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') ) && runner.os == 'Linux' && matrix.petsc_arch == 'real' && matrix.petsc_int_type == '32' }} run: | cd docs git config --global user.email "fenics@github.com" git config --global user.name "FEniCS GitHub Actions" git add --all git commit --allow-empty -m "C++ FEniCS/dolfinx@${{ github.sha }}" git push dolfinx-0.3.0/.github/workflows/docker-build-env.yml000066400000000000000000000222231411141764300224320ustar00rootroot00000000000000name: Docker environment images # Builds the 
fenicsproject/test-env:${MPI} and dolfinx/dev-env images # once per week. These are used for build/test workflows and end-user # builds of DOLFINx, respectively. on: push: # This will run every time a version tag is created tags: - 'v*' schedule: # '*' is a special character in YAML, so string must be quoted - cron: "0 22 * * SUN" workflow_dispatch: ~ jobs: start_runner: name: Start self-hosted EC2 runner if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} runs-on: ubuntu-latest outputs: label: ${{ steps.start-ec2-runner.outputs.label }} ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: ${{ secrets.AWS_REGION }} - name: Start EC2 runner id: start-ec2-runner uses: machulav/ec2-github-runner@v2.2.1 with: mode: start github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} ec2-image-id: ami-07529ef40e2c81086 ec2-instance-type: c6g.large subnet-id: subnet-dc2151b7 security-group-id: sg-03fc69ee7a6ba8bc4 build_test_env_openmpi: name: Build fenicsproject/test-env:openmpi image if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} strategy: fail-fast: False matrix: include: - arch_tag: amd64 os: ubuntu-latest experimental: false - arch_tag: arm64 os: ARM64 experimental: false runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} needs: start_runner env: PETSC_SLEPC_OPTFLAGS: "-O2 -g" PETSC_SLEPC_DEBUGGING: "yes" MPI: "openmpi" DOCKER_BUILD_ARGS: "--build-arg PETSC_SLEPC_OPTFLAGS --build-arg PETSC_SLEPC_DEBUGGING --build-arg MPI" ARCH_TAG: ${{ matrix.arch_tag }} steps: - uses: actions/checkout@v2 - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo 
"::set-output name=TAG_PREFIX::latest" fi - name: Log into the Dockerhub registry run: echo ${{ secrets.dockerhub_token }} | docker login -u ${{ secrets.dockerhub_username }} --password-stdin - name: Build the Docker image run: docker buildx build ${DOCKER_BUILD_ARGS} --push --cache-from=type=registry,ref=fenicsproject/test-env:${MPI}-${ARCH_TAG} --cache-to=type=inline --file docker/Dockerfile --target dev-env --tag fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-${MPI}-${ARCH_TAG} docker/ - name: Cleanup run: docker rmi fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-${MPI}-${ARCH_TAG} build_test_env_mpich: name: Build fenicsproject/test-env:mpich image if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} strategy: fail-fast: False matrix: include: - arch_tag: amd64 os: ubuntu-latest experimental: false - arch_tag: arm64 os: ARM64 experimental: true runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} needs: start_runner env: PETSC_SLEPC_OPTFLAGS: "-O2 -g" PETSC_SLEPC_DEBUGGING: "yes" MPI: "mpich" DOCKER_BUILD_ARGS: "--build-arg PETSC_SLEPC_OPTFLAGS --build-arg PETSC_SLEPC_DEBUGGING --build-arg MPI" ARCH_TAG: ${{ matrix.arch_tag }} steps: - uses: actions/checkout@v2 - name: Log into the DockerHub registry run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo "::set-output name=TAG_PREFIX::latest" fi - name: Build the Docker image run: docker buildx build ${DOCKER_BUILD_ARGS} --push --cache-from=type=registry,ref=fenicsproject/test-env:${MPI}-${ARCH_TAG} --cache-to=type=inline --file docker/Dockerfile --target dev-env --tag fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-${MPI}-${ARCH_TAG} docker/ - name: Cleanup run: docker rmi fenicsproject/test-env:${{ 
steps.tag_name.outputs.TAG_PREFIX }}-${MPI}-${ARCH_TAG} build_dev_env: name: Build dolfinx/dev-env image if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} strategy: fail-fast: False matrix: include: - arch_tag: amd64 os: ubuntu-latest experimental: false - arch_tag: arm64 os: ARM64 experimental: true runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} needs: start_runner env: # TODO: march based on matrix.os PETSC_SLEPC_OPTFLAGS: "-O2" PETSC_SLEPC_DEBUGGING: "no" MPI: "mpich" DOCKER_BUILD_ARGS: "--build-arg PETSC_SLEPC_OPTFLAGS --build-arg PETSC_SLEPC_DEBUGGING --build-arg MPI" ARCH_TAG: ${{ matrix.arch_tag }} steps: - uses: actions/checkout@v2 - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo "::set-output name=TAG_PREFIX::latest" fi - name: Log into the DockerHub registry run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin - name: Build the Docker image run: docker buildx build ${DOCKER_BUILD_ARGS} --push --cache-from=type=registry,ref=dolfinx/dev-env:${ARCH_TAG} --cache-to=type=inline --file docker/Dockerfile --target dev-env --tag dolfinx/dev-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker/ - name: Cleanup run: docker rmi dolfinx/dev-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} push_multiarch_images: name: Push multiarch images if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} runs-on: ubuntu-latest needs: - build_test_env_openmpi - build_test_env_mpich - build_dev_env steps: - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo "::set-output name=TAG_PREFIX::latest" fi - name: Log into the DockerHub registry run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ 
secrets.DOCKERHUB_USERNAME }} --password-stdin # NOTE: OpenMPI ARM64 build currently fails, see https://github.com/FEniCS/dolfinx/runs/2380975354?check_suite_focus=true#step:4:7472 - name: Push multiarch images run: | docker buildx imagetools create \ -t fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-mpich \ fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-mpich-amd64 \ fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-mpich-arm64 docker buildx imagetools create \ -t fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-openmpi \ fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-openmpi-amd64 \ fenicsproject/test-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-openmpi-arm64 docker buildx imagetools create \ -t dolfinx/dev-env:${{ steps.tag_name.outputs.TAG_PREFIX }} \ dolfinx/dev-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-amd64 \ dolfinx/dev-env:${{ steps.tag_name.outputs.TAG_PREFIX }}-arm64 stop-runner: name: Stop self-hosted EC2 runner if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs needs: - start_runner # required to get output from the start-runner job - build_test_env_openmpi # required to wait when the main job is done - build_test_env_mpich - build_dev_env runs-on: ubuntu-latest steps: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: ${{ secrets.AWS_REGION }} - name: Stop EC2 runner uses: jhale/ec2-github-runner@main with: mode: stop github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} label: ${{ needs.start_runner.outputs.label }} ec2-instance-id: ${{ needs.start_runner.outputs.ec2-instance-id }} dolfinx-0.3.0/.github/workflows/docker-end-user.yml000066400000000000000000000164341411141764300222760ustar00rootroot00000000000000name: Docker end-user images # Builds images with 
DOLFINx and all of the required FEniCS Project # components nightly. on: # This will run every time a version tag is pushed push: tags: - 'v*' schedule: # '*' is a special character in YAML, so string must be quoted - cron: "0 5 * * *" workflow_dispatch: ~ jobs: start_runner: name: Start self-hosted EC2 runner if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} runs-on: ubuntu-latest outputs: label: ${{ steps.start-ec2-runner.outputs.label }} ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: ${{ secrets.AWS_REGION }} - name: Start EC2 runner id: start-ec2-runner uses: machulav/ec2-github-runner@v2.2.1 with: mode: start github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} ec2-image-id: ami-07529ef40e2c81086 ec2-instance-type: c6g.large subnet-id: subnet-dc2151b7 security-group-id: sg-03fc69ee7a6ba8bc4 build_end_user_images: name: Build DOLFINx end-user images if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} strategy: fail-fast: False matrix: include: - arch_tag: amd64 os: ubuntu-latest experimental: false - arch_tag: arm64 os: ARM64 experimental: true runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} needs: start_runner env: DOLFINX_CMAKE_BUILD_TYPE: "RelWithDebInfo" DOLFINX_CMAKE_CXX_FLAGS: "-O2" DOCKER_BUILD_ARGS: "--build-arg DOLFINX_CMAKE_BUILD_TYPE --build-arg DOLFINX_CMAKE_CXX_FLAGS" ARCH_TAG: ${{ matrix.arch_tag }} steps: - uses: actions/checkout@v2 with: repository: "FEniCS/dolfinx" path: "dolfinx" - uses: actions/checkout@v2 with: repository: "FEniCS/ffcx" path: "ffcx" ref: "main" - uses: actions/checkout@v2 with: repository: "FEniCS/basix" path: "basix" ref: "main" - uses: actions/checkout@v2 with: repository: "FEniCS/ufl" path: 
"ufl" ref: "main" - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo "::set-output name=TAG_PREFIX::latest" fi - name: Set default FFCx parameters run: | echo '{ }' > dolfinx/docker/ffcx_parameters.json - name: Set default DOLFINx JIT parameters run: | echo '{ "cffi_extra_compile_args" : ["-O2"] } ' > dolfinx/docker/dolfinx_jit_parameters.json - name: Build dolfinx/dolfinx-onbuild run: | docker buildx build --file dolfinx/docker/Dockerfile ${DOCKER_BUILD_ARGS} \ --target dolfinx-onbuild --cache-from=type=registry,ref=dolfinx/dev-env \ --tag dolfinx/dolfinx-onbuild . - name: Build intermediate run: | docker buildx build --file dolfinx/docker/Dockerfile ${DOCKER_BUILD_ARGS} \ --target intermediate --tag intermediate . - name: Build dolfinx/dolfinx run: | docker buildx build --file dolfinx/docker/Dockerfile ${DOCKER_BUILD_ARGS} \ --target dolfinx --tag dolfinx/dolfinx . - name: Build dolfinx/lab run: | docker buildx build --file dolfinx/docker/Dockerfile ${DOCKER_BUILD_ARGS} \ --target lab --tag dolfinx/lab . 
- name: Run basic test inside container run: | docker run --rm dolfinx/dolfinx \ python3 -c "import dolfinx; from mpi4py import MPI; mesh = dolfinx.UnitSquareMesh(MPI.COMM_WORLD, 10, 10); V = dolfinx.FunctionSpace(mesh, ('Lagrange', 1));" - name: Log into the DockerHub registry run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin - name: Push to the DockerHub registry run: | docker tag dolfinx/dolfinx-onbuild dolfinx/dolfinx-onbuild:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker push dolfinx/dolfinx-onbuild:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker tag dolfinx/dolfinx dolfinx/dolfinx:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker push dolfinx/dolfinx:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker tag dolfinx/lab dolfinx/lab:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} docker push dolfinx/lab:${{ steps.tag_name.outputs.TAG_PREFIX }}-${ARCH_TAG} push_multiarch_images: name: Push multiarch image if: ${{ github.repository == 'FEniCS/dolfinx' && github.ref == 'refs/heads/main' }} runs-on: ubuntu-latest needs: - build_end_user_images steps: - name: Get tag name id: tag_name run: | if [[ ${GITHUB_REF#refs/tags/} == v* ]] then echo "::set-output name=TAG_PREFIX::${GITHUB_REF#refs/tags/}" else echo "::set-output name=TAG_PREFIX::latest" fi - name: Log into the DockerHub registry run: echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin - name: Push multiarch images run: | docker buildx imagetools create -t dolfinx/dolfinx-onbuild:${{ steps.tag_name.outputs.TAG_PREFIX }} dolfinx/dolfinx-onbuild:${{ steps.tag_name.outputs.TAG_PREFIX }}-amd64 dolfinx/dolfinx-onbuild:${{ steps.tag_name.outputs.TAG_PREFIX }}-arm64 docker buildx imagetools create -t dolfinx/dolfinx:${{ steps.tag_name.outputs.TAG_PREFIX }} dolfinx/dolfinx:${{ steps.tag_name.outputs.TAG_PREFIX }}-amd64 dolfinx/dolfinx:${{ 
steps.tag_name.outputs.TAG_PREFIX }}-arm64 docker buildx imagetools create -t dolfinx/lab:${{ steps.tag_name.outputs.TAG_PREFIX }} dolfinx/lab:${{ steps.tag_name.outputs.TAG_PREFIX }}-amd64 dolfinx/lab:${{ steps.tag_name.outputs.TAG_PREFIX }}-arm64 stop-runner: name: Stop self-hosted EC2 runner needs: - start_runner # required to get output from the start-runner job - build_end_user_images # required to wait when the main job is done runs-on: ubuntu-latest if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs steps: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: ${{ secrets.AWS_REGION }} - name: Stop EC2 runner uses: jhale/ec2-github-runner@main with: mode: stop github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} label: ${{ needs.start_runner.outputs.label }} ec2-instance-id: ${{ needs.start_runner.outputs.ec2-instance-id }} dolfinx-0.3.0/.github/workflows/intel.yml000066400000000000000000000074611411141764300204220ustar00rootroot00000000000000name: Intel compilers on: pull_request: branches: - main jobs: build: runs-on: ubuntu-20.04 container: fenicsproject/test-env:latest-mpich defaults: run: shell: bash env: CC: ${{ matrix.c_compiler }} CXX: ${{ matrix.cxx_compiler }} PETSC_ARCH: linux-gnu-real-32 strategy: matrix: include: - compiler: "LLVM-based" c_compiler: icx cxx_compiler: icpx steps: - uses: actions/checkout@v2 - name: Install Intel compilers run: | wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB echo "deb https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list apt update # apt install -y intel-hpckit apt install -y intel-oneapi-common-vars intel-oneapi-compiler-dpcpp-cpp - name: Checkout Basix uses: actions/checkout@v2 with: path: ./basix 
repository: FEniCS/basix ref: main - name: Checkout UFL uses: actions/checkout@v2 with: path: ./ufl repository: FEniCS/ufl ref: main - name: Checkout FFCx uses: actions/checkout@v2 with: path: ./ffcx repository: FEniCS/ffcx ref: main - name: Install FEniCS Python components run: | . /opt/intel/oneapi/setvars.sh cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -B build-dir -S ./basix cmake --build build-dir cmake --install build-dir pip3 install ./basix/python pip3 install ./ufl pip3 install ./ffcx - name: Configure C++ run: | . /opt/intel/oneapi/setvars.sh cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build -S cpp/ - name: Build and install C++ library run: | . /opt/intel/oneapi/setvars.sh cmake --build build cmake --install build - name: Build and run C++ unit tests (serial and MPI) run: | . /opt/intel/oneapi/setvars.sh cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build/test/unit/ -S build/test/unit/ cmake --build build/test/unit cd build/test/unit ctest --output-on-failure -R unittests mpiexec -np 2 ctest --output-on-failure -R unittests - name: Build and run C++ regression tests (serial and MPI (np=2)) run: | . /opt/intel/oneapi/setvars.sh cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build/demo/ -S build/demo/ cmake --build build/demo cd build/demo ctest -R demo -R serial ctest -R demo -R mpi_2 - name: Build Python interface run: | . /opt/intel/oneapi/setvars.sh python3 -m pip -v install python/ - name: Set default DOLFINx JIT parameters run: | mkdir -p ~/.config/dolfinx echo '{ "cffi_extra_compile_args": ["-g0", "-O0" ] }' > ~/.config/dolfinx/dolfinx_jit_parameters.json - name: Run demos (Python, serial) run: | . /opt/intel/oneapi/setvars.sh python3 -m pytest -v -n=2 -m serial --durations=10 python/demo/test.py - name: Run demos (Python, MPI (np=2)) run: | . /opt/intel/oneapi/setvars.sh python3 -m pytest -v -m mpi --num-proc=2 python/demo/test.py - name: Run Python unit tests (serial) run: | . 
/opt/intel/oneapi/setvars.sh pip3 install sympy python3 -m pytest -n=auto --durations=50 python/test/unit/ - name: Run Python unit tests (MPI, np=2) run: | . /opt/intel/oneapi/setvars.sh mpirun -np 2 python3 -m pytest python/test/unit/ dolfinx-0.3.0/.github/workflows/pyvista.yml000066400000000000000000000054071411141764300210040ustar00rootroot00000000000000name: Test visualisation demos on: # Uncomment the below to trigger tests on push # push: # branches: # - "**" schedule: # '*' is a special character in YAML, so string must be quoted - cron: "0 1 * * *" workflow_dispatch: ~ jobs: build: if: "!(contains(github.event.head_commit.message, '[ci skip]') || contains(github.event.head_commit.message, '[skip ci]'))" runs-on: ubuntu-20.04 container: fenicsproject/test-env:latest-openmpi env: CC: clang CXX: clang++ # For pyvista/pyvistaqt DISPLAY: ":99.0" PYVISTA_OFF_SCREEN: true PYVISTA_VERSION: 0.30.0 PYVISTA_QT_VERSION: 0.4.0 PETSC_ARCH: linux-gnu-${{ matrix.petsc_arch }}-32 OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 OMPI_MCA_rmaps_base_oversubscribe: 1 OMPI_MCA_plm: isolated OMPI_MCA_btl_vader_single_copy_mechanism: none OMPI_MCA_mpi_yield_when_idle: 1 OMPI_MCA_hwloc_base_binding_policy: none strategy: matrix: petsc_arch: [real, complex] steps: - uses: actions/checkout@v2 - name: Checkout Basix uses: actions/checkout@v2 with: path: ./basix repository: FEniCS/basix ref: main - name: Checkout UFL uses: actions/checkout@v2 with: path: ./ufl repository: FEniCS/ufl ref: main - name: Checkout FFCx uses: actions/checkout@v2 with: path: ./ffcx repository: FEniCS/ffcx ref: main - name: Install FEniCS Python components run: | cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build-dir -S ./basix cmake --build build-dir cmake --install build-dir python3 -m pip install ./basix/python python3 -m pip install ./ufl python3 -m pip install ./ffcx apt-get update apt-get install -y --no-install-recommends libgl1-mesa-dev xvfb # pyvista apt-get install -y 
--no-install-recommends python3-pyqt5 libgl1-mesa-glx # pyvistaqt pip3 install pyvista==${PYVISTA_VERSION} pyvistaqt==${PYVISTA_QT_VERSION} - name: Configure C++ run: cmake -G Ninja -DCMAKE_BUILD_TYPE=Developer -B build -S cpp/ - name: Build and install C++ library run: | cmake --build build cmake --install build - name: Build Python interface run: python3 -m pip -v install --global-option build --global-option --debug python/ - name: Run pyvista demos (Python, serial) run: python3 -m pytest -v -n=2 -m serial --durations=10 python/demo/test.py - name: Run pyivsta demos (Python, MPI (np=2)) run: python3 -m pytest -v -m mpi --num-proc=2 python/demo/test.py dolfinx-0.3.0/.github/workflows/spack.yml000066400000000000000000000052621411141764300204050ustar00rootroot00000000000000name: Spack build on: # Uncomment the below 'push' to trigger on push # push: # branches: # - "**" schedule: # '*' is a special character in YAML, so string must be quoted - cron: "0 2 * * THU" workflow_dispatch: ~ jobs: build: strategy: matrix: # os: [ubuntu-20.04, macos-10.15] os: [ubuntu-20.04] fail-fast: false runs-on: ${{ matrix.os }} env: MPLBACKEND: agg OPENBLAS_NUM_THREADS: 1 OMPI_MCA_rmaps_base_oversubscribe: 1 OMPI_MCA_plm: isolated OMPI_MCA_btl_vader_single_copy_mechanism: none OMPI_MCA_mpi_yield_when_idle: 1 OMPI_MCA_hwloc_base_binding_policy: none steps: # grub-efi: https://github.com/actions/virtual-environments/issues/1605 - name: Install compilers run: | sudo apt-get update sudo apt-get install grub-efi sudo update-grub sudo apt-get upgrade sudo apt-get install g++-10 gfortran-10 grub-efi if: matrix.os == 'ubuntu-20.04' - name: Get Spack uses: actions/checkout@v2 with: path: ./spack repository: spack/spack - name: Build DOLFINx (C++) via Spack run: | . 
./spack/share/spack/setup-env.sh spack env create cpp spack env activate cpp echo " concretization: together" >> ./spack/var/spack/environments/cpp/spack.yaml spack add fenics-dolfinx@main spack -v install - name: Get DOLFINx code (to access test files) uses: actions/checkout@v2 with: path: ./dolfinx-test - name: Run a C++ test via Spack run: | . ./spack/share/spack/setup-env.sh spack env activate cpp cd dolfinx-test/cpp/ python cmake/scripts/generate-cmakefiles.py cd demo/poisson ffcx poisson.ufl cmake . export VERBOSE=1 make mpirun -np 2 ./demo_poisson - name: Build DOLFINx (Python) and test dependencies via Spack run: | . ./spack/share/spack/setup-env.sh spack env create py spack env activate py echo " concretization: together" >> ./spack/var/spack/environments/py/spack.yaml spack add py-fenics-dolfinx@main spack install # NOTE: numba is installed via pip because the spack package takes # an extremely long time to compile - name: Run DOLFINx (Python) tests run: | . ./spack/share/spack/setup-env.sh spack env activate py pip install numba mpirun -np 2 python3 ./dolfinx-test/python/demo/stokes-taylor-hood/demo_stokes-taylor-hood.py dolfinx-0.3.0/AUTHORS000066400000000000000000000115711411141764300142340ustar00rootroot00000000000000Authors/contributors in alphabetical order: Ido Akkerman (-) Martin Sandve Alnæs (C) Igor Baratta (-) Fredrik Bengzon (-) Aslak Bergersen (C) Jan Blechta (C) Rolv Erlend Bredesen (C) Jed Brown (C) Solveig Bruvoll (C) Jørgen Dokken (-) Niklas Ericsson (-) Patrick Farrell (C) Georgios Foufas (C) Tom Gustafsson (C) Joachim B Haga (C) Johan Hake (C) Jack S. Hale (C) Rasmus Hemph (-) David Heintz (-) Johan Hoffman (C) Par Ingelstrom (-) Anders E. 
Johansen (C) Johan Jansson (C) Niclas Jansson (C) Alexander Jarosch (C) Kristen Kaasbjerg (C) Benjamin Kehlet (C) Arve Knudsen (C) Karin Kraft (-) Aleksandra Krusper (-) Evan Lezar (C) Tianyi Li (C) Matthias Liertzer (C) Dag Lindbo (C) Glenn Terje Lines (C) Anders Logg (C) Nuno Lopes (C) Kent-Andre Mardal (C) Andreas Mark (-) Andre Massing (C) Lawrence Mitchell (C) Marco Morandini (C) Mikael Mortensen (C) Corrado Maurini (C) Pablo De Napoli (-) Harish Narayanan (C) Andreas Nilsson (-) Minh Do-Quang (-) Chris Richardson (C) Johannes Ring (C) Marie E. Rognes (C) John Rudge (-) Bartosz Sawicki (C) Nico Schlömer (C) Matthew Scroggs (-) Kristoffer Selim (C) Angelo Simone (C) Ola Skavhaug (C) Thomas Svedberg (-) Erik Svensson (C) Harald Svensson (-) Andy Terrel (C) Jim Tilander (C) Fredrik Valdmanis (C) Magnus Vikstrøm (C) Walter Villanueva (-) Shawn Walker (C) Garth N. Wells (C) Ilmar Wilbers (C) Cian Wilson (C) Ivan Yashchuk (C) Michele Zaffalon (C) Åsmund Ødegård (C) Kristian Ølgaard (C) (C) = copyright form signed (!) = missing copyright form! (-) = minor change, copyright form not signed Missing credits? Tell us and we will fix it. Send an email to fenics-dev@googlegroups.com dolfinx-0.3.0/CODE_OF_CONDUCT.md000066400000000000000000000072501411141764300157620ustar00rootroot00000000000000Code of Conduct =============== Our Pledge ---------- In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 
Our Standards ------------- Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others’ private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting Our Responsibilities -------------------- Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. Scope ----- This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
Enforcement ----------- Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at fenics-steering-council@googlegroups.com. Alternatively, you may report individually to one of the members of the Steering Council. Complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project’s leadership. If you feel that your report has not been followed up satisfactorily, then you may contact our parent organisation NumFOCUS at info@numfocus.org for further redress. Attribution ----------- This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html. Adaptations ----------- * Allow reporting to individual Steering Council members * Added the option to contact NumFOCUS for further redress. For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faqdolfinx-0.3.0/COPYING000066400000000000000000001051361411141764300142200ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. 
By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. 
To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. 
A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. 
You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. 
Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. 
If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. 
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . Note: Individual files contain the following tag instead of the full license text. 
SPDX-License-Identifier: LGPL-3.0-or-later This enables machine processing of license information based on the SPDX License Identifiers that are here available: http://spdx.org/licenses/dolfinx-0.3.0/COPYING.LESSER000066400000000000000000000167271411141764300152230ustar00rootroot00000000000000 GNU LESSER GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. 0. Additional Definitions. As used herein, "this License" refers to version 3 of the GNU Lesser General Public License, and the "GNU GPL" refers to version 3 of the GNU General Public License. "The Library" refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. An "Application" is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. A "Combined Work" is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the "Linked Version". The "Minimal Corresponding Source" for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. 
The "Corresponding Application Code" for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. 1. Exception to Section 3 of the GNU GPL. You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. 2. Conveying Modified Versions. If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy. 3. Object Code Incorporating Material from Library Header Files. The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. b) Accompany the object code with a copy of the GNU GPL and this license document. 4. Combined Works. 
You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. b) Accompany the Combined Work with a copy of the GNU GPL and this license document. c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. d) Do one of the following: 0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. 1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. 
If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) 5. Combined Libraries. You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 6. Revised Versions of the GNU Lesser General Public License. The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. 
If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. dolfinx-0.3.0/ChangeLog.rst000066400000000000000000002322641411141764300155510ustar00rootroot00000000000000Change log ========== 2018.2.0.dev0 ------------- - No changes 2018.1.0.dev0 (no release) -------------------------- - Forked DOLFINx 2017.2.0 (2017-12-05) --------------------- - Remove ``UnitQuadMesh`` and ``UnitHexMesh``. Now use ``UnitSquareMesh`` and ``UnitCubeMesh`` with cell type qualifiers. - Remove ``MeshEditor::open`` without cell type. Now you must explicitly specify CellType when opening a ``Mesh`` with ``MeshEditor``. - Rename ``Mesh::size_global`` to ``Mesh::num_entities_global``. - Remove ``Mesh::size``. Use ``Mesh::num_entities`` instead. - Improved mesh topology computation performance. - Remove excessive calls to MPI init. It may now be necessary in some cases to explicitly intialise MPI. - Improvements to sparsity pattern computation. - Addition of some interfaces using ``Eigen::Map/ref`` in addition to ``dolfin::Array(View)``. ``dolfin::Array(View)``interfaces will be removed in favour of Eigen interfaces in the next release. - Update pkg-config (dolfin.pc) file. - CMake modernisations, with more use of exported targets. - Add experimental pybind11 generated Python interface. Will replace the SWIG generated interface in the 2018.1 release. - Remove redundant SLEPc interfaces. Fixes issue `#908 `_. - Bump required Boost version to 1.55. - Remove PETScUserPreconditioner (was unused and untested). - Remove VTK plotting backend. Plotting is no longer available from the C++ interface. Basic plotting is available using ``matplotlib`` and ``x3dom`` backends via the ``plot()`` free function in the Python interface. Users are advised to move to e.g. 
Paraview for more demanding plotting needs. - Updates for ``ufc::finite_element::evaluate_vertex_values`` interface change. - Add new methods ``XDMFFile::write_checkpoint``, ``XDMFFile::read_checkpoint`` to write and read (checkpoint) functions. - Implement marking vertex and edge mesh functions by ``SubDomain::mark()`` using ``on_boundary`` flag. - Fix quadratic scaling in PETSc matrix allocation with global dofs; assembly with ``Real`` space now exhibits linear scaling in number of dofs. - Add assembly for quadrilateral and hexahedral meshes with CG and DG elements. - Updates for some demos and tests to show usage of quadrilateral and hexahedral meshes. - Deprecate ``CellSize`` (equivalent to ``2*Circumradius``) in favour of new ``CellDiameter``; add ``MinCellEdgeLength`` and ``MaxCellEdgeLength`` - Deprecate subclassing of ``Expression`` in Python; new Python class ``UserExpression`` introduced for user overloads - Deprecate ``VertexFunction``, ``EdgeFunction``, ``FaceFunction``, ``FacetFunction``, ``CellFunction``; use ``MeshFunction`` instead 2017.1.0 (2017-05-09) --------------------- - Refactor PETScLUSolver to use functionality from PETScKrylovSolver. Simplify interface for solving transposed systems. Fixes #815. - Switch default Python version to Python 3. Use `-DDOLFIN_USE_PYTHON3=off` to build with Python 2. - Remove redundant ``solve_transpose`` functions (use solve with bool argument instead) - Remove OpenMpAsssmebler - Remove MPI communicator as argument in GenericVector::init functions (communicator should be passed via constructor) - Remove ``Function::operator[+-*/]`` to prevent memory corruption problems (does not affect Python interface) - Fix XDMF3 output of time series. The default output method is now to assume that all functions have different meshes, and that the meshes change from time step to time step. 
Two parameters control the output, one limits each function to only one mesh for the whole time series, turn off the default on parameter ``rewrite_function_mesh`` to enable this. You can also make all functions share the same mesh and time series, which currently is better supported in Paraview than the alternative, turn on ``functions_share_mesh`` for this. These two parameters can also be combined in case all functions share the same mesh at all time steps. This creates minimal size files. - Add ``PETScSNESSolver`` and ``PETScTAOSolver`` constructor accepting both communicator and type - Expression("f[0]*f[1]", f=obj) notation now supported for non-scalar GenericFunction obj - Expression("f", f=obj) notation now supports obj of MeshFunction types (only cell based) - Fix MPI deadlock in case of instant compilation failure - Allow using ``Timer`` as context manager and add ``timed`` decorator to measure timings of functions and methods - Add ``NonlinearProblem::J_pc`` and support preconditioning matrix in ``NewtonSolver``, ``PETScSNESSolver`` and ``PETScTAOSolver`` 2016.2.0 [2016-11-30] --------------------- - Updates to XDMFFile interface, now fully supporting MeshFunction and MeshValueCollection with multiple named datasets in one file (useful for volume/boundary markers). Time series now only when a time is explicitly specified for each step. Full support for ASCII/XML XDMF. 
- Improved X3DOM support - Improved detection of UFC - Add CMake option `-DDOLFIN_USE_PYTHON3` to create a Python 3 build - Require CMake version 3.5 or higher - Add pylit to generate demo doc from rst - More careful checks of Dirichlet BC function spaces - Change definition of FunctionSpace::component() - Adaptive solving now works for tensor-valued unknowns - Improve logging of PETSc errors; details logged at level TRACE 2016.1.0 [2016-06-23] --------------------- - Remove support for 'uint'-valued MeshFunction (replaced by 'size_t') - Major performance improvements and simplifications of the XDMF IO. - Remove Zoltan graph partitioning interface - Add new algorithm for computing mesh entiites. Typical speed-up of two with gcc and four with clang. Reduced memory usage for meshes with irregularly numbered cells. - Remove STLMatrix, STLVector, MUMPSLUSolver and PastixLUSolver classes - Remove PETScPreconditioner::set_near_nullspace and add PETScMatrix::set_near_nullspace - Build system updates for VTK 7.0 - Remove XDMF from File interface. XDMF is XML based, and has many possibilities for file access, which are not accessible through the limited File interface and "<<" ">>" operators. Instead of File, use XDMFFile, and use XDMFFile.read() and XDMFFile.write() for I/O. Demos and tests have been updated to show usage. XDMF now also supports ASCII I/O in serial, useful for compatibility with users who do not have the HDF5 library available. - Require polynomial degree or finite element for Expressions in the Python interface (fixes Issue #355, https://bitbucket.org/fenics-project/dolfin/issues/355) - Switch to Google Test framwork for C++ unit tests - Fix bug when reading domain data from mesh file for a ghosted mesh - Add interface for manipulating mesh geometry using (higher-order) FE functions: free functions set_coordinates, get_coordinates, create_mesh - Fix bug when reading domain data from mesh file for a ghosted mesh. 
- Remove reference versions of constructors for many classes that store a pointer/reference to the object passed to the constructor. This is an intrusive interface change for C++ users, but necessary to improve code maintainabilty and to improve memory safety. The Python interface is (virtually) unaffected. - Remove class SubSpace. Using FunctionSpace::sub(...) instead - Remove reference versions constructors of NonlinearVariationalSolver - Remove setting of bounds from NonlinearVariationalSolver (was already available through NonlinearVariationalProblem) - Update Trilinos support to include Amesos2, and better support from Python - Rewrite interface of TensorLayout and SparsityPattern; local-to-global maps now handled using new IndexMap class; GenericSparsityPattern class removed - Remove QT (was an optional dependency) - PETScTAOSolver::solve() now returns a pair of number of iterations (std::size_t) and whether iteration converged (bool) - Better quality refinement in 2D in Plaza algorithm, by choosing refinement pattern based on max/min edge ratio - Removed refine_cell() method in CellTypes - Enable marker refinement to work in parallel for 1D meshes too - Add std::abort to Python exception hook to avoid parallel deadlocks - Extend dof_to_vertex_map with unowned dofs, thus making dof_to_vertex_map an inverse of vertex_to_dof_map - Clean-up in PyDOLFIN function space design, issue #576 - Deprecate MixedFunctionSpace and EnrichedFunctionSpace in favour of initialization by suitable UFL element - Add experimental matplotlib-based plotting backend, see mplot demo - Remove method argument of DirichletBC::get_boundary_values() - Change return types of free functions adapt() to shared_ptr 1.6.0 [2015-07-28] ------------------ - Remove redundant pressure boundary condition in Stokes demos - Require Point in RectangleMesh and BoxMesh constructors - Remove BinaryFile (TimeSeries now requires HDF5) - Add (highly experimental) support for Tpetra matrices and vectors from 
Trilinos, interfacing to Belos, Amesos2, IfPack2 and Muelu. - Enable (highly experimental) support for Quadrilateral and Hexahedral meshes, including some I/O, but no assembly yet. - Enable UMFPACK and CHOLMOD solvers with Eigen backend - Add an MPI_Comm to logger, currently defaulted to MPI_COMM_WORLD allowing better control over output in parallel - Experimental output of quadratic geometry in XDMF files, allows more exact visualisation of P2 Functions - Remove GenericMatrix::compressed (see Issue #61) - Deprecate and PETScKryloveSolver::set_nullspace() and add PETScMatrix::set_nullspace() - Remove uBLAS backend - Remove UmfpackLUSolver and CholmodSolver - Add EigenMatrix/Vector::data() - Remove GenericMatrix/Vector::data() and GenericMatrix/Vector::data() (to use backends that support data(), cast first to backend type, e.g. A = A.as_backend_type() - Remove cmake.local, replaced by fenics-install-component.sh - Make interior facet integrals define - and + cells ordered by cell_domains value. - Remove deprecated arguments *_domains from assemble() and Form(). - Change measure definition notation from dx[mesh_function] to dx(subdomain_data=mesh_function). - Set locale to "C" before reading from file - Change GenericDofMap::cell_dofs return type from const std::vector<..>& to ArrayView - Add ArrayView class for views into arrays - Change fall back linear algebra backend to Eigen - Add Eigen linear algebra backend - Remove deprecated GenericDofMap::geometric_dim function (fixes Issue #443) - Add quadrature rules for multimesh/cut-cell integration up to order 6 - Implement MPI reductions and XML ouput of Table class - list_timings() is now collective and returns MPI average across processes - Add dump_timings_to_xml() - Add enum TimingType { wall, user, system } for selecting wall-clock, user and system time in timing routines - Bump required SWIG version to 3.0.3 - Increase default maximum iterations in NewtonSolver to 50. 
- Deprecate Python free function homogenize(bc) in favour of member function DirichletBC::homogenize() 1.5.0 [2015-01-12] ------------------ - DG demos working in parallel - Simplify re-use of LU factorisations - CMake 3 compatibility - Make underlying SLEPc object accessible - Full support for linear algebra backends with 64-bit integers - Add smoothed aggregation AMG elasticity demo - Add support for slepc4py - Some self-assignment fixes in mesh data structures - Deprecated GenericDofMap::geometric_dimension() - Experimental support for ghosted meshes (overlapping region in parallel) - Significant memory reduction in dofmap storage - Re-write dofmap construction with significant performance and scaling improvements in parallel - Switch to local (process-wise) indexing for dof indices - Support local (process-wise) indexing in linear algerbra backends - Added support for PETSc 3.5, require version >= 3.3 - Exposed DofMap::tabulate_local_to_global_dofs, MeshEntity::sharing_processes in Python - Added GenericDofmap::local_dimension("all"|"owned"|"unowned") - Added access to SLEPc or slepc4py EPS object of SLEPcEigenSolver (requires slepc4py version >= 3.5.1) - LinearOperator can now be accessed using petsc4py - Add interface (PETScTAOSolver) for the PETSc nonlinear (bound-constrained) optimisation solver (TAO) - Add GenericMatrix::nnz() function to return number of nonzero entries in matrix (fixes #110) - Add smoothed aggregation algerbraic multigrid demo for elasticity - Add argument 'function' to project, to store the result into a preallocated function - Remove CGAL dependency and mesh generation, now provided by mshr - Python 2.7 required - Add experimental Python 3 support. 
Need swig version 3.0.3 or later - Move to py.test, speed up unit tests and make tests more robust in parallel - Repeated initialization of PETScMatrix is now an error - MPI interface change: num_processes -> size, process_number -> rank - Add optional argument project(..., function=f), to avoid superfluous allocation - Remove excessive printing of points during extrapolation - Clean up DG demos by dropping restrictions of Constants: c('+') -> c - Fix systemassembler warning when a and L both provide the same subdomain data. - Require mesh instead of cell argument to FacetArea, FacetNormal, CellSize, CellVolume, SpatialCoordinate, Circumradius, MinFacetEdgeLength, MaxFacetEdgeLength - Remove argument reset_sparsity to assemble() - Simplify assemble() and Form() signature: remove arguments mesh, coefficients, function_spaces, common_cell. These are now all found by inspecting the UFL form - Speed up assembly of forms with multiple integrals depending on different functions, e.g. f*dx(1) + g*dx(2). 
- Handle accessing of GenericVectors using numpy arrays in python layer instead of in hard-to-maintain C++ layer - Add support for mpi groups in jit-compilation - Make access to HDFAttributes more dict like - Add 1st and 2nd order Rush Larsen schemes for the PointIntegralSolver - Add vertex assembler for PointIntegrals - Add support for assembly of custom_integral - Add support for multimesh assembly, function spaces, dofmaps and functions - Fix to Cell-Point collision detection to prevent Points inside the mesh from falling between Cells due to rounding errors - Enable reordering of cells and vertices in parallel via SCOTCH and the Giibs-Poole-Stockmeyer algorithm - Efficiency improvements in dof assignment in parallel, working on HPC up to 24000 cores - Introduction of PlazaRefinement methods based on refinement of the Mesh skeleton, giving better quality refinement in 3D in parallel - Basic support for 'ghost cells' allowing integration over interior facets in parallel 1.4.0 [2014-06-02] ------------------ - Feature: Add set_diagonal (with GenericVector) to GenericMatrix - Fix many bugs associated with cell orientations on manifolds - Force all global dofs to be ordered last and to be on the last process in parallel - Speed up dof reordering of mixed space including global dofs by removing the latter from graph reordering - Force all dofs on a shared facet to be owned by the same process - Add FEniCS ('fenics') Python module, identical with DOLFIN Python module - Add function Form::set_some_coefficients() - Remove Boost.MPI dependency - Change GenericMatrix::compresss to return a new matrix (7be3a29) - Add function GenericTensor::empty() - Deprecate resizing of linear algebra via the GenericFoo interfaces (fixes #213) - Deprecate MPI::process_number() in favour of MPI::rank(MPI_Comm) - Use PETSc built-in reference counting to manage lifetime of wrapped PETSc objects - Remove random access function from MeshEntityIterator (fixes #178) - Add support for VTK 6 
(fixes #149) - Use MPI communicator in interfaces. Permits the creation of distributed and local objects, e.g. Meshes. - Reduce memory usage and increase speed of mesh topology computation 1.3.0 [2014-01-07] ------------------ - Feature: Enable assignment of sparse MeshValueCollections to MeshFunctions - Feature: Add free function assign that is used for sub function assignment - Feature: Add class FunctionAssigner that cache dofs for sub function assignment - Fix runtime dependency on checking swig version - Deprecate DofMap member methods vertex_to_dof_map and dof_to_vertex_map - Add free functions: vertex_to_dof_map and dof_to_vertex_map, and correct the ordering of the map. - Introduce CompiledSubDomain a more robust version of compiled_subdomains, which is now deprecated - CMake now takes care of calling the correct generate-foo script if so needed. - Feature: Add new built-in computational geometry library (BoundingBoxTree) - Feature: Add support for setting name and label to an Expression when constructed - Feature: Add support for passing a scalar GenericFunction as default value to a CompiledExpression - Feature: Add support for distance queries for 3-D meshes - Feature: Add PointIntegralSolver, which uses the MultiStageSchemes to solve local ODEs at Vertices - Feature: Add RKSolver and MultiStageScheme for general time integral solvers - Feature: Add support for assigning a Function with linear combinations of Functions, which lives in the same FunctionSpace - Added Python wrapper for SystemAssembler - Added a demo using compiled_extension_module with separate source files - Fixes for NumPy 1.7 - Remove DOLFIN wrapper code (moved to FFC) - Add set_options_prefix to PETScKrylovSolver - Remove base class BoundarCondition - Set block size for PETScMatrix when available from TensorLayout - Add support to get block compressed format from STLMatrix - Add detection of block structures in the dofmap for vector equations - Expose PETSc GAMG parameters - Modify 
SystemAssembler to support separate assembly of A and b 1.2.0 [2013-03-24] ------------------ - Fixes bug where child/parent hierarchy in Python were destroyed - Add utility script dolfin-get-demos - MeshFunctions in python now support iterable protocol - Add timed VTK output for Mesh and MeshFunction in addition to Functions - Expose ufc::dofmap::tabulate_entity_dofs to GenericDofMap interface - Expose ufc::dofmap::num_entity_dofs to GenericDofMap interface - Allow setting of row dof coordinates in preconditioners (only works with PETSc backend for now) - Expose more PETSc/ML parameters - Improve speed of tabulating coordinates in some DofMap functions - Feature: Add support for passing a Constant as default value to a CompiledExpression - Fix bug in dimension check for 1-D ALE - Remove some redundant graph code - Improvements in speed of parallel dual graph builder - Fix bug in XDMF output for cell-based Functions - Fixes for latest version of clang compiler - LocalSolver class added to efficiently solve cell-wise problems - New implementation of periodic boundary conditions. Now incorporated into the dofmap - Optional arguments to assemblers removed - SymmetricAssembler removed - Domains for assemblers can now only be attached to forms - SubMesh can now be constructed without a CellFunction argument, if the MeshDomain contains marked celldomains. 
- MeshDomains are propagated to a SubMesh during construction - Simplify generation of a MeshFunction from MeshDomains: No need to call mesh_function with mesh - Rename dolfin-config.cmake to DOLFINConfig.cmake - Use CMake to configure JIT compilation of extension modules - Feature: Add vertex_to_dof_map to DofMap, which map vertex indices to dolfin dofs - Feature: Add support for solving on m dimensional meshes embedded in n >= m dimensions 1.1.0 [2013-01-08] ------------------ - Add support for solving singular problems with Krylov solvers (PETSc only) - Add new typedef dolfin::la_index for consistent indexing with linear algebra backends. - Change default unsigned integer type to std::size_t - Add support to attaching operator null space to preconditioner (required for smoothed aggregation AMG) - Add basic interface to the PETSc AMG preconditioner - Make SCOTCH default graph partitioner (GNU-compatible free license, unlike ParMETIS) - Add scalable construction of mesh dual graph for mesh partitioning - Improve performance of mesh building in parallel - Add mesh output to SVG - Add support for Facet and cell markers to mesh converted from Diffpack - Add support for Facet and cell markers/attributes to mesh converted from Triangle - Change interface for auto-adaptive solvers: these now take the goal functional as a constructor argument - Add memory usage monitor: monitor_memory_usage() - Compare mesh hash in interpolate_vertex_values - Add hash() for Mesh and MeshTopology - Expose GenericVector::operator{+=,-=,+,-}(double) to Python - Add function Function::compute_vertex_values not needing a mesh argument - Add support for XDMF and HDF5 - Add new interface LinearOperator for matrix-free linear systems - Remove MTL4 linear algebra backend - Rename down_cast --> as_type in C++ / as_backend_type in Python - Remove KrylovMatrix interface - Remove quadrature classes - JIT compiled C++ code can now include a dolfin namespace - Expression string parsing now understand 
C++ namespace such as std::cosh - Fix bug in Expression so one can pass min, max - Fix bug in SystemAssembler, where mesh.init(D-1, D) was not called before assemble - Fix bug where the reference count of Py_None was not increased - Fix bug in reading TimeSeries of size smaller than 3 - Improve code design for Mesh FooIterators to avoid dubious down cast - Bug fix in destruction of PETSc user preconditioners - Add CellVolume(mesh) convenience wrapper to Python interface for UFL function - Fix bug in producing outward pointing normals of BoundaryMesh - Fix bug introduced by SWIG 2.0.5, where typemaps of templated typedefs are not handled correctly - Fix bug introduced by SWIG 2.0.5, which treated uint as Python long - Add check that sample points for TimeSeries are monotone - Fix handling of parameter "report" in Krylov solvers - Add new linear algebra backend "PETScCusp" for GPU-accelerated linear algebra - Add sparray method in the Python interface of GenericMatrix, requires scipy.sparse - Make methods that return a view of contiguous c-arrays, via a NumPy array, keep a reference from the object so it wont get out of scope - Add parameter: "use_petsc_signal_handler", which enables/disable PETSc system signals - Avoid unnecessary resize of result vector for A*b - MPI functionality for distributing values between neighbours - SystemAssembler now works in parallel with topological/geometric boundary search - New symmetric assembler with ability for stand-alone RHS assemble - Major speed-up of DirichletBC computation and mesh marking - Major speed-up of assembly of functions and expressions - Major speed-up of mesh topology computation - Add simple 2D and 3D mesh generation (via CGAL) - Add creation of mesh from triangulations of points (via CGAL) - Split the SWIG interface into six combined modules instead of one - Add has_foo to easy check what solver and preconditioners are available - Add convenience functions for listing available linear_algebra_backends - Change 
naming convention for cpp unit tests test.cpp -> Foo.cpp - Added cpp unit test for GenericVector::operator{-,+,*,/}= for all la backends - Add functionality for rotating meshes - Add mesh generation based on NETGEN constructive solid geometry - Generalize SparsityPattern and STLMatrix to support column-wise storage - Add interfaces to wrap PaStiX and MUMPS direct solvers - Add CoordinateMatrix class - Make STLMatrix work in parallel - Remove all tr1::tuple and use boost::tuple - Fix wrong link in Python quick reference. 1.0.0 [2011-12-07] ------------------ - Change return value of IntervalCell::facet_area() 0.0 --> 1.0. - Recompile all forms with FFC 1.0.0 - Fix for CGAL 3.9 on OS X - Improve docstrings for Box and Rectangle - Check number of dofs on local patch in extrapolation 1.0-rc2 [2011-11-28] -------------------- - Fix bug in 1D mesh refinement - Fix bug in handling of subdirectories for TimeSeries - Fix logic behind vector assignment, especially in parallel 1.0-rc1 [2011-11-21] -------------------- - 33 bugs fixed - Implement traversal of bounding box trees for all codimensions - Edit and improve all error messages - Added [un]equality operator to FunctionSpace - Remove batch compilation of Expression (Expressions) from Python interface - Added get_value to MeshValueCollection - Added assignment operator to MeshValueCollection 1.0-beta2 [2011-10-26] ---------------------- - Change search path of parameter file to ~/.fenics/dolfin_parameters.xml - Add functions Parameters::has_parameter, Parameters::has_parameter_set - Added option to store all connectivities in a mesh for TimeSeries (false by default) - Added option for gzip compressed binary files for TimeSeries - Propagate global parameters to Krylov and LU solvers - Fix OpenMp assemble of scalars - Make OpenMP assemble over sub domains work - DirichletBC.get_boundary_values, FunctionSpace.collapse now return a dict in Python - Changed name of has_la_backend to has_linear_algebra_backend - Added has_foo 
functions which can be used instead of the HAS_FOO defines - Less strict check on kwargs for compiled Expression - Add option to not right-justify tables - Rename summary --> list_timings - Add function list_linear_solver_methods - Add function list_lu_solver_methods - Add function list_krylov_solver_methods - Add function list_krylov_solver_preconditioners - Support subdomains in SystemAssembler (not for interior facet integrals) - Add option functionality apply("flush") to PETScMatrix - Add option finalize_tensor=true to assemble functions - Solver parameters can now be passed to solve - Remove deprecated function Variable::disp() - Remove deprecated function logging() - Add new class MeshValueCollection - Add new class MeshDomains replacing old storage of boundary markers as part of MeshData. The following names are no longer supported: - boundary_facet_cells - boundary_facet_numbers - boundary_indicators - material_indicators - cell_domains - interior_facet_domains - exterior_facet_domains - Rename XML tag --> - Rename SubMesh data "global_vertex_indices" --> "parent_vertex_indices" - Get XML input/output of boundary markers working again - Get FacetArea working again 1.0-beta [2011-08-11] --------------------- - Print percentage of non-zero entries when computing sparsity patterns - Use ufl.Real for Constant in Python interface - Add Dirichlet boundary condition argument to Python project function - Add remove functionality for parameter sets - Added out typemap for vector of shared_ptr objects - Fix typemap bug for list of shared_ptr objects - Support parallel XML vector io - Add support for gzipped XML output - Use pugixml for XML output - Move XML SAX parser to libxml2 SAX2 interface - Simplify XML io - Change interface for variational problems, class VariationalProblem removed - Add solve interface: solve(a == L), solve(F == 0) - Add new classes Linear/NonlinearVariationalProblem - Add new classes Linear/NonlinearVariationalSolver - Add form class aliases 
ResidualForm and JacobianForm in wrapper code - Default argument to variables in Expression are passed as kwargs in the Python interface - Add has_openmp as utility function in Python interface - Add improved error reporting using dolfin_error - Use Boost to compute Legendre polynomials - Remove ode code - Handle parsing of unrecognized command-line parameters - All const std::vector& now return a read-only NumPy array - Make a robust macro for generating a NumPy array from data - Exposing low level fem functionality to Python, by adding a Cell -> ufc::cell typemap - Added ufl_cell as a method to Mesh in Python interface - Fix memory leak in Zoltan interface - Remove some 'new' for arrays in favour of std::vector - Added cell as an optional argument to Constant - Prevent the use of non contiguous NumPy arrays for most typemaps - Point can now be used to evaluate a Function or Expression in Python - Fixed dimension check for Function and Expression eval in Python - Fix compressed VTK output for tensors in 2D 0.9.11 [2011-05-16] ------------------- - Change license from LGPL v2.1 to LGPL v3 or later - Moved meshconverter to dolfin_utils - Add support for conversion of material markers for Gmsh meshes - Add support for point sources (class PointSource) - Rename logging --> set_log_active - Add parameter "clear_on_write" to TimeSeries - Add support for input/output of nested parameter sets - Check for dimensions in linear solvers - Add support for automated error control for variational problems - Add support for refinement of MeshFunctions after mesh refinement - Change order of test and trial spaces in Form constructors - Make SWIG version >= 2.0 a requirement - Recognize subdomain data in Assembler from both Form and Mesh - Add storage for subdomains (cell_domains etc) in Form class - Rename MeshData "boundary facet cells" --> "boundary_facet_cells" - Rename MeshData "boundary facet numbers" --> "boundary_facet_numbers" - Rename MeshData "boundary indicators" --> 
"boundary_indicators" - Rename MeshData "exterior facet domains" --> "exterior_facet_domains" - Updates for UFC 2.0.1 - Add FiniteElement::evaluate_basis_derivatives_all - Add support for VTK output of facet-based MeshFunctions - Change default log level from PROGRESS to INFO - Add copy functions to FiniteElement and DofMap - Simplify DofMap - Interpolate vector values when reading from time series 0.9.10 [2011-02-23] ------------------- - Updates for UFC 2.0.0 - Handle TimeSeries stored backward in time (automatic reversal) - Automatic storage of hierarchy during refinement - Remove directory/library 'main', merged into 'common' - dolfin_init --> init, dolfin_set_precision --> set_precision - Remove need for mesh argument to functional assembly when possible - Add function set_output_stream - Add operator () for evaluation at points for Function/Expression in C++ - Add abs() to GenericVector interface - Fix bug for local refinement of manifolds - Interface change: VariationalProblem now takes: a, L or F, (dF) - Map linear algebra objects to processes consistently with mesh partition - Lots of improvements to parallel assembly, dof maps and linear algebra - Add lists supported_elements and supported_elements_for_plotting in Python - Add script dolfin-plot for plotting meshes and elements from the command-line - Add support for plotting elements from Python - Add experimental OpenMP assembler - Thread-safety fixed in Function class - Make GenericFunction::eval thread-safe (Data class removed) - Optimize and speedup topology computation (mesh.init()) - Add function Mesh::clean() for cleaning out auxiliary topology data - Improve speed and accuracy of timers - Fix bug in 3D uniform mesh refinement - Add built-in meshes UnitTriangle and UnitTetrahedron - Only create output directories when they don't exist - Make it impossible to set the linear algebra backend to something illegal - Overload value_shape instead of dim for user-defined Python Expressions - Permit unset 
parameters - Search only for BLAS library (not cblas.h) 0.9.9 [2010-09-01] ------------------ - Change build system to CMake - Add named MeshFunctions: VertexFunction, EdgeFunction, FaceFunction, FacetFunction, CellFunction - Allow setting constant boundary conditions directly without using Constant - Allow setting boundary conditions based on string ("x[0] == 0.0") - Create missing directories if specified as part of file names - Allow re-use of preconditioners for most backends - Fixes for UMFPACK solver on some 32 bit machines - Provide access to more Hypre preconditioners via PETSc - Updates for SLEPc 3.1 - Improve and implement re-use of LU factorizations for all backends - Fix bug in refinement of MeshFunctions 0.9.8 [2010-07-01] ------------------ - Optimize and improve StabilityAnalysis. - Use own implementation of binary search in ODESolution (takes advantage of previous values as initial guess) - Improve reading ODESolution spanning multiple files - Dramatic speedup of progress bar (and algorithms using it) - Fix bug in writing meshes embedded higher dimensions to M-files - Zero vector in uBLASVector::resize() to fix spurious bug in Krylov solver - Handle named fields (u.rename()) in VTK output - Bug fix in computation of FacetArea for tetrahedrons - Add support for direct plotting of Dirichlet boundary conditions: plot(bc) - Updates for PETSc 3.1 - Add relaxation parameter to NewtonSolver - Implement collapse of renumbered dof maps (serial and parallel) - Simplification of DofMapBuilder for parallel dof maps - Improve and simplify DofMap - Add Armadillo dependency for dense linear algebra - Remove LAPACKFoo wrappers - Add abstract base class GenericDofMap - Zero small values in VTK output to avoid VTK crashes - Handle MeshFunction/markers in homogenize bc - Make preconditioner selectable in VariationalProblem (new parameter) - Read/write meshes in binary format - Add parameter "use_ident" in DirichletBC - Issue error by default when solvers don't 
converge (parameter "error_on_nonconvergence") - Add option to print matrix/vector for a VariationalProblem - Trilinos backend now works in parallel - Remove Mesh refine member functions. Use free refine(...) functions instead - Remove AdaptiveObjects - Add Stokes demo using the MINI element - Interface change: operator+ now used to denote enriched function spaces - Interface change: operator+ --> operator* for mixed elements - Add option 'allow_extrapolation' useful when interpolating to refined meshes - Add SpatialCoordinates demo - Add functionality for accessing time series sample times: vector_times(), mesh_times() - Add functionality for snapping mesh to curved boundaries during refinement - Add functionality for smoothing the boundary of a mesh - Speedup assembly over exterior facets by not using BoundaryMesh - Mesh refinement improvements, remove unnecessary copying in Python interface - Clean PETSc and Epetra Krylov solvers - Add separate preconditioner classes for PETSc and Epetra solvers - Add function ident_zeros for inserting one on diagonal for zero rows - Add LU support for Trilinos interface 0.9.7 [2010-02-17] ------------------ - Add support for specifying facet orientation in assembly over interior facets - Allow user to choose which LU package PETScLUSolver uses - Add computation of intersection between arbitrary mesh entities - Random access to MeshEntityIterators - Modify SWIG flags to prevent leak when using SWIG director feature - Fix memory leak in std::vector typemaps - Add interface for SCOTCH for parallel mesh partitioning - Bug fix in SubDomain::mark, fixes bug in DirichletBC based on SubDomain::inside - Improvements in time series class, recognizing old stored values - Add FacetCell class useful in algorithms iterating over boundary facets - Rename reconstruct --> extrapolate - Remove GTS dependency 0.9.6 [2010-02-03] ------------------ - Simplify access to form compiler parameters, now integrated with global parameters - Add DofMap 
member function to return set of dofs - Fix memory leak in the LA interface - Do not import cos, sin, exp from NumPy to avoid clash with UFL functions - Fix bug in MTL4Vector assignment - Remove sandbox (moved to separate repository) - Remove matrix factory (dolfin/mf) - Update .ufl files for changes in UFL - Added swig/import/foo.i for easy type importing from dolfin modules - Allow optional argument cell when creating Expression - Change name of Expression argument cpparg --> cppcode - Add simple constructor (dim0, dim1) for C++ matrix Expressions - Add example demonstrating the use of cpparg (C++ code in Python) - Add least squares solver for dense systems (wrapper for DGELS) - New linear algebra wrappers for LAPACK matrices and vectors - Experimental support for reconstruction of higher order functions - Modified interface for eval() and inside() in C++ using Array - Introduce new Array class for simplified wrapping of arrays in SWIG - Improved functionality for intersection detection - Re-implementation of intersection detection using CGAL 0.9.5 [2009-12-03] ------------------ - Set appropriate parameters for symmetric eigenvalue problems with SLEPc - Fix for performance regression in recent uBLAS releases - Simplify Expression interface: f = Expression("sin(x[0])") - Simplify Constant interface: c = Constant(1.0) - Fix bug in periodic boundary conditions - Add simple script dolfin-tetgen for generating DOLFIN XML meshes from STL - Make XML parser append/overwrite parameter set when reading parameters from file - Refinement of function spaces and automatic interpolation of member functions - Allow setting global parameters for Krylov solver - Fix handling of Constants in Python interface to avoid repeated JIT compilation - Allow simple specification of subdomains in Python without needing to subclass SubDomain - Add function homogenize() for simple creation of homogeneous BCs from given BCs - Add copy constructor and possibility to change value for DirichletBC 
- Add simple wrapper for ufl.cell.n. FacetNormal(mesh) now works again in Python. - Support apply(A), apply(b) and apply(b, x) in PeriodicBC - Enable setting spectral transformation for SLEPc eigenvalue solver 0.9.4 [2009-10-12] ------------------ - Remove set, get and operator() methods from MeshFunction - Added const and none const T &operator[uint/MeshEntity] to MeshFunction - More clean up in SWIG interface files, remove global renames and ignores - Update Python interface to Expression, with extended tests for value ranks - Removed DiscreteFunction class - Require value_shape and geometric_dimension in Expression - Introduce new class Expression replacing user-defined Functions - interpolate_vertex_values --> compute_vertex_values - std::map replaces generated CoefficientSet code - Cleanup logic in Function class as a result of new Expression class - Introduce new Coefficient base class for form coefficients - Replace CellSize::min,max by Mesh::hmin,hmax - Use MUMPS instead of UMFPACK as default direct solver in both serial and parallel - Fix bug in SystemAssembler - Remove support for PETSc 2.3 and support PETSc 3.0.0 only - Remove FacetNormal Function. Use UFL facet normal instead. 
- Add update() function to FunctionSpace and DofMap for use in adaptive mesh refinement - Require mesh in constructor of functionals (C++) or argument to assemble (Python) 0.9.3 [2009-09-25] ------------------ - Add global parameter "ffc_representation" for form representation in FFC JIT compiler - Make norm() function handle both vectors and functions in Python - Speedup periodic boundary conditions and make work for mixed (vector-valued) elements - Add possibilities to use any number numpy array when assigning matrices and vectors - Add possibilities to use any integer numpy array for indices in matrices and vectors - Fix for int typemaps in PyDOLFIN - Split mult into mult and transpmult - Filter out PETSc argument when parsing command-line parameters - Extend comments to SWIG interface files - Add copyright statements to SWIG interface files (not finished yet) - Add typemaps for misc std::vector in PyDOLFIN - Remove dependencies on std_vector.i reducing SWIG wrapper code size - Use relative %includes in dolfin.i - Changed names on SWIG interface files dolfin_foo.i -> foo.i - Add function interpolate() in Python interface - Fix typmaps for uint in python 2.6 - Use TypeError instead of ValueError in typechecks in typmaps.i - Add in/out shared_ptr typemaps for PyDOLFIN - Fix JIT compiling in parallel - Add a compile_extension_module function in PyDOLFIN - Fix bug in Python vector assignment - Add support for compressed base64 encoded VTK files (using zlib) - Add support for base64 encoded VTK files - Experimental support for parallel assembly and solve - Bug fix in project() function, update to UFL syntax - Remove disp() functions and replace by info(foo, true) - Add fem unit test (Python) - Clean up SystemAssembler - Enable assemble_system through PyDOLFIN - Add 'norm' to GenericMatrix - Efficiency improvements in NewtonSolver - Rename NewtonSolver::get_iteration() to NewtonSolver::iteration() - Improvements to EpetraKrylovSolver::solve - Add constructor 
Vector::Vector(const GenericVector& x) - Remove SCons deprecation warnings - Memory leak fix in PETScKrylovSolver - Rename dolfin_assert -> assert and use C++ version - Fix debug/optimise flags - Remove AvgMeshSize, InvMeshSize, InvFacetArea from SpecialFunctions - Rename MeshSize -> CellSize - Rewrite parameter system with improved support for command-line parsing, localization of parameters (per class) and usability from Python - Remove OutflowFacet from SpecialFunctions - Rename interpolate(double*) --> interpolate_vertex_values(double*) - Add Python version of Cahn-Hilliard demo - Fix bug in assemble.py - Permit interpolation of functions between non-matching meshes - Remove Function::Function(std::string filename) - Transition to new XML io - Remove GenericSparsityPattern::sort - Require sorted/unsorted parameter in SparsityPattern constructor - Improve performance of SparsityPattern::insert - Replace enums with strings for linear algebra and built-in meshes - Allow direct access to Constant value - Initialize entities in MeshEntity constructor automatically and check range - Add unit tests to the memorycheck - Add call to clean up libxml2 parser at exit - Remove unnecessary arguments in DofMap member functions - Remove reference constructors from DofMap, FiniteElement and FunctionSpace - Use a shared_ptr to store the mesh in DofMap objects - Interface change for wrapper code: PoissonBilinearForm --> Poisson::BilinearForm - Add function info_underline() for writing underlined messages - Rename message() --> info() for "compatibility" with Python logging module - Add elementwise multiplication in GenericVector interface - GenericVector interface in PyDOLFIN now support the sequence protocol - Rename of camelCaps function names: fooBar --> foo_bar Note: mesh.numVertices() --> mesh.num_vertices(), mesh.numCells() --> mesh.num_cells() - Add slicing capabilities for GenericMatrix interface in PyDOLFIN (only getitem) - Add slicing capabilities for GenericVector 
interface in PyDOLFIN - Add sum to GenericVector interface 0.9.2 [2009-04-07] ------------------ - Enable setting parameters for Newton solver in VariationalProblem - Simplified and improved implementation of C++ plotting, calling Viper on command-line - Remove precompiled elements and projections - Automatically interpolate user-defined functions on assignment - Add new built-in function MeshCoordinates, useful in ALE simulations - Add new constructor to Function class, Function(V, "vector.xml") - Remove class Array (using std::vector instead) - Add vector_mapping data to MeshData - Use std::vector instead of Array in MeshData - Add assignment operator and copy constructor for MeshFunction - Add function mesh.move(other_mesh) for moving mesh according to matching mesh (for FSI) - Add function mesh.move(u) for moving mesh according to displacement function (for FSI) - Add macro dolfin_not_implemented() - Add new interpolate() function for interpolation of user-defined function to discrete - Make _function_space protected in Function - Added access to crs data from python for uBLAS and MTL4 backend 0.9.1 [2009-02-17] ------------------ - Check Rectangle and Box for non-zero dimensions - ODE solvers now solve the dual problem - New class SubMesh for simple extraction of matching meshes for sub domains - Improvements of multiprecision ODE solver - Fix Function class copy constructor - Bug fixes for errornorm(), updates for new interface - Interface update for MeshData: createMeshFunction --> create_mesh_function etc - Interface update for Rectangle and Box - Add elastodynamics demo - Fix memory leak in IntersectionDetector/GTSInterface - Add check for swig version, in jit and compile functions - Bug fix in dolfin-order script for gzipped files - Make shared_ptr work across C++/Python interface - Replace std::tr1::shared_ptr with boost::shared_ptr - Bug fix in transfinite mean-value interpolation - Less annoying progress bar (silent when progress is fast) - Fix 
assignment operator for MeshData - Improved adaptive mesh refinement (recursive Rivara) producing better quality meshes 0.9.0 [2009-01-05] ------------------ - Cross-platform fixes - PETScMatrix::copy fix - Some Trilinos fixes - Improvements in MeshData class - Do not use initial guess in Newton solver - Change OutflowFacet to IsOutflowFacet and change syntax - Used shared_ptr for underlying linear algebra objects - Cache subspaces in FunctionSpace - Improved plotting, now support plot(grad(u)), plot(div(u)) etc - Simple handling of JIT-compiled functions - Sign change (bug fix) in increment for Newton solver - New class VariationalProblem replacing LinearPDE and NonlinearPDE - Parallel parsing and partitioning of meshes (experimental) - Add script dolfin-order for ordering mesh files - Add new class SubSpace (replacing SubSystem) - Add new class FunctionSpace - Complete redesign of Function class hierarchy, now a single Function class - Increased use of shared_ptr in Function, FunctionSpace, etc - New interface for boundary conditions, form not necessary - Allow simple setting of coefficient functions based on names (not their index) - Don't order mesh automatically, meshes must now be ordered explicitly - Simpler definition of user-defined functions (constructors not necessary) - Make mesh iterators const to allow for const-correct Mesh code 0.8.1 [2008-10-20] ------------------ - Add option to use ML multigrid preconditioner through PETSc - Interface change for ODE solvers: uBLASVector --> double* - Remove homotopy solver - Remove typedef real, now using plain double instead - Add various operators -=, += to GenericMatrix - Don't use -Werror when compiling SWIG generated code - Remove init(n) and init(m, n) from GenericVector/Matrix. 
Use resize and zero instead - Add new function is_combatible() for checking compatibility of boundary conditions - Use x as initial guess in Krylov solvers (PETSc, uBLAS, ITL) - Add new function errornorm() - Add harmonic ALE mesh smoothing - Refinements of Graph class - Add CholmodCholeskySolver (direct solver for symmetric matrices) - Implement application of Dirichlet boundary conditions within assembly loop - Improve efficiency of SparsityPattern - Allow a variable number of smoothings - Add class Table for pretty-printing of tables - Add experimental MTL4 linear algebra backend - Add OutflowFacet to SpecialFunctions for DG transport problems - Remove unmaintained OpenDX file format - Fix problem with mesh smoothing near nonconvex corners - Simple projection of functions in Python - Add file format: XYZ for use with Xd3d - Add built-in meshes: UnitCircle, Box, Rectangle, UnitSphere 0.8.0 [2008-06-23] ------------------ - Fix input of matrix data from XML - Add function normalize() - Integration with VMTK for reading DOLFIN XML meshes produced by VMTK - Extend mesh XML format to handle boundary indicators - Add support for attaching arbitrarily named data to meshes - Add support for dynamically choosing the linear algebra backend - Add Epetra/Trilinos linear solvers - Add setrow() to matrix interface - Add new solver SingularSolver for solving singular (pressure) systems - Add MeshSize::min(), max() for easy computation of smallest/largest mesh size - LinearSolver now handles all backends and linear solvers - Add access to normal in Function, useful for inflow boundary conditions - Remove GMRES and LU classes, use solve() instead - Improve solve() function, now handles both LU and Krylov + preconditioners - Add ALE mesh interpolation (moving mesh according to new boundary coordinates) 0.7.3 [2008-04-30] ------------------ - Add support for Epetra/Trilinos - Bug fix for order of values in interpolate_vertex_values, now according to UFC - Boundary meshes are now 
always oriented with respect to outward facet normals - Improved linear algebra, both in C++ and Python - Make periodic boundary conditions work in Python - Fix saving of user-defined functions - Improve plotting - Simple computation of various norms of functions from Python - Evaluation of Functions at arbitrary points in a mesh - Fix bug in assembling over exterior facets (subdomains were ignored) - Make progress bar less annoying - New scons-based build system replaces autotools - Fix bug when choosing iterative solver from Python 0.7.2 [2008-02-18] ------------------ - Improve sparsity pattern generator efficiency - Dimension-independent sparsity pattern generator - Add support for setting strong boundary values for DG elements - Add option setting boundary conditions based on geometrical search - Check UMFPACK return argument for warnings/errors - Simplify setting simple Dirichlet boundary conditions - Much improved integration with FFC in PyDOLFIN - Caching of forms by JIT compiler now works - Updates for UFC 1.1 - Catch exceptions in PyDOLFIN - Work on linear algebra interfaces GenericTensor/Matrix/Vector - Add linear algebra factory (backend) interface - Add support for 1D meshes - Make Assembler independent of linear algebra backend - Add manager for handling sub systems (PETSc and MPI) - Add parallel broadcast of Mesh and MeshFunction - Add experimental support for parallel assembly - Use PETSc MPI matrices when running in parallel - Add predefined functions FacetNormal and AvgMeshSize - Add left/right/crisscross options for UnitSquare - Add more Python demos - Add support for Exodus II format in dolfin-convert - Autogenerate docstrings for PyDOLFIN - Various small bug fixes and improvements 0.7.1 [2007-08-31] ------------------ - Integrate FFC form language into PyDOLFIN - Just-in-time (JIT) compilation of variational forms - Conversion from Diffpack grid format to DOLFIN XML - Name change: BoundaryCondition --> DirichletBC - Add support for 
periodic boundary conditions: class PeriodicBC - Redesign default linear algebra interface (Matrix, Vector, KrylovSolver, etc) - Add function to return Vector associated with a DiscreteFunction 0.7.0-1 [2007-06-22] -------------------- - Recompile all forms with latest FFC release - Remove typedefs SparseMatrix and SparseVector - Fix includes in LinearPDE - Rename DofMaps -> DofMapSet 0.7.0 [2007-06-20] ------------------ - Move to UFC interface for code generation - Major rewrite, restructure, cleanup - Add support for Brezzi-Douglas-Marini (BDM) elements - Add support for Raviart-Thomas (RT) elements - Add support for Discontinuous Galerkin (DG) methods - Add support for mesh partitioning (through SCOTCH) - Handle both UMFPACK and UFSPARSE - Local mesh refinement - Mesh smoothing - Built-in plotting (through Viper) - Cleanup log system - Numerous fixes for mesh, in particular MeshFunction - Much improved Python bindings for mesh - Fix Python interface for vertex and cell maps in boundary computation 0.6.4 [2006-12-01] ------------------ - Switch from Python Numeric to Python NumPy - Improved mesh Python bindings - Add input/output support for MeshFunction - Change Mesh::vertices() --> Mesh::coordinates() - Fix bug in output of mesh to MATLAB format - Add plasticty module (experimental) - Fix configure test for Python dev (patch from Åsmund Ødegård) - Add mesh benchmark - Fix memory leak in mesh (data not deleted correctly in MeshTopology) - Fix detection of curses libraries - Remove Tecplot output format 0.6.3 [2006-10-27] ------------------ - Move to new mesh library - Remove dolfin-config and move to pkg-config - Remove unused classes PArray, PList, Table, Tensor - Visualization of 2D solutions in OpenDX is now supported (3D supported before) - Add support for evaluation of functionals - Fix bug in Vector::sum() for uBLAS vectors 0.6.2-1 [2006-09-06] -------------------- - Fix compilation error when using --enable-petsc (dolfin::uBLASVector::PETScVector 
undefined) 0.6.2 [2006-09-05] ------------------ - Finish chapter in manual on linear algebra - Enable PyDOLFIN by default, use --disable-pydolfin to disable - Disable PETSc by default, use --enable-petsc to enable - Modify ODE solver interface for u0() and f() - Add class ConvectionMatrix - Readd classes LoadVector, MassMatrix, StiffnessMatrix - Add matrix factory for simple creation of standard finite element matrices - Collect static solvers in LU and GMRES - Bug fixes for Python interface PyDOLFIN - Enable use of direct solver for ODE solver (experimental) - Remove demo bistable - Restructure and cleanup linear algebra - Use UMFPACK for LU solver with uBLAS matrix types - Add templated wrapper class for different uBLAS matrix types - Add ILU preconditioning for uBLAS matrices - Add Krylov solver for uBLAS sparse matrices (GMRES and BICGSTAB) - Add first version of new mesh library (NewMesh, experimental) - Add Parametrized::readParameters() to trigger reading of values on set() - Remove output of zeros in Octave matrix file format - Use uBLAS-based vector for Vector if PETSc disabled - Add wrappers for uBLAS compressed_matrix class - Compute eigenvalues using SLEPc (an extension of PETSc) - Clean up assembly and linear algebra - Add function to solve Ax = b for dense matrices and dense vectors - Make it possible to compile without PETSc (--disable-petsc) - Much improved ODE solvers - Complete multi-adaptive benchmarks reaction and wave - Assemble boundary integrals - FEM class cleaned up. 
- Fix multi-adaptive benchmark problem reaction - Small fixes for Intel C++ compiler version 9.1 - Test for Intel C++ compiler and configure appropriately - Add new classes DenseMatrix and DenseVector (wrappers for ublas) - Fix bug in conversion from Gmsh format 0.6.1 [2006-03-28] ------------------ - Regenerate build system in makedist script - Update for new FFC syntax: BasisFunction --> TestFunction, TrialFunction - Fixes for conversion script dolfin-convert - Initial cleanups and fixes for ODE solvers - Numerous small fixes to improve portability - Remove dolfin:: qualifier on output << in Parameter.h - Don't use anonymous classes in demos, gives errors with some compilers - Remove KrylovSolver::solver() - Fix bug in convection-diffusion demo (boundary condition for pressure), use direct solver - LinearPDE and NewtonSolver use umfpack LU solver by default (if available) when doing direct solve - Set PETSc matrix type through Matrix constructor - Allow linear solver and preconditioner type to be passed to NewtonSolver - Fix bug in Stokes demos (wrong boundary conditions) - Cleanup Krylov solver - Remove KrylovSolver::setPreconditioner() etc. and move to constructors - Remove KrylovSolver::setRtol() etc. 
and replace with parameters - Fix remaining name changes: noFoo() --> numFoo() - Add Cahn-Hilliard equation demo - NewtonSolver option to use residual or incremental convergence criterion - Add separate function to nls to test for convergence of Newton iterations - Fix bug in dolfin-config (wrong version number) 0.6.0 [2006-03-01] ------------------ - Fix bug in XML output format (writing multiple objects) - Fix bug in XML matrix output format (handle zero rows) - Add new nonlinear PDE demo - Restructure PDE class to use envelope-letter design - Add precompiled finite elements for q <= 5 - Add FiniteElementSpec and factor function for FiniteElement - Add input/output of Function to DOLFIN XML - Name change: dof --> node - Name change: noFoo() --> numFoo() - Add conversion from gmsh format in dolfin-convert script - Updates for PETSc 2.3.1 - Add new type of Function (constant) - Simplify use of Function class - Add new demo Stokes + convection-diffusion - Add new demo Stokes (equal-order stabilized) - Add new demo Stokes (Taylor-Hood) - Add new parameter for KrylovSolvers: "monitor convergence" - Add conversion script dolfin-convert for various mesh formats - Add new demo elasticity - Move poisson demo to src/demo/pde/poisson - Move to Mercurial (hg) from CVS - Use libtool to build libraries (including shared) 0.5.12 [2006-01-12] ------------------- - Make Stokes solver dimension independent (2D/3D) - Make Poisson solver dimension independent (2D/3D) - Fix sparse matrix output format for MATLAB - Modify demo problem for Stokes, add exact solution and compute error - Change interface for boundary conditions: operator() --> eval() - Add two benchmark problems for the Navier-Stokes solver - Add support for 2D/3D selection in Navier-Stokes solver - Move tic()/toc() to timing.h - Navier-Stokes solver back online - Make Solver a subclass of Parametrized - Add support for localization of parameters - Redesign of parameter system 0.5.11 [2005-12-15] ------------------- - 
Add script monitor for monitoring memory usage - Remove meminfo.h (not portable) - Remove dependence on parameter system in log system - Don't use drand48() (not portable) - Don't use strcasecmp() (not portable) - Remove sysinfo.h and class System (not portable) - Don't include (not portable) - Change ::show() --> ::disp() everywhere - Clean out old quadrature classes on triangles and tetrahedra - Clean out old sparse matrix code - Update chapter on Functions in manual - Use std::map to store parameters - Implement class KrylovSolver - Name change: Node --> Vertex - Add nonlinear solver demos - Add support for picking sub functions and components of functions - Update interface for FiniteElement for latest FFC version - Improve and restructure implementation of the Function class - Dynamically adjust safety factor during integration - Improve output Matrix::disp() - Check residual at end of time step, reject step if too large - Implement Vector::sum() - Implement nonlinear solver - New option for ODE solver: "save final solution" --> solution.data - New ODE test problem: reaction - Fixes for automake 1.9 (nobase_include_HEADERS) - Reorganize build system, remove fake install and require make install - Add checks for non-standard PETSc component HYPRE in NSE solver - Make GMRES solver return the number of iterations - Add installation script for Python interface - Add Matrix Market format (Haiko Etzel) - Automatically reinitialize GMRES solver when system size changes - Implement cout << for class Vector 0.5.10 [2005-10-11] ------------------- - Modify ODE solver interface: add T to constructor - Fix compilation on AMD 64 bit systems (add -fPIC) - Add new BLAS mode for form evaluation - Change enum types in File to lowercase - Change default file type for .m to Octave - Add experimental Python interface PyDOLFIN - Fix compilation for gcc 4.0 0.5.9 [2005-09-23] ------------------ - Add Stokes module - Support for arbitrary mixed elements through FFC - VTK output 
interface now handles time-dependent functions automatically - Fix cout for empty matrix - Change dolfin_start() --> dolfin_end() - Add chapters to manual: about, log system, parameters, reference elements, installation, contributing, license - Use new template fenicsmanual.cls for manual - Add compiler flag -U__STRICT_ANSI__ when compiling under Cygwin - Add class EigenvalueSolver 0.5.8 [2005-07-05] ------------------ - Add new output format Paraview/VTK (Garth N. Wells) - Update Tecplot interface - Move to PETSc 2.3.0 - Complete support for general order Lagrange elements in triangles and tetrahedra - Add test problem in src/demo/fem/convergence/ for general Lagrange elements - Make FEM::assemble() estimate the number of nonzeros in each row - Implement Matrix::init(M, N, nzmax) - Add Matrix::nz(), Matrix::nzsum() and Matrix::nzmax() - Improve Mesh::disp() - Add FiniteElement::disp() and FEM::disp() (useful for debugging) - Remove old class SparseMatrix - Change FEM::setBC() --> FEM::applyBC() - Change Mesh::tetrahedrons --> Mesh::tetrahedra - Implement Dirichlet boundary conditions for tetrahedra - Implement Face::contains(const Point& p) - Add test for shape dimension of mesh and form in FEM::assemble() - Move src/demo/fem/ demo to src/demo/fem/simple/ - Add README file in src/demo/poisson/ (simple manual) - Add simple demo program src/demo/poisson/ - Update computation of alignment of faces to match FFC/FIAT 0.5.7 [2005-06-23] ------------------ - Clean up ODE test problems - Implement automatic detection of sparsity pattern from given matrix - Clean up homotopy solver - Implement automatic computation of Jacobian - Add support for assembly of non-square systems (Andy Terrel) - Make ODE solver report average number of iterations - Make progress bar write first update at 0% - Initialize all values of u before solution in multi-adaptive solver, not only components given by dependencies - Allow user to modify and verify a converging homotopy path - Make homotopy 
solver save a list of the solutions - Add Matrix::norm() - Add new test problem for CES economy - Remove cast from Parameter to const char* (use std::string) - Make solution data filename optional for homotopy solver - Append homotopy solution data to file during solution - Add dolfin::seed(int) for optionally seeding random number generator - Remove dolfin::max,min (use std::max,min) - Add polynomial-integer (true polynomial) form of general CES system - Compute multi-adaptive efficiency index - Updates for gcc 4.0 (patches by Garth N. Wells) - Add Matrix::mult(const real x[], uint row) (temporary fix, assumes uniprocessor case) - Add Matrix::mult(const Vector& x, uint row) (temporary fix, assumes uniprocessor case) - Update shortcuts MassMatrix and StiffnessMatrix to new system - Add missing friend to Face.h (reported by Garth N. Wells) 0.5.6 [2005-05-17] ------------------ - Implementation of boundary conditions for general order Lagrange (experimental) - Use interpolation function automatically generated by FFC - Put computation of map into class AffineMap - Clean up assembly - Use dof maps automatically generated by FFC (experimental) - Modify interface FiniteElement for new version of FFC - Update ODE homotopy test problems - Add cross product to class Point - Sort mesh entities locally according to ordering used by FIAT and FFC - Add new format for dof maps (preparation for higher-order elements) - Code cleanups: NewFoo --> Foo complete - Updates for new version of FFC (0.1.7) - Bypass log system when finalizing PETSc (may be out of scope) 0.5.5 [2005-04-26] ------------------ - Fix broken log system, curses works again - Much improved multi-adaptive time-stepping - Move elasticity module to new system based on FFC - Add boundary conditions for systems - Improve regulation of time steps - Clean out old assembly classes - Clean out old form classes - Remove kernel module map - Remove kernel module element - Move convection-diffusion module to new system based 
on FFC - Add iterators for cell neighbors of edges and faces - Implement polynomial form of CES economy - Rename all new linear algebra classes: NewFoo --> Foo - Clean out old linear algebra - Speedup setting of boundary conditions (add MAT_KEEP_ZEROED_ROWS) - Fix bug for option --disable-curses 0.5.4 [2005-03-29] ------------------ - Remove option to compile with PETSc 2.2.0 (2.2.1 required) - Make make install work again (fix missing includes) - Add support for mixing multiple finite elements (through FFC) - Improve functionality of homotopy solver - Simple creation of piecewise linear functions (without having an element) - Simple creation of piecewise linear elements - Add support of automatic creation of simple meshes (unit cube, unit square) 0.5.3 [2005-02-26] ------------------ - Change to PETSc version 2.2.1 - Add flag --with-petsc= to configure script - Move Poisson's equation to system based on FFC - Add support for automatic creation of homotopies - Make all ODE solvers automatically handle complex ODEs: (M) z' = f(z,t) - Implement version of mono-adaptive solver for implicit ODEs: M u' = f(u,t) - Implement Newton's method for multi- and mono-adaptive ODE solvers - Update PETSc wrappers NewVector, NewMatrix, and NewGMRES - Fix initialization of PETSc - Add mono-adaptive cG(q) and dG(q) solvers (experimental) - Implementation of new assembly: NewFEM, using output from FFC - Add access to mesh for nodes, cells, faces and edges - Add Tecplot I/O interface; contributed by Garth N. 
Wells 0.5.2 [2005-01-26] ------------------ - Benchmarks for DOLFIN vs PETSc (src/demo/form and src/demo/test) - Complete rewrite of the multi-adaptive ODE solver (experimental) - Add wrapper for PETSc GMRES solver - Update class Point with new operators - Complete rewrite of the multi-adaptive solver to improve performance - Add PETSc wrappers NewMatrix and NewVector - Add DOLFIN/PETSc benchmarks 0.5.1 [2004-11-10] ------------------ - Experimental support for automatic generation of forms using FFC - Allow user to supply Jacobian to ODE solver - Add optional test to check if a dependency already exists (Sparsity) - Modify sparse matrix output (Matrix::show()) - Add FGMRES solver in new format (patch from eriksv) - Add non-const version of quick-access of sparse matrices - Add linear mappings for simple computation of derivatives - Add check of matrix dimensions for ODE sparsity pattern - Include missing cmath in Function.cpp 0.5.0 [2004-08-18] ------------------ - First prototype of new form evaluation system - New classes Jacobi, SOR, Richardson (preconditioners and linear solvers) - Add integrals on the boundary (ds), partly working - Add maps from boundary of reference cell - Add evaluation of map from reference cell - New Matrix functions: max, min, norm, and sum of rows and columns (erik) - Derivatives/gradients of ElementFunction (coefficients f.ex.) 
implemented - Enable assignment to all elements of a NewArray - Add functions Boundary::noNodes(), noFaces(), noEdges() - New class GaussSeidel (preconditioner and linear solver) - New classes Preconditioner and LinearSolver - Bug fix for tetrahedral mesh refinement (ingelstrom) - Add iterators for Edge and Face on Boundary - Add functionality to Map: bdet() and cell() - Add connectivity face-cell and edge-cell - New interface for assembly: Galerkin --> FEM - Bug fix for PDE systems of size > 3 0.4.11 [2004-04-23] ------------------- - Add multigrid solver (experimental) - Update manual 0.4.10 ------ - Automatic model reduction (experimental) - Fix bug in ParticleSystem (divide by mass) - Improve control of integration (add function ODE::update()) - Load/save parameters in XML-format - Add assembly test - Add simple StiffnessMatrix, MassMatrix, and LoadVector - Change dK --> dx - Change dx() --> ddx() - Add support for GiD file format - Add performance tests for multi-adaptivity (both stiff and non-stiff) - First version of Newton for the multi-adaptive solver - Test for Newton for the multi-adaptive solver 0.4.9 ----- - Add multi-adaptive solver for the bistable equation - Add BiCGSTAB solver (thsv) - Fix bug in SOR (thsv) - Improved visual program for OpenDX - Fix OpenDX file format for scalar functions - Allow access to samples of multi-adaptive solution - New patch from thsv for gcc 3.4.0 and 3.5.0 - Make progress step a parameter - New function ODE::sparse(const Matrix& A) - Access nodes, cells, edges, faces by id - New function Matrix::lump() 0.4.8 ----- - Add support for systems (jansson and bengzon) - Add new module wave - Add new module wave-vector - Add new module elasticity - Add new module elasticity-stationary - Multi-adaptive updates - Fix compilation error in LogStream - Fix local Newton iteration for higher order elements - Init matrix to given type - Add output of cG(q) and dG(q) weights in matrix format - Fix numbering of frames from plotslab 
script - Add png output for plotslab script - Add script for running stiff test problems, plot solutions - Fix bug in MeshInit (node neighbors of node) - Modify output of sysinfo() - Compile with -Wall -Werror -pedantic -ansi -std=c++98 (thsv) 0.4.7 ----- - Make all stiff test problems work - Display status report also when using step() - Improve adaptive damping for stiff problems (remove spikes) - Modify Octave/Matlab format for solution data (speed improvement) - Adaptive sampling of solution (optional) - Restructure stiff test problems - Check if value of right-hand side is valid - Modify divergence test in AdaptiveIterationLevel1 0.4.6 ----- - Save vectors and matrices from Matlab/Octave (foufas) - Rename writexml.m to xmlmesh.m - Inlining of important functions - Optimize evaluation of elements - Optimize Lagrange polynomials - Optimize sparsity: use stl containers - Optimize choice of discrete residual for multi-adaptive solver - Don't save solution in benchmark problem - Improve computation of divergence factor for underdamped systems - Don't check residual on first slab for fixed time step - Decrease largest (default) time step to 0.1 - Add missing in TimeStepper - Move real into dolfin namespace 0.4.5 ----- - Rename function.h to enable compilation under Cygwin - Add new benchmark problem for multi-adaptive solver - Bug fix for ParticleSystem - Initialization of first time step - Improve time step regulation (threshold) - Improve stabilization - Improve TimeStepper interface (Ko Project) - Use iterators instead of recursively calling TimeSlab::update() - Clean up ODESolver - Add iterators for elements in time slabs and element groups - Add -f to creation of symbolic links 0.4.4 ----- - Add support for 3D graphics in Octave using Open Inventor (jj) 0.4.3 ----- - Stabilization of multi-adaptive solver (experimental) - Improved non-support for curses (--disable-curses) - New class MechanicalSystem for simulating mechanical systems - Save debug info from 
primal and dual (plotslab.m) - Fix bug in progress bar - Add missing include file in Components.h (kakr) - New function dolfin_end(const char* msg, ...) - Move numerical differentiation to RHS - New class Event for limited display of messages - Fix bug in LogStream (large numbers in floating point format) - Specify individual time steps for different components - Compile without warnings - Add -Werror to option enable-debug - Specify individual methods for different components - Fix bug in dGqMethods - Fix bug (delete old block) in ElementData - Add parameters for method and order - New test problem reaction - New class FixedPointIteration - Fix bug in grid refinement 0.4.2 ----- - Fix bug in computation of residual (divide by k) - Add automatic generation and solution of the dual problem - Automatic selection of file names for primal and dual - Fix bug in progress bar (TerminalLogger) - Many updates of multi-adaptive solver - Add class ODEFunction - Update function class hierarchies - Move functions to a separate directory - Store multi-adaptive solution binary on disk with cache 0.4.1 ----- - First version of multi-adaptive solver working - Clean up file formats - Start changing from int to unsigned int where necessary - Fix bool->int when using stdard in Parameter - Add NewArray and NewList (will replace Array and List) 0.4.0 ----- - Initiation of the FEniCS project - Change syntax of mesh files: grid -> mesh - Create symbolic links instead of copying files - Tanganyika -> ODE - Add Heat module - Grid -> Mesh - Move forms and mappings to separate libraries - Fix missing include of DirectSolver.h 0.3.12 ------ - Adaptive grid refinement (!) 
- Add User Manual - Add function dolfin_log() to turn logging on/off - Change from pointers to references for Node, Cell, Edge, Face - Update writexml.m - Add new grid files and rename old grid files 0.3.11 ------ - Add configure option --disable-curses - Grid refinement updates - Make OpenDX file format work for grids (output) - Add volume() and diameter() in cell - New classes TriGridRefinement and TetGridRefinement - Add iterators for faces and edges on a boundary - New class GridHierarchy 0.3.10 ------ - Use new boundary structure in Galerkin - Make dolfin_start() and dolfin_end() work - Make dolfin_assert() raise segmentation fault for plain text mode - Add configure option --enable-debug - Use autoreconf instead of scripts/preconfigure - Rename configure.in -> configure.ac - New class FaceIterator - New class Face - Move computation of boundary from GridInit to BoundaryInit - New class BoundaryData - New class BoundaryInit - New class Boundary - Make InitGrid compute edges - Add test program for generic matrix in src/demo/la - Clean up Grid classes - Add new class GridRefinementData - Move data from Cell to GenericCell - Make GMRES work with user defined matrix, only mult() needed - GMRES now uses only one function to compute residual() - Change Matrix structure (a modified envelope/letter) - Update script checkerror.m for Poisson - Add function dolfin_info_aptr() - Add cast to element pointer for iterators - Clean up and improve the Tensor class - New class: List - Name change: List -> Table - Name change: ShortList -> Array - Make functions in GridRefinement static - Make functions in GridInit static - Fix bug in GridInit (eriksv) - Add output to OpenDX format for 3D grids - Clean up ShortList class - Clean up List class - New class ODE, Equation replaced by PDE - Add Lorenz test problem - Add new problem type for ODEs - Add new module ode - Work on multi-adaptive ODE solver (lots of new stuff) - Work on grid refinement - Write all macros in LoggerMacros in 
one line - Add transpose functions to Matrix (Erik) 0.3.9 ----- - Update Krylov solver (Erik, Johan) - Add new LU factorization and LU solve (Niklas) - Add benchmark test in src/demo/bench - Add silent logger 0.3.8 ----- - Make sure dolfin-config is regenerated every time - Add demo program for cG(q) and dG(q) - Add dG(q) precalc of nodal points and weights - Add cG(q) precalc of nodal points and weights - Fix a bug in configure.in (AC_INIT with README) - Add Lagrange polynomials - Add multiplication with transpose - Add scalar products with rows and columns - Add A[i][j] index operator for quick access to dense matrix 0.3.7 ----- - Add new Matlab-like syntax like A(i,all) = x or A(3,all) = A(4,all) - Add dolfin_assert() macro enabled if debug is defined - Redesign of Matrix/DenseMatrix/SparseMatrix to use Matrix as common interface - Include missing cmath in Legendre.cpp and GaussianQuadrature.cpp 0.3.6 ----- - Add output functionality in DenseMatrix - Add high precision solver to DirectSolver - Clean up error messages in Matrix - Make solvers directly accessible through Matrix and DenseMatrix - Add quadrature (Gauss, Radau, and Lobatto) from Tanganyika - Start merge with Tanganyika - Add support for automatic documentation using doxygen - Update configure scripts - Add greeting at end of compilation 0.3.5 ----- - Define version number only in the file configure.in - Fix compilation problem (missing depcomp) 0.3.4 ----- - Fix bugs in some of the ElementFunction operators - Make convection-diffusion solver work again - Fix bug in integration, move multiplication with the determinant - Fix memory leaks in ElementFunction - Add parameter to choose output format - Make OctaveFile and MatlabFile subclasses of MFile - Add classes ScalarExpressionFunction and VectorExpressionFunction - Make progress bars work cleaner - Get ctrl-c in curses logger - Remove Settings-classes and use dolfin_parameter() - Redesign settings to match the structure of the log system - Add vector 
functions: Function::Vector - Add vector element functions: ElementFunction::Vector 0.3.3 ----- - Increased functionality of curses-based interface - Add progress bars to log system 0.3.2 ----- - More work on grid refinement - Add new curses based log system 0.3.1 ----- - Makefile updates: make install should now work properly - KrylovSolver updates - Preparation for grid refinement - Matrix and Vector updates 0.3.0 ----- - Make poisson work again, other modules still not working - Add output format for octave - Fix code to compile with g++-3.2 -Wall -Werror - New operators for Matrix - New and faster GMRES solver (speedup factor 4) - Changed name from SparseMatrix to Matrix - Remove old unused code - Add subdirectory math containing mathematical functions - Better access for A(i,j) += to improve speed in assembling - Add benchmark for linear algebra - New definition of finite element - Add algebra for function spaces - Convert grids in data/grids to xml.gz - Add iterators for Nodes and Cells - Change from .hh to .h - Add operators to Vector class (foufas) - Add dependence on libxml2 - Change from .C to .cpp to make Jim happy. 
- Change input/output functionality to streams - Change to new data structure for Grid - Change to object-oriented API at top level - Add use of C++ namespaces - Complete and major restructuring of the code - Fix compilation error in src/config - Fix name of keyword for convection-diffusion 0.2.11-1 -------- - Fix compilation error (`source`) on Solaris 0.2.11 ------ - Automate build process to simplify addition of new modules - Fix bug in matlab_write_field() (walter) - Fix bug in SparseMatrix::GetCopy() (foufas) 0.2.10-1 -------- - Fix compilation errors on RedHat (thsv) 0.2.10 ------ - Fix compilation of problems to use correct compiler - Change default test problems to the ones in the report - Improve memory management using mpatrol for tracking allocations - Change bool to int for va_arg, seems to be a problem with gcc > 3.0 - Improve input / output support: GiD, Matlab, OpenDX 0.2.8 ----- - Navier-Stokes starting to work again - Add Navier-Stokes 2d - Bug fixes 0.2.7 ----- - Add support for 2D problems - Add module convection-diffusion - Add local/global fields in equation/problem - Bug fixes - Navier-Stokes updates (still broken) 0.2.6 [2002-02-19] ------------------ - Navier-Stokes updates (still broken) - Output to matlab format 0.2.5 ----- - Add variational formulation with overloaded operators for systems - ShapeFunction/LocalField/FiniteElement according to Scott & Brenner 0.2.4 ----- - Add boundary conditions - Poisson seems to work ok 0.2.3 ----- - Add GMRES solver - Add CG solver - Add direct solver - Add Poisson solver - Big changes to the organisation of the source tree - Add kwdist.sh script - Bug fixes 0.2.2: ------ - Remove curses temporarily 0.2.1: ------ - Remove all PETSc stuff. Finally! 
- Gauss-Seidel cannot handle the pressure equation 0.2.0: ------ - First GPL release - Remove all of Klas Samuelssons proprietary grid code - Adaptivity and refinement broken, include in next release dolfinx-0.3.0/INSTALL000066400000000000000000000000701411141764300142050ustar00rootroot00000000000000See https://fenicsproject.org/docs/dolfinx/dev/python/. dolfinx-0.3.0/README.md000066400000000000000000000076631411141764300144520ustar00rootroot00000000000000# DOLFINx [![DOLFINx CI](https://github.com/FEniCS/dolfinx/actions/workflows/ccpp.yml/badge.svg)](https://github.com/FEniCS/dolfinx/actions/workflows/ccpp.yml) [![CircleCI](https://circleci.com/gh/FEniCS/dolfinx.svg?style=shield)](https://circleci.com/gh/FEniCS/dolfinx) [![Actions Docker environment images](https://github.com/FEniCS/dolfinx/workflows/Docker%20environment%20images/badge.svg)](https://github.com/FEniCS/dolfinx/actions?query=workflow%3A%22Docker+environment+images%22) [![Actions Docker image builds](https://github.com/FEniCS/dolfinx/workflows/Docker%20end-user%20images/badge.svg)](https://github.com/FEniCS/dolfinx/actions?query=workflow%3A%22Docker+end-user+images%22) [![Actions Spack build](https://github.com/FEniCS/dolfinx/workflows/Spack%20build/badge.svg)](https://github.com/FEniCS/dolfinx/actions?query=workflow%3A%22Spack+build%22) DOLFINx is a new version of DOLFIN. It is being actively developed and features may come and go as development proceeds. DOLFINx is the computational environment of [FEniCS](https://fenicsproject.org) and implements the FEniCS Problem Solving Environment in Python and C++. ## Documentation Documentation can be viewed at: - https://docs.fenicsproject.org/dolfinx/main/cpp/ - https://docs.fenicsproject.org/dolfinx/main/python/ ## Installation ### From source #### C++ core To build and install the C++ core, in the ``cpp/`` directory, run:: ``` mkdir build cd build cmake .. 
make install ``` #### Python interface To install the Python interface, first install the C++ core, and then in the ``python/`` directory run:: ``` pip install . ``` (you may need to use ``pip3``, depending on your system). For detailed instructions, see https://fenicsproject.org/docs/dolfinx/dev/python/. ### Spack To build from source using [Spack](https://spack.readthedocs.io/) (assuming a bash shell): ``` git clone https://github.com/spack/spack.git . ./spack/share/spack/setup-env.sh spack env create fenicsx-env spack env activate fenicsx-env echo " concretization: together" >> ./spack/var/spack/environments/fenicsx-env/spack.yaml spack add py-fenics-dolfinx@main ^petsc+mumps+hypre cflags="-O3" fflags="-O3" spack install ``` See the Spack [documentation](https://spack.readthedocs.io/) for comprehensive instructions. ## Docker images A Docker image with DOLFINx built nightly: ``` docker run -ti dolfinx/dolfinx:latest ``` To switch between real and complex builds of DOLFINx. ``` source /usr/local/bin/dolfinx-complex-mode source /usr/local/bin/dolfinx-real-mode ``` A Jupyter Lab environment with DOLFINx built nightly: ``` docker run --init -ti -p 8888:8888 dolfinx/lab:latest # Access at http://localhost:8888 ``` A development image with all of the dependencies required to build DOLFINx: ``` docker run -ti dolfinx/dev-env:latest ``` All Docker images support arm64 and amd64 architectures. For more information, see https://hub.docker.com/u/dolfinx ## License DOLFINx is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. DOLFINx is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
You should have received a copy of the GNU Lesser General Public License along with DOLFINx. If not, see . ## Contact For questions about using DOLFINx, visit the FEniCS Discourse page: https://fenicsproject.discourse.group/ For bug reports, visit the DOLFINx GitHub page: https://github.com/FEniCS/dolfinx For comments and requests, send an email to the FEniCS mailing list: fenics-dev@googlegroups.com For questions related to obtaining, building or installing DOLFINx, send an email to the FEniCS support mailing list: fenics-support@googlegroups.com dolfinx-0.3.0/cpp/000077500000000000000000000000001411141764300137415ustar00rootroot00000000000000dolfinx-0.3.0/cpp/CMakeLists.txt000066400000000000000000000332751411141764300165130ustar00rootroot00000000000000#------------------------------------------------------------------------------ # Top level CMakeLists.txt file for DOLFINx cmake_minimum_required(VERSION 3.16) #------------------------------------------------------------------------------ # Set project name and version number project(DOLFINX VERSION "0.3.0") set(DOXYGEN_DOLFINX_VERSION ${DOLFINX_VERSION} CACHE STRING "Version for Doxygen" FORCE) #------------------------------------------------------------------------------ # Use C++17 set(CMAKE_CXX_STANDARD 17) # Require C++17 set(CMAKE_CXX_STANDARD_REQUIRED ON) # Do not enable compler-specific extensions set(CMAKE_CXX_EXTENSIONS OFF) #------------------------------------------------------------------------------ # Get GIT changeset, if available find_program(GIT_FOUND git) if (GIT_FOUND) # Get the commit hash of the working branch execute_process(COMMAND git rev-parse HEAD WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE GIT_COMMIT_HASH OUTPUT_STRIP_TRAILING_WHITESPACE ) else() set(GIT_COMMIT_HASH "unknown") endif() #------------------------------------------------------------------------------ # General configuration # Set location of our FindFoo.cmake modules set(CMAKE_MODULE_PATH 
"${DOLFINX_SOURCE_DIR}/cmake/modules") # Make sure CMake uses the correct DOLFINConfig.cmake for tests and demos set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} ${CMAKE_CURRENT_BINARY_DIR}/dolfinx) #------------------------------------------------------------------------------ # Configurable options for how we want to build include(FeatureSummary) option(BUILD_SHARED_LIBS "Build DOLFINx with shared libraries." ON) add_feature_info(BUILD_SHARED_LIBS BUILD_SHARED_LIBS "Build DOLFINx with shared libraries.") option(DOLFINX_SKIP_BUILD_TESTS "Skip build tests for testing usability of dependency packages." OFF) add_feature_info(DOLFINX_SKIP_BUILD_TESTS DOLFINX_SKIP_BUILD_TESTS "Skip build tests for testing usability of dependency packages.") # Add shared library paths so shared libs in non-system paths are found option(CMAKE_INSTALL_RPATH_USE_LINK_PATH "Add paths to linker search and installed rpath." ON) add_feature_info(CMAKE_INSTALL_RPATH_USE_LINK_PATH CMAKE_INSTALL_RPATH_USE_LINK_PATH "Add paths to linker search and installed rpath.") # Enable SIMD with xtensor option(XTENSOR_USE_XSIMD "Enable SIMD with xtensor." OFF) add_feature_info(XTENSOR_USE_XSIMD XTENSOR_USE_XSIMD "Enable SIMD with xtensor (xsimd).") # Enable xtensor with target-specific optimization, i.e. -march=native option(XTENSOR_OPTIMIZE "Enable xtensor target-specific optimization" OFF) add_feature_info(XTENSOR_OPTIMIZE XTENSOR_OPTIMIZE "Enable architecture-specific optimizations as defined by xtensor.") #------------------------------------------------------------------------------ # Enable or disable optional packages # List optional packages list(APPEND OPTIONAL_PACKAGES "SLEPc") list(APPEND OPTIONAL_PACKAGES "ParMETIS") list(APPEND OPTIONAL_PACKAGES "KaHIP") # Add options foreach (OPTIONAL_PACKAGE ${OPTIONAL_PACKAGES}) string(TOUPPER "DOLFINX_ENABLE_${OPTIONAL_PACKAGE}" OPTION_NAME) option(${OPTION_NAME} "Compile with support for ${OPTIONAL_PACKAGE}." 
ON) add_feature_info(${OPTION_NAME} ${OPTION_NAME} "Compile with support for ${OPTIONAL_PACKAGE}.") endforeach() #------------------------------------------------------------------------------ # Check for MPI # FIXME: Should we set CMake to use the discovered MPI compiler wrappers? find_package(MPI 3 REQUIRED) #------------------------------------------------------------------------------ # Compiler flags # Default build type (can be overridden by user) if (NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING "Choose the type of build, options are: Debug Developer MinSizeRel Release RelWithDebInfo." FORCE) endif() # Check for some compiler flags include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG(-pipe HAVE_PIPE) if (HAVE_PIPE) list(APPEND DOLFINX_CXX_DEVELOPER_FLAGS -pipe) endif() # Add some strict compiler checks CHECK_CXX_COMPILER_FLAG("-Wall -Werror -Wextra -pedantic" HAVE_PEDANTIC) if (HAVE_PEDANTIC) list(APPEND DOLFINX_CXX_DEVELOPER_FLAGS -Wall;-Werror;-Wextra;-pedantic) endif() # Debug flags CHECK_CXX_COMPILER_FLAG(-g HAVE_DEBUG) if (HAVE_DEBUG) list(APPEND DOLFINX_CXX_DEVELOPER_FLAGS -g) endif() # Optimisation CHECK_CXX_COMPILER_FLAG(-O2 HAVE_O2_OPTIMISATION) if (HAVE_O2_OPTIMISATION) list(APPEND DOLFINX_CXX_DEVELOPER_FLAGS -O2) endif() #------------------------------------------------------------------------------ # Run tests to find required packages # Note: When updating Boost version, also update DOLFINXCongif.cmake.in if(DEFINED ENV{BOOST_ROOT} OR DEFINED BOOST_ROOT) set(Boost_NO_SYSTEM_PATHS on) endif() set(Boost_USE_MULTITHREADED $ENV{BOOST_USE_MULTITHREADED}) set(Boost_VERBOSE TRUE) find_package(Boost 1.70 REQUIRED timer filesystem) set_package_properties(Boost PROPERTIES TYPE REQUIRED DESCRIPTION "Boost C++ libraries" URL "http://www.boost.org") # Check for required package xtensor find_package(xtensor 0.23.10 REQUIRED) set_package_properties(xtensor PROPERTIES TYPE REQUIRED DESCRIPTION "C++ library for numerical analysis with 
multi-dimensional array expressions." URL "https://xtensor.readthedocs.io/") # Check for required package xtensor-blas find_package(xtensor-blas REQUIRED) set_package_properties(xtensor-blas PROPERTIES TYPE REQUIRED DESCRIPTION "BLAS extenssion to xtensor." URL "https://xtensor-blas.readthedocs.io/") find_package(PETSc 3.10) set_package_properties(PETSc PROPERTIES TYPE REQUIRED DESCRIPTION "Portable, Extensible Toolkit for Scientific Computation (PETSc)" URL "https://www.mcs.anl.gov/petsc/" PURPOSE "PETSc linear algebra backend") # Check for HDF5 set(HDF5_PREFER_PARALLEL TRUE) set(HDF5_FIND_DEBUG TRUE) find_package(HDF5 REQUIRED COMPONENTS C) if (NOT HDF5_IS_PARALLEL) message(FATAL_ERROR "Found serial HDF5 build, MPI HDF5 build required, try setting HDF5_DIR or HDF5_ROOT") endif() set_package_properties(HDF5 PROPERTIES TYPE REQUIRED DESCRIPTION "Hierarchical Data Format 5 (HDF5)" URL "https://www.hdfgroup.org/HDF5") find_package(SCOTCH REQUIRED) set_package_properties(SCOTCH PROPERTIES TYPE REQUIRED DESCRIPTION "Programs and libraries for graph, mesh and hypergraph partitioning" URL "https://www.labri.fr/perso/pelegrin/scotch" PURPOSE "Parallel graph partitioning and redordering") # Check for required packages UFC and basix find_package(Python3 COMPONENTS Interpreter REQUIRED) find_package(UFC MODULE ${DOLFINX_VERSION_MAJOR}.${DOLFINX_VERSION_MINOR}) set_package_properties(UFC PROPERTIES TYPE REQUIRED DESCRIPTION "Unified interface for form-compilers (part of FFCx)" URL "https://github.com/fenics/ffcx") find_package(Basix 0.0.1 REQUIRED) set_package_properties(basix PROPERTIES TYPE REQUIRED DESCRIPTION "FEniCS tabulation library" URL "https://github.com/fenics/basix") get_target_property(BASIX_DEFN Basix::basix INTERFACE_COMPILE_DEFINITIONS) if("XTENSOR_USE_XSIMD" IN_LIST BASIX_DEFN) find_package(xsimd REQUIRED) endif() #------------------------------------------------------------------------------ # Run tests to find optional packages # Check for SLEPc 
set(SLEPC_FOUND FALSE) if (DOLFINX_ENABLE_SLEPC) find_package(SLEPc 3.10) set_package_properties(SLEPc PROPERTIES TYPE OPTIONAL DESCRIPTION "Scalable Library for Eigenvalue Problem Computations" URL "http://slepc.upv.es/") endif() # Check for ParMETIS if (DOLFINX_ENABLE_PARMETIS) find_package(ParMETIS 4.0.2) set_package_properties(ParMETIS PROPERTIES TYPE OPTIONAL DESCRIPTION "Parallel Graph Partitioning and Fill-reducing Matrix Ordering" URL "http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview" PURPOSE "Parallel graph partitioning") endif() # Check for KaHIP if (DOLFINX_ENABLE_KAHIP) find_package(KaHIP) set_package_properties(KaHIP PROPERTIES TYPE OPTIONAL DESCRIPTION "A family of graph partitioning programs" URL "https://kahip.github.io/" PURPOSE "Parallel graph partitioning") endif() #------------------------------------------------------------------------------ # Print summary of found and not found optional packages feature_summary(WHAT ALL) #------------------------------------------------------------------------------ # Installation of DOLFINx library add_subdirectory(dolfinx) #------------------------------------------------------------------------------ # Generate and install helper file dolfinx.conf # FIXME: Can CMake provide the library path name variable? 
if (APPLE) set(OS_LIBRARY_PATH_NAME "DYLD_LIBRARY_PATH") else() set(OS_LIBRARY_PATH_NAME "LD_LIBRARY_PATH") endif() # FIXME: not cross-platform compatible # Create and install dolfinx.conf file configure_file(${DOLFINX_SOURCE_DIR}/cmake/templates/dolfinx.conf.in ${CMAKE_BINARY_DIR}/dolfinx.conf @ONLY) install(FILES ${CMAKE_BINARY_DIR}/dolfinx.conf DESTINATION ${CMAKE_INSTALL_LIBDIR}/dolfinx COMPONENT Development) #------------------------------------------------------------------------------ # Copy data in demo/test direcories to the build directories set(GENERATE_DEMO_TEST_DATA FALSE) if (Python3_Interpreter_FOUND AND (${DOLFINX_SOURCE_DIR}/demo IS_NEWER_THAN ${CMAKE_CURRENT_BINARY_DIR}/demo OR ${DOLFINX_SOURCE_DIR}/test IS_NEWER_THAN ${CMAKE_CURRENT_BINARY_DIR}/test)) file(REMOVE_RECURSE ${CMAKE_CURRENT_BINARY_DIR}/demo ${CMAKE_CURRENT_BINARY_DIR}/test) set(GENERATE_DEMO_TEST_DATA TRUE) endif() if (GENERATE_DEMO_TEST_DATA) message(STATUS "") message(STATUS "Copying demo and test data to build directory.") message(STATUS "----------------------------------------------") execute_process( COMMAND ${Python3_EXECUTABLE} "-B" "-u" ${DOLFINX_SOURCE_DIR}/cmake/scripts/copy-test-demo-data.py ${CMAKE_CURRENT_BINARY_DIR} ${PETSC_SCALAR_COMPLEX} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} RESULT_VARIABLE COPY_DEMO_DATA_RESULT OUTPUT_VARIABLE COPY_DEMO_DATA_OUTPUT ERROR_VARIABLE COPY_DEMO_DATA_OUTPUT) if (COPY_DEMO_DATA_RESULT) message(FATAL_ERROR "Copy demo data failed: \n${COPY_DEMO_DATA_OUTPUT}") endif() endif() #------------------------------------------------------------------------------ # Generate form files for tests and demos if (GENERATE_DEMO_TEST_DATA) message(STATUS "") message(STATUS "Generating form files in demo and test directories. 
May take some time...") message(STATUS "----------------------------------------------------------------------------------------") execute_process( COMMAND ${Python3_EXECUTABLE} "-B" "-u" ${DOLFINX_SOURCE_DIR}/cmake/scripts/generate-form-files.py ${PETSC_SCALAR_COMPLEX} WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} RESULT_VARIABLE FORM_GENERATION_RESULT OUTPUT_VARIABLE FORM_GENERATION_OUTPUT ERROR_VARIABLE FORM_GENERATION_OUTPUT ) if (FORM_GENERATION_RESULT) # Cleanup so that form generation is triggered next time we run cmake file(REMOVE_RECURSE ${CMAKE_CURRENT_BINARY_DIR}/demo ${CMAKE_CURRENT_BINARY_DIR}/test) message(FATAL_ERROR "Generation of form files failed: \n${FORM_GENERATION_OUTPUT}") endif() endif() #------------------------------------------------------------------------------ # Generate CMakeLists.txt files for demos if needed # NOTE: We need to call this script after generate-formfiles if (GENERATE_DEMO_TEST_DATA) message(STATUS "") message(STATUS "Generating CMakeLists.txt files in demo directory") message(STATUS "-------------------------------------------------------------------") # Generate CMakeLists.txt files in build directory execute_process( COMMAND ${Python3_EXECUTABLE} ${DOLFINX_SOURCE_DIR}/cmake/scripts/generate-cmakefiles.py WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} RESULT_VARIABLE CMAKE_GENERATION_RESULT OUTPUT_VARIABLE CMAKE_GENERATION_OUTPUT ERROR_VARIABLE CMAKE_GENERATION_OUTPUT ) if (CMAKE_GENERATION_RESULT) # Cleanup so FFCx rebuild is triggered next time we run cmake file(REMOVE_RECURSE ${CMAKE_CURRENT_BINARY_DIR}/demo ${CMAKE_CURRENT_BINARY_DIR}/test) message(FATAL_ERROR "Generation of CMakeLists.txt files in build directory failed: \n${CMAKE_GENERATION_OUTPUT}") else() # Generate CMakeLists.txt files in source directory as well, as developers might find it # easier to run in-place demo builds while preparing new demos or modifying existing ones execute_process( COMMAND ${Python3_EXECUTABLE} 
${DOLFINX_SOURCE_DIR}/cmake/scripts/generate-cmakefiles.py WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} RESULT_VARIABLE CMAKE_GENERATION_RESULT OUTPUT_VARIABLE CMAKE_GENERATION_OUTPUT ERROR_VARIABLE CMAKE_GENERATION_OUTPUT ) if (CMAKE_GENERATION_RESULT) message(FATAL_ERROR "Generation of CMakeLists.txt files in source directory failed: \n${CMAKE_GENERATION_OUTPUT}") endif() endif() endif() #------------------------------------------------------------------------------ # Install the demo source files install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/demo DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/dolfinx FILES_MATCHING PATTERN "CMakeLists.txt" PATTERN "*.h" PATTERN "*.hpp" PATTERN "*.c" PATTERN "*.cpp" PATTERN "*.ufl" PATTERN "*.xdmf" PATTERN "*.h5" PATTERN "CMakeFiles" EXCLUDE) #------------------------------------------------------------------------------ # Add "make uninstall" target configure_file( "${DOLFINX_SOURCE_DIR}/cmake/templates/cmake_uninstall.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY) add_custom_target(uninstall "${CMAKE_COMMAND}" -P "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake") #------------------------------------------------------------------------------ # Print post-install message add_subdirectory(cmake/post-install) #------------------------------------------------------------------------------ dolfinx-0.3.0/cpp/cmake/000077500000000000000000000000001411141764300150215ustar00rootroot00000000000000dolfinx-0.3.0/cpp/cmake/modules/000077500000000000000000000000001411141764300164715ustar00rootroot00000000000000dolfinx-0.3.0/cpp/cmake/modules/FindKaHIP.cmake000066400000000000000000000073061411141764300211760ustar00rootroot00000000000000# - Try to find KaHIP # Once done this will define # # KAHIP_FOUND - system has KaHIP # KAHIP_INCLUDE_DIRS - include directories for KaHIP # KAHIP_LIBRARIES - libraries for KaHIP # KAHIP_VERSION - version for KaHIP 
#============================================================================= # Copyright (C) 2019 Igor A. Baratta # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#============================================================================= set(KAHIP_FOUND FALSE) message(STATUS "Checking for package 'KaHIP'") if (MPI_CXX_FOUND) find_path(KAHIP_INCLUDE_DIRS parhip_interface.h HINTS ${KAHIP_DIR}/include $ENV{KAHIP_DIR}/include PATH_SUFFIXES kahip DOC "Directory where the KaHIP header files are located.") find_library(PARHIP_LIBRARY parhip_interface HINTS ${KAHIP_DIR}/lib $ENV{KAHIP_DIR}/lib DOC "Directory where the ParHIP library is located") find_library(KAHIP_LIBRARY interface HINTS ${KAHIP_DIR}/lib $ENV{KAHIP_DIR}/lib DOC "Directory where the KaHIP library is located") set(KAHIP_LIBRARIES ${PARHIP_LIBRARY} ${KAHIP_LIBRARY}) if (KAHIP_LIBRARIES AND KAHIP_LIBRARIES) # Build and run test program include(CheckCXXSourceRuns) # Set flags for building test program set(CMAKE_REQUIRED_INCLUDES ${KAHIP_INCLUDE_DIRS} ${MPI_CXX_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${KAHIP_LIBRARIES} ${MPI_CXX_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} ${MPI_CXX_COMPILE_FLAGS}) check_cxx_source_runs(" #define MPICH_IGNORE_CXX_SEEK 1 #include #include #include int main() { int n = 5; std::vector xadj = {0, 2, 5, 7, 9, 12}; std::vector adjncy = {1, 4, 0, 2, 4, 1, 3, 2, 4, 0, 1, 3}; std::vector part(n); double imbalance = 0.03; int edge_cut = 0; int nparts = 2; int *vwgt = nullptr;; int *adjcwgt = nullptr;; kaffpa(&n, vwgt, xadj.data(), adjcwgt, adjncy.data(), &nparts, &imbalance, false, 0, ECO, &edge_cut, part.data()); return 0; } " KAHIP_TEST_RUNS) endif() endif() include (FindPackageHandleStandardArgs) find_package_handle_standard_args(KaHIP "KaHIP could not be found/configured." 
KAHIP_INCLUDE_DIRS KAHIP_LIBRARIES KAHIP_TEST_RUNS) dolfinx-0.3.0/cpp/cmake/modules/FindPETSc.cmake000066400000000000000000000225211411141764300212140ustar00rootroot00000000000000# - Try to find PETSc # Once done this will define # # PETSC_FOUND - system has PETSc # PETSC_INCLUDE_DIRS - include directories for PETSc # PETSC_LIBRARY_DIRS - library directories for PETSc # PETSC_LIBRARIES - libraries for PETSc # PETSC_STATIC_LIBRARIES - libraries for PETSc (static linking, # undefined if not required) # PETSC_VERSION - version for PETSc # PETSC_VERSION_MAJOR - First number in PETSC_VERSION # PETSC_VERSION_MINOR - Second number in PETSC_VERSION # PETSC_VERSION_SUBMINOR - Third number in PETSC_VERSION # PETSC_INT_SIZE - sizeof(PetscInt) # PETSC_SCALAR_COMPLEX - PETSc is complied with complex scalar type # # Variables used by this module, they can change the default behaviour and # need to be set before calling find_package: # # PETSC_DEBUG - Set this to TRUE to enable debugging output # of FindPETSc.cmake if you are having problems. # Please enable this before filing any bug reports. #============================================================================= # Copyright (C) 2010-2019 Garth N. Wells, Anders Logg and Johannes Ring # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. #============================================================================= # Outline: # 1. Get flags from PETSc-generated pkg-config file # 2. Test compile and run program using shared library linking # 3. If shared library linking fails, test with static library linking # Load pkg-config module (provided by CMake) find_package(PkgConfig REQUIRED) # Find PETSc pkg-config file set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PETSC_DIR}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}") pkg_search_module(PETSC petsc PETSc) # Extract major, minor, etc from version string if (PETSC_VERSION) string(REPLACE "." 
";" VERSION_LIST ${PETSC_VERSION}) list(GET VERSION_LIST 0 PETSC_VERSION_MAJOR) list(GET VERSION_LIST 1 PETSC_VERSION_MINOR) list(GET VERSION_LIST 2 PETSC_VERSION_SUBMINOR) endif() # Configure PETSc IMPORT (this involves creating an 'imported' target # and attaching 'properties') if (PETSC_FOUND AND NOT TARGET PETSC::petsc) add_library(PETSC::petsc INTERFACE IMPORTED) # Add include paths set_property(TARGET PETSC::petsc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${PETSC_INCLUDE_DIRS}) # Add libraries unset(_libs) foreach (lib ${PETSC_LIBRARIES}) find_library(LIB_${lib} NAMES ${lib} PATHS ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH) list(APPEND _libs ${LIB_${lib}}) endforeach() set_property(TARGET PETSC::petsc PROPERTY INTERFACE_LINK_LIBRARIES "${_libs}") endif() # Configure PETSc 'static' IMPORT (this involves creating an # 'imported' target and attaching 'properties') if (PETSC_FOUND AND NOT TARGET PETSC::petsc_static) add_library(PETSC::petsc_static INTERFACE IMPORTED) # Add libraries (static) unset(_libs) foreach (lib ${PETSC_STATIC_LIBRARIES}) find_library(LIB_${lib} ${lib} HINTS ${PETSC_STATIC_LIBRARY_DIRS}) list(APPEND _libs ${LIB_${lib}}) endforeach() set_property(TARGET PETSC::petsc_static PROPERTY INTERFACE_LINK_LIBRARIES "${_libs}") endif() # Attempt to build and run PETSc test program if (DOLFINX_SKIP_BUILD_TESTS) # Assume PETSc works set(PETSC_TEST_RUNS TRUE) elseif (PETSC_FOUND) # Create PETSc test program set(PETSC_TEST_LIB_CPP "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/petsc_test_lib.cpp") file(WRITE ${PETSC_TEST_LIB_CPP} " #include \"petscts.h\" #include \"petsc.h\" int main() { PetscErrorCode ierr; TS ts; int argc = 0; char** argv = NULL; ierr = PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr); ierr = TSSetFromOptions(ts);CHKERRQ(ierr); ierr = TSDestroy(&ts);CHKERRQ(ierr); ierr = PetscFinalize();CHKERRQ(ierr); return 0; } ") # Add MPI variables if MPI has been found if (MPI_C_FOUND) 
set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${MPI_C_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${MPI_C_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${MPI_C_COMPILE_FLAGS}") set(MPI_LINK_LIBRARIES MPI::MPI_C) endif() # Try to run test program (shared linking) try_run( PETSC_TEST_LIB_EXITCODE PETSC_TEST_LIB_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${PETSC_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" LINK_LIBRARIES PETSC::petsc "${MPI_LINK_LIBRARIES}" COMPILE_OUTPUT_VARIABLE PETSC_TEST_LIB_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE PETSC_TEST_LIB_OUTPUT) # Check program output if (PETSC_TEST_LIB_COMPILED AND PETSC_TEST_LIB_EXITCODE EQUAL 0) message(STATUS "Test PETSC_TEST_RUNS with shared library linking - Success") set(PETSC_TEST_RUNS TRUE) # Static libraries not required, so unset set_property(TARGET PETSC::petsc_static PROPERTY INTERFACE_LINK_LIBRARIES) else() message(STATUS "Test PETSC_TEST_RUNS with shared library linking - Failed") if (PETSC_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_COMPILED = ${PETSC_TEST_LIB_COMPILED}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_COMPILE_OUTPUT = ${PETSC_TEST_LIB_COMPILE_OUTPUT}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_EXITCODE = ${PETSC_TEST_LIB_EXITCODE}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_OUTPUT = ${PETSC_TEST_LIB_OUTPUT}") endif() # Try to run test program (static linking) try_run( PETSC_TEST_LIB_EXITCODE PETSC_TEST_LIB_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${PETSC_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" LINK_LIBRARIES PETSC::petsc PETSC::petsc_static "${MPI_LINK_LIBRARIES}" 
COMPILE_OUTPUT_VARIABLE PETSC_TEST_LIB_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE PETSC_TEST_LIB_OUTPUT) if (PETSC_TEST_LIB_COMPILED AND PETSC_TEST_LIB_EXITCODE EQUAL 0) message(STATUS "Test PETSC_TEST_RUNS static linking - Success") set(PETSC_TEST_RUNS TRUE) else() message(STATUS "Test PETSC_TEST_RUNS static linking - Failed") set(PETSC_TEST_RUNS FALSE) if (PETSC_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_COMPILED = ${PETSC_TEST_LIB_COMPILED}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_COMPILE_OUTPUT = ${PETSC_TEST_LIB_COMPILE_OUTPUT}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_EXITCODE = ${PETSC_TEST_LIB_EXITCODE}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "PETSC_TEST_LIB_OUTPUT = ${PETSC_TEST_LIB_OUTPUT}") endif() endif() endif() endif() # Check sizeof(PetscInt) and check scalar type if (PETSC_INCLUDE_DIRS) set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${PETSC_INCLUDE_DIRS}) set(CMAKE_EXTRA_INCLUDE_FILES petscsys.h) include(CheckTypeSize) check_type_size("PetscInt" PETSC_INT_SIZE) include(CheckSymbolExists) check_symbol_exists(PETSC_USE_COMPLEX petscsys.h PETSC_SCALAR_COMPLEX) unset(CMAKE_EXTRA_INCLUDE_FILES) unset(CMAKE_REQUIRED_INCLUDES) endif() # Standard package handling include(FindPackageHandleStandardArgs) if (PETSC_FOUND) find_package_handle_standard_args(PETSc REQUIRED_VARS PETSC_FOUND PETSC_TEST_RUNS VERSION_VAR PETSC_VERSION FAIL_MESSAGE "PETSc could not be configured.") else() find_package_handle_standard_args(PETSc REQUIRED_VARS PETSC_FOUND FAIL_MESSAGE "PETSc could not be found. 
Be sure to set PETSC_DIR as an environment variable.") endif() dolfinx-0.3.0/cpp/cmake/modules/FindParMETIS.cmake000066400000000000000000000125311411141764300216220ustar00rootroot00000000000000# - Try to find ParMETIS # Once done this will define # # PARMETIS_FOUND - system has ParMETIS # PARMETIS_INCLUDE_DIRS - include directories for ParMETIS # PARMETIS_LIBRARIES - libraries for ParMETIS # PARMETIS_VERSION - version for ParMETIS #============================================================================= # Copyright (C) 2010 Garth N. Wells, Anders Logg and Johannes Ring # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#============================================================================= if (MPI_CXX_FOUND) find_path(PARMETIS_INCLUDE_DIRS parmetis.h HINTS ${PARMETIS_ROOT}/include $ENV{PARMETIS_ROOT}/include ${PETSC_INCLUDE_DIRS} DOC "Directory where the ParMETIS header files are located" ) find_library(PARMETIS_LIBRARY parmetis HINTS ${PARMETIS_ROOT}/lib $ENV{PARMETIS_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "Directory where the ParMETIS library is located" ) find_library(PARMETIS_LIBRARY parmetis DOC "Directory where the ParMETIS library is located" ) find_library(METIS_LIBRARY metis HINTS ${PARMETIS_ROOT}/lib $ENV{PARMETIS_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "Directory where the METIS library is located" ) find_library(METIS_LIBRARY metis DOC "Directory where the METIS library is located" ) set(PARMETIS_LIBRARIES ${PARMETIS_LIBRARY}) if (METIS_LIBRARY) set(PARMETIS_LIBRARIES ${PARMETIS_LIBRARIES} ${METIS_LIBRARY}) endif() # Try compiling and running test program if (DOLFINX_SKIP_BUILD_TESTS) set(PARMETIS_TEST_RUNS TRUE) set(PARMETIS_VERSION "UNKNOWN") set(PARMETIS_VERSION_OK TRUE) elseif (PARMETIS_INCLUDE_DIRS AND PARMETIS_LIBRARY) # Set flags for building test program set(CMAKE_REQUIRED_INCLUDES ${PARMETIS_INCLUDE_DIRS} ${MPI_CXX_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${PARMETIS_LIBRARIES} ${MPI_CXX_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} ${MPI_CXX_COMPILE_FLAGS}) # Check ParMETIS version set(PARMETIS_CONFIG_TEST_VERSION_CPP "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/parmetis_config_test_version.cpp") file(WRITE ${PARMETIS_CONFIG_TEST_VERSION_CPP} " #define MPICH_IGNORE_CXX_SEEK 1 #include #include \"parmetis.h\" int main() { #ifdef PARMETIS_SUBMINOR_VERSION std::cout << PARMETIS_MAJOR_VERSION << \".\" << PARMETIS_MINOR_VERSION << \".\" << PARMETIS_SUBMINOR_VERSION; #else std::cout << PARMETIS_MAJOR_VERSION << \".\" << PARMETIS_MINOR_VERSION; #endif return 0; } ") try_run( PARMETIS_CONFIG_TEST_VERSION_EXITCODE 
PARMETIS_CONFIG_TEST_VERSION_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${PARMETIS_CONFIG_TEST_VERSION_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" COMPILE_OUTPUT_VARIABLE PARMETIS_CONFIG_TEST_VERSION_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE PARMETIS_CONFIG_TEST_VERSION_OUTPUT ) if (PARMETIS_CONFIG_TEST_VERSION_EXITCODE EQUAL 0) set(PARMETIS_VERSION ${PARMETIS_CONFIG_TEST_VERSION_OUTPUT}) mark_as_advanced(PARMETIS_VERSION) endif() if (ParMETIS_FIND_VERSION) # Check if version found is >= required version if (NOT "${PARMETIS_VERSION}" VERSION_LESS "${ParMETIS_FIND_VERSION}") set(PARMETIS_VERSION_OK TRUE) endif() else() # No specific version requested set(PARMETIS_VERSION_OK TRUE) endif() mark_as_advanced(PARMETIS_VERSION_OK) # Build and run test program include(CheckCXXSourceRuns) check_cxx_source_runs(" #define MPICH_IGNORE_CXX_SEEK 1 #include #include int main() { // FIXME: Find a simple but sensible test for ParMETIS return 0; } " PARMETIS_TEST_RUNS) endif() endif() # Standard package handling find_package_handle_standard_args(ParMETIS "ParMETIS could not be found/configured." PARMETIS_LIBRARIES PARMETIS_TEST_RUNS PARMETIS_INCLUDE_DIRS PARMETIS_VERSION PARMETIS_VERSION_OK) dolfinx-0.3.0/cpp/cmake/modules/FindSCOTCH.cmake000066400000000000000000000303231411141764300212600ustar00rootroot00000000000000# - Try to find SCOTCH # Once done this will define # # SCOTCH_FOUND - system has found SCOTCH # SCOTCH_INCLUDE_DIRS - include directories for SCOTCH # SCOTCH_LIBARIES - libraries for SCOTCH # SCOTCH_VERSION - version for SCOTCH # # Variables used by this module, they can change the default behaviour and # need to be set before calling find_package: # # SCOTCH_DEBUG - Set this to TRUE to enable debugging output # of FindScotchPT.cmake if you are having problems. # Please enable this before filing any bug reports. 
#============================================================================= # Copyright (C) 2010-2011 Garth N. Wells, Johannes Ring and Anders Logg # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#============================================================================= set(SCOTCH_FOUND FALSE) message(STATUS "Checking for package 'SCOTCH-PT'") # Check for header file find_path(SCOTCH_INCLUDE_DIRS ptscotch.h HINTS ${SCOTCH_ROOT}/include $ENV{SCOTCH_ROOT}/include ${PETSC_INCLUDE_DIRS} PATH_SUFFIXES scotch DOC "Directory where the SCOTCH-PT header is located" ) # Check for scotch find_library(SCOTCH_LIBRARY NAMES scotch HINTS ${SCOTCH_ROOT}/lib $ENV{SCOTCH_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "The SCOTCH library" ) find_library(SCOTCH_LIBRARY NAMES scotch DOC "The SCOTCH library" ) # Check for scotcherr find_library(SCOTCHERR_LIBRARY NAMES scotcherr HINTS ${SCOTCH_ROOT}/lib $ENV{SCOTCH_ROOT}/lib NO_DEFAULT_PATH DOC "The SCOTCH-ERROR library" ) find_library(SCOTCHERR_LIBRARY NAMES scotcherr DOC "The SCOTCH-ERROR library" ) # Check for ptscotch find_library(PTSCOTCH_LIBRARY NAMES ptscotch HINTS ${SCOTCH_ROOT}/lib $ENV{SCOTCH_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "The PTSCOTCH library" ) find_library(PTSCOTCH_LIBRARY NAMES ptscotch DOC "The PTSCOTCH library" ) # Check for ptesmumps find_library(PTESMUMPS_LIBRARY NAMES ptesmumps esmumps HINTS ${SCOTCH_ROOT}/lib $ENV{SCOTCH_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "The PTSCOTCH-ESMUMPS library" ) find_library(PTESMUMPS_LIBRARY NAMES ptesmumps esmumps DOC "The PTSCOTCH-ESMUMPS library" ) # Check for ptscotcherr find_library(PTSCOTCHERR_LIBRARY NAMES ptscotcherr HINTS ${SCOTCH_ROOT}/lib $ENV{SCOTCH_ROOT}/lib ${PETSC_LIBRARY_DIRS} NO_DEFAULT_PATH DOC "The PTSCOTCH-ERROR library" ) find_library(PTSCOTCHERR_LIBRARY NAMES ptscotcherr DOC "The PTSCOTCH-ERROR library" ) set(SCOTCH_LIBRARIES ${PTSCOTCH_LIBRARY}) if (PTESMUMPS_LIBRARY) set(SCOTCH_LIBRARIES ${SCOTCH_LIBRARIES} ${PTESMUMPS_LIBRARY}) endif() set(SCOTCH_LIBRARIES ${SCOTCH_LIBRARIES} ${PTSCOTCHERR_LIBRARY}) # Basic check of SCOTCH_VERSION which does not require compilation if (SCOTCH_INCLUDE_DIRS) file(STRINGS 
"${SCOTCH_INCLUDE_DIRS}/ptscotch.h" PTSCOTCH_H) string(REGEX MATCH "SCOTCH_VERSION [0-9]+" SCOTCH_VERSION "${PTSCOTCH_H}") string(REGEX MATCH "[0-9]+" SCOTCH_VERSION "${SCOTCH_VERSION}") endif() # If SCOTCH_VERSION was not found in ptscotch.h, look in scotch.h if (SCOTCH_INCLUDE_DIRS AND NOT SCOTCH_VERSION) file(STRINGS "${SCOTCH_INCLUDE_DIRS}/scotch.h" SCOTCH_H) string(REGEX MATCH "SCOTCH_VERSION [0-9]+" SCOTCH_VERSION "${SCOTCH_H}") string(REGEX MATCH "[0-9]+" SCOTCH_VERSION "${SCOTCH_VERSION}") endif() # For SCOTCH version > 6, need to add libraries scotch and ptscotch if (NOT "${SCOTCH_VERSION}" VERSION_LESS "6") set(SCOTCH_LIBRARIES ${PTSCOTCH_LIBRARY} ${SCOTCH_LIBRARY} ${PTSCOTCHERR_LIBRARY}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${SCOTCH_LIBRARY}) endif() # Try compiling and running test program if (DOLFINX_SKIP_BUILD_TESTS) message(STATUS "Found SCOTCH (version ${SCOTCH_VERSION})") set(SCOTCH_TEST_RUNS TRUE) elseif (SCOTCH_INCLUDE_DIRS AND SCOTCH_LIBRARIES) if (SCOTCH_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "location of ptscotch.h: ${SCOTCH_INCLUDE_DIRS}/ptscotch.h") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "location of libscotch: ${SCOTCH_LIBRARY}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "location of libptscotch: ${PTSCOTCH_LIBRARY}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "location of libptscotcherr: ${PTSCOTCHERR_LIBRARY}") endif() # Set flags for building test program set(CMAKE_REQUIRED_INCLUDES ${SCOTCH_INCLUDE_DIRS}) set(CMAKE_REQUIRED_LIBRARIES ${SCOTCH_LIBRARIES}) #set(CMAKE_REQUIRED_LIBRARIES ${SCOTCH_LIBRARY} ${SCOTCHERR_LIBRARY}) # Add MPI variables if MPI has been found if (MPI_CXX_FOUND) set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${MPI_CXX_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${MPI_CXX_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS 
"${CMAKE_REQUIRED_FLAGS} ${MPI_CXX_COMPILE_FLAGS}") endif() set(SCOTCH_CONFIG_TEST_VERSION_CPP "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/scotch_config_test_version.cpp") file(WRITE ${SCOTCH_CONFIG_TEST_VERSION_CPP} " #define MPICH_IGNORE_CXX_SEEK 1 #include #include #include #include #include int main() { std::cout << SCOTCH_VERSION << \".\" << SCOTCH_RELEASE << \".\" << SCOTCH_PATCHLEVEL; return 0; } ") try_run( SCOTCH_CONFIG_TEST_VERSION_EXITCODE SCOTCH_CONFIG_TEST_VERSION_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${SCOTCH_CONFIG_TEST_VERSION_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" COMPILE_OUTPUT_VARIABLE COMPILE_OUTPUT RUN_OUTPUT_VARIABLE OUTPUT ) # Set version number if (SCOTCH_CONFIG_TEST_VERSION_EXITCODE EQUAL 0) set(SCOTCH_VERSION ${OUTPUT}) message(STATUS "Found SCOTCH (version ${SCOTCH_VERSION})") endif() # PT-SCOTCH was first introduced in SCOTCH version 5.0 # FIXME: parallel graph partitioning features in PT-SCOTCH was first # introduced in 5.1. Do we require version 5.1? 
if (NOT ${SCOTCH_VERSION} VERSION_LESS "5.0") set(SCOTCH_TEST_LIB_CPP "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/scotch_test_lib.cpp") file(WRITE ${SCOTCH_TEST_LIB_CPP} " #define MPICH_IGNORE_CXX_SEEK 1 #include #include #include #include #include #include int main() { int provided; SCOTCH_Dgraph dgrafdat; MPI_Init_thread(0, 0, MPI_THREAD_MULTIPLE, &provided); if (SCOTCH_dgraphInit(&dgrafdat, MPI_COMM_WORLD) != 0) { if (MPI_THREAD_MULTIPLE > provided) { std::cout << \"MPI implementation is not thread-safe:\" << std::endl; std::cout << \"SCOTCH should be compiled without SCOTCH_PTHREAD\" << std::endl; exit(1); } else { std::cout << \"libptscotch linked to libscotch or other unknown error\" << std::endl; exit(2); } } else { SCOTCH_dgraphExit(&dgrafdat); } MPI_Finalize(); return 0; } ") message(STATUS "Performing test SCOTCH_TEST_RUNS") try_run( SCOTCH_TEST_LIB_EXITCODE SCOTCH_TEST_LIB_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${SCOTCH_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" COMPILE_OUTPUT_VARIABLE SCOTCH_TEST_LIB_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE SCOTCH_TEST_LIB_OUTPUT ) if (SCOTCH_TEST_LIB_COMPILED AND SCOTCH_TEST_LIB_EXITCODE EQUAL 0) message(STATUS "Performing test SCOTCH_TEST_RUNS - Success") set(SCOTCH_TEST_RUNS TRUE) else() message(STATUS "Performing test SCOTCH_TEST_RUNS - Failed") if (SCOTCH_DEBUG) # Output some variables message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_COMPILED = ${SCOTCH_TEST_LIB_COMPILED}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_COMPILE_OUTPUT = ${SCOTCH_TEST_LIB_COMPILE_OUTPUT}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_EXITCODE = ${SCOTCH_TEST_LIB_EXITCODE}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_OUTPUT = ${SCOTCH_TEST_LIB_OUTPUT}") endif() 
endif() # If program does not run, try adding zlib library and test again if(NOT SCOTCH_TEST_RUNS) if (NOT ZLIB_FOUND) find_package(ZLIB) endif() if (ZLIB_INCLUDE_DIRS AND ZLIB_LIBRARIES) set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${ZLIB_INCLUDE_DIRS}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${ZLIB_LIBRARIES}) message(STATUS "Performing test SCOTCH_ZLIB_TEST_RUNS") try_run( SCOTCH_ZLIB_TEST_LIB_EXITCODE SCOTCH_ZLIB_TEST_LIB_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${SCOTCH_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" COMPILE_OUTPUT_VARIABLE SCOTCH_ZLIB_TEST_LIB_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE SCOTCH_ZLIB_TEST_LIB_OUTPUT ) # Add zlib flags if required and set test run to 'true' if (SCOTCH_ZLIB_TEST_LIB_COMPILED AND SCOTCH_ZLIB_TEST_LIB_EXITCODE EQUAL 0) message(STATUS "Performing test SCOTCH_ZLIB_TEST_RUNS - Success") set(SCOTCH_INCLUDE_DIRS ${SCOTCH_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIRS}) set(SCOTCH_LIBRARIES ${SCOTCH_LIBRARIES} ${ZLIB_LIBRARIES}) set(SCOTCH_TEST_RUNS TRUE) else() message(STATUS "Performing test SCOTCH_ZLIB_TEST_RUNS - Failed") if (SCOTCH_DEBUG) message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_ZLIB_TEST_LIB_COMPILED = ${SCOTCH_ZLIB_TEST_LIB_COMPILED}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_ZLIB_TEST_LIB_COMPILE_OUTPUT = ${SCOTCH_ZLIB_TEST_LIB_COMPILE_OUTPUT}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_EXITCODE = ${SCOTCH_TEST_LIB_EXITCODE}") message(STATUS "[ ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE} ] " "SCOTCH_TEST_LIB_OUTPUT = ${SCOTCH_TEST_LIB_OUTPUT}") endif() endif() endif() endif() endif() endif() # Standard package handling find_package_handle_standard_args(SCOTCH "SCOTCH could not be found. Be sure to set SCOTCH_ROOT." 
SCOTCH_LIBRARIES SCOTCH_INCLUDE_DIRS SCOTCH_TEST_RUNS) dolfinx-0.3.0/cpp/cmake/modules/FindSLEPc.cmake000066400000000000000000000165161411141764300212130ustar00rootroot00000000000000# - Try to find SLEPC # Once done this will define # # SLEPC_FOUND - system has SLEPc # SLEPC_INCLUDE_DIRS - include directories for SLEPc # SLEPC_LIBRARY_DIRS - library directories for SLEPc # SLEPC_LIBARIES - libraries for SLEPc # SLEPC_STATIC_LIBARIES - ibraries for SLEPc (static linking, undefined if not required) # SLEPC_VERSION - version of SLEPc # SLEPC_VERSION_MAJOR - First number in SLEPC_VERSION # SLEPC_VERSION_MINOR - Second number in SLEPC_VERSION # SLEPC_VERSION_SUBMINOR - Third number in SLEPC_VERSION #============================================================================= # Copyright (C) 2010-2020 Garth N. Wells, Anders Logg, Johannes Ring and Drew Parsons # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. #============================================================================= # Load CMake pkg-config module find_package(PkgConfig REQUIRED) # Find SLEPc pkg-config file set(ENV{PKG_CONFIG_PATH} "$ENV{SLEPC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{SLEPC_DIR}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}") set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PETSC_DIR}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}") set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}:$ENV{PETSC_DIR}:$ENV{PKG_CONFIG_PATH}") pkg_search_module(SLEPC slepc SLEPc) # Extract major, minor, etc from version string if (SLEPC_VERSION) string(REPLACE "." 
";" VERSION_LIST ${SLEPC_VERSION}) list(GET VERSION_LIST 0 SLEPC_VERSION_MAJOR) list(GET VERSION_LIST 1 SLEPC_VERSION_MINOR) list(GET VERSION_LIST 2 SLEPC_VERSION_SUBMINOR) endif() # Configure SLEPc IMPORT (this involves creating an 'imported' target # and attaching 'properties') if (SLEPC_FOUND AND NOT TARGET SLEPC::slepc) add_library(SLEPC::slepc INTERFACE IMPORTED) # Add include paths set_property(TARGET SLEPC::slepc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${SLEPC_INCLUDE_DIRS}) # Add libraries unset(_libs) foreach (lib ${SLEPC_LIBRARIES}) find_library(LIB_${lib} NAMES ${lib} PATHS ${SLEPC_LIBRARY_DIRS} NO_DEFAULT_PATH) list(APPEND _libs ${LIB_${lib}}) endforeach() set_property(TARGET SLEPC::slepc PROPERTY INTERFACE_LINK_LIBRARIES "${_libs}") endif() if (SLEPC_FOUND AND NOT TARGET SLEPC::slepc_static) add_library(SLEPC::slepc_static INTERFACE IMPORTED) # Add libraries (static) unset(_libs) foreach (lib ${SLEPC_STATIC_LIBRARIES}) find_library(LIB_${lib} ${lib} HINTS ${SLEPC_STATIC_LIBRARY_DIRS}) list(APPEND _libs ${LIB_${lib}}) endforeach() set_property(TARGET SLEPC::slepc_static PROPERTY INTERFACE_LINK_LIBRARIES "${_libs}") endif() # Compile and run test if (DOLFINX_SKIP_BUILD_TESTS) # Assume SLEPc works set(SLEPC_TEST_RUNS TRUE) elseif (SLEPC_FOUND) # Create SLEPc test program set(SLEPC_TEST_LIB_CPP "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/slepc_test_lib.cpp") file(WRITE ${SLEPC_TEST_LIB_CPP} " #include \"petsc.h\" #include \"slepceps.h\" int main() { PetscErrorCode ierr; int argc = 0; char** argv = NULL; SlepcInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL); EPS eps; ierr = EPSCreate(PETSC_COMM_SELF, &eps); CHKERRQ(ierr); //ierr = EPSSetFromOptions(eps); CHKERRQ(ierr); ierr = EPSDestroy(&eps); CHKERRQ(ierr); ierr = SlepcFinalize(); CHKERRQ(ierr); return 0; } ") # Add MPI variables if MPI has been found if (MPI_C_FOUND) set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${MPI_C_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} 
${MPI_C_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${MPI_C_COMPILE_FLAGS}") set(MPI_LINK_LIBRARIES MPI::MPI_C) endif() # Try to run test program (shared linking) try_run( SLEPC_TEST_LIB_EXITCODE SLEPC_TEST_LIB_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${SLEPC_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" LINK_LIBRARIES PETSC::petsc SLEPC::slepc "${MPI_LINK_LIBRARIES}" COMPILE_OUTPUT_VARIABLE SLEPC_TEST_LIB_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE SLEPC_TEST_LIB_OUTPUT ) if (SLEPC_TEST_LIB_COMPILED AND SLEPC_TEST_LIB_EXITCODE EQUAL 0) message(STATUS "Test SLEPC_TEST_RUNS with shared library linking - Success") set(SLEPC_TEST_RUNS TRUE) # Static libraries not required, so unset set_property(TARGET SLEPC::slepc_static PROPERTY INTERFACE_LINK_LIBRARIES) else() message(STATUS "Test SLEPC_TEST_RUNS with shared library linking - Failed") # Add MPI variables if MPI has been found if (MPI_C_FOUND) set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES} ${MPI_C_INCLUDE_PATH}) set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${MPI_C_LIBRARIES}) set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${MPI_C_COMPILE_FLAGS}") endif() # Try to run test program (static linking) try_run( SLEPC_TEST_STATIC_LIBS_EXITCODE SLEPC_TEST_STATIC_LIBS_COMPILED ${CMAKE_CURRENT_BINARY_DIR} ${SLEPC_TEST_LIB_CPP} CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}" "-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}" LINK_LIBRARIES SLEPC::slepc SLEPC::slepc_static "${MPI_LINK_LIBRARIES}" COMPILE_OUTPUT_VARIABLE SLEPC_TEST_STATIC_LIBS_COMPILE_OUTPUT RUN_OUTPUT_VARIABLE SLEPC_TEST_STATIC_LIBS_OUTPUT ) if (SLEPC_TEST_STATIC_LIBS_COMPILED AND SLEPC_TEST_STATIC_LIBS_EXITCODE EQUAL 0) message(STATUS "Test SLEPC_TEST__RUNS with static linking - Success") set(SLEPC_TEST_RUNS TRUE) else() message(STATUS "Test SLEPC_TETS_RUNS with static linking - Failed") set(SLEPC_TEST_RUNS FALSE) 
endif() endif() endif() # Standard package handling include(FindPackageHandleStandardArgs) if (SLEPC_FOUND) find_package_handle_standard_args(SLEPc REQUIRED_VARS SLEPC_FOUND SLEPC_TEST_RUNS VERSION_VAR SLEPC_VERSION FAIL_MESSAGE "SLEPc could not be configured.") else() find_package_handle_standard_args(SLEPc REQUIRED_VARS SLEPC_FOUND FAIL_MESSAGE "SLEPc could not be found. Be sure to set SLEPC_DIR as an environment variable.") endif() dolfinx-0.3.0/cpp/cmake/modules/FindUFC.cmake000066400000000000000000000074501411141764300207170ustar00rootroot00000000000000# - Try to find UFC # # Once done this will define # # This module defines # # UFC_FOUND - system has UFC with correct version # UFC_INCLUDE_DIRS - where to find ufc.h # UFC_VERSION - UFC version # UFC_SIGNATURE - UFC signature #============================================================================= # Copyright (C) 2010 Johannes Ring # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. #============================================================================= # Two paths: Set UFC_INCLUDE_DIR manually, or ask Python/FFCx for location # of UFC headers. if (DEFINED ENV{UFC_INCLUDE_DIR}) MESSAGE(STATUS "Looking for UFC in $ENV{UFC_INCLUDE_DIR}...") if (EXISTS "$ENV{UFC_INCLUDE_DIR}/ufc.h") set(UFC_INCLUDE_DIRS $ENV{UFC_INCLUDE_DIR} CACHE STRING "Where to find ufc.h") execute_process( COMMAND /bin/bash -c "cat $ENV{UFC_INCLUDE_DIR}/ufc.h | sha1sum | cut -c 1-40" OUTPUT_VARIABLE UFC_SIGNATURE OUTPUT_STRIP_TRAILING_WHITESPACE) # Assume user knows what they are doing. set(UFC_VERSION ${UFC_FIND_VERSION}) set(UFC_VERSION_OK TRUE) else() MESSAGE(STATUS "Could not find UFC header.") endif() else() MESSAGE(STATUS "Asking Python module FFCx for location of UFC... 
(${Python_EXECUTABLE})") execute_process( COMMAND ${Python3_EXECUTABLE} -c "import ffcx.codegeneration, sys; sys.stdout.write(ffcx.codegeneration.get_include_path())" OUTPUT_VARIABLE UFC_INCLUDE_DIR ) if (UFC_INCLUDE_DIR) set(UFC_INCLUDE_DIRS ${UFC_INCLUDE_DIR} CACHE STRING "Where to find ufc.h") execute_process( COMMAND ${Python3_EXECUTABLE} -c "import ffcx, sys; sys.stdout.write(ffcx.__version__)" OUTPUT_VARIABLE UFC_VERSION ) if (UFC_FIND_VERSION) # Check if version found is >= required version if (NOT "${UFC_VERSION}" VERSION_LESS "${UFC_FIND_VERSION}") set(UFC_VERSION_OK TRUE) endif() else() # No specific version requested set(UFC_VERSION_OK TRUE) endif() endif() execute_process( COMMAND ${Python3_EXECUTABLE} -c "import ffcx.codegeneration, sys; sys.stdout.write(ffcx.codegeneration.get_signature())" OUTPUT_VARIABLE UFC_SIGNATURE ) endif() mark_as_advanced(UFC_VERSION UFC_INCLUDE_DIRS UFC_SIGNATURE UFC_VERSION_OK) # Standard package handling find_package_handle_standard_args(UFC "UFC could not be found." UFC_INCLUDE_DIRS UFC_VERSION UFC_VERSION_OK UFC_SIGNATURE) dolfinx-0.3.0/cpp/cmake/post-install/000077500000000000000000000000001411141764300174525ustar00rootroot00000000000000dolfinx-0.3.0/cpp/cmake/post-install/CMakeLists.txt000066400000000000000000000012621411141764300222130ustar00rootroot00000000000000install(CODE "MESSAGE( \"---------------------------------------------------------------------------- DOLFINx has now been installed in ${CMAKE_INSTALL_PREFIX} and demo programs have been installed in ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_DATAROOTDIR}/dolfinx/demo Don't forget to update your environment variables. This can be done easily using the helper file 'dolfinx.conf' which sets the appropriate variables (for users of the Bash shell). 
To update your environment variables, run the following command: source ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}/dolfinx/dolfinx.conf ----------------------------------------------------------------------------\")") dolfinx-0.3.0/cpp/cmake/scripts/000077500000000000000000000000001411141764300165105ustar00rootroot00000000000000dolfinx-0.3.0/cpp/cmake/scripts/copy-test-demo-data.py000066400000000000000000000056211411141764300226460ustar00rootroot00000000000000# Copyright (C) 2013 Johan Hake # # This file is part of DOLFINx. # # DOLFINx is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DOLFINx is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with DOLFINx. If not, see . # # Copy all data, tests and demo to a given directory relative to the top DOLFINx # source directory import os import re import shutil import sys # Subdirectories sub_directories = ["demo", "test"] # Copy all files with the following suffixes suffix_patterns = ["txt", "h", "hpp", "c", "cpp", "ufl", "xdmf", "h5"] suffix_pattern = re.compile("(%s)," % ("|".join("[\w-]+\.%s" % pattern for pattern in suffix_patterns))) script_rel_path = os.sep.join(__file__.split(os.sep)[:-1]) script_rel_path = script_rel_path or "." 
dolfinx_dir = os.path.abspath(os.path.join( script_rel_path, os.pardir, os.pardir)) def copy_data(top_destdir, complex_mode): abs_destdir = top_destdir if os.path.isabs( top_destdir) else os.path.join(dolfinx_dir, top_destdir) if abs_destdir == dolfinx_dir: raise RuntimeError("destination directory cannot be the same as " "the DOLFINx source directory") if not os.path.isdir(abs_destdir): raise RuntimeError("%s is not a directory." % abs_destdir) skip = set() if complex_mode is True: skip.update(["demo/hyperelasticity"]) skip = {os.path.join(dolfinx_dir, skip_) for skip_ in skip} for subdir in sub_directories: top_dir = os.path.join(dolfinx_dir, subdir) for dirpath, dirnames, filenames in os.walk(top_dir): if not dirpath in skip: destdir = dirpath.replace(dolfinx_dir, abs_destdir) if not os.path.isdir(destdir): os.makedirs(destdir) for f in re.findall(suffix_pattern, " ".join("%s," % f for f in filenames)): srcfile = os.path.join(dirpath, f) shutil.copy(srcfile, destdir) if __name__ == "__main__": # Expecting a destination argument if len(sys.argv) == 2: # Call with "./copy-test-demo-data.py dest_dir" for PETSc real mode copy_data(sys.argv[-1], False) elif len(sys.argv) == 3: # Call with "./copy-test-demo-data.py dest_dir 1" for PETSc complex mode copy_data(sys.argv[-2], sys.argv[-1] == "1") else: raise RuntimeError( "Expecting either one or two arguments") dolfinx-0.3.0/cpp/cmake/scripts/generate-cmakefiles.py000066400000000000000000000101661411141764300227610ustar00rootroot00000000000000# Copyright (C) 2017-2018 Chris N. Richardson and Garth N. 
Wells # # This file is part of DOLFINx (https://www.fenicsproject.org) # # SPDX-License-Identifier: LGPL-3.0-or-later import os import warnings cmakelists_str = \ """# This file is automatically generated by running # # cmake/scripts/generate-cmakefiles # cmake_minimum_required(VERSION 3.16) # Set C++17 standard set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(PROJECT_NAME {project_name}) project(${{PROJECT_NAME}}) # Get DOLFINx configuration data (DOLFINXConfig.cmake must be in # DOLFINX_CMAKE_CONFIG_PATH) if (NOT TARGET dolfinx) find_package(DOLFINX REQUIRED) endif() # Executable add_executable(${{PROJECT_NAME}} {src_files}) # Target libraries target_link_libraries(${{PROJECT_NAME}} dolfinx) # Do not throw error for 'multi-line comments' (these are typical in # rst which includes LaTeX) include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-Wno-comment" HAVE_NO_MULTLINE) set_source_files_properties(main.cpp PROPERTIES COMPILE_FLAGS "$<$:-Wno-comment -Wall -Wextra -pedantic -Werror>") # Test targets set(TEST_PARAMETERS2 -np 2 ${{MPIEXEC_PARAMS}} "./${{PROJECT_NAME}}") set(TEST_PARAMETERS3 -np 3 ${{MPIEXEC_PARAMS}} "./${{PROJECT_NAME}}") add_test(NAME ${{PROJECT_NAME}}_mpi_2 COMMAND "mpirun" ${{TEST_PARAMETERS2}}) add_test(NAME ${{PROJECT_NAME}}_mpi_3 COMMAND "mpirun" ${{TEST_PARAMETERS3}}) add_test(NAME ${{PROJECT_NAME}}_serial COMMAND ${{PROJECT_NAME}}) """ # Subdirectories sub_directories = ["demo"] # Prefix map for subdirectories executable_prefixes = dict(demo="demo_") # Main file name map for subdirectories main_file_names = dict(demo=set(["main.cpp"])) # Projects that use custom CMakeLists.txt (shouldn't overwrite) exclude_projects = [] def generate_cmake_files(subdirectory, generated_files): """Search for C++ code and write CMakeLists.txt files""" cwd = os.getcwd() executable_prefix = executable_prefixes[subdirectory] main_file_name = main_file_names[subdirectory] for root, dirs, files in os.walk(cwd + "/" + subdirectory): cpp_files = 
set() c_files = set() executable_names = set() program_dir = root program_name = os.path.split(root)[-1] skip = False for exclude in exclude_projects: if exclude in root: skip = True if skip: print("Skipping custom CMakeLists.txt file:", root) continue name_forms = dict( project_name=executable_prefix + program_name, src_files="NOT_SET") for f in os.listdir(program_dir): filename, extension = os.path.splitext(f) if extension == ".cpp": cpp_files.add(f) elif extension == ".c": c_files.add(f) elif extension == ".ufl": c_files.add(f.replace(".ufl", ".c")) if ".cpp.rst" in f: cpp_files.add(filename) # If no .cpp, continue if not cpp_files: continue # Name of demo and cpp source files assert not main_file_name.isdisjoint(cpp_files) # If directory contains a main file we assume that only one # executable should be generated for this directory and all # other .cpp files should be linked to this name_forms["src_files"] = ' '.join(cpp_files | c_files) # Check for duplicate executable names if program_name not in executable_names: executable_names.add(program_name) else: warnings.warn("Duplicate executable names found when generating CMakeLists.txt files.") # Write file filename = os.path.join(program_dir, "CMakeLists.txt") generated_files.append(filename) with open(filename, "w") as f: f.write(cmakelists_str.format(**name_forms)) # Generate CMakeLists.txt files for all subdirectories generated_files = [] for subdirectory in sub_directories: generate_cmake_files(subdirectory, generated_files) # Print list of generated files print("The following files were generated:") print("\n".join(generated_files)) dolfinx-0.3.0/cpp/cmake/scripts/generate-form-files.py000066400000000000000000000040171411141764300227170ustar00rootroot00000000000000# Copyright (C) 2005-2019 Anders Logg and Garth N. Wells # # This file is part of DOLFINx. 
# # DOLFINx is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DOLFINx is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DOLFINx. If not, see . """Recompile all forms. This script should be run from the top level C++ directory.""" import os import sys from ffcx.main import main as ffcx_main # Call with "./generate-form-files.py 1" for PETSc complex mode complex_mode = (sys.argv[-1] == "1") # Directories to scan subdirs = ["demo", "test"] # Compile all form files topdir = os.getcwd() failures = [] for subdir in subdirs: for root, dirs, files in os.walk(subdir): # Build list of UFL form files formfiles = [f for f in files if f[-4:] == ".ufl"] if not formfiles: continue # Compile files os.chdir(root) print("Compiling %d forms in %s..." 
% (len(formfiles), root)) for f in set(formfiles): args = [] if complex_mode: args += ["--scalar_type", "double complex"] args.append(f) try: ffcx_main(args) except Exception as e: failures.append((root, f, e)) os.chdir(topdir) # Raise exception of any error have been caught if failures: s = ''.join("\nForm: {}/{}\nException type: {} \nMessage: {}\n".format( failure[0], failure[1], type(failure[2]).__name__, failure[2]) for failure in failures) raise RuntimeError("Failed to compile the forms:\n{}".format(s)) dolfinx-0.3.0/cpp/cmake/templates/000077500000000000000000000000001411141764300170175ustar00rootroot00000000000000dolfinx-0.3.0/cpp/cmake/templates/DOLFINXConfig.cmake.in000066400000000000000000000027701411141764300226650ustar00rootroot00000000000000# - Build details for DOLFINx: Dynamic Object-oriented Library for # - FINite element computation # # This file has been automatically generated. # FIXME: Check that naming conforms to CMake standards @PACKAGE_INIT@ include(CMakeFindDependencyMacro) include(CMakeFindDependencyMacro) find_dependency(MPI REQUIRED) # Check for Boost if(DEFINED ENV{BOOST_ROOT} OR DEFINED BOOST_ROOT) set(Boost_NO_SYSTEM_PATHS on) endif() set(Boost_USE_MULTITHREADED $ENV{BOOST_USE_MULTITHREADED}) set(Boost_VERBOSE TRUE) find_dependency(Boost 1.70 REQUIRED COMPONENTS timer filesystem) # xtensor find_dependency(xtl) find_dependency(xtensor) # Basix find_dependency(Basix) # Add xsimd if the C++ interface was compiled with it if(@xsimd_FOUND@) find_dependency(xsimd) endif() # HDF5 if (NOT TARGET hdf5::hdf5) set(HDF5_PREFER_PARALLEL TRUE) set(HDF5_FIND_DEBUG TRUE) find_dependency(HDF5 COMPONENTS C) if (HDF5_FOUND AND NOT HDF5_IS_PARALLEL) message(FATAL_ERROR "Found serial HDF5 build, MPI HDF5 build required") endif() endif() if (NOT TARGET PETSC::petsc) set(DOLFINX_SKIP_BUILD_TESTS TRUE) list(APPEND CMAKE_MODULE_PATH "@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_LIBDIR@/cmake/dolfinx") find_dependency(PETSc REQUIRED QUIET) endif() if (@SLEPC_FOUND@) 
if (NOT TARGET SLEPC::slepc) set(DOLFINX_SKIP_BUILD_TESTS TRUE) find_dependency(SLEPc REQUIRED QUIET) endif() endif() if (NOT TARGET dolfinx) include("${CMAKE_CURRENT_LIST_DIR}/DOLFINXTargets.cmake") endif() check_required_components(DOLFINX) dolfinx-0.3.0/cpp/cmake/templates/cmake_uninstall.cmake.in000066400000000000000000000014311411141764300235760ustar00rootroot00000000000000if (NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt") message(FATAL_ERROR "Cannot find install manifest: \"@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt\"") endif() file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files) string(REGEX REPLACE "\n" ";" files "${files}") foreach (file ${files}) message(STATUS "Uninstalling \"$ENV{DESTDIR}${file}\"") if (EXISTS "$ENV{DESTDIR}${file}") exec_program( "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\"" OUTPUT_VARIABLE rm_out RETURN_VALUE rm_retval ) if (NOT "${rm_retval}" STREQUAL 0) message(FATAL_ERROR "Problem when removing \"$ENV{DESTDIR}${file}\"") endif() else() message(STATUS "File \"$ENV{DESTDIR}${file}\" does not exist.") endif() endforeach() dolfinx-0.3.0/cpp/cmake/templates/dolfinx.conf.in000066400000000000000000000007121411141764300217360ustar00rootroot00000000000000# Helper file for setting non-default DOLFINx environment variables # Common Unix variables export @OS_LIBRARY_PATH_NAME@=@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_LIBDIR@:$@OS_LIBRARY_PATH_NAME@ export PATH=@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_BINDIR@:$PATH export PKG_CONFIG_PATH=@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_LIBDIR@/pkgconfig:$PKG_CONFIG_PATH # Special macOS variables export DYLD_FRAMEWORK_PATH=/opt/local/Library/Frameworks:$DYLD_FRAMEWORK_PATH dolfinx-0.3.0/cpp/cmake/templates/dolfinx.pc.in000066400000000000000000000011171411141764300214130ustar00rootroot00000000000000# pkg-config configuration for DOLFINx prefix=@CMAKE_INSTALL_PREFIX@ exec_prefix=@CMAKE_INSTALL_PREFIX@ libdir=${exec_prefix}/@CMAKE_INSTALL_LIBDIR@ 
includedir=${prefix}/@CMAKE_INSTALL_INCLUDEDIR@ compiler=@CMAKE_CXX_COMPILER@ definitions=@PKG_DEFINITIONS@ extlibs=@DOLFINX_EXT_LIBS@ Name: DOLFINx Description: Dynamic Object-oriented Library for FINite element computation Version: @DOLFINX_VERSION@ Requires: @PKG_REQUIRES@ Conflicts: Libs: @PKG_LINKFLAGS@ -L${libdir} -ldolfinx Cflags: @PKG_CXXFLAGS@ -DDOLFINX_VERSION=\"@DOLFINX_VERSION@\" ${definitions} -I${includedir} @PKG_INCLUDES@ dolfinx-0.3.0/cpp/demo/000077500000000000000000000000001411141764300146655ustar00rootroot00000000000000dolfinx-0.3.0/cpp/demo/CMakeLists.txt000066400000000000000000000010611411141764300174230ustar00rootroot00000000000000cmake_minimum_required(VERSION 3.16) project(dolfinx-demos) # Find DOLFINx config file find_package(Basix REQUIRED) find_package(DOLFINX REQUIRED) # Enable testing enable_testing() # Macro to add demos. Some subdirectories might be skipped because demos may # not be running in both real and complex modes. macro(add_demo_subdirectory subdir) if (IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${subdir}) add_subdirectory(${subdir}) endif() endmacro(add_demo_subdirectory) # Add demos add_demo_subdirectory(poisson) add_demo_subdirectory(hyperelasticity) dolfinx-0.3.0/cpp/demo/README000066400000000000000000000042321411141764300155460ustar00rootroot00000000000000Documenting DOLFINx demos ========================= The documentation for the DOLFINx demos is written by hand and located together with the demos in the DOLFINx source tree. To document a (new) DOLFINx demo located in the directory foo (for instance pde/poisson), follow the two steps below. In general, the simplest way is probably to look at one of the documented demos for instance (demo/pde/poisson/) and follow the same setup. 1) Add these 3 files * foo/common.txt -- containing common information such as the main features the demo illustrates and, if applicable, a mathematical description of the differential equation that is solved. 
This file should then be included in the C++ and Python versions. * foo/cpp/documentation.rst -- containing the reST source file with the documentation that is specific to the C++ version of the demo. * foo/python/documentation.rst -- containing the reST source file with the documentation that is specific to the Python version of the demo. If either the C++ or the Python version of the demo does not exist, feel free to add the version and continue. 2) Move the directory foo from the directory undocumented/ to the suitable directory (for instance pde/ or la/). Note The demo documentation is automatically included in the complete DOLFINx documentation when running make doc after building DOLFINx. While documenting a demo, it may be handy to only run make doc_demo and then make doc_html_[python|cpp]. Note Tests for the validity of the code snippets used in the demo documentation are included in the standard DOLFINx tests. C++ and Python specific contents ================================ The C++ and Python documentation reST source files should * Explain each step of the solution procedure. Do this by including and explaining code snippets from the demo source code. * Include links to the API documentation using the :cpp:class: and :py:class: directives. Note that for the Python classes, the full module path is required (for instance py:class:dolfinx.cpp.NewtonSolver) * Include the complete set of files needed to run the demo using the include directive. dolfinx-0.3.0/cpp/demo/hyperelasticity/000077500000000000000000000000001411141764300201075ustar00rootroot00000000000000dolfinx-0.3.0/cpp/demo/hyperelasticity/hyperelasticity.ufl000066400000000000000000000052041411141764300240420ustar00rootroot00000000000000# UFL input for hyperleasticity # ============================= # # The first step is to define the variational problem at hand. We define # the variational problem in UFL terms in a separate form file # :download:`HyperElasticity.ufl`. 
# # We are interested in solving for a discrete vector field in three # dimensions, so first we need the appropriate finite element space and # trial and test functions on this space:: # Function spaces element = VectorElement("Lagrange", tetrahedron, 1) # Trial and test functions du = TrialFunction(element) # Incremental displacement v = TestFunction(element) # Test function # Note that ``VectorElement`` creates a finite element space of vector # fields. The dimension of the vector field (the number of components) # is assumed to be the same as the spatial dimension (in this case 3), # unless otherwise specified. # # Next, we will be needing functions for the boundary source ``B``, the # traction ``T`` and the displacement solution itself ``u``:: # Functions u = Coefficient(element) # Displacement from previous iteration # B = Coefficient(element) # Body force per unit volume # T = Coefficient(element) # Traction force on the boundary # Now, we can define the kinematic quantities involved in the model:: # Kinematics d = len(u) I = Identity(d) # Identity tensor F = I + grad(u) # Deformation gradient C = F.T*F # Right Cauchy-Green tensor # Invariants of deformation tensors Ic = tr(C) J = det(F) # Before defining the energy density and thus the total potential # energy, it only remains to specify constants for the elasticity # parameters:: # Elasticity parameters E = 10.0 nu = 0.3 mu = E/(2*(1 + nu)) lmbda = E*nu/((1 + nu)*(1 - 2*nu)) # Both the first variation of the potential energy, and the Jacobian of # the variation, can be automatically computed by a call to # ``derivative``:: # Stored strain energy density (compressible neo-Hookean model) psi = (mu/2)*(Ic - 3) - mu*ln(J) + (lmbda/2)*(ln(J))**2 # Total potential energy Pi = psi*dx # - inner(B, u)*dx - inner(T, u)*ds # First variation of Pi (directional derivative about u in the direction of v) F = derivative(Pi, u, v) # Compute Jacobian of F J = derivative(F, u, du) # Note that ``derivative`` is here used with 
three arguments: the form # to be differentiated, the variable (function) we are supposed to # differentiate with respect too, and the direction the derivative is # taken in. # # Before the form file can be used in the C++ program, it must be # compiled using FFCx by running (on the command-line): # # .. code-block:: sh # # ffcx HyperElasticity.ufl dolfinx-0.3.0/cpp/demo/hyperelasticity/main.cpp000066400000000000000000000163351411141764300215470ustar00rootroot00000000000000#include "hyperelasticity.h" #include #include #include #include #include #include #include #include using namespace dolfinx; // Next: // // .. code-block:: cpp class HyperElasticProblem { public: HyperElasticProblem( std::shared_ptr> L, std::shared_ptr> J, std::vector>> bcs) : _l(L), _j(J), _bcs(bcs), _b(L->function_spaces()[0]->dofmap()->index_map, L->function_spaces()[0]->dofmap()->index_map_bs()), _matA(la::PETScMatrix(fem::create_matrix(*J, "baij"), false)) { auto map = L->function_spaces()[0]->dofmap()->index_map; const int bs = L->function_spaces()[0]->dofmap()->index_map_bs(); std::int32_t size_local = bs * map->size_local(); std::vector ghosts(map->ghosts().begin(), map->ghosts().end()); std::int64_t size_global = bs * map->size_global(); VecCreateGhostBlockWithArray( map->comm(common::IndexMap::Direction::forward), bs, size_local, size_global, ghosts.size(), ghosts.data(), _b.array().data(), &_b_petsc); } /// Destructor virtual ~HyperElasticProblem() { if (_b_petsc) VecDestroy(&_b_petsc); } auto form() { return [](Vec x) { VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD); VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD); }; } /// Compute F at current point x auto F() { return [&](const Vec x, Vec) { // Assemble b and update ghosts xtl::span b(_b.mutable_array()); std::fill(b.begin(), b.end(), 0.0); fem::assemble_vector(b, *_l); VecGhostUpdateBegin(_b_petsc, ADD_VALUES, SCATTER_REVERSE); VecGhostUpdateEnd(_b_petsc, ADD_VALUES, SCATTER_REVERSE); // Set bcs Vec x_local; 
VecGhostGetLocalForm(x, &x_local); PetscInt n = 0; VecGetSize(x_local, &n); const PetscScalar* array = nullptr; VecGetArrayRead(x_local, &array); fem::set_bc(b, _bcs, xtl::span(array, n), -1.0); VecRestoreArrayRead(x, &array); }; } /// Compute J = F' at current point x auto J() { return [&](const Vec, Mat A) { MatZeroEntries(A); fem::assemble_matrix(la::PETScMatrix::set_block_fn(A, ADD_VALUES), *_j, _bcs); MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY); MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY); fem::set_diagonal(la::PETScMatrix::set_fn(A, INSERT_VALUES), *_j->function_spaces()[0], _bcs); MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY); MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY); }; } Vec vector() { return _b_petsc; } Mat matrix() { return _matA.mat(); } private: std::shared_ptr> _l, _j; std::vector>> _bcs; la::Vector _b; Vec _b_petsc = nullptr; la::PETScMatrix _matA; }; int main(int argc, char* argv[]) { common::subsystem::init_logging(argc, argv); common::subsystem::init_petsc(argc, argv); // Set the logging thread name to show the process rank int mpi_rank; MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); std::string thread_name = "RANK " + std::to_string(mpi_rank); loguru::set_thread_name(thread_name.c_str()); { // Inside the ``main`` function, we begin by defining a tetrahedral mesh // of the domain and the function space on this mesh. Here, we choose to // create a unit cube mesh with 25 ( = 24 + 1) vertices in one direction // and 17 ( = 16 + 1) vertices in the other two directions. With this // mesh, we initialize the (finite element) function space defined by the // generated code. // // .. 
code-block:: cpp // Create mesh and define function space auto mesh = std::make_shared(generation::BoxMesh::create( MPI_COMM_WORLD, {{{0.0, 0.0, 0.0}, {1.0, 1.0, 1.0}}}, {10, 10, 10}, mesh::CellType::tetrahedron, mesh::GhostMode::none)); auto V = fem::create_functionspace(functionspace_form_hyperelasticity_F, "u", mesh); // Define solution function auto u = std::make_shared>(V); auto a = std::make_shared>( fem::create_form(*form_hyperelasticity_J, {V, V}, {{"u", u}}, {}, {})); auto L = std::make_shared>( fem::create_form(*form_hyperelasticity_F, {V}, {{"u", u}}, {}, {})); auto u_rotation = std::make_shared>(V); u_rotation->interpolate( [](auto& x) { constexpr double scale = 0.005; // Center of rotation constexpr double x1_c = 0.5; constexpr double x2_c = 0.5; // Large angle of rotation (60 degrees) constexpr double theta = 1.04719755; xt::xarray values = xt::zeros_like(x); // New coordinates auto x1 = xt::row(x, 1); auto x2 = xt::row(x, 2); xt::row(values, 1) = scale * (x1_c + (x1 - x1_c) * std::cos(theta) - (x2 - x2_c) * std::sin(theta) - x1); xt::row(values, 2) = scale * (x2_c + (x1 - x1_c) * std::sin(theta) - (x2 - x2_c) * std::cos(theta) - x2); return values; }); auto u_clamp = std::make_shared>(V); u_clamp->interpolate( [](auto& x) -> xt::xarray { return xt::zeros_like(x); }); // Create Dirichlet boundary conditions auto u0 = std::make_shared>(V); const auto bdofs_left = fem::locate_dofs_geometrical({*V}, [](auto& x) -> xt::xtensor { return xt::isclose(xt::row(x, 0), 0.0); }); const auto bdofs_right = fem::locate_dofs_geometrical({*V}, [](auto& x) -> xt::xtensor { return xt::isclose(xt::row(x, 0), 1.0); }); auto bcs = std::vector({std::make_shared>( u_clamp, std::move(bdofs_left)), std::make_shared>( u_rotation, std::move(bdofs_right))}); HyperElasticProblem problem(L, a, bcs); nls::NewtonSolver newton_solver(MPI_COMM_WORLD); newton_solver.setF(problem.F(), problem.vector()); newton_solver.setJ(problem.J(), problem.matrix()); 
newton_solver.set_form(problem.form()); newton_solver.solve(u->vector()); // Save solution in VTK format io::VTKFile file(MPI_COMM_WORLD, "u.pvd", "w"); file.write({*u}, 0.0); } common::subsystem::finalize_petsc(); return 0; } dolfinx-0.3.0/cpp/demo/poisson/000077500000000000000000000000001411141764300163575ustar00rootroot00000000000000dolfinx-0.3.0/cpp/demo/poisson/main.cpp000066400000000000000000000217711411141764300200170ustar00rootroot00000000000000// Poisson equation (C++) // ====================== // // This demo illustrates how to: // // * Solve a linear partial differential equation // * Create and apply Dirichlet boundary conditions // * Define Expressions // * Define a FunctionSpace // // The solution for :math:`u` in this demo will look as follows: // // .. image:: ../poisson_u.png // :scale: 75 % // // // Equation and problem definition // ------------------------------- // // The Poisson equation is the canonical elliptic partial differential // equation. For a domain :math:`\Omega \subset \mathbb{R}^n` with // boundary :math:`\partial \Omega = \Gamma_{D} \cup \Gamma_{N}`, the // Poisson equation with particular boundary conditions reads: // // .. math:: // - \nabla^{2} u &= f \quad {\rm in} \ \Omega, \\ // u &= 0 \quad {\rm on} \ \Gamma_{D}, \\ // \nabla u \cdot n &= g \quad {\rm on} \ \Gamma_{N}. \\ // // Here, :math:`f` and :math:`g` are input data and :math:`n` denotes the // outward directed boundary normal. The most standard variational form // of Poisson equation reads: find :math:`u \in V` such that // // .. math:: // a(u, v) = L(v) \quad \forall \ v \in V, // // where :math:`V` is a suitable function space and // // .. math:: // a(u, v) &= \int_{\Omega} \nabla u \cdot \nabla v \, {\rm d} x, \\ // L(v) &= \int_{\Omega} f v \, {\rm d} x // + \int_{\Gamma_{N}} g v \, {\rm d} s. // // The expression :math:`a(u, v)` is the bilinear form and :math:`L(v)` // is the linear form. 
It is assumed that all functions in :math:`V` // satisfy the Dirichlet boundary conditions (:math:`u = 0 \ {\rm on} \ // \Gamma_{D}`). // // In this demo, we shall consider the following definitions of the input // functions, the domain, and the boundaries: // // * :math:`\Omega = [0,1] \times [0,1]` (a unit square) // * :math:`\Gamma_{D} = \{(0, y) \cup (1, y) \subset \partial \Omega\}` // (Dirichlet boundary) // * :math:`\Gamma_{N} = \{(x, 0) \cup (x, 1) \subset \partial \Omega\}` // (Neumann boundary) // * :math:`g = \sin(5x)` (normal derivative) // * :math:`f = 10\exp(-((x - 0.5)^2 + (y - 0.5)^2) / 0.02)` (source term) // // // Implementation // -------------- // // The implementation is split in two files: a form file containing the // definition of the variational forms expressed in UFL and a C++ file // containing the actual solver. // // Running this demo requires the files: :download:`main.cpp`, // :download:`Poisson.ufl` and :download:`CMakeLists.txt`. // // // UFL form file // ^^^^^^^^^^^^^ // // The UFL file is implemented in :download:`Poisson.ufl`, and the // explanation of the UFL file can be found at :doc:`here `. // // // C++ program // ^^^^^^^^^^^ // // The main solver is implemented in the :download:`main.cpp` file. // // At the top we include the DOLFINx header file and the generated header // file "Poisson.h" containing the variational forms for the Poisson // equation. For convenience we also include the DOLFINx namespace. // // .. code-block:: cpp #include "poisson.h" #include #include #include #include #include #include using namespace dolfinx; // Then follows the definition of the coefficient functions (for // :math:`f` and :math:`g`), which are derived from the // :cpp:class:`Expression` class in DOLFINx // // .. code-block:: cpp // Inside the ``main`` function, we begin by defining a mesh of the // domain. As the unit square is a very standard domain, we can use a // built-in mesh provided by the :cpp:class:`UnitSquareMesh` factory. 
In // order to create a mesh consisting of 32 x 32 squares with each square // divided into two triangles, and the finite element space (specified in // the form file) defined relative to this mesh, we do as follows // // .. code-block:: cpp int main(int argc, char* argv[]) { common::subsystem::init_logging(argc, argv); common::subsystem::init_petsc(argc, argv); { // Create mesh and function space auto mesh = std::make_shared(generation::RectangleMesh::create( MPI_COMM_WORLD, {{{0.0, 0.0, 0.0}, {1.0, 1.0, 0.0}}}, {32, 32}, mesh::CellType::triangle, mesh::GhostMode::none)); auto V = fem::create_functionspace(functionspace_form_poisson_a, "u", mesh); // Next, we define the variational formulation by initializing the // bilinear and linear forms (:math:`a`, :math:`L`) using the previously // defined :cpp:class:`FunctionSpace` ``V``. Then we can create the // source and boundary flux term (:math:`f`, :math:`g`) and attach these // to the linear form. // // .. code-block:: cpp // Prepare and set Constants for the bilinear form auto kappa = std::make_shared>(2.0); auto f = std::make_shared>(V); auto g = std::make_shared>(V); // Define variational forms auto a = std::make_shared>( fem::create_form(*form_poisson_a, {V, V}, {}, {{"kappa", kappa}}, {})); auto L = std::make_shared>( fem::create_form(*form_poisson_L, {V}, {{"f", f}, {"g", g}}, {}, {})); // Now, the Dirichlet boundary condition (:math:`u = 0`) can be created // using the class :cpp:class:`DirichletBC`. A :cpp:class:`DirichletBC` // takes two arguments: the value of the boundary condition, // and the part of the boundary on which the condition applies. // In our example, the value of the boundary condition (0.0) can // represented using a :cpp:class:`Function`, and the Dirichlet boundary // is defined by the indices of degrees of freedom to which the boundary // condition applies. // The definition of the Dirichlet boundary condition then looks // as follows: // // .. 
code-block:: cpp // FIXME: zero function and make sure ghosts are updated // Define boundary condition auto u0 = std::make_shared>(V); const auto bdofs = fem::locate_dofs_geometrical( {*V}, [](const xt::xtensor& x) -> xt::xtensor { auto x0 = xt::row(x, 0); return xt::isclose(x0, 0.0) or xt::isclose(x0, 1.0); }); std::vector bc{std::make_shared>( u0, std::move(bdofs))}; f->interpolate( [](const xt::xtensor& x) -> xt::xarray { auto dx = xt::square(xt::row(x, 0) - 0.5) + xt::square(xt::row(x, 1) - 0.5); return 10 * xt::exp(-(dx) / 0.02); }); g->interpolate( [](const xt::xtensor& x) -> xt::xarray { return xt::sin(5 * xt::row(x, 0)); }); // Now, we have specified the variational forms and can consider the // solution of the variational problem. First, we need to define a // :cpp:class:`Function` ``u`` to store the solution. (Upon // initialization, it is simply set to the zero function.) Next, we can // call the ``solve`` function with the arguments ``a == L``, ``u`` and // ``bc`` as follows: // // .. 
code-block:: cpp // Compute solution fem::Function u(V); la::PETScMatrix A = la::PETScMatrix(fem::create_matrix(*a), false); la::PETScVector b(*L->function_spaces()[0]->dofmap()->index_map, L->function_spaces()[0]->dofmap()->index_map_bs()); MatZeroEntries(A.mat()); fem::assemble_matrix(la::PETScMatrix::set_block_fn(A.mat(), ADD_VALUES), *a, bc); MatAssemblyBegin(A.mat(), MAT_FLUSH_ASSEMBLY); MatAssemblyEnd(A.mat(), MAT_FLUSH_ASSEMBLY); fem::set_diagonal(la::PETScMatrix::set_fn(A.mat(), INSERT_VALUES), *V, bc); MatAssemblyBegin(A.mat(), MAT_FINAL_ASSEMBLY); MatAssemblyEnd(A.mat(), MAT_FINAL_ASSEMBLY); VecSet(b.vec(), 0.0); VecGhostUpdateBegin(b.vec(), INSERT_VALUES, SCATTER_FORWARD); VecGhostUpdateEnd(b.vec(), INSERT_VALUES, SCATTER_FORWARD); fem::assemble_vector_petsc(b.vec(), *L); fem::apply_lifting_petsc(b.vec(), {a}, {{bc}}, {}, 1.0); VecGhostUpdateBegin(b.vec(), ADD_VALUES, SCATTER_REVERSE); VecGhostUpdateEnd(b.vec(), ADD_VALUES, SCATTER_REVERSE); fem::set_bc_petsc(b.vec(), bc, nullptr); la::PETScKrylovSolver lu(MPI_COMM_WORLD); la::PETScOptions::set("ksp_type", "preonly"); la::PETScOptions::set("pc_type", "lu"); lu.set_from_options(); lu.set_operator(A.mat()); lu.solve(u.vector(), b.vec()); // The function ``u`` will be modified during the call to solve. A // :cpp:class:`Function` can be saved to a file. Here, we output the // solution to a ``VTK`` file (specified using the suffix ``.pvd``) for // visualisation in an external program such as Paraview. // // .. code-block:: cpp // Save solution in VTK format io::VTKFile file(MPI_COMM_WORLD, "u.pvd", "w"); file.write({u}, 0.0); } common::subsystem::finalize_petsc(); return 0; } dolfinx-0.3.0/cpp/demo/poisson/poisson.ufl000066400000000000000000000026671411141764300205740ustar00rootroot00000000000000# UFL input for the Poisson equation # ================================== # # The first step is to define the variational problem at hand. 
We define # the variational problem in UFL terms in a separate form file # :download:`Poisson.ufl`. We begin by defining the finite element:: element = FiniteElement("Lagrange", triangle, 1) # The first argument to :py:class:`FiniteElement` is the finite element # family, the second argument specifies the domain, while the third # argument specifies the polynomial degree. Thus, in this case, our # element ``element`` consists of first-order, continuous Lagrange basis # functions on triangles (or in order words, continuous piecewise linear # polynomials on triangles). # # Next, we use this element to initialize the trial and test functions # (:math:`u` and :math:`v`) and the coefficient functions (:math:`f` and # :math:`g`):: coord_element = VectorElement("Lagrange", triangle, 1) mesh = Mesh(coord_element) V = FunctionSpace(mesh, element) u = TrialFunction(V) v = TestFunction(V) f = Coefficient(V) g = Coefficient(V) kappa = Constant(mesh) # Finally, we define the bilinear and linear forms according to the # variational formulation of the equations:: a = kappa * inner(grad(u), grad(v)) * dx L = inner(f, v) * dx + inner(g, v) * ds # Before the form file can be used in the C++ program, it must be # compiled using FFCx by running (on the command-line): # # .. code-block:: sh # # ffcx Poisson.ufl dolfinx-0.3.0/cpp/doc/000077500000000000000000000000001411141764300145065ustar00rootroot00000000000000dolfinx-0.3.0/cpp/doc/Doxyfile000066400000000000000000003310751411141764300162250ustar00rootroot00000000000000# Doxyfile 1.8.17 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] 
# Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the configuration # file that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = "DOLFINx" # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = $(DOLFINX_VERSION) # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = "DOLFINx C++ interface" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. 
If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = YES # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = YES # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all # documentation generated by doxygen is written. 
Doxygen will use this # information to generate all generated output in the proper direction. # Possible values are: None, LTR, RTL and Context. # The default value is: None. OUTPUT_TEXT_DIRECTION = None # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ is \ provides \ specifies \ contains \ represents \ a \ an \ the # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. 
Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) 
# The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line # such as # /*************** # as being the beginning of a Javadoc-style comment "banner". If set to NO, the # Javadoc-style will behave just like regular comments and it will not be # interpreted by doxygen. # The default value is: NO. JAVADOC_BANNER = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = YES # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. 
TAB_SIZE = 4 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines (in the resulting output). You can put ^^ in the value part of an # alias to insert a newline as if a physical newline was in the original file. # When you need a literal { or } or , in the value part of an alias you have to # escape them by means of a backslash (\), this can lead to conflicts with the # commands \{ and \} for these it is advised to use the version @{ and @} or use # a double escape (\\{ and \\}) ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. 
OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice # sources only. Doxygen will then generate output that is more tailored for that # language. For instance, namespaces will be presented as modules, types will be # separated into more groups, etc. # The default value is: NO. OPTIMIZE_OUTPUT_SLICE = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, JavaScript, # Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, # Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser # tries to guess whether the code is fixed or free formatted code, this is the # default for Fortran type files), VHDL, tcl. For instance to make doxygen treat # .inc files as Fortran files (default is PHP), and .f files as C (default is # Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See https://daringfireball.net/projects/markdown/ for details. 
# The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 0 # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. 
SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. 
INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. 
LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual # methods of a class will be included in the documentation. # The default value is: NO. EXTRACT_PRIV_VIRTUAL = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. 
EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # declarations. If set to NO, these declarations will be included in the # documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. 
If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # (including Cygwin) ands Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. 
SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. 
By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. 
If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. 
The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = YES # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. 
If # EXTRACT_ALL is set to YES then this flag will automatically be disabled. # The default value is: NO. WARN_NO_PARAMDOC = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. # The default value is: NO. WARN_AS_ERROR = YES # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = ../dolfinx # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: https://www.gnu.org/software/libiconv/) for the list of # possible encodings. # The default value is: UTF-8. 
INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), # *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen # C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f, *.for, *.tcl, *.vhd, # *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = *.c \ *.cc \ *.cxx \ *.cpp \ *.c++ \ *.java \ *.ii \ *.ixx \ *.ipp \ *.i++ \ *.inl \ *.idl \ *.ddl \ *.odl \ *.h \ *.hh \ *.hxx \ *.hpp \ *.h++ \ *.cs \ *.d \ *.php \ *.php4 \ *.php5 \ *.phtml \ *.inc \ *.m \ *.markdown \ *.md \ *.mm \ *.dox \ *.py \ *.pyw \ *.f90 \ *.f95 \ *.f03 \ *.f08 \ *.f \ *.for \ *.tcl \ *.vhd \ *.vhdl \ *.ucf \ *.qsf # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. 
EXCLUDE = ../dolfinx/io/pugixml.hpp \ ../dolfinx/io/pugixml.cpp \ ../dolfinx/common/loguru.hpp \ ../dolfinx/common/loguru.cpp # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = _* # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = * # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. 
EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # <filter> <input-file> # # where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. 
FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # entity all documented functions referencing it will be listed. # The default value is: NO. 
REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. 
USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. 
If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see # https://en.wikipedia.org/wiki/Hue for more information. 
For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this # to YES can help to show when doxygen was last run and thus if the # documentation is up to date. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = NO # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that # are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML # page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. 
# The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_MENUS = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = NO # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: https://developer.apple.com/xcode/), introduced with OSX # 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy # genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. 
A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. 
The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). 
# This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. 
Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. 
TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands # to create new LaTeX commands to be used in formulas as building blocks. See # the section "Including formulas" for details. FORMULA_MACROFILE = # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
USE_MATHJAX = YES # When MathJax is enabled you can set the default output format to be used for # the MathJax output. See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details. # Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. # The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = https://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. 
The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use + S # (what the is depends on the OS and browser, but it is typically # , /