fortran-language-server-2.13.0+dfsg.1/.coveragerc

[run]
omit =
    fortls/__init__.py
    fortls/version.py

[report]
exclude_lines =
    if debug:
    log.debug
    except:
    if not PY3K:

[html]
show_contexts = True

fortran-language-server-2.13.0+dfsg.1/.github/CODEOWNERS

* @gnikit

fortran-language-server-2.13.0+dfsg.1/.github/FUNDING.yml

# These are supported funding model platforms

github: gnikit # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: pypi/fortls
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

fortran-language-server-2.13.0+dfsg.1/.github/ISSUE_TEMPLATE/bug_report.md

---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Try and reproduce the `fortls` error through the debug interface, for more see `fortls --debug_help`.
Usually debug requests start like `fortls --debug_filepath your_file.f90 --debug_rootpath . ...`.
Start with posting:

1. a **Minimal Working Example** to demonstrate the bug
2. the `fortls` command to reproduce the issue, or your `fortls`
3. the output of the `fortls` command
4. Any additional JSONRPC requests like the ones produced with `--debug_log`

Alternatively, you can try and describe the steps that you followed to encounter the bug:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots & Animations**
If applicable, add screenshots or GIF/MP4 animations to help explain your problem.

**Setup information (please complete the following information):**

- OS: [e.g. Linux, Mac]
- Python Version [e.g. 3.10]
- fortls Version [e.g. 2.3]
- Code editor used [e.g. VS Code, Vim]
- the Fortran extension for the code editor and its version [e.g. Modern Fortran v3.0.0] (if applicable)

**Configuration information (please complete the following information):**

- Your `.fortlsrc` or `.fortls.json` or `.fortls` configuration file OR any other JSON config being used (if any)
- Any settings specified through your extension [e.g. for VS Code settings from `settings.json`]

**Additional context**
Add any other context about the problem here.

fortran-language-server-2.13.0+dfsg.1/.github/ISSUE_TEMPLATE/feature_request.md

---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

fortran-language-server-2.13.0+dfsg.1/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"

fortran-language-server-2.13.0+dfsg.1/.github/workflows/codeql-analysis.yml

# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ master, dev ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master, dev ]
  schedule:
    - cron: '24 7 * * 2'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # â„šī¸ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # âœī¸ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2

fortran-language-server-2.13.0+dfsg.1/.github/workflows/docs.yml

name: "Docs"

on: [push, pull_request]

jobs:
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: sphinx-toolbox/sphinx-action@master
        with:
          pre-build-command: "pip install .[docs]"
          docs-folder: "docs/"
      - name: Deploy
        uses: peaceiris/actions-gh-pages@v3
        if: github.ref == 'refs/heads/master'
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: docs/_build/html

fortran-language-server-2.13.0+dfsg.1/.github/workflows/main.yml

name: Tests

on: [push, pull_request]
# Allows you to run this workflow manually from the Actions tab
# workflow_dispatch:

jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
      fail-fast: false
    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup
        run: pip install .[dev]
      - name: Unittests
        run: pytest --doctest-modules
      - name: Lint
        run: black --diff --check --verbose .

  coverage:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        python-version: ["3.8", "3.10"]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Coverage report
        run: |
          pip install .[dev]
          pytest --doctest-modules
        shell: bash
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: true

fortran-language-server-2.13.0+dfsg.1/.github/workflows/python-publish.yml

# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: PyPi Release

on:
  release:
    types: [published]

jobs:
  deploy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build
      - name: Build package
        run: python -m build
      - name: Publish to Test PyPi
        if: startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.TEST_PYPI_API_TOKEN }}
          repository_url: https://test.pypi.org/legacy/
      - name: Publish to PyPi
        if: startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

fortran-language-server-2.13.0+dfsg.1/.gitignore

*.pyc
.vscode
*.egg-info
dist/
build/
docs/_build/
docs/fortls_changes.md
fortls/_version.py
.idea
*.o
*.mod
*.smod
*.log
.coverage
coverage.xml

fortran-language-server-2.13.0+dfsg.1/.pre-commit-config.yaml

# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.3.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-added-large-files
        args: ['--maxkb=2000']
  - repo: https://github.com/PyCQA/flake8
    rev: 5.0.4
    hooks:
      - id: flake8
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.37.3
    hooks:
      - id: pyupgrade
  - repo: https://github.com/pycqa/isort
    rev: 5.10.1
    hooks:
      - id: isort
        name: isort (python)
  - repo: https://github.com/psf/black
    rev: 22.6.0
    hooks:
      - id: black

fortran-language-server-2.13.0+dfsg.1/CHANGELOG.md

# CHANGELOG

## Unreleased

## 2.13.0

### Added

- Added additional default configuration file names `.fortlsrc`, `.fortls.json` ([#184](https://github.com/gnikit/fortls/issues/184))
- Added coverage testing for multiple Python versions ([#168](https://github.com/gnikit/fortls/pull/178))
- Added pre-commit.ci to `fortls` ([#168](https://github.com/gnikit/fortls/issues/168))

### Fixed

- Fixed `intent(in out)` not being parsed correctly ([#160](https://github.com/gnikit/fortls/issues/160))

## 2.12.0

### Fixed

- Fixed issue where `pip` would install without `--user` and fail ([#163](https://github.com/gnikit/fortls/issues/163))

## 2.11.0

### Fixed

- Fixed bug throwing diagnostic errors if arguments were named `pure`, `elemental`, etc. ([#157](https://github.com/gnikit/fortls/issues/157))

## 2.10.0

### Fixed

- Fixed `workspace/didChangeConfiguration` requests that caused errors in VS Code logs ([#114](https://github.com/gnikit/fortls/issues/114))

## 2.9.0

### Fixed

- Fixed glob pattern resolution for command line arguments ([#142](https://github.com/gnikit/fortls/issues/142))

### Changed

- Changed the default value of the following options when a mix of the command line interface and the `json` interface is used.
  Instead of having the `json` interface default the values to an empty set, it now defaults to the values of the command line: `excl_paths`, `source_dirs`, `incl_suffixes`, `excl_suffixes`, `include_dirs` ([#143](https://github.com/gnikit/fortls/issues/143))

## 2.8.0

### Added

- Added support for comments in the `json` configuration files ([#137](https://github.com/gnikit/fortls/issues/137))
- Added `sitemap.xml` to documentation webpage ([#134](https://github.com/gnikit/fortls/pull/134))

### Fixed

- Fixed bug where error messages did not post correctly ([#135](https://github.com/gnikit/fortls/issues/135))

## 2.7.0

### Added

- Added doctests in the pytest test suite ([#131](https://github.com/gnikit/fortls/issues/131))

### Changed

- Renamed variables to simplify parser ([#133](https://github.com/gnikit/fortls/pull/133))
- Redesigned parsing functions for short-hand declarations of array dimensions, character length and parsing of kind ([#130](https://github.com/gnikit/fortls/pull/130))

## 2.6.0

### Changed

- Redesigned the `fortls` website to be more aesthetically pleasing and user-friendly ([#112](https://github.com/gnikit/fortls/issues/112))

### Fixed

- Fixed bug where submodule procedure scopes would terminate early if keyword modifiers were used ([#119](https://github.com/gnikit/fortls/issues/119))

## 2.5.0

### Added

- Added `textDocument/documentHighlight` request for Vim and EMACS ([#43](https://github.com/gnikit/fortls/issues/43))
- Added `pre-commit` hook with `flake`, `black`, `isort` and `pyupgrade` ([#106](https://github.com/gnikit/fortls/issues/106))
- Added test support for diagnostic messages

### Changed

- Changed code structure, reformatted files and changed name conventions ([#109](https://github.com/gnikit/fortls/issues/109))
- Updated CONTRIBUTING instructions

## 2.4.0

### Added

- Added multiple unittests for diagnostic messages
- Added `pre-commit` hook to the project ([#106](https://github.com/gnikit/fortls/issues/106))
- Added Code of Conduct
- Added basic support for hovering over `ASSOCIATE` blocks ([#62](https://github.com/gnikit/fortls/issues/62))

### Changed

- Changed the naming convention for Fortran Objects ([#109](https://github.com/gnikit/fortls/issues/109))
- Formatted all files with `pre-commit`

## 2.3.1

### Fixed

- Fixed remote coverage report line diff ([#101](https://github.com/gnikit/fortls/issues/101))

## 2.3.0

### Added

- Added keywords to the PyPi manifest ([#99](https://github.com/gnikit/fortls/issues/99))

### Changed

- Updated `README` to include logo and animations
- Updated `README` to include conda-forge installation instructions

## 2.2.14

### Added

- Added unittests for intrinsics and improved overall coverage

### Changed

- Restructured unittests to individual files for more granular reporting

## 2.2.13

### Added

- Automated the update for GitHub Actions

## 2.2.12

### Added

- Added coverage reporting for Unix + Windows

## 2.2.11

### Added

- Improved autocompletion for Fortran statements, F2018 compliant ([#63](https://github.com/gnikit/fortls/issues/63))

## 2.2.10

### Fixed

- Fixes GoTo Implementation error for intrinsics ([#80](https://github.com/gnikit/fortls/issues/80))

## 2.2.9

### Changed

- Changed how renaming of implicitly named type-bound procedures and their implementations is handled. A unittest was added.
- Rewrote the Fortran parser to be clearer and more modular

## 2.2.8

### Changed

- Disable PyPi autoupdating for pre-releases and dev versions

## 2.2.7

### Changed

- Changed the auto-update feature to skip `anaconda` environments since they handle their dependencies through `conda` and not `pip`

### Fixed

- Fixed missing dependency from `setup.cfg` ([#78](https://github.com/gnikit/fortls/issues/78))
- Updated configuration file variables in documentation

## 2.2.6

### Added

- Added the capability for `fortls` to auto-update; use `--disable_autoupdate` to disable it ([#76](https://github.com/gnikit/fortls/issues/76))

### Deprecated

- Deprecated `--variable_hover` option; it is now always enabled ([#46](https://github.com/gnikit/fortls/issues/46))

## 2.2.5

### Changed

- Updated `setup.cfg` in preparation of submitting package to `conda-forge`
- Added `Editor Integration` section in documentation

### Fixed

- Fixed parsing of `defined` without parenthesis surrounding the definition ([#67](https://github.com/gnikit/fortls/pull/67))

## 2.2.4

### Fixed

- Fixed hovering value of literal and constant strings not displaying ([#54](https://github.com/gnikit/fortls/issues/54))
- Fixed hovering string length size, which now matches the string ([#55](https://github.com/gnikit/fortls/issues/55))
- Fixed space separated keywords not being displayed upon hover ([#60](https://github.com/gnikit/fortls/issues/60))

## 2.2.3

### Changed

- Changed reading in `json` files to be encoded as UTF-8 ([#51](https://github.com/gnikit/fortls/pull/51))

## 2.2.2

### Changed

- Changed the way function hover messages are displayed, now signatures are standardised ([gnikit/fortls#47](https://github.com/gnikit/fortls/issues/47))

### Fixed

- Fixed hovering over functions displaying as their result types ([gnikit/fortls#22](https://github.com/gnikit/fortls/issues/22))
- Fixed function modifiers not displaying upon hover ([gnikit/fortls#48](https://github.com/gnikit/fortls/issues/48))
- Fixed function hover when returning arrays ([gnikit/fortls#50](https://github.com/gnikit/fortls/issues/50))

## 2.2.1

### Changed

- Changed default branch to master and updated Actions workflows

## 2.2.0

### Added

- Added semantic versioning with `setuptools_scm` ([gnikit/fortls#34](https://github.com/gnikit/fortls/issues/34))

### Changed

- Changed from `setup.py` to `setup.toml` and `pyproject.toml` ([gnikit/fortls#33](https://github.com/gnikit/fortls/issues/33))
- Changed documentation CI to an up-to-date action
- Formatted Python imports with `isort`

## 2.1.2

### Fixed

- Fixed code autocompletion bug with f-strings ([#39](https://github.com/hansec/fortran-language-server/issues/39))

## 2.1.1

### Added

- Added additional shields to README, including coverage and Python versions

## 2.1.0

### Added

- Added coverage metric for Codecov
- Added coverage for `WHERE`, `ENUM`, max line/comment diagnostics and multilines
- Adds Windows CI

### Fixed

- Fixed global `sort_keywords` option not propagating during parsing on Windows ([gnikit/fortls#36](https://github.com/gnikit/fortls/issues/36))
- Fixed unittests not propagating debugger state ([gnikit/fortls#35](https://github.com/gnikit/fortls/issues/35))

## 2.0.1

### Added

- Add support for absolute include, source and exclude paths

### Changed

- Changed `USE_info` named tuple to store use modules as `sets` instead of `lists`
- Changed `include_dirs` from a `list` to a `set`
- Automates the versioning with GitHub releases

### Fixed

- Fixed some mutable default argument warnings in methods and classes
- Fixed private variables showing in autocomplete ([#191](https://github.com/hansec/fortran-language-server/issues/191)) ([gnikit/fortls#3](https://github.com/gnikit/fortls/issues/3))

## 2.0.0

### Added

- Adds support for including preprocessor definitions from files same as `pp_defs`
- Adds hover support for preprocessor variables
- Adds Go To Definition for `include` statements
- Adds intrinsic support for `OpenACC` version 3.1
- Adds sphinx autogenerated documentation
- Adds `incl_suffixes` as a configuration option
- Adds `EXTERNAL` as an attribute upon hover

### Changed

- Update constant parameters for `omp_lib` and `omp_lib_kinds` Interface v5.0
- Format json files with `prettier`
- Initialises the log channel and adds `$/setTrace` to override client's (i.e. VS Code) loglevel
- Unified the interfaces from the command line and the configuration options file ([gnikit/fortls#17](https://github.com/gnikit/fortls/issues/17))
- Updated the documentation and simplified the README.md

### Deprecated

- Option `--preserve_keyword_order` has been substituted with its opposite `--sort_keywords`

### Fixed

- Fixes the hover of preprocessor functions. It now displays the function name without the argument list and the function body. The argument list cannot be multiline but the function body can.
- Fixes objects marked `EXTERNAL` across multiple lines ([#169](https://github.com/hansec/fortran-language-server/issues/169))
- Fixes include with external files ([gnikit/fortls#13](https://github.com/gnikit/fortls/issues/13))
- `POINTER` attribute now displays upon hover ([gnikit/fortls#16](https://github.com/gnikit/fortls/issues/16))
- Fixes `END FORALL` end of scope error ([gnikit/fortls#18](https://github.com/gnikit/fortls/issues/18))
- Fixes Fortran line continuation definitions intermingled with preprocessor directives ([#203](https://github.com/hansec/fortran-language-server/issues/203)) ([gnikit/fortls#4](https://github.com/gnikit/fortls/issues/4))
- Fixes `USE` directive ordering issues ([#184](https://github.com/hansec/fortran-language-server/issues/184)) ([gnikit/fortls#7](https://github.com/gnikit/fortls/issues/7))

## 1.16.0

### Added

- Adds value for `PARAMETER` variables on hover ([#116](https://github.com/hansec/fortran-language-server/issues/116)) ([gnikit/fortls#1](https://github.com/gnikit/fortls/issues/1))

## 1.15.2

### Fixed

- Further improves the literal variable hover added in v1.14.0

## 1.15.1

### Fixed

- Fixes premature end of scope with variables named `end` ([gnikit/fortls#9](https://github.com/gnikit/fortls/issues/9))

## 1.15.0

### Added

- Adds `--config` option which allows arbitrary named configuration files

## 1.14.4

### Fixed

- Fixes import host association includes (autocomplete work not complete) ([#187](https://github.com/hansec/fortran-language-server/issues/187))

## 1.14.3

### Fixed

- Fixes parsing of `non_intrinsic` modules ([#206](https://github.com/hansec/fortran-language-server/issues/206))

## 1.14.2

### Fixed

- Fixes error while parsing submodule parent name with spaces ([#207](https://github.com/hansec/fortran-language-server/issues/207))

## 1.14.1

### Fixed

- Fixes language server features not triggering for variables in column 0

## 1.14.0

### Fixed

- Fixes (partially) Fortran literal variable hover ([#188](https://github.com/hansec/fortran-language-server/issues/188))

## 1.13.0

### Added

- Adds Python glob support for `excl_paths`, `source_dirs`, `include_dirs`

## 1.12.1

### Fixed

- Fixes diagnostic error with interfaces as function arguments ([#200](https://github.com/hansec/fortran-language-server/issues/200))

## 1.12.0

### Changed

- Add support for disabling diagnostics globally or on a per-project basis, ref [PR 163](https://github.com/hansec/fortran-language-server/pull/163)

### Fixed

- Fix bug with enum declarations, fixes [#167](https://github.com/hansec/fortran-language-server/issues/167)
- Fix typo in "ISHIFT" and "ISHIFTC" intrinsic functions, ref [PR 165](https://github.com/hansec/fortran-language-server/pull/165)

## 1.11.1

### Fixed

- Fix bug with hover requests introduced in v1.11.0, fixes [#159](https://github.com/hansec/fortran-language-server/issues/159)

## 1.11.0

### Changed

- Add support for specifying the language name returned for hover requests, ref [Fortran IntelliSense #17](https://github.com/hansec/vscode-fortran-ls/issues/17)
- Add support for submodule implementations using the "PROCEDURE" keyword, fixes [#152](https://github.com/hansec/fortran-language-server/issues/152)

### Fixed

- Fix bug with keywords in old style function declarations, fixes [#154](https://github.com/hansec/fortran-language-server/issues/154)
- Fix bug when searching an empty scope, fixes [#151](https://github.com/hansec/fortran-language-server/issues/151)
- Remove erroneous double definition/masking checks for interfaces, fixes [#18](https://github.com/hansec/fortran-language-server/issues/18) and [#138](https://github.com/hansec/fortran-language-server/issues/138)
- README: Add fix for possible installation error

## 1.10.3

### Fixed

- Fix parsing bug with spaces in "old-style" kind specifications, fixes [#142](https://github.com/hansec/fortran-language-server/issues/142)
- Fix issue with erroneous sub-word matching in preprocessor macro substitutions, fixes [#141](https://github.com/hansec/fortran-language-server/issues/141)

## 1.10.2

### Changed

- Add support for "old-style" character length specification, fixes [#130](https://github.com/hansec/fortran-language-server/issues/130) and [#134](https://github.com/hansec/fortran-language-server/issues/134)

### Fixed

- Fix "can't set attribute" error in USE traversal, fixes [#132](https://github.com/hansec/fortran-language-server/issues/132)
- Fix bugs related to optional leading ampersands on continuation lines, fixes [#131](https://github.com/hansec/fortran-language-server/issues/131)
- Fix bug in block parsing with string literals, fixes [#128](https://github.com/hansec/fortran-language-server/issues/128)

## 1.10.1

### Fixed

- Fix bug in semicolon parsing, fixes [#127](https://github.com/hansec/fortran-language-server/issues/127)

## 1.10.0

### Changed

- Initial implementation of preprocessor include file handling, ref [#115](https://github.com/hansec/fortran-language-server/issues/115)
- Add support for specifying file suffixes for preprocessing, ref [#115](https://github.com/hansec/fortran-language-server/issues/115)
- Add support for completion in visibility statements, fixes [#120](https://github.com/hansec/fortran-language-server/issues/120)
- Support "onOpen" requests before a file is written to disk, fixes [#123](https://github.com/hansec/fortran-language-server/issues/123)
- Add support for IMPURE keyword (contributed by @mcocdawc)
- Improve readability by replacing various result arrays with namedtuples

### Fixed

- Fix bug in open string literal detection, fixes [#124](https://github.com/hansec/fortran-language-server/issues/124)
- Fix bug with multiline docstrings that start with a trailing comment, fixes [#118](https://github.com/hansec/fortran-language-server/issues/118)
- Fix symbol types for subroutines and methods in "documentSymbol" and "completion" requests, fixes [#117](https://github.com/hansec/fortran-language-server/issues/117)
- Fix bug where ONLY renaming was not fully tracked in some circumstances
- Fix bug with inline dimension specifications for variables
- Fix accidental message swap in "object not found" and "object not imported" diagnostics
- Fix bug where errors were reported with "module subroutine" and "module function" definitions (no import required)

## 1.9.1

### Fixed

- Fix bug in USE ONLY accounting used for graph pruning, fixes [#122](https://github.com/hansec/fortran-language-server/issues/122)

## 1.9.0

### Changed

- Add support for USE statement renaming requests, ref [#109](https://github.com/hansec/fortran-language-server/issues/109)
- Add support for argument information in variable hover requests, fixes [#107](https://github.com/hansec/fortran-language-server/issues/107)
- Add support for disabling snippets in autocomplete results, fixes [#112](https://github.com/hansec/fortran-language-server/issues/112)
- Prevent file AST updates on Open/Close requests when contents have not changed, ref [#105](https://github.com/hansec/fortran-language-server/issues/105)
- Reduce unnecessary parsing with single line file changes
- Debugging: Add support for printing full result object

### Fixed

- Remove required space between "DOUBLE PRECISION" and "DOUBLE COMPLEX" definitions, fixes [#110](https://github.com/hansec/fortran-language-server/issues/110)
- Fix requests when a user-defined type variable has the same name as a defined type used in that scope

## 1.8.2

### Fixed

- Fix parsing single line WHERE statements with trailing parentheses, fixes [#106](https://github.com/hansec/fortran-language-server/issues/106)
- Fix erroneous object duplication diagnostics for DO, INTERFACE, etc. blocks
- Remove erroneous "unimplemented procedure" diagnostics from abstract type definitions
- Fix parsing bugs with semicolons in trailing comments

## 1.8.1

### Fixed

- Fix bug with requests in lines with tab characters, fixes [#93](https://github.com/hansec/fortran-language-server/issues/93)
- Fix bug with requests following "WRITE(\*,\*)" statements

## 1.8.0

### Changed

- Add full support for ASSOCIATE statements, fixes [#101](https://github.com/hansec/fortran-language-server/issues/101)
- Filter completion suggestions after "MODULE PROCEDURE" statements, fixes [#103](https://github.com/hansec/fortran-language-server/issues/103)
- Filter completion suggestions in type-bound procedure links
- Add support for including external source file directories
- Diagnostics: Line length exceeds maximum length errors
- Speedup language server initialization
- Speedup "textDocument/references" requests

## 1.7.3

### Fixed

- Fix case preservation in hover requests, fixes [#102](https://github.com/hansec/fortran-language-server/issues/102)
- Fix rename requests for type-bound procedures without an explicit link statement (ie. "=>"), fixes [#104](https://github.com/hansec/fortran-language-server/issues/104)
- Fix incorrect "CONTAINS" diagnostic errors with procedure pointers and external interfaces
- Fix bug in diagnostic construction/reporting (introduced in v1.7)
- Fix bugs caused by accidental modification of child object lists

## 1.7.2

### Fixed

- Fix bug with definition/hover requests involving intrinsic functions/modules/variables (introduced in v1.7)

## 1.7.1

### Fixed

- Fix bug with completion and signatureHelp requests on continuation lines (introduced in v1.7)
- Fix out-of-range error with various requests on zero-length lines (introduced in v1.7)

## 1.7.0

### Changed

- Add initial support for "textDocument/codeAction" requests, generate unimplemented deferred procedures
- Show subroutine/function keywords ("PURE", "ELEMENTAL", etc.)
- Add position of object in line to "textDocument/definition" and "textDocument/implementation" results
- Diagnostics: CONTAINS statement placement errors
- Diagnostics: Visibility statement placement errors
- Command line options: Notify when workspace initialization is complete
- Command line options: Set number of threads used during initialization
- Significant refactoring of core code

### Fixed

- Fix "RecursionError" exception with circular user-defined type references, fixes [#100](https://github.com/hansec/fortran-language-server/issues/100)
- Fix bug detecting TYPE definitions with an immediately following colon, ref [#100](https://github.com/hansec/fortran-language-server/issues/100)
- Fix incorrect diagnostics for interface statements with USE instead of IMPORT statements

## 1.6.0

### Changed

- Add support for EXTERNAL subroutines
- Diagnostics: Missing subroutine/function arguments and argument declarations
- Diagnostics: Unimplemented deferred type-bound procedures
- Diagnostics: Unknown TYPE/KIND objects (only if candidate is visible in workspace)
- Diagnostics: IMPORT statements (missing objects and placement)
- Diagnostics: Basic handling for IMPLICIT statements

## 1.5.1

### Changed

- Add support for semicolon separators and multiline preprocessor macros, fixes [#98](https://github.com/hansec/fortran-language-server/issues/98)
- Add various "parsing errors" to debug_parser output

### Fixed

- Use consistent file access method across debug_parser run and language server requests

## 1.5.0

### Changed

- Add support for "textDocument/rename" requests
- Add initial support for Doxygen and FORD style comment blocks, ref [#44](https://github.com/hansec/fortran-language-server/issues/44)

### Fixed

- Fix language server crash with unknown user-defined type fields

### Other changes

- Deprecate "mod_dirs" option in favor of more accurate "source_dirs". Support for "mod_dirs" will be removed in a future release.

## 1.4.0

### Changed

- Add support for "textDocument/implementation" requests, ref [#94](https://github.com/hansec/fortran-language-server/issues/94)
- Add option to preserve keyword ordering, ref [#97](https://github.com/hansec/fortran-language-server/issues/97)

### Fixed

- Fix parsing bug with single line WHERE statements, fixes [#92](https://github.com/hansec/fortran-language-server/issues/92)
- Fix bug with keyword parsing with nested parenthesis, fixes [#97](https://github.com/hansec/fortran-language-server/issues/97)
- Differentiate between type-bound procedures and implementations in "textDocument/references" requests, fixes [#94](https://github.com/hansec/fortran-language-server/issues/94)
- Fix typos in MAX and MIN intrinsic functions, ref [#96](https://github.com/hansec/fortran-language-server/pull/96)

## 1.3.0

### Changed

- Add support for user-defined type members in "textDocument/references" requests, fixes [#88](https://github.com/hansec/fortran-language-server/issues/88)
- Link type-bound procedures with no explicit link to matching named scope in module, fixes [#89](https://github.com/hansec/fortran-language-server/issues/89)
- Report diagnostics related to misplaced "CONTAINS" statements
- Restructure README for improved clarity on capabilities/limitations

### Fixed

- Fix bug with blank/empty lines in free-format continuations, fixes [#91](https://github.com/hansec/fortran-language-server/issues/91)
- Fix exception in "textDocument/references" requests when no object is found, fixes [#86](https://github.com/hansec/fortran-language-server/issues/86)
- Fix bug when relative path is used for --debug_rootpath, fixes [#87](https://github.com/hansec/fortran-language-server/issues/87)

## 1.2.1

### Fixed

- Fix bug in nested user-defined type inheritance, fixes [#85](https://github.com/hansec/fortran-language-server/issues/85)
- Fix bug in completion requests with empty parenthesis in request line

## 1.2.0

### Changed

- Add support for local variables/objects in "textDocument/references" requests, ref [#84](https://github.com/hansec/fortran-language-server/issues/78)
- Improve preprocessing to handle more types of conditional statements and macro substitution, ref [#78](https://github.com/hansec/fortran-language-server/issues/78)
- Report diagnostics for excess "END" statements instead of causing parser failure, ref [#78](https://github.com/hansec/fortran-language-server/issues/78)

### Fixed

- Fix missing "textDocument/references" results when line starts with target object, fixes [#84](https://github.com/hansec/fortran-language-server/issues/84)

## 1.1.1

### Fixed

- Fix bug with backslash URI separators on Windows, fixes [#83](https://github.com/hansec/fortran-language-server/issues/83)

## 1.1.0

### Changed

- Add initial implementation of simple preprocessor, ref [#78](https://github.com/hansec/fortran-language-server/issues/78)

### Fixed

- Updated Fixed/Free detection logic using ampersands to check for comment line, fixes [#81](https://github.com/hansec/fortran-language-server/issues/81)
- Support use of "END" as a variable, fixes [#82](https://github.com/hansec/fortran-language-server/issues/82)

## 1.0.5

### Fixed

- Add support for named "SELECT" statements, fixes [#80](https://github.com/hansec/fortran-language-server/issues/80)
- Track scopes for "ASSIGNMENT" and "OPERATOR" interface statements, fixes [#79](https://github.com/hansec/fortran-language-server/issues/79)
- Fix bug in parsing "SELECT" statements with no space, fixes [#77](https://github.com/hansec/fortran-language-server/issues/77)
- Further improve discrimination between end statements and other items, ref [#73](https://github.com/hansec/fortran-language-server/issues/73)

## 1.0.4

### Fixed

- Normalize file paths when storing/accessing file index, fixes [#75](https://github.com/hansec/fortran-language-server/issues/75)
- Fix intrinsic statement "COUNT" ([#76](https://github.com/hansec/fortran-language-server/pull/76))

## 1.0.3

### Fixed

- Further improve discrimination between end statements and variables/block labels, ref [#73](https://github.com/hansec/fortran-language-server/issues/73)
- Fix autocomplete errors when ASSOCIATE and ENUM statements are present
- Fix severity reporting with "debug_diagnostics" command line option

## 1.0.2

### Fixed

- Fix discrimination between end statements and variables with underscores, fixes [#73](https://github.com/hansec/fortran-language-server/issues/73)
- Detect enum definitions, fixes [#74](https://github.com/hansec/fortran-language-server/issues/74)

## 1.0.1

### Fixed

- Detect and support associate statements, fixes [#72](https://github.com/hansec/fortran-language-server/issues/72)

## 1.0.0

### Changed

- Add parsing of DO/IF/WHERE blocks and report scope end errors
- Detect and report errors with invalid parent for scope definitions
- Improve highlighting for hover requests in VSCode
- Downgrade missing use warnings to information level
- Add intrinsic declaration statement "double complex" ([#70](https://github.com/hansec/fortran-language-server/pull/70))

### Fixed

- Fix bug with leading whitespace on visibility statements, fixes [#69](https://github.com/hansec/fortran-language-server/issues/69)
- Fix parsing errors when "&" and "!" characters are present inside string literals
- Fix parsing bug with multiple leading function/subroutine modifiers (PURE, ELEMENTAL, etc.)

## 0.9.3

### Fixed

- Fix detection of function definitions with leading module and variable statements, fixes [#66](https://github.com/hansec/fortran-language-server/issues/66)
- Properly close remaining open scopes at end of file
- Initialize scope "eline" property, [PR #67](https://github.com/hansec/fortran-language-server/pull/67)

## 0.9.2

### Changed

- Improve handling of different file encodings, [PR #57](https://github.com/hansec/fortran-language-server/pull/57)

### Fixed

- Fix autocomplete results for inherited members of user-defined types when the member type definition is only available in parent type's scope

## 0.9.1

### Changed

- Add support for generic interfaces in type-bound procedures, [#64](https://github.com/hansec/fortran-language-server/issues/64)
- Add parent scope information to masked variable errors, [#48](https://github.com/hansec/fortran-language-server/issues/48)

### Fixed

- Fix parsing deferred length character definitions, [#61](https://github.com/hansec/fortran-language-server/issues/61)
- Fix parsing function definitions with modifiers before type, [#63](https://github.com/hansec/fortran-language-server/issues/63)
- Fix parsing with array construction in subroutine/function calls, [#60](https://github.com/hansec/fortran-language-server/issues/60)

## 0.9.0

### Changed

- Add basic support for workspace/symbol requests
- Add support for excluding source files based on a common suffix

### Fixed

- Prevent detection of variables starting with "use" as USE statements, [#59](https://github.com/hansec/fortran-language-server/issues/59)
- Improve parsing of USE ONLY statements, [#53](https://github.com/hansec/fortran-language-server/issues/53)
- Make sure explicitly specified module directories exist, fixes [#52](https://github.com/hansec/fortran-language-server/issues/52)
- Fix visibility statements with trailing comments, [#49](https://github.com/hansec/fortran-language-server/issues/49)

## 0.8.4

### Fixed

- Check for existence of file during "textDocument/didClose" requests, [#46](https://github.com/hansec/fortran-language-server/issues/46)
- Encode text as UTF-8 in change requests, fixes [#41](https://github.com/hansec/fortran-language-server/issues/41)

## 0.8.3

### Changed

- Add support for generating debug logs
- Add Fortran statements to autocomplete suggestions
- Add support for explicit dimension specifications, fixes [#37](https://github.com/hansec/fortran-language-server/issues/37)

## 0.8.2

### Changed

- Add support for F03 style bracket array initialization, fixes [#35](https://github.com/hansec/fortran-language-server/issues/35)

## 0.8.1

### Fixed

- Fix crash in completion requests with intrinsic modules

## 0.8.0

### Changed

- Reformat completion information and snippets to match common language server conventions
- Provide hover information for overloaded interfaces
- Add support for autocompletion in select type statements
- Add support for type bound procedures with explicit pass statements
- Add support for arguments defined as interfaces in hover and signatureHelp requests
- Unbetafy signatureHelp support

### Fixed

- Fix linking type bound procedures with same name as subroutine/function definition

## 0.7.3

### Fixed

- Improve detection of block statements, fixes [#32](https://github.com/hansec/fortran-language-server/issues/32)
- Fix autocompletion with mixed case object definitions

## 0.7.2

### Fixed

- Fix variable definition detection without spaces, fixes [#30](https://github.com/hansec/fortran-language-server/issues/30)

## 0.7.1

### Changed

- Add option for displaying hover information for variables
- Add subroutine/function keywords to hover information
- Add more keywords to variable information
- Support spaces between subroutine name and parentheses in signatureHelp

### Fixed

- Fix bug with file paths that include spaces, fixes [#29](https://github.com/hansec/fortran-language-server/issues/29)
- Fix bug where arguments were erroneously dropped for procedure variables
- Fix bug where arguments of procedure type did not have definition information in subroutine/function hover results
- Correct spelling of incremental_sync argument, fixes [#28](https://github.com/hansec/fortran-language-server/issues/28)

## 0.7.0

### Changed

- Add support for signatureHelp requests with non-overloaded subroutines/functions
- Provide autocomplete and hover information for procedures with explicit interface definitions
- Add support for Fortran 2008 block constructs, fixes [#23](https://github.com/hansec/fortran-language-server/issues/23)
- Add support for "DOUBLE COMPLEX" datatype

### Fixed

- Fix bug where external interfaces were erroneously public in default private modules
- Fix bug producing repeated objects with include statements

## 0.6.2

### Changed

- Catch and report more types of errors related to file processing, fixes [#21](https://github.com/hansec/fortran-language-server/issues/21)

## 0.6.1

### Fixed

- Fix bug with incremental sync using VSCode on windows, fixes [#20](https://github.com/hansec/fortran-language-server/issues/20)

## 0.6.0

### Changed

- Add keywords to autocomplete results in variable definition statements
- Filter autocompletion results in extend, import, and procedure statements
- Ignore completion requests on scope definition and ending lines to reduce autocomplete noise
- Filter autocompletion results in variable definition statements to reduce autocomplete noise (variables only)
- Ignore autocomplete and definition requests on preprocessor lines
- Add option to test completion and definition requests in debug mode

### Fixed

- Improve export of abstract and external interfaces for completion and definition requests
- Fix scope name detection to prevent confusing variables that start with Fortran statement names
- Fix handling of external and abstract interface specifications
- Fix bug preventing unrestricted USE statements from overriding USE only statements
- Fix bug where file parsing ended prematurely in some cases with line continuations

## 0.5.0

### Changed

- Add intrinsic functions and modules to autocomplete suggestions
- Add support for include statements

### Fixed

- Remove erroneously included global objects from autocomplete results in USE ONLY statements
- Fix displayed type for derived type objects in autocomplete requests

## 0.4.0

### Changed

- Add support for find_references, global and top-level module objects only
- Filter autocomplete suggestions for callable objects in call statements
- Speedup initialization and updates on large projects by accelerating construction of USE tree

### Fixed

- Fix parser error with definitions requiring enclosing scopes in #include files and unnamed programs, fixes [#17](https://github.com/hansec/fortran-language-server/issues/17)
- Fix parser failure with visibility statements in included fortran files, fixes [#16](https://github.com/hansec/fortran-language-server/issues/16)
- Fix detection of lines with trailing comments

## 0.3.7

### Changed

- Automatically trigger autocomplete on `%` character
- Show named interfaces and prototypes in document outline
- Add support for autocomplete without prefix filtering

### Fixed

- Fix occasional language server error in autocompletion with class methods

## 0.3.6

### Changed

- Add support for fortran submodules, fixes [#14](https://github.com/hansec/fortran-language-server/issues/14) and [#15](https://github.com/hansec/fortran-language-server/issues/15)
- Improve line tokenization and parsing

### Fixed

- Fix parsing errors with incomplete function definitions
- Fix bugs in symbol and parser debugging

## 0.3.5

### Fixed

- Improve unicode file handling with Python 3.x
- Add support for unnamed programs, fixes [#13](https://github.com/hansec/fortran-language-server/issues/13)

## 0.3.4

### Fixed

- Fix parser error with uppercase characters in scope names, fixes [#11](https://github.com/hansec/fortran-language-server/issues/11)
- Add support for object names with a leading underscore, fixes [#9](https://github.com/hansec/fortran-language-server/issues/9)
- Do not report diagnostics inside preprocessor if statements, fixes [#7](https://github.com/hansec/fortran-language-server/issues/7)

## 0.3.3

### Changed

- Improved Windows support and added AppVeyor CI testing
- Add support for snippets in autocompletion
- Ignore requests in comment sections

### Fixed

- Fix bug with string/byte handling in Python 3
- Fix bug with multiprocess support on Windows
- Fix bug with URI formatting and paths on Windows, fixes [#8](https://github.com/hansec/fortran-language-server/issues/8)

## 0.3.2

### Fixed

- Fix parsing variable definitions containing separators inside strings, fixes [#4](https://github.com/hansec/fortran-language-server/issues/4)
- Fix incorrect variable masking error in functions, fixes [#5](https://github.com/hansec/fortran-language-server/issues/5)
- Do not report intrinsic modules as unknown, fixes [#2](https://github.com/hansec/fortran-language-server/issues/2) and [#3](https://github.com/hansec/fortran-language-server/issues/3)

## 0.3.1

### Changed

- Do not show warnings for variable masking in interface definitions
- Respect visibility statements when searching for object in scope

### Fixed

- Fix bug in incremental document sync with ending newline

## 0.3.0

### Changed

- Add basic file diagnostics (double declaration, variable masking, unknown USE)
- Indicate optional arguments in autocomplete suggestions
- Detect source code format from file contents instead of extension
- Add support for incremental document synchronization

### Fixed

- Fix parsing error when variable definition line is incomplete
- Fix incorrect line handling with open parentheses
- Fix bug when file parsing/hashing fails in workspace initialization

## 0.2.0

### Changed

- Add support for recursive directory inclusion from "root_path"
- Provide option to skip type members in documentSymbol requests
- Apply visibility statements to objects for autocomplete suggestions
- Filter interface suggestions to only show unique signatures
- Link imported procedures in interface definitions

### Fixed

- Fix line continuation handling for free form files with trailing and leading ampersands
- Improve parentheses matching in line parsing

## 0.1.4

### Changed

- Handle line continuations in language server requests
- Add server version number to help output

### Fixed

- Fix bug when parsing files with unicode characters

## 0.1.3

### Changed

- Include interfaces in autocomplete suggestions
- Restrict autocomplete suggestions by object visibility
- Improve USE statement traversal
- Add notifications for parser failures

### Fixed

- Fix bug where parsing errors during workspace initialization could crash the language server

## 0.1.2

- Synchronize version numbers

## 0.1.1

- fix download link in setup.py

## 0.1.0

- First Release
- Initial release

fortran-language-server-2.13.0+dfsg.1/CODE_OF_CONDUCT.md

# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at giannis.nikiteas@gmail.com. All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.

fortran-language-server-2.13.0+dfsg.1/CONTRIBUTING.md

# Contributing

👍🎉 Thank you for taking the time to contribute! 🎉👍

In this file you will find all the steps necessary to guide you through your first contribution to the project.

Please note our [Code of Conduct](https://github.com/gnikit/fortls/blob/master/CODE_OF_CONDUCT.md) and adhere to it in all your interactions with this project.

## 📚 Getting Started

A good place to start is the [Issues tab](https://github.com/gnikit/fortls/issues) on GitHub. Look for any issues with the `help wanted` tag.

### Downloading âŦ‡ī¸

Firstly, fork the repository from https://github.com/gnikit/fortls. Then clone the forked repository into your local machine.

```sh
git@github.com:<username>/fortls.git
```

Where `<username>` should be your GitHub username.

### Dependencies

To build this project you will need [Python](https://www.python.org/) `>= 3.7` and [pip](https://www.python.org/) `>= 21.0`.

To install all Python dependencies, open a terminal, go into the cloned `fortls` folder, and run:

```sh
pip install -e .[dev,docs]
```

### Testing đŸ§Ē

To verify that your cloning of the GitHub repository worked as expected, open a terminal and run:

```sh
pytest -v
```

This will run the entire unit test suite. You can also run this to verify that you haven't broken anything in the code.

👉 **Tip!** You can run individual tests by selecting the path to the Python file and the method

```sh
pytest test/test_interface.py::test_version_update_pypi
```

### Developing & Debugging đŸžī¸

â—ī¸ Before you start developing, open a terminal inside `fortls` and run:

```sh
pre-commit install
```

This will ensure that all your commits meet the formatting standards of the project.

---

You can now start writing code! Your local `fortls` version will be updated with every code change you make, so you can use your normal code editor to check out the `fortls` features that you have implemented.

It is, however, considerably easier to create compact unittests to check if your changes have worked. A `fortls` test normally involves writing a Python function which sends a JSONRPC request to the server, and the test then checks for the correct server response. Oftentimes small bits of Fortran source code also have to be submitted to be used by the test. You can find various test examples in the `tests` directory.

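To make that request/response pattern concrete, here is a minimal sketch of what such a test can look like. It only assembles a JSON-RPC 2.0 message for an LSP `textDocument/hover` request and checks its framing; the file URI, the cursor position and the commented-out assertion are illustrative assumptions, and the real test suite ships its own helpers for driving the server and comparing responses.

```python
import json


def build_rpc_request(rpc_id: int, method: str, params: dict) -> str:
    # Frame a JSON-RPC 2.0 request with the LSP Content-Length header.
    body = json.dumps({"jsonrpc": "2.0", "id": rpc_id, "method": method, "params": params})
    return f"Content-Length: {len(body)}\r\n\r\n{body}"


def test_hover_request_is_well_formed():
    # Hypothetical fixture file and cursor position; point these at a small
    # Fortran file checked into the test tree when writing a real test.
    params = {
        "textDocument": {"uri": "file:///tmp/example.f90"},
        "position": {"line": 2, "character": 5},
    }
    request = build_rpc_request(1, "textDocument/hover", params)
    # A real test would send `request` to a running fortls instance and then
    # inspect the decoded JSON response, e.g.:
    #   assert "INTEGER" in response["result"]["contents"]["value"]
    assert request.startswith("Content-Length: ")
    assert json.loads(request.split("\r\n\r\n", 1)[1])["method"] == "textDocument/hover"
```

When writing a real test, prefer reusing the existing helpers in the test suite rather than hand-rolling the message framing as done in this sketch.
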
fortran-language-server-2.13.0+dfsg.1/README.md000066400000000000000000000211021450400537300206670ustar00rootroot00000000000000![alt](https://raw.githubusercontent.com/gnikit/fortls/master/assets/logo.png) # fortls - Fortran Language Server ![PyPI](https://img.shields.io/pypi/v/fortls?style=flat-square) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/fortls?style=flat-square) [![PyPi Downloads via PePy](https://img.shields.io/badge/dynamic/json?style=flat-square&color&maxAge=86400&label=PyPi%20Downloads&query=%24.total_downloads&url=https%3A%2F%2Fapi.pepy.tech%2Fapi%2Fprojects%2Ffortls)](https://pepy.tech/project/fortls) ![Conda](https://img.shields.io/conda/dn/conda-forge/fortls?label=Anaconda&style=flat-square) ![GitHub License](https://img.shields.io/github/license/gnikit/fortls?style=flat-square) ![GitHub Workflow Status (branch)](https://img.shields.io/github/workflow/status/gnikit/fortls/Tests/master?label=CI&style=flat-square) ![GitHub Workflow Status (branch)](https://img.shields.io/github/workflow/status/gnikit/fortls/Docs/master?label=Docs&style=flat-square) ![Codecov](https://img.shields.io/codecov/c/github/gnikit/fortls?style=flat-square) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black) ![GitHub Repo stars](https://img.shields.io/github/stars/gnikit/fortls?color=yellow&style=flat-square) [https://github.com/sponsors/gnikit](https://github.com/sponsors/gnikit) [https://paypal.me/inikit](https://paypal.me/inikit) ![alt](https://raw.githubusercontent.com/gnikit/fortls/master/assets/animations/intro-demo.gif) `fortls` is an implementation of the [Language Server Protocol](https://github.com/Microsoft/language-server-protocol) (LSP) for Fortran using Python (3.7+). All code editors that support LSP can integrate with `fortls` see the section [Editor Integration](https://gnikit.github.io/fortls/editor_integration.html#editor-integration) in the documentation. Some supported code editors include: [Visual Studio Code](https://gnikit.github.io/fortls/editor_integration.html#visual-studio-code), [Atom](https://gnikit.github.io/fortls/editor_integration.html#atom), [Sublime Text](https://gnikit.github.io/fortls/editor_integration.html#sublime-text), [(Neo)Vim](https://gnikit.github.io/fortls/editor_integration.html#vim-neovim-gvim), and [Emacs](https://gnikit.github.io/fortls/editor_integration.html#emacs). 
## Features - Project-wide and Document symbol detection and Renaming - Hover support, Signature help and Auto-completion - GoTo/Peek implementation and Find/Peek references - Preprocessor support - Documentation parsing ([Doxygen](http://www.doxygen.org/) and [FORD](https://github.com/Fortran-FOSS-Programmers/ford) styles) - Access to multiple intrinsic modules and functions - `ISO_FORTRAN_ENV` GCC 11.2.0 - `ISO_C_BINDING` GCC 11.2.0 - `IEEE_EXCEPTIONS`, `IEEE_ARITHMETIC`, `IEEE_FEATURES` GCC 11.2.0 - OpenMP `OMP_LIB`, `OMP_LIB_KINDS` v5.0 - OpenACC `OPENACC`, `OPENACC_KINDS` v3.1 - Diagnostics - Multiple definitions with the same variable name - Variable definition masks definition from parent scope - Missing subroutine/function arguments - Unknown user-defined type used in `TYPE`/`CLASS` definition (only if visible in project) - Unclosed blocks/scopes - Invalid scope nesting - Unknown modules in `USE` statement - Unimplemented deferred type-bound procedures - Use of non-imported variables/objects in interface blocks - Statement placement errors (`CONTAINS`, `IMPLICIT`, `IMPORT`) - Code actions - Generate type-bound procedures and implementation templates for deferred procedures ### Notes/Limitations - Signature help and hover do not elegantly handle overloaded functions, i.e. interfaces ## `fortls` vs `fortran-language-server` This project was originally based on the `fortran-language-server` LSP implementation, but the two projects have since diverged. `fortls` (this project) is now developed independently of the upstream `hansec/fortran-language-server` project and contains numerous new features and bug fixes that the original `fortran-language-server` does not. For a complete and detailed list of the differences between the two Language Servers see the Documentation section: [Unique fortls features (not in fortran-language-server)](https://gnikit.github.io/fortls/fortls_changes.html) The name of the executable for this project has been chosen to remain `fortls` to allow for integration with pre-existing plugins and workflows, but it could change in the future. ## Installation ### PyPi ```sh pip install fortls ``` ### Anaconda ```sh conda install -c conda-forge fortls ``` For more information about the Anaconda installation [see](https://github.com/conda-forge/fortls-feedstock#about-fortls). ### Common installation problems It is **NOT** recommended to have `fortls` and `fortran-language-server` installed simultaneously, since they use the same binary name. If you are having trouble getting `fortls` to work, try uninstalling `fortran-language-server` and reinstalling `fortls`. With `pip` ```sh pip uninstall fortran-language-server pip install fortls --upgrade ``` or with Anaconda ```sh conda uninstall fortran-language-server conda install -c conda-forge fortls ``` ## Settings `fortls` can be configured through both the command line, e.g. `fortls --hover_signature`, or through a JSON configuration file.
The two interfaces are identical and a full list of the available options can be found in the [Documentation](https://gnikit.github.io/fortls/options.html) or through `fortls -h` An example for a Configuration file is given below ```json { "incremental_sync": true, "lowercase_intrinsics": true, "hover_signature": true, "use_signature_help": true, "excl_paths": ["tests/**", "tools/**"], "excl_suffixes": ["_skip.f90"], "include_dirs": ["include/**"], "pp_suffixes": [".F90", ".h"], "pp_defs": { "HAVE_HDF5": "", "MPI_Comm": "integer" } } ``` ## Implemented server requests | Request | Description | | -------------------------------- | ------------------------------------------------------ | | `workspace/symbol` | Get workspace-wide symbols | | `textDocument/documentSymbol` | Get document symbols e.g. functions, subroutines, etc. | | `textDocument/completion` | Suggested tab-completion when typing | | `textDocument/signatureHelp` | Get signature information at a given cursor position | | `textDocument/definition` | GoTo definition/Peek definition | | `textDocument/references` | Find all/Peek references | | `textDocument/documentHighlight` | Same as `textDocument/references` | | `textDocument/hover` | Show messages and signatures upon hover | | `textDocument/implementation` | GoTo implementation/Peek implementation | | `textDocument/rename` | Rename a symbol across the workspace | | `textDocument/didOpen` | Document synchronisation upon opening | | `textDocument/didSave` | Document synchronisation upon saving | | `textDocument/didClose` | Document synchronisation upon closing | | `textDocument/didChange` | Document synchronisation upon changes to the document | | `textDocument/codeAction` | **Experimental** Generate code | ## Acknowledgements This project would not have been possible without the original work of [@hansec](https://github.com/hansec/) in [`fortran-language-server`](https://github.com/hansec/fortran-language-server) ## Bug reports When [filing bugs](https://github.com/gnikit/fortls/issues/new) please provide example code to reproduce the observed issue. ## Security Policy To report a security vulnerability please follow the instructions in our [Security page](https://github.com/gnikit/fortls/security/policy). ## License This project is made available under the [MIT License](https://github.com/gnikit/fortls/blob/master/LICENSE). fortran-language-server-2.13.0+dfsg.1/SECURITY.md000066400000000000000000000011041450400537300212010ustar00rootroot00000000000000# Security Policy ## Supported Versions `fortls` supports **ONLY** the latest Release. An autoupdate function is enabled by default to fetch the newest updates from `PyPi`. For Anaconda environments the autoupdate functionality is disabled and it is up to the user to update to the latest version. ## Reporting a Vulnerability The codebase is regularly scanned and patched for any potential security vulnerabilities. If you manage to find a vulnerability in the Language Server please open an [Bug Report](https://github.com/gnikit/fortls/issues) with prefix: **SECURITY:**. fortran-language-server-2.13.0+dfsg.1/docs/000077500000000000000000000000001450400537300203445ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/docs/Makefile000066400000000000000000000013731450400537300220100ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. 
SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SPHINXAPIDOC ?= sphinx-apidoc PANDOC ?= pandoc SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) modules: @$(SPHINXAPIDOC) -f -H "Developers' documentations" ../fortls -o . fortran-language-server-2.13.0+dfsg.1/docs/conf.py000066400000000000000000000101711450400537300216430ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath("..")) from fortls import __version__ # noqa: E402 # Generate the agglomerated changes (from the CHANGELOG) between fortls # and the fortran-language-server project with open("../CHANGELOG.md", "r") as f: lns = f.readlines() lns = lns[0 : lns.index("## 1.12.0\n")] changes = { "Added": [], "Changed": [], "Deprecated": [], "Removed": [], "Fixed": [], "Security": [], } field = "" for i in lns: if i.startswith("## "): continue if i.startswith("### "): field = i[4:-1] continue if i.startswith("- ") or i.startswith(" "): changes[field].append(i) new_file = ["# Unique fortls features (not in fortran-language-server)\n"] for key, val in changes.items(): if val: new_file.append(f"\n## {key}\n\n") new_file.extend(val) with open("fortls_changes.md", "w") as f: f.writelines(new_file) # -- Project information ----------------------------------------------------- project = "fortls" copyright = "2021-2022, Giannis Nikiteas" author = "Giannis Nikiteas" # The full version, including alpha/beta/rc tags release = __version__ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinxarg.ext", "sphinx.ext.autodoc", "sphinx.ext.autosectionlabel", "sphinx.ext.autosummary", "sphinx.ext.napoleon", "sphinx.ext.intersphinx", "sphinx.ext.inheritance_diagram", "sphinx_autodoc_typehints", "sphinx.ext.autosectionlabel", "sphinx_design", "sphinx_copybutton", "myst_parser", "sphinx_sitemap", ] # For sphinx_design in Markdown myst_enable_extensions = ["colon_fence"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] source_suffix = [".rst", ".md"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
# html_theme = "alabaster" html_theme = "sphinx_rtd_theme" html_theme = "furo" html_title = "fortls" html_logo = "../assets/logo.svg" html_favicon = "../assets/icon.svg" html_baseurl = "https://gnikit.github.io/fortls/" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. html_extra_path = ["html_extra"] # Default is {version}{lang}{link} sitemap_url_scheme = "{link}" display_toc = True # autodoc_default_flags = ["members"] autosummary_generate = True intersphinx_mapping = { "python": ("https://docs.python.org/3.10", None), } inheritance_graph_attrs = { "size": '"6.0, 8.0"', "fontsize": 32, "bgcolor": "transparent", } inheritance_node_attrs = { "color": "black", "fillcolor": "white", "style": '"filled,solid"', } inheritance_edge_attrs = { "penwidth": 1.2, "arrowsize": 0.8, } fortran-language-server-2.13.0+dfsg.1/docs/contact.rst000066400000000000000000000014651450400537300225370ustar00rootroot00000000000000Contact Us =============== Are you a company that uses ``fortls``? Do you need technical support? Is there a feature missing that you would like to see or have you spotted a bug? **Reach out and let us know!** You can reach out in a number of ways: - Start a `GitHub Discussion `__. - Ask a question on `Fortran Language Discourse `__ and tag `@gnikit `__ in your post. - For Feature Requests open an issue on `GitHub `__. - For Bug Reports, open a bug report on `GitHub `__. Make sure to check the open GitHub issues! - For any other inquiry contact ``gnikit [@] duck [.] com`` fortran-language-server-2.13.0+dfsg.1/docs/contributing.rst000066400000000000000000000020341450400537300236040ustar00rootroot00000000000000 Contributing to fortls ====================== There are a few ways you can support the ``fortls`` project. Financial Support ------------------ You can support us financially by becoming a **GitHub Sponsor** or by making a **PayPal Donation**. .. grid:: 2 :gutter: 0 :class-container: sd-text-center sd-pt-4 :class-row: sd-align-minor-center .. grid-item:: .. button-link:: https://github.com/sponsors/gnikit :ref-type: ref :outline: :color: danger :class: sd-px-2 sd-fs-4 Become a **Sponsor** :octicon:`mark-github;2em;sd-text-black` :octicon:`heart-fill;2em;sd-text-danger` .. grid-item:: .. button-link:: https://www.paypal.com/paypalme/inikit :ref-type: ref :color: primary :class: sd-px-2 sd-fs-5 Make a **Donation** :fa:`fa-paypal` Contributing Code ----------------- .. include:: ./../CONTRIBUTING.md :parser: myst_parser.sphinx_ :start-line: 2 fortran-language-server-2.13.0+dfsg.1/docs/editor_integration.rst000066400000000000000000000171271450400537300247770ustar00rootroot00000000000000Editor Integration =================== `Visual Studio Code `__ ------------------------------------------------------- The Language Server is natively supported through the `Modern Fortran`_ extension. Install ``fortls`` then install the extension and all the server's features should be instantly available. .. _Modern Fortran: https://marketplace.visualstudio.com/items?itemName=fortran-lang.linter-gfortran .. important:: Make sure that ``fortls`` is reachable in your ``$PATH``. 
If not, you can specify the option ``"fortran.fortls.path": "/custom/path/to/fortls"`` `Atom `__ --------------------------- Firstly, install ``fortls``, then install the `language-fortran`_ plugin by `@dparkins`_ to get Fortran syntax highlighting. Finally, install either `fortran-lsp`_ by `@gnikit`_ or `ide-fortran`_ by `@hansec`_ .. warning:: `fortran-lsp`_ has been created solely for the ``fortls`` Language Server, hence it natively interfaces with ``fortls``. `ide-fortran`_ was created for an older, now deprecated, Fortran Language Server, hence the options available through the extension are not representative of ``fortls``'s interface. .. _language-fortran: https://atom.io/packages/language-fortran .. _@dparkins: https://github.com/dparkins .. _fortran-lsp: https://atom.io/packages/fortran-lsp .. _@gnikit: https://github.com/gnikit .. _ide-fortran: https://atom.io/packages/ide-fortran .. _@hansec: https://github.com/hansec `Sublime Text `__ ----------------------------------------------- Firstly, install ``fortls`` then install the `LSP`_ package from package control. Finally, install the `Fortran`_ package and add the following to your configuration .. code-block:: json { "clients": { "fortls": { "enabled": true, "command": ["fortls", "--notify_init"], "selector": "source.modern-fortran | source.fixedform-fortran" } } } For more details see the LSP `documentation`_. .. _LSP: https://github.com/sublimelsp/LSP .. _Fortran: https://packagecontrol.io/packages/Fortran .. _documentation: https://lsp.sublimetext.io/language_servers/#fortran `neovim `__ ------------------------------- .. warning:: For neovim versions < 0.5.0 follow the instructions in the :ref:`vim` section. Neovim version >= 0.5.0 `natively supports LSP `_. To enable the native LSP functionality install the `lspconfig`_ plugin with your favourite plugin manager. Then in your configuration file (i.e. ``init.lua``) add the following: .. code-block:: lua require'lspconfig'.fortls.setup{} If additional ``fortls`` options need to be passed, you can do that through the ``cmd`` option in ``setup{}`` .. code-block:: lua require'lspconfig'.fortls.setup{ cmd = { 'fortls', '--lowercase_intrinsics', '--hover_signature', '--hover_language=fortran', '--use_signature_help' } } .. important:: If you are just starting with ``neovim`` it is strongly recommended to use the `Suggested configuration`_ from `lspconfig`_ for keybindings and server attaching. **Remember to attach the server during setup{}** .. _lspconfig: https://github.com/neovim/nvim-lspconfig .. _Suggested configuration: https://github.com/neovim/nvim-lspconfig#suggested-configuration .. _vim: `Vim `__ ------------------------------ Vim does not support LSP natively, so a third-party extension needs to be installed. A few options are available: `YouCompleteMe `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `YouCompleteMe `__ is a popular Vim plugin and code-completion engine that also provides an LSP interface. You can therefore use it to register Language Servers like ``fortls``. For more information about configuring an arbitrary Language Server in YouCompleteMe, `see here `__. ..
code-block:: vim " YouCompleteMe configuration options let g:ycm_language_server = \[ \ { \ 'name': 'fortls', \ 'cmdline': ['fortls', '--hover_language', 'fortran', '--notify_init', '--hover_signature', '--use_signature_help'], \ 'filetypes': ['fortran'], \ 'project_root_files': ['.fortls'], \ }, \] nmap yfw (YCMFindSymbolInWorkspace) nmap yfd (YCMFindSymbolInDocument) `LanguageClient-neovim `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Firstly install the plugin `LanguageClient-neovim`_. Then edit your ``~/.vimrc`` settings file to set ``fortls`` for Fortran files .. code-block:: vim " Required for operations modifying multiple buffers like rename. set hidden let g:LanguageClient_serverCommands = { " Add any default arguments you want fortls to have inside [] \ 'fortran': ['fortls', '--hover_signature', '--hover_language', 'fortran', '--use_signature_help'], \ } " note that if you are using Plug mapping you should not use `noremap` mappings. nmap (lcn-menu) " Or map each action separately nmap K (lcn-hover) nmap gd (lcn-definition) nmap (lcn-rename) .. _LanguageClient-neovim: https://github.com/autozimu/LanguageClient-neovim `EMACS `__ ----------------------------------------------- `LSP Mode `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Install the `lsp-mode`_ plugin. This should then allow for the variables `lsp-clients-fortls-args`_ and `lsp-clients-fortls-executable`_ to be defined in the ``~/.emacs`` configuration file. .. _lsp-mode: https://emacs-lsp.github.io/lsp-mode/page/installation .. _lsp-clients-fortls-args: https://emacs-lsp.github.io/lsp-mode/page/lsp-fortran/#lsp-clients-fortls-args .. _lsp-clients-fortls-executable: https://emacs-lsp.github.io/lsp-mode/page/lsp-fortran/#lsp-clients-fortls-executable `Eglot `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Install the `eglot`_ package which supports fortls out of the box. This can be done in emacs version > 26.1 via ``M-x package-install RET eglot RET``. Arguments to ``fortls`` can be provided in the form .. code-block:: elisp (add-to-list 'eglot-server-programs '(f90-mode . ("fortls" "--notify_init" "--nthreads=4"))) .. _eglot: https://github.com/joaotavora/eglot Visual Studio 2017 ------------------ Installing this `VS17 extension`_ should enable ``fortls`` features in Visual Studio .. _VS17 extension: https://github.com/michaelkonecny/vs-fortran-ls-client `Kakoune `__ ---------------------------------- Install `kak-lsp `_. Edit the ``kak-lsp.toml`` config file to include: .. code-block:: sh [language.fortran] filetypes = ["fortran"] roots = [".git", ".fortls"] command = "fortls" args = ["--symbol_skip_mem", "--incremental_sync", "--autocomplete_no_prefix", "--lowercase_intrisics"] Edit your ``kakrc`` config to enable ``kak-lsp``, adding ``fortran`` as a filetype: .. code-block:: sh eval %sh{kak-lsp --kakoune -s $kak_session} # lsp-enable hook global WinSetOption filetype=(fortran) %{ lsp-enable-window } fortran-language-server-2.13.0+dfsg.1/docs/features.rst000066400000000000000000000070531450400537300227210ustar00rootroot00000000000000Features =============== - Project-wide and Document symbol detection and Renaming - Hover support, Signature help and Auto-completion - GoTo/Peek implementation and Find/Peek references - Preprocessor support - Documentation parsing `Doxygen `__ and `FORD `__ styles - Diagnostics - Code actions - Intrinsics modules Completion ---------- .. image:: ../assets/lsp/completion-ani.gif .. 
image:: ../assets/lsp/completion.png Hover ------- .. image:: ../assets/lsp/hover.png .. image:: ../assets/lsp/hover2.png Symbols ------------ Project-wide and single Document symbol search .. image:: ../assets/lsp/symbols-workspace.png .. image:: ../assets/lsp/symbols-doc.png Signature Help ---------------- .. image:: ../assets/lsp/sig-help.gif Find References ------------------ .. figure:: ../assets/lsp/definition-goto.gif :align: left *Go To Definition of a function* .. figure:: ../assets/lsp/definition-peek.png :align: left *Peek into the Definition of a function* .. figure:: ../assets/lsp/references-peek.png :align: left *Peek into all the References of a function* Renaming ------------ .. figure:: ../assets/lsp/rename.gif :align: left *Rename a variable* Diagnostics ------------- - Multiple definitions with the same variable name - Variable definition masks definition from parent scope - Missing subroutine/function arguments - Unknown user-defined type used in ``TYPE``/ ``CLASS`` definition (only if visible in project) - Unclosed blocks/scopes - Invalid scope nesting - Unknown modules in ``USE`` statement - Unimplemented deferred type-bound procedures - Use of non-imported variables/objects in interface blocks - Statement placement errors (``CONTAINS``, ``IMPLICIT``, ``IMPORT``) Code Actions --------------- - Generate type-bound procedures and implementation templates for deferred procedures Intrinsics Modules ------------------ - ``ISO_FORTRAN_ENV``, ``IOS_C_BINDING`` GCC 11.2.0 - ``IEEE_EXCEPTIONS``, ``IEEE_ARITHMETIC``, ``IEEE_FEATURES`` GCC 11.2.0 - OpenMP ``OMP_LIB``, ``OMP_LIB_KINDS`` v5.0 - OpenACC ``OPENACC``, ``OPENACC_KINDS`` v3.1 All LSP Requests -------------------- .. list-table:: tmp :header-rows: 1 * - Request - Description * - ``workspace/symbol`` - Get workspace-wide symbols * - ``textDocument/documentSymbol`` - Get document symbols e.g. functions, subroutines, etc. * - ``textDocument/completion`` - Suggested tab-completion when typing * - ``textDocument/signatureHelp`` - Get signature information at a given cursor position * - ``textDocument/definition`` - GoTo definition/Peek definition * - ``textDocument/references`` - Find all/Peek references * - ``textDocument/documentHighlight`` - Same as ``textDocument/references`` * - ``textDocument/hover`` - Show messages and signatures upon hover * - ``textDocument/implementation`` - GoTo implementation/Peek implementation * - ``textDocument/rename`` - Rename a symbol across the workspace * - ``textDocument/didOpen`` - Document synchronisation upon opening * - ``textDocument/didSave`` - Document synchronisation upon saving * - ``textDocument/didClose`` - Document synchronisation upon closing * - ``textDocument/didChange`` - Document synchronisation upon changes to the document * - ``textDocument/codeAction`` - **Experimental** Generate code fortran-language-server-2.13.0+dfsg.1/docs/fortls.rst000066400000000000000000000035021450400537300224070ustar00rootroot00000000000000fortls package ============== Submodules ---------- fortls.constants module ----------------------- .. automodule:: fortls.constants :members: :undoc-members: :show-inheritance: fortls.ftypes module -------------------- .. automodule:: fortls.ftypes :members: :undoc-members: :show-inheritance: fortls.helper\_functions module ------------------------------- .. automodule:: fortls.helper_functions :members: :undoc-members: :show-inheritance: fortls.interface module ----------------------- .. 
automodule:: fortls.interface :members: :undoc-members: :show-inheritance: fortls.intrinsics module ------------------------ .. automodule:: fortls.intrinsics :members: :undoc-members: :show-inheritance: fortls.json\_templates module ----------------------------- .. automodule:: fortls.json_templates :members: :undoc-members: :show-inheritance: fortls.jsonrpc module --------------------- .. automodule:: fortls.jsonrpc :members: :undoc-members: :show-inheritance: fortls.langserver module ------------------------ .. automodule:: fortls.langserver :members: :undoc-members: :show-inheritance: fortls.objects module --------------------- .. automodule:: fortls.objects :members: :undoc-members: :show-inheritance: fortls.parse\_fortran module ---------------------------- .. automodule:: fortls.parse_fortran :members: :undoc-members: :show-inheritance: fortls.regex\_patterns module ----------------------------- .. automodule:: fortls.regex_patterns :members: :undoc-members: :show-inheritance: fortls.version module --------------------- .. automodule:: fortls.version :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: fortls :members: :undoc-members: :show-inheritance: fortran-language-server-2.13.0+dfsg.1/docs/html_extra/000077500000000000000000000000001450400537300225135ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/docs/html_extra/google3e426562ce42e98f.html000066400000000000000000000000661450400537300267520ustar00rootroot00000000000000google-site-verification: google3e426562ce42e98f.html fortran-language-server-2.13.0+dfsg.1/docs/html_extra/robots.txt000066400000000000000000000001151450400537300245610ustar00rootroot00000000000000User-agent: * Disallow: Sitemap: https://gnikit.github.io/fortls/sitemap.xml fortran-language-server-2.13.0+dfsg.1/docs/index.rst000066400000000000000000000107461450400537300222150ustar00rootroot00000000000000:sd_hide_title: ============== fortls ============== .. div:: landing-title :style: padding: 0.1rem 0.5rem 0.6rem 0; background-image: linear-gradient(315deg, #2753e3 0%, #734f96 74%); clip-path: polygon(0px 0px, 100% 0%, 100% 100%, 0% calc(100% - 1.5rem)); -webkit-clip-path: polygon(0px 0px, 100% 0%, 100% 100%, 0% calc(100% - 1.5rem)); .. grid:: :reverse: :gutter: 2 3 3 3 :margin: 4 4 1 2 .. grid-item:: :columns: 12 6 6 6 .. image:: ../assets/logo2-animated.svg :alt: fortls :width: 100% .. grid-item:: :columns: 12 6 6 6 :child-align: justify :class: sd-text-white sd-fs-3 A Language Server for Fortran providing code completion, diagnostics, hovering and more. .. button-ref:: quickstart :ref-type: doc :outline: :color: white :class: sd-px-4 sd-fs-5 Get Started .. grid:: 2 :gutter: 0 :class-container: sd-text-center sd-pt-4 :class-row: sd-align-minor-center .. grid-item:: .. button-link:: https://github.com/sponsors/gnikit :ref-type: ref :outline: :color: danger :class: sd-px-2 sd-fs-4 Become a **Sponsor** :octicon:`mark-github;2em;sd-text-black` :octicon:`heart-fill;2em;sd-text-danger` .. grid-item:: .. button-link:: https://www.paypal.com/paypalme/inikit :ref-type: ref :color: primary :class: sd-px-2 sd-fs-5 Make a **Donation** :fa:`fa-paypal` .. div:: sd-text-center *A tool to supercharge Fortran development!* .. tab-set:: :class: sd-align-major-center .. tab-item:: Completion :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. image:: ../assets/lsp/completion-ani.gif .. tab-item:: Hover :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. 
image:: ../assets/lsp/hover2.png .. tab-item:: Rename :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. image:: ../assets/lsp/rename2.gif .. tab-item:: Symbols :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. image:: ../assets/lsp/symbols-crop.png .. tab-item:: References :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. image:: ../assets/lsp/definition-peek.png .. tab-item:: Diagnostics :class-label: sd-rounded-2 sd-border-1 sd-my-2 sd-mx-2 sd-px-2 sd-py-1 .. image:: ../assets/lsp/diagnostics1.png .. TODO: here go the sponsors .. toctree:: :hidden: quickstart.rst .. toctree:: :maxdepth: 2 :caption: Components :hidden: features.rst editor_integration.rst options.rst fortls_changes.md .. toctree:: :maxdepth: 2 :caption: Get Involved :hidden: contributing.rst .. toctree:: :maxdepth: 2 :caption: Contact Us :hidden: contact.rst .. toctree:: :hidden: :caption: Development modules.rst .. grid:: 1 2 3 3 :margin: 4 4 0 0 :gutter: 1 .. grid-item-card:: :octicon:`desktop-download;5em;sd-text-primary` :link-type: any :link: Download :class-body: sd-text-center Download .. grid-item-card:: :material-sharp:`import_contacts;5em;sd-text-primary` :class-body: sd-text-center :link: features :link-type: doc Features .. grid-item-card:: :material-outlined:`settings;5em;sd-text-primary` :link-type: doc :link: options :class-body: sd-text-center Configuration Options .. grid-item-card:: :octicon:`browser;5em;sd-text-primary` :link-type: doc :link: editor_integration :class-body: sd-text-center Editor Integration .. grid-item-card:: :material-round:`mail;5em;sd-text-primary` :link-type: doc :link: contact :class-body: sd-text-center Contact Us .. grid-item-card:: :octicon:`git-pull-request;5em;sd-text-primary` :link-type: doc :link: contributing :class-body: sd-text-center Contribute .. Include native markdown into native rst .. include:: README.md :parser: myst_parser.sphinx_ fortran-language-server-2.13.0+dfsg.1/docs/make.bat000066400000000000000000000013751450400537300217570ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd fortran-language-server-2.13.0+dfsg.1/docs/modules.rst000066400000000000000000000002631450400537300225470ustar00rootroot00000000000000Documentation ========================== .. toctree:: :maxdepth: 4 fortls Indices and tables ================== * :ref:`genindex` * :ref:`modindex` .. * :ref:`search` fortran-language-server-2.13.0+dfsg.1/docs/options.rst000066400000000000000000000160071450400537300225750ustar00rootroot00000000000000Configuration options ===================== ``fortls`` can be configured through the command line interface and/or through a configuration file (by default named ``.fortls``). 
The options available from the command line and through the configuration file are identical and interchangeable. .. important:: Options defined in the configuration file have precedence over command line arguments. The following sections discuss the available settings in detail. .. _cmd_interface: Configuration using the command line ------------------------------------ .. argparse:: :module: fortls :func: cli :prog: fortls :nodefault: Configuration using a file -------------------------- A configuration file is a JSONC (JSON with comments) file that contains project specific settings for ``fortls``. By default, the Language Server will recognise 3 default names ``.fortlsrc``, ``.fortls.json`` and ``.fortls`` (in that order) under the ``root_path`` of the project, e.g. ``root_path/.fortlsrc``. A different configuration file name can be passed with the command line interface options ``--config`` e.g. ``fortls --config my_project.json``. The settings that can be specified in the configuration file are identical to the ones available through the command line interface having removed the leading ``--`` characters. For the command line interface see :ref:`cmd_interface`. Available options ################# All the ``fortls`` settings with their default arguments can be found below .. code-block:: json { "nthreads": 4, "notify_init": false, "incremental_sync": false, "sort_keywords": false, "disable_autoupdate": false, "debug_log": false, "source_dirs": ["./**"], "incl_suffixes": [], "excl_suffixes": [], "excl_paths": [], "autocomplete_no_prefix": false, "autocomplete_no_snippets": false, "autocomplete_name_only": false, "lowercase_intrinsics": false, "use_signature_help": false, "hover_signature": false, "hover_language": "fortran90", "max_line_length": -1, "max_comment_line_length": -1, "disable_diagnostics": false, "pp_suffixes": [], "include_dirs": [], "pp_defs": {}, "symbol_skip_mem": false, "enable_code_actions": false } Sources file parsing #################### source_dirs *********** .. code-block:: json { "source_dirs": ["./**", "/external/fortran/src"] } By default all directories under the current project will be recursively parsed for Fortran sources. Alternatively, one can define a series of directories for ``fortls`` to look for source files .. note:: glob fnmatch style patterns are allowed incl_suffixes ************* .. code-block:: json { "incl_suffixes": [".h", ".FYP"] } ``fortls`` will parse only files with ``incl_suffixes`` extensions found in ``source_dirs``. By default ``incl_suffixes`` are defined as .F .f .F03 .f03 .F05 .f05 .F08 .f08 .F18 .f18 .F77 .f77 .F90 .f90 .F95 .f95 .FOR .for .FPP .fpp. Additional source file extensions can be defined in ``incl_suffixes``. .. note:: The default file extensions cannot be overwritten. ``incl_suffixes`` will only append to the default extensions. excl_suffixes ************* .. code-block:: json { "excl_suffixes": ["_tmp.f90", "_hdf5.F90"] } If certain files or suffixes do not need to be parsed these can be excluded by deffining ``excl_suffixes`` excl_paths ********** Entire directories can be excluded from parsing by including them in ``excl_paths``. .. note:: glob fnmatch style patterns are allowed ``excl_paths`` uses glob patterns so if you want to exclude a directory and all its subdirectories from being parsed you should define it like so .. code-block:: json { "excl_paths": ["exclude_dir/**"] } Preprocessor ############ pp_suffixes *********** .. 
code-block:: json { "pp_suffixes" : [".h", ".F90", ".fpp"] } By default preprocessor definitions are parsed for all Fortran source files with uppercase extensions e.g. ``.F90``, ``.F``, ``.F08``, etc. However, the default behaviour can be overridden by defining ``pp_suffixes``. include_dirs ************ .. code-block:: json { "include_dirs": ["include", "preprocessor", "/usr/include"] } By default ``fortls`` will scan the project's directories for files with extensions ``PP_SUFFIXES`` to parse for **preprocessor definitions**. However, if the preprocessor files are external to the project, their locations can be specified via ``include_dirs``. .. note:: glob fnmatch style patterns are allowed .. warning:: Source files detected in ``include_dirs`` will not be parsed for Fortran objects unless they are also included in ``source_dirs``. pp_defs ******* .. code-block:: json { "pp_defs": { "HAVE_PETSC": "", "Mat": "type(tMat)" } } Additional **preprocessor definitions**, beyond those specified in files found in ``include_dirs``, can be defined in ``pp_defs``. .. note:: Definitions in ``pp_defs`` will override definitions from ``include_dirs`` Limitations *********** - Recursive substitution is not available e.g. .. code-block:: cpp #define VAR1 10 #define VAR2 VAR1 Debug Options (command line only) --------------------------------- Options for debugging the language server - ``--debug_filepath DEBUG_FILEPATH`` File path for language server tests - ``--debug_rootpath DEBUG_ROOTPATH`` Root path for language server tests - ``--debug_parser`` Test source code parser on specified file - ``--debug_hover`` Test `textDocument/hover` request for specified file and position - ``--debug_rename RENAME_STRING`` Test `textDocument/rename` request for specified file and position - ``--debug_actions`` Test `textDocument/codeAction` request for specified file and position - ``--debug_symbols`` Test `textDocument/documentSymbol` request for specified file - ``--debug_completion`` Test `textDocument/completion` request for specified file and position - ``--debug_signature`` Test `textDocument/signatureHelp` request for specified file and position - ``--debug_definition`` Test `textDocument/definition` request for specified file and position - ``--debug_references`` Test `textDocument/references` request for specified file and position - ``--debug_diagnostics`` Test diagnostic notifications for specified file - ``--debug_implementation`` Test `textDocument/implementation` request for specified file and position - ``--debug_workspace_symbols QUERY_STRING`` Test `workspace/symbol` request - ``--debug_line INTEGER`` Line position for language server tests (1-indexed) - ``--debug_char INTEGER`` Character position for language server tests (1-indexed) - ``--debug_full_result`` Print full result object instead of condensed version fortran-language-server-2.13.0+dfsg.1/docs/quickstart.rst000066400000000000000000000051261450400537300232740ustar00rootroot00000000000000Get Started ########### .. article-info:: :avatar: ../assets/f.svg :avatar-link: https://github.com/gnikit :author: `gnikit `__ :date: |today| :read-time: 1 min read :class-avatar: sd-animate-grow50-rot20 ``fortls`` is a tool known as a language server that interfaces with your code editor (VS Code, Vim, etc.) to provide features like code completion, code navigation, hover messages, and many more. Download ******** The project is available for download through the **PyPi** and **Anaconda** package managers .. tab-set:: .. tab-item:: PyPi ..
code-block:: sh pip install fortls For more information see `pypi/fortls`_ .. _pypi/fortls: https://pypi.python.org/pypi/fortls .. tab-item:: Anaconda .. code-block:: sh conda install -c conda-forge fortls For more installation instructions, see `conda-forge/fortls`_. .. _conda-forge/fortls: https://github.com/conda-forge/fortls-feedstock#about-fortls .. tab-item:: Source Alternatively, one can install the development version from **GitHub** via .. code-block:: sh pip install --user --upgrade git+git://github.com/gnikit/fortls .. warning:: It is **NOT** possible having ``fortls`` and ``fortran-language-server`` simultaneously installed, since they use the same binary name. If you are having trouble getting ``fortls`` to work try uninstalling ``fortran-language-server`` and reinstalling ``fortls``. Usage ***** To make full use of ``fortls`` in your workflow you need to - integrate it into your code editor, see: :doc:`editor_integration` - (Optional) configure any additional settings to ``fortls``, see: :doc:`options` Integration =========== Depending on the code editor used, different steps will have to be followed to integrate ``fortls``. Luckily, we support numerous code editors and have detailed instructions in the :doc:`editor_integration` section. .. card:: Example: VS Code Setting up ``fortls`` with `VS Code`_ is as simple as installing the `Modern Fortran`_ extension. .. _VS Code: https://code.visualstudio.com .. _Modern Fortran: https://marketplace.visualstudio.com/items?itemName=fortran-lang.linter-gfortran Configuration ============= The Language Server by default is configured with reasonable settings however, depending on the project additional settings might need to be configured, such as source file paths, or additional preprocessor definitions. Instructions on how to do this and much more can be found in the :doc:`options` section. 
fortran-language-server-2.13.0+dfsg.1/fortls.py000077500000000000000000000001641450400537300213030ustar00rootroot00000000000000#!/usr/bin/env python3 # file used for unit testing if __name__ == "__main__": import fortls fortls.main() fortran-language-server-2.13.0+dfsg.1/fortls/000077500000000000000000000000001450400537300207255ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/fortls/__init__.py000066400000000000000000000475111450400537300230460ustar00rootroot00000000000000from __future__ import print_function import json import os import pprint import sys from multiprocessing import freeze_support from .helper_functions import only_dirs, resolve_globs from .interface import cli from .jsonrpc import JSONRPC2Connection, ReadWriter, path_from_uri from .langserver import LangServer from .parse_fortran import FortranFile from .version import __version__ __all__ = ["__version__"] def error_exit(error_str: str): print(f"ERROR: {error_str}") sys.exit(-1) def main(): # freeze_support() args = cli(__name__).parse_args() if args.version: print(__version__) sys.exit(0) debug_server = ( args.debug_diagnostics or args.debug_symbols or args.debug_completion or args.debug_signature or args.debug_definition or args.debug_hover or args.debug_implementation or args.debug_references or args.debug_rename or args.debug_actions or args.debug_rootpath or args.debug_workspace_symbols ) if args.debug_parser: debug_server_parser(args) elif debug_server: debug_server_general(args, vars(args)) else: stdin, stdout = sys.stdin.buffer, sys.stdout.buffer LangServer( conn=JSONRPC2Connection(ReadWriter(stdin, stdout)), settings=vars(args), ).run() def debug_server_general(args, settings): """Outputs debug information about the server. Triggers with any option under the DEBUG group except debug parser. 
For the parser see `debug_server_parser` Parameters ---------- args : Namespace The arguments parsed from the `ArgumentParser` settings : dict Language server settings """ prb, pwb = os.pipe() tmpin = os.fdopen(prb, "rb") tmpout = os.fdopen(pwb, "wb") s = LangServer( conn=JSONRPC2Connection(ReadWriter(tmpin, tmpout)), settings=settings, ) # if args.debug_rootpath: dir_exists = os.path.isdir(args.debug_rootpath) if dir_exists is False: error_exit( "Specified 'debug_rootpath' does not exist or is not a directory" ) print('\nTesting "initialize" request:') print(' Root = "{}"'.format(args.debug_rootpath)) s.serve_initialize({"params": {"rootPath": args.debug_rootpath}}) if len(s.post_messages) == 0: print(" Successful!") else: print(" Successful with errors:") for message in s.post_messages: print(" {}".format(message[1])) # Print module directories print("\n Source directories:") for source_dir in s.source_dirs: print(" {}".format(source_dir)) # if args.debug_diagnostics: print('\nTesting "textDocument/publishDiagnostics" notification:') check_request_params(args, loc_needed=False) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) diag_results, _ = s.get_diagnostics(args.debug_filepath) if diag_results is not None: if args.debug_full_result: print(json.dumps(diag_results, indent=2)) else: sev_map = ["ERROR", "WARNING", "INFO"] if len(diag_results) == 0: print("\nNo errors or warnings") else: print("\nReported errors or warnings:") for diag in diag_results: sline = diag["range"]["start"]["line"] message = diag["message"] sev = sev_map[diag["severity"] - 1] print(' {:5d}:{} "{}"'.format(sline, sev, message)) # if args.debug_symbols: print('\nTesting "textDocument/documentSymbol" request:') check_request_params(args, loc_needed=False) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) symbol_results = s.serve_document_symbols( {"params": {"textDocument": {"uri": args.debug_filepath}}} ) if args.debug_full_result: print(json.dumps(symbol_results, indent=2)) else: for symbol in symbol_results: sline = symbol["location"]["range"]["start"]["line"] if "containerName" in symbol: parent = symbol["containerName"] else: parent = "null" print( " line {2:5d} symbol -> {1:3d}:{0:30} parent = {3}".format( symbol["name"], symbol["kind"], sline, parent ) ) # if args.debug_workspace_symbols is not None: print('\nTesting "workspace/symbol" request:') if args.debug_rootpath is None: error_exit("'debug_rootpath' not specified for debug request") symbol_results = s.serve_workspace_symbol( {"params": {"query": args.debug_workspace_symbols}} ) if args.debug_full_result: print(json.dumps(symbol_results, indent=2)) else: for symbol in symbol_results: path = path_from_uri(symbol["location"]["uri"]) sline = symbol["location"]["range"]["start"]["line"] if "containerName" in symbol: parent = symbol["containerName"] else: parent = "null" print( " {2}::{3:d} symbol -> {1:3d}:{0:30} parent = {4}".format( symbol["name"], symbol["kind"], os.path.relpath(path, args.debug_rootpath), sline, parent, ) ) # if args.debug_completion: print('\nTesting "textDocument/completion" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) completion_results = s.serve_autocomplete( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) if completion_results is None: print(" No results!") else: print(" Results:") if args.debug_full_result: 
print(json.dumps(completion_results, indent=2)) else: for obj in completion_results: print( " {}: {} -> {}".format( obj["kind"], obj["label"], obj["detail"] ) ) # if args.debug_signature: print('\nTesting "textDocument/signatureHelp" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) signature_results = s.serve_signature( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) if signature_results is None: print(" No Results!") else: print(" Results:") if args.debug_full_result: print(json.dumps(signature_results, indent=2)) else: active_param = signature_results.get("activeParameter", 0) print(" Active param = {}".format(active_param)) active_signature = signature_results.get("activeSignature", 0) print(" Active sig = {}".format(active_signature)) for i, signature in enumerate(signature_results["signatures"]): print(" {}".format(signature["label"])) for j, obj in enumerate(signature["parameters"]): if (i == active_signature) and (j == active_param): active_mark = "*" else: active_mark = " " arg_desc = obj.get("documentation") if arg_desc is not None: print( "{2} {0} :: {1}".format( arg_desc, obj["label"], active_mark ) ) else: print("{1} {0}".format(obj["label"], active_mark)) # if args.debug_definition or args.debug_implementation: if args.debug_definition: print('\nTesting "textDocument/definition" request:') elif args.debug_implementation: print('\nTesting "textDocument/implementation" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) if args.debug_definition: definition_results = s.serve_definition( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) elif args.debug_implementation: definition_results = s.serve_implementation( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) print(" Result:") if definition_results is None: print(" No result found!") else: if args.debug_full_result: print(json.dumps(definition_results, indent=2)) else: print(' URI = "{}"'.format(definition_results["uri"])) print( " Line = {}".format( definition_results["range"]["start"]["line"] + 1 ) ) print( " Char = {}".format( definition_results["range"]["start"]["character"] + 1 ) ) # if args.debug_hover: print('\nTesting "textDocument/hover" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) hover_results = s.serve_hover( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) print(" Result:") if hover_results is None: print(" No result found!") else: if args.debug_full_result: print(json.dumps(hover_results, indent=2)) else: contents = hover_results["contents"] print("=======") if isinstance(contents, dict): print(contents["value"]) else: print(contents) print("=======") # if args.debug_references: print('\nTesting "textDocument/references" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) ref_results = s.serve_references( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, } } ) print(" Result:") if ref_results is 
None: print(" No result found!") else: if args.debug_full_result: print(json.dumps(ref_results, indent=2)) else: print("=======") for result in ref_results: print( " {} ({}, {})".format( result["uri"], result["range"]["start"]["line"] + 1, result["range"]["start"]["character"] + 1, ) ) print("=======") # if args.debug_rename is not None: print('\nTesting "textDocument/rename" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) ref_results = s.serve_rename( { "params": { "textDocument": {"uri": args.debug_filepath}, "position": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, "newName": args.debug_rename, } } ) print(" Result:") if ref_results is None: print(" No changes found!") else: if args.debug_full_result: print(json.dumps(ref_results, indent=2)) else: print("=======") for uri, result in ref_results["changes"].items(): path = path_from_uri(uri) print('File: "{}"'.format(path)) file_obj = s.workspace.get(path) if file_obj is not None: file_contents = file_obj.contents_split for change in result: start_line = change["range"]["start"]["line"] end_line = change["range"]["end"]["line"] start_col = change["range"]["start"]["character"] end_col = change["range"]["end"]["character"] print(" {}, {}".format(start_line + 1, end_line + 1)) new_contents = [] for i in range(start_line, end_line + 1): line = file_contents[i] print(" - {}".format(line)) if i == start_line: new_contents.append( line[:start_col] + change["newText"] ) if i == end_line: new_contents[-1] += line[end_col:] for line in new_contents: print(" + {}".format(line)) print() else: print('Unknown file: "{}"'.format(path)) print("=======") # if args.debug_actions: pp = pprint.PrettyPrinter(indent=2, width=120) print('\nTesting "textDocument/getActions" request:') check_request_params(args) s.serve_onSave({"params": {"textDocument": {"uri": args.debug_filepath}}}) action_results = s.serve_codeActions( { "params": { "textDocument": {"uri": args.debug_filepath}, "range": { "start": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, "end": { "line": args.debug_line - 1, "character": args.debug_char - 1, }, }, } } ) if args.debug_full_result: print(json.dumps(action_results, indent=2)) else: for result in action_results: print( "Kind = '{}', Title = '{}'".format(result["kind"], result["title"]) ) for editUri, editChange in result["edit"]["changes"].items(): print("\nChange: URI = '{}'".format(editUri)) pp.pprint(editChange) print() tmpout.close() tmpin.close() def debug_server_parser(args): """Debug the parser of the Language Server Triggered by `--debug_parser` option. 
Parameters ---------- args : Namespace The arguments parsed from the `ArgumentParser` """ if args.debug_filepath is None: error_exit("'debug_filepath' not specified for parsing test") file_exists = os.path.isfile(args.debug_filepath) if file_exists is False: error_exit("Specified 'debug_filepath' does not exist") # Get preprocessor definitions from config file pp_suffixes = None pp_defs = {} include_dirs = set() if args.debug_rootpath: config_path = os.path.join(args.debug_rootpath, args.config) config_exists = os.path.isfile(config_path) if config_exists: try: with open(config_path, "r") as fhandle: config_dict = json.load(fhandle) pp_suffixes = config_dict.get("pp_suffixes", None) pp_defs = config_dict.get("pp_defs", {}) include_dirs = set() for path in config_dict.get("include_dirs", set()): include_dirs.update( only_dirs(resolve_globs(path, args.debug_rootpath)) ) if isinstance(pp_defs, list): pp_defs = {key: "" for key in pp_defs} except: print(f"Error while parsing '{args.config}' settings file") # print("\nTesting parser") print(' File = "{}"'.format(args.debug_filepath)) file_obj = FortranFile(args.debug_filepath, pp_suffixes) err_str, _ = file_obj.load_from_disk() if err_str: error_exit(f"Reading file failed: {err_str}") print(f" Detected format: {'fixed' if file_obj.fixed else 'free'}") print("\n=========\nParser Output\n=========\n") file_ast = file_obj.parse(debug=True, pp_defs=pp_defs, include_dirs=include_dirs) print("\n=========\nObject Tree\n=========\n") for obj in file_ast.get_scopes(): print("{}: {}".format(obj.get_type(), obj.FQSN)) print_children(obj) print("\n=========\nExportable Objects\n=========\n") for _, obj in file_ast.global_dict.items(): print("{}: {}".format(obj.get_type(), obj.FQSN)) def check_request_params(args, loc_needed=True): if args.debug_filepath is None: error_exit("'debug_filepath' not specified for debug request") file_exists = os.path.isfile(args.debug_filepath) if file_exists is False: error_exit("Specified 'debug_filepath' does not exist") print(' File = "{}"'.format(args.debug_filepath)) if loc_needed: if args.debug_line is None: error_exit("'debug_line' not specified for debug request") print(" Line = {}".format(args.debug_line)) if args.debug_char is None: error_exit("'debug_char' not specified for debug request") print(" Char = {}\n".format(args.debug_char)) def print_children(obj, indent=""): for child in obj.get_children(): print(" {}{}: {}".format(indent, child.get_type(), child.FQSN)) print_children(child, indent + " ") fortran-language-server-2.13.0+dfsg.1/fortls/constants.py000066400000000000000000000027401450400537300233160ustar00rootroot00000000000000from __future__ import annotations import logging from fortls.regex_patterns import FortranRegularExpressions log = logging.getLogger(__name__) # Global variables sort_keywords = True # Keyword identifiers KEYWORD_LIST = [ "pointer", "allocatable", "optional", "public", "private", "nopass", "target", "save", "parameter", "contiguous", "deferred", "dimension", "intent", "pass", "pure", "impure", "elemental", "recursive", "abstract", "external", ] KEYWORD_ID_DICT = {keyword: ind for (ind, keyword) in enumerate(KEYWORD_LIST)} # Type identifiers BASE_TYPE_ID = -1 MODULE_TYPE_ID = 1 SUBROUTINE_TYPE_ID = 2 FUNCTION_TYPE_ID = 3 CLASS_TYPE_ID = 4 INTERFACE_TYPE_ID = 5 VAR_TYPE_ID = 6 METH_TYPE_ID = 7 SUBMODULE_TYPE_ID = 8 BLOCK_TYPE_ID = 9 SELECT_TYPE_ID = 10 DO_TYPE_ID = 11 WHERE_TYPE_ID = 12 IF_TYPE_ID = 13 ASSOC_TYPE_ID = 14 ENUM_TYPE_ID = 15 class Severity: error = 1 warn = 2 info = 3 #: A 
string used to mark literals e.g. 10, 3.14, "words", etc. #: The description name chosen is non-ambiguous and cannot naturally #: occur in Fortran (with/out C preproc) code #: It is invalid syntax to define a type starting with numerics #: it cannot also be a comment that requires !, c, d #: and ^= (xor_eq) operator is invalid in Fortran C++ preproc FORTRAN_LITERAL = "0^=__LITERAL_INTERNAL_DUMMY_VAR_" # Fortran Regular Expressions dataclass variable, immutable FRegex = FortranRegularExpressions() fortran-language-server-2.13.0+dfsg.1/fortls/ftypes.py000066400000000000000000000071021450400537300226110ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass, field from typing import NamedTuple #: A single line range tuple Range = NamedTuple("Range", [("start", int), ("end", int)]) @dataclass class VarInfo: """Holds information about a Fortran VARIABLE""" var_type: str #: Type of variable e.g. ``INTEGER``, ``REAL``, etc. #: keywords associated with this variable e.g. SAVE, DIMENSION, etc. keywords: list[str] #: Keywords associated with variable var_names: list[str] #: Variable names var_kind: str = field(default=None) #: Kind of variable e.g. ``INTEGER*4`` etc. @dataclass class SelectInfo: """Holds information about a SELECT construct""" type: int #: Type of SELECT e.g. normal, select type, select kind, select rank binding: str #: Variable/Object being selected upon desc: str #: Description of select e.g. "TYPE", "CLASS", None @dataclass class ClassInfo: """Holds information about a Fortran CLASS""" name: str #: Class name parent: str #: Parent object of class e.g. ``TYPE, EXTENDS(scaled_vector) :: a`` keywords: list[str] #: Keywords associated with the class @dataclass class UseInfo: """Holds information about a Fortran USE statement""" mod_name: str #: Module name #: List of procedures, variables, interfaces, etc. imported via only only_list: set[str] #: A dictionary holding the new names after a rename operation rename_map: dict[str, str] @dataclass class GenProcDefInfo: """Holds information about a GENERIC PROCEDURE DEFINITION""" bound_name: str #: Procedure name pro_links: list[str] #: Procedure links vis_flag: int #: Visibility flag, public or private @dataclass class SmodInfo: """Holds information about Fortran SUBMODULES""" name: str #: Submodule name parent: str #: Submodule i.e. module, parent @dataclass class InterInfo: """Holds information about a Fortran INTERFACE""" name: str #: Interface name abstract: bool #: Whether or not the interface is abstract @dataclass class VisInfo: """Holds information about the VISIBILITY of a module's contents""" type: int #: Visibility type 0: PUBLIC 1: PRIVATE TODO: convert to boolean obj_names: list[str] #: Module variables, procedures, etc. 
with that visibility @dataclass class IncludeInfo: """Holds information about a Fortran INCLUDE statement""" line_number: int #: Line number of include path: str #: File path to include file: None # fortran_file #: fortran_file object scope_objs: list[str] #: A list of available scopes @dataclass class SubInfo: """Holds information about a Fortran SUBROUTINE""" name: str #: Procedure name args: str #: Argument list #: Keywords associated with procedure keywords: list[str] = field(default_factory=list) #: Whether or not this is a ``MODULE PROCEDURE`` mod_flag: bool = field(default=False) @dataclass class ResultSig: """Holds information about the RESULT section of a Fortran FUNCTION""" name: str = field(default=None) #: Variable name of result type: str = field(default=None) #: Variable type of result #: Keywords associated with the result variable, can append without init keywords: list[str] = field(default_factory=list) @dataclass class FunSig(SubInfo): """Holds information about a Fortran FUNCTION""" #: Function's result with default ``result.name = name`` result: ResultSig = field(default_factory=ResultSig) def __post_init__(self): if not self.result.name: self.result.name = self.name fortran-language-server-2.13.0+dfsg.1/fortls/helper_functions.py000066400000000000000000000366261450400537300246630ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from fortls.constants import KEYWORD_ID_DICT, KEYWORD_LIST, FRegex, sort_keywords from fortls.ftypes import Range def expand_name(line: str, char_pos: int) -> str: """Get full word containing given cursor position Parameters ---------- line : str Text line char_pos : int Column position along the line Returns ------- str Word regex match for the input column """ # The order here is important. # WORD will capture substrings in logical and strings regexs = [ FRegex.LOGICAL, FRegex.SQ_STRING, FRegex.DQ_STRING, FRegex.WORD, FRegex.NUMBER, ] for r in regexs: for num_match in r.finditer(line): if num_match.start(0) <= char_pos <= num_match.end(0): return num_match.group(0) return "" def detect_fixed_format(file_lines: list[str]) -> bool: """Detect fixed/free format by looking for characters in label columns and variable declarations before column 6. Treat intersection format files as free format. Parameters ---------- file_lines : list[str] List of consecutive file lines Returns ------- bool True if file_lines are of Fixed Fortran style Examples -------- >>> detect_fixed_format([' free format']) False >>> detect_fixed_format([' INTEGER, PARAMETER :: N = 10']) False >>> detect_fixed_format(['C Fixed format']) True Lines wih ampersands are not fixed format >>> detect_fixed_format(['trailing line & ! 
comment']) False """ for line in file_lines: if FRegex.FREE_FORMAT_TEST.match(line): return False tmp_match = FRegex.VAR.match(line) if tmp_match and tmp_match.start(1) < 6: return False # Trailing ampersand indicates free or intersection format if not FRegex.FIXED_COMMENT.match(line): line_end = line.split("!")[0].strip() if len(line_end) > 0 and line_end.endswith("&"): return False return True def strip_line_label(line: str) -> tuple[str, str | None]: """Strip leading numeric line label Parameters ---------- line : str Text line Returns ------- tuple[str, str | None] Output string, Line label returns None if no line label present """ match = FRegex.LINE_LABEL.match(line) if match is None: return line, None else: line_label = match.group(1) out_str = line[: match.start(1)] + " " * len(line_label) + line[match.end(1) :] return out_str, line_label def strip_strings(in_line: str, maintain_len: bool = False) -> str: """Strips string literals from code line Parameters ---------- in_line : str Text string maintain_len : bool, optional Maintain the len(in_line) in the output string, by default False Returns ------- str Stripped string """ def repl_sq(m): return "'{}'".format(" " * (len(m.group()) - 2)) def repl_dq(m): return '"{}"'.format(" " * (len(m.group()) - 2)) if maintain_len: out_line = FRegex.SQ_STRING.sub(repl_sq, in_line) out_line = FRegex.DQ_STRING.sub(repl_dq, out_line) else: out_line = FRegex.SQ_STRING.sub("", in_line) out_line = FRegex.DQ_STRING.sub("", out_line) return out_line def separate_def_list(test_str: str) -> list[str] | None: """Separate definition lists, skipping parenthesis and bracket groups Parameters ---------- test_str : str Text string Returns ------- list[str] | None [description] Examples -------- >>> separate_def_list('var1, var2, var3') ['var1', 'var2', 'var3'] >>> separate_def_list('var, init_var(3) = [1,2,3], array(3,3)') ['var', 'init_var(3) = [1,2,3]', 'array(3,3)'] """ stripped_str = strip_strings(test_str) paren_count = 0 def_list: list[str] = [] curr_str = "" for char in stripped_str: if char in ("(", "["): paren_count += 1 elif char in (")", "]"): paren_count -= 1 elif (char == ",") and (paren_count == 0): curr_str = curr_str.strip() if curr_str != "": def_list.append(curr_str) curr_str = "" elif (curr_str == "") and (len(def_list) == 0): return None continue curr_str += char curr_str = curr_str.strip() if curr_str != "": def_list.append(curr_str) return def_list def find_word_in_line(line: str, word: str) -> Range: """Find Fortran word in line Parameters ---------- line : str Text line word : str word to find in line Returns ------- Range start and end positions (indices) of the word if not found it returns -1, len(word) -1 """ i = -1 for poss_name in FRegex.WORD.finditer(line): if poss_name.group() == word: i = poss_name.start() break # TODO: if i == -1: return None makes more sense return Range(i, i + len(word)) def find_paren_match(string: str) -> int: """Find matching closing parenthesis **from an already open parenthesis scope** by forward search of the string, returns -1 if no match is found Parameters ---------- string : str Input string Returns ------- int The index of the matching ``)`` character in the string Examples -------- >>> find_paren_match('a, b)') 4 Multiple parenthesis that are closed >>> find_paren_match('a, (b, c), d)') 12 If the outermost parenthesis is not closed function returns -1 >>> find_paren_match('a, (b, (c, d)') -1 """ paren_count = 1 ind = -1 for (i, char) in enumerate(string): if char == "(": paren_count += 1 elif char 
== ")": paren_count -= 1 if paren_count == 0: return i return ind def get_line_prefix( pre_lines: list[str], curr_line: str, col: int, qs: bool = True ) -> str: """Get code line prefix from current line and preceding continuation lines Parameters ---------- pre_lines : list for multiline cases get all the previous, relevant lines curr_line : str the current line col : int column index of the current line qs : bool, optional strip quotes i.e. string literals from ``curr_line`` and ``pre_lines``. Need this disable when hovering over string literals, by default True Returns ------- str part of the line including any relevant line continuations before ``col`` Examples -------- >>> get_line_prefix([''], '#pragma once', 0) is None True """ if (curr_line is None) or (col > len(curr_line)) or (curr_line.startswith("#")): return None prepend_string = "".join(pre_lines) curr_line = prepend_string + curr_line col += len(prepend_string) line_prefix = curr_line[:col].lower() # Ignore string literals if qs: if (line_prefix.find("'") > -1) or (line_prefix.find('"') > -1): sq_count = 0 dq_count = 0 for char in line_prefix: if (char == "'") and (dq_count % 2 == 0): sq_count += 1 elif (char == '"') and (sq_count % 2 == 0): dq_count += 1 if (dq_count % 2 == 1) or (sq_count % 2 == 1): return None return line_prefix def resolve_globs(glob_path: str, root_path: str = None) -> list[str]: """Resolve paths (absolute and relative) and glob patterns while nonexistent paths are ignored Parameters ---------- glob_path : str Path containing the glob pattern follows ``fnmatch`` glob pattern, can include relative paths, etc. see fnmatch: https://docs.python.org/3/library/fnmatch.html#module-fnmatch root_path : str, optional root path to start glob search. If left empty the root_path will be extracted from the glob_path, by default None Returns ------- list[str] Expanded glob patterns with absolute paths. Absolute paths are used to resolve any potential ambiguity Examples -------- Relative to a root path >>> import os, pathlib >>> resolve_globs('test', os.getcwd()) == [str(pathlib.Path(os.getcwd()) / 'test')] True Absolute path resolution >>> resolve_globs('test') == [str(pathlib.Path(os.getcwd()) / 'test')] True """ # Resolve absolute paths i.e. 
not in our root_path if os.path.isabs(glob_path) or not root_path: p = Path(glob_path).resolve() root = p.anchor # drive letter + root path rel = str(p.relative_to(root)) # contains glob pattern return [str(p.resolve()) for p in Path(root).glob(rel)] else: return [str(p.resolve()) for p in Path(root_path).resolve().glob(glob_path)] def only_dirs(paths: list[str]) -> list[str]: """From a list of strings returns only paths that are directories Parameters ---------- paths : list[str] A list containing the files and directories Returns ------- list[str] A list containing only valid directories Raises ------ FileNotFoundError A list containing all the non existing directories Examples -------- >>> only_dirs(['./test/', './test/test_source/', './test/test_source/test.f90']) ['./test/', './test/test_source/'] >>> only_dirs(['/fake/dir/a', '/fake/dir/b', '/fake/dir/c']) Traceback (most recent call last): FileNotFoundError: /fake/dir/a /fake/dir/b /fake/dir/c """ dirs: list[str] = [] errs: list[str] = [] for p in paths: if os.path.isdir(p): dirs.append(p) elif os.path.isfile(p): continue else: errs.append(p) if errs: raise FileNotFoundError("\n".join(errs)) return dirs def set_keyword_ordering(sorted): global sort_keywords sort_keywords = sorted def map_keywords(keywords: list[str]): mapped_keywords = [] keyword_info = {} for keyword in keywords: keyword_prefix = keyword.split("(")[0].lower().strip() keyword_ind = KEYWORD_ID_DICT.get(keyword_prefix) # keyword_ind can be 0 which if 0: evaluates to False if keyword_ind is not None: mapped_keywords.append(keyword_ind) if keyword_prefix in ("intent", "dimension", "pass"): keyword_substring = get_paren_substring(keyword) if keyword_substring is not None: keyword_info[keyword_prefix] = keyword_substring if sort_keywords: mapped_keywords.sort() return mapped_keywords, keyword_info def get_keywords(keywords: list, keyword_info: dict = {}): keyword_strings = [] for keyword_id in keywords: string_rep = KEYWORD_LIST[keyword_id] addl_info = keyword_info.get(string_rep) string_rep = string_rep.upper() if addl_info is not None: string_rep += f"({addl_info})" keyword_strings.append(string_rep) return keyword_strings def parenthetic_contents(string: str): """Generate parenthesized contents in string as pairs (contents, start-position, level). Examples -------- >>> list(parenthetic_contents('character*(10*size(val(1), 2)) :: name')) [('1', 22, 2), ('val(1), 2', 18, 1), ('10*size(val(1), 2)', 10, 0)] """ stack = [] for i, c in enumerate(string): if c == "(": stack.append(i) elif c == ")" and stack: start = stack.pop() yield (string[start + 1 : i], start, len(stack)) def get_paren_substring(string: str) -> str | None: """Get the contents enclosed by the first pair of parenthesis Parameters ---------- string : str A string Returns ------- str | None The part of the string enclosed in parenthesis e.g. or None Examples -------- >>> get_paren_substring('some line(a, b, (c, d))') 'a, b, (c, d)' If the line has incomplete parenthesis however, ``None`` is returned >>> get_paren_substring('some line(a, b') is None True """ i1 = string.find("(") i2 = string.rfind(")") if -1 < i1 < i2: return string[i1 + 1 : i2] else: return None def get_paren_level(line: str) -> tuple[str, list[Range]]: """Get sub-string corresponding to a single parenthesis level, via backward search up through the line. 
Parameters ---------- line : str Document line Returns ------- tuple[str, list[Range]] Arguments as a string and a list of Ranges for the arguments against ``line`` Examples -------- >>> get_paren_level('CALL sub1(arg1,arg2') ('arg1,arg2', [Range(start=10, end=19)]) If the range is interrupted by parenthesis, another Range variable is used to mark the ``start`` and ``end`` of the argument >>> get_paren_level('CALL sub1(arg1(i),arg2') ('arg1,arg2', [Range(start=10, end=14), Range(start=17, end=22)]) >>> get_paren_level('') ('', [Range(start=0, end=0)]) """ if line == "": return "", [Range(0, 0)] level = 0 in_string = False string_char = "" i1 = len(line) sections: list[Range] = [] for i in range(len(line) - 1, -1, -1): char = line[i] if in_string: if char == string_char: in_string = False continue if char in ("(", "["): level -= 1 if level == 0: i1 = i elif level < 0: sections.append(Range(i + 1, i1)) break elif char in (")", "]"): level += 1 if level == 1: sections.append(Range(i + 1, i1)) elif char in ("'", '"'): in_string = True string_char = char if level == 0: sections.append(Range(i, i1)) sections.reverse() out_string = "" for section in sections: out_string += line[section.start : section.end] return out_string, sections def get_var_stack(line: str) -> list[str]: """Get user-defined type field sequence terminating the given line Parameters ---------- line : str Document line Returns ------- list[str] list of objects split by ``%`` Examples -------- >>> get_var_stack('myvar%foo%bar') ['myvar', 'foo', 'bar'] >>> get_var_stack('myarray(i)%foo%bar') ['myarray', 'foo', 'bar'] In this case it will operate at the end of the string i.e. ``'this%foo'`` >>> get_var_stack('CALL self%method(this%foo') ['this', 'foo'] >>> get_var_stack('') [''] """ if len(line) == 0: return [""] final_var, sections = get_paren_level(line) if final_var == "": return [""] # Continuation of variable after paren requires '%' character iLast = 0 for (i, section) in enumerate(sections): if not line[section.start : section.end].startswith("%"): iLast = i final_var = "" for section in sections[iLast:]: final_var += line[section.start : section.end] if final_var is not None: final_op_split: list[str] = FRegex.OBJBREAK.split(final_var) return final_op_split[-1].split("%") else: return None fortran-language-server-2.13.0+dfsg.1/fortls/interface.py000066400000000000000000000306511450400537300232440ustar00rootroot00000000000000from __future__ import annotations import argparse import json import sys class SetAction(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, set(values)) def cli(name: str = "fortls") -> argparse.ArgumentParser: """Parses the command line arguments to the Language Server Returns ------- argparse.ArgumentParser command line arguments """ parser = argparse.ArgumentParser( description="fortls - Fortran Language Server", prog=name, usage="fortls [options] [debug options]", formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=60), epilog=( "All options starting with '--' can also be set in a configuration file, by" " default named '.fortlsrc', '.fortls.json' or '.fortls'" " (other names/paths can specified via -c or" " --config). 
For more details see our documentation:" " https://gnikit.github.io/fortls/options.html#available-options" ), ) # General options ---------------------------------------------------------- parser.add_argument( "-v", "--version", action="store_true", help="Print server version number and exit", ) parser.add_argument( "-c", "--config", type=str, default=".fortlsrc", help=( "Configuration options file (default file name: %(default)s, other" " default supported names: .fortls.json, .fortls)" ), ) parser.add_argument( "-n", "--nthreads", type=int, default=4, metavar="INTEGER", help=( "Number of threads to use during workspace initialization (default:" " %(default)s)" ), ) parser.add_argument( "--notify_init", action="store_true", help="Send notification message when workspace initialization is complete", ) parser.add_argument( "--incremental_sync", action="store_true", help="Use incremental document synchronization (beta)", ) parser.add_argument( "--sort_keywords", action="store_true", help=( "Display variable keywords information, function/subroutine definitions," " etc. in a consistent (sorted) manner default: no sorting, display code" " as is)" ), ) parser.add_argument( "--disable_autoupdate", action="store_true", help=( "fortls automatically checks PyPi for newer version and installs them." "Use this option to disable the autoupdate feature." ), ) # XXX: Deprecated, argument not attached to anything. Remove parser.add_argument( "--preserve_keyword_order", action="store_true", help="DEPRECATED, this is now the default. To sort use sort_keywords", ) parser.add_argument( "--debug_log", action="store_true", help="Generate debug log in project root folder", ) parser.add_argument( "--debug_help", action="help", help="Display options for debugging fortls" ) # File parsing options ----------------------------------------------------- group = parser.add_argument_group("Sources file parsing options") group.add_argument( "--source_dirs", type=str, nargs="*", default=set(), action=SetAction, metavar="DIRS", help="Folders containing source files (default: %(default)s)", ) group.add_argument( "--incl_suffixes", type=str, nargs="*", default=set(), action=SetAction, metavar="SUFFIXES", help=( "Consider additional file extensions to the default (default: " "F,F77,F90,F95,F03,F08,FOR,FPP (lower & upper casing))" ), ) group.add_argument( "--excl_suffixes", type=str, nargs="*", default=set(), action=SetAction, metavar="SUFFIXES", help="Source file extensions to be excluded (default: %(default)s)", ) group.add_argument( "--excl_paths", type=str, nargs="*", default=set(), action=SetAction, metavar="DIRS", help="Folders to exclude from parsing", ) # Autocomplete options ----------------------------------------------------- group = parser.add_argument_group("Autocomplete options") group.add_argument( "--autocomplete_no_prefix", action="store_true", help="Do not filter autocomplete results by variable prefix", ) group.add_argument( "--autocomplete_no_snippets", action="store_true", help="Do not use snippets with place holders in autocomplete results", ) group.add_argument( "--autocomplete_name_only", action="store_true", help="Complete only the name of procedures and not the parameters", ) group.add_argument( "--lowercase_intrinsics", action="store_true", help="Use lowercase for intrinsics and keywords in autocomplete requests", ) group.add_argument( "--use_signature_help", action="store_true", help=( "Use signature help instead of subroutine/function snippets. 
This" " effectively sets --autocomplete_no_snippets" ), ) # Hover options ------------------------------------------------------------ group = parser.add_argument_group("Hover options") group.add_argument( "--variable_hover", action="store_true", help=( "DEPRECATED: This option is always on. Show hover information for variables" ), ) group.add_argument( "--hover_signature", action="store_true", help="Show signature information in hover for arguments ", ) group.add_argument( "--hover_language", type=str, default="fortran90", help=( "Language used for responses to hover requests a VSCode language id" " (default: %(default)s)" ), ) # Diagnostic options ------------------------------------------------------- group = parser.add_argument_group("Diagnostic options (error swigles)") group.add_argument( "--max_line_length", type=int, default=-1, metavar="INTEGER", help="Maximum line length (default: %(default)s)", ) group.add_argument( "--max_comment_line_length", type=int, default=-1, metavar="INTEGER", help="Maximum comment line length (default: %(default)s)", ) group.add_argument( "--disable_diagnostics", action="store_true", help="Disable diagnostics" ) # Preprocessor options ----------------------------------------------------- group = parser.add_argument_group("Preprocessor options") group.add_argument( "--pp_suffixes", type=str, nargs="*", metavar="SUFFIXES", help=( "File extensions to be parsed ONLY for preprocessor commands " "(default: all uppercase source file suffixes)" ), ) group.add_argument( "--include_dirs", # "--pp_include_dirs", # TODO: make main type=str, nargs="*", default=set(), action=SetAction, metavar="DIRS", help="Folders containing preprocessor files with extensions PP_SUFFIXES.", ) group.add_argument( "--pp_defs", type=json.loads, default={}, help=( "A dictionary with additional preprocessor definitions. " "Preprocessor definitions are normally included via INCLUDE_DIRS" ), ) # Symbols options ---------------------------------------------------------- group = parser.add_argument_group("Symbols options") group.add_argument( "--symbol_skip_mem", action="store_true", help="Do not include type members in document symbol results", ) # Code Actions options ----------------------------------------------------- group = parser.add_argument_group("CodeActions options [limited]") group.add_argument( "--enable_code_actions", action="store_true", help="Enable experimental code actions (default: false)", ) # Debug # By default debug arguments are hidden _debug_commandline_args(parser) return parser # TODO: make this return a parser def _debug_commandline_args(parser: argparse.ArgumentParser) -> None: """Parse the debug arguments if any are present. if none are present the arguments are suppressed in the help menu Parameters ---------- parser : argparse.ArgumentParser an argument parser Returns ------- None Operates and updates the parser """ # Only show debug options if an argument starting with --debug_ was input. # if suppressed the option will be hidden from the help menu. 
HIDE_DEBUG = True if any("--debug_" in arg for arg in sys.argv): HIDE_DEBUG = False def hide_opt(help: str) -> str: if not HIDE_DEBUG: return help else: return argparse.SUPPRESS group = parser.add_argument_group( hide_opt("DEBUG"), hide_opt("Options for debugging language server") ) group.add_argument( "--debug_filepath", type=str, help=hide_opt("File path for language server tests"), ) group.add_argument( "--debug_rootpath", type=str, help=hide_opt("Root path for language server tests"), ) group.add_argument( "--debug_parser", action="store_true", help=hide_opt("Test source code parser on specified file"), ) group.add_argument( "--debug_hover", action="store_true", help=hide_opt( "Test `textDocument/hover` request for specified file and position" ), ) group.add_argument( "--debug_rename", type=str, metavar="RENAME_STRING", help=hide_opt( "Test `textDocument/rename` request for specified file and position" ), ) group.add_argument( "--debug_actions", action="store_true", help=hide_opt( "Test `textDocument/codeAction` request for specified file and position" ), ) group.add_argument( "--debug_symbols", action="store_true", help=hide_opt("Test `textDocument/documentSymbol` request for specified file"), ) group.add_argument( "--debug_completion", action="store_true", help=hide_opt( "Test `textDocument/completion` request for specified file and position" ), ) group.add_argument( "--debug_signature", action="store_true", help=hide_opt( "Test `textDocument/signatureHelp` request for specified file and position" ), ) group.add_argument( "--debug_definition", action="store_true", help=hide_opt( "Test `textDocument/definition` request for specified file and position" ), ) group.add_argument( "--debug_references", action="store_true", help=hide_opt( "Test `textDocument/references` request for specified file and position" ), ) group.add_argument( "--debug_diagnostics", action="store_true", help=hide_opt("Test diagnostic notifications for specified file"), ) group.add_argument( "--debug_implementation", action="store_true", help=hide_opt( "Test `textDocument/implementation` request for specified file and position" ), ) group.add_argument( "--debug_workspace_symbols", type=str, metavar="QUERY_STRING", help=hide_opt("Test `workspace/symbol` request"), ) group.add_argument( "--debug_line", type=int, metavar="INTEGER", help=hide_opt("Line position for language server tests (1-indexed)"), ) group.add_argument( "--debug_char", type=int, metavar="INTEGER", help=hide_opt("Character position for language server tests (1-indexed)"), ) group.add_argument( "--debug_full_result", action="store_true", help=hide_opt("Print full result object instead of condensed version"), ) fortran-language-server-2.13.0+dfsg.1/fortls/intrinsic_funs.json000066400000000000000000001261741450400537300246700ustar00rootroot00000000000000{ "ABORT": { "doc": "ABORT causes immediate termination of the program.", "type": 2 }, "ABS": { "args": "A", "doc": "ABS(A) computes the absolute value of A.", "type": 3 }, "ACCESS": { "args": "NAME,MODE", "doc": "ACCESS(NAME,MODE) checks whether the file NAME exists, is readable, writable or executable.", "type": 3 }, "ACHAR": { "args": "I,KIND=kind", "doc": "ACHAR(I,KIND=kind) returns the character located at position I in the ASCII collating sequence.", "type": 3 }, "ACOS": { "args": "X", "doc": "ACOS(X) computes the arccosine of X (inverse of COS(X)).", "type": 3 }, "ACOSD": { "args": "X", "doc": "ACOSD(X) computes the arccosine of X in degrees (inverse of COSD(X).", "type": 3 }, "ACOSH": { "args": 
"X", "doc": "ACOSH(X) computes the inverse hyperbolic cosine of X.", "type": 3 }, "ADJUSTL": { "args": "STRING", "doc": "ADJUSTL(STRING) will left adjust a string by removing leading spaces.", "type": 3 }, "ADJUSTR": { "args": "STRING", "doc": "ADJUSTR(STRING) will right adjust a string by removing trailing spaces.", "type": 3 }, "AIMAG": { "args": "Z", "doc": "AIMAG(Z) yields the imaginary part of complex argument Z.", "type": 3 }, "AINT": { "args": "A,KIND=kind", "doc": "AINT(A,KIND=kind) truncates its argument to a whole number.", "type": 3 }, "ALARM": { "args": "SECONDS,HANDLER,STATUS=status", "doc": "ALARM(SECONDS,HANDLER,STATUS=status) causes external subroutine HANDLER to be executed after a delay of SECONDS by using alarm(2) to set up a signal and signal(2) to catch it. If STATUS is supplied, it will be returned with the number of seconds remaining until any previously scheduled alarm was due to be delivered, or zero if there was no previously scheduled alarm.", "type": 2 }, "ALL": { "args": "MASK,DIM=dim", "doc": "ALL(MASK,DIM=dim) determines if all the values are true in MASK in the array along dimension DIM.", "type": 3 }, "ALLOCATED": { "args": "A", "doc": "ALLOCATED(A) check the allocation status of A.", "type": 3 }, "ANINT": { "args": "A,KIND=kind", "doc": "ANINT(A,KIND=kind) rounds its argument to the nearest whole number.", "type": 3 }, "ANY": { "args": "MASK,DIM=dim", "doc": "ANY(MASK,DIM=dim) determines if any of the values are true in MASK in the array along dimension DIM.", "type": 3 }, "ASIN": { "args": "X", "doc": "ASIN(X) computes the arcsine of X (inverse of SIN(X)).", "type": 3 }, "ASIND": { "args": "X", "doc": "ASIND(X) computes the arcsine of its X in degrees (inverse of SIND(X)).", "type": 3 }, "ASINH": { "args": "X", "doc": "ASINH(X) computes the inverse hyperbolic sine of X.", "type": 3 }, "ASSOCIATED": { "args": "POINTER,TARGET=target", "doc": "ASSOCIATED(POINTER,TARGET=target) determines the status of the pointer POINTER or if POINTER is associated with the target TARGET.", "type": 3 }, "ATAN": { "args": "X", "doc": "ATAN(X) computes the arctangent of X (inverse of TAN(X)).", "type": 3 }, "ATAND": { "args": "X", "doc": "ATAND(X) computes the arctangent of X in degrees (inverse of TAND).", "type": 3 }, "ATAN2": { "args": "Y,X", "doc": "ATAN2(Y,X) computes the principal value of the argument function of the complex number X + i Y.", "type": 3 }, "ATAN2D": { "args": "Y,X", "doc": "ATAN2D(Y,X) computes the principal value of the argument function of the complex number X + i Y in degrees.", "type": 3 }, "ATANH": { "args": "X", "doc": "ATANH(X) computes the inverse hyperbolic tangent of X.", "type": 3 }, "ATOMIC_ADD": { "args": "ATOM,VALUE", "doc": "ATOMIC_ADD(ATOM,VALUE) atomically adds the value of VALUE to the variable ATOM.", "type": 2 }, "ATOMIC_AND": { "args": "ATOM,VALUE", "doc": "ATOMIC_AND(ATOM,VALUE) atomically defines ATOM with the bitwise AND between the values of ATOM and VALUE.", "type": 2 }, "ATOMIC_CAS": { "args": "ATOM,OLD,COMPARE,NEW,STAT=stat", "doc": "ATOMIC_CAS compares the variable ATOM with the value of COMPARE; if the value is the same, ATOM is set to the value of NEW. 
Additionally, OLD is set to the value of ATOM that was used for the comparison.", "type": 2 }, "ATOMIC_DEFINE": { "args": "ATOM,VALUE,STAT=stat", "doc": "ATOMIC_DEFINE(ATOM,VALUE) defines the variable ATOM with the value VALUE atomically.", "type": 2 }, "ATOMIC_FETCH_ADD": { "args": "ATOM,VALUE,OLD,STAT=stat", "doc": "ATOMIC_FETCH_ADD(ATOM,VALUE,OLD) atomically stores the value of ATOM in OLD and adds the value of VALUE to the variable ATOM.", "type": 2 }, "ATOMIC_FETCH_AND": { "args": "ATOM,VALUE,OLD,STAT=stat", "doc": "ATOMIC_AND(ATOM,VALUE) atomically stores the value of ATOM in OLD and defines ATOM with the bitwise AND between the values of ATOM and VALUE.", "type": 2 }, "ATOMIC_FETCH_OR": { "args": "ATOM,VALUE,OLD,STAT=stat", "doc": "ATOMIC_OR(ATOM,VALUE) atomically stores the value of ATOM in OLD and defines ATOM with the bitwise OR between the values of ATOM and VALUE.", "type": 2 }, "ATOMIC_FETCH_XOR": { "args": "ATOM,VALUE,OLD,STAT=stat", "doc": "ATOMIC_XOR(ATOM,VALUE) atomically stores the value of ATOM in OLD and defines ATOM with the bitwise XOR between the values of ATOM and VALUE.", "type": 2 }, "ATOMIC_OR": { "args": "ATOM,VALUE,STAT=stat", "doc": "ATOMIC_OR(ATOM,VALUE) atomically defines ATOM with the bitwise AND between the values of ATOM and VALUE.", "type": 2 }, "ATOMIC_REF": { "args": "ATOM,VALUE,STAT=stat", "doc": "ATOMIC_DEFINE(ATOM,VALUE) atomically assigns the value of the variable ATOM to VALUE.", "type": 2 }, "ATOMIC_XOR": { "args": "ATOM,VALUE,STAT=stat", "doc": "ATOMIC_AND(ATOM,VALUE) atomically defines ATOM with the bitwise XOR between the values of ATOM and VALUE.", "type": 2 }, "BACKTRACE": { "args": "", "doc": "BACKTRACE shows a backtrace at an arbitrary place in user code. Program execution continues normally afterwards. The backtrace information is printed to the unit corresponding to ERROR_UNIT in ISO_FORTRAN_ENV.", "type": 2 }, "BESSEL_J0": { "args": "X", "doc": "BESSEL_J0(X) computes the Bessel function of the first kind of order 0 of X.", "type": 3 }, "BESSEL_J1": { "args": "X", "doc": "BESSEL_J1(X) computes the Bessel function of the first kind of order 1 of X.", "type": 3 }, "BESSEL_JN": { "args": "N,X", "doc": "BESSEL_JN(N,X) computes the Bessel function of the first kind of order N of X.", "type": 3 }, "BESSEL_Y0": { "args": "X", "doc": "BESSEL_Y0(X) computes the Bessel function of the second kind of order 0 of X.", "type": 3 }, "BESSEL_Y1": { "args": "X", "doc": "BESSEL_Y1(X) computes the Bessel function of the second kind of order 1 of X.", "type": 3 }, "BESSEL_YN": { "args": "N,X", "doc": "BESSEL_YN(N,X) computes the Bessel function of the second kind of order N of X.", "type": 3 }, "BGE": { "args": "I,J", "doc": "BGE(I,J) determines whether an integral is a bitwise greater than or equal to another.", "type": 3 }, "BGT": { "args": "I,J", "doc": "BGT(I,J) determines whether an integral is a bitwise greater than another.", "type": 3 }, "BIT_SIZE": { "args": "I", "doc": "BIT_SIZE(I) returns the number of bits represented by the type of I", "type": 3 }, "BLE": { "args": "I,J", "doc": "BLE(I,J) determines whether an integral is a bitwise less than or equal to another.", "type": 3 }, "BLT": { "args": "I,J", "doc": "BLT(I,J) determines whether an integral is a bitwise less than another.", "type": 3 }, "BTEST": { "args": "I,J", "doc": "BTEST(I,POS) returns logical .TRUE. 
if the bit at POS in I is set.", "type": 3 }, "CEILING": { "args": "A,KIND=kind", "doc": "CEILING(A,KIND=kind) returns the least integer greater than or equal to A.", "type": 3 }, "CHAR": { "args": "I,KIND=kind", "doc": "CHAR(I,KIND=kind) returns the character represented by the integer I.", "type": 3 }, "CHDIR": { "args": "NAME,STATUS=status", "doc": "CHDIR(NAME,STATUS=status) change current working directory to a specified path.", "type": 2 }, "CHMOD": { "args": "NAME,MODE,STATUS=status", "doc": "CHMOD(NAME,MODE,STATUS=status) changes the permissions of a file.", "type": 2 }, "CMPLX": { "args": "X,Y=y,KIND=kind", "doc": "CMPLX(X,Y=y,KIND=kind) returns a complex number where X is converted to the real component.", "type": 3 }, "CO_BROADCAST": { "args": "A,SOURCE_IMAGE,STAT=stat,ERRMSG=errmsg", "doc": "CO_BROADCAST(A,SOURCE_IMAGE,STAT=stat,ERRMSG=errmsg) copies the value of argument A on the image with image index SOURCE_IMAGE to all images in the current team.", "type": 2 }, "CO_MAX": { "args": "A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg", "doc": "CO_MAX(A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg) determines element-wise the maximal value of A on all images of the current team.", "type": 2 }, "CO_MIN": { "args": "A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg", "doc": "CO_MIN(A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg) determines element-wise the minimal value of A on all images of the current team.", "type": 2 }, "CO_REDUCE": { "args": "A,OPERATION,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg", "doc": "CO_REDUCE(A,OPERATION,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg) determines element-wise the reduction of the value of A on all images of the current team.", "type": 2 }, "CO_SUM": { "args": "A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg", "doc": "CO_SUM(A,RESULT_IMAGE=result_image,STAT=stat,ERRMSG=errmsg) sums up the values of each element of A on all images of the current team.", "type": 2 }, "COMMAND_ARGUMENT_COUNT": { "args": "X", "doc": "COMMAND_ARGUMENT_COUNT() returns the number of arguments passed on the command line when the containing program was invoked.", "type": 3 }, "COMPLEX": { "args": "X,Y", "doc": "COMPLEX(X,Y) returns a complex number where X is converted to the real component and Y is converted to the imaginary component.", "type": 3 }, "CONJG": { "args": "Z", "doc": "CONJG(Z) returns the conjugate of Z.", "type": 3 }, "COS": { "args": "X", "doc": "COS(X) computes the cosine of X.", "type": 3 }, "COSD": { "args": "X", "doc": "COSD(X) computes the cosine of X in degrees.", "type": 3 }, "COSH": { "args": "X", "doc": "COSH(X) computes the hyperbolic cosine of X.", "type": 3 }, "COTAN": { "args": "X", "doc": "COTAN(X) computes the cotangent of X.", "type": 3 }, "COTAND": { "args": "X", "doc": "COTAND(X) computes the cotangent of X in degrees.", "type": 3 }, "COUNT": { "args": "MASK,DIM=dim,KIND=kind", "doc": "COUNT(MASK,DIM=dim,KIND=kind) Count the number of true elements of MASK along dimension DIM.", "type": 3 }, "CPU_TIME": { "args": "TIME", "doc": "CPU_TIME(TIME) returns a REAL value representing the elapsed CPU time in seconds.", "type": 2 }, "CSHIFT": { "args": "ARRAY,SHIFT,DIM=dim", "doc": "CSHIFT(ARRAY,SHIFT,DIM=dim) performs a circular shift on elements of ARRAY along the dimension of DIM.", "type": 3 }, "CTIME": { "args": "TIME", "doc": "CTIME(TIME) converts a system time value, such as returned by TIME8, to a string. 
The output will be of the form ‘Sat Aug 19 18:13:14 1995’.", "type": 3 }, "DATE_AND_TIME": { "args": "DATE,TIME,ZONE,VALUES", "doc": "DATE_AND_TIME(DATE,TIME,ZONE,VALUES) gets the corresponding date and time information from the real-time system clock.", "type": 2 }, "DBLE": { "args": "A", "doc": "DBLE(A) converts A to double precision real type.", "type": 3 }, "DIGITS": { "args": "X", "doc": "DIGITS(X) returns the number of significant binary digits of the internal model representation of X.", "type": 3 }, "DIM": { "args": "X,Y", "doc": "DIM(X,Y) returns the difference X-Y if the result is positive; otherwise returns zero.", "type": 3 }, "DOT_PRODUCT": { "args": "A,B", "doc": "DOT_PRODUCT(A,B) computes the dot product multiplication of two vectors A and B.", "type": 3 }, "DPROD": { "args": "X,Y", "doc": "DPROD(X,Y) returns the product X*Y.", "type": 3 }, "DSHIFTL": { "args": "I,J,SHIFT", "doc": "DSHIFTL(I,J,SHIFT) combines bits of I and J.", "type": 3 }, "DSHIFTR": { "args": "I,J,SHIFT", "doc": "DSHIFTR(I,J,SHIFT) combines bits of I and J.", "type": 3 }, "EOSHIFT": { "args": "ARRAY,SHIFT,DIM=dim", "doc": "EOSHIFT(ARRAY,SHIFT,BOUNDARY=boundary,DIM=dim) performs a end-off shift on elements of ARRAY along the dimension of DIM.", "type": 3 }, "EPSILON": { "args": "X", "doc": "EPSILON(X) returns the smallest number E of the same kind as X such that 1 + E > 1.", "type": 3 }, "ERF": { "args": "X", "doc": "ERF(X) computes the error function of X.", "type": 3 }, "ERFC": { "args": "X", "doc": "ERFC(X) computes the complementary error function of X.", "type": 3 }, "ERFC_SCALED": { "args": "X", "doc": "ERFC_SCALED(X) computes the exponentially-scaled complementary error function of X.", "type": 3 }, "ETIME": { "args": "VALUES(2),TIME,", "doc": "ETIME(VALUES(2),TIME) returns the number of seconds of runtime since the start of the process’s execution in TIME.", "type": 3 }, "EVENT_QUERY": { "args": "EVENT,COUNT,STAT=stat", "doc": "EVENT_QUERY(EVENT,COUNT,STAT=stat) assigns the number of events to COUNT which have been posted to the EVENT variable and not yet been removed by calling EVENT WAIT.", "type": 2 }, "EXECUTE_COMMAND_LINE": { "args": "COMMAND,WAIT=wait,EXITSTAT=exitstat,CMDSTAT=cmdstat,CMDMSG=cmdmsg", "doc": "EXECUTE_COMMAND_LINE(COMMAND,WAIT=wait,EXITSTAT=exitstat,CMDSTAT=cmdstat,CMDMSG=cmdmsg) runs a shell command, synchronously or asynchronously.", "type": 2 }, "EXIT": { "args": "STATUS=status", "doc": "EXIT(STATUS=status) causes immediate termination of the program with status.", "type": 2 }, "EXP": { "args": "X", "doc": "EXP(X) computes the base e exponential of X.", "type": 3 }, "EXPONENT": { "args": "X", "doc": "EXPONENT(X) returns the value of the exponent part of X.", "type": 3 }, "EXTENDS_TYPE_OF": { "args": "A,MOLD", "doc": "EXTENDS_TYPE_OF(A,MOLD) queries dynamic type for extension.", "type": 3 }, "FDATE": { "args": "DATE", "doc": "FDATE(DATE) returns the current date (using the same format as CTIME) in DATE. 
It is equivalent to CALL CTIME(DATE, TIME()).", "type": 2 }, "FGET": { "args": "C,STATUS=status", "doc": "FDATE(C,STATUS=status) Read a single character in stream mode from stdin by bypassing normal formatted output.", "type": 2 }, "FGETC": { "args": "UNIT,C,STATUS=status", "doc": "FDATE(UNIT,C,STATUS=status) Read a single character in stream mode by bypassing normal formatted output.", "type": 2 }, "FINDLOC": { "args": "ARRAY,VALUE,DIM=dim,MASK=mask,KIND=kind,BACK=back", "doc": "FINDLOC(ARRAY,VALUE,DIM=dim,MASK=mask,KIND=kind,BACK=back) determines the location of the element in the array with the value given in the VALUE argument.", "type": 3 }, "FLOOR": { "args": "A,KIND=kind", "doc": "FLOOR(A,KIND=kind) returns the greatest integer less than or equal to A.", "type": 3 }, "FLUSH": { "args": "UNIT=unit", "doc": "FLUSH(UNIT=unit) Flushes Fortran unit(s) currently open for output.", "type": 2 }, "FNUM": { "args": "UNIT", "doc": "FNUM(UNIT) returns the POSIX file descriptor number corresponding to the open Fortran I/O unit UNIT.", "type": 3 }, "FPUT": { "args": "C,STATUS=status", "doc": "FPUT(C,STATUS=status) Write a single character in stream mode to stdout by bypassing normal formatted output.", "type": 3 }, "FPUTC": { "args": "C,UNIT=unit,STATUS=status", "doc": "FPUTC(C,UNIT=unit,STATUS=status) Write a single character in stream mode to stdout by bypassing normal formatted output.", "type": 3 }, "FRACTION": { "args": "X", "doc": "FRACTION(X) returns the fractional part of the model representation of X.", "type": 3 }, "FREE": { "args": "PTR", "doc": "FREE(PTR) Frees memory previously allocated by MALLOC.", "type": 2 }, "FSEEK": { "args": "UNIT,OFFSET,WHENCE,STATUS=status", "doc": "FSEEK(UNIT,OFFSET,WHENCE,STATUS=status) Moves UNIT to the specified OFFSET.", "type": 2 }, "FSTAT": { "args": "UNIT,VALUES", "doc": "FSTAT(UNIT,VALUES) FSTAT is identical to STAT, except that information about an already opened file is obtained.", "type": 3 }, "FTELL": { "args": "UNIT", "doc": "FSTAT(UNIT) Retrieves the current position within an open file.", "type": 3 }, "GAMMA": { "args": "X", "doc": "GAMMA(X) computes the gamma function of X.", "type": 3 }, "GERROR": { "args": "RESULT", "doc": "GERROR(RESULT) Returns the system error message corresponding to the last system error.", "type": 2 }, "GETARG": { "args": "POS,VALUE", "doc": "GETARG(POS,VALUE) Retrieve the POS-th argument that was passed on the command line when the containing program was invoked.", "type": 2 }, "GET_COMMAND": { "args": "COMMAND=command,LENGTH=length,STATUS=status", "doc": "GET_COMMAND(COMMAND=command,LENGTH=length,STATUS=status) retrieve the entire command line that was used to invoke the program.", "type": 2 }, "GET_COMMAND_ARGUMENT": { "args": "NUMBER=number,VALUE=value,LENGTH=length,STATUS=status", "doc": "GET_COMMAND_ARGUMENT(NUMBER=number,VALUE=value,LENGTH=length,STATUS=status) retrieve the NUMBER-th argument that was passed on the command line when the containing program was invoked.", "type": 2 }, "GETCWD": { "args": "C,STATUS=status", "doc": "GETCWD(C,STATUS=status) Get current working directory.", "type": 3 }, "GETENV": { "args": "NAME,VALUE", "doc": "GETENV(NAME,VALUE) Get the VALUE of the environmental variable NAME.", "type": 2 }, "GET_ENVIRONMENT_VARIABLE": { "args": "NAME=name,VALUE=value,LENGTH=length,STATUS=status,TRIM_NAME=trim_name", "doc": "GET_ENVIRONMENT_VARIABLE(NAME=name,VALUE=value,LENGTH=length,STATUS=status,TRIM_NAME=trim_name) gets the VALUE of the environmental variable NAME.", "type": 2 }, "GETGID": { 
"args": "", "doc": "GETGID() Returns the numerical group ID of the current process.", "type": 3 }, "GETLOG": { "args": "C", "doc": "GETLOG(C)Gets the username under which the program is running.", "type": 2 }, "GETPID": { "args": "", "doc": "GETPID() Returns the numerical process identifier of the current process.", "type": 3 }, "GETUID": { "args": "", "doc": "GETUID() Returns the numerical user ID of the current process.", "type": 3 }, "GMTIME": { "args": "TIME,VALUES", "doc": "GMTIME(TIME,VALUES) Given a system time value TIME (as provided by the TIME intrinsic), fills VALUES with values extracted from it appropriate to the UTC time zone, using gmtime(3).", "type": 2 }, "HOSTNM": { "args": "C,STATUS=status", "doc": "HOSTNM(C,STATUS=status) Retrieves the host name of the system on which the program is running.", "type": 3 }, "HUGE": { "args": "X", "doc": "HUGE(X) returns the largest number that is not an infinity in the model of the type of X.", "type": 3 }, "HYPOT": { "args": "X,Y", "doc": "HYPOT(X,Y) is the Euclidean distance function without undue underflow or overflow.", "type": 3 }, "IACHAR": { "args": "I,KIND=kind", "doc": "IACHAR(C,KIND=kind) returns the code for the ASCII character in the first character position of C.", "type": 3 }, "IALL": { "args": "MASK,DIM=dim", "doc": "IALL(MASK,DIM=dim) reduces with bitwise AND the elements of ARRAY along dimension DIM.", "type": 3 }, "IAND": { "args": "I,J", "doc": "IAND(I,J) Bitwise logical AND.", "type": 3 }, "IANY": { "args": "MASK,DIM=dim", "doc": "IANY(MASK,DIM=dim) reduces with bitwise OR the elements of ARRAY along dimension DIM.", "type": 3 }, "IARGC": { "args": "", "doc": "IARGC() returns the number of arguments passed on the command line when the containing program was invoked.", "type": 3 }, "IBCLR": { "args": "I,POS", "doc": "IBCLR(I,POS) returns the value of I with the bit at position POS set to zero.", "type": 3 }, "IBITS": { "args": "I,POS,LEN", "doc": "IBITS(I,POS,LEN) extracts a field of length LEN from I, starting from bit position POS and extending left for LEN bits.", "type": 3 }, "IBSET": { "args": "I,POS", "doc": "IBSET(I,POS) returns the value of I with the bit at position POS set to one.", "type": 3 }, "ICHAR": { "args": "I,KIND=kind", "doc": "ICHAR(C,KIND=kind) returns the code for the character in the first character position of C in the system's native character set.", "type": 3 }, "IDATE": { "args": "VALUES", "doc": "IDATE(VALUES) Fills VALUES with the numerical values at the current local time.", "type": 2 }, "IEOR": { "args": "I,J", "doc": "IEOR(I,J) Bitwise logical exclusive OR.", "type": 3 }, "IEORNO": { "args": "", "doc": "IEORNO() Returns the last system error number, as given by the C errno variable.", "type": 3 }, "IMAGE_INDEX": { "args": "COARRAY,SUB", "doc": "IMAGE_INDEX(COARRAY,SUB) returns the image index belonging to a cosubscript.", "type": 3 }, "INDEX": { "args": "STRING,SUBSTRING,BACK=back,KIND=kind", "doc": "INDEX(STRING,SUBSTRING,BACK=back,KIND=kind) returns the position of the start of the first occurrence of string SUBSTRING as a substring in STRING, counting from one.", "type": 3 }, "INT": { "args": "A,KIND=kind", "doc": "INT(A,KIND=kind) Convert to integer type.", "type": 3 }, "INT2": { "args": "A", "doc": "INT2(A) Convert to a KIND=2 integer type.", "type": 3 }, "INT8": { "args": "A", "doc": "INT8(A) Convert to a KIND=8 integer type.", "type": 3 }, "IOR": { "args": "I,J", "doc": "IOR(I,J) Bitwise logical inclusive OR.", "type": 3 }, "IPARITY": { "args": "ARRAY,DIM=dim,MASK=mask", "doc": 
"IPARITY(ARRAY,DIM=dim,MASK=mask) reduces with bitwise XOR (exclusive or) the elements of ARRAY along dimension DIM if the corresponding element in MASK is TRUE.", "type": 3 }, "IRAND": { "args": "FLAG", "doc": "IRAND(FLAG) returns a pseudo-random number from a uniform distribution between 0 and a system-dependent limit (which is in most cases 2147483647).", "type": 3 }, "IS_CONTIGUOUS": { "args": "ARRAY", "doc": "IS_CONTIGUOUS(ARRAY) tests whether an array is contiguous.", "type": 3 }, "IS_IOSTAT_END": { "args": "I", "doc": "IS_IOSTAT_END(I) tests whether the variable I has the value of the I/O status 'end of file'", "type": 3 }, "IS_IOSTAT_EOR": { "args": "I", "doc": "IS_IOSTAT_EOR(I) tests whether the variable I has the value of the I/O status 'end of record'", "type": 3 }, "ISATTY": { "args": "UNIT", "doc": "ISATTY(UNIT) Determine whether a unit is connected to a terminal device.", "type": 3 }, "ISHFT": { "args": "I,SHIFT", "doc": "ISHFT(I,SHIFT) returns a value corresponding to I with all of the bits shifted SHIFT places.", "type": 3 }, "ISHFTC": { "args": "I,SHIFT,SIZE=size", "doc": "ISHFTC(I,SHIFT,SIZE=size) returns a value corresponding to I with the rightmost SIZE bits shifted circularly SHIFT places; that is, bits shifted out one end are shifted into the opposite end.", "type": 3 }, "ISNAN": { "args": "X", "doc": "ISNAN(X) tests whether a floating-point value is an IEEE Not-a-Number (NaN).", "type": 3 }, "ITIME": { "args": "VALUES", "doc": "ITIME(VALUES) Fills VALUES with the numerical values at the current local time.", "type": 2 }, "KILL": { "args": "PID,STATUS=status", "doc": "KILL(PID,STATUS=status) Sends the signal specified by SIG to the process PID. See kill(2).", "type": 3 }, "KIND": { "args": "X", "doc": "KIND(X) returns the kind value of the entity X.", "type": 3 }, "LBOUND": { "args": "ARRAY,DIM=dim,KIND=kind", "doc": "LBOUND(ARRAY,DIM=dim,KIND=kind) returns the lower bounds of an array, or a single lower bound along the DIM dimension.", "type": 3 }, "LCOBOUND": { "args": "COARRAY,DIM=dim,KIND=kind", "doc": "LCOBOUND(COARRAY,DIM=dim,KIND=kind) Returns the lower bounds of a coarray, or a single lower cobound along the DIM codimension.", "type": 3 }, "LEADZ": { "args": "I", "doc": "LEADZ(I) returns the number of leading zero bits of an integer.", "type": 3 }, "LEN": { "args": "STRING,KIND=kind", "doc": "LEN(STRING,KIND=kind) returns the length of a character string.", "type": 3 }, "LEN_TRIM": { "args": "STRING,KIND=kind", "doc": "LEN_TRIM(STRING,KIND=kind) returns the length of a character string, ignoring any trailing blanks.", "type": 3 }, "LGE": { "args": "STRING_A,STRING_B", "doc": "LGE(STRING_A,STRING_B) determines whether one string is lexically greater than or equal to another string.", "type": 3 }, "LGT": { "args": "STRING_A,STRING_B", "doc": "LGT(STRING_A,STRING_B) determines whether one string is lexically greater than another string.", "type": 3 }, "LINK": { "args": "PATH1,PATH2", "doc": "LINK(PATH1,PATH2) Makes a (hard) link from file PATH1 to PATH2.", "type": 3 }, "LLE": { "args": "STRING_A,STRING_B", "doc": "LLE(STRING_A,STRING_B) determines whether one string is lexically less than or equal to another string.", "type": 3 }, "LLT": { "args": "STRING_A,STRING_B", "doc": "LLT(STRING_A,STRING_B) determines whether one string is lexically less than another string.", "type": 3 }, "LNBLNK": { "args": "STRING", "doc": "LNBLNK(STRING) Returns the length of a character string, ignoring any trailing blanks.", "type": 3 }, "LOC": { "args": "X", "doc": "LOC(X) returns 
the address of X as an integer.", "type": 3 }, "LOG": { "args": "X", "doc": "LOG(X) computes the natural logarithm of X, i.e. the logarithm to the base e.", "type": 3 }, "LOG10": { "args": "X", "doc": "LOG10(X) computes the base 10 logarithm of X.", "type": 3 }, "LOG_GAMMA": { "args": "X", "doc": "LOG_GAMMA(X) computes the natural logarithm of the absolute value of the Gamma function.", "type": 3 }, "LOGICAL": { "args": "L,KIND=kind", "doc": "LOGICAL(L,KIND=kind) Converts one kind of LOGICAL variable to another.", "type": 3 }, "LSHIFT": { "args": "I,SHIFT", "doc": "LSHIFT(I,SHIFT) returns a value corresponding to I with all of the bits shifted left by SHIFT places.", "type": 3 }, "LSTAT": { "args": "NAME,VALUES,STATUS=status", "doc": "LSTAT(NAME,VALUES,STATUS=status) is identical to STAT, except that if path is a symbolic link, then the link itself is statted, not the file that it refers to.", "type": 3 }, "LTIME": { "args": "TIME,VALUES", "doc": "LTIME(TIME,VALUES) Given a system time value TIME (as provided by the TIME intrinsic), fills VALUES with values extracted from it appropriate to the local time zone using localtime(3).", "type": 2 }, "MALLOC": { "args": "SIZE", "doc": "MALLOC(SIZE) allocates SIZE bytes of dynamic memory and returns the address of the allocated memory.", "type": 3 }, "MASKL": { "args": "I,KIND=kind", "doc": "MASKL(I,KIND=kind) has its leftmost I bits set to 1, and the remaining bits set to 0.", "type": 3 }, "MASKR": { "args": "I,KIND=kind", "doc": "MASKR(I,KIND=kind) has its rightmost I bits set to 1, and the remaining bits set to 0.", "type": 3 }, "MATMUL": { "args": "MATRIX_A,MATRIX_B", "doc": "MATMUL(MATRIX_A,MATRIX_B) performs a matrix multiplication on numeric or logical arguments.", "type": 3 }, "MAX": { "args": "A1,A2", "doc": "MAX(A1,A2,...) returns the argument with the largest (most positive) value.", "type": 3 }, "MAXEXPONENT": { "args": "X", "doc": "MAXEXPONENT(X) returns the maximum exponent in the model of the type of X.", "type": 3 }, "MAXLOC": { "args": "ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back", "doc": "MAXLOC(ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back) determines the location of the element in the array with the maximum value.", "type": 3 }, "MAXVAL": { "args": "ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back", "doc": "MAXVAL(ARRAY,DIM=dim,MASK=mask) determines the maximum value of the elements in an array.", "type": 3 }, "MCLOCK": { "args": "", "doc": "MCLOCK() Returns the number of clock ticks since the start of the process, based on the function clock(3) in the C standard library.", "type": 3 }, "MCLOCK8": { "args": "", "doc": "MCLOCK8() Returns the number of clock ticks since the start of the process, based on the function clock(3) in the C standard library.", "type": 3 }, "MERGE": { "args": "TSOURCE,FSOURCE,MASK", "doc": "MERGE(TSOURCE,FSOURCE,MASK) select values from two arrays according to a logical mask.", "type": 3 }, "MERGE_BITS": { "args": "I,J,MASK", "doc": "MERGE_BITS(I,J,MASK) merges the bits of I and J as determined by the mask.", "type": 3 }, "MIN": { "args": "A1,A2", "doc": "MIN(A1,A2,...) 
returns the argument with the smallest (most negative) value.", "type": 3 }, "MINEXPONENT": { "args": "X", "doc": "MINEXPONENT(X) returns the minimum exponent in the model of the type of X.", "type": 3 }, "MINLOC": { "args": "ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back", "doc": "MINLOC(ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back) determines the location of the element in the array with the minimum value.", "type": 3 }, "MINVAL": { "args": "ARRAY,DIM=dim,MASK=mask,KIND=kind,BACK=back", "doc": "MINVAL(ARRAY,DIM=dim,MASK=mask) determines the minimum value of the elements in an array.", "type": 3 }, "MOD": { "args": "A,P", "doc": "MOD(A,P) computes the remainder of the division of A by P.", "type": 3 }, "MODULO": { "args": "A,P", "doc": "MODULO(A,P) computes the A modulo P.", "type": 3 }, "MOVE_ALLOC": { "args": "FROM,TO", "doc": "MOVE_ALLOC(FROM,TO) moves the allocation from FROM to TO.", "type": 3 }, "MVBITS": { "args": "FROM,TO", "doc": "MVBITS(FROM,FROMPOS,LEN,TO,TOPOS) moves LEN bits from positions FROMPOS through FROMPOS+LEN-1 of FROM to positions TOPOS through TOPOS+LEN-1 of TO.", "type": 3 }, "NEAREST": { "args": "X,S", "doc": "NEAREST(X,S) returns the processor-representable number nearest to X in the direction indicated by the sign of S.", "type": 3 }, "NEW_LINE": { "args": "C", "doc": "NEW_LINE(C) returns the new-line character.", "type": 3 }, "NINT": { "args": "A,KIND=kind", "doc": "NINT(A,KIND=kind) rounds its argument to the nearest whole number.", "type": 3 }, "NORM2": { "args": "ARRAY,DIM=dim", "doc": "NORM2(ARRAY,DIM=dim) Calculates the Euclidean vector norm (L_2 norm) of ARRAY along dimension DIM.", "type": 3 }, "NOT": { "args": "I", "doc": "NOT(I) returns the bitwise Boolean inverse of I.", "type": 3 }, "NULL": { "doc": "NULL() returns a disassociated pointer.", "type": 3 }, "PACK": { "args": "ARRAY,MASK=mask,VECTOR=vector", "doc": "PACK(ARRAY,MASK=mask,VECTOR=vector) stores the elements of ARRAY in an array of rank one.", "type": 3 }, "PARITY": { "args": "MASK,DIM=dim", "doc": "PARITY(MASK,DIM=dim) Calculates the parity, i.e. the reduction using .XOR., of MASK along dimension DIM.", "type": 3 }, "PERROR": { "args": "STRING", "doc": "PERROR(STRING) Prints (on the C stderr stream) a newline-terminated error message corresponding to the last system error. This is prefixed by STRING, a colon and a space.", "type": 2 }, "POPCNT": { "args": "I", "doc": "POPCNT(I) returns the number of bits set (’1’ bits) in the binary representation of I.", "type": 3 }, "POPPAR": { "args": "I", "doc": "POPPAR(I) returns parity of the integer I, i.e. the parity of the number of bits set ('1' bits) in the binary representation of I. 
It is equal to 0 if I has an even number of bits set, and 1 for an odd number of '1' bits.", "type": 3 }, "PRECISION": { "args": "X", "doc": "PRECISION(X) returns the decimal precision in the model of the type of X.", "type": 3 }, "PRESENT": { "args": "A", "doc": "PRESENT(A) determines whether an optional dummy argument is present.", "type": 3 }, "PRODUCT": { "args": "ARRAY,DIM=dim,MASK=mask", "doc": "PRODUCT(ARRAY,DIM=dim,MASK=mask) multiplies the elements of ARRAY along dimension DIM if the corresponding element in MASK is TRUE.", "type": 3 }, "RADIX": { "args": "X", "doc": "RADIX(X) returns the base of the model representing the entity X.", "type": 3 }, "RAN": { "args": "I", "doc": "RAN(I) For compatibility with HP FORTRAN 77/iX, the RAN intrinsic is provided as an alias for RAND.", "type": 3 }, "RAND": { "args": "I", "doc": "RAND(I) returns a pseudo-random number from a uniform distribution between 0 and 1.", "type": 3 }, "RANDOM_INIT": { "args": "REPEATABLE,IMAGE_DISTINCT", "doc": "RANDOM_INIT(REPEATABLE,IMAGE_DISTINCT) Initializes the state of the pseudorandom number generator used by RANDOM_NUMBER.", "type": 2 }, "RANDOM_NUMBER": { "args": "HARVEST", "doc": "RANDOM_NUMBER(HARVEST) returns a single pseudorandom number or an array of pseudorandom numbers.", "type": 2 }, "RANDOM_SEED": { "args": "SIZE=size,PUT=put,GET=get", "doc": "RANDOM_SEED(SIZE=size,PUT=put,GET=get) restarts or queries the state of the pseudorandom number generator used by RANDOM_NUMBER.", "type": 2 }, "RANGE": { "args": "X", "doc": "RANGE(X) returns the decimal exponent range in the model of the type of X.", "type": 3 }, "RANK": { "args": "A", "doc": "RANK(A) returns the rank of a scalar or array data object.", "type": 3 }, "REAL": { "args": "A,KIND=kind", "doc": "REAL(A,KIND=kind) converts its argument A to a real type.", "type": 3 }, "RENAME": { "args": "PATH1,PATH2", "doc": "RENAME(PATH1,PATH2) Renames a file from file PATH1 to PATH2.", "type": 3 }, "REPEAT": { "args": "STRING,NCOPIES", "doc": "REPEAT(STRING,NCOPIES) concatenates NCOPIES copies of a string.", "type": 3 }, "RESHAPE": { "args": "SOURCE,SHAPE,PAD=pad,ORDER=order", "doc": "RESHAPE(SOURCE,SHAPE,PAD=pad,ORDER=order) reshapes SOURCE to correspond to SHAPE.", "type": 3 }, "RRSPACING": { "args": "X", "doc": "RRSPACING(X) returns the reciprocal of the relative spacing of model numbers near X.", "type": 3 }, "RSHIFT": { "args": "I,SHIFT", "doc": "RSHIFT(I,SHIFT) returns a value corresponding to I with all of the bits shifted right by SHIFT places.", "type": 3 }, "SAME_TYPE_AS": { "args": "A,B", "doc": "SAME_TYPE_AS(A,B) query dynamic types for equality.", "type": 3 }, "SCALE": { "args": "X,I", "doc": "SCALE(X,I) returns X * RADIX(X)**I.", "type": 3 }, "SCAN": { "args": "STRING,SET,BACK=back,KIND=kind", "doc": "SCAN(STRING,SET,BACK=back,KIND=kind) scans a STRING for any of the characters in a SET of characters.", "type": 3 }, "SECNDS": { "args": "X", "doc": "SECNDS(X) gets the time in seconds from the real-time system clock.", "type": 3 }, "SECOND": { "args": "TIME", "doc": "SECOND(TIME) Returns a REAL(4) value representing the elapsed CPU time in seconds.", "type": 3 }, "SELECTED_CHAR_KIND": { "args": "NAME", "doc": "SELECTED_CHAR_KIND(NAME) returns the kind value for the character set named NAME, if a character set with such a name is supported, or -1 otherwise.", "type": 3 }, "SELECTED_INT_KIND": { "args": "R", "doc": "SELECTED_INT_KIND(R) return the kind value of the smallest integer type that can represent all values ranging from -10^R (exclusive) to 
10^R (exclusive).", "type": 3 }, "SELECTED_REAL_KIND": { "args": "P,R", "doc": "SELECTED_REAL_KIND(P,R) returns the kind value of a real data type with decimal precision of at least P digits, exponent range of at least R, and with a radix of RADIX.", "type": 3 }, "SET_EXPONENT": { "args": "X,I", "doc": "SET_EXPONENT(X,I) returns the real number whose fractional part is that of X and whose exponent part is I.", "type": 3 }, "SHAPE": { "args": "SOURCE,KIND=kind", "doc": "SHAPE(SOURCE,KIND=kind) determines the shape of an array.", "type": 3 }, "SHIFTA": { "args": "I,SHIFT", "doc": "SHIFTA(I,SHIFT) returns a value corresponding to I with all of the bits shifted right by SHIFT places.", "type": 3 }, "SHIFTL": { "args": "I,SHIFT", "doc": "SHIFTL(I,SHIFT) returns a value corresponding to I with all of the bits shifted left by SHIFT places.", "type": 3 }, "SHIFTR": { "args": "I,SHIFT", "doc": "SHIFTR(I,SHIFT) returns a value corresponding to I with all of the bits shifted right by SHIFT places.", "type": 3 }, "SIGN": { "args": "A,B", "doc": "SIGN(A,B) returns the value of A with the sign of B.", "type": 3 }, "SIGNAL": { "args": "NUMBER,HANDLER", "doc": "SIGNAL(NUMBER,HANDLER) causes external subroutine HANDLER to be executed with a single integer argument when signal NUMBER occurs.", "type": 3 }, "SIN": { "args": "X", "doc": "SIN(X) computes the sine of X.", "type": 3 }, "SIND": { "args": "X", "doc": "SIND(X) computes the sine of X in degrees.", "type": 3 }, "SINH": { "args": "X", "doc": "SINH(X) computes the hyperbolic sine of X.", "type": 3 }, "SIZE": { "args": "ARRAY,DIM=dim,KIND=kind", "doc": "SIZE(ARRAY,DIM=dim,KIND=kind) determines the extent of ARRAY along a specified dimension DIM, or the total number of elements in ARRAY if DIM is absent.", "type": 3 }, "SIZEOF": { "args": "X", "doc": "SIZEOF(X) calculates the number of bytes of storage the expression X occupies.", "type": 3 }, "SLEEP": { "args": "SECONDS", "doc": "SLEEP(SECONDS) Calling this subroutine causes the process to pause for SECONDS seconds.", "type": 2 }, "SPACING": { "args": "X", "doc": "SPACING(X) determines the distance between the argument X and the nearest adjacent number of the same type.", "type": 3 }, "SPREAD": { "args": "SOURCE,DIM,NCOPIES", "doc": "SPREAD(SOURCE,DIM,NCOPIES) replicates a SOURCE array NCOPIES times along a specified dimension DIM.", "type": 3 }, "SQRT": { "args": "X", "doc": "SQRT(X) computes the square root of X.", "type": 3 }, "SRAND": { "args": "SEED", "doc": "SRAND(SEED) reinitializes the pseudo-random number generator called by RAND and IRAND.", "type": 2 }, "STAT": { "args": "NAME,VALUES", "doc": "STAT(NAME,VALUES) This function returns information about a file.", "type": 3 }, "STORAGE_SIZE": { "args": "A,KIND=kind", "doc": "STORAGE_SIZE(A,KIND=kind) Returns the storage size of argument A in bits.", "type": 3 }, "SUM": { "args": "ARRAY,DIM=dim,MASK=mask", "doc": "SUM(ARRAY,DIM=dim,MASK=mask) adds the elements of ARRAY along dimension DIM if the corresponding element in MASK is TRUE.", "type": 3 }, "SYMLNK": { "args": "PATH1,PATH2", "doc": "SYMLNK(PATH1,PATH2) Makes a symbolic link from file PATH1 to PATH2.", "type": 3 }, "SYSTEM": { "args": "COMMAND,STATUS=status", "doc": "SYSTEM(COMMAND,STATUS=status) Passes the command COMMAND to a shell (see system(3)).", "type": 3 }, "SYSTEM_CLOCK": { "args": "COUNT=count,COUNT_RATE=count_rate,COUNT_MAX=count_max", "doc": "SYSTEM_CLOCK(COUNT=count,COUNT_RATE=count_rate,COUNT_MAX=count_max) determines the COUNT of a processor clock since an unspecified 
time in the past modulo COUNT_MAX, COUNT_RATE determines the number of clock ticks per second.", "type": 3 }, "TAN": { "args": "X", "doc": "TAN(X) computes the tangent of X.", "type": 3 }, "TAND": { "args": "X", "doc": "TAND(X) computes the tangent of X in degrees.", "type": 3 }, "TANH": { "args": "X", "doc": "TANH(X) computes the hyperbolic tangent of X.", "type": 3 }, "THIS_IMAGE": { "args": "DISTANCE=distance|COARRAY,DIM=dim", "doc": "THIS_IMAGE(DISTANCE=distance|COARRAY,DIM=dim) Returns the cosubscript for this image.", "type": 3 }, "TIME": { "args": "", "doc": "TIME() Returns the current time encoded as an integer.", "type": 3 }, "TIME8": { "args": "", "doc": "TIME8() Returns the current time encoded as an integer. This value is suitable for passing to CTIME, GMTIME, and LTIME.", "type": 3 }, "TINY": { "args": "X", "doc": "TINY(X) returns the smallest positive (non zero) number in the model of the type of X.", "type": 3 }, "TRAILZ": { "args": "I", "doc": "TRAILZ(I) returns the number of trailing zero bits of an integer.", "type": 3 }, "TRANSFER": { "args": "SOURCE,MOLD,SIZE=size", "doc": "TRANSFER(SOURCE,MOLD,SIZE=size) interprets the bitwise representation of SOURCE in memory as if it is the representation of a variable or array of the same type and type parameters as MOLD.", "type": 3 }, "TRANSPOSE": { "args": "MATRIX", "doc": "TRANSPOSE(MATRIX) transpose an array of rank two.", "type": 3 }, "TRIM": { "args": "STRING", "doc": "TRIM(STRING) removes trailing blank characters of a string.", "type": 3 }, "TTYNAM": { "args": "UNIT", "doc": "TTYNAM(UNIT) Get the name of a terminal device.", "type": 3 }, "UBOUND": { "args": "ARRAY,DIM=dim,KIND=kind", "doc": "UBOUND(ARRAY,DIM=dim,KIND=kind) returns the upper bounds of an array, or a single upper bound along the DIM dimension.", "type": 3 }, "UCOBOUND": { "args": "ARRAY,DIM=dim,KIND=kind", "doc": "UCOBOUND(ARRAY,DIM=dim,KIND=kind) Returns the upper cobounds of a coarray, or a single upper cobound along the DIM codimension.", "type": 3 }, "UMASK": { "args": "MASK", "doc": "UMASK(MASK) Sets the file creation mask to MASK.", "type": 3 }, "UNLINK": { "args": "PATH", "doc": "UNLINK(PATH) Unlinks the file PATH.", "type": 3 }, "UNPACK": { "args": "VECTOR,MASK,FIELD", "doc": "UNPACK(VECTOR,MASK,FIELD) Store the elements of VECTOR in an array of higher rank.", "type": 3 }, "VERIFY": { "args": "STRING,SET,BACK=back,KIND=kind", "doc": "VERIFY(STRING,SET,BACK=back,KIND=kind) verifies that all the characters in STRING belong to the set of characters in SET.", "type": 3 }, "XOR": { "args": "I,J", "doc": "XOR(I,J) Bitwise logical exclusive or.", "type": 3 } } fortran-language-server-2.13.0+dfsg.1/fortls/intrinsic_mods.json000066400000000000000000002162721450400537300246560ustar00rootroot00000000000000{ "omp_lib": { "type": 0, "name": "omp_lib", "children": [ { "name": "openmp_version", "type": 3, "desc": "INTEGER" }, { "name": "omp_alloctrait", "type": 4 }, { "name": "omp_sched_static", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "omp_sched_dynamic", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "omp_sched_guided", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "omp_sched_auto", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "omp_proc_bind_false", "type": 3, "desc": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_proc_bind_true", "type": 3, "desc": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_proc_bind_master", "type": 3, "desc": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": 
"omp_proc_bind_close", "type": 3, "desc": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_proc_bind_spread", "type": 3, "desc": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_lock_hint_none", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_lock_hint_uncontended", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_lock_hint_contended", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_lock_hint_nonspeculative", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_lock_hint_speculative", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_sync_hint_none", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_sync_hint_uncontended", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_sync_hint_contended", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_sync_hint_nonspeculative", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_sync_hint_speculative", "type": 3, "desc": "INTEGER(KIND=omp_lock_hint_kind)" }, { "name": "omp_pause_soft", "type": 3, "desc": "INTEGER(KIND=omp_pause_resource_kind)" }, { "name": "omp_pause_hard", "type": 3, "desc": "INTEGER(KIND=omp_pause_resource_kind)" }, { "name": "omp_atk_sync_hint", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_alignment", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_access", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_pool_size", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_fallback", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_fb_data", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_pinned", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atk_partition", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_key_kind)" }, { "name": "omp_atv_default", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_false", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_true", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_contended", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_uncontended", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_serialized", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_sequential", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_private", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_all", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_thread", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_pteam", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_cgroup", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_default_mem_fb", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_null_fb", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_abort_fb", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_allocator_fb", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_environment", "type": 3, "desc": 
"INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_nearest", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_atv_blocked", "type": 3, "desc": "INTEGER(KIND=omp_alloctrait_val_kind)" }, { "name": "omp_null_allocator", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_default_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_large_cap_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_const_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_high_bw_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_low_lat_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_cgroup_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_pteam_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_thread_mem_alloc", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_default_mem_space", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)" }, { "name": "omp_large_cap_mem_space", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)" }, { "name": "omp_const_mem_space", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)" }, { "name": "omp_high_bw_mem_space", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)" }, { "name": "omp_low_lat_mem_space", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)" }, { "name": "omp_get_supported_active_levels", "type": 2, "return": "INTEGER" }, { "name": "omp_get_num_threads", "type": 2, "return": "INTEGER" }, { "name": "omp_get_max_threads", "type": 2, "return": "INTEGER" }, { "name": "omp_get_thread_num", "type": 2, "return": "INTEGER" }, { "name": "omp_get_num_procs", "type": 2, "return": "INTEGER" }, { "name": "omp_get_thread_limit", "type": 2, "return": "INTEGER" }, { "name": "omp_get_max_active_levels", "type": 2, "return": "INTEGER" }, { "name": "omp_get_level", "type": 2, "return": "INTEGER" }, { "name": "omp_get_ancestor_thread_num", "type": 2, "return": "INTEGER", "args": "level", "children": [ { "name": "level", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_get_team_size", "type": 2, "return": "INTEGER", "args": "level", "children": [ { "name": "level", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_get_active_level", "type": 2, "return": "INTEGER" }, { "name": "omp_get_proc_bind", "type": 2, "return": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_get_num_places", "type": 2, "return": "INTEGER(KIND=omp_proc_bind_kind)" }, { "name": "omp_get_place_num_procs", "type": 2, "return": "INTEGER", "args": "place_num", "children": [ { "name": "place_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_get_place_proc_ids", "type": 1, "args": "place_num,ids", "children": [ { "name": "place_num", "type": 3, "desc": "INTEGER" }, { "name": "ids", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(*)"] } ] }, { "name": "omp_get_partition_place_nums", "type": 1, "args": "place_nums", "children": [ { "name": "place_nums", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(*)"] } ] }, { "name": "omp_set_affinity_format", "type": 1, "args": "format", "children": [ { "name": "format", "type": 3, "desc": "CHARACTER(LEN=*)", "mods": ["INTENT(in)"] } ] }, { "name": "omp_get_affinity_format", "type": 2, "return": "INTEGER", "args": "buffer", "children": [ { "name": "buffer", 
"type": 3, "desc": "CHARACTER(LEN=*)", "mods": ["INTENT(out)"] } ] }, { "name": "omp_display_affinity", "type": 1, "args": "format", "children": [ { "name": "format", "type": 3, "desc": "CHARACTER(LEN=*)", "mods": ["INTENT(in)"] } ] }, { "name": "omp_capture_affinity", "type": 1, "args": "buffer,format", "children": [ { "name": "buffer", "type": 3, "desc": "CHARACTER(LEN=*)", "mods": ["INTENT(out)"] }, { "name": "format", "type": 3, "desc": "CHARACTER(LEN=*)", "mods": ["INTENT(in)"] } ] }, { "name": "omp_get_default_device", "type": 2, "return": "INTEGER" }, { "name": "omp_get_num_devices", "type": 2, "return": "INTEGER" }, { "name": "omp_get_device_num", "type": 2, "return": "INTEGER" }, { "name": "omp_get_num_teams", "type": 2, "return": "INTEGER" }, { "name": "omp_get_team_num", "type": 2, "return": "INTEGER" }, { "name": "omp_in_parallel", "type": 2, "return": "LOGICAL" }, { "name": "omp_get_dynamic", "type": 2, "return": "LOGICAL" }, { "name": "omp_get_cancellation", "type": 2, "return": "LOGICAL" }, { "name": "omp_get_nested", "type": 2, "return": "LOGICAL" }, { "name": "omp_in_final", "type": 2, "return": "LOGICAL" }, { "name": "omp_is_initial_device", "type": 2, "return": "LOGICAL" }, { "name": "omp_get_initial_device", "type": 2, "return": "INTEGER" }, { "name": "omp_get_max_task_priority", "type": 2, "return": "INTEGER" }, { "name": "omp_pause_resource", "type": 2, "return": "INTEGER", "args": "kind,device_num", "children": [ { "name": "kind", "type": 3, "desc": "INTEGER(KIND=omp_pause_resource_kind)" }, { "name": "device_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_pause_resource_all", "type": 2, "return": "INTEGER", "args": "kind", "children": [ { "name": "kind", "type": 3, "desc": "INTEGER(KIND=omp_pause_resource_kind)" } ] }, { "name": "omp_get_wtime", "type": 2, "return": "DOUBLE PRECISION" }, { "name": "omp_get_wtick", "type": 2, "return": "DOUBLE PRECISION" }, { "name": "omp_fulfill_event", "type": 1, "args": "event", "children": [ { "name": "event", "type": 3, "desc": "INTEGER(KIND=omp_event_handle_kind)" } ] }, { "name": "omp_init_allocator", "type": 2, "return": "INTEGER(KIND=omp_allocator_handle_kind)", "args": "memspace,ntraits,traits", "children": [ { "name": "memspace", "type": 3, "desc": "INTEGER(KIND=omp_memspace_handle_kind)", "mods": ["INTENT(in)"] }, { "name": "ntraits", "type": 3, "desc": "INTEGER", "mods": ["INTENT(in)"] }, { "name": "traits", "type": 3, "desc": "TYPE(omp_alloctrait)", "mods": ["DIMENSION(*)", "INTENT(in)"] } ] }, { "name": "omp_destroy_allocator", "type": 1, "args": "allocator", "children": [ { "name": "allocator", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)", "mods": ["INTENT(in)"] } ] }, { "name": "omp_set_default_allocator", "type": 1, "args": "allocator", "children": [ { "name": "allocator", "type": 3, "desc": "INTEGER(KIND=omp_allocator_handle_kind)", "mods": ["INTENT(in)"] } ] }, { "name": "omp_get_default_allocator", "type": 2, "return": "INTEGER(KIND=omp_allocator_handle_kind)" }, { "name": "omp_control_tool", "type": 2, "return": "INTEGER", "args": "command,modifier", "children": [ { "name": "command", "type": 3, "desc": "INTEGER(KIND=omp_control_tool_kind)" }, { "name": "modifier", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_test_lock", "type": 2, "return": "LOGICAL", "args": "svar", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" } ] }, { "name": "omp_test_nest_lock", "type": 2, "return": "LOGICAL", "args": "nvar", "children": [ { "name": "nvar", "type": 3, 
"desc": "INTEGER(KIND=omp_nest_lock_kind)" } ] }, { "name": "omp_set_num_threads", "type": 1, "args": "num_threads", "children": [ { "name": "num_threads", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_set_dynamic", "type": 1, "args": "dynamic_threads", "children": [ { "name": "dynamic_threads", "type": 3, "desc": "LOGICAL" } ] }, { "name": "omp_set_nested", "type": 1, "args": "nested", "children": [ { "name": "nested", "type": 3, "desc": "LOGICAL" } ] }, { "name": "omp_set_schedule", "type": 1, "args": "kind,chunk_size", "children": [ { "name": "kind", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "chunk_size", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_get_schedule", "type": 1, "args": "kind,chunk_size", "children": [ { "name": "kind", "type": 3, "desc": "INTEGER(KIND=omp_sched_kind)" }, { "name": "chunk_size", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_set_max_active_levels", "type": 1, "args": "max_levels", "children": [ { "name": "max_levels", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_set_default_device", "type": 1, "args": "device_num", "children": [ { "name": "device_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "omp_init_lock", "type": 1, "args": "svar", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" } ] }, { "name": "omp_init_nest_lock", "type": 1, "args": "nvar", "children": [ { "name": "nvar", "type": 3, "desc": "INTEGER(KIND=omp_nest_lock_kind)" } ] }, { "name": "omp_init_lock_with_hint", "type": 1, "args": "svar,hint", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" }, { "name": "hint", "type": 3, "desc": "INTEGER(KIND=omp_sync_hint_kind)" } ] }, { "name": "omp_init_nest_lock_with_hint", "type": 1, "args": "nvar,hint", "children": [ { "name": "nvar", "type": 3, "desc": "INTEGER(KIND=omp_nest_lock_kind)" }, { "name": "hint", "type": 3, "desc": "INTEGER(KIND=omp_sync_hint_kind)" } ] }, { "name": "omp_destroy_lock", "type": 1, "args": "svar", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" } ] }, { "name": "omp_destroy_nest_lock", "type": 1, "args": "nvar", "children": [ { "name": "nvar", "type": 3, "desc": "INTEGER(KIND=omp_nest_lock_kind)" } ] }, { "name": "omp_set_lock", "type": 1, "args": "svar", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" } ] }, { "name": "omp_set_nest_lock", "type": 1, "args": "nvar", "children": [ { "name": "nvar", "type": 3, "desc": "INTEGER(KIND=omp_nest_lock_kind)" } ] }, { "name": "omp_unset_lock", "type": 1, "args": "svar", "children": [ { "name": "svar", "type": 3, "desc": "INTEGER(KIND=omp_lock_kind)" } ] }, { "name": "omp_unset_nest_lock", "type": 1, "args": "nvar", "children": [ { "name": "nvar", "type": 3, "desc": "INTEGER(KIND=omp_nest_lock_kind)" } ] } ] }, "omp_lib_kinds": { "type": 0, "name": "omp_lib_kinds", "children": [ { "name": "omp_allocator_handle_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_alloctrait_key_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_alloctrait_val_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_depend_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_lock_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_lock_hint_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_nest_lock_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_pause_resource_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_memspace_handle_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_proc_bind_kind", "type": 3, 
"desc": "INTEGER" }, { "name": "omp_sched_kind", "type": 3, "desc": "INTEGER" }, { "name": "omp_sync_hint_kind", "type": 3, "desc": "INTEGER" } ] }, "openacc": { "type": 0, "name": "openacc", "children": [ { "name": "openacc_version", "type": 3, "desc": "INTEGER" }, { "name": "acc_device_property_kind", "type": 3, "desc": "INTEGER" }, { "name": "acc_get_num_devices", "type": 2, "return": "INTEGER", "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_set_device_type", "type": 1, "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_get_device_type", "type": 2, "return": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_set_device_num", "type": 1, "args": "dev_num,dev_type", "children": [ { "name": "dev_num", "type": 3, "desc": "INTEGER" }, { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_get_device_num", "type": 2, "return": "INTEGER", "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_get_property", "type": 2, "return": "INTEGER(KIND=c_size_t)", "args": "dev_num,dev_type,property", "children": [ { "name": "dev_num", "type": 3, "desc": "INTEGER", "mods": ["VALUE"] }, { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)", "mods": ["VALUE"] }, { "name": "property", "type": 3, "desc": "INTEGER(KIND=acc_device_property_kind)", "mods": ["VALUE"] } ] }, { "name": "acc_get_property_string", "type": 1, "args": "dev_num,dev_type,property,string", "children": [ { "name": "dev_num", "type": 3, "desc": "INTEGER", "mods": ["VALUE"] }, { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)", "mods": ["VALUE"] }, { "name": "property", "type": 3, "desc": "INTEGER(KIND=acc_device_property_kind)", "mods": ["VALUE"] }, { "name": "string", "type": 3, "desc": "CHARACTER(LEN=*)" } ] }, { "name": "acc_init", "type": 1, "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)", "mods": ["VALUE"] } ] }, { "name": "acc_shutdown", "type": 1, "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_async_test", "type": 2, "return": "LOGICAL", "args": "wait_arg", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_async_test_device", "type": 2, "return": "LOGICAL", "args": "wait_arg,dev_num", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_async_test_all", "type": 2, "return": "LOGICAL" }, { "name": "acc_async_test_all_device", "type": 2, "return": "LOGICAL", "args": "dev_num", "children": [ { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_wait", "type": 1, "args": "wait_arg", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_wait", "type": 1, "args": "wait_arg,dev_num", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_wait_async", "type": 1, "args": "wait_arg,async_arg", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } 
] }, { "name": "acc_wait_device_async", "type": 1, "args": "wait_arg,async_arg,dev_num", "children": [ { "name": "wait_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_wait_all", "type": 1 }, { "name": "acc_wait_all_device", "type": 1, "args": "dev_num", "children": [ { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_wait_all_async", "type": 1, "args": "async_arg", "children": [ { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_wait_all_device_async", "type": 1, "args": "async_arg,dev_num", "children": [ { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "dev_num", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_get_default_async", "type": 2, "return": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_set_default_async", "type": 1, "args": "async_arg", "children": [ { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_on_device", "type": 2, "return": "LOGICAL", "args": "dev_type", "children": [ { "name": "dev_type", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" } ] }, { "name": "acc_copyin", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_copyin", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_copyin_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_copyin_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_create", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_create", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_create_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_create_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_copyout", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_copyout", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_copyout_async", "type": 1, "args": 
"data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_copyout_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_copyout_finalize", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_copyout_finalize", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_copyout_finalize_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_copyout_finalize_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_delete", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_delete", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_delete_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_delete_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_delete_finalize", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_delete_finalize", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_delete_finalize_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_delete_finalize_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_update_device", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_update_device", "type": 1, "args": "data_arg,bytes", "children": [ 
{ "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_update_device_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_update_device_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_update_self", "type": 1, "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_update_self", "type": 1, "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_update_self_async", "type": 1, "args": "data_arg,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_update_self_async", "type": 1, "args": "data_arg,bytes,async_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "async_arg", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, { "name": "acc_is_present", "type": 2, "return": "LOGICAL", "args": "data_arg", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] } ] }, { "name": "acc_is_present", "type": 2, "return": "LOGICAL", "args": "data_arg,bytes", "children": [ { "name": "data_arg", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" } ] }, { "name": "acc_memcpy_d2d", "type": 1, "args": "data_arg_dest,data_arg_src,bytes,dev_num_dest,dev_num_src", "children": [ { "name": "data_arg_dest", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "data_arg_src", "type": 3, "desc": "TYPE(*)", "mods": ["DIMENSION(*)"] }, { "name": "bytes", "type": 3, "desc": "INTEGER" }, { "name": "dev_num_dest", "type": 3, "desc": "INTEGER" }, { "name": "dev_num_src", "type": 3, "desc": "INTEGER" } ] } ] }, "openacc_kinds": { "type": 0, "name": "openacc_kinds", "children": [ { "name": "acc_device_kind", "type": 3, "desc": "INTEGER" }, { "name": "acc_device_none", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_device_default", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_device_host", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_device_not_host", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_device_nvidia", "type": 3, "desc": "INTEGER(KIND=acc_device_kind)" }, { "name": "acc_handle_kind", "type": 3, "desc": "INTEGER" }, { "name": "acc_async_noval", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" }, { "name": "acc_async_sync", "type": 3, "desc": "INTEGER(KIND=acc_handle_kind)" } ] }, "iso_fortran_env": { "type": 0, "name": "iso_fortran_env", "children": [ { "name": "atomic_int_kind", "type": 3, "desc": "INTEGER" }, { "name": "atomic_logical_kind", "type": 3, "desc": "INTEGER" }, { "name": "character_kinds", "type": 3, "desc": "INTEGER", 
"mods": ["DIMENSION(:)"] }, { "name": "character_storage_size", "type": 3, "desc": "INTEGER" }, { "name": "error_unit", "type": 3, "desc": "INTEGER" }, { "name": "file_storage_size", "type": 3, "desc": "INTEGER" }, { "name": "input_unit", "type": 3, "desc": "INTEGER" }, { "name": "int8", "type": 3, "desc": "INTEGER" }, { "name": "int16", "type": 3, "desc": "INTEGER" }, { "name": "int32", "type": 3, "desc": "INTEGER" }, { "name": "int64", "type": 3, "desc": "INTEGER" }, { "name": "integer_kinds", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(:)"] }, { "name": "iostat_end", "type": 3, "desc": "INTEGER" }, { "name": "iostat_eor", "type": 3, "desc": "INTEGER" }, { "name": "iostat_inquire_internal_unit", "type": 3, "desc": "INTEGER" }, { "name": "numeric_storage_size", "type": 3, "desc": "INTEGER" }, { "name": "logical_kinds", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(:)"] }, { "name": "output_unit", "type": 3, "desc": "INTEGER" }, { "name": "real32", "type": 3, "desc": "INTEGER" }, { "name": "real64", "type": 3, "desc": "INTEGER" }, { "name": "real128", "type": 3, "desc": "INTEGER" }, { "name": "real_kinds", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(:)"] }, { "name": "stat_locked", "type": 3, "desc": "INTEGER" }, { "name": "stat_locked_other_image", "type": 3, "desc": "INTEGER" }, { "name": "stat_stopped_image", "type": 3, "desc": "INTEGER" }, { "name": "stat_failed_image", "type": 3, "desc": "INTEGER" }, { "name": "stat_unlocked", "type": 3, "desc": "INTEGER" }, { "name": "lock_type", "type": 4 }, { "name": "compiler_options", "type": 2, "return": "CHARACTER(LEN=*)" }, { "name": "compiler_version", "type": 2, "return": "CHARACTER(LEN=*)" } ] }, "iso_c_binding": { "type": 0, "name": "iso_c_binding", "children": [ { "name": "c_int", "type": 3, "desc": "INTEGER" }, { "name": "c_short", "type": 3, "desc": "INTEGER" }, { "name": "c_long", "type": 3, "desc": "INTEGER" }, { "name": "c_long_long", "type": 3, "desc": "INTEGER" }, { "name": "c_signed_char", "type": 3, "desc": "INTEGER" }, { "name": "c_size_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int8_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int16_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int32_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int64_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int128_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_least8_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_least16_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_least32_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_least64_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_least128_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_fast8_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_fast16_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_fast32_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_fast64_t", "type": 3, "desc": "INTEGER" }, { "name": "c_int_fast128_t", "type": 3, "desc": "INTEGER" }, { "name": "c_intmax_t", "type": 3, "desc": "INTEGER" }, { "name": "c_intptr_t", "type": 3, "desc": "INTEGER" }, { "name": "c_ptrdiff_t", "type": 3, "desc": "INTEGER" }, { "name": "c_float", "type": 3, "desc": "INTEGER" }, { "name": "c_double", "type": 3, "desc": "INTEGER" }, { "name": "c_long_double", "type": 3, "desc": "INTEGER" }, { "name": "c_float128", "type": 3, "desc": "INTEGER" }, { "name": "c_float_complex", "type": 3, "desc": "INTEGER" }, { "name": "c_double_complex", "type": 3, "desc": "INTEGER" }, { "name": "c_long_double_complex", "type": 3, "desc": "INTEGER" }, { 
"name": "c_float128_complex", "type": 3, "desc": "INTEGER" }, { "name": "c_bool", "type": 3, "desc": "INTEGER" }, { "name": "c_char", "type": 3, "desc": "INTEGER" }, { "name": "c_null_char", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_alert", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_backspace", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_form_feed", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_new_line", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_carriage_return", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_horizontal_tab", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_vertical_tab", "type": 3, "desc": "CHARACTER(KIND=c_char)" }, { "name": "c_null_ptr", "type": 3, "desc": "TYPE(c_ptr)" }, { "name": "c_null_funptr", "type": 3, "desc": "TYPE(c_funptr)" }, { "name": "c_ptr", "type": 4 }, { "name": "c_funptr", "type": 4 }, { "name": "c_associated", "type": 2, "return": "LOGICAL", "args": "c_ptr_1,c_ptr_2", "children": [ { "name": "c_ptr_1", "type": 3, "desc": "TYPE(c_ptr)", "mods": ["INTENT(in)"] }, { "name": "c_ptr_2", "type": 3, "desc": "TYPE(c_ptr)", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "c_f_pointer", "type": 1, "args": "cptr,fptr,shape", "children": [ { "name": "cptr", "type": 3, "desc": "INTENT(in)", "mods": ["INTENT(in)"] }, { "name": "fptr", "type": 3, "desc": "ANY", "mods": ["POINTER", "INTENT(out)"] }, { "name": "shape", "type": 3, "desc": "INTEGER", "mods": ["DIMENSION(:)", "INTENT(in)"] } ] }, { "name": "c_f_procpointer", "type": 1, "args": "cptr,fptr", "children": [ { "name": "cptr", "type": 3, "desc": "TYPE(c_funptr)", "mods": ["INTENT(in)"] }, { "name": "fptr", "type": 3, "desc": "PROCEDURE", "mods": ["POINTER", "INTENT(out)"] } ] }, { "name": "c_funloc", "type": 2, "return": "TYPE(c_funptr)", "args": "x", "children": [ { "name": "x", "type": 3, "desc": "PROCEDURE", "mods": ["INTENT(in)"] } ] }, { "name": "c_loc", "type": 2, "return": "TYPE(c_ptr)", "args": "x", "children": [ { "name": "x", "type": 3, "desc": "ANY", "mods": ["POINTER", "INTENT(in)"] } ] }, { "name": "c_sizeof", "type": 2, "return": "INTEGER(KIND=c_size_t)", "args": "x", "children": [ { "name": "x", "type": 3, "desc": "ANY", "mods": ["INTENT(in)"] } ] } ] }, "ieee_exceptions": { "type": 0, "name": "ieee_exceptions", "children": [ { "name": "ieee_flag_type", "type": 4 }, { "name": "ieee_overflow", "type": 3, "desc": "TYPE(ieee_flag_type)" }, { "name": "ieee_divide_by_zero", "type": 3, "desc": "TYPE(ieee_flag_type)" }, { "name": "ieee_invalid", "type": 3, "desc": "TYPE(ieee_flag_type)" }, { "name": "ieee_underflow", "type": 3, "desc": "TYPE(ieee_flag_type)" }, { "name": "ieee_inexact", "type": 3, "desc": "TYPE(ieee_flag_type)" }, { "name": "ieee_usual", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["DIMENSION(3)"] }, { "name": "ieee_all", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["DIMENSION(5)"] } ] }, "ieee_arithmetic": { "type": 0, "name": "ieee_arithmetic", "use": "ieee_exceptions", "children": [ { "name": "ieee_status_type", "type": 4 }, { "name": "ieee_class_type", "type": 4 }, { "name": "ieee_round_type", "type": 4 }, { "name": "ieee_signaling_nan", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_quiet_nan", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_negative_inf", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_negative_normal", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_negative_denormal", "type": 3, 
"desc": "TYPE(ieee_class_type)" }, { "name": "ieee_negative_zero", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_positive_zero", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_positive_denormal", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_positive_normal", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_positive_inf", "type": 3, "desc": "TYPE(ieee_class_type)" }, { "name": "ieee_nearest", "type": 3, "desc": "TYPE(ieee_round_type)" }, { "name": "ieee_to_zero", "type": 3, "desc": "TYPE(ieee_round_type)" }, { "name": "ieee_up", "type": 3, "desc": "TYPE(ieee_round_type)" }, { "name": "ieee_down", "type": 3, "desc": "TYPE(ieee_round_type)" }, { "name": "ieee_other", "type": 3, "desc": "TYPE(ieee_round_type)" }, { "name": "ieee_class", "type": 2, "return": "TYPE(ieee_class_type)", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_copy_sign", "type": 1, "args": "X,Y", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(inout)"] }, { "name": "Y", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_get_flag", "type": 1, "args": "FLAG,FLAG_VALUE", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] }, { "name": "FLAG_VALUE", "type": 3, "desc": "LOGICAL", "mods": ["INTENT(out)"] } ] }, { "name": "ieee_set_flag", "type": 1, "args": "FLAG,FLAG_VALUE", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] }, { "name": "FLAG_VALUE", "type": 3, "desc": "LOGICAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_get_halting_mode", "type": 1, "args": "FLAG,HALTING", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] }, { "name": "HALTING", "type": 3, "desc": "LOGICAL", "mods": ["INTENT(out)"] } ] }, { "name": "ieee_set_halting_mode", "type": 1, "args": "FLAG,HALTING", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] }, { "name": "HALTING", "type": 3, "desc": "LOGICAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_get_rounding_mode", "type": 1, "args": "ROUND_VALUE", "children": [ { "name": "ROUND_VALUE", "type": 3, "desc": "TYPE(ieee_round_type)", "mods": ["INTENT(out)"] } ] }, { "name": "ieee_set_rounding_mode", "type": 1, "args": "ROUND_VALUE", "children": [ { "name": "ROUND_VALUE", "type": 3, "desc": "TYPE(ieee_round_type)", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_get_status", "type": 1, "args": "STATUS_VALUE", "children": [ { "name": "STATUS_VALUE", "type": 3, "desc": "TYPE(ieee_status_type)", "mods": ["INTENT(out)"] } ] }, { "name": "ieee_set_status", "type": 1, "args": "STATUS_VALUE", "children": [ { "name": "STATUS_VALUE", "type": 3, "desc": "TYPE(ieee_status_type)", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_is_finite", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_is_nan", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_is_negative", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_is_normal", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_logb", 
"type": 2, "return": "REAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_next_after", "type": 2, "return": "REAL", "args": "X,Y", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] }, { "name": "Y", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_rem", "type": 2, "return": "REAL", "args": "X,Y", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] }, { "name": "Y", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_rint", "type": 2, "return": "REAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_scalb", "type": 2, "return": "REAL", "args": "X,I", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] }, { "name": "I", "type": 3, "desc": "INTEGER", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_selected_real_kind", "type": 2, "return": "INTEGER", "args": "P,R", "children": [ { "name": "P", "type": 3, "desc": "INTEGER", "mods": ["OPTIONAL", "INTENT(in)"] }, { "name": "R", "type": 3, "desc": "INTEGER", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_datatype", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_denormal", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_divide", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_flag", "type": 2, "return": "LOGICAL", "args": "FLAG,X", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] }, { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_halting", "type": 2, "return": "LOGICAL", "args": "FLAG", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_flag_type)", "mods": ["INTENT(in)"] } ] }, { "name": "ieee_support_inf", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_io", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_nan", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_rounding", "type": 2, "return": "LOGICAL", "args": "ROUND_VALUE,X", "children": [ { "name": "FLAG", "type": 3, "desc": "TYPE(ieee_round_type)", "mods": ["INTENT(in)"] }, { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_sqrt", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_support_standard", "type": 2, "return": "LOGICAL", "args": "X", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["OPTIONAL", "INTENT(in)"] } ] }, { "name": "ieee_unordered", "type": 2, "return": "LOGICAL", "args": "X,Y", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] }, { "name": "Y", "type": 3, "desc": "REAL", 
"mods": ["INTENT(in)"] } ] }, { "name": "ieee_value", "type": 2, "return": "REAL", "args": "X,CLASS", "children": [ { "name": "X", "type": 3, "desc": "REAL", "mods": ["INTENT(in)"] }, { "name": "CLASS", "type": 3, "desc": "TYPE(ieee_class_type)", "mods": ["INTENT(in)"] } ] } ] }, "ieee_features": { "type": 0, "name": "ieee_features", "children": [ { "name": "ieee_features_type", "type": 4 }, { "name": "ieee_denormal", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_divide", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_halting", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_inexact_flag", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_inf", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_invalid_flag", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_nan", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_rounding", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_sqrt", "type": 3, "desc": "TYPE(ieee_features_type)" }, { "name": "ieee_underflow_flag", "type": 3, "desc": "TYPE(ieee_features_type)" } ] } } fortran-language-server-2.13.0+dfsg.1/fortls/intrinsics.py000066400000000000000000000154641450400537300234760ustar00rootroot00000000000000import json import os from fortls.helper_functions import map_keywords from fortls.objects import ( FortranAST, FortranObj, Function, Module, Subroutine, Type, Variable, ) none_ast = FortranAST() lowercase_intrinsics = False def set_lowercase_intrinsics(): global lowercase_intrinsics lowercase_intrinsics = True class Intrinsic(FortranObj): def __init__(self, name, type, doc_str=None, args="", parent=None): self.name = name self.type = type self.doc_str = doc_str self.args = args.replace(" ", "") self.parent = parent self.file_ast = none_ast if lowercase_intrinsics: self.name = self.name.lower() self.args = self.args.lower() def get_type(self): return self.type def get_desc(self): if self.type == 2: return "SUBROUTINE" elif self.type == 14: return "KEYWORD" elif self.type == 15: return "STATEMENT" else: return "INTRINSIC" def get_snippet(self, name_replace=None, drop_arg=-1): if self.args == "": if self.type >= 14: return None, None arg_str = "()" arg_snip = None else: arg_list = self.args.split(",") arg_str, arg_snip = self.get_placeholders(arg_list) name = self.name if name_replace is not None: name = name_replace snippet = None if arg_snip is not None: snippet = name + arg_snip return name + arg_str, snippet def get_signature(self): arg_sigs = [] for arg in self.args.split(","): arg_sigs.append({"label": arg}) call_sig, _ = self.get_snippet() return call_sig, self.doc_str, arg_sigs def get_hover(self, long=False): return self.doc_str, False def is_callable(self): if self.type == 2: return True else: return False def load_intrinsics(): def create_int_object(name, json_obj, type): args = json_obj.get("args", "") doc_str = json_obj.get("doc") if lowercase_intrinsics: name = name.lower() args = args.lower() return Intrinsic(name, type, doc_str=doc_str, args=args) def create_object(json_obj, enc_obj=None): if enc_obj is not None: none_ast.enc_scope_name = enc_obj.FQSN else: none_ast.enc_scope_name = None if "mods" in json_obj: keywords, keyword_info = map_keywords(json_obj["mods"]) else: keywords = [] keyword_info = {} name = json_obj["name"] args = json_obj.get("args", "") if lowercase_intrinsics: name = name.lower() args = args.lower() if json_obj["type"] == 0: mod_tmp = Module(none_ast, 0, name) if "use" in json_obj: 
mod_tmp.add_use(json_obj["use"], 0) return mod_tmp elif json_obj["type"] == 1: return Subroutine(none_ast, 0, name, args=args) elif json_obj["type"] == 2: return Function( none_ast, 0, name, args=args, result_type=json_obj["return"], keywords=keywords, # keyword_info=keyword_info, ) elif json_obj["type"] == 3: return Variable(none_ast, 0, name, json_obj["desc"], keywords, keyword_info) elif json_obj["type"] == 4: return Type(none_ast, 0, name, keywords) else: raise ValueError def add_children(json_obj, fort_obj): for child in json_obj.get("children", []): child_obj = create_object(child, enc_obj=fort_obj) fort_obj.add_child(child_obj) add_children(child, child_obj) # Fortran statments taken from Intel Fortran documentation # (https://www.intel.com/content/www/us/en/develop/documentation/fortran-compiler-oneapi-dev-guide-and-reference/top/language-reference/a-to-z-reference) json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), "statements.json" ) statements = {"var_def": [], "int_stmnts": []} with open(json_file, "r", encoding="utf-8") as fid: intrin_file = json.load(fid) for key in statements: for name, json_obj in sorted(intrin_file[key].items()): statements[key].append(create_int_object(name, json_obj, 15)) # Fortran keywords taken from Intel Fortran documentation # (https://www.intel.com/content/www/us/en/develop/documentation/fortran-compiler-oneapi-dev-guide-and-reference/top/language-reference/a-to-z-reference) json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), "keywords.json" ) keywords = {"var_def": [], "arg": [], "type_mem": [], "vis": [], "param": []} with open(json_file, "r", encoding="utf-8") as fid: intrin_file = json.load(fid) for key in keywords: for name, json_obj in sorted(intrin_file[key].items()): keywords[key].append(create_int_object(name, json_obj, 14)) # Definitions taken from gfortran documentation # (https://gcc.gnu.org/onlinedocs/gfortran/Intrinsic-Procedures.html#Intrinsic-Procedures) json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), "intrinsic_funs.json" ) int_funs = [] with open(json_file, "r", encoding="utf-8") as fid: intrin_file = json.load(fid) for name, json_obj in sorted(intrin_file.items()): int_funs.append(create_int_object(name, json_obj, json_obj["type"])) # Definitions taken from gfortran documentation # (https://gcc.gnu.org/onlinedocs/gfortran/Intrinsic-Modules.html#Intrinsic-Modules) # Update OpenACC from here https://www.openacc.org/specification json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), "intrinsic_mods.json" ) int_mods = [] with open(json_file, "r", encoding="utf-8") as fid: intrin_file = json.load(fid) for key, json_obj in intrin_file.items(): fort_obj = create_object(json_obj) add_children(json_obj, fort_obj) int_mods.append(fort_obj) return statements, keywords, int_funs, int_mods def get_intrinsic_keywords(statements, keywords, context=-1): if context == 0: return statements["int_stmnts"] + statements["var_def"] + keywords["vis"] elif context == 1: return keywords["var_def"] + keywords["vis"] + keywords["param"] elif context == 2: return keywords["var_def"] + keywords["arg"] + keywords["param"] elif context == 3: return keywords["var_def"] + keywords["type_mem"] + keywords["vis"] return keywords["var_def"] + keywords["param"] fortran-language-server-2.13.0+dfsg.1/fortls/json_templates.py000066400000000000000000000024771450400537300243400ustar00rootroot00000000000000from __future__ import annotations def range_json(sln: int, sch: int, eln: int = None, ech: int 
= None): return { "range": { "start": {"line": sln, "character": sch}, "end": {"line": eln if eln else sln, "character": ech if ech else sch}, } } def diagnostic_json(sln: int, sch: int, eln: int, ech: int, msg: str, sev: str): return {**range_json(sln, sch, eln, ech), "message": msg, "severity": sev} def uri_json(uri: str, sln: int, sch: int, eln: int = None, ech: int = None): return {"uri": uri, **range_json(sln, sch, eln, ech)} def location_json(uri: str, sln: int, sch: int, eln: int = None, ech: int = None): return {"location": uri_json(uri, sln, sch, eln, ech)} def symbol_json( name: str, kind: int, uri: str, sln: int, sch: int, eln: int = None, ech: int = None, container_name: str = None, ): if container_name: return { "name": name, "kind": kind, **location_json(uri, sln, sch, eln, ech), "containerName": container_name, } return {"name": name, "kind": kind, **location_json(uri, sln, sch, eln, ech)} def change_json(new_text: str, sln: int, sch: int, eln: int = None, ech: int = None): return {**range_json(sln, sch, eln, ech), "newText": new_text} fortran-language-server-2.13.0+dfsg.1/fortls/jsonrpc.py000066400000000000000000000160721450400537300227630ustar00rootroot00000000000000import json import os import queue import threading from collections import deque from pathlib import Path from urllib.parse import quote, unquote from fortls.constants import log def path_from_uri(uri: str) -> str: # Convert file uri to path (strip html like head part) if not uri.startswith("file://"): return os.path.abspath(uri) if os.name == "nt": _, path = uri.split("file:///", 1) else: _, path = uri.split("file://", 1) return str(Path(unquote(path)).resolve()) def path_to_uri(path: str) -> str: # Convert path to file uri (add html like head part) if os.name == "nt": return "file:///" + quote(path.replace("\\", "/")) else: return "file://" + quote(path) class JSONRPC2ProtocolError(Exception): pass class ReadWriter: def __init__(self, reader, writer): self.reader = reader self.writer = writer def readline(self, *args): data = self.reader.readline(*args) return data.decode("utf-8") def read(self, *args): data = self.reader.read(*args) return data.decode("utf-8") def write(self, out): self.writer.write(out.encode()) self.writer.flush() class TCPReadWriter(ReadWriter): def readline(self, *args): data = self.reader.readline(*args) return data.decode("utf-8") def read(self, *args): return self.reader.read(*args).decode("utf-8") def write(self, out): self.writer.write(out.encode()) self.writer.flush() class JSONRPC2Connection: def __init__(self, conn=None): self.conn = conn self._msg_buffer = deque() self._next_id = 1 def _read_header_content_length(self, line): if len(line) < 2 or line[-2:] != "\r\n": raise JSONRPC2ProtocolError("Line endings must be \\r\\n") if line.startswith("Content-Length: "): _, value = line.split("Content-Length: ") value = value.strip() try: return int(value) except ValueError: raise JSONRPC2ProtocolError(f"Invalid Content-Length header: {value}") def _receive(self): line = self.conn.readline() if line == "": raise EOFError() length = self._read_header_content_length(line) # Keep reading headers until we find the sentinel # line for the JSON request. while line != "\r\n": line = self.conn.readline() body = self.conn.read(length) log.debug("RECV %s", body) return json.loads(body) def read_message(self, want=None): """Read a JSON RPC message sent over the current connection. 
If id is None, the next available message is returned.""" if want is None: if self._msg_buffer: return self._msg_buffer.popleft() return self._receive() # First check if our buffer contains something we want. msg = deque_find_and_pop(self._msg_buffer, want) if msg: return msg # We need to keep receiving until we find something we want. # Things we don't want are put into the buffer for future callers. while True: msg = self._receive() if want(msg): return msg self._msg_buffer.append(msg) def _send(self, body): body = json.dumps(body, separators=(",", ":")) content_length = len(body) response = ( f"Content-Length: {content_length}\r\n" "Content-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\n" f"{body}" ) self.conn.write(response) log.debug("SEND %s", body) def write_response(self, rid, result): body = { "jsonrpc": "2.0", "id": rid, "result": result, } self._send(body) def write_error(self, rid, code, message, data=None): e = { "code": code, "message": message, } if data is not None: e["data"] = data body = { "jsonrpc": "2.0", "id": rid, "error": e, } self._send(body) def send_request(self, method, params): rid = self._next_id self._next_id += 1 body = { "jsonrpc": "2.0", "id": rid, "method": method, "params": params, } self._send(body) return self.read_message(want=lambda msg: msg.get("id") == rid) def send_notification(self, method, params): body = { "jsonrpc": "2.0", "method": method, "params": params, } self._send(body) def send_request_batch(self, requests): """Pipelines requests and returns responses. The responses is a generator where the nth response corresponds with the nth request. Users must read the generator until the end, otherwise you will leak a thread.""" # We communicate the request ids using a thread safe queue. # It also allows us to bound the number of concurrent requests. q = queue.Queue(100) def send(): for method, params in requests: rid = self._next_id self._next_id += 1 q.put(rid) body = { "jsonrpc": "2.0", "id": rid, "method": method, "params": params, } self._send(body) # Sentinel value to indicate we are done q.put(None) threading.Thread(target=send).start() while True: rid = q.get() if rid is None: break yield self.read_message(want=lambda msg: msg.get("id") == rid) def deque_find_and_pop(d, f): idx = -1 for i, v in enumerate(d): if f(v): idx = i break if idx < 0: return None d.rotate(-idx) v = d.popleft() d.rotate(idx) return v def write_rpc_request(rid, method, params): body = { "jsonrpc": "2.0", "id": rid, "method": method, "params": params, } body = json.dumps(body, separators=(",", ":")) content_length = len(body) return ( f"Content-Length: {content_length}\r\n" "Content-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\n" f"{body}" ) def write_rpc_notification(method, params): body = { "jsonrpc": "2.0", "method": method, "params": params, } body = json.dumps(body, separators=(",", ":")) content_length = len(body) return ( f"Content-Length: {content_length}\r\n" "Content-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\n" f"{body}" ) def read_rpc_messages(content): conn = JSONRPC2Connection(content) result_list = [] while True: try: result = conn._receive() except EOFError: break else: result_list.append(result) return result_list fortran-language-server-2.13.0+dfsg.1/fortls/keywords.json000066400000000000000000000060121450400537300234660ustar00rootroot00000000000000{ "var_def": { "ALLOCATABLE": { "doc": "Specifies that an object is allocatable." 
}, "ASYNCHRONOUS": { "doc": "Specifies that a variable can be used for asynchronous input and output." }, "BIND": { "doc": "Specifies that an object is interoperable with C and has external linkage." }, "CODIMENSION": { "doc": "Specifies that an entity is a coarray, and specifies its corank and cobounds, if any." }, "CONTIGUOUS": { "doc": "Specifies that the target of a pointer or an assumed-sized array is contiguous." }, "DIMENSION(:)": { "doc": "Specifies that an object is an array, and defines the shape of the array." }, "EXTERNAL": { "doc": "Allows an external procedure, a dummy procedure, a procedure pointer, or a block data subprogram to be used as an actual argument." }, "INTRINSIC": { "doc": "Allows the specific name of an intrinsic procedure to be used as an actual argument." }, "POINTER": { "doc": "Specifies that an object or a procedure is a pointer (a dynamic variable)." }, "PROTECTED": { "doc": "Specifies limitations on the use of module entities." }, "TARGET": { "doc": "Specifies that an object can become the target of a pointer (it can be pointed to)." }, "VOLATILE": { "doc": "Specifies that the value of an object is entirely unpredictable, based on information local to the current program unit. It prevents objects from being optimized during compilation." } }, "arg": { "INTENT(IN)": { "doc": "Specifies that the dummy argument will be used only to provide data to the procedure." }, "INTENT(OUT)": { "doc": "Specifies that the dummy argument will be used to pass data from the procedure back to the calling program." }, "INTENT(INOUT)": { "doc": "Specifies that the dummy argument can both provide data to the procedure and return data to the calling program." }, "OPTIONAL": { "doc": "Permits dummy arguments to be omitted in a procedure reference." }, "SAVE": { "doc": "Causes the values and definition of objects to be retained after execution of a RETURN or END statement in a subprogram." }, "VALUE": { "doc": "Specifies a type of argument association for a dummy argument." } }, "type_mem": { "DEFERRED": { "doc": "Indicates that the procedure is deferred. Deferred bindings must only be specified for derived-type definitions with the ABSTRACT attribute." }, "NON_OVERRIDABLE": { "doc": "Determines whether a binding can be overridden in an extended type. You must not specify NON_OVERRIDABLE for a binding with the DEFERRED attribute." }, "NOPASS": { "doc": "Indicate that the procedure has no passed-object dummy argument." 
}, "PASS": { "doc": "Indicates that the procedure has no passed-object dummy argument.", "args": "arg_name" } }, "vis": { "PRIVATE": {}, "PUBLIC": {} }, "param": { "PARAMETER": {} } } fortran-language-server-2.13.0+dfsg.1/fortls/langserver.py000066400000000000000000002136411450400537300234560ustar00rootroot00000000000000from __future__ import annotations import json import logging import os import re import subprocess import sys import traceback import urllib.request from multiprocessing import Pool from pathlib import Path from typing import Pattern from urllib.error import URLError import json5 from packaging import version # Local modules from fortls.constants import ( CLASS_TYPE_ID, FORTRAN_LITERAL, FUNCTION_TYPE_ID, INTERFACE_TYPE_ID, METH_TYPE_ID, MODULE_TYPE_ID, SELECT_TYPE_ID, SUBROUTINE_TYPE_ID, VAR_TYPE_ID, FRegex, Severity, log, ) from fortls.helper_functions import ( expand_name, get_line_prefix, get_paren_level, get_var_stack, only_dirs, resolve_globs, set_keyword_ordering, ) from fortls.intrinsics import ( Intrinsic, get_intrinsic_keywords, load_intrinsics, set_lowercase_intrinsics, ) from fortls.json_templates import change_json, symbol_json, uri_json from fortls.jsonrpc import JSONRPC2Connection, path_from_uri, path_to_uri from fortls.objects import ( FortranAST, Variable, climb_type_tree, find_in_scope, find_in_workspace, get_use_tree, ) from fortls.parse_fortran import FortranFile, get_line_context from fortls.regex_patterns import src_file_exts from fortls.version import __version__ # Global regexes # TODO: I think this can be replaced by fortls.regex_patterns type & class TYPE_DEF_REGEX = re.compile(r"[ ]*(TYPE|CLASS)[ ]*\([a-z0-9_ ]*$", re.I) class LangServer: def __init__(self, conn, settings: dict): self.conn: JSONRPC2Connection = conn self.running: bool = True self.root_path: str = None self.workspace: dict[str, FortranFile] = {} self.obj_tree: dict = {} self.link_version = 0 self._version = version.parse(__version__) # Parse a dictionary of the command line interface and make them into # class variable. 
This way the command line and the file interfaces # are always on sync, with the same default arguments for k, v in settings.items(): # Do not parse command line debug arguments if k.startswith("debug_") and k != "debug_log": continue setattr(self, k, v) self.sync_type: int = 2 if self.incremental_sync else 1 self.post_messages = [] self.FORTRAN_SRC_EXT_REGEX: Pattern[str] = src_file_exts() # Intrinsic (re-loaded during initialize) ( self.statements, self.keywords, self.intrinsic_funs, self.intrinsic_mods, ) = load_intrinsics() # Set object settings set_keyword_ordering(self.sort_keywords) def post_message(self, msg: str, severity: int = Severity.error, exc_info=False): self.conn.send_notification( "window/showMessage", {"type": severity, "message": msg} ) if severity == 1: log.error(msg, exc_info=exc_info) elif severity == 2: log.warning(msg, exc_info=exc_info) elif severity == 3: log.info(msg, exc_info=exc_info) def run(self): # Run server while self.running: try: request = self.conn.read_message() self.handle(request) except EOFError: break except Exception as e: self.post_message(f"Unexpected error: {e}", exc_info=True) break else: for message in self.post_messages: self.post_message(message[1], message[0]) self.post_messages = [] def handle(self, request: dict): def noop(request: dict): return None # Request handler log.debug("REQUEST %s %s", request.get("id"), request.get("method")) handler = { "initialize": self.serve_initialize, "textDocument/documentSymbol": self.serve_document_symbols, "textDocument/completion": self.serve_autocomplete, "textDocument/signatureHelp": self.serve_signature, "textDocument/definition": self.serve_definition, "textDocument/references": self.serve_references, "textDocument/documentHighlight": self.serve_references, "textDocument/hover": self.serve_hover, "textDocument/implementation": self.serve_implementation, "textDocument/rename": self.serve_rename, "textDocument/didOpen": self.serve_onOpen, "textDocument/didSave": self.serve_onSave, "textDocument/didClose": self.serve_onClose, "textDocument/didChange": self.serve_onChange, "textDocument/codeAction": self.serve_codeActions, "initialized": noop, "workspace/didChangeWatchedFiles": noop, "workspace/didChangeConfiguration": noop, "workspace/symbol": self.serve_workspace_symbol, "$/cancelRequest": noop, "$/setTrace": noop, "shutdown": noop, "exit": self.serve_exit, }.get(request["method"], self.serve_default) # We handle notifications differently since we can't respond if "id" not in request: try: handler(request) except: log.exception("error handling request: %s", request, exc_info=True) return # try: resp = handler(request) except JSONRPC2Error as e: self.conn.write_error( request["id"], code=e.code, message=e.message, data=e.data ) log.warning("RPC error handling request %s", request, exc_info=True) except Exception as e: self.conn.write_error( request["id"], code=-32603, message=str(e), data={ "traceback": traceback.format_exc(), }, ) log.warning("error handling request %s", request, exc_info=True) else: self.conn.write_response(request["id"], resp) def serve_initialize(self, request: dict): # Setup language server params: dict = request["params"] self.root_path = path_from_uri( params.get("rootUri") or params.get("rootPath") or "" ) self.source_dirs.add(self.root_path) self._load_config_file() self._resolve_globs_in_paths() self._config_logger(request) self._load_intrinsics() self._add_source_dirs() if self._update_version_pypi(): self.post_message( "Please restart the server for the new version to 
activate", Severity.info, ) # Initialize workspace self.workspace_init() log.info(f"fortls - Fortran Language Server {__version__} Initialized") # server_capabilities = { "completionProvider": { "resolveProvider": False, "triggerCharacters": ["%"], }, "definitionProvider": True, "documentSymbolProvider": True, "referencesProvider": True, "hoverProvider": True, "implementationProvider": True, "renameProvider": True, "workspaceSymbolProvider": True, "textDocumentSync": self.sync_type, } if self.use_signature_help: server_capabilities["signatureHelpProvider"] = { "triggerCharacters": ["(", ","] } if self.enable_code_actions: server_capabilities["codeActionProvider"] = True if self.notify_init: self.post_message("fortls initialization complete", Severity.info) return {"capabilities": server_capabilities} def serve_workspace_symbol(self, request): def map_types(type): if type == 1: return 2 elif type == 2: return 6 elif type == 3: return 12 elif type == 4: return 5 elif type == 5: return 11 elif type == 6: return 13 elif type == 7: return 6 else: return 1 matching_symbols = [] query = request["params"]["query"].lower() for candidate in find_in_workspace(self.obj_tree, query): tmp_out = { "name": candidate.name, "kind": map_types(candidate.get_type()), "location": { "uri": path_to_uri(candidate.file_ast.path), "range": { "start": {"line": candidate.sline - 1, "character": 0}, "end": {"line": candidate.eline - 1, "character": 0}, }, }, } # Set containing scope if candidate.FQSN.find("::") > 0: tmp_list = candidate.FQSN.split("::") tmp_out["containerName"] = tmp_list[0] matching_symbols.append(tmp_out) return sorted(matching_symbols, key=lambda k: k["name"]) def serve_document_symbols(self, request: dict): def map_types(type, in_class: bool = False): if type == 1: return 2 elif type in (2, 3): if in_class: return 6 else: return 12 elif type == 4: return 5 elif type == 5: return 11 elif type == 6: return 13 elif type == 7: return 6 else: return 1 # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] path: str = path_from_uri(uri) file_obj = self.workspace.get(path) if file_obj is None: return [] # Add scopes to outline view test_output = [] for scope in file_obj.ast.get_scopes(): if (scope.name[0] == "#") or (scope.get_type() == SELECT_TYPE_ID): continue scope_tree = scope.FQSN.split("::") if len(scope_tree) > 2: if scope_tree[1].startswith("#gen_int"): scope_type = 11 else: continue else: scope_type = map_types(scope.get_type()) # Set containing scope if scope.FQSN.find("::") > 0: tmp_list = scope.FQSN.split("::") test_output.append( symbol_json( scope.name, scope_type, uri, scope.sline - 1, 0, scope.eline - 1, 0, tmp_list[0], ) ) else: test_output.append( symbol_json( scope.name, scope_type, uri, scope.sline - 1, 0, scope.eline - 1, 0, ) ) # If class add members if scope.get_type() == CLASS_TYPE_ID and not self.symbol_skip_mem: for child in scope.children: test_output.append( symbol_json( child.name, map_types(child.get_type(), True), uri, child.sline - 1, 0, container_name=scope.name, ) ) return test_output def serve_autocomplete(self, request: dict): # def map_types(type: int): if type == 1: return 9 elif type == 2: return 3 elif type == 4: return 7 elif type == 6: return 6 else: return type def set_type_mask(def_value): return [def_value if i < 8 else True for i in range(16)] def get_candidates( scope_list: list, var_prefix: str, inc_globals: bool = True, public_only: bool = False, abstract_only: bool = False, no_use: bool = False, ): # def 
child_candidates( scope, only_list: list = None, filter_public=True, req_abstract=False ): if only_list is None: only_list = [] tmp_list = [] # Filter children nonly = len(only_list) for child in scope.get_children(filter_public): if req_abstract: if child.is_abstract(): tmp_list += child_candidates( child, only_list, filter_public ) else: if child.is_external_int(): tmp_list += child_candidates( child, only_list, filter_public ) else: if (nonly > 0) and (child.name.lower() not in only_list): continue tmp_list.append(child) return tmp_list var_list = [] use_dict = {} for scope in scope_list: var_list += child_candidates( scope, filter_public=public_only, req_abstract=abstract_only ) # Traverse USE tree and add to list if not no_use: use_dict = get_use_tree(scope, use_dict, self.obj_tree) # Look in found use modules rename_list = [None for _ in var_list] for use_mod, use_info in use_dict.items(): scope = self.obj_tree[use_mod][0] only_list = use_info.only_list if len(use_info.rename_map) > 0: only_list = [ use_info.rename_map.get(only_name, only_name) for only_name in only_list ] tmp_list = child_candidates( scope, only_list, req_abstract=abstract_only ) # Setup renaming if len(use_info.rename_map) > 0: rename_reversed = { value: key for (key, value) in use_info.rename_map.items() } for tmp_obj in tmp_list: var_list.append(tmp_obj) rename_list.append( rename_reversed.get(tmp_obj.name.lower(), None) ) else: var_list += tmp_list rename_list += [None for _ in tmp_list] # Add globals if inc_globals: tmp_list = [obj[0] for (_, obj) in self.obj_tree.items()] var_list += tmp_list + self.intrinsic_funs rename_list += [None for _ in tmp_list + self.intrinsic_funs] # Filter by prefix if necessary if var_prefix == "": return var_list, rename_list else: tmp_list = [] tmp_rename = [] for (i, var) in enumerate(var_list): var_name = rename_list[i] if var_name is None: var_name = var.name if var_name.lower().startswith(var_prefix): tmp_list.append(var) tmp_rename.append(rename_list[i]) return tmp_list, tmp_rename def build_comp( candidate, name_only: bool = self.autocomplete_name_only, name_replace: str = None, is_interface: bool = False, is_member: bool = False, ): comp_obj = {} call_sig = None if name_only: comp_obj["label"] = candidate.name else: comp_obj["label"] = candidate.name if name_replace is not None: comp_obj["label"] = name_replace call_sig, snippet = candidate.get_snippet(name_replace) if self.autocomplete_no_snippets: snippet = call_sig if snippet is not None: if self.use_signature_help and (not is_interface): arg_open = snippet.find("(") if arg_open > 0: snippet = snippet[:arg_open] comp_obj["insertText"] = snippet comp_obj["insertTextFormat"] = 2 comp_obj["kind"] = map_types(candidate.get_type()) if is_member and (comp_obj["kind"] == 3): comp_obj["kind"] = 2 comp_obj["detail"] = candidate.get_desc() if call_sig is not None: comp_obj["detail"] += " " + call_sig doc_str, _ = candidate.get_hover() if doc_str is not None: comp_obj["documentation"] = doc_str return comp_obj # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] path: str = path_from_uri(uri) file_obj: FortranFile = self.workspace.get(path) if file_obj is None: return None # Check line ac_line: int = params["position"]["line"] ac_char: int = params["position"]["character"] # Get full line (and possible continuations) from file pre_lines, curr_line, _ = file_obj.get_code_line( ac_line, forward=False, strip_comment=True ) line_prefix = get_line_prefix(pre_lines, curr_line, ac_char) 
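        # Note: `line_prefix` is the code on the current line up to the cursor,
        # with any continuation lines folded in. `get_var_stack` below splits a
        # trailing derived-type access chain on `%`; e.g. for a hypothetical
        # prefix like `call obj%vec%nor` it should yield
        # ["obj", "vec", "nor"], so `var_prefix` is the partial token being
        # completed ("nor") and `is_member` marks a type-bound member access.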
if line_prefix is None: return None is_member = False try: var_stack = get_var_stack(line_prefix) is_member = len(var_stack) > 1 var_prefix = var_stack[-1].strip() except (TypeError, AttributeError): return None # print(var_stack) item_list = [] # Get context name_only = self.autocomplete_name_only public_only = False include_globals = True line_context, context_info = get_line_context(line_prefix) if (line_context == "skip") or (var_prefix == "" and (not is_member)): return None if self.autocomplete_no_prefix: var_prefix = "" # Suggestions for user-defined type members scope_list = [] if is_member: curr_scope = file_obj.ast.get_inner_scope(ac_line + 1) type_scope = climb_type_tree(var_stack, curr_scope, self.obj_tree) # Set enclosing type as scope if type_scope is None: return None else: include_globals = False scope_list = [type_scope] else: scope_list = file_obj.ast.get_scopes(ac_line + 1) # Setup based on context req_callable = False abstract_only = False no_use = False type_mask = set_type_mask(False) type_mask[MODULE_TYPE_ID] = True type_mask[CLASS_TYPE_ID] = True if line_context == "mod_only": # Module names only (USE statement) for key in self.obj_tree: candidate = self.obj_tree[key][0] if ( candidate.get_type() == MODULE_TYPE_ID ) and candidate.name.lower().startswith(var_prefix): item_list.append(build_comp(candidate, name_only=True)) return item_list elif line_context == "mod_mems": # Public module members only (USE ONLY statement) name_only = True mod_name = context_info.lower() if mod_name in self.obj_tree: scope_list = [self.obj_tree[mod_name][0]] public_only = True include_globals = False type_mask[CLASS_TYPE_ID] = False else: return None elif line_context == "pro_link": # Link to local subroutine/functions type_mask = set_type_mask(True) type_mask[SUBROUTINE_TYPE_ID] = False type_mask[FUNCTION_TYPE_ID] = False name_only = True include_globals = False no_use = True elif line_context == "call": # Callable objects only ("CALL" statements) req_callable = True elif line_context == "type_only": # User-defined types only (variable definitions, select clauses) type_mask = set_type_mask(True) type_mask[CLASS_TYPE_ID] = False elif line_context == "import": # Import statement (variables and user-defined types only) name_only = True type_mask = set_type_mask(True) type_mask[CLASS_TYPE_ID] = False type_mask[VAR_TYPE_ID] = False elif line_context == "vis": # Visibility statement (local objects only) include_globals = False name_only = True type_mask = set_type_mask(True) type_mask[CLASS_TYPE_ID] = False type_mask[VAR_TYPE_ID] = False type_mask[SUBROUTINE_TYPE_ID] = False type_mask[FUNCTION_TYPE_ID] = False curr_scope = [file_obj.ast.get_inner_scope(ac_line + 1)] elif line_context == "int_only": # Interfaces only (procedure definitions) abstract_only = True include_globals = False name_only = True type_mask = set_type_mask(True) type_mask[SUBROUTINE_TYPE_ID] = False type_mask[FUNCTION_TYPE_ID] = False elif line_context == "var_only": # Variables only (variable definitions) name_only = True type_mask[SUBROUTINE_TYPE_ID] = True type_mask[FUNCTION_TYPE_ID] = True elif line_context == "var_key": # Variable definition keywords only (variable definition) key_context = 0 enc_scope_type = scope_list[-1].get_type() if enc_scope_type == MODULE_TYPE_ID: key_context = 1 elif (enc_scope_type == SUBROUTINE_TYPE_ID) or ( enc_scope_type == FUNCTION_TYPE_ID ): key_context = 2 elif enc_scope_type == CLASS_TYPE_ID: key_context = 3 for candidate in get_intrinsic_keywords( self.statements, self.keywords, 
key_context ): if candidate.name.lower().startswith(var_prefix): item_list.append(build_comp(candidate)) return item_list elif line_context == "first": # First word -> default context plus Fortran statements for candidate in get_intrinsic_keywords(self.statements, self.keywords, 0): if candidate.name.lower().startswith(var_prefix): item_list.append(build_comp(candidate)) # Build completion list candidate_list, rename_list = get_candidates( scope_list, var_prefix, include_globals, public_only, abstract_only, no_use ) for (i, candidate) in enumerate(candidate_list): # Skip module names (only valid in USE) candidate_type = candidate.get_type() if type_mask[candidate_type]: continue if req_callable and (not candidate.is_callable()): continue # name_replace = rename_list[i] if candidate_type == INTERFACE_TYPE_ID: tmp_list = [] if name_replace is None: name_replace = candidate.name for member in candidate.mems: tmp_text, _ = member.get_snippet(name_replace) if tmp_list.count(tmp_text) > 0: continue tmp_list.append(tmp_text) item_list.append( build_comp( member, name_replace=name_replace, is_interface=True, is_member=is_member, ) ) continue # item_list.append( build_comp(candidate, name_only=name_only, name_replace=name_replace) ) return item_list def get_definition( self, def_file: FortranFile, def_line: int, def_char: int, hover_req: bool = False, ): """Return the Fortran object for the definition that matches the Fortran file, line number, column number Parameters ---------- def_file : fortran_file File to query def_line : int Line position in the file def_char : int Column position in the file hover_req : bool, optional Flag to enable if calling from a hover request, by default False Returns ------- fortran_var | fortran_include | None Fortran object """ # Get full line (and possible continuations) from file pre_lines, curr_line, _ = def_file.get_code_line( def_line, forward=False, strip_comment=True ) # Returns none for string literals, when the query is in the middle line_prefix = get_line_prefix(pre_lines, curr_line, def_char, qs=False) if line_prefix is None: return None is_member = False try: var_stack = get_var_stack(line_prefix) is_member = len(var_stack) > 1 def_name = expand_name(curr_line, def_char) except (TypeError, AttributeError): return None if def_name == "": return None # Search in Preprocessor defined variables if def_name in def_file.pp_defs: var = Variable( def_file.ast, def_line + 1, def_name, f"#define {def_name} {def_file.pp_defs.get(def_name)}", [], ) return var curr_scope = def_file.ast.get_inner_scope(def_line + 1) # Traverse type tree if necessary if is_member: type_scope = climb_type_tree(var_stack, curr_scope, self.obj_tree) # Set enclosing type as scope if type_scope is None: return None else: curr_scope = type_scope # Find in available scopes var_obj = None if curr_scope is not None: if ( (curr_scope.get_type() == CLASS_TYPE_ID) and (not is_member) and ( ( line_prefix.lstrip().lower().startswith("procedure") and (line_prefix.count("=>") > 0) ) or TYPE_DEF_REGEX.match(line_prefix) ) ): curr_scope = curr_scope.parent var_obj = find_in_scope( curr_scope, def_name, self.obj_tree, var_line_number=def_line + 1 ) # Search in global scope if var_obj is None: if is_member: return None key = def_name.lower() if key in self.obj_tree: return self.obj_tree[key][0] for obj in self.intrinsic_funs: if obj.name.lower() == key: return obj # If we have a Fortran literal constant e.g. 100, .false., etc. 
# Return a dummy object with the correct type & position in the doc if hover_req and curr_scope: var_type = None if FRegex.NUMBER.match(def_name): if any(s in def_name for s in [".", "e", "d"]): var_type = f"{FORTRAN_LITERAL}REAL" else: var_type = f"{FORTRAN_LITERAL}INTEGER" elif FRegex.LOGICAL.match(def_name): var_type = f"{FORTRAN_LITERAL}LOGICAL" elif FRegex.SQ_STRING.match(def_name) or FRegex.DQ_STRING.match( def_name ): var_type = f"{FORTRAN_LITERAL}STRING" if var_type: return Variable( curr_scope.file_ast, def_line + 1, def_name, var_type, curr_scope.keywords, ) else: return var_obj return None def serve_signature(self, request: dict): def get_sub_name(line: str): _, sections = get_paren_level(line) if sections[0].start <= 1: return None, None, None arg_string = line[sections[0].start : sections[-1].end] sub_string, sections = get_paren_level(line[: sections[0].start - 1]) return sub_string.strip(), arg_string.split(","), sections[-1].start def check_optional(arg, params: dict): opt_split = arg.split("=") if len(opt_split) > 1: opt_arg = opt_split[0].strip().lower() for i, param in enumerate(params): param_split = param["label"].split("=")[0] if param_split.lower() == opt_arg: return i return None # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] path: str = path_from_uri(uri) file_obj = self.workspace.get(path) if file_obj is None: return None # Check line sig_line: int = params["position"]["line"] sig_char: int = params["position"]["character"] # Get full line (and possible continuations) from file pre_lines, curr_line, _ = file_obj.get_code_line( sig_line, forward=False, strip_comment=True ) line_prefix = get_line_prefix(pre_lines, curr_line, sig_char) if line_prefix is None: return None # Test if scope declaration or end statement if FRegex.SCOPE_DEF.match(curr_line) or FRegex.END.match(curr_line): return None is_member = False try: sub_name, arg_strings, sub_end = get_sub_name(line_prefix) var_stack = get_var_stack(sub_name) is_member = len(var_stack) > 1 except (TypeError, AttributeError): return None # curr_scope = file_obj.ast.get_inner_scope(sig_line + 1) # Traverse type tree if necessary if is_member: type_scope = climb_type_tree(var_stack, curr_scope, self.obj_tree) # Set enclosing type as scope if type_scope is None: curr_scope = None else: curr_scope = type_scope sub_name = var_stack[-1] # Find in available scopes var_obj = None if curr_scope is not None: var_obj = find_in_scope(curr_scope, sub_name, self.obj_tree) # Search in global scope if var_obj is None: key = sub_name.lower() if key in self.obj_tree: var_obj = self.obj_tree[key][0] else: for obj in self.intrinsic_funs: if obj.name.lower() == key: var_obj = obj break # Check keywords if (var_obj is None) and ( FRegex.INT_STMNT.match(line_prefix[:sub_end]) is not None ): key = sub_name.lower() for candidate in get_intrinsic_keywords(self.statements, self.keywords, 0): if candidate.name.lower() == key: var_obj = candidate break if var_obj is None: return None # Build signature label, doc_str, params = var_obj.get_signature() if label is None: return None # Find current parameter by index or by # looking at last arg with optional name param_num = len(arg_strings) - 1 opt_num = check_optional(arg_strings[-1], params) if opt_num is None: if len(arg_strings) > 1: opt_num = check_optional(arg_strings[-2], params) if opt_num is not None: param_num = opt_num + 1 else: param_num = opt_num signature = {"label": label, "parameters": params} if doc_str is not None: 
signature["documentation"] = doc_str req_dict = {"signatures": [signature], "activeParameter": param_num} return req_dict def get_all_references( self, def_obj, type_mem: bool, file_obj: FortranFile = None, ): # Search through all files def_name: str = def_obj.name.lower() def_fqsn: str = def_obj.FQSN NAME_REGEX = re.compile(rf"(?:\W|^)({def_name})(?:\W|$)", re.I) if file_obj is None: file_set = self.workspace.items() else: file_set = ((file_obj.path, file_obj),) # A container that includes all the FQSN signatures for objects that # are linked to the rename request and that should also be replaced override_cache: list[str] = [] refs = {} ref_objs = [] for filename, file_obj in file_set: file_refs = [] # Search through file line by line for (i, line) in enumerate(file_obj.contents_split): if len(line) == 0: continue # Skip comment lines line = file_obj.strip_comment(line) if (line == "") or (line[0] == "#"): continue for match in NAME_REGEX.finditer(line): var_def = self.get_definition(file_obj, i, match.start(1) + 1) if var_def is None: continue ref_match = False try: # NOTE: throws AttributeError if object is intrinsic since # it will not have a FQSN # BUG: intrinsic objects should be excluded, but get_definition # does not recognise the arguments if def_fqsn == var_def.FQSN or var_def.FQSN in override_cache: ref_match = True # NOTE: throws AttributeError if object is None elif var_def.parent.get_type() == CLASS_TYPE_ID: if type_mem: for inherit_def in var_def.parent.get_overridden( def_name ): if def_fqsn == inherit_def.FQSN: ref_match = True override_cache.append(var_def.FQSN) break # Standalone definition of a type-bound procedure, # no pointer replace all its instances in the current scope # NOTE: throws AttributeError if object has no link_obj if ( var_def.sline - 1 == i and var_def.file_ast.path == filename and line.count("=>") == 0 and var_def.link_obj is def_obj ): ref_objs.append(var_def) override_cache.append(var_def.FQSN) ref_match = True # Object is a Method and the linked object i.e. 
the # implementation # shares the same parent signature as the current variable # NOTE:: throws and AttributeError if the link_object or # parent are not present OR they are set to None # hence not having a FQSN elif ( def_obj.get_type(True) == METH_TYPE_ID and def_obj.link_obj.parent.FQSN == var_def.parent.FQSN ): ref_match = True override_cache.append(var_def.FQSN) except AttributeError: ref_match = False if ref_match: file_refs.append([i, match.start(1), match.end(1)]) if len(file_refs) > 0: refs[filename] = file_refs return refs, ref_objs def serve_references(self, request): # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] def_line: int = params["position"]["line"] def_char: int = params["position"]["character"] path = path_from_uri(uri) # Find object file_obj = self.workspace.get(path) if file_obj is None: return None def_obj = self.get_definition(file_obj, def_line, def_char) if def_obj is None: return None # Determine global accessibility and type membership restrict_file = None type_mem = False if def_obj.FQSN.count(":") > 2: if def_obj.parent.get_type() == CLASS_TYPE_ID: type_mem = True else: restrict_file = def_obj.file_ast.file if restrict_file is None: return None all_refs, _ = self.get_all_references(def_obj, type_mem, file_obj=restrict_file) refs = [] for (filename, file_refs) in all_refs.items(): for ref in file_refs: refs.append( uri_json(path_to_uri(filename), ref[0], ref[1], ref[0], ref[2]) ) return refs def serve_definition(self, request: dict): # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] def_line: int = params["position"]["line"] def_char: int = params["position"]["character"] path = path_from_uri(uri) # Find object file_obj = self.workspace.get(path) if file_obj is None: return None var_obj = self.get_definition(file_obj, def_line, def_char) if var_obj is None: return None # Construct link reference if var_obj.file_ast.file is not None: return self._create_ref_link(var_obj) return None def serve_hover(self, request: dict): def create_hover(string: str, highlight: bool): if highlight: return {"language": self.hover_language, "value": string} else: return string def create_signature_hover(): sig_request = request.copy() sig_result = self.serve_signature(sig_request) try: arg_id = sig_result.get("activeParameter") if arg_id is not None: arg_info = sig_result["signatures"][0]["parameters"][arg_id] arg_doc = arg_info["documentation"] doc_split = arg_doc.find("\n !!") if doc_split < 0: arg_string = f"{arg_doc} :: {arg_info['label']}" else: arg_string = ( f"{arg_doc[:doc_split]} :: " f"{arg_info['label']}{arg_doc[doc_split:]}" ) return create_hover(arg_string, True) except: pass # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] def_line: int = params["position"]["line"] def_char: int = params["position"]["character"] path = path_from_uri(uri) file_obj = self.workspace.get(path) if file_obj is None: return None # Find object var_obj = self.get_definition(file_obj, def_line, def_char, hover_req=True) if var_obj is None: return None # Construct hover information var_type = var_obj.get_type() hover_array = [] if var_type in (SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID): hover_str, highlight = var_obj.get_hover(long=True) hover_array.append(create_hover(hover_str, highlight)) elif var_type == INTERFACE_TYPE_ID: for member in var_obj.mems: hover_str, highlight = member.get_hover(long=True) if hover_str is not None: 
hover_array.append(create_hover(hover_str, highlight)) elif var_type == VAR_TYPE_ID: # Unless we have a Fortran literal include the desc in the hover msg # See get_definition for an explanation about this default name if not var_obj.desc.startswith(FORTRAN_LITERAL): hover_str, highlight = var_obj.get_hover() hover_array.append(create_hover(hover_str, highlight)) # Hover for Literal variables elif var_obj.desc.endswith("REAL"): hover_array.append(create_hover("REAL", True)) elif var_obj.desc.endswith("INTEGER"): hover_array.append(create_hover("INTEGER", True)) elif var_obj.desc.endswith("LOGICAL"): hover_array.append(create_hover("LOGICAL", True)) elif var_obj.desc.endswith("STRING"): hover_str = f"CHARACTER(LEN={len(var_obj.name)-2})" hover_array.append(create_hover(hover_str, True)) # Include the signature if one is present e.g. if in an argument list if self.hover_signature: hover_str = create_signature_hover() if hover_str is not None: hover_array.append(hover_str) # if len(hover_array) > 0: return {"contents": hover_array} return None def serve_implementation(self, request: dict): # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] def_line: int = params["position"]["line"] def_char: int = params["position"]["character"] path = path_from_uri(uri) file_obj = self.workspace.get(path) if file_obj is None: return None # Find object var_obj = self.get_definition(file_obj, def_line, def_char) if var_obj is None: return None # Intrinsics do not have implementations we can access if isinstance(var_obj, Intrinsic): return None # Construct implementation reference if var_obj.parent.get_type() == CLASS_TYPE_ID: impl_obj = var_obj.link_obj if (impl_obj is not None) and (impl_obj.file_ast.file is not None): return self._create_ref_link(impl_obj) return None def serve_rename(self, request: dict): # Get parameters from request params: dict = request["params"] uri: str = params["textDocument"]["uri"] def_line: int = params["position"]["line"] def_char: int = params["position"]["character"] path = path_from_uri(uri) # Find object file_obj = self.workspace.get(path) if file_obj is None: return None def_obj = self.get_definition(file_obj, def_line, def_char) if def_obj is None: return None if isinstance(def_obj, Intrinsic): self.post_message("Rename failed: Cannot rename intrinsics", Severity.warn) return None # Determine global accesibility and type membership restrict_file = None type_mem = False if def_obj.FQSN.count(":") > 2: if def_obj.parent.get_type() == CLASS_TYPE_ID: type_mem = True else: restrict_file = def_obj.file_ast.file if restrict_file is None: return None all_refs, ref_objs = self.get_all_references( def_obj, type_mem, file_obj=restrict_file ) if len(all_refs) == 0: self.post_message("Rename failed: No usages found to rename", Severity.warn) return None # Create rename changes new_name = params["newName"] changes: dict[str, list[dict]] = {} for filename, file_refs in all_refs.items(): file_uri = path_to_uri(filename) changes[file_uri] = [] for ref in file_refs: changes[file_uri].append( change_json(new_name, ref[0], ref[1], ref[0], ref[2]) ) return {"changes": changes} def serve_codeActions(self, request: dict): params: dict = request["params"] uri: str = params["textDocument"]["uri"] sline: int = params["range"]["start"]["line"] eline: int = params["range"]["end"]["line"] path = path_from_uri(uri) file_obj = self.workspace.get(path) # Find object if file_obj is None: return None curr_scope = file_obj.ast.get_inner_scope(sline) if 
curr_scope is None: return None action_list = curr_scope.get_actions(sline, eline) if action_list is None: return None # Convert diagnostics for action in action_list: diagnostics = action.get("diagnostics") if diagnostics is not None: new_diags = [] for diagnostic in diagnostics: new_diags.append(diagnostic.build(file_obj)) action["diagnostics"] = new_diags return action_list def send_diagnostics(self, uri: str): diag_results, diag_exp = self.get_diagnostics(uri) if diag_results is not None: self.conn.send_notification( "textDocument/publishDiagnostics", {"uri": uri, "diagnostics": diag_results}, ) elif diag_exp is not None: self.conn.write_error( -1, code=-32603, message=str(diag_exp), data={ "traceback": traceback.format_exc(), }, ) def get_diagnostics(self, uri: str): filepath = path_from_uri(uri) file_obj = self.workspace.get(filepath) if file_obj is not None: try: diags = file_obj.check_file( self.obj_tree, max_line_length=self.max_line_length, max_comment_line_length=self.max_comment_line_length, ) except Exception as e: return None, e else: return diags, None return None, None def serve_onChange(self, request: dict): # Update workspace from file sent by editor params: dict = request["params"] uri: str = params["textDocument"]["uri"] path = path_from_uri(uri) file_obj = self.workspace.get(path) if file_obj is None: self.post_message(f"Change request failed for unknown file '{path}'") return else: # Update file contents with changes reparse_req = True if self.sync_type == 1: file_obj.apply_change(params["contentChanges"][0]) else: try: reparse_req = False for change in params["contentChanges"]: reparse_flag = file_obj.apply_change(change) reparse_req = reparse_req or reparse_flag except: self.post_message( f"Change request failed for file '{path}': Could not apply" " change", Severity.error, exc_info=True, ) return # Parse newly updated file if reparse_req: _, err_str = self.update_workspace_file(path, update_links=True) if err_str is not None: self.post_message(f"Change request failed for file '{path}': {err_str}") return # Update include statements linking to this file for _, tmp_file in self.workspace.items(): tmp_file.ast.resolve_includes(self.workspace, path=path) file_obj.ast.resolve_includes(self.workspace) # Update inheritance (currently file only) # tmp_file.ast.resolve_links(self.obj_tree, self.link_version) elif file_obj.preproc: file_obj.preprocess(pp_defs=self.pp_defs) self.pp_defs = {**self.pp_defs, **file_obj.pp_defs} def serve_onOpen(self, request: dict): self.serve_onSave(request, did_open=True) def serve_onClose(self, request: dict): self.serve_onSave(request, did_close=True) def serve_onSave( self, request: dict, did_open: bool = False, did_close: bool = False ): # Update workspace from file on disk params: dict = request["params"] uri: str = params["textDocument"]["uri"] filepath = path_from_uri(uri) # Skip update and remove objects if file is deleted if did_close and (not os.path.isfile(filepath)): # Remove old objects from tree file_obj = self.workspace.get(filepath) if file_obj is not None: ast_old = file_obj.ast if ast_old is not None: for key in ast_old.global_dict: self.obj_tree.pop(key, None) return did_change, err_str = self.update_workspace_file( filepath, read_file=True, allow_empty=did_open ) if err_str is not None: self.post_message(f"Save request failed for file '{filepath}': {err_str}") return if did_change: # Update include statements linking to this file for _, file_obj in self.workspace.items(): file_obj.ast.resolve_includes(self.workspace, 
path=filepath) file_obj = self.workspace.get(filepath) file_obj.ast.resolve_includes(self.workspace) # Update inheritance/links self.link_version = (self.link_version + 1) % 1000 for _, file_obj in self.workspace.items(): file_obj.ast.resolve_links(self.obj_tree, self.link_version) if not self.disable_diagnostics: self.send_diagnostics(uri) def update_workspace_file( self, filepath: str, read_file: bool = False, allow_empty: bool = False, update_links: bool = False, ): # Update workspace from file contents and path try: file_obj = self.workspace.get(filepath) if read_file: if file_obj is None: file_obj = FortranFile(filepath, self.pp_suffixes) # Create empty file if not yet saved to disk if not os.path.isfile(filepath): if allow_empty: file_obj.ast = FortranAST(file_obj) self.workspace[filepath] = file_obj return False, None else: return False, "File does not exist" # Error during load err_string, file_changed = file_obj.load_from_disk() if err_string: log.error(f"{err_string} : {filepath}") return False, err_string # Error during file read if not file_changed: return False, None ast_new = file_obj.parse( pp_defs=self.pp_defs, include_dirs=self.include_dirs ) # Add the included read in pp_defs from to the ones specified in the # configuration file self.pp_defs = {**self.pp_defs, **file_obj.pp_defs} except: log.error("Error while parsing file %s", filepath, exc_info=True) return False, "Error during parsing" # Error during parsing # Remove old objects from tree ast_old = file_obj.ast if ast_old is not None: for key in ast_old.global_dict: self.obj_tree.pop(key, None) # Add new file to workspace file_obj.ast = ast_new if filepath not in self.workspace: self.workspace[filepath] = file_obj # Add top-level objects to object tree for key, obj in ast_new.global_dict.items(): self.obj_tree[key] = [obj, filepath] # Update local links/inheritance if necessary if update_links: self.link_version = (self.link_version + 1) % 1000 ast_new.resolve_links(self.obj_tree, self.link_version) return True, None @staticmethod def file_init( filepath: str, pp_defs: dict, pp_suffixes: list[str], include_dirs: set[str], sort: bool, ): """Initialise a Fortran file Parameters ---------- filepath : str Path to file pp_defs : dict Preprocessor definitions pp_suffixes : list[str] Preprocessor file extension, additional to default include_dirs : set[str] Preprocessor only include directories, not used by normal parser sort : bool Whether or not keywords should be sorted Returns ------- fortran_file | str A Fortran file object or a string containing the error message """ file_obj = FortranFile(filepath, pp_suffixes) err_str, _ = file_obj.load_from_disk() if err_str: return err_str try: # On Windows multiprocess does not propagate global variables in a shell. # Windows uses 'spawn' while Unix uses 'fork' which propagates globals. # This is a bypass. 
# For more see on SO: shorturl.at/hwAG1 set_keyword_ordering(sort) file_ast = file_obj.parse(pp_defs=pp_defs, include_dirs=include_dirs) except: log.error("Error while parsing file %s", filepath, exc_info=True) return "Error during parsing" file_obj.ast = file_ast return file_obj def workspace_init(self): """Initialise the workspace root across multiple threads""" file_list = self._get_source_files() # Process files pool = Pool(processes=self.nthreads) results = {} for filepath in file_list: results[filepath] = pool.apply_async( self.file_init, args=( filepath, self.pp_defs, self.pp_suffixes, self.include_dirs, self.sort_keywords, ), ) pool.close() pool.join() for path, result in results.items(): result_obj = result.get() if isinstance(result_obj, str): self.post_message( f"Initialization failed for file '{path}': {result_obj}" ) continue self.workspace[path] = result_obj # Add top-level objects to object tree ast_new = self.workspace[path].ast for key in ast_new.global_dict: self.obj_tree[key] = [ast_new.global_dict[key], path] # Update include statements for _, file_obj in self.workspace.items(): file_obj.ast.resolve_includes(self.workspace) # Update inheritance/links self.link_version = (self.link_version + 1) % 1000 for _, file_obj in self.workspace.items(): file_obj.ast.resolve_links(self.obj_tree, self.link_version) def serve_exit(self, request: dict) -> None: # Exit server self.workspace = {} self.obj_tree = {} self.running = False def serve_default(self, request: dict): """Raise an error in the Language Server Parameters ---------- request : dict client dictionary with requests Raises ------ JSONRPC2Error error with code -32601 """ # Default handler (errors!) raise JSONRPC2Error( code=-32601, message=f"method {request['method']} not found" ) def _load_config_file(self) -> None: """Loads the configuration file for the Language Server""" # Check for config files default_conf_files = [self.config, ".fortlsrc", ".fortls.json", ".fortls"] present_conf_files = [ os.path.isfile(os.path.join(self.root_path, f)) for f in default_conf_files ] if not any(present_conf_files): return None # Load the first config file found for f, present in zip(default_conf_files, present_conf_files): if not present: continue config_path = os.path.join(self.root_path, f) break try: with open(config_path, "r") as jsonfile: config_dict = json5.load(jsonfile) # Include and Exclude directories self._load_config_file_dirs(config_dict) # General options self._load_config_file_general(config_dict) # Preprocessor options self._load_config_file_preproc(config_dict) # Debug options debugging: bool = config_dict.get("debug_log", self.debug_log) # If conf option is different than the debug option passed as a # command line argument return True so that debug log is setup if debugging != self.debug_log and not self.debug_log: self.debug_log = True except FileNotFoundError: self.post_message(f"Configuration file '{self.config}' not found") # Erroneous json file syntax except ValueError as e: msg = f'Error: "{e}" while reading "{self.config}" Configuration file' self.post_message(msg) def _load_config_file_dirs(self, config_dict: dict) -> None: self.excl_paths = set(config_dict.get("excl_paths", self.excl_paths)) self.source_dirs = set(config_dict.get("source_dirs", self.source_dirs)) self.incl_suffixes = set(config_dict.get("incl_suffixes", self.incl_suffixes)) # Update the source file REGEX self.FORTRAN_SRC_EXT_REGEX = src_file_exts(self.incl_suffixes) self.excl_suffixes = set(config_dict.get("excl_suffixes", 
self.excl_suffixes)) def _load_config_file_general(self, config_dict: dict) -> None: # General options ------------------------------------------------------ self.nthreads = config_dict.get("nthreads", self.nthreads) self.notify_init = config_dict.get("notify_init", self.notify_init) self.incremental_sync = config_dict.get( "incremental_sync", self.incremental_sync ) self.sync_type: int = 2 if self.incremental_sync else 1 self.sort_keywords = config_dict.get("sort_keywords", self.sort_keywords) self.disable_autoupdate = config_dict.get( "disable_autoupdate", self.disable_autoupdate ) # Autocomplete options ------------------------------------------------- self.autocomplete_no_prefix = config_dict.get( "autocomplete_no_prefix", self.autocomplete_no_prefix ) self.autocomplete_no_snippets = config_dict.get( "autocomplete_no_snippets", self.autocomplete_no_snippets ) self.autocomplete_name_only = config_dict.get( "autocomplete_name_only", self.autocomplete_name_only ) self.lowercase_intrinsics = config_dict.get( "lowercase_intrinsics", self.lowercase_intrinsics ) self.use_signature_help = config_dict.get( "use_signature_help", self.use_signature_help ) # Hover options -------------------------------------------------------- self.hover_signature = config_dict.get("hover_signature", self.hover_signature) self.hover_language = config_dict.get("hover_language", self.hover_language) # Diagnostic options --------------------------------------------------- self.max_line_length = config_dict.get("max_line_length", self.max_line_length) self.max_comment_line_length = config_dict.get( "max_comment_line_length", self.max_comment_line_length ) self.disable_diagnostics = config_dict.get( "disable_diagnostics", self.disable_diagnostics ) # Symbols options ------------------------------------------------------ self.symbol_skip_mem = config_dict.get("symbol_skip_mem", self.symbol_skip_mem) # Code Actions options ------------------------------------------------- self.enable_code_actions = config_dict.get( "enable_code_actions", self.enable_code_actions ) def _load_config_file_preproc(self, config_dict: dict) -> None: self.pp_suffixes = config_dict.get("pp_suffixes", None) self.pp_defs = config_dict.get("pp_defs", {}) if isinstance(self.pp_defs, list): self.pp_defs = {key: "" for key in self.pp_defs} self.include_dirs = set(config_dict.get("include_dirs", self.include_dirs)) def _resolve_globs_in_paths(self) -> None: """Resolves glob patterns in `excl_paths`, `source_dirs` and `include_dirs`. Also performs the exclusion of `excl_paths` from `source_dirs`. 
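
        Examples
        --------
        A hypothetical `source_dirs` entry such as `"src/**"` is expanded
        relative to `root_path` into the set of existing directories it
        matches; any resulting directory that also appears in the resolved
        `excl_paths` is afterwards removed from `source_dirs`.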
""" # Exclude paths (directories & files) with glob resolution excl_paths = set() for path in self.excl_paths: excl_paths.update(set(resolve_globs(path, self.root_path))) self.excl_paths = excl_paths.copy() # Source directory paths (directories) with glob resolution source_dirs = set() for path in self.source_dirs: # resolve_globs filters any nonexisting directories so FileNotFoundError # found inside only_dirs can never be raised source_dirs.update(set(only_dirs(resolve_globs(path, self.root_path)))) self.source_dirs = source_dirs.copy() # Keep all directories present in source_dirs but not excl_paths self.source_dirs = {i for i in self.source_dirs if i not in self.excl_paths} # Preprocessor includes include_dirs = set() for path in self.include_dirs: # resolve_globs filters any nonexisting directories so FileNotFoundError # found inside only_dirs can never be raised include_dirs.update(set(only_dirs(resolve_globs(path, self.root_path)))) self.include_dirs = include_dirs.copy() def _add_source_dirs(self) -> None: """Will recursively add all subdirectories that contain Fortran source files only if the option `source_dirs` has not been specified in the configuration file or no configuration file is present """ # Recursively add sub-directories that only match Fortran extensions if len(self.source_dirs) != 1: return None if self.root_path not in self.source_dirs: return None self.source_dirs = set() for root, dirs, files in os.walk(self.root_path): # Match not found if not list(filter(self.FORTRAN_SRC_EXT_REGEX.search, files)): continue if root not in self.source_dirs and root not in self.excl_paths: self.source_dirs.add(str(Path(root).resolve())) def _get_source_files(self) -> list[str]: """Get all the source files present in `self.source_dirs`, exclude any files found in `self.excl_paths`^ and ignore any files ending with `self.excl_suffixes`. ^: the only case where this has not allready happened is when `source_dirs` is not specified or a configuration file is not present Returns ------- list[str] List of source Fortran source files """ # Get filenames file_list = [] for src_dir in self.source_dirs: for f in os.listdir(src_dir): p = os.path.join(src_dir, f) # Process only files if not os.path.isfile(p): continue # File extension must match supported extensions if not self.FORTRAN_SRC_EXT_REGEX.search(f): continue # File cannot be in excluded paths/files if p in self.excl_paths: continue # File cannot have an excluded extension if any(f.endswith(ext) for ext in self.excl_suffixes): continue file_list.append(p) return file_list def _config_logger(self, request) -> None: """Configures the logger to save Language Server requests/responses to a file the logger will by default output to the main (stderr, stdout) channels. 
""" file_log = True if self.debug_log and self.root_path else False fmt = "[%(levelname)-.4s - %(asctime)s] %(message)s" if file_log: fname = "fortls_debug.log" fname = os.path.join(self.root_path, fname) logging.basicConfig(filename=fname, level=logging.DEBUG, filemode="w") # Also forward logs to the console consoleHandler = logging.StreamHandler() log.addHandler(consoleHandler) log.debug("REQUEST %s %s", request.get("id"), request.get("method")) self.post_messages.append([Severity.info, "fortls debugging enabled"]) else: logging.basicConfig(format=fmt, datefmt="%H:%M:%S", level=logging.INFO) def _load_intrinsics(self) -> None: # Load intrinsics set_keyword_ordering(True) # Always sort intrinsics if self.lowercase_intrinsics: set_lowercase_intrinsics() ( self.statements, self.keywords, self.intrinsic_funs, self.intrinsic_mods, ) = load_intrinsics() for module in self.intrinsic_mods: self.obj_tree[module.FQSN] = [module, None] # Set object settings set_keyword_ordering(self.sort_keywords) def _create_ref_link(self, obj) -> dict: """Create a link reference to an object""" obj_file: FortranFile = obj.file_ast.file sline, (schar, echar) = obj_file.find_word_in_code_line(obj.sline - 1, obj.name) if schar < 0: schar = echar = 0 return uri_json(path_to_uri(obj_file.path), sline, schar, sline, echar) def _update_version_pypi(self, test: bool = False): """Fetch updates from PyPi for fortls Parameters ---------- test : bool, optional flag used to override exit checks, only for unittesting, by default False """ if self.disable_autoupdate: return False # Do not run for prerelease and dev release if self._version.is_prerelease and not test: return False try: # For security reasons register as Request before opening request = urllib.request.Request("https://pypi.org/pypi/fortls/json") with urllib.request.urlopen(request) as resp: info = json.loads(resp.read().decode("utf-8")) remote_v = version.parse(info["info"]["version"]) # Do not update from remote if it is a prerelease if remote_v.is_prerelease: return False # This is the only reliable way to compare version semantics if remote_v > self._version or test: self.post_message( "A newer version of fortls is available for download", Severity.info, ) # Anaconda environments should handle their updates through conda if os.path.exists(os.path.join(sys.prefix, "conda-meta")): return False self.post_message( f"Downloading from PyPi fortls {info['info']['version']}", Severity.info, ) # Run pip result = subprocess.run( [ sys.executable, "-m", "pip", "install", "fortls", "--upgrade", "--user", ], capture_output=True, ) if result.stdout: log.info(result.stdout.decode("utf-8")) if result.stderr: log.error(result.stderr.decode("utf-8")) return True # No internet connection exceptions except (URLError, KeyError): self.post_message("Failed to update the fortls", Severity.warn) return False class JSONRPC2Error(Exception): def __init__(self, code, message, data=None): self.code = code self.message = message self.data = data fortran-language-server-2.13.0+dfsg.1/fortls/objects.py000066400000000000000000002321611450400537300227350ustar00rootroot00000000000000from __future__ import annotations import copy import os import re from dataclasses import dataclass, replace from typing import Pattern from fortls.constants import ( ASSOC_TYPE_ID, BASE_TYPE_ID, BLOCK_TYPE_ID, CLASS_TYPE_ID, DO_TYPE_ID, ENUM_TYPE_ID, FUNCTION_TYPE_ID, IF_TYPE_ID, INTERFACE_TYPE_ID, KEYWORD_ID_DICT, METH_TYPE_ID, MODULE_TYPE_ID, SELECT_TYPE_ID, SUBMODULE_TYPE_ID, SUBROUTINE_TYPE_ID, VAR_TYPE_ID, 
WHERE_TYPE_ID, FRegex, ) from fortls.ftypes import IncludeInfo, UseInfo from fortls.helper_functions import get_keywords, get_paren_substring, get_var_stack from fortls.json_templates import diagnostic_json, location_json, range_json from fortls.jsonrpc import path_to_uri def get_use_tree( scope: Scope, use_dict: dict[str, UseInfo], obj_tree: dict, only_list: list[str] = None, rename_map: dict[str, str] = None, curr_path: list[str] = None, ): def intersect_only(use_stmnt): tmp_list = [] tmp_map = rename_map.copy() for val1 in only_list: mapped1 = tmp_map.get(val1, val1) if mapped1 in use_stmnt.only_list: tmp_list.append(val1) new_rename = use_stmnt.rename_map.get(mapped1, None) if new_rename is not None: tmp_map[val1] = new_rename else: tmp_map.pop(val1, None) return tmp_list, tmp_map if only_list is None: only_list = [] if rename_map is None: rename_map = {} if curr_path is None: curr_path = [] # Detect and break circular references if scope.FQSN in curr_path: return use_dict new_path = curr_path + [scope.FQSN] # Add recursively for use_stmnt in scope.use: if use_stmnt.mod_name not in obj_tree: continue # Intersect parent and current ONLY list and renaming if len(only_list) == 0: merged_use_list = use_stmnt.only_list[:] merged_rename = use_stmnt.rename_map.copy() elif len(use_stmnt.only_list) == 0: merged_use_list = only_list[:] merged_rename = rename_map.copy() else: merged_use_list, merged_rename = intersect_only(use_stmnt) if len(merged_use_list) == 0: continue # Update ONLY list and renaming for current module use_dict_mod = use_dict.get(use_stmnt.mod_name) if use_dict_mod is not None: old_len = len(use_dict_mod.only_list) if (old_len > 0) and (len(merged_use_list) > 0): only_len = old_len for only_name in merged_use_list: use_dict_mod.only_list.add(only_name) if len(use_dict_mod.only_list) != only_len: only_len = len(use_dict_mod.only_list) new_rename = merged_rename.get(only_name, None) if new_rename is not None: use_dict[use_stmnt.mod_name] = replace( use_dict_mod, rename_map=merged_rename ) else: use_dict[use_stmnt.mod_name] = UseInfo(use_stmnt.mod_name, set(), {}) # Skip if we have already visited module with the same only list if old_len == len(use_dict_mod.only_list): continue else: use_dict[use_stmnt.mod_name] = UseInfo( use_stmnt.mod_name, set(merged_use_list), merged_rename ) # Descend USE tree use_dict = get_use_tree( obj_tree[use_stmnt.mod_name][0], use_dict, obj_tree, merged_use_list, merged_rename, new_path, ) return use_dict def find_in_scope( scope: Scope, var_name: str, obj_tree: dict, interface: bool = False, local_only: bool = False, var_line_number: int = None, ): def check_scope( local_scope: Scope, var_name_lower: str, filter_public: bool = False, var_line_number: int = None, ): for child in local_scope.get_children(): if child.name.startswith("#GEN_INT"): tmp_var = check_scope(child, var_name_lower, filter_public) if tmp_var is not None: return tmp_var if filter_public: if (child.vis < 0) or ((local_scope.def_vis < 0) and (child.vis <= 0)): continue if child.name.lower() == var_name_lower: # For functions with an implicit result() variable the name # of the function is used. If we are hovering over the function # definition, we do not want the implicit result() to be returned. # If scope is from a function and child's name is same as functions name # and start of scope i.e. 
function definition is equal to the request ln # then we are need to skip this child if ( isinstance(local_scope, Function) and local_scope.name.lower() == child.name.lower() and var_line_number in (local_scope.sline, local_scope.eline) ): return None return child return None # var_name_lower = var_name.lower() # Check local scope if scope is None: return None tmp_var = check_scope(scope, var_name_lower, var_line_number=var_line_number) if local_only or (tmp_var is not None): return tmp_var # Check INCLUDE statements if scope.file_ast.include_statements: strip_str = var_name.replace('"', "") strip_str = strip_str.replace("'", "") for inc in scope.file_ast.include_statements: if strip_str == inc.path: if inc.file is None: return None return Include(inc.file.ast, inc.line_number, inc.path) # Setup USE search use_dict = get_use_tree(scope, {}, obj_tree) # Look in found use modules for use_mod, use_info in use_dict.items(): use_scope = obj_tree[use_mod][0] # Module name is request if use_mod.lower() == var_name_lower: return use_scope # Filter children by only_list if len(use_info.only_list) > 0: if var_name_lower not in use_info.only_list: continue mod_name = use_info.rename_map.get(var_name_lower, var_name_lower) tmp_var = check_scope(use_scope, mod_name, filter_public=True) if tmp_var is not None: return tmp_var # Only search local and imported names for interfaces if interface: in_import = False for use_stmnt in scope.use: if use_stmnt.mod_name.startswith("#import"): if var_name_lower in use_stmnt.only_list: in_import = True break if not in_import: return None # Check parent scopes if scope.parent is not None: tmp_var = find_in_scope(scope.parent, var_name, obj_tree) if tmp_var is not None: return tmp_var # Check ancestor scopes for ancestor in scope.get_ancestors(): tmp_var = find_in_scope(ancestor, var_name, obj_tree) if tmp_var is not None: return tmp_var return None def find_in_workspace( obj_tree: dict, query: str, filter_public: bool = False, exact_match: bool = False ): def add_children(mod_obj, query: str): tmp_list = [] for child_obj in mod_obj.get_children(filter_public): if child_obj.name.lower().find(query) >= 0: tmp_list.append(child_obj) return tmp_list matching_symbols = [] query = query.lower() for (_, obj_packed) in obj_tree.items(): top_obj = obj_packed[0] top_uri = obj_packed[1] if top_uri is not None: if top_obj.name.lower().find(query) > -1: matching_symbols.append(top_obj) if top_obj.get_type() == MODULE_TYPE_ID: matching_symbols += add_children(top_obj, query) if exact_match: filtered_symbols = [] n = len(query) for symbol in matching_symbols: if len(symbol.name) == n: filtered_symbols.append(symbol) matching_symbols = filtered_symbols return matching_symbols def climb_type_tree(var_stack, curr_scope: Scope, obj_tree: dict): """Walk up user-defined type sequence to determine final field type""" # Find base variable in current scope iVar = 0 var_name = var_stack[iVar].strip().lower() var_obj = find_in_scope(curr_scope, var_name, obj_tree) if var_obj is None: return None # Search for type, then next variable in stack and so on for _ in range(30): # Find variable type object type_obj = var_obj.get_type_obj(obj_tree) # Return if not found if type_obj is None: return None # Go to next variable in stack and exit if done iVar += 1 if iVar == len(var_stack) - 1: break # Find next variable by name in type var_name = var_stack[iVar].strip().lower() var_obj = find_in_scope(type_obj, var_name, obj_tree, local_only=True) # Return if not found if var_obj is None: return None else: 
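# --- Illustrative sketch (hedged) -------------------------------------------
# The loop in climb_type_tree resolves a chain such as `obj%member%field` by
# repeatedly jumping from a variable to its derived type and looking the next
# name up inside that type.  The toy stand-in below uses plain dicts instead
# of fortls scope objects; the structure is an assumption for illustration.
def _walk_type_chain_demo(var_stack, scope, types):
    # scope maps variable name -> {"type": type_name}
    # types maps type_name -> {member name -> {"type": ...}}
    obj = scope.get(var_stack[0].strip().lower())
    type_obj = None
    for name in var_stack[1:]:
        if obj is None:
            return None
        type_obj = types.get(obj.get("type"))  # type of the current variable
        if type_obj is None:
            return None
        obj = type_obj.get(name.strip().lower())  # next member in the chain
    return type_obj
# e.g. _walk_type_chain_demo(["obj", "member", "field"],
#                            {"obj": {"type": "t_a"}},
#                            {"t_a": {"member": {"type": "t_b"}},
#                             "t_b": {"field": {}}})  -> members of "t_b"
# -----------------------------------------------------------------------------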
raise KeyError return type_obj # Helper classes class USE_line: def __init__( self, mod_name: str, line_number: int, only_list: list = None, rename_map: dict = None, ): self.mod_name: str = mod_name.lower() self.line_number: int = line_number if only_list is not None: self.only_list: list = [only.lower() for only in only_list] if rename_map is not None: self.rename_map: dict = { key.lower(): value.lower() for key, value in rename_map.items() } @dataclass class AssociateMap: var: Variable bind_name: str link_name: str class Diagnostic: def __init__( self, sline: int, message: str, severity: int = 1, find_word: str = None ): self.sline: int = sline self.message: str = message self.severity: int = severity self.find_word: str = find_word self.has_related: bool = False self.related_path = None self.related_line = None self.related_message = None def add_related(self, path: str, line: int, message: str): self.has_related = True self.related_path = path self.related_line = line self.related_message = message def build(self, file_obj): schar = echar = 0 if self.find_word is not None: self.sline, obj_range = file_obj.find_word_in_code_line( self.sline, self.find_word ) if obj_range.start >= 0: schar = obj_range.start echar = obj_range.end diag = diagnostic_json( self.sline, schar, self.sline, echar, self.message, self.severity ) if self.has_related: diag["relatedInformation"] = [ { **location_json( path_to_uri(self.related_path), self.related_line, 0 ), "message": self.related_message, } ] return diag # Fortran object classes class FortranObj: def __init__(self): self.vis: int = 0 self.def_vis: int = 0 self.doc_str: str = None self.parent = None self.eline: int = -1 self.implicit_vars = None def set_default_vis(self, new_vis: int): self.def_vis = new_vis def set_visibility(self, new_vis: int): self.vis = new_vis def set_parent(self, parent_obj): self.parent = parent_obj def add_doc(self, doc_str: str): self.doc_str = doc_str def update_fqsn(self, enc_scope=None): return None def end(self, line_number: int): self.eline = line_number def resolve_inherit(self, obj_tree, inherit_version): return None def require_inherit(self): return False def resolve_link(self, obj_tree): return None def require_link(self): return False def get_type(self, no_link=False): return BASE_TYPE_ID def get_type_obj(self, obj_tree): return None def get_desc(self): return "unknown" def get_snippet(self, name_replace=None, drop_arg=-1): return None, None @staticmethod def get_placeholders(arg_list: list[str]): place_holders = [] for i, arg in enumerate(arg_list): opt_split = arg.split("=") if len(opt_split) > 1: place_holders.append(f"{opt_split[0]}=${{{i+1}:{opt_split[1]}}}") else: place_holders.append(f"${{{i+1}:{arg}}}") arg_str = f"({', '.join(arg_list)})" arg_snip = f"({', '.join(place_holders)})" return arg_str, arg_snip def get_documentation(self): return self.doc_str def get_hover(self, long=False, include_doc=True, drop_arg=-1): return None, False def get_signature(self, drop_arg=-1): return None, None, None def get_interface(self, name_replace=None, drop_arg=-1, change_strings=None): return None def get_children(self, public_only=False): return [] def get_ancestors(self): return [] def get_diagnostics(self): return [] def get_implicit(self): if self.parent is None: return self.implicit_vars else: parent_implicit = self.parent.get_implicit() if (self.implicit_vars is not None) or (parent_implicit is None): return self.implicit_vars return parent_implicit def get_actions(self, sline, eline): return None def 
is_optional(self): return False def is_mod_scope(self): return False def is_callable(self): return False def is_external_int(self): return False def is_abstract(self): return False def req_named_end(self): return False def check_valid_parent(self): return True def check_definition(self, obj_tree, known_types: dict = None, interface=False): if known_types is None: known_types = {} return None, known_types class Scope(FortranObj): def __init__(self, file_ast, line_number: int, name: str, keywords: list = None): super().__init__() if keywords is None: keywords = [] self.file_ast: FortranAST = file_ast self.sline: int = line_number self.eline: int = line_number self.name: str = name self.children: list = [] self.members: list = [] self.use: list[USE_line] = [] self.keywords: list = keywords self.inherit = None self.parent = None self.contains_start = None self.implicit_line = None self.FQSN: str = self.name.lower() if file_ast.enc_scope_name is not None: self.FQSN = file_ast.enc_scope_name.lower() + "::" + self.name.lower() def copy_from(self, copy_source: Scope): # Pass the reference, we don't want shallow copy since that would still # result into 2 versions of attributes between copy_source and self for k, v in copy_source.__dict__.items(): setattr(self, k, v) def add_use( self, use_mod, line_number, only_list: list = None, rename_map: dict = None ): if only_list is None: only_list = [] if rename_map is None: rename_map = {} self.use.append(USE_line(use_mod, line_number, only_list, rename_map)) def set_inherit(self, inherit_type): self.inherit = inherit_type def set_parent(self, parent_obj): self.parent = parent_obj def set_implicit(self, implicit_flag, line_number): self.implicit_vars = implicit_flag self.implicit_line = line_number def mark_contains(self, line_number): if self.contains_start is not None: raise ValueError self.contains_start = line_number def add_child(self, child): self.children.append(child) child.set_parent(self) def update_fqsn(self, enc_scope=None): if enc_scope is not None: self.FQSN = enc_scope.lower() + "::" + self.name.lower() else: self.FQSN = self.name.lower() for child in self.children: child.update_fqsn(self.FQSN) def add_member(self, member): self.members.append(member) def get_children(self, public_only=False): if public_only: pub_children = [] for child in self.children: if (child.vis < 0) or ((self.def_vis < 0) and (child.vis <= 0)): continue if child.name.startswith("#GEN_INT"): pub_children.append(child) continue pub_children.append(child) return pub_children else: return copy.copy(self.children) def check_definitions(self, obj_tree): """Check for definition errors in scope""" FQSN_dict = {} for child in self.children: # Skip masking/double checks for interfaces if child.get_type() == INTERFACE_TYPE_ID: continue # Check other variables in current scope if child.FQSN in FQSN_dict: if child.sline < FQSN_dict[child.FQSN]: FQSN_dict[child.FQSN] = child.sline - 1 else: FQSN_dict[child.FQSN] = child.sline - 1 # contains_line = -1 after_contains_list = (SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID) if self.get_type() in ( MODULE_TYPE_ID, SUBMODULE_TYPE_ID, SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID, ): if self.contains_start is None: contains_line = self.eline else: contains_line = self.contains_start # Detect interface definitions is_interface = False if ( (self.parent is not None) and (self.parent.get_type() == INTERFACE_TYPE_ID) and (not self.is_mod_scope()) ): is_interface = True errors = [] known_types = {} for child in self.children: if child.name.startswith("#"): 
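# --- Illustrative sketch (hedged) -------------------------------------------
# The duplicate-declaration check in check_definitions boils down to
# remembering the first line on which each (case-insensitive) name appeared
# and flagging any later re-declaration.  Toy stand-in using tuples instead
# of Diagnostic objects; names here are assumptions for illustration only.
def _find_duplicate_declarations_demo(declarations):
    # declarations is an iterable of (name, line_number) pairs
    first_seen = {}
    duplicates = []
    for name, line in declarations:
        key = name.lower()
        if key in first_seen:
            # (name, duplicate line, line of first declaration)
            duplicates.append((name, line, first_seen[key]))
        else:
            first_seen[key] = line
    return duplicates
# e.g. _find_duplicate_declarations_demo([("x", 3), ("X", 7)]) -> [("X", 7, 3)]
# -----------------------------------------------------------------------------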
continue line_number = child.sline - 1 # Check for type definition in scope def_error, known_types = child.check_definition( obj_tree, known_types=known_types, interface=is_interface ) if def_error is not None: errors.append(def_error) # Detect contains errors if (contains_line >= child.sline) and ( child.get_type(no_link=True) in after_contains_list ): new_diag = Diagnostic( line_number, message="Subroutine/Function definition before CONTAINS statement", severity=1, ) errors.append(new_diag) # Skip masking/double checks for interfaces and members if (self.get_type() == INTERFACE_TYPE_ID) or ( child.get_type() == INTERFACE_TYPE_ID ): continue # Check other variables in current scope if child.FQSN in FQSN_dict: if line_number > FQSN_dict[child.FQSN]: new_diag = Diagnostic( line_number, message=f'Variable "{child.name}" declared twice in scope', severity=1, find_word=child.name, ) new_diag.add_related( path=self.file_ast.path, line=FQSN_dict[child.FQSN], message="First declaration", ) errors.append(new_diag) continue # Check for masking from parent scope in subroutines, functions, and blocks if (self.parent is not None) and ( self.get_type() in (SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID, BLOCK_TYPE_ID) ): parent_var = find_in_scope(self.parent, child.name, obj_tree) if parent_var is not None: # Ignore if function return variable if (self.get_type() == FUNCTION_TYPE_ID) and ( parent_var.FQSN == self.FQSN ): continue new_diag = Diagnostic( line_number, message=( f'Variable "{child.name}" masks variable in parent scope' ), severity=2, find_word=child.name, ) new_diag.add_related( path=parent_var.file_ast.path, line=parent_var.sline - 1, message="First declaration", ) errors.append(new_diag) return errors def check_use(self, obj_tree): errors = [] last_use_line = -1 for use_stmnt in self.use: last_use_line = max(last_use_line, use_stmnt.line_number) if use_stmnt.mod_name.startswith("#import"): if (self.parent is None) or ( self.parent.get_type() != INTERFACE_TYPE_ID ): new_diag = Diagnostic( use_stmnt.line_number - 1, message="IMPORT statement outside of interface", severity=1, ) errors.append(new_diag) continue if use_stmnt.mod_name not in obj_tree: new_diag = Diagnostic( use_stmnt.line_number - 1, message=f'Module "{use_stmnt.mod_name}" not found in project', severity=3, find_word=use_stmnt.mod_name, ) errors.append(new_diag) if (self.implicit_line is not None) and (last_use_line >= self.implicit_line): new_diag = Diagnostic( self.implicit_line - 1, message="USE statements after IMPLICIT statement", severity=1, find_word="IMPLICIT", ) errors.append(new_diag) return errors def add_subroutine(self, interface_string, no_contains=False): edits = [] line_number = self.eline - 1 if (self.contains_start is None) and (not no_contains): first_sub_line = line_number for child in self.children: if child.get_type() in (SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID): first_sub_line = min(first_sub_line, child.sline - 1) edits.append( { **range_json(first_sub_line, 0, first_sub_line, 0), "newText": "CONTAINS\n", } ) edits.append( { **range_json(line_number, 0, line_number, 0), "newText": interface_string + "\n", } ) return self.file_ast.path, edits class Module(Scope): def get_type(self, no_link=False): return MODULE_TYPE_ID def get_desc(self): return "MODULE" def check_valid_parent(self): if self.parent is not None: return False return True class Include(Scope): def get_desc(self): return "INCLUDE" class Program(Module): def get_desc(self): return "PROGRAM" class Submodule(Module): def __init__( self, file_ast: 
FortranAST, line_number: int, name: str, ancestor_name: str = None, ): super().__init__(file_ast, line_number, name) self.ancestor_name = ancestor_name self.ancestor_obj = None def get_type(self, no_link=False): return SUBMODULE_TYPE_ID def get_desc(self): return "SUBMODULE" def get_ancestors(self): if self.ancestor_obj is not None: great_ancestors = self.ancestor_obj.get_ancestors() if great_ancestors is not None: return [self.ancestor_obj] + great_ancestors return [self.ancestor_obj] return [] def resolve_inherit(self, obj_tree, inherit_version): if self.ancestor_name is None: return if self.ancestor_name in obj_tree: self.ancestor_obj = obj_tree[self.ancestor_name][0] def require_inherit(self): return True def resolve_link(self, obj_tree): # Link subroutine/function implementations to prototypes if self.ancestor_obj is None: return # Grab ancestor interface definitions (function/subroutine only) ancestor_interfaces = [] for child in self.ancestor_obj.children: if child.get_type() == INTERFACE_TYPE_ID: for prototype in child.children: prototype_type = prototype.get_type() if ( prototype_type in (SUBROUTINE_TYPE_ID, FUNCTION_TYPE_ID, BASE_TYPE_ID) ) and prototype.is_mod_scope(): ancestor_interfaces.append(prototype) # Match interface definitions to implementations for prototype in ancestor_interfaces: for (i, child) in enumerate(self.children): if child.name.lower() == prototype.name.lower(): # Create correct object for interface if child.get_type() == BASE_TYPE_ID: child_old = child if prototype.get_type() == SUBROUTINE_TYPE_ID: child = Subroutine( child_old.file_ast, child_old.sline, child_old.name ) elif prototype.get_type() == FUNCTION_TYPE_ID: child = Function( child_old.file_ast, child_old.sline, child_old.name ) child.copy_from(child_old) # Replace in child and scope lists self.children[i] = child for (j, file_scope) in enumerate(child.file_ast.scope_list): if file_scope is child_old: child.file_ast.scope_list[j] = child if child.get_type() == prototype.get_type(): prototype.resolve_link(obj_tree) child.copy_interface(prototype) break def require_link(self): return True class Subroutine(Scope): def __init__( self, file_ast: FortranAST, line_number: int, name: str, args: str = "", mod_flag: bool = False, keywords: list = None, ): super().__init__(file_ast, line_number, name, keywords) self.args: str = args.replace(" ", "") self.args_snip: str = self.args self.arg_objs: list = [] self.in_children: list = [] self.missing_args: list = [] self.mod_scope: bool = mod_flag def is_mod_scope(self): return self.mod_scope def is_callable(self): return True def copy_interface(self, copy_source: Subroutine) -> list[str]: # Copy arguments self.args = copy_source.args self.args_snip = copy_source.args_snip self.arg_objs = copy_source.arg_objs # Get current fields child_names = [] for child in self.children: child_names.append(child.name.lower()) # Import arg_objs from copy object self.in_children = [] for child in copy_source.arg_objs: if child is None: continue if child.name.lower() not in child_names: self.in_children.append(child) return child_names def get_children(self, public_only=False): tmp_list = copy.copy(self.children) tmp_list.extend(self.in_children) return tmp_list def resolve_arg_link(self, obj_tree): if (self.args == "") or (len(self.in_children) > 0): return arg_list = self.args.replace(" ", "").split(",") arg_list_lower = self.args.lower().replace(" ", "").split(",") self.arg_objs = [None] * len(arg_list) # check_objs = copy.copy(self.children) # for child in self.children: # if 
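# --- Illustrative sketch (hedged) -------------------------------------------
# The argument resolution below pairs each declared child with its position in
# the dummy-argument list by comparing lower-cased names; OPTIONAL arguments
# are then rendered as ``name=name`` in completion snippets.  A simplified
# stand-in (the helper name and dict layout are assumptions, not fortls API):
def _match_args_demo(arg_string, children):
    # children maps child name -> True if the child is OPTIONAL
    args = arg_string.replace(" ", "").split(",")
    lowered = [a.lower() for a in args]
    for name, is_optional in children.items():
        try:
            i = lowered.index(name.lower())
        except ValueError:
            continue  # declared entity is not a dummy argument
        if is_optional:
            args[i] = f"{args[i]}={args[i]}"
    return ",".join(args)
# e.g. _match_args_demo("a, b", {"b": True, "c": False}) -> "a,b=b"
# -----------------------------------------------------------------------------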
child.is_external_int(): # check_objs += child.get_children() self.missing_args = [] for child in self.children: ind = -1 for (i, arg) in enumerate(arg_list_lower): if arg == child.name.lower(): ind = i break # If an argument is part of an interface block go through the # block's children i.e. functions and subroutines to see if one matches elif child.name.lower().startswith("#gen_int"): for sub_child in child.children: if arg == sub_child.name: self.arg_objs[i] = sub_child break if ind < 0: if child.keywords.count(KEYWORD_ID_DICT["intent"]) > 0: self.missing_args.append(child) else: self.arg_objs[ind] = child if child.is_optional(): arg_list[ind] = f"{arg_list[ind]}={arg_list[ind]}" self.args_snip = ",".join(arg_list) def resolve_link(self, obj_tree): self.resolve_arg_link(obj_tree) def require_link(self): return True def get_type(self, no_link=False): return SUBROUTINE_TYPE_ID def get_snippet(self, name_replace=None, drop_arg=-1): arg_list = self.args_snip.split(",") if (drop_arg >= 0) and (drop_arg < len(arg_list)): del arg_list[drop_arg] arg_snip = None if len(arg_list) > 0: arg_str, arg_snip = self.get_placeholders(arg_list) else: arg_str = "()" name = self.name if name_replace is not None: name = name_replace snippet = None if arg_snip is not None: snippet = name + arg_snip return name + arg_str, snippet def get_desc(self): return "SUBROUTINE" def get_hover(self, long=False, include_doc=True, drop_arg=-1): sub_sig, _ = self.get_snippet(drop_arg=drop_arg) keyword_list = get_keywords(self.keywords) keyword_list.append(f"{self.get_desc()} ") hover_array = [" ".join(keyword_list) + sub_sig] hover_array = self.get_docs_full(hover_array, long, include_doc, drop_arg) return "\n ".join(hover_array), long def get_docs_full( self, hover_array: list[str], long=False, include_doc=True, drop_arg=-1 ): doc_str = self.get_documentation() if include_doc and doc_str is not None: hover_array[0] += "\n" + doc_str if long: for i, arg_obj in enumerate(self.arg_objs): if arg_obj is None or i == drop_arg: continue arg_doc, _ = arg_obj.get_hover(include_doc=False) hover_array.append(f"{arg_doc} :: {arg_obj.name}") doc_str = arg_obj.get_documentation() if include_doc and (doc_str is not None): hover_array += doc_str.splitlines() return hover_array def get_signature(self, drop_arg=-1): arg_sigs = [] arg_list = self.args.split(",") for (i, arg_obj) in enumerate(self.arg_objs): if i == drop_arg: continue if arg_obj is None: arg_sigs.append({"label": arg_list[i]}) else: if arg_obj.is_optional(): label = f"{arg_obj.name.lower()}={arg_obj.name.lower()}" else: label = arg_obj.name.lower() arg_sigs.append( {"label": label, "documentation": arg_obj.get_hover()[0]} ) call_sig, _ = self.get_snippet() return call_sig, self.get_documentation(), arg_sigs def get_interface_array( self, keywords: list[str], signature: str, change_arg=-1, change_strings=None ): interface_array = [" ".join(keywords) + signature] for i, arg_obj in enumerate(self.arg_objs): if arg_obj is None: return None arg_doc, _ = arg_obj.get_hover(include_doc=False) if i == change_arg: i0 = arg_doc.lower().find(change_strings[0].lower()) if i0 >= 0: i1 = i0 + len(change_strings[0]) arg_doc = arg_doc[:i0] + change_strings[1] + arg_doc[i1:] interface_array.append(f"{arg_doc} :: {arg_obj.name}") return interface_array def get_interface(self, name_replace=None, change_arg=-1, change_strings=None): sub_sig, _ = self.get_snippet(name_replace=name_replace) keyword_list = get_keywords(self.keywords) keyword_list.append("SUBROUTINE ") interface_array = 
self.get_interface_array( keyword_list, sub_sig, change_arg, change_strings ) name = self.name if name_replace is not None: name = name_replace interface_array.append(f"END SUBROUTINE {name}") return "\n".join(interface_array) def check_valid_parent(self): if self.parent is not None: parent_type = self.parent.get_type() if (parent_type == CLASS_TYPE_ID) or (parent_type >= BLOCK_TYPE_ID): return False return True def get_diagnostics(self): errors = [] for missing_obj in self.missing_args: new_diag = Diagnostic( missing_obj.sline - 1, f'Variable "{missing_obj.name}" with INTENT keyword not found in' " argument list", severity=1, find_word=missing_obj.name, ) errors.append(new_diag) implicit_flag = self.get_implicit() if (implicit_flag is None) or implicit_flag: return errors arg_list = self.args.replace(" ", "").split(",") for (i, arg_obj) in enumerate(self.arg_objs): if arg_obj is None: arg_name = arg_list[i].strip() new_diag = Diagnostic( self.sline - 1, f'No matching declaration found for argument "{arg_name}"', severity=1, find_word=arg_name, ) errors.append(new_diag) return errors class Function(Subroutine): def __init__( self, file_ast: FortranAST, line_number: int, name: str, args: str = "", mod_flag: bool = False, keywords: list = None, result_type: str = None, result_name: str = None, ): super().__init__(file_ast, line_number, name, args, mod_flag, keywords) self.args: str = args.replace(" ", "").lower() self.args_snip: str = self.args self.arg_objs: list = [] self.in_children: list = [] self.missing_args: list = [] self.mod_scope: bool = mod_flag self.result_name: str = result_name self.result_type: str = result_type self.result_obj: Variable = None # Set the implicit result() name to be the function name if self.result_name is None: self.result_name = self.name def copy_interface(self, copy_source: Function): # Call the parent class method child_names = super().copy_interface(copy_source) # Return specific options self.result_name = copy_source.result_name self.result_type = copy_source.result_type self.result_obj = copy_source.result_obj if copy_source.result_obj is not None: if copy_source.result_obj.name.lower() not in child_names: self.in_children.append(copy_source.result_obj) def resolve_link(self, obj_tree): self.resolve_arg_link(obj_tree) result_var_lower = self.result_name.lower() for child in self.children: if child.name.lower() == result_var_lower: self.result_obj = child # Update result value and type self.result_name = child.name self.result_type = child.get_desc() def get_type(self, no_link=False): return FUNCTION_TYPE_ID def get_desc(self): if self.result_type: return self.result_type + " FUNCTION" return "FUNCTION" def is_callable(self): return False def get_hover( self, long: bool = False, include_doc: bool = True, drop_arg: int = -1 ) -> tuple[str, bool]: """Construct the hover message for a FUNCTION. Two forms are produced here the `long` i.e. 
the normal for hover requests [MODIFIERS] FUNCTION NAME([ARGS]) RESULT(RESULT_VAR) TYPE, [ARG_MODIFIERS] :: [ARGS] TYPE, [RESULT_MODIFIERS] :: RESULT_VAR note: intrinsic functions will display slightly different, `RESULT_VAR` and its `TYPE` might not always be present short form, used when functions are arguments in functions and subroutines: FUNCTION NAME([ARGS]) :: ARG_LIST_NAME Parameters ---------- long : bool, optional toggle between long and short hover results, by default False include_doc : bool, optional if to include any documentation, by default True drop_arg : int, optional Ignore argument at position `drop_arg` in the argument list, by default -1 Returns ------- tuple[str, bool] String representative of the hover message and the `long` flag used """ fun_sig, _ = self.get_snippet(drop_arg=drop_arg) # short hover messages do not include the result() fun_sig += f" RESULT({self.result_name})" if long else "" keyword_list = get_keywords(self.keywords) keyword_list.append("FUNCTION") hover_array = [f"{' '.join(keyword_list)} {fun_sig}"] hover_array = self.get_docs_full(hover_array, long, include_doc, drop_arg) # Only append the return value if using long form if self.result_obj and long: arg_doc, _ = self.result_obj.get_hover(include_doc=False) hover_array.append(f"{arg_doc} :: {self.result_obj.name}") # intrinsic functions, where the return type is missing but can be inferred elif self.result_type and long: # prepend type to function signature hover_array[0] = f"{self.result_type} {hover_array[0]}" return "\n ".join(hover_array), long def get_interface(self, name_replace=None, change_arg=-1, change_strings=None): fun_sig, _ = self.get_snippet(name_replace=name_replace) fun_sig += f" RESULT({self.result_name})" # XXX: keyword_list = [] if self.result_type: keyword_list.append(self.result_type) keyword_list += get_keywords(self.keywords) keyword_list.append("FUNCTION ") interface_array = self.get_interface_array( keyword_list, fun_sig, change_arg, change_strings ) if self.result_obj is not None: arg_doc, _ = self.result_obj.get_hover(include_doc=False) interface_array.append(f"{arg_doc} :: {self.result_obj.name}") name = self.name if name_replace is not None: name = name_replace interface_array.append(f"END FUNCTION {name}") return "\n".join(interface_array) class Type(Scope): def __init__( self, file_ast: FortranAST, line_number: int, name: str, keywords: list ): super().__init__(file_ast, line_number, name, keywords) # self.in_children: list = [] self.inherit = None self.inherit_var = None self.inherit_tmp = None self.inherit_version = -1 if keywords.count(KEYWORD_ID_DICT["abstract"]) > 0: self.abstract = True else: self.abstract = False if self.keywords.count(KEYWORD_ID_DICT["public"]) > 0: self.vis = 1 if self.keywords.count(KEYWORD_ID_DICT["private"]) > 0: self.vis = -1 def get_type(self, no_link=False): return CLASS_TYPE_ID def get_desc(self): return "TYPE" def get_children(self, public_only=False): tmp_list = copy.copy(self.children) tmp_list.extend(self.in_children) return tmp_list def resolve_inherit(self, obj_tree, inherit_version): if (self.inherit is None) or (self.inherit_version == inherit_version): return self.inherit_version = inherit_version self.inherit_var = find_in_scope(self.parent, self.inherit, obj_tree) if self.inherit_var is not None: # Resolve parent inheritance while avoiding circular recursion self.inherit_tmp = self.inherit self.inherit = None self.inherit_var.resolve_inherit(obj_tree, inherit_version) self.inherit = self.inherit_tmp self.inherit_tmp = 
None # Get current fields child_names = [] for child in self.children: child_names.append(child.name.lower()) # Import for parent objects self.in_children = [] for child in self.inherit_var.get_children(): if child.name.lower() not in child_names: self.in_children.append(child) def require_inherit(self): return True def get_overridden(self, field_name): ret_list = [] field_name = field_name.lower() for child in self.children: if field_name == child.name.lower(): ret_list.append(child) break if self.inherit_var is not None: ret_list += self.inherit_var.get_overridden(field_name) return ret_list def check_valid_parent(self): if self.parent is None: return False else: parent_type = self.parent.get_type() if (parent_type == CLASS_TYPE_ID) or (parent_type >= BLOCK_TYPE_ID): return False return True def get_diagnostics(self): errors = [] for in_child in self.in_children: if (not self.abstract) and ( in_child.keywords.count(KEYWORD_ID_DICT["deferred"]) > 0 ): new_diag = Diagnostic( self.eline - 1, f'Deferred procedure "{in_child.name}" not implemented', severity=1, ) new_diag.add_related( path=in_child.file_ast.path, line=in_child.sline - 1, message="Inherited procedure declaration", ) errors.append(new_diag) return errors def get_actions(self, sline, eline): actions = [] edits = [] line_number = self.eline - 1 if (line_number < sline) or (line_number > eline): return actions if self.contains_start is None: edits.append( { **range_json(line_number, 0, line_number, 0), "newText": "CONTAINS\n", } ) # diagnostics = [] has_edits = False file_uri = path_to_uri(self.file_ast.path) for in_child in self.in_children: if in_child.keywords.count(KEYWORD_ID_DICT["deferred"]) > 0: # Get interface interface_string = in_child.get_interface( name_replace=in_child.name, change_strings=( f"class({in_child.parent.name})", f"CLASS({self.name})", ), ) if interface_string is None: continue interface_path, interface_edits = self.parent.add_subroutine( interface_string, no_contains=has_edits ) if interface_path != self.file_ast.path: continue edits.append( { **range_json(line_number, 0, line_number, 0), "newText": " PROCEDURE :: {0} => {0}\n".format(in_child.name), } ) edits += interface_edits new_diag = Diagnostic( line_number, f'Deferred procedure "{in_child.name}" not implemented', severity=1, ) new_diag.add_related( path=in_child.file_ast.path, line=in_child.sline - 1, message="Inherited procedure declaration", ) diagnostics.append(new_diag) has_edits = True # if has_edits: actions = [ { "title": "Implement deferred procedures", "kind": "quickfix", "edit": {"changes": {file_uri: edits}}, "diagnostics": diagnostics, } ] return actions class Block(Scope): def __init__(self, file_ast: FortranAST, line_number: int, name: str): super().__init__(file_ast, line_number, name) def get_type(self, no_link=False): return BLOCK_TYPE_ID def get_desc(self): return "BLOCK" def get_children(self, public_only=False): return copy.copy(self.children) def req_named_end(self): return True class Do(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str): super().__init__(file_ast, line_number, name) def get_type(self, no_link=False): return DO_TYPE_ID def get_desc(self): return "DO" class Where(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str): super().__init__(file_ast, line_number, name) def get_type(self, no_link=False): return WHERE_TYPE_ID def get_desc(self): return "WHERE" class If(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str): 
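# --- Illustrative sketch (hedged) -------------------------------------------
# The quick-fix assembled in get_actions above produces a workspace edit of
# the general LSP shape sketched here; the URI, line number and inserted text
# are made up for illustration and are not the server's literal payload.
def _quickfix_demo(file_uri, line):
    edit = {"range": {"start": {"line": line, "character": 0},
                      "end": {"line": line, "character": 0}},
            "newText": "    PROCEDURE :: foo => foo\n"}
    return [{
        "title": "Implement deferred procedures",
        "kind": "quickfix",
        "edit": {"changes": {file_uri: [edit]}},
    }]
# e.g. _quickfix_demo("file:///tmp/demo.f90", 12)
# -----------------------------------------------------------------------------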
super().__init__(file_ast, line_number, name) def get_type(self, no_link=False): return IF_TYPE_ID def get_desc(self): return "IF" class Associate(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str): super().__init__(file_ast, line_number, name) self.links: list[AssociateMap] = [] # holds the info to associate variables def get_type(self, no_link=False): return ASSOC_TYPE_ID def get_desc(self): return "ASSOCIATE" def create_binding_variable( self, file_ast: FortranAST, line_number: int, bind_name: str, link_name: str ) -> Variable: """Create a new variable to be linked upon resolution to the real variable that contains the information of the mapping from the parent scope to the ASSOCIATE block scope. Parameters ---------- file_ast : fortran_ast AST file line_number : int Line number bind_name : str Name of the ASSOCIATE block variable link_name : str Name of the parent scope variable Returns ------- fortran_var Variable object holding the ASSOCIATE block variable, pending resolution """ new_var = Variable(file_ast, line_number, bind_name, "UNKNOWN", []) self.links.append(AssociateMap(new_var, bind_name, link_name)) return new_var def resolve_link(self, obj_tree): # Loop through the list of the associated variables map and resolve the links # find the AST node that that corresponds to the variable with link_name for assoc in self.links: # TODO: extract the dimensions component from the link_name # re.sub(r'\(.*\)', '', link_name) removes the dimensions component # keywords = re.match(r'(.*)\((.*)\)', link_name).groups() # now pass the keywords through the dimension_parser and set the keywords # in the associate object. Hover should now pick the local keywords # over the linked_object keywords assoc.link_name = re.sub(r"\(.*\)", "", assoc.link_name) var_stack = get_var_stack(assoc.link_name) is_member = len(var_stack) > 1 if is_member: type_scope = climb_type_tree(var_stack, self, obj_tree) if type_scope is None: continue var_obj = find_in_scope(type_scope, var_stack[-1], obj_tree) if var_obj is not None: assoc.var.link_obj = var_obj else: var_obj = find_in_scope(self, assoc.link_name, obj_tree) if var_obj is not None: assoc.var.link_obj = var_obj def require_link(self): return True class Enum(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str): super().__init__(file_ast, line_number, name) def get_type(self, no_link=False): return ENUM_TYPE_ID def get_desc(self): return "ENUM" class Select(Block): def __init__(self, file_ast: FortranAST, line_number: int, name: str, select_info): super().__init__(file_ast, line_number, name) self.select_type = select_info.type self.binding_name = None self.bound_var = None self.binding_type = None if self.select_type == 2: binding_split = select_info.binding.split("=>") if len(binding_split) == 1: self.bound_var = binding_split[0].strip() elif len(binding_split) == 2: self.binding_name = binding_split[0].strip() self.bound_var = binding_split[1].strip() elif self.select_type == 3: self.binding_type = select_info.binding # Close previous "TYPE IS" region if open if ( (file_ast.current_scope is not None) and (file_ast.current_scope.get_type() == SELECT_TYPE_ID) and file_ast.current_scope.is_type_region() ): file_ast.end_scope(line_number) def get_type(self, no_link=False): return SELECT_TYPE_ID def get_desc(self): return "SELECT" def is_type_binding(self): return self.select_type == 2 def is_type_region(self): return (self.select_type == 3) or (self.select_type == 4) def create_binding_variable(self, 
file_ast, line_number, var_desc, case_type): if self.parent.get_type() != SELECT_TYPE_ID: return None binding_name = None bound_var = None if (self.parent is not None) and self.parent.is_type_binding(): binding_name = self.parent.binding_name bound_var = self.parent.bound_var # Check for default case if (binding_name is not None) and (case_type != 4): bound_var = None # Create variable if binding_name is not None: return Variable( file_ast, line_number, binding_name, var_desc, [], link_obj=bound_var ) elif (binding_name is None) and (bound_var is not None): return Variable(file_ast, line_number, bound_var, var_desc, []) return None class Interface(Scope): def __init__( self, file_ast: FortranAST, line_number: int, name: str, abstract: bool = False, ): super().__init__(file_ast, line_number, name) self.mems = [] self.abstract = abstract self.external = name.startswith("#GEN_INT") and (not abstract) def get_type(self, no_link=False): return INTERFACE_TYPE_ID def get_desc(self): return "INTERFACE" def is_callable(self): return True def is_external_int(self): return self.external def is_abstract(self): return self.abstract def resolve_link(self, obj_tree): if self.parent is None: return self.mems = [] for member in self.members: mem_obj = find_in_scope(self.parent, member, obj_tree) if mem_obj is not None: self.mems.append(mem_obj) def require_link(self): return True class Variable(FortranObj): def __init__( self, file_ast: FortranAST, line_number: int, name: str, var_desc: str, keywords: list, keyword_info: dict = None, link_obj=None, ): super().__init__() if keyword_info is None: keyword_info = {} self.file_ast: FortranAST = file_ast self.sline: int = line_number self.eline: int = line_number self.name: str = name self.desc: str = var_desc self.keywords: list = keywords self.keyword_info: dict = keyword_info self.callable: bool = FRegex.CLASS_VAR.match(var_desc) is not None self.children: list = [] self.use: list[USE_line] = [] self.link_obj = None self.type_obj = None self.is_const: bool = False self.is_external: bool = False self.param_val: str = None self.link_name: str = None self.FQSN: str = self.name.lower() if link_obj is not None: self.link_name = link_obj.lower() if file_ast.enc_scope_name is not None: self.FQSN = file_ast.enc_scope_name.lower() + "::" + self.name.lower() if self.keywords.count(KEYWORD_ID_DICT["public"]) > 0: self.vis = 1 if self.keywords.count(KEYWORD_ID_DICT["private"]) > 0: self.vis = -1 if self.keywords.count(KEYWORD_ID_DICT["parameter"]) > 0: self.is_const = True if ( self.keywords.count(KEYWORD_ID_DICT["external"]) > 0 or self.desc.lower() == "external" ): self.is_external = True def update_fqsn(self, enc_scope=None): if enc_scope is not None: self.FQSN = enc_scope.lower() + "::" + self.name.lower() else: self.FQSN = self.name.lower() for child in self.children: child.update_fqsn(self.FQSN) def resolve_link(self, obj_tree): self.link_obj = None if self.link_name is None: return if self.parent is not None: link_obj = find_in_scope(self.parent, self.link_name, obj_tree) if link_obj is not None: self.link_obj = link_obj def require_link(self): return self.link_name is not None def get_type(self, no_link=False): if (not no_link) and (self.link_obj is not None): return self.link_obj.get_type() # Normal variable return VAR_TYPE_ID def get_desc(self): if self.link_obj is not None: return self.link_obj.get_desc() # Normal variable return self.desc def get_type_obj(self, obj_tree): if self.link_obj is not None: return self.link_obj.get_type_obj(obj_tree) if 
(self.type_obj is None) and (self.parent is not None): type_name = get_paren_substring(self.desc) if type_name is not None: search_scope = self.parent if search_scope.get_type() == CLASS_TYPE_ID: search_scope = search_scope.parent if search_scope is not None: type_name = type_name.strip().lower() type_obj = find_in_scope(search_scope, type_name, obj_tree) if type_obj is not None: self.type_obj = type_obj return self.type_obj # XXX: unused delete or use for associate blocks def set_dim(self, dim_str): if KEYWORD_ID_DICT["dimension"] not in self.keywords: self.keywords.append(KEYWORD_ID_DICT["dimension"]) self.keyword_info["dimension"] = dim_str self.keywords.sort() def get_snippet(self, name_replace=None, drop_arg=-1): name = self.name if name_replace is not None: name = name_replace if self.link_obj is not None: return self.link_obj.get_snippet(name, drop_arg) # Normal variable return None, None def get_hover(self, long=False, include_doc=True, drop_arg=-1): doc_str = self.get_documentation() # In associated blocks we need to fetch the desc and keywords of the # linked object hover_str = ", ".join([self.get_desc()] + self.get_keywords()) # TODO: at this stage we can mae this lowercase # Add parameter value in the output if self.is_parameter() and self.param_val: hover_str += f" :: {self.name} = {self.param_val}" if include_doc and (doc_str is not None): hover_str += "\n {}".format("\n ".join(doc_str.splitlines())) return hover_str, True def get_keywords(self): # TODO: if local keywords are set they should take precedence over link_obj # Alternatively, I could do a dictionary merge with local variables # having precedence by default and use a flag to override? if self.link_obj is not None: return get_keywords(self.link_obj.keywords, self.link_obj.keyword_info) return get_keywords(self.keywords, self.keyword_info) def is_optional(self): if self.keywords.count(KEYWORD_ID_DICT["optional"]) > 0: return True else: return False def is_callable(self): return self.callable def is_parameter(self): return self.is_const def set_parameter_val(self, val: str): self.param_val = val def set_external_attr(self): self.keywords.append(KEYWORD_ID_DICT["external"]) self.is_external = True def check_definition(self, obj_tree, known_types={}, interface=False): # Check for type definition in scope type_match = FRegex.DEF_KIND.match(self.desc) if type_match is not None: var_type = type_match.group(1).strip().lower() if var_type == "procedure": return None, known_types desc_obj_name = type_match.group(2).strip().lower() if desc_obj_name not in known_types: type_def = find_in_scope( self.parent, desc_obj_name, obj_tree, interface=interface ) if type_def is None: type_defs = find_in_workspace( obj_tree, desc_obj_name, filter_public=True, exact_match=True ) known_types[desc_obj_name] = None var_type = type_match.group(1).strip().lower() filter_id = VAR_TYPE_ID if (var_type == "class") or (var_type == "type"): filter_id = CLASS_TYPE_ID for type_def in type_defs: if type_def.get_type() == filter_id: known_types[desc_obj_name] = (1, type_def) break else: known_types[desc_obj_name] = (0, type_def) type_info = known_types[desc_obj_name] if type_info is not None: if type_info[0] == 1: if interface: out_diag = Diagnostic( self.sline - 1, message=( f'Object "{desc_obj_name}" not imported in interface' ), severity=1, find_word=desc_obj_name, ) else: out_diag = Diagnostic( self.sline - 1, message=f'Object "{desc_obj_name}" not found in scope', severity=1, find_word=desc_obj_name, ) type_def = type_info[1] out_diag.add_related( 
path=type_def.file_ast.path, line=type_def.sline - 1, message="Possible object", ) return out_diag, known_types return None, known_types class Method(Variable): # i.e. TypeBound procedure def __init__( self, file_ast: FortranAST, line_number: int, name: str, var_desc: str, keywords: list, keyword_info: dict, link_obj=None, ): super().__init__( file_ast, line_number, name, var_desc, keywords, keyword_info, link_obj ) self.drop_arg: int = -1 self.pass_name: str = keyword_info.get("pass") if link_obj is None: self.link_name = get_paren_substring(var_desc.lower()) def set_parent(self, parent_obj): self.parent = parent_obj if self.parent.get_type() == CLASS_TYPE_ID: if self.keywords.count(KEYWORD_ID_DICT["nopass"]) == 0: self.drop_arg = 0 if ( (self.parent.contains_start is not None) and (self.sline > self.parent.contains_start) and (self.link_name is None) ): self.link_name = self.name.lower() def get_snippet(self, name_replace=None, drop_arg=-1): if self.link_obj is not None: if name_replace is None: name = self.name else: name = name_replace return self.link_obj.get_snippet(name, self.drop_arg) return None, None def get_type(self, no_link=False): if (not no_link) and (self.link_obj is not None): return self.link_obj.get_type() # Generic return METH_TYPE_ID def get_documentation(self): if (self.link_obj is not None) and (self.doc_str is None): return self.link_obj.get_documentation() return self.doc_str def get_hover(self, long=False, include_doc=True, drop_arg=-1): doc_str = self.get_documentation() if long: if self.link_obj is None: sub_sig, _ = self.get_snippet() hover_str = f"{self.get_desc()} {sub_sig}" if include_doc and (doc_str is not None): hover_str += f"\n{doc_str}" else: link_hover, _ = self.link_obj.get_hover( long=True, include_doc=include_doc, drop_arg=self.drop_arg ) hover_split = link_hover.splitlines() call_sig = hover_split[0] paren_start = call_sig.rfind("(") link_name_len = len(self.link_obj.name) call_sig = ( call_sig[: paren_start - link_name_len] + self.name + call_sig[paren_start:] ) hover_split = hover_split[1:] if include_doc and (self.doc_str is not None): # Replace linked docs with current object's docs if (len(hover_split) > 0) and (hover_split[0].count("!!") > 0): for (i, hover_line) in enumerate(hover_split): if hover_line.count("!!") == 0: hover_split = hover_split[i:] break else: # All lines are docs hover_split = [] hover_split = [self.doc_str] + hover_split hover_str = "\n".join([call_sig] + hover_split) return hover_str, True else: hover_str = ", ".join([self.desc] + get_keywords(self.keywords)) if include_doc and (doc_str is not None): hover_str += f"\n{doc_str}" return hover_str, True def get_signature(self, drop_arg=-1): if self.link_obj is not None: call_sig, _ = self.get_snippet() _, _, arg_sigs = self.link_obj.get_signature(self.drop_arg) return call_sig, self.get_documentation(), arg_sigs return None, None, None def get_interface(self, name_replace=None, change_arg=-1, change_strings=None): if self.link_obj is not None: return self.link_obj.get_interface( name_replace, self.drop_arg, change_strings ) return None def resolve_link(self, obj_tree): if self.link_name is None: return if self.parent is not None: if self.parent.get_type() == CLASS_TYPE_ID: link_obj = find_in_scope(self.parent.parent, self.link_name, obj_tree) else: link_obj = find_in_scope(self.parent, self.link_name, obj_tree) if link_obj is not None: self.link_obj = link_obj if self.pass_name is not None: self.pass_name = self.pass_name.lower() for i, arg in 
enumerate(link_obj.args_snip.split(",")): if arg.lower() == self.pass_name: self.drop_arg = i break def is_callable(self): return True def check_definition(self, obj_tree, known_types={}, interface=False): return None, known_types class FortranAST: def __init__(self, file_obj=None): self.file = file_obj self.path: str = None if file_obj is not None: self.path = file_obj.path self.global_dict: dict = {} self.scope_list: list = [] self.variable_list: list = [] self.public_list: list = [] self.private_list: list = [] self.scope_stack: list = [] self.end_stack: list = [] self.pp_if: list = [] self.include_statements: list = [] self.end_errors: list = [] self.parse_errors: list = [] self.inherit_objs: list = [] self.linkable_objs: list = [] self.external_objs: list = [] self.none_scope = None self.inc_scope = None self.current_scope = None self.END_SCOPE_REGEX: Pattern = None self.enc_scope_name: str = None self.last_obj = None self.pending_doc: str = None def create_none_scope(self): """Create empty scope to hold non-module contained items""" if self.none_scope is not None: raise ValueError self.none_scope = Program(self, 1, "main") self.add_scope( self.none_scope, re.compile(r"[ ]*END[ ]*PROGRAM", re.I), exportable=False ) def get_enc_scope_name(self): """Get current enclosing scope name""" if self.current_scope is None: return None return self.current_scope.FQSN def add_scope( self, new_scope: Scope, END_SCOPE_REGEX: Pattern[str], exportable: bool = True, req_container: bool = False, ): self.scope_list.append(new_scope) if new_scope.require_inherit(): self.inherit_objs.append(new_scope) if new_scope.require_link(): self.linkable_objs.append(new_scope) if self.current_scope is None: if req_container: self.create_none_scope() new_scope.FQSN = self.none_scope.FQSN + "::" + new_scope.name.lower() self.current_scope.add_child(new_scope) self.scope_stack.append(self.current_scope) else: if exportable: self.global_dict[new_scope.FQSN] = new_scope else: self.current_scope.add_child(new_scope) self.scope_stack.append(self.current_scope) if self.END_SCOPE_REGEX is not None: self.end_stack.append(self.END_SCOPE_REGEX) self.current_scope = new_scope self.END_SCOPE_REGEX = END_SCOPE_REGEX self.enc_scope_name = self.get_enc_scope_name() self.last_obj = new_scope if self.pending_doc is not None: self.last_obj.add_doc(self.pending_doc) self.pending_doc = None def end_scope(self, line_number: int, check: bool = True): if ( (self.current_scope is None) or (self.current_scope is self.none_scope) ) and check: self.end_errors.append([-1, line_number]) return self.current_scope.end(line_number) if len(self.scope_stack) > 0: self.current_scope = self.scope_stack.pop() else: self.current_scope = None if len(self.end_stack) > 0: self.END_SCOPE_REGEX = self.end_stack.pop() else: self.END_SCOPE_REGEX = None self.enc_scope_name = self.get_enc_scope_name() def add_variable(self, new_var: Variable): if self.current_scope is None: self.create_none_scope() new_var.FQSN = self.none_scope.FQSN + "::" + new_var.name.lower() self.current_scope.add_child(new_var) self.variable_list.append(new_var) if new_var.is_external: self.external_objs.append(new_var) if new_var.require_link(): self.linkable_objs.append(new_var) self.last_obj = new_var if self.pending_doc is not None: self.last_obj.add_doc(self.pending_doc) self.pending_doc = None def add_int_member(self, key): self.current_scope.add_member(key) def add_private(self, name: str): self.private_list.append(self.enc_scope_name + "::" + name) def add_public(self, name: str): 
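# --- Illustrative sketch (hedged) -------------------------------------------
# add_scope/end_scope in FortranAST behave like a plain stack: entering a
# scope pushes the current one (together with its END regex), leaving a scope
# pops it back.  Minimal stand-in with strings instead of Scope objects:
class _ScopeStackDemo:
    def __init__(self):
        self.current = None
        self._stack = []

    def enter(self, name):
        self._stack.append(self.current)
        self.current = name

    def leave(self):
        self.current = self._stack.pop() if self._stack else None
# e.g. s = _ScopeStackDemo(); s.enter("module m"); s.enter("subroutine f")
#      s.leave()  # back inside "module m"
# -----------------------------------------------------------------------------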
self.public_list.append(self.enc_scope_name + "::" + name) def add_use( self, mod_word: str, line_number: int, only_list: list = [], rename_map: dict = {}, ): if self.current_scope is None: self.create_none_scope() self.current_scope.add_use(mod_word, line_number, only_list, rename_map) def add_include(self, path: str, line_number: int): self.include_statements.append(IncludeInfo(line_number, path, None, [])) def add_doc(self, doc_string: str, forward: bool = False): if doc_string == "": return if forward: self.pending_doc = doc_string else: if self.last_obj is not None: self.last_obj.add_doc(doc_string) def add_error(self, msg: str, sev: int, ln: int, sch: int, ech: int = None): """Add a Diagnostic error, encountered during parsing, for a range in the document. Parameters ---------- msg : str Error message sev : int Severity, Error, Warning, Notification ln : int Line number sch : int Start character ech : int End character """ # Convert from Editor line numbers 1-base index to LSP index which is 0-based self.parse_errors.append(diagnostic_json(ln - 1, sch, ln - 1, ech, msg, sev)) def start_ppif(self, line_number: int): self.pp_if.append([line_number - 1, -1]) def end_ppif(self, line_number): if len(self.pp_if) > 0: self.pp_if[-1][1] = line_number - 1 def get_scopes(self, line_number: int = None): if line_number is None: return self.scope_list scope_list = [] for scope in self.scope_list: if (line_number >= scope.sline) and (line_number <= scope.eline): scope_list.append(scope) for ancestor in scope.get_ancestors(): scope_list.append(ancestor) if (len(scope_list) == 0) and (self.none_scope is not None): return [self.none_scope] return scope_list def get_inner_scope(self, line_number: int): scope_sline = -1 curr_scope = None for scope in self.scope_list: if scope.sline > scope_sline: if (line_number >= scope.sline) and (line_number <= scope.eline): curr_scope = scope scope_sline = scope.sline if (curr_scope is None) and (self.none_scope is not None): return self.none_scope return curr_scope def get_object(self, FQSN: str): FQSN_split = FQSN.split("::") curr_obj = self.global_dict.get(FQSN_split[0]) if curr_obj is None: # Look for non-exportable scopes for scope in self.scope_list: if FQSN_split[0] == scope.FQSN: curr_obj = scope break if curr_obj is None: return None if len(FQSN_split) > 1: for name in FQSN_split[1:]: next_obj = None for child in curr_obj.children: if child.name.startswith("#GEN_INT"): for int_child in child.get_children(): if int_child.name == name: next_obj = int_child break if next_obj is not None: break if child.name == name: next_obj = child break if next_obj is None: return None curr_obj = next_obj return curr_obj def resolve_includes(self, workspace, path: str = None): file_dir = os.path.dirname(self.path) for inc in self.include_statements: file_path = os.path.normpath(os.path.join(file_dir, inc.path)) if path and not (path == file_path): continue parent_scope = self.get_inner_scope(inc.line_number) added_entities = inc.scope_objs if file_path in workspace: include_file = workspace[file_path] include_ast = include_file.ast inc.file = include_file if include_ast.none_scope: if include_ast.inc_scope is None: include_ast.inc_scope = include_ast.none_scope # Remove old objects for obj in added_entities: parent_scope.children.remove(obj) added_entities = [] for child in include_ast.inc_scope.children: added_entities.append(child) parent_scope.add_child(child) child.update_fqsn(parent_scope.FQSN) include_ast.none_scope = parent_scope inc.scope_objs = added_entities def 
resolve_links(self, obj_tree, link_version): for inherit_obj in self.inherit_objs: inherit_obj.resolve_inherit(obj_tree, inherit_version=link_version) for linkable_obj in self.linkable_objs: linkable_obj.resolve_link(obj_tree) def close_file(self, line_number: int): # Close open scopes while self.current_scope is not None: self.end_scope(line_number, check=False) # Close and delist none_scope if self.none_scope is not None: self.none_scope.end(line_number) self.scope_list.remove(self.none_scope) # Tasks to be done when file parsing is finished for private_name in self.private_list: obj = self.get_object(private_name) if obj is not None: obj.set_visibility(-1) for public_name in self.public_list: obj = self.get_object(public_name) if obj is not None: obj.set_visibility(1) def check_file(self, obj_tree): errors = [] tmp_list = self.scope_list[:] # shallow copy if self.none_scope is not None: tmp_list += [self.none_scope] for error in self.end_errors: if error[0] >= 0: message = f"Unexpected end of scope at line {error[0]}" else: message = "Unexpected end statement: No open scopes" errors.append(Diagnostic(error[1] - 1, message=message, severity=1)) for scope in tmp_list: if not scope.check_valid_parent(): errors.append( Diagnostic( scope.sline - 1, message=f'Invalid parent for "{scope.get_desc()}" declaration', severity=1, ) ) errors += scope.check_use(obj_tree) errors += scope.check_definitions(obj_tree) errors += scope.get_diagnostics() return errors, self.parse_errors fortran-language-server-2.13.0+dfsg.1/fortls/parse_fortran.py000066400000000000000000002351421450400537300241530ustar00rootroot00000000000000from __future__ import annotations import hashlib import logging import os import re import sys from collections import Counter, deque # Python < 3.8 does not have typing.Literals try: from typing import Literal except ImportError: from typing_extensions import Literal from re import Pattern from fortls.constants import ( DO_TYPE_ID, INTERFACE_TYPE_ID, MODULE_TYPE_ID, SELECT_TYPE_ID, SUBMODULE_TYPE_ID, FRegex, Severity, log, ) from fortls.ftypes import ( ClassInfo, FunSig, GenProcDefInfo, InterInfo, Range, ResultSig, SelectInfo, SmodInfo, SubInfo, UseInfo, VarInfo, VisInfo, ) from fortls.helper_functions import ( detect_fixed_format, find_paren_match, find_word_in_line, get_paren_level, get_paren_substring, map_keywords, separate_def_list, strip_line_label, strip_strings, ) from fortls.objects import ( Associate, Block, Do, Enum, FortranAST, Function, If, Interface, Method, Module, Program, Scope, Select, Submodule, Subroutine, Type, Variable, Where, ) def get_line_context(line: str) -> tuple[str, None] | tuple[str, str]: """Get context of ending position in line (for completion) Parameters ---------- line : str file line Returns ------- tuple[str, None] Possible string values: `var_key`, `pro_line`, `var_only`, `mod_mems`, `mod_only`, `pro_link`, `skip`, `import`, `vis`, `call`, `type_only`, `int_only`, `first`, `default` """ last_level, sections = get_paren_level(line) lev1_end = sections[-1].end # Test if variable definition statement test_match = read_var_def(line) if test_match is not None: if test_match[0] == "var": if (test_match[1].var_names is None) and (lev1_end == len(line)): return "var_key", None # Procedure link? 
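# --- Illustrative sketch (hypothetical data, not the fortls Scope objects) ---
# get_inner_scope() above returns the scope whose start line is largest among
# all scopes whose [sline, eline] range contains the requested line number.
# The same lookup over plain (name, sline, eline) tuples:
from __future__ import annotations

def innermost_scope(scopes: list[tuple[str, int, int]], line_no: int) -> str | None:
    best_name, best_sline = None, -1
    for name, sline, eline in scopes:
        if sline <= line_no <= eline and sline > best_sline:
            best_name, best_sline = name, sline
    return best_name

scopes = [("module m", 1, 100), ("subroutine s", 10, 40), ("block b", 20, 30)]
assert innermost_scope(scopes, 25) == "block b"
assert innermost_scope(scopes, 50) == "module m"
assert innermost_scope(scopes, 200) is None
# -----------------------------------------------------------------------------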
if (test_match[1].var_type == "PROCEDURE") and (line.find("=>") > 0): return "pro_link", None return "var_only", None # Test if in USE statement test_match = read_use_stmt(line) if test_match is not None: if len(test_match[1].only_list) > 0: return "mod_mems", test_match[1].mod_name else: return "mod_only", None # Test for interface procedure link if FRegex.PRO_LINK.match(line): return "pro_link", None # Test if scope declaration or end statement (no completion provided) if FRegex.SCOPE_DEF.match(line) or FRegex.END.match(line): return "skip", None # Test if import statement if FRegex.IMPORT.match(line): return "import", None # Test if visibility statement if FRegex.VIS.match(line): return "vis", None # In type-def type_def = False if FRegex.TYPE_DEF.match(line): type_def = True # Test if in call statement if (lev1_end == len(line)) and FRegex.CALL.match(last_level): return "call", None # Test if variable definition using type/class or procedure if (len(sections) == 1) and (sections[0].start >= 1): # Get string one level up test_str, _ = get_paren_level(line[: sections[0].start - 1]) if FRegex.TYPE_STMNT.match(test_str) or ( type_def and FRegex.EXTENDS.search(test_str) ): return "type_only", None if FRegex.PROCEDURE_STMNT.match(test_str): return "int_only", None # Only thing on line? if FRegex.INT_STMNT.match(line): return "first", None # Default or skip context if type_def: return "skip", None else: return "default", None def parse_var_keywords(test_str: str) -> tuple[list[str], str]: """Parse Fortran variable declaration keywords""" # Needs to be this way and not simply call finditer because no regex can # capture nested parenthesis keyword_match = FRegex.KEYWORD_LIST.match(test_str) keywords = [] while keyword_match: tmp_str = re.sub(r"^[, ]*", "", keyword_match.group(0)) test_str = test_str[keyword_match.end(0) :] if tmp_str.lower().startswith("dimension"): match_char = find_paren_match(test_str) if match_char < 0: break # Incomplete dimension statement else: tmp_str += test_str[: match_char + 1] test_str = test_str[match_char + 1 :] tmp_str = re.sub(r"^[, ]*", "", tmp_str) keywords.append(tmp_str.strip().upper()) keyword_match = FRegex.KEYWORD_LIST.match(test_str) return keywords, test_str def read_var_def(line: str, var_type: str = None, fun_only: bool = False): """Attempt to read variable definition line""" def parse_kind(line: str): match = FRegex.KIND_SPEC.match(line) if not match: return None, line kind_str = match.group(1).replace(" ", "") line = line[match.end(0) :] if kind_str.find("(") >= 0: match_char = find_paren_match(line) if match_char < 0: # this triggers while typing with autocomplete raise ValueError("Incomplete kind specification") kind_str += line[: match_char + 1].strip() line = line[match_char + 1 :] return kind_str, line if var_type is None: type_match = FRegex.VAR.match(line) if type_match is None: return None var_type = type_match.group(0).strip() trailing_line = line[type_match.end(0) :] else: trailing_line = line[len(var_type) :] var_type = var_type.upper() trailing_line = trailing_line.split("!")[0] if len(trailing_line) == 0: return None # Parse the global kind, if any, for the current line definition # The global kind in some cases, like characters can be overriden by a locally # defined kind try: kind_str, trailing_line = parse_kind(trailing_line) var_type += kind_str # XXX: see below except ValueError: return None except TypeError: # XXX: remove with explicit kind specification in VarInfo pass # Class and Type statements need a kind spec if not kind_str 
and var_type in ("TYPE", "CLASS"): return None # Make sure next character is space or comma or colon if not kind_str and not trailing_line[0] in (" ", ",", ":"): return None # keywords, trailing_line = parse_var_keywords(trailing_line) # Check if this is a function definition fun_def = read_fun_def(trailing_line, ResultSig(type=var_type, keywords=keywords)) if fun_def or fun_only: return fun_def # Split the type and variable name line_split = trailing_line.split("::") if len(line_split) == 1: if len(keywords) > 0: var_words = None else: trailing_line = line_split[0] var_words = separate_def_list(trailing_line.strip()) else: trailing_line = line_split[1] var_words = separate_def_list(trailing_line.strip()) if var_words is None: var_words = [] return "var", VarInfo(var_type, keywords, var_words, kind_str) def get_procedure_modifiers( line: str, regex: Pattern ) -> tuple[str, str, str] | tuple[None, None, None]: """Attempt to match procedure modifiers for FUNCTIONS and SUBROUTINES Parameters ---------- line : str document line regex : Pattern regular expression to use e.g. Function or Subroutine sig Returns ------- tuple[str, str, str] | tuple[None, None, None] procedure name, arguments, trailing line """ match = regex.match(line) if match is None: return None, None, None name: str = match.group(1) trailing_line = line[match.end(0) :].split("!")[0] trailing_line = trailing_line.strip() paren_match = FRegex.SUB_PAREN.match(trailing_line) args = "" if paren_match is not None: word_match = FRegex.WORD.findall(paren_match.group(0)) if word_match is not None: word_match = [word for word in word_match] args = ",".join(word_match) trailing_line = trailing_line[paren_match.end(0) :] return name, args, trailing_line def read_fun_def( line: str, result: ResultSig = None, mod_flag: bool = False ) -> tuple[Literal["fun"], FunSig] | None: """Attempt to read FUNCTION definition line To infer the `result` `type` and `name` the variable definition is called with the function only flag Parameters ---------- line : str file line result : RESULT_sig, optional a dataclass containing the result signature of the function mod_flag : bool, optional flag for module and module procedure parsing, by default False Returns ------- tuple[Literal["fun"], FUN_sig] | None a named tuple """ # Get all the keyword modifier mathces keywords = re.findall(FRegex.SUB_MOD, line) # remove modifiers from line line = re.sub(FRegex.SUB_MOD, "", line) # Try and get the result type # Recursively will call read_var_def which will then call read_fun_def # with the variable result having been populated if keywords: tmp_var = read_var_def(line, fun_only=True) if tmp_var is not None: # Update keywords for function into dataclass tmp_var[1].keywords = keywords return tmp_var name, args, trailing_line = get_procedure_modifiers(line, FRegex.FUN) if name is None: return None # Extract if possible the variable name of the result() trailing_line = trailing_line.strip() results_match = FRegex.RESULT.match(trailing_line) if result is None: result = ResultSig() if results_match: result.name = results_match.group(1).strip().lower() return "fun", FunSig(name, args, keywords, mod_flag, result) def read_sub_def( line: str, mod_flag: bool = False ) -> tuple[Literal["sub"], SubInfo] | None: """Attempt to read a SUBROUTINE definition line Parameters ---------- line : str document line mod_flag : bool, optional flag for module and module procedure parsing, by default False Returns ------- tuple[Literal["sub"], SUB_info] | None a SUB_info dataclass object """ # 
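# --- Illustrative sketch (toy splitter, not read_var_def above) --------------
# read_var_def() works in three steps: match the intrinsic type, peel off an
# optional kind/len spec with balanced parentheses (parse_kind), then split
# the remainder on "::" into the attribute list and the declared names.  A toy
# version that skips the kind handling and nested parentheses entirely:
from __future__ import annotations
import re

TYPE_RE = re.compile(r"[ ]*(INTEGER|REAL|COMPLEX|CHARACTER|LOGICAL|TYPE|CLASS)", re.I)

def split_declaration(line: str) -> tuple[str, list[str], list[str]] | None:
    match = TYPE_RE.match(line)
    if match is None:
        return None
    var_type = match.group(1).upper()
    rest = line[match.end(0):].split("!")[0]  # drop any trailing comment
    if "::" not in rest:
        return None
    attrs, names = rest.split("::", 1)
    keywords = [a.strip().upper() for a in attrs.split(",") if a.strip()]
    var_names = [n.strip() for n in names.split(",") if n.strip()]
    return var_type, keywords, var_names

assert split_declaration("integer, intent(in) :: n, m  ! counts") == (
    "INTEGER",
    ["INTENT(IN)"],
    ["n", "m"],
)
# -----------------------------------------------------------------------------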
Get all the keyword modifier matches keywords = re.findall(FRegex.SUB_MOD, line) # remove modifiers from line line = re.sub(FRegex.SUB_MOD, "", line) name, args, _ = get_procedure_modifiers(line, FRegex.SUB) if name is None: return None return "sub", SubInfo(name, args, keywords, mod_flag) def read_block_def(line: str) -> tuple[Literal["block"], str] | None: """Attempt to read BLOCK definition line""" block_match = FRegex.BLOCK.match(line) if block_match: name: str = block_match.group(1) if name: name = name.replace(":", " ").strip() return "block", name return None def read_do_def(line: str) -> tuple[Literal["do"], str] | None: """Attempt to read a DO loop Returns ------- tuple[Literal["do"], str] | None Tuple with "do" and a fixed format tag if present """ line_stripped = strip_strings(line, maintain_len=True) line_no_comment = line_stripped.split("!")[0].rstrip() do_match = FRegex.DO.match(line_no_comment) if do_match: return "do", do_match.group(1).strip() return None def read_where_def(line: str) -> tuple[Literal["where"], bool] | None: """Attempt to read a WHERE block Returns ------- tuple[Literal["where"], bool] | None Tuple with "where" and a boolean indicating if labelled on unlabelled """ line_stripped = strip_strings(line, maintain_len=True) line_no_comment = line_stripped.split("!")[0].rstrip() # Match WHERE blocks where_match = FRegex.WHERE.match(line_no_comment) if where_match: trailing_line = line[where_match.end(0) :] close_paren = find_paren_match(trailing_line) if close_paren < 0: return "where", True if FRegex.WORD.match(trailing_line[close_paren + 1 :].strip()): return "where", True else: return "where", False return None def read_if_def(line: str) -> tuple[Literal["if"], None] | None: """Attempt to read an IF conditional Returns ------- tuple[Literal["if"], None] | None A Literal "if" and None tuple """ line_stripped = strip_strings(line, maintain_len=True) line_no_comment = line_stripped.split("!")[0].rstrip() if FRegex.IF.match(line_no_comment) and FRegex.THEN.search(line_no_comment): return "if", None return None def read_associate_def(line: str): assoc_match = FRegex.ASSOCIATE.match(line) if assoc_match is not None: trailing_line = line[assoc_match.end(0) :] match_char = find_paren_match(trailing_line) if match_char < 0: return "assoc", [] var_words = separate_def_list(trailing_line[:match_char].strip()) return "assoc", var_words def read_select_def(line: str): """Attempt to read SELECT definition line""" select_match = FRegex.SELECT.match(line) select_desc = None select_binding = None if select_match is None: select_type_match = FRegex.SELECT_TYPE.match(line) if select_type_match is None: select_default_match = FRegex.SELECT_DEFAULT.match(line) if select_default_match is None: return None else: return "select", SelectInfo(4, None, None) select_type = 3 select_desc = select_type_match.group(1).upper() select_binding = select_type_match.group(2) else: select_word = select_match.group(1) select_type = -1 if select_word.lower().startswith("case"): select_type = 1 elif select_word.lower().startswith("type"): select_type = 2 select_binding = select_match.group(2) return "select", SelectInfo(select_type, select_binding, select_desc) def read_type_def(line: str): """Attempt to read TYPE definition line""" type_match = FRegex.TYPE_DEF.match(line) if type_match is None: return None trailing_line = line[type_match.end(1) :].split("!")[0] trailing_line = trailing_line.strip() # Parse keywords keyword_match = FRegex.TATTR_LIST.match(trailing_line) keywords: list[str] = [] 
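# --- Illustrative sketch (hypothetical helper in the spirit of find_paren_match)
# Several readers above (WHERE, ASSOCIATE, kind specs, implied dimensions) need
# the index of the parenthesis that closes a group opened just before the text
# they receive.  A plain depth counter is enough for that:
def paren_match(text: str) -> int:
    """Return the index of the ')' closing an already-open '(' or -1 if absent."""
    depth = 1
    for i, char in enumerate(text):
        if char == "(":
            depth += 1
        elif char == ")":
            depth -= 1
            if depth == 0:
                return i
    return -1  # group is not closed on this line (continuation or incomplete)

assert paren_match("size(a, 1)) :: x") == 10
assert paren_match("a + b") == -1
# -----------------------------------------------------------------------------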
parent = None while keyword_match: keyword_strip = keyword_match.group(0).replace(",", " ").strip().upper() extend_match = FRegex.EXTENDS.match(keyword_strip) if extend_match: parent = extend_match.group(1).lower() else: keywords.append(keyword_strip) # Get visibility and/or extends/abstract modifiers trailing_line = trailing_line[keyword_match.end(0) :] keyword_match = FRegex.TATTR_LIST.match(trailing_line) # Get name line_split = trailing_line.split("::") if len(line_split) == 1: if len(keywords) > 0 and parent is None: return None else: if trailing_line.split("(")[0].strip().lower() == "is": return None trailing_line = line_split[0] else: trailing_line = line_split[1] # word_match = FRegex.WORD.match(trailing_line.strip()) if word_match: name: str = word_match.group(0) else: return None # return "typ", ClassInfo(name, parent, keywords) def read_enum_def(line: str): """Attempt to read ENUM definition line""" if FRegex.ENUM_DEF.match(line): return "enum", None return None def read_generic_def(line: str): """Attempt to read generic procedure definition line""" generic_match = FRegex.GENERIC_PRO.match(line) if generic_match is None: return None # trailing_line = line[generic_match.end(0) - 1 :].split("!")[0].strip() if len(trailing_line) == 0: return None # Set visibility if generic_match.group(2) is None: vis_flag = 0 else: if generic_match.group(2).lower() == "private": vis_flag = -1 else: vis_flag = 1 # i1 = trailing_line.find("=>") if i1 < 0: return None bound_name: str = trailing_line[:i1].strip() if FRegex.GEN_ASSIGN.match(bound_name): return None pro_list = trailing_line[i1 + 2 :].split(",") # pro_out: list[str] = [] for bound_pro in pro_list: if len(bound_pro.strip()) > 0: pro_out.append(bound_pro.strip()) if len(pro_out) == 0: return None # return "gen", GenProcDefInfo(bound_name, pro_out, vis_flag) def read_mod_def(line: str): """Attempt to read MODULE and MODULE PROCEDURE, MODULE FUNCTION definition lines""" # Get all the keyword modifier mathces keywords = re.findall(FRegex.SUB_MOD, line) # remove modifiers from line line = re.sub(FRegex.SUB_MOD, "", line) mod_match = FRegex.MOD.match(line) if mod_match is None: return None name = mod_match.group(1) if name.lower() == "procedure": trailing_line = line[mod_match.end(1) :] pro_names = [] line_split = trailing_line.split(",") for name in line_split: pro_names.append(name.strip().lower()) return "int_pro", pro_names # Check for submodule definition trailing_line = line[mod_match.start(1) :] # module procedure sub_res = read_sub_def(trailing_line, mod_flag=True) if sub_res is not None: return sub_res # module function fun_res = read_var_def(trailing_line, fun_only=True) if fun_res is not None: fun_res[1].mod_flag = True fun_res[1].keywords = keywords return fun_res fun_res = read_fun_def(trailing_line, mod_flag=True) if fun_res is not None: fun_res[1].keywords = keywords return fun_res return "mod", name def read_submod_def(line: str): """Attempt to read SUBMODULE definition line""" submod_match = FRegex.SUBMOD.match(line) if submod_match is None: return None parent_name: str = None name: str = None trailing_line = line[submod_match.end(0) :].split("!")[0] trailing_line = trailing_line.strip() parent_match = FRegex.WORD.match(trailing_line) if parent_match: parent_name = parent_match.group(0).lower() if len(trailing_line) > parent_match.end(0) + 1: trailing_line = trailing_line[parent_match.end(0) + 1 :].strip() else: trailing_line = "" name_match = FRegex.WORD.search(trailing_line) if name_match: name = name_match.group(0).lower() 
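# --- Illustrative sketch (compact regex, not read_submod_def above) ----------
# A SUBMODULE header names its ancestor in parentheses followed by the name of
# the submodule itself, e.g. "submodule (parent) child".  Ignoring the optional
# "ancestor:parent" form, both identifiers can be pulled out with a single
# hypothetical pattern:
import re

SUBMOD_RE = re.compile(r"[ ]*SUBMODULE[ ]*\([ ]*(\w+)[ ]*\)[ ]*(\w+)", re.I)

def read_submodule(line: str):
    match = SUBMOD_RE.match(line)
    if match is None:
        return None
    return match.group(2).lower(), match.group(1).lower()  # (name, parent)

assert read_submodule("submodule (points) points_a") == ("points_a", "points")
assert read_submodule("module points") is None
# -----------------------------------------------------------------------------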
return "smod", SmodInfo(name, parent_name) def read_prog_def(line: str) -> tuple[Literal["prog"], str] | None: """Attempt to read PROGRAM definition line""" prog_match = FRegex.PROG.match(line) if prog_match is None: return None return "prog", prog_match.group(1) def read_int_def(line: str) -> tuple[Literal["int"], InterInfo] | None: """Attempt to read INTERFACE definition line""" int_match = FRegex.INT.match(line) if int_match is None: return None int_name = int_match.group(2).lower() is_abstract = int_match.group(1) is not None if int_name == "": return "int", InterInfo(None, is_abstract) if int_name == "assignment" or int_name == "operator": return "int", InterInfo(None, False) return "int", InterInfo(int_match.group(2), is_abstract) def read_use_stmt(line: str) -> tuple[Literal["use"], UseInfo] | None: """Attempt to read USE statement""" use_match = FRegex.USE.match(line) if use_match is None: return None trailing_line = line[use_match.end(0) :].lower() use_mod = use_match.group(2) only_list: set[str] = set() rename_map: dict[str, str] = {} if use_match.group(3): for only_stmt in trailing_line.split(","): only_split = only_stmt.split("=>") only_name = only_split[0].strip() only_list.add(only_name) if len(only_split) == 2: rename_map[only_name] = only_split[1].strip() return "use", UseInfo(use_mod, only_list, rename_map) def read_imp_stmt(line: str) -> tuple[Literal["import"], list[str]] | None: """Attempt to read IMPORT statement""" import_match = FRegex.IMPORT.match(line) if import_match is None: return None trailing_line = line[import_match.end(0) - 1 :].lower() import_list = [import_obj.strip() for import_obj in trailing_line.split(",")] return "import", import_list def read_inc_stmt(line: str) -> tuple[Literal["inc"], str] | None: """Attempt to read INCLUDE statement""" inc_match = FRegex.INCLUDE.match(line) if inc_match is None: return None inc_path: str = inc_match.group(1) return "inc", inc_path def read_vis_stmnt(line: str) -> tuple[Literal["vis"], VisInfo] | None: """Attempt to read PUBLIC/PRIVATE statement""" vis_match = FRegex.VIS.match(line) if vis_match is None: return None vis_type = 0 if vis_match.group(1).lower() == "private": vis_type = 1 trailing_line = line[vis_match.end(0) :].split("!")[0] mod_words = FRegex.WORD.findall(trailing_line) return "vis", VisInfo(vis_type, mod_words) def_tests = [ read_var_def, read_sub_def, read_fun_def, read_block_def, read_where_def, read_do_def, read_if_def, read_associate_def, read_select_def, read_type_def, read_enum_def, read_use_stmt, read_imp_stmt, read_int_def, read_generic_def, read_mod_def, read_prog_def, read_submod_def, read_inc_stmt, read_vis_stmnt, ] def find_external_type(file_ast: FortranAST, desc_string: str, name: str) -> bool: """Encountered a variable with EXTERNAL as its type Try and find an already defined variable with a NORMAL Fortran Type""" if not desc_string.upper() == "EXTERNAL": return False counter = 0 # Definition without EXTERNAL has already been parsed for v in file_ast.variable_list: if name == v.name: # If variable is already in external objs it has # been parsed correctly so exit if v in file_ast.external_objs: return False v.set_external_attr() file_ast.external_objs.append(v) counter += 1 # TODO: do I need to update AST any more? if counter == 1: return True else: return False def find_external_attr(file_ast: FortranAST, name: str, new_var: Variable) -> bool: """Check if this NORMAL Fortran variable is in the external_objs with only ``EXTERNAL`` as its type. 
Used to detect seperated ``EXTERNAL`` declarations. Parameters ---------- file_ast : fortran_ast AST file name : str Variable name, stripped new_var : fortran_var Fortran variable to check against Returns ------- bool True if only a single ``EXTERNAL`` definition is encountered False for everything else, which will cause a diagnostic error to be raised """ counter = 0 for v in file_ast.external_objs: if v.name != name: continue if v.desc.upper() != "EXTERNAL": continue # We do this once if counter == 0: v.desc = new_var.desc v.set_external_attr() # TODO: do i need to update AST any more? counter += 1 # Only one definition encountered if counter == 1: return True # If no variable or multiple variables add to AST. # Multiple defs will throw diagnostic error as it should else: return False def find_external( file_ast: FortranAST, desc_string: str, name: str, new_var: Variable, ) -> bool: """Find a procedure, function, subroutine, etc. that has been defined as ``EXTERNAL``. ``EXTERNAL``s are parsed as ``fortran_var``, since there is no way of knowing if ``real, external :: val`` is a function or a subroutine. This method exists solely for ``EXTERNAL`` s that are defined across multiple lines e.g. .. code-block:: fortran EXTERNAL VAR REAL VAR or .. code-block:: fortran REAL VAR EXTERNAL VAR Parameters ---------- file_ast : fortran_ast AST desc_string : str Variable type e.g. ``REAL``, ``INTEGER``, ``EXTERNAL`` name : str Variable name new_var : fortran_var The line variable that we are attempting to match with an ``EXTERNAL`` definition Returns ------- bool True if the variable is ``EXTERNAL`` and we manage to link it to the rest of its components, else False """ if find_external_type(file_ast, desc_string, name): return True elif desc_string.upper() != "EXTERNAL": if find_external_attr(file_ast, name, new_var): return True return False class FortranFile: def __init__(self, path: str = None, pp_suffixes: list = None): self.path: str = path self.contents_split: list[str] = [] self.contents_pp: list[str] = [] self.pp_defs: dict = {} self.nLines: int = 0 self.fixed: bool = False self.preproc: bool = False self.ast: FortranAST = None self.hash: str = None if path: _, file_ext = os.path.splitext(os.path.basename(path)) if pp_suffixes: self.preproc = file_ext in pp_suffixes else: self.preproc = file_ext == file_ext.upper() self.COMMENT_LINE_MATCH, self.DOC_COMMENT_MATCH = self.get_comment_regexs() def copy(self) -> FortranFile: """Copy content to new file object (does not copy objects)""" copy_obj = FortranFile(self.path) copy_obj.preproc = self.preproc copy_obj.fixed = self.fixed copy_obj.contents_pp = self.contents_pp copy_obj.contents_split = self.contents_split copy_obj.pp_defs = self.pp_defs copy_obj.set_contents(self.contents_split) return copy_obj def load_from_disk(self) -> tuple[str | None, bool | None]: """Read file from disk or update file contents only if they have changed A MD5 hash is used to determine that Returns ------- tuple[str|None, bool|None] ``str`` : string containing IO error message else None ``bool``: boolean indicating if the file has changed """ contents: str try: with open(self.path, "r", encoding="utf-8", errors="replace") as f: contents = re.sub(r"\t", r" ", f.read()) except OSError: return "Could not read/decode file", None else: # Check if files are the same try: hash = hashlib.md5( contents.encode("utf-8"), usedforsecurity=False ).hexdigest() # Python <=3.8 does not have the `usedforsecurity` option except TypeError: hash = 
hashlib.md5(contents.encode("utf-8")).hexdigest() if hash == self.hash: return None, False self.hash = hash self.contents_split = contents.splitlines() self.fixed = detect_fixed_format(self.contents_split) self.contents_pp = self.contents_split self.nLines = len(self.contents_split) return None, True def apply_change(self, change: dict) -> bool: """Apply a change to the file.""" def check_change_reparse(line_no: int) -> bool: if (line_no < 0) or (line_no > self.nLines - 1): return True pre_lines, curr_line, _ = self.get_code_line(line_no, forward=False) # Skip comment lines if self.fixed: if FRegex.FIXED_COMMENT.match(curr_line): return False else: if FRegex.FREE_COMMENT.match(curr_line): return False # Check for line labels and semicolons full_line = "".join(pre_lines) + curr_line full_line, line_label = strip_line_label(full_line) if line_label is not None: return True line_stripped = strip_strings(full_line, maintain_len=True) if line_stripped.find(";") >= 0: return True # Find trailing comments comm_ind = line_stripped.find("!") if comm_ind >= 0: line_no_comment = full_line[:comm_ind] else: line_no_comment = full_line # Various single line tests if FRegex.END_WORD.match(line_no_comment): return True if FRegex.IMPLICIT.match(line_no_comment): return True if FRegex.CONTAINS.match(line_no_comment): return True # Generic "non-definition" line if FRegex.NON_DEF.match(line_no_comment): return False # Loop through tests for test in def_tests: if test(line_no_comment): return True return False self.hash = None text = change.get("text", "") change_range = change.get("range") if len(text) == 0: text_split = [""] else: text_split = text.splitlines() # Check for ending newline if (text[-1] == "\n") or (text[-1] == "\r"): text_split.append("") if change_range is None: # The whole file has changed self.set_contents(text_split) return True start_line = change_range["start"]["line"] start_col = change_range["start"]["character"] end_line = change_range["end"]["line"] end_col = change_range["end"]["character"] # Check for an edit occurring at the very end of the file if start_line == self.nLines: self.set_contents(self.contents_split + text_split) return True # Check for single line edit if (start_line == end_line) and (len(text_split) == 1): prev_line = self.contents_split[start_line] self.contents_split[start_line] = ( prev_line[:start_col] + text + prev_line[end_col:] ) self.contents_pp[start_line] = self.contents_split[start_line] return check_change_reparse(start_line) # Apply standard change to document new_contents = [] for i, line in enumerate(self.contents_split): if (i < start_line) or (i > end_line): new_contents.append(line) continue if i == start_line: for j, change_line in enumerate(text_split): if j == 0: new_contents.append(line[:start_col] + change_line) else: new_contents.append(change_line) if i == end_line: new_contents[-1] += line[end_col:] self.set_contents(new_contents) return True def set_contents(self, contents_split: list, detect_format: bool = True): """Set file contents""" self.contents_split = contents_split self.contents_pp = self.contents_split self.nLines = len(self.contents_split) if detect_format: self.fixed = detect_fixed_format(self.contents_split) def get_line(self, line_no: int, pp_content: bool = False) -> str: """Get single line from file""" try: if pp_content: return self.contents_pp[line_no] return self.contents_split[line_no] except (TypeError, IndexError): return None def get_code_line( self, line_no: int, forward: bool = True, backward: bool = True, 
pp_content: bool = False, strip_comment: bool = False, ) -> tuple[list[str], str, list[str]]: """Get full code line from file including any adjacent continuations""" curr_line = self.get_line(line_no, pp_content) if curr_line is None: return [], None, [] # Search backward for prefix lines line_ind = line_no - 1 pre_lines = [] if backward: if self.fixed: # Fixed format file tmp_line = curr_line while line_ind > 0: if FRegex.FIXED_CONT.match(tmp_line): prev_line = tmp_line tmp_line = self.get_line(line_ind, pp_content) if line_ind == line_no - 1: curr_line = " " * 6 + curr_line[6:] else: pre_lines[-1] = " " * 6 + prev_line[6:] pre_lines.append(tmp_line) else: break line_ind -= 1 else: # Free format file opt_cont_match = FRegex.FREE_CONT.match(curr_line) if opt_cont_match: curr_line = ( " " * opt_cont_match.end(0) + curr_line[opt_cont_match.end(0) :] ) while line_ind > 0: tmp_line = strip_strings( self.get_line(line_ind, pp_content), maintain_len=True ) tmp_no_comm = tmp_line.split("!")[0] cont_ind = tmp_no_comm.rfind("&") opt_cont_match = FRegex.FREE_CONT.match(tmp_no_comm) if opt_cont_match: if cont_ind == opt_cont_match.end(0) - 1: break tmp_no_comm = ( " " * opt_cont_match.end(0) + tmp_no_comm[opt_cont_match.end(0) :] ) if cont_ind >= 0: pre_lines.append(tmp_no_comm[:cont_ind]) else: break line_ind -= 1 # Search forward for trailing lines with continuations line_ind = line_no + 1 post_lines = [] if forward: if self.fixed: if line_ind < self.nLines: next_line = self.get_line(line_ind, pp_content) line_ind += 1 cont_match = FRegex.FIXED_CONT.match(next_line) while (cont_match is not None) and (line_ind < self.nLines): post_lines.append(" " * 6 + next_line[6:]) next_line = self.get_line(line_ind, pp_content) line_ind += 1 cont_match = FRegex.FIXED_CONT.match(next_line) else: line_stripped = strip_strings(curr_line, maintain_len=True) iAmper = line_stripped.find("&") iComm = line_stripped.find("!") if iComm < 0: iComm = iAmper + 1 next_line = "" # Read the next line if needed while (iAmper >= 0) and (iAmper < iComm): if line_ind == line_no + 1: curr_line = curr_line[:iAmper] elif next_line != "": post_lines[-1] = next_line[:iAmper] next_line = self.get_line(line_ind, pp_content) line_ind += 1 # Skip any preprocessor statements when seeking the next line if FRegex.PP_ANY.match(next_line): next_line = "" post_lines.append("") continue # Skip empty or comment lines match = FRegex.FREE_COMMENT.match(next_line) if next_line.rstrip() == "" or match: next_line = "" post_lines.append("") continue opt_cont_match = FRegex.FREE_CONT.match(next_line) if opt_cont_match: next_line = ( " " * opt_cont_match.end(0) + next_line[opt_cont_match.end(0) :] ) post_lines.append(next_line) line_stripped = strip_strings(next_line, maintain_len=True) iAmper = line_stripped.find("&") iComm = line_stripped.find("!") if iComm < 0: iComm = iAmper + 1 # Detect start of comment in current line if strip_comment: curr_line = self.strip_comment(curr_line) pre_lines.reverse() return pre_lines, curr_line, post_lines def strip_comment(self, line: str) -> str: """Strip comment from line""" if self.fixed: if FRegex.FIXED_COMMENT.match(line) and FRegex.FIXED_OPENMP.match(line): return "" else: if FRegex.FREE_OPENMP.match(line) is None: line = line.split("!")[0] return line def find_word_in_code_line( self, line_no: int, word: str, forward: bool = True, backward: bool = False, pp_content: bool = False, ) -> tuple[int, Range]: back_lines, curr_line, forward_lines = self.get_code_line( line_no, forward=forward, backward=backward, 
pp_content=pp_content ) word_range = Range(-1, -1) if curr_line is not None: find_word_lower = word.lower() word_range = find_word_in_line(curr_line.lower(), find_word_lower) if backward and (word_range.start < 0): back_lines.reverse() for (i, line) in enumerate(back_lines): word_range = find_word_in_line(line.lower(), find_word_lower) if word_range.start >= 0: line_no -= i + 1 return line_no, word_range if forward and (word_range.start < 0): for (i, line) in enumerate(forward_lines): word_range = find_word_in_line(line.lower(), find_word_lower) if word_range.start >= 0: line_no += i + 1 return line_no, word_range return line_no, word_range def preprocess( self, pp_defs: dict = None, include_dirs: set = None, debug: bool = False ) -> tuple[list, list]: if pp_defs is None: pp_defs = {} if include_dirs is None: include_dirs = set() self.contents_pp, pp_skips, pp_defines, self.pp_defs = preprocess_file( self.contents_split, self.path, pp_defs=pp_defs, include_dirs=include_dirs, debug=debug, ) return pp_skips, pp_defines def check_file(self, obj_tree, max_line_length=-1, max_comment_line_length=-1): diagnostics = [] if (max_line_length > 0) or (max_comment_line_length > 0): msg_line = f'Line length exceeds "max_line_length" ({max_line_length})' msg_comment = ( 'Comment line length exceeds "max_comment_line_length"' f" ({max_comment_line_length})" ) if self.fixed: COMMENT_LINE_MATCH = FRegex.FIXED_COMMENT else: COMMENT_LINE_MATCH = FRegex.FREE_COMMENT for (i, line) in enumerate(self.contents_split): if COMMENT_LINE_MATCH.match(line) is None: if 0 < max_line_length < len(line): self.ast.add_error( msg_line, Severity.warn, i + 1, max_line_length, len(line) ) else: if 0 < max_comment_line_length < len(line): self.ast.add_error( msg_comment, Severity.warn, i + 1, max_comment_line_length, len(line), ) errors, diags_ast = self.ast.check_file(obj_tree) diagnostics += diags_ast for error in errors: diagnostics.append(error.build(self)) return diagnostics def parse( self, debug: bool = False, pp_defs: dict = None, include_dirs: set = None, ) -> FortranAST: """Parse Fortran file contents of a fortran_file object and build an Abstract Syntax Tree (AST) Parameters ---------- debug : bool, optional Set to true to enable debugging, by default False pp_defs : dict, optional Preprocessor definitions and their values, by default None include_dirs : set, optional Preprocessor include directories, by default None Returns ------- fortran_ast An Abstract Syntax Tree """ if pp_defs is None: pp_defs = {} if include_dirs is None: include_dirs = set() # Configure the parser logger if debug: logging.basicConfig( level=logging.DEBUG, stream=sys.stdout, format="%(message)s" ) # This is not necessarily the same as self.ast file_ast = FortranAST(self) if self.preproc: log.debug("=== PreProc Pass ===\n") pp_skips, pp_defines = self.preprocess( pp_defs=pp_defs, include_dirs=include_dirs, debug=debug ) for pp_reg in pp_skips: file_ast.start_ppif(pp_reg[0]) file_ast.end_ppif(pp_reg[1]) log.debug("\n=== Parsing Pass ===\n") else: log.debug("=== No PreProc ===\n") pp_skips = [] pp_defines = [] line_no = 0 block_id_stack = [] docs: list[str] = [] # list used to temporarily store docstrings counters = Counter( do=0, ifs=0, block=0, select=0, interface=0, ) multi_lines = deque() self.COMMENT_LINE_MATCH, self.DOC_COMMENT_MATCH = self.get_comment_regexs() while (line_no < self.nLines) or multi_lines: # Get next line # Get a normal line, i.e. 
the stack is empty if not multi_lines: # get_line has a 0-based index line = self.get_line(line_no, pp_content=True) line_no += 1 get_full = True # Line is part of a multi-line construct, i.e. contained ';' else: line = multi_lines.pop() get_full = False if line == "": continue # Skip empty lines # Parse documentation strings to AST nodes, this implicitly operates # on docs, i.e. appends or nullifies it idx = self.parse_docs(line, line_no, file_ast, docs) if idx: line_no = idx continue # Handle preprocessing regions do_skip = False for pp_reg in pp_skips: if (line_no >= pp_reg[0]) and (line_no <= pp_reg[1]): do_skip = True break if line_no in pp_defines: do_skip = True if do_skip: continue # Get full line, seek forward for code lines # @note line_no-1 refers to the array index for the current line if get_full: _, line, post_lines = self.get_code_line( line_no - 1, backward=False, pp_content=True ) line_no += len(post_lines) line = "".join([line] + post_lines) line, line_label = strip_line_label(line) line_stripped = strip_strings(line, maintain_len=True) # Find trailing comments comm_ind = line_stripped.find("!") if comm_ind >= 0: line_no_comment = line[:comm_ind] line_stripped = line_stripped[:comm_ind] docs = self.get_single_line_docstring(line[comm_ind:]) else: line_no_comment = line # Split lines with semicolons, place the multiple lines into a stack if line_stripped.find(";") >= 0: multi_lines.extendleft(line_stripped.split(";")) line = multi_lines.pop() line_stripped = line # Test for scope end if file_ast.END_SCOPE_REGEX is not None: match = FRegex.END_WORD.match(line_no_comment) # Handle end statement if self.parse_end_scope_word(line_no_comment, line_no, file_ast, match): continue # Look for old-style end of DO loops with line labels if self.parse_do_fixed_format( line, line_no, file_ast, line_label, block_id_stack ): continue # Skip if known generic code line if FRegex.NON_DEF.match(line_no_comment): continue # Mark implicit statement if self.parse_implicit(line_no_comment, line_no, file_ast): continue # Mark contains statement if self.parse_contains(line_no_comment, line_no, file_ast): continue # Loop through tests obj_read = self.get_fortran_definition(line) # Move to next line if nothing in the definition tests matches if obj_read is None: continue obj_type: str = obj_read[0] obj_info = obj_read[1] if obj_type == "var": if obj_info.var_names is None: continue link_name: str = None procedure_def = False if obj_info.var_type[:3] == "PRO": if file_ast.current_scope.get_type() == INTERFACE_TYPE_ID: for var_name in obj_info.var_names: file_ast.add_int_member(var_name) log.debug("%s !!! INTERFACE-PRO - Ln:%d", line.strip(), line_no) continue procedure_def = True link_name = get_paren_substring(obj_info.var_type) for var_name in obj_info.var_names: desc = obj_info.var_type link_name: str = None if var_name.find("=>") > -1: name_split = var_name.split("=>") name = name_split[0] link_name = name_split[1].split("(")[0].strip() if link_name.lower() == "null": link_name = None else: name = var_name.split("=")[0] # Add dimension if specified # TODO: turn into function and add support for co-arrays i.e. 
[*] # Copy global keywords to the individual variable var_keywords: list[str] = obj_info.keywords[:] # The name starts with ( if name.find("(") == 0: continue name, dims = self.parse_imp_dim(name) name, char_len = self.parse_imp_char(name) if dims: var_keywords.append(dims) if char_len: desc += char_len name = name.strip() keywords, keyword_info = map_keywords(var_keywords) if procedure_def: new_var = Method( file_ast, line_no, name, desc, keywords, keyword_info=keyword_info, link_obj=link_name, ) else: new_var = Variable( file_ast, line_no, name, desc, keywords, keyword_info=keyword_info, # kind=obj_info.var_kind, link_obj=link_name, ) # If the object is fortran_var and a parameter include # the value in hover if new_var.is_parameter(): _, col = find_word_in_line(line, name) match = FRegex.PARAMETER_VAL.match(line[col:]) if match: var = match.group(1).strip() new_var.set_parameter_val(var) # Check if the "variable" is external and if so cycle if find_external(file_ast, desc, name, new_var): continue # if not merge_external: file_ast.add_variable(new_var) log.debug("%s !!! VARIABLE - Ln:%d", line, line_no) elif obj_type == "mod": new_mod = Module(file_ast, line_no, obj_info) file_ast.add_scope(new_mod, FRegex.END_MOD) log.debug("%s !!! MODULE - Ln:%d", line, line_no) elif obj_type == "smod": new_smod = Submodule( file_ast, line_no, obj_info.name, ancestor_name=obj_info.parent ) file_ast.add_scope(new_smod, FRegex.END_SMOD) log.debug("%s !!! SUBMODULE - Ln:%d", line, line_no) elif obj_type == "prog": new_prog = Program(file_ast, line_no, obj_info) file_ast.add_scope(new_prog, FRegex.END_PROG) log.debug("%s !!! PROGRAM - Ln:%d", line, line_no) elif obj_type == "sub": keywords, _ = map_keywords(obj_info.keywords) new_sub = Subroutine( file_ast, line_no, obj_info.name, args=obj_info.args, mod_flag=obj_info.mod_flag, keywords=keywords, ) file_ast.add_scope(new_sub, FRegex.END_SUB) log.debug("%s !!! SUBROUTINE - Ln:%d", line, line_no) elif obj_type == "fun": keywords, _ = map_keywords(obj_info.keywords) new_fun = Function( file_ast, line_no, obj_info.name, args=obj_info.args, mod_flag=obj_info.mod_flag, keywords=keywords, result_type=obj_info.result.type, result_name=obj_info.result.name, ) file_ast.add_scope(new_fun, FRegex.END_FUN) # function type is present without result(), register the automatic # result() variable that is the function name if obj_info.result.type: keywords, keyword_info = map_keywords(obj_info.result.keywords) new_obj = Variable( file_ast, line_no, name=obj_info.result.name, var_desc=obj_info.result.type, keywords=keywords, keyword_info=keyword_info, ) file_ast.add_variable(new_obj) log.debug("%s !!! FUNCTION - Ln:%d", line, line_no) elif obj_type == "block": name = obj_info if name is None: counters["block"] += 1 name = f"#BLOCK{counters['block']}" new_block = Block(file_ast, line_no, name) file_ast.add_scope(new_block, FRegex.END_BLOCK, req_container=True) log.debug("%s !!! BLOCK - Ln:%d", line, line_no) elif obj_type == "do": counters["do"] += 1 name = f"#DO{counters['do']}" if obj_info != "": block_id_stack.append(obj_info) new_do = Do(file_ast, line_no, name) file_ast.add_scope(new_do, FRegex.END_DO, req_container=True) log.debug("%s !!! DO - Ln:%d", line, line_no) elif obj_type == "where": # Add block if WHERE is not single line if not obj_info: counters["do"] += 1 name = f"#WHERE{counters['do']}" new_do = Where(file_ast, line_no, name) file_ast.add_scope(new_do, FRegex.END_WHERE, req_container=True) log.debug("%s !!! 
WHERE - Ln:%d", line, line_no) elif obj_type == "assoc": counters["block"] += 1 name = f"#ASSOC{counters['block']}" new_assoc = Associate(file_ast, line_no, name) file_ast.add_scope(new_assoc, FRegex.END_ASSOCIATE, req_container=True) for bound_var in obj_info: try: bind_name, link_name = bound_var.split("=>") file_ast.add_variable( new_assoc.create_binding_variable( file_ast, line_no, bind_name.strip(), link_name.strip(), ) ) except ValueError: pass log.debug("%s !!! ASSOCIATE - Ln:%d", line, line_no) elif obj_type == "if": counters["if"] += 1 name = f"#IF{counters['if']}" new_if = If(file_ast, line_no, name) file_ast.add_scope(new_if, FRegex.END_IF, req_container=True) log.debug("%s !!! IF - Ln:%d", line, line_no) elif obj_type == "select": counters["select"] += 1 name = f"#SELECT{counters['select']}" new_select = Select(file_ast, line_no, name, obj_info) file_ast.add_scope(new_select, FRegex.END_SELECT, req_container=True) new_var = new_select.create_binding_variable( file_ast, line_no, f"{obj_info.desc}({obj_info.binding})", obj_info.type, ) if new_var is not None: file_ast.add_variable(new_var) log.debug("%s !!! SELECT - Ln:%d", line, line_no) elif obj_type == "typ": keywords, _ = map_keywords(obj_info.keywords) new_type = Type(file_ast, line_no, obj_info.name, keywords) if obj_info.parent is not None: new_type.set_inherit(obj_info.parent) file_ast.add_scope(new_type, FRegex.END_TYPED, req_container=True) log.debug("%s !!! TYPE - Ln:%d", line, line_no) elif obj_type == "enum": counters["block"] += 1 name = f"#ENUM{counters['block']}" new_enum = Enum(file_ast, line_no, name) file_ast.add_scope(new_enum, FRegex.END_ENUMD, req_container=True) log.debug("%s !!! ENUM - Ln:%d", line, line_no) elif obj_type == "int": name = obj_info.name if name is None: counters["interface"] += 1 name = f"#GEN_INT{counters['interface']}" new_int = Interface(file_ast, line_no, name, abstract=obj_info.abstract) file_ast.add_scope(new_int, FRegex.END_INT, req_container=True) log.debug("%s !!! INTERFACE - Ln:%d", line, line_no) elif obj_type == "gen": new_int = Interface( file_ast, line_no, obj_info.bound_name, abstract=False ) new_int.set_visibility(obj_info.vis_flag) file_ast.add_scope(new_int, FRegex.END_INT, req_container=True) for pro_link in obj_info.pro_links: file_ast.add_int_member(pro_link) file_ast.end_scope(line_no) log.debug("%s !!! GENERIC - Ln:%d", line, line_no) elif obj_type == "int_pro": if file_ast.current_scope is not None: if file_ast.current_scope.get_type() == INTERFACE_TYPE_ID: for name in obj_info: file_ast.add_int_member(name) log.debug("%s !!! INTERFACE-PRO - Ln:%d", line, line_no) elif file_ast.current_scope.get_type() == SUBMODULE_TYPE_ID: new_impl = Scope(file_ast, line_no, obj_info[0]) file_ast.add_scope(new_impl, FRegex.END_PRO) log.debug("%s !!! INTERFACE-IMPL - Ln:%d", line, line_no) elif obj_type == "use": file_ast.add_use( obj_info.mod_name, line_no, obj_info.only_list, obj_info.rename_map, ) log.debug("%s !!! USE - Ln:%d", line, line_no) elif obj_type == "import": file_ast.add_use("#IMPORT", line_no, obj_info) log.debug("%s !!! IMPORT - Ln:%d", line, line_no) elif obj_type == "inc": file_ast.add_include(obj_info, line_no) log.debug("%s !!! 
INCLUDE - Ln:%d", line, line_no) elif obj_type == "vis": if file_ast.current_scope is None: msg = "Visibility statement without enclosing scope" file_ast.add_error(msg, Severity.error, line_no, 0) else: if (len(obj_info.obj_names) == 0) and (obj_info.type == 1): file_ast.current_scope.set_default_vis(-1) else: if obj_info.type == MODULE_TYPE_ID: for word in obj_info.obj_names: file_ast.add_private(word) else: for word in obj_info.obj_names: file_ast.add_public(word) log.debug("%s !!! VISIBILITY - Ln:%d", line, line_no) file_ast.close_file(line_no) if debug: if len(file_ast.end_errors) > 0: log.debug("\n=== Scope Errors ===\n") for error in file_ast.end_errors: if error[0] >= 0: message = f"Unexpected end of scope at line {error[0]}" else: message = "Unexpected end statement: No open scopes" log.debug(f"{error[1]}: {message}") if len(file_ast.parse_errors) > 0: log.debug("\n=== Parsing Errors ===\n") for error in file_ast.parse_errors: log.debug(f"{error['range']}: {error['message']}") return file_ast def parse_imp_dim(self, line: str): """Parse the implicit dimension of an array e.g. var(3,4), var_name(size(val,1)*10) Parameters ---------- line : str line containing variable name Returns ------- tuple[str, str] truncated line, dimension string """ m = re.compile(r"[ ]*\w+[ ]*(\()", re.I).match(line) if not m: return line, None i = find_paren_match(line[m.end(1) :]) if i < 0: return line, None # triggers for autocomplete dims = line[m.start(1) : m.end(1) + i + 1] line = line[: m.start(1)] + line[m.end(1) + i + 1 :] return line, f"dimension{dims}" def parse_imp_char(self, line: str): """Parse the implicit character length from a variable e.g. var_name*10 or var_name*(10), var_name*(size(val, 1)) Parameters ---------- line : str line containing potential variable Returns ------- tuple[str, str] truncated line, character length """ match = re.compile(r"(\w+)[ ]*\*[ ]*(\d+|\()", re.I).match(line) if not match: return line, None if match.group(2) == "(": i = find_paren_match(line[match.end(2) :]) if i < 0: return line, None # triggers for autocomplete char_len = line[match.start(2) : match.end(2) + i + 1] elif match.group(2).isdigit(): char_len = match.group(2) return match.group(1), f"*{char_len}" def parse_end_scope_word( self, line: str, ln: int, file_ast: FortranAST, match: re.Match ) -> bool: """Parses END keyword marking the end of scopes Parameters ---------- line : str Document line ln : int Line number file_ast : fortran_ast AST object match : re.Match END word regular expression match Returns ------- bool True if a AST scope is closed, False otherwise """ if match is None: return False end_scope_word: str = None if match.group(1) is None: end_scope_word = "" if file_ast.current_scope.req_named_end() and ( file_ast.current_scope is not file_ast.none_scope ): file_ast.end_errors.append([ln, file_ast.current_scope.sline]) else: scope_match = file_ast.END_SCOPE_REGEX.match(line[match.start(1) :]) if scope_match is not None: end_scope_word = scope_match.group(0) if end_scope_word is not None: if (file_ast.current_scope.get_type() == SELECT_TYPE_ID) and ( file_ast.current_scope.is_type_region() ): file_ast.end_scope(ln) file_ast.end_scope(ln) log.debug("%s !!! 
END %s Scope - Ln:%d", line, end_scope_word.upper(), ln) return True return False def parse_do_fixed_format( self, line: str, ln: int, file_ast: FortranAST, line_label: str, block_id_stack: list[str], ): if (file_ast.current_scope.get_type() == DO_TYPE_ID) and ( line_label is not None ): # TODO: try and move to end_scope pattern did_close = False while (len(block_id_stack) > 0) and (line_label == block_id_stack[-1]): file_ast.end_scope(ln) block_id_stack.pop() did_close = True log.debug("%s !!! END DO-LABELLED - Ln:%d", line, ln) if did_close: return True return False def parse_implicit(self, line: str, ln: int, file_ast: FortranAST) -> bool: """Parse implicit statements from a line Parameters ---------- line : str Document line ln : int Line number file_ast : fortran_ast AST object Returns ------- bool True if an IMPLICIT statements present, False otherwise """ match = FRegex.IMPLICIT.match(line) if match is None: return False if file_ast.current_scope is None: msg = "IMPLICIT statement without enclosing scope" file_ast.add_error(msg, Severity.error, ln, match.start(1), match.end(1)) else: if match.group(1).lower() == "none": file_ast.current_scope.set_implicit(False, ln) else: file_ast.current_scope.set_implicit(True, ln) log.debug("%s !!! IMPLICIT - Ln:%d", line, ln) return True def parse_contains(self, line: str, ln: int, file_ast: FortranAST) -> bool: """Parse contain statements Parameters ---------- line : str Document line ln : int Line number file_ast : fortran_ast AST object Returns ------- bool True if a contains is present, False otherwise """ match = FRegex.CONTAINS.match(line) if match is None: return False msg: str = None try: if file_ast.current_scope is None: msg = "CONTAINS statement without enclosing scope" else: file_ast.current_scope.mark_contains(ln) except ValueError: msg = "Multiple CONTAINS statements in scope" if msg: file_ast.add_error(msg, Severity.error, ln, match.start(1), match.end(1)) log.debug("%s !!! CONTAINS - Ln:%d", line, ln) return True def parse_docs(self, line: str, ln: int, file_ast: FortranAST, docs: list[str]): """Parse documentation stings of style Doxygen or FORD. Multiline docstrings are detected if the first comment starts with `!>` docstring continuations are detected with either `!>`, `!<` or `!!` Parameters ---------- line : str Document line ln : int Line number file_ast : fortran_ast AST object docs : list[str] Docstrings that are pending processing e.g. single line docstrings """ def format(docs: list[str]) -> str: if len(docs) == 1: return f"!! {docs[0]}" return "!! " + "\n!! ".join(docs) def add_line_comment(file_ast: FortranAST, docs: list[str]): # Handle dangling comments from previous line if docs: file_ast.add_doc(format(docs)) log.debug(f"{format(docs)} !!! Doc string - Line:{ln}") docs[:] = [] # empty the documentation stack # Check for comments in line if not self.COMMENT_LINE_MATCH.match(line): add_line_comment(file_ast, docs) return False # Check for documentation doc_match = self.DOC_COMMENT_MATCH.match(line) if not doc_match: add_line_comment(file_ast, docs) return False _ln = ln ln, docs[:], predocmark = self.get_docstring(ln, line, doc_match, docs) # Count the total length of all the stings in docs # most efficient implementation, see: shorturl.at/dfmyV if len("".join(docs)) > 0: file_ast.add_doc(format(docs), forward=predocmark) for (i, doc_line) in enumerate(docs): log.debug(f"{doc_line} !!! 
Doc string - Line:{_ln + i}") docs[:] = [] return ln def get_docstring( self, ln: int, line: str, match: Pattern, docs: list[str] ) -> tuple[int, list[str], bool]: """Extract entire documentation strings from the current file position Parameters ---------- ln : int Line number line : str Document line, not necessarily produced by `get_line()` match : Pattern Regular expression DOC match docs : list[str] Docstrings that are pending processing e.g. single line docstrings Returns ------- tuple[int, list[str], bool] The new line number at the end of the docstring, the docstring and a boolean flag indicating whether the docstring precedes the AST node (Doxygen style) or succeeds it (traditional FORD style) """ docstring: list[str] = docs docstring.append(line[match.end(0) :].strip()) predocmark = True if match.group(1) == ">" else False if ln >= self.nLines: return ln, docstring, predocmark # @note line index is 0-based # Start from the current line until EOF and check for docs for i in range(ln, self.nLines): next_line = self.get_line(i, pp_content=True) match = self.DOC_COMMENT_MATCH.match(next_line) if not match: ln = i break docstring.append(next_line[match.end(0) :].strip()) return ln, docstring, predocmark def get_single_line_docstring(self, line: str) -> list[str]: """Get a docstring of a single line. This is the same for both Legacy and Modern Fortran Parameters ---------- line : str Line of code Returns ------- list[str] A list containing the docstring. List will be empty if there is no match or the match is an empty string itself """ match = FRegex.FREE_DOC.match(line) if not match: return [] # if the string is empty return an empty list instead doc = line[match.end(0) :].strip() return [doc] if doc else [] def get_comment_regexs(self) -> tuple[Pattern, Pattern]: if self.fixed: return FRegex.FIXED_COMMENT, FRegex.FIXED_DOC return FRegex.FREE_COMMENT, FRegex.FREE_DOC def get_fortran_definition(self, line: str): for fortran_def in def_tests: obj = fortran_def(line) if obj is not None: return obj return None def preprocess_file( contents_split: list, file_path: str = None, pp_defs: dict = None, include_dirs: set = None, debug: bool = False, ): # Look for and mark excluded preprocessor paths in file # Initial implementation only looks for "if" and "ifndef" statements. 
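# --- Illustrative sketch (hypothetical, far simpler than preprocess_file) ----
# The comment block here describes marking excluded preprocessor regions.  The
# core of that is a stack of conditional blocks: push on #ifdef (an #ifndef
# just inverts the test), flip on #else, and record the excluded line range on
# #endif.  A minimal tracker for #ifdef/#else/#endif only:
from __future__ import annotations

def excluded_ranges(lines: list[str], defs: set[str]) -> list[tuple[int, int]]:
    skips: list[tuple[int, int]] = []
    stack: list[int] = []  # start line of the currently excluded block, or -1
    for i, line in enumerate(lines, start=1):
        text = line.strip()
        if text.startswith("#ifdef"):
            parts = text.split(maxsplit=1)
            name = parts[1] if len(parts) > 1 else ""
            stack.append(-1 if name in defs else i)
        elif text.startswith("#else") and stack:
            start = stack.pop()
            if start < 0:  # true branch was active: now exclude the else part
                stack.append(i)
            else:          # false branch ends here: record it, keep the else
                skips.append((start, i))
                stack.append(-1)
        elif text.startswith("#endif") and stack:
            start = stack.pop()
            if start >= 0:
                skips.append((start, i))
    return skips

src = ["#ifdef DEBUG", "print *, 'dbg'", "#else", "print *, 'rel'", "#endif"]
assert excluded_ranges(src, defs=set()) == [(1, 3)]
assert excluded_ranges(src, defs={"DEBUG"}) == [(3, 5)]
# -----------------------------------------------------------------------------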
# For "if" statements all blocks are excluded except the "else" block if present # For "ifndef" statements all blocks excluding the first block are excluded def eval_pp_if(text, defs: dict = None): def replace_ops(expr: str): expr = expr.replace("&&", " and ") expr = expr.replace("||", " or ") expr = expr.replace("!=", " <> ") expr = expr.replace("!", " not ") expr = expr.replace(" <> ", " != ") return expr def replace_defined(line: str): i0 = 0 out_line = "" for match in FRegex.DEFINED.finditer(line): if match.group(1) in defs: out_line += line[i0 : match.start(0)] + "($@)" else: out_line += line[i0 : match.start(0)] + "($%)" i0 = match.end(0) if i0 < len(line): out_line += line[i0:] return out_line def replace_vars(line: str): i0 = 0 out_line = "" for match in FRegex.WORD.finditer(line): if match.group(0) in defs: out_line += line[i0 : match.start(0)] + defs[match.group(0)] else: out_line += line[i0 : match.start(0)] + "False" i0 = match.end(0) if i0 < len(line): out_line += line[i0:] out_line = out_line.replace("$@", "True") out_line = out_line.replace("$%", "False") return out_line if defs is None: defs = {} out_line = replace_defined(text) out_line = replace_vars(out_line) try: line_res = eval(replace_ops(out_line)) except: return False else: return line_res if pp_defs is None: pp_defs = {} if include_dirs is None: include_dirs = set() if file_path is not None: include_dirs.add(os.path.abspath(os.path.dirname(file_path))) pp_skips = [] pp_defines = [] pp_stack = [] defs_tmp = pp_defs.copy() def_regexes = {} output_file = [] def_cont_name = None for (i, line) in enumerate(contents_split): # Handle multiline macro continuation if def_cont_name is not None: output_file.append("") if line.rstrip()[-1] != "\\": defs_tmp[def_cont_name] += line.strip() def_cont_name = None else: defs_tmp[def_cont_name] += line[0:-1].strip() continue # Handle conditional statements match = FRegex.PP_REGEX.match(line) if match: output_file.append(line) def_name = None if_start = False # Opening conditional statements if match.group(1) == "if ": is_path = eval_pp_if(line[match.end(1) :], defs_tmp) if_start = True elif match.group(1) == "ifdef": if_start = True def_name = line[match.end(0) :].strip() is_path = def_name in defs_tmp elif match.group(1) == "ifndef": if_start = True def_name = line[match.end(0) :].strip() is_path = not (def_name in defs_tmp) if if_start: if is_path: pp_stack.append([-1, -1]) log.debug(f"{line.strip()} !!! Conditional TRUE({i + 1})") else: pp_stack.append([i + 1, -1]) log.debug(f"{line.strip()} !!! Conditional FALSE({i + 1})") continue if len(pp_stack) == 0: continue # Closing/middle conditional statements inc_start = False exc_start = False if match.group(1) == "elif": if pp_stack[-1][0] < 0: pp_stack[-1][0] = i + 1 exc_start = True else: if eval_pp_if(line[match.end(1) :], defs_tmp): pp_stack[-1][1] = i - 1 pp_stack.append([-1, -1]) inc_start = True elif match.group(1) == "else": if pp_stack[-1][0] < 0: pp_stack[-1][0] = i + 1 exc_start = True else: pp_stack[-1][1] = i + 1 inc_start = True elif match.group(1) == "endif": if pp_stack[-1][0] < 0: pp_stack.pop() continue if pp_stack[-1][1] < 0: pp_stack[-1][1] = i + 1 log.debug(f"{line.strip()} !!! Conditional FALSE/END({i + 1})") pp_skips.append(pp_stack.pop()) if debug: if inc_start: log.debug(f"{line.strip()} !!! Conditional TRUE({i + 1})") elif exc_start: log.debug(f"{line.strip()} !!! 
Conditional FALSE({i + 1})") continue # Handle variable/macro definitions files match = FRegex.PP_DEF.match(line) if (match is not None) and ((len(pp_stack) == 0) or (pp_stack[-1][0] < 0)): output_file.append(line) pp_defines.append(i + 1) def_name = match.group(2) # If this is an argument list of a function add them to the name # get_definition will only return the function name upon hover # hence if the argument list is appended in the def_name then # querying the dictionary will not yield a result. # Need to properly parse the preprocessor files instead of this. # This also does not allow for multiline argument list definitions. # if match.group(3): # def_name += match.group(3) if (match.group(1) == "define") and (def_name not in defs_tmp): eq_ind = line[match.end(0) :].find(" ") if eq_ind >= 0: # Handle multiline macros if line.rstrip()[-1] == "\\": defs_tmp[def_name] = line[match.end(0) + eq_ind : -1].strip() def_cont_name = def_name else: defs_tmp[def_name] = line[match.end(0) + eq_ind :].strip() else: defs_tmp[def_name] = "True" elif (match.group(1) == "undef") and (def_name in defs_tmp): defs_tmp.pop(def_name, None) log.debug(f"{line.strip()} !!! Define statement({i + 1})") continue # Handle include files match = FRegex.PP_INCLUDE.match(line) if (match is not None) and ((len(pp_stack) == 0) or (pp_stack[-1][0] < 0)): log.debug(f"{line.strip()} !!! Include statement({i + 1})") include_filename = match.group(1).replace('"', "") include_path = None # Intentionally keep this as a list and not a set. There are cases # where projects play tricks with the include order of their headers # to get their codes to compile. Using a set would not permit that. for include_dir in include_dirs: include_path_tmp = os.path.join(include_dir, include_filename) if os.path.isfile(include_path_tmp): include_path = os.path.abspath(include_path_tmp) break if include_path is not None: try: include_file = FortranFile(include_path) err_string, _ = include_file.load_from_disk() if err_string is None: log.debug(f'\n!!! Parsing include file "{include_path}"') _, _, _, defs_tmp = preprocess_file( include_file.contents_split, file_path=include_path, pp_defs=defs_tmp, include_dirs=include_dirs, debug=debug, ) log.debug("!!! Completed parsing include file\n") else: log.debug(f"!!! Failed to parse include file: {err_string}") except: log.debug("!!! Failed to parse include file: exception") else: log.debug(f"{line.strip()} !!! Could not locate include file ({i + 1})") # Substitute (if any) read in preprocessor macros for def_tmp, value in defs_tmp.items(): def_regex = def_regexes.get(def_tmp) if def_regex is None: def_regex = re.compile(rf"\b{def_tmp}\b") def_regexes[def_tmp] = def_regex line_new, nsubs = def_regex.subn(value, line) if nsubs > 0: log.debug( f"{line.strip()} !!! 
Macro sub({i + 1}) '{def_tmp}' -> {value}" ) line = line_new output_file.append(line) return output_file, pp_skips, pp_defines, defs_tmp fortran-language-server-2.13.0+dfsg.1/fortls/regex_patterns.py000066400000000000000000000167101450400537300243360ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass from re import I, compile from typing import Pattern @dataclass(frozen=True) class FortranRegularExpressions: USE: Pattern = compile( r"[ ]*USE([, ]+(?:INTRINSIC|NON_INTRINSIC))?[ :]+(\w*)([, ]+ONLY[ :]+)?", I, ) IMPORT: Pattern = compile(r"[ ]*IMPORT[ :]+([a-z_])", I) INCLUDE: Pattern = compile(r"[ ]*INCLUDE[ :]*[\'\"]([^\'\"]*)", I) CONTAINS: Pattern = compile(r"[ ]*(CONTAINS)[ ]*$", I) IMPLICIT: Pattern = compile(r"[ ]*IMPLICIT[ ]+([a-z]*)", I) #: Parse procedure keywords but not if they start with , or ( or end with , or ) #: This is to avoid parsing as keywords variables named pure, impure, etc. SUB_MOD: Pattern = compile( r"[ ]*(?!<[,\()][ ]*)\b(PURE|IMPURE|ELEMENTAL|RECURSIVE)\b(?![,\)][ ]*)", I ) SUB: Pattern = compile(r"[ ]*SUBROUTINE[ ]+(\w+)", I) END_SUB: Pattern = compile(r"SUBROUTINE", I) FUN: Pattern = compile(r"[ ]*FUNCTION[ ]+(\w+)", I) RESULT: Pattern = compile(r"RESULT[ ]*\((\w*)\)", I) END_FUN: Pattern = compile(r"FUNCTION", I) MOD: Pattern = compile(r"[ ]*MODULE[ ]+(\w+)", I) END_MOD: Pattern = compile(r"MODULE", I) SUBMOD: Pattern = compile(r"[ ]*SUBMODULE[ ]*\(", I) END_SMOD: Pattern = compile(r"SUBMODULE", I) END_PRO: Pattern = compile(r"(MODULE)?[ ]*PROCEDURE", I) BLOCK: Pattern = compile(r"[ ]*([a-z_]\w*[ ]*:[ ]*)?BLOCK(?!\w)", I) END_BLOCK: Pattern = compile(r"BLOCK", I) DO: Pattern = compile(r"[ ]*(?:[a-z_]\w*[ ]*:[ ]*)?DO([ ]+[0-9]*|$)", I) END_DO: Pattern = compile(r"DO", I) WHERE: Pattern = compile(r"[ ]*WHERE[ ]*\(", I) END_WHERE: Pattern = compile(r"WHERE", I) IF: Pattern = compile(r"[ ]*(?:[a-z_]\w*[ ]*:[ ]*)?IF[ ]*\(", I) THEN: Pattern = compile(r"\)[ ]*THEN$", I) END_IF: Pattern = compile(r"IF", I) ASSOCIATE: Pattern = compile(r"[ ]*ASSOCIATE[ ]*\(", I) END_ASSOCIATE: Pattern = compile(r"ASSOCIATE", I) END_FIXED: Pattern = compile(r"[ ]*([0-9]*)[ ]*CONTINUE", I) SELECT: Pattern = compile( r"[ ]*(?:[a-z_]\w*[ ]*:[ ]*)?SELECT[ ]*" r"(CASE|TYPE)[ ]*\(([\w=> ]*)", I, ) SELECT_TYPE: Pattern = compile(r"[ ]*(TYPE|CLASS)[ ]+IS[ ]*\(([\w ]*)", I) SELECT_DEFAULT: Pattern = compile(r"[ ]*CLASS[ ]+DEFAULT", I) END_SELECT: Pattern = compile(r"SELECT", I) PROG: Pattern = compile(r"[ ]*PROGRAM[ ]+(\w+)", I) END_PROG: Pattern = compile(r"PROGRAM", I) INT: Pattern = compile(r"[ ]*(ABSTRACT)?[ ]*INTERFACE[ ]*(\w*)", I) END_INT: Pattern = compile(r"INTERFACE", I) END_WORD: Pattern = compile( r"[ ]*END[ ]*(DO|WHERE|IF|BLOCK|ASSOCIATE|SELECT" r"|TYPE|ENUM|MODULE|SUBMODULE|PROGRAM|INTERFACE" r"|SUBROUTINE|FUNCTION|PROCEDURE|FORALL)?([ ]+(?!\W)|$)", I, ) TYPE_DEF: Pattern = compile(r"[ ]*(TYPE)[, :]+", I) EXTENDS: Pattern = compile(r"EXTENDS[ ]*\((\w*)\)", I) GENERIC_PRO: Pattern = compile( r"[ ]*(GENERIC)[, ]*(PRIVATE|PUBLIC)?[ ]*::[ ]*[a-z]", I ) GEN_ASSIGN: Pattern = compile(r"(ASSIGNMENT|OPERATOR)\(", I) END_TYPED: Pattern = compile(r"TYPE", I) ENUM_DEF: Pattern = compile(r"[ ]*ENUM[, ]+", I) END_ENUMD: Pattern = compile(r"ENUM", I) VAR: Pattern = compile( r"[ ]*(INTEGER|REAL|DOUBLE[ ]*PRECISION|COMPLEX" r"|DOUBLE[ ]*COMPLEX|CHARACTER|LOGICAL|PROCEDURE" r"|EXTERNAL|CLASS|TYPE)", # external :: variable is handled by this I, ) KIND_SPEC: Pattern = compile(r"[ ]*([*]?\([ ]*[\w*:]|\*[ ]*[0-9:]*)", I) KEYWORD_LIST: Pattern = compile( 
r"[ ]*,[ ]*(PUBLIC|PRIVATE|ALLOCATABLE|" r"POINTER|TARGET|DIMENSION[ ]*\(|" r"OPTIONAL|INTENT[ ]*\([ ]*(?:IN|OUT|IN[ ]*OUT)[ ]*\)|DEFERRED|NOPASS|" r"PASS[ ]*\(\w*\)|SAVE|PARAMETER|EXTERNAL|" r"CONTIGUOUS)", I, ) PARAMETER_VAL: Pattern = compile(r"\w*[\s\&]*=[\s\&]*([\w\.\*\-\+\\\'\"]*)", I) TATTR_LIST: Pattern = compile( r"[ ]*,[ ]*(PUBLIC|PRIVATE|ABSTRACT|EXTENDS\(\w*\))", I ) VIS: Pattern = compile(r"[ ]*\b(PUBLIC|PRIVATE)\b", I) WORD: Pattern = compile(r"[a-z_]\w*", I) NUMBER: Pattern = compile( r"[\+\-]?(\b\d+\.?\d*|\.\d+)(_\w+|d[\+\-]?\d+|e[\+\-]?\d+(_\w+)?)?(?!\w)", I, ) LOGICAL: Pattern = compile(r".true.|.false.", I) SUB_PAREN: Pattern = compile(r"\([\w, ]*\)", I) # KIND_SPEC_MATCH: Pattern = compile(r"\([\w, =*]*\)", I) SQ_STRING: Pattern = compile(r"\'[^\']*\'", I) DQ_STRING: Pattern = compile(r"\"[^\"]*\"", I) LINE_LABEL: Pattern = compile(r"[ ]*([0-9]+)[ ]+", I) NON_DEF: Pattern = compile(r"[ ]*(CALL[ ]+[a-z_]|[a-z_][\w%]*[ ]*=)", I) # Fixed format matching rules FIXED_COMMENT: Pattern = compile(r"([!cd*])", I) FIXED_CONT: Pattern = compile(r"( {5}[\S])") FIXED_DOC: Pattern = compile(r"(?:[!cd\*])([<>!])", I) FIXED_OPENMP: Pattern = compile(r"[!c\*]\$OMP", I) # Free format matching rules FREE_COMMENT: Pattern = compile(r"([ ]*!)") FREE_CONT: Pattern = compile(r"([ ]*&)") FREE_DOC: Pattern = compile(r"[ ]*!([<>!])") FREE_OPENMP: Pattern = compile(r"[ ]*!\$OMP", I) FREE_FORMAT_TEST: Pattern = compile(r"[ ]{1,4}[a-z]", I) # Preprocessor matching rules DEFINED: Pattern = compile(r"defined[ ]*\(?[ ]*([a-z_]\w*)[ ]*\)?", I) PP_REGEX: Pattern = compile(r"#(if |ifdef|ifndef|else|elif|endif)") PP_DEF: Pattern = compile(r"#(define|undef)[ ]*([\w]+)(\((\w+(,[ ]*)?)+\))?", I) PP_DEF_TEST: Pattern = compile(r"(![ ]*)?defined[ ]*\([ ]*(\w*)[ ]*\)$", I) PP_INCLUDE: Pattern = compile(r"#include[ ]*([\"\w\.]*)", I) PP_ANY: Pattern = compile(r"(^#:?\w+)") # Context matching rules CALL: Pattern = compile(r"[ ]*CALL[ ]+[\w%]*$", I) INT_STMNT: Pattern = compile(r"^[ ]*[a-z]*$", I) TYPE_STMNT: Pattern = compile(r"[ ]*(TYPE|CLASS)[ ]*(IS)?[ ]*$", I) PROCEDURE_STMNT: Pattern = compile(r"[ ]*(PROCEDURE)[ ]*$", I) PRO_LINK: Pattern = compile(r"[ ]*(MODULE[ ]*PROCEDURE )", I) SCOPE_DEF: Pattern = compile( r"[ ]*(MODULE|PROGRAM|SUBROUTINE|FUNCTION|INTERFACE)[ ]+", I ) END: Pattern = compile( r"[ ]*(END)(" r" |MODULE|PROGRAM|SUBROUTINE|FUNCTION|PROCEDURE|TYPE|DO|IF|SELECT)?", I, ) # Object regex patterns CLASS_VAR: Pattern = compile(r"(TYPE|CLASS)[ ]*\(", I) DEF_KIND: Pattern = compile(r"([a-z]*)[ ]*\((?:KIND|LEN)?[ =]*([a-z_]\w*)", I) OBJBREAK: Pattern = compile(r"[\/\-(.,+*<>=$: ]", I) def src_file_exts(input_exts: list[str] = []) -> Pattern[str]: """Create a REGEX for which file extensions the Language Server should parse Default extensions are F F03 F05 F08 F18 F77 F90 F95 FOR FPP f f03 f05 f08 f18 f77 f90 f95 for fpp Parameters ---------- input_exts : list[str], optional Additional Fortran, by default [] Returns ------- Pattern[str] A compiled regular expression, by default '.(F|F03|F05|F08|F18|F77|F90|F95|FOR|FPP|f|f03|f05|f08|f18|f77|f90|f95|for|fpp)?' 
""" EXTS = ["", "77", "90", "95", "03", "05", "08", "18", "OR", "PP"] FORTRAN_FILE_EXTS = [] for e in EXTS: FORTRAN_FILE_EXTS.extend([f"F{e}".upper(), f"f{e}".lower()]) # Add the custom extensions for the server to parse for e in input_exts: if e.startswith("."): FORTRAN_FILE_EXTS.append(e.replace(".", "")) # Cast into a set to ensure uniqueness of extensions & sort for consistency # Create a regular expression from this return compile(rf"\.({'|'.join(sorted(set(FORTRAN_FILE_EXTS)))})?$") fortran-language-server-2.13.0+dfsg.1/fortls/statements.json000066400000000000000000000111351450400537300240100ustar00rootroot00000000000000{ "var_def": { "CHARACTER": { "args": "LEN=len" }, "CLASS": { "args": "name" }, "COMPLEX": { "args": "KIND=kind" }, "DOUBLE COMPLEX": {}, "DOUBLE PRECISION": {}, "INTEGER": { "args": "KIND=kind" }, "LOGICAL": { "args": "KIND=kind" }, "REAL": { "args": "KIND=kind" }, "TYPE": { "args": "KIND=kind" } }, "int_stmnts": { "ALLOCATE": { "doc": "Dynamically creates storage for allocatable variables and pointer targets." }, "BACKSPACE": { "doc": "Positions a sequential file at the beginning of the preceding record, making it available for subsequent I/O processing." }, "CALL": { "doc": "Transfers control to a subroutine subprogram." }, "CLOSE": { "doc": "Disconnects a file from a unit." }, "CONTINUE": { "doc": "Primarily used to terminate a labelled DO construct when the construct would otherwise end improperly with either a GO TO, arithmetic IF, or other prohibited control statement." }, "CYCLE": { "doc": "Interrupts the current execution cycle of the innermost (or named) DO construct." }, "DEALLOCATE": { "doc": "Frees the storage allocated for allocatable variables and nonprocedure pointer targets (and causes the pointers to become disassociated)." }, "ENDFILE": { "doc": "For sequential files, writes an end-of-file record to the file and positions the file after this record (the terminal point)." }, "ERROR STOP": { "doc": "Initiates error termination of an image before the execution of an END statement of the main program." }, "EVENT POST": { "doc": "Allows an image to notify another image that it can proceed to work on tasks that use common resources." }, "EVENT WAIT": { "doc": "Allows an image to wait on events posted by other images." }, "FAIL IMAGE": { "doc": "Forces the failure of the current image of the program unit." }, "FLUSH": { "doc": "Causes data written to a file to become available to other processes or causes data written to a file outside of Fortran to be accessible to a READ statement." }, "FORM TEAM": { "args": "team_number,team_variable", "doc": "Defines team variables; creates one or more teams of images from the images on the current team." }, "FORMAT": { "doc": "Specifies the form of data being transferred and the data conversion (editing) required to achieve that form." }, "INQUIRE": { "doc": "Returns information on the status of specified properties of a file or logical unit." }, "LOCK": { "doc": "Causes a lock variable to become locked by an image." }, "NAMELIST": { "doc": "Associates a name with a list of variables. This group name can be referenced in some input/output operations." }, "NULLIFY": { "doc": "Disassociates a pointer from a target." }, "OPEN": { "doc": "Connects an external file to a unit, creates a new file and connects it to a unit, creates a preconnected file, or changes certain properties of a connection." }, "PRINT": { "doc": "Displays output on the screen." 
}, "READ": { "doc": "Transfers input data from external sequential, direct-access, or internal records." }, "RETURN": { "doc": "Return control to the calling program unit." }, "REWIND": { "doc": "Positions a sequential or direct access file at the beginning of the file (the initial point)." }, "STOP": { "doc": "Initiates normal termination of an image before the execution of an END statement of the main program." }, "SYNC ALL": { "args": "STAT=stat,ERRMSG=errmsg", "doc": "Performs a synchronization of all images in the current team." }, "SYNC IMAGES": { "args": "image_set,STAT=stat,ERRMSG=errmsg", "doc": "Performs a synchronization of the image with each of the other images in the image set." }, "SYNC MEMORY": { "args": "STAT=stat,ERRMSG=errmsg", "doc": "Ends one image segment and begins another. Each segment can then be ordered in some way with respect to segments on other images." }, "SYNC TEAM": { "args": "team_value,STAT=stat,ERRMSG=errmsg", "doc": "Performs a synchronization of all images on the specified team." }, "UNLOCK": { "doc": "Causes a lock variable to become unlocked by an image." }, "WAIT": { "doc": "Performs a wait operation for a specified pending asynchronous data transfer operation." }, "WRITE": { "doc": "Transfers output data to external sequential, direct-access, or internal records." } } } fortran-language-server-2.13.0+dfsg.1/fortls/version.py000066400000000000000000000005271450400537300227700ustar00rootroot00000000000000try: from importlib.metadata import PackageNotFoundError, version except ModuleNotFoundError: from importlib_metadata import PackageNotFoundError, version try: __version__ = version(__package__) except PackageNotFoundError: from setuptools_scm import get_version __version__ = get_version(root="..", relative_to=__file__) fortran-language-server-2.13.0+dfsg.1/licenses/000077500000000000000000000000001450400537300212215ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/licenses/fortran-language-server-license.txt000066400000000000000000000020701450400537300301410ustar00rootroot00000000000000The MIT License (MIT) Copyright 2017-2019 Chris Hansen Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
fortran-language-server-2.13.0+dfsg.1/pyproject.toml000066400000000000000000000006461450400537300223360ustar00rootroot00000000000000[build-system] requires = [ "setuptools >= 45", "wheel", "setuptools_scm[toml] >= 6.2", "setuptools_scm_git_archive", ] build-backend = "setuptools.build_meta" [tool.setuptools_scm] write_to = "fortls/_version.py" [tool.isort] profile = "black" [tool.pytest.ini_options] minversion = "7.0" addopts = "-v --cov=fortls --cov-report=html --cov-report=xml --cov-context=test" testpaths = ["fortls", "test"] fortran-language-server-2.13.0+dfsg.1/setup.cfg000066400000000000000000000036361450400537300212450ustar00rootroot00000000000000[metadata] name = fortls url = https://gnikit.github.io/fortls author = Giannis Nikiteas author_email = giannis.nikiteas@gmail.com description = fortls - Fortran Language Server long_description = file: README.md long_description_content_type = text/markdown license = MIT classifiers = Development Status :: 4 - Beta Intended Audience :: Developers Intended Audience :: Science/Research License :: OSI Approved :: MIT License Natural Language :: English Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Fortran Operating System :: Microsoft :: Windows Operating System :: POSIX Operating System :: Unix Operating System :: MacOS keywords = fortran language server language server protocol lsp fortls project_urls = Donate = https://github.com/sponsors/gnikit Documentation = https://gnikit.github.io/fortls Changes = https://github.com/gnikit/fortls/blob/master/CHANGELOG.md Tracker = https://github.com/gnikit/fortls/issues Source Code = https://github.com/gnikit/fortls [options] packages = find: python_requires = >= 3.7 install_requires = json5 packaging importlib-metadata; python_version < "3.8" typing-extensions; python_version < "3.8" [options.package_data] fortls = *.json [options.entry_points] console_scripts = fortls = fortls.__init__:main [options.extras_require] dev = pytest >= 5.4.3 pytest-cov >= 2.12.1 black isort pre-commit docs = sphinx >= 4.0.0 sphinx-argparse sphinx-autodoc-typehints sphinx_design sphinx-copybutton furo myst-parser sphinx-sitemap [flake8] max-line-length = 88 extend-ignore = E203, E722 fortran-language-server-2.13.0+dfsg.1/setup.py000066400000000000000000000001461450400537300211270ustar00rootroot00000000000000#!/usr/bin/env python """Builds the fortls Language Server """ import setuptools setuptools.setup() fortran-language-server-2.13.0+dfsg.1/test/000077500000000000000000000000001450400537300203735ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/setup_tests.py000066400000000000000000000036661450400537300233420ustar00rootroot00000000000000from __future__ import annotations import shlex import subprocess import sys from io import StringIO from pathlib import Path root_dir = Path(__file__).parent.parent.resolve() sys.path.insert(0, root_dir) # Compromise since isort does not respect noqa from fortls.jsonrpc import path_to_uri # noqa: E402, F401 from fortls.jsonrpc import read_rpc_messages # noqa: E402 from fortls.jsonrpc import write_rpc_notification # noqa: E402, F401 from fortls.jsonrpc import write_rpc_request # noqa: E402, F401 test_dir = root_dir / "test" / "test_source" def check_post_msg(result: dict, msg: str, severity: int): assert result["type"] == severity 
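# LSP window/showMessage MessageType values: 1 = Error, 2 = Warning, 3 = Info, 4 = Log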
assert result["message"] == msg def run_request(request, fortls_args: list[str] = None): command = [ sys.executable, str(root_dir / "fortls.py"), "--incremental_sync", ] if fortls_args: # Input args might not be sanitised, fix that for i in fortls_args: command.extend(shlex.split(i, posix=False)) pid = subprocess.Popen( command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) results = pid.communicate(input=request.encode()) tmp_file = StringIO(results[0].decode()) results = read_rpc_messages(tmp_file) parsed_results = [] for result in results: try: parsed_results.append(result["result"]) except KeyError: try: # Present in `method`s parsed_results.append(result["params"]) except: raise RuntimeError( "Only 'result' and 'params' keys have been implemented for testing." " Please add the new key." ) except: raise RuntimeError( "Unexpected error encountered trying to extract server results" ) errcode = pid.poll() return errcode, parsed_results fortran-language-server-2.13.0+dfsg.1/test/test_interface.py000066400000000000000000000135141450400537300237500ustar00rootroot00000000000000import os import sys from pathlib import Path sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) from fortls.interface import cli # noqa: E402 parser = cli("fortls") def test_command_line_general_options(): args = parser.parse_args( "-c config_file.json -n 2 --notify_init --incremental_sync --sort_keywords" " --disable_autoupdate --debug_log".split() ) assert args.config == "config_file.json" assert args.nthreads == 2 assert args.notify_init assert args.incremental_sync assert args.sort_keywords assert args.disable_autoupdate assert args.debug_log def test_command_line_file_parsing_options(): args = parser.parse_args( "--source_dirs tmp ./local /usr/include/** --incl_suffixes .FF .fpc .h f20" " --excl_suffixes _tmp.f90 _h5hut_tests.F90 --excl_paths exclude tests".split() ) assert args.source_dirs == {"tmp", "./local", "/usr/include/**"} assert args.incl_suffixes == {".FF", ".fpc", ".h", "f20"} assert args.excl_suffixes == {"_tmp.f90", "_h5hut_tests.F90"} assert args.excl_paths == {"exclude", "tests"} def test_command_line_autocomplete_options(): args = parser.parse_args( "--autocomplete_no_prefix --autocomplete_no_snippets --autocomplete_name_only" " --lowercase_intrinsics --use_signature_help".split() ) assert args.autocomplete_no_prefix assert args.autocomplete_no_snippets assert args.autocomplete_name_only assert args.lowercase_intrinsics assert args.use_signature_help def test_command_line_hover_options(): args = parser.parse_args( "--hover_signature --hover_language FortranFreeForm".split() ) assert args.hover_signature assert args.hover_language == "FortranFreeForm" def test_command_line_diagnostic_options(): args = parser.parse_args( "--max_line_length 80 --max_comment_line_length 8 --disable_diagnostics".split() ) assert args.max_line_length == 80 assert args.max_comment_line_length == 8 assert args.disable_diagnostics def test_command_line_preprocessor_options(): args = parser.parse_args( "--pp_suffixes .h .fh --include_dirs /usr/include/** ./local/incl --pp_defs" ' {"HAVE_PETSC":"","HAVE_ZOLTAN":"","Mat":"type(tMat)"}'.split() ) assert args.pp_suffixes == [".h", ".fh"] assert args.include_dirs == {"/usr/include/**", "./local/incl"} assert args.pp_defs == {"HAVE_PETSC": "", "HAVE_ZOLTAN": "", "Mat": "type(tMat)"} def test_command_line_symbol_options(): args = parser.parse_args("--symbol_skip_mem".split()) assert args.symbol_skip_mem def 
test_command_line_code_actions_options(): args = parser.parse_args("--enable_code_actions".split()) assert args.enable_code_actions def unittest_server_init(): from fortls.langserver import LangServer root = (Path(__file__).parent / "test_source").resolve() parser = cli("fortls") args = parser.parse_args("-c f90_config.json".split()) server = LangServer(None, vars(args)) server.root_path = root server._load_config_file() return server, root def test_config_file_general_options(): server, root = unittest_server_init() assert server.nthreads == 8 assert server.notify_init assert server.incremental_sync assert server.sort_keywords assert server.disable_autoupdate def test_config_file_dir_parsing_options(): server, r = unittest_server_init() # File parsing assert server.source_dirs == {"pp/**", "subdir"} assert server.incl_suffixes == {".FF", ".fpc", ".h", "f20"} assert server.excl_suffixes == {"_tmp.f90", "_h5hut_tests.F90"} assert server.excl_paths == {"excldir", "hover/**"} def test_config_file_autocomplete_options(): server, root = unittest_server_init() # Autocomplete options assert server.autocomplete_no_prefix assert server.autocomplete_no_snippets assert server.autocomplete_name_only assert server.lowercase_intrinsics assert server.use_signature_help def test_config_file_hover_options(): server, root = unittest_server_init() # Hover options assert server.hover_signature assert server.hover_language == "FortranFreeForm" def test_config_file_diagnostic_options(): server, root = unittest_server_init() # Diagnostic options assert server.max_line_length == 80 assert server.max_comment_line_length == 80 assert server.disable_diagnostics def test_config_file_preprocessor_options(): server, root = unittest_server_init() # Preprocessor options assert server.pp_suffixes == [".h", ".fh"] assert server.include_dirs == {"./include/**"} assert server.pp_defs == { "HAVE_PETSC": "", "HAVE_ZOLTAN": "", "Mat": "type(tMat)", } def test_config_file_symbols_options(): server, root = unittest_server_init() # Symbols options assert server.symbol_skip_mem def test_config_file_codeactions_options(): server, root = unittest_server_init() # Code Actions options assert server.enable_code_actions def test_version_update_pypi(): from packaging import version from fortls.jsonrpc import JSONRPC2Connection, ReadWriter from fortls.langserver import LangServer parser = cli("fortls") args = parser.parse_args("-c f90_config.json".split()) args = vars(args) args["disable_autoupdate"] = False stdin, stdout = sys.stdin.buffer, sys.stdout.buffer s = LangServer(conn=JSONRPC2Connection(ReadWriter(stdin, stdout)), settings=args) s.root_path = (Path(__file__).parent / "test_source").resolve() did_update = s._update_version_pypi(test=True) assert did_update s.disable_autoupdate = True did_update = s._update_version_pypi() assert not did_update s.disable_autoupdate = False s._version = version.parse("999.0.0") did_update = s._update_version_pypi() assert not did_update fortran-language-server-2.13.0+dfsg.1/test/test_preproc.py000066400000000000000000000036211450400537300234600ustar00rootroot00000000000000from __future__ import annotations from setup_tests import run_request, test_dir, write_rpc_request def test_hover(): def hover_req(file_path: str, ln: int, col: int) -> str: return write_rpc_request( 1, "textDocument/hover", { "textDocument": {"uri": str(file_path)}, "position": {"line": ln, "character": col}, }, ) def check_return(result_array, checks): assert len(result_array) == len(checks) for (i, check) in enumerate(checks): 
assert result_array[i]["contents"][0]["value"] == check root_dir = test_dir / "pp" string = write_rpc_request(1, "initialize", {"rootPath": str(root_dir)}) file_path = root_dir / "preproc.F90" string += hover_req(file_path, 5, 8) # user defined type string += hover_req(file_path, 7, 30) # variable string += hover_req(file_path, 7, 40) # multi-lin variable string += hover_req(file_path, 8, 7) # function with if conditional string += hover_req(file_path, 9, 7) # multiline function with if conditional string += hover_req(file_path, 10, 15) # defined without () file_path = root_dir / "preproc_keywords.F90" string += hover_req(file_path, 6, 2) # ignores PP across Fortran line continuations config = str(root_dir / ".pp_conf.json") errcode, results = run_request(string, ["--config", config]) assert errcode == 0 # Reference solution ref_results = ( "#define PCType character*(80)", "#define PETSC_ERR_INT_OVERFLOW 84", "#define varVar 55", "#define ewrite if (priority <= 3) write((priority), format)", "#define ewrite2 if (priority <= 3) write((priority), format)", "#define SUCCESS .true.", "REAL, CONTIGUOUS, POINTER, DIMENSION(:)", ) assert len(ref_results) == len(results) - 1 check_return(results[1:], ref_results) fortran-language-server-2.13.0+dfsg.1/test/test_server.py000066400000000000000000000164331450400537300233210ustar00rootroot00000000000000from setup_tests import run_request, test_dir, write_rpc_notification, write_rpc_request def test_init(): def check_return(result_dict): # Expected capabilities # { # "completionProvider": { # "resolveProvider": false, # "triggerCharacters": ["%"] # }, # "definitionProvider": true, # "documentSymbolProvider": true, # "referencesProvider": True, # "hoverProvider": true, # "textDocumentSync": 2 # } # assert "capabilities" in result_dict assert result_dict["capabilities"]["textDocumentSync"] == 2 assert result_dict["capabilities"]["definitionProvider"] is True assert result_dict["capabilities"]["documentSymbolProvider"] is True assert result_dict["capabilities"]["hoverProvider"] is True assert result_dict["capabilities"]["referencesProvider"] is True assert ( result_dict["capabilities"]["completionProvider"]["resolveProvider"] is False ) assert ( result_dict["capabilities"]["completionProvider"]["triggerCharacters"][0] == "%" ) # string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) errcode, results = run_request(string) # assert errcode == 0 check_return(results[0]) def test_logger(): """Test the logger""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) errcode, results = run_request(string, ["--debug_log"]) assert errcode == 0 assert results[1]["type"] == 3 assert results[1]["message"] == "fortls debugging enabled" def test_open(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = str(test_dir / "subdir" / "test_free.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string, fortls_args=["--disable_diagnostics"]) # assert errcode == 0 assert len(results) == 1 def test_change(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_unknown.f90" string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": str(file_path)}} ) string += write_rpc_notification( "textDocument/didChange", { "textDocument": {"uri": str(file_path)}, "contentChanges": [ { "text": "module test_unkown\nend module test_unknown\n", "range": { 
"start": {"line": 0, "character": 0}, "end": {"line": 0, "character": 0}, }, } ], }, ) string += write_rpc_request( 2, "textDocument/documentSymbol", {"textDocument": {"uri": str(file_path)}} ) file_path = test_dir / "subdir" / "test_free.f90" string += write_rpc_notification( "textDocument/didChange", { "textDocument": {"uri": str(file_path)}, "contentChanges": [ { "text": " unicode test", "range": { "start": {"line": 3, "character": 3}, "end": {"line": 3, "character": 3}, }, }, { "text": "", "range": { "start": {"line": 6, "character": 0}, "end": {"line": 31, "character": 0}, }, }, { "text": "", "range": { "start": {"line": 7, "character": 0}, "end": {"line": 39, "character": 0}, }, }, ], }, ) string += write_rpc_request( 3, "textDocument/documentSymbol", {"textDocument": {"uri": str(file_path)}} ) errcode, results = run_request(string, fortls_args=["--disable_diagnostics"]) # assert errcode == 0 assert len(results) == 3 assert len(results[1]) == 1 assert len(results[2]) == 5 def test_symbols(): def check_return(result_array): # Expected objects objs = ( ["test_free", 2, 0, 81], ["scale_type", 5, 4, 6], ["val", 13, 5, 5], ["vector", 5, 8, 16], ["n", 13, 9, 9], ["v", 13, 10, 10], ["bound_nopass", 6, 11, 11], ["create", 6, 13, 13], ["norm", 6, 14, 14], ["bound_pass", 6, 15, 15], ["scaled_vector", 5, 18, 23], ["scale", 13, 19, 19], ["set_scale", 6, 21, 21], ["norm", 6, 22, 22], ["fort_wrap", 11, 26, 29], ["vector_create", 12, 35, 41], ["vector_norm", 12, 43, 47], ["scaled_vector_set", 12, 49, 53], ["scaled_vector_norm", 12, 55, 59], ["unscaled_norm", 12, 61, 65], ["test_sig_Sub", 12, 67, 70], ["bound_pass", 12, 72, 80], ) assert len(result_array) == len(objs) for i, obj in enumerate(objs): assert result_array[i]["name"] == obj[0] assert result_array[i]["kind"] == obj[1] assert result_array[i]["location"]["range"]["start"]["line"] == obj[2] assert result_array[i]["location"]["range"]["end"]["line"] == obj[3] # string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" string += write_rpc_request( 2, "textDocument/documentSymbol", {"textDocument": {"uri": str(file_path)}} ) errcode, results = run_request(string) # assert errcode == 0 check_return(results[1]) def test_workspace_symbols(): def check_return(result_array): # Expected objects objs = ( ["test", 6, 7], ["test_abstract", 2, 0], ["test_associate_block", 2, 0], ["test_free", 2, 0], ["test_gen_type", 5, 1], ["test_generic", 2, 0], ["test_inherit", 2, 0], ["test_int", 2, 0], ["test_mod", 2, 0], ["test_nan", 2, 0], ["test_nonint_mod", 2, 0], ["test_preproc_keywords", 2, 0], ["test_private", 2, 8], ["test_program", 2, 0], ["test_rename_sub", 6, 9], ["test_select", 2, 0], ["test_select_sub", 6, 16], ["test_sig_Sub", 6, 67], ["test_str1", 13, 5], ["test_str2", 13, 5], ["test_sub", 6, 8], ["test_vis_mod", 2, 0], ) assert len(result_array) == len(objs) for i, obj in enumerate(objs): assert result_array[i]["name"] == obj[0] assert result_array[i]["kind"] == obj[1] assert result_array[i]["location"]["range"]["start"]["line"] == obj[2] # string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) string += write_rpc_request(2, "workspace/symbol", {"query": "test"}) errcode, results = run_request(string) # assert errcode == 0 check_return(results[1]) fortran-language-server-2.13.0+dfsg.1/test/test_server_completion.py000066400000000000000000000253661450400537300255570ustar00rootroot00000000000000from setup_tests import run_request, test_dir, write_rpc_request def 
validate_comp(result_array, checks): assert len(result_array) == checks[0] if checks[0] > 0: assert result_array[0]["label"] == checks[1] assert result_array[0]["detail"] == checks[2] try: assert result_array[0]["insertText"] == checks[3] except KeyError: pass def comp_request(file_path, line, char): return write_rpc_request( 1, "textDocument/completion", { "textDocument": {"uri": str(file_path)}, "position": {"line": line, "character": char}, }, ) def test_comp1(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += comp_request(file_path, 12, 6) string += comp_request(file_path, 13, 6) string += comp_request(file_path, 17, 24) string += comp_request(file_path, 18, 23) string += comp_request(file_path, 20, 7) string += comp_request(file_path, 21, 20) string += comp_request(file_path, 21, 42) string += comp_request(file_path, 23, 26) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # test_prog.f08 [1, "myfun", "DOUBLE PRECISION FUNCTION myfun(n, xval)", "myfun"], [9, "glob_sub", "SUBROUTINE glob_sub(n, xval, yval)", "glob_sub"], [1, "bound_nopass", "SUBROUTINE bound_nopass(a, b)", "bound_nopass"], [1, "bound_pass", "SUBROUTINE bound_pass(arg1)", "bound_pass"], [1, "stretch_vector", "TYPE(scaled_vector)"], [6, "scale", "TYPE(scale_type)"], [2, "n", "INTEGER(4)"], [1, "val", "REAL(8)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp2(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_submod.F90" string += comp_request(file_path, 30, 12) string += comp_request(file_path, 31, 8) string += comp_request(file_path, 31, 23) string += comp_request(file_path, 35, 12) string += comp_request(file_path, 36, 48) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_submod.F90 [1, "point", "TYPE"], [1, "distance", "REAL"], [2, "x", "REAL"], [1, "point", "TYPE"], [2, "x", "REAL"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp3(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_inc.f90" string += comp_request(file_path, 10, 2) file_path = test_dir / "subdir" / "test_inc2.f90" string += comp_request(file_path, 3, 2) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # test_inc.f90 [2, "val1", "REAL(8)"], # subdir/test_inc2.f90 [2, "val1", "REAL(8)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp4(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_abstract.f90" string += comp_request(file_path, 7, 12) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_abstract.f90 [1, "abs_interface", "SUBROUTINE"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp5(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" string += comp_request(file_path, 10, 22) string += comp_request(file_path, 14, 27) string += comp_request(file_path, 28, 15) 
errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_free.f90 [1, "DIMENSION(:)", "KEYWORD"], [2, "vector_create", "SUBROUTINE"], [3, "INTENT(IN)", "KEYWORD"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp6(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_select.f90" string += comp_request(file_path, 21, 7) string += comp_request(file_path, 23, 7) string += comp_request(file_path, 25, 7) string += comp_request(file_path, 30, 7) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_select.f90 [2, "a", "REAL(8)"], [2, "a", "COMPLEX(8)"], [1, "n", "INTEGER(4)"], [2, "a", "REAL(8)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp7(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_block.f08" string += comp_request(file_path, 2, 2) string += comp_request(file_path, 5, 4) string += comp_request(file_path, 8, 6) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # test_block.f08 [9, "READ", "STATEMENT"], [10, "READ", "STATEMENT"], [11, "READ", "STATEMENT"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp8(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_inherit.f90" string += comp_request(file_path, 10, 11) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_inherit.f90 [1, "val", "REAL(8)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp9(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_rename.F90" string += comp_request(file_path, 13, 5) string += comp_request(file_path, 14, 5) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_rename.F90 [1, "localname", "INTEGER"], [2, "renamed_var2", "REAL(8)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp10(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_vis.f90" string += comp_request(file_path, 8, 10) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_vis.f90 [3, "some_type", "TYPE"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp_import_host_association(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_import.f90" string += comp_request(file_path, 15, 20) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # TODO: this should be 1, mytype2 should not appear in autocomplete # see #5 and #8 on GitHub [2, "mytype", "TYPE"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def 
test_comp_visibility_scopes(): """Test that PUBLIC, PRIVATE scopes are enforced in autocomplete results.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "completion" / "test_vis_mod_completion.f90" string += comp_request(file_path, 12, 16) string += comp_request(file_path, 12, 24) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # completion/test_vis_mod_completion.f90 [1, "some_var", "INTEGER"], [3, "length", "INTEGER"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp_interface(): """Test that the interface signature autocompletion, with placeholders, works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_generic.f90" string += comp_request(file_path, 14, 10) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_generic.f90 [ 4, "my_gen", "SUBROUTINE my_gen(self, a, b)", "my_gen(${1:self}, ${2:a}, ${3:b})", ], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp_no_signature_help(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += comp_request(file_path, 12, 6) errcode, results = run_request(string) assert errcode == 0 exp_results = ( # test_prog.f08, completion without signature_help # returns the entire completion as a snippet [ 1, "myfun", "DOUBLE PRECISION FUNCTION myfun(n, xval)", "myfun(${1:n}, ${2:xval})", ], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) def test_comp_fixed(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_fixed.f" string += comp_request(file_path, 15, 8) string += comp_request(file_path, 15, 21) errcode, results = run_request(string, ["--use_signature_help"]) assert errcode == 0 exp_results = ( # subdir/test_fixed.f90 [1, "bob", "CHARACTER*(LEN=200)"], [1, "dave", "CHARACTER*(20)"], ) assert len(exp_results) == len(results) - 1 for i, ref in enumerate(exp_results): validate_comp(results[i + 1], ref) fortran-language-server-2.13.0+dfsg.1/test/test_server_definitions.py000066400000000000000000000204461450400537300257130ustar00rootroot00000000000000from pathlib import Path from setup_tests import path_to_uri, run_request, test_dir, write_rpc_request def validate_def(result_array, checks): # If no definition is given result is None if result_array is None: assert not checks[0] return None assert result_array["uri"] == path_to_uri(checks[2]) assert result_array["range"]["start"]["line"] == checks[0] assert result_array["range"]["start"]["line"] == checks[1] def def_request(uri: Path, line, char): return write_rpc_request( 1, "textDocument/definition", { "textDocument": {"uri": str(uri)}, "position": {"line": line - 1, "character": char - 1}, }, ) def test_def_fun_sub_fixed(): """Test that going to definition of a function or submodule works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += def_request(file_path, 13, 7) string += def_request(file_path, 14, 7) errcode, results = run_request(string) assert errcode == 0 fixed_path = str(test_dir / "subdir" / "test_fixed.f") ref_res = [[0, 0, 
fixed_path], [22, 22, fixed_path]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_variable(): """Test that going to definition of a variable works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += def_request(file_path, 21, 8) errcode, results = run_request(string) assert errcode == 0 ref_res = [[10, 10, str(test_dir / "test_prog.f08")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_type_bound_procedure1(): """Test that going to definition of type bound procedure works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += def_request(file_path, 22, 21) errcode, results = run_request(string) assert errcode == 0 ref_res = [[21, 21, str(test_dir / "subdir" / "test_free.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_type_bound_procedure2(): """Test that going to definition of type bound procedure works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += def_request(file_path, 22, 43) errcode, results = run_request(string) assert errcode == 0 ref_res = [[14, 14, str(test_dir / "subdir" / "test_free.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_type_nested_variable(): """Test that going to definition of type bound nested variables works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += def_request(file_path, 24, 27) errcode, results = run_request(string) assert errcode == 0 ref_res = [[5, 5, str(test_dir / "subdir" / "test_free.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_type_in_submod_function(): """Test that going into the definition of a type bound function in a submodule""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_submod.F90" string += def_request(file_path, 31, 13) errcode, results = run_request(string) assert errcode == 0 ref_res = [[1, 1, str(test_dir / "subdir" / "test_submod.F90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_type_in_submod_procedure(): """Test that going into the definition of a type bound procedure in a submodule""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_submod.F90" string += def_request(file_path, 36, 13) errcode, results = run_request(string) assert errcode == 0 ref_res = [[1, 1, str(test_dir / "subdir" / "test_submod.F90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_include_file(): """Test that going into the location of an include file works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_inc.f90" string += def_request(file_path, 3, 16) errcode, results = run_request(string) assert errcode == 0 ref_res = [[2, 2, str(test_dir / "subdir" / "test_inc2.f90")]] assert len(ref_res) == len(results) - 1 for i, res in 
enumerate(ref_res): validate_def(results[i + 1], res) def test_def_include_variable1(): """Test that going to definition of a variable in an include file works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_inc.f90" string += def_request(file_path, 11, 3) errcode, results = run_request(string) assert errcode == 0 ref_res = [[0, 0, str(test_dir / "subdir" / "test_inc2.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_include_variable2(): """Test that going to definition of a variable in an include file works.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_inc2.f90" string += def_request(file_path, 4, 3) errcode, results = run_request(string) assert errcode == 0 ref_res = [[4, 4, str(test_dir / "test_inc.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_include_file_missing(): """Test that going to the definition of a missing file will not break fortls""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_inc.f90" string += def_request(file_path, 13, 14) errcode, results = run_request(string) assert errcode == 0 ref_res = [[None]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_rename_only_variable(): """Test that going to definition of a renamed list variable will take you to the original definition. """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_rename.F90" string += def_request(file_path, 14, 6) errcode, results = run_request(string) assert errcode == 0 ref_res = [[6, 6, str(test_dir / "subdir" / "test_rename.F90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_rename_only_variable_nested(): """Test that going to definition of a renamed list variable will take you to the original definition, tests the multiply renamed case. """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_rename.F90" string += def_request(file_path, 15, 6) errcode, results = run_request(string) assert errcode == 0 ref_res = [[1, 1, str(test_dir / "subdir" / "test_rename.F90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) def test_def_function_implicit_result_variable(): """Test that going to definition on the implicitly defined variable RESULT works. 
""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "functions.f90" string += def_request(file_path, 4, 18) errcode, results = run_request(string) assert errcode == 0 ref_res = [[3, 3, str(test_dir / "hover" / "functions.f90")]] assert len(ref_res) == len(results) - 1 for i, res in enumerate(ref_res): validate_def(results[i + 1], res) fortran-language-server-2.13.0+dfsg.1/test/test_server_diagnostics.py000066400000000000000000000346351450400537300257140ustar00rootroot00000000000000# from types import NoneType from setup_tests import ( path_to_uri, run_request, test_dir, write_rpc_notification, write_rpc_request, ) def test_interface_args(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test subroutines and functions with interfaces as arguments file_path = str(test_dir / "test_diagnostic_int.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_nonintrinsic(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test that use, non_intrinsic does not raise a diagnostic error file_path = str(test_dir / "test_nonintrinsic.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_submodules_spaced(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test that submodules with spacings in their parent's names are parsed file_path = str(test_dir / "test_submodule.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_end_named_variables(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Tests that variables named end do not close the scope prematurely file_path = str(test_dir / "diag" / "test_scope_end_name_var.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_external(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test that externals can be split between multiple lines # and that diagnostics for multiple definitions of externals can account # for that file_path = str(test_dir / "diag" / "test_external.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) root = path_to_uri(str((test_dir / "diag" / "test_external.f90").resolve())) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 7, "character": 17}, "end": {"line": 7, "character": 22}, }, "message": 'Variable "VAR_B" declared twice in scope', "severity": 1, "relatedInformation": [ { "location": { "uri": str(root), "range": { "start": {"line": 5, "character": 0}, "end": {"line": 5, "character": 0}, }, }, "message": "First declaration", } ], }, { "range": { "start": {"line": 8, "character": 17}, "end": {"line": 8, "character": 22}, }, "message": 'Variable "VAR_A" declared twice in scope', "severity": 1, "relatedInformation": [ { "location": { "uri": str(root), "range": { "start": {"line": 3, "character": 0}, "end": {"line": 
3, "character": 0}, }, }, "message": "First declaration", } ], }, ] def test_forall(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Checks that forall with end forall inside a case select does not cause # unexpected end of scope. file_path = str(test_dir / "diag" / "test_forall.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_use_ordering(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test USE directive ordering errors file_path = str(test_dir / "diag" / "test_use_ordering.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_where(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test where blocks file_path = str(test_dir / "diag" / "test_where.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_multiline(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test where semicolon (multi-line) file_path = str(test_dir / "diag" / "test_semicolon.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_enum(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test ENUM block file_path = str(test_dir / "diag" / "test_enum.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_module_procedure(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "subdir" / "test_submod.F90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_max_line_length(): root = test_dir / "diag" string = write_rpc_request(1, "initialize", {"rootPath": str(root)}) file_path = str(root / "test_lines.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) file_path = str(root / "conf_long_lines.json") errcode, results = run_request(string, [f"--config {file_path}"]) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 2, "character": 80}, "end": {"line": 2, "character": 155}, }, "message": 'Line length exceeds "max_line_length" (80)', "severity": 2, }, { "range": { "start": {"line": 3, "character": 100}, "end": {"line": 3, "character": 127}, }, "message": 'Comment line length exceeds "max_comment_line_length" (100)', "severity": 2, }, ] def test_implicit_none(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_implicit_none.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) 
assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 4, "character": 9}, "end": {"line": 4, "character": 13}, }, "message": "IMPLICIT statement without enclosing scope", "severity": 1, }, ] def test_contains(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_contains.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 3, "character": 4}, "end": {"line": 3, "character": 12}, }, "message": "Multiple CONTAINS statements in scope", "severity": 1, }, { "range": { "start": {"line": 5, "character": 0}, "end": {"line": 5, "character": 8}, }, "message": "CONTAINS statement without enclosing scope", "severity": 1, }, { "range": { "start": {"line": 8, "character": 0}, "end": {"line": 8, "character": 0}, }, "message": "Subroutine/Function definition before CONTAINS statement", "severity": 1, }, ] def test_visibility(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_visibility.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 5, "character": 0}, "end": {"line": 5, "character": 0}, }, "message": "Visibility statement without enclosing scope", "severity": 1, }, { "range": { "start": {"line": 1, "character": 8}, "end": {"line": 1, "character": 26}, }, "message": 'Module "nonexisting_module" not found in project', "severity": 3, }, { "range": { "start": {"line": 3, "character": 8}, "end": {"line": 3, "character": 11}, }, "message": 'Module "mod" not found in project', "severity": 3, }, { "range": { "start": {"line": 2, "character": 4}, "end": {"line": 2, "character": 12}, }, "message": "USE statements after IMPLICIT statement", "severity": 1, }, ] def test_import(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_import.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 1, "character": 0}, "end": {"line": 1, "character": 0}, }, "message": "IMPORT statement outside of interface", "severity": 1, } ] def test_variable(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_variable.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 4, "character": 19}, "end": {"line": 4, "character": 22}, }, "message": 'Variable "val" masks variable in parent scope', "severity": 2, "relatedInformation": [ { "location": { "uri": path_to_uri(str(file_path)), "range": { "start": {"line": 1, "character": 0}, "end": {"line": 1, "character": 0}, }, }, "message": "First declaration", } ], } ] def test_function(): 
string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # Test module procedure in submodules importing scopes file_path = str(test_dir / "diag" / "test_function.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string) assert errcode == 0 assert results[1]["diagnostics"] == [ { "range": { "start": {"line": 3, "character": 31}, "end": {"line": 3, "character": 34}, }, "message": 'Variable "bar" with INTENT keyword not found in argument list', "severity": 1, } ] def test_submodule_scopes(): """Test that submodule procedures and functions with modifier keywords are correctly parsed and their scopes correctly closed.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "diag")}) file_path = str(test_dir / "diag" / "test_scope_overreach.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1]["diagnostics"] == [] def test_keyword_arg_list_var_names(): """Test argument list variables named as keywords are correctly parsed.""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "diag")}) file_path = str(test_dir / "diag" / "test_function_arg_list.f90") string += write_rpc_notification( "textDocument/didOpen", {"textDocument": {"uri": file_path}} ) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1]["diagnostics"] == [] fortran-language-server-2.13.0+dfsg.1/test/test_server_documentation.py000066400000000000000000000151741450400537300262530ustar00rootroot00000000000000from setup_tests import run_request, test_dir, write_rpc_request def check_return(result_array, checks): comm_lines = [] for (i, hover_line) in enumerate(result_array["contents"][0]["value"].splitlines()): if hover_line.count("!!") > 0: comm_lines.append((i, hover_line)) assert len(comm_lines) == len(checks) for i in range(len(checks)): assert comm_lines[i][0] == checks[i][0] assert comm_lines[i][1] == checks[i][1] def hover_request(file_path, line, char): return write_rpc_request( 1, "textDocument/hover", { "textDocument": {"uri": str(file_path)}, "position": {"line": line, "character": char}, }, ) def test_doxygen(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "docs")}) file_path = test_dir / "docs" / "test_doxygen.f90" string += hover_request(file_path, 15, 17) errcode, results = run_request(string) assert errcode == 0 ref = ( (1, "!! @brief inserts a value into an ordered array"), (2, "!! "), ( 3, '!! An array "list" consisting of n ascending ordered values. The method' " insert a", ), (4, '!! "new_entry" into the array.'), (5, "!! hint: use cshift and eo-shift"), (6, "!! "), (7, "!! @param[in,out] list a real array, size: max_size"), (8, "!! @param[in] n current values in the array"), (9, "!! @param[in] max_size size if the array"), (10, "!! @param[in] new_entry the value to insert"), ) check_return(results[1], ref) def test_ford(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "docs")}) file_path = test_dir / "docs" / "test_ford.f90" string += hover_request(file_path, 5, 20) errcode, results = run_request(string) assert errcode == 0 ref = ( (1, "!! Feeds your cats and dogs, if enough food is available. If not enough"), (2, "!! food is available, some of your pets will get angry."), (4, " !! The number of cats to keep track of."), (6, " !! 
The number of dogs to keep track of."), (8, " !! The amount of pet food (in kilograms) which you have on hand."), (10, " !! The number of pets angry because they weren't fed."), ) check_return(results[1], ref) def test_doc_overwrite_type_bound_procedure_sub(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # Test we can override method docstring e.g. # procedure :: name => name_imp !< Doc override # We want to preserve the argument list docstring string += hover_request(file_path, 13, 19) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 1"), (3, " !! Doc 5"))) def test_doc_type_bound_procedure_sub_implementation(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # procedure :: name => name_imp !< Doc override # Test that name_imp will yield the full docstring present in the implementation string += hover_request(file_path, 13, 31) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 4"), (4, " !! Doc 5"))) def test_doc_variable(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # n !! Doc 5 # Test that a variable can carry over documentation string += hover_request(file_path, 37, 26) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, " !! Doc 5"),)) def test_doc_overwrite_type_bound_procedure_fun(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # Test we can override function docstring e.g. # procedure :: name => name_imp !< Doc override # We want to preserve the argument list docstring string += hover_request(file_path, 14, 17) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 2"),)) def test_doc_type_bound_procedure_fun_implementation(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # procedure :: name => name_imp !< Doc override # Test that name_imp will yield the full docstring present in the implementation string += hover_request(file_path, 14, 28) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 6"),)) def test_doc_empty_overwrite_type_bound_procedure_sub(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # Test we can ignore overriding method docstring and return the original e.g. # procedure :: name => name_imp !< # We want to preserve the argument list docstring string += hover_request(file_path, 21, 18) string += hover_request(file_path, 21, 37) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 7"), (3, " !! Doc 8"))) check_return(results[2], ((1, "!! Doc 7"), (4, " !! 
Doc 8"))) def test_doc_empty_overwrite_type_bound_procedure_fun(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # procedure :: name => name_imp !< Doc # We want to preserve the procedure docstring but also fetch the empty # docs for the implementation string += hover_request(file_path, 22, 17) string += hover_request(file_path, 22, 32) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((1, "!! Doc 3"),)) check_return(results[2], ()) def test_doc_multiline_type_bound_procedure_arg_list(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_free.f90" # Check that inline docstrings can be input and carried over in multiple lines # for both the procedure pointer and the implementation string += hover_request(file_path, 15, 32) string += hover_request(file_path, 15, 47) errcode, results = run_request(string) assert errcode == 0 check_return(results[1], ((2, " !! Doc 9"), (3, " !! Doc 10"))) check_return( results[2], ((2, " !! Doc 9"), (3, " !! Doc 10"), (5, " !! Doc 11"), (6, " !! Doc 12")), ) fortran-language-server-2.13.0+dfsg.1/test/test_server_hover.py000066400000000000000000000435351450400537300245270ustar00rootroot00000000000000from setup_tests import run_request, test_dir, write_rpc_request def hover_req(file_path: str, ln: int, col: int) -> str: return write_rpc_request( 1, "textDocument/hover", { "textDocument": {"uri": str(file_path)}, "position": {"line": ln, "character": col}, }, ) def validate_hover(result_array: list, checks: list): assert len(result_array) - 1 == len(checks) for (i, check) in enumerate(checks): assert result_array[i + 1]["contents"][0]["value"] == check def test_hover_abstract_int_procedure(): """Tests that the binding of an abstract interface is correctly resolved""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_abstract.f90" string += hover_req(file_path, 7, 30) errcode, results = run_request(string, fortls_args=["--sort_keywords", "-n1"]) assert errcode == 0 ref_results = [ """SUBROUTINE test(a, b) INTEGER(4), DIMENSION(3,6), INTENT(IN) :: a REAL(8), DIMENSION(4), INTENT(OUT) :: b""" ] validate_hover(results, ref_results) def test_hover_parameter_multiline(): """Test that hover parameters display value correctly across lines""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 2, 28) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER, PARAMETER :: var = 1000"] validate_hover(results, ref_results) def test_hover_literal_num(): """Test that hovering over literals shows their type INTEGER""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 3, 28) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER"] validate_hover(results, ref_results) def test_hover_parameter(): """Test that hover parameters display value correctly""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 4, 28) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER, 
PARAMETER :: var2 = 23"] validate_hover(results, ref_results) def test_hover_parameter_nested(): """Test that hover parameters using other parameter values works""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 4, 41) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER, PARAMETER :: var3 = var*var2"] validate_hover(results, ref_results) def test_hover_parameter_multiline_missing_type(): """Test that hover parameters display correctly when type is split across lines""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 6, 28) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER, PARAMETER :: var4 = 123"] validate_hover(results, ref_results) def test_hover_literal_real(): """Test that hovering over literals shows their values REAL""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 7, 47) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["REAL"] validate_hover(results, ref_results) def test_hover_parameter_double(): """Test that hovering over parameters shows their type DOUBLE PRECISION""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 7, 38) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["DOUBLE PRECISION, PARAMETER :: somevar = 23.12"] validate_hover(results, ref_results) def test_hover_parameter_double_sf(): """Test that hovering over parameters shows their type scientific notation""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 7, 55) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["DOUBLE PRECISION, PARAMETER :: some = 1e-19"] validate_hover(results, ref_results) def test_hover_parameter_bool(): """Test that hovering over parameters shows their values LOGICAL""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 8, 38) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["LOGICAL(kind=8), PARAMETER :: long_bool = .true."] validate_hover(results, ref_results) def test_hover_literal_bool(): """Test that hovering over literals shows their type LOGICAL""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 8, 50) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["LOGICAL"] validate_hover(results, ref_results) def test_hover_parameter_str_sq(): """Test that hovering over parameters shows their value, single quote STRING""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 9, 37) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert 
errcode == 0 ref_results = ["CHARACTER(len=5), PARAMETER :: sq_str = '12345'"] validate_hover(results, ref_results) def test_hover_literal_string_sq(): """Test that hovering over literals shows their values single quote STRING""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 9, 48) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["CHARACTER(LEN=5)"] validate_hover(results, ref_results) def test_hover_parameter_str_dq(): """Test that hovering over parameters shows their value, double quote STRING""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 10, 37) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ['CHARACTER(len=5), PARAMETER :: dq_str = "12345"'] validate_hover(results, ref_results) def test_hover_literal_string_dq(): """Test that hovering over literals shows their values double quote STRING""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "parameters.f90" string += hover_req(file_path, 10, 48) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["CHARACTER(LEN=5)"] validate_hover(results, ref_results) def test_hover_pointer_attr(): """Test that hovering maintains the variable attributes e.g. POINTER""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "pointers.f90" string += hover_req(file_path, 1, 26) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ["INTEGER, POINTER"] validate_hover(results, ref_results) def test_hover_functions(): """Test that hovering over functions provides the expected results""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "functions.f90" string += hover_req(file_path, 1, 11) string += hover_req(file_path, 7, 19) string += hover_req(file_path, 12, 12) string += hover_req(file_path, 18, 19) string += hover_req(file_path, 23, 34) string += hover_req(file_path, 28, 11) string += hover_req(file_path, 34, 21) string += hover_req(file_path, 46, 11) string += hover_req(file_path, 51, 11) string += hover_req(file_path, 55, 11) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = [ """FUNCTION fun1(arg) RESULT(fun1) INTEGER, INTENT(IN) :: arg INTEGER :: fun1""", """FUNCTION fun2(arg) RESULT(fun2) INTEGER, INTENT(IN) :: arg INTEGER :: fun2""", """FUNCTION fun3(arg) RESULT(retval) INTEGER, INTENT(IN) :: arg INTEGER :: retval""", """FUNCTION fun4(arg) RESULT(retval) INTEGER, INTENT(IN) :: arg INTEGER :: retval""", # Notice that the order of the modifiers does not match the source code # This is part of the test, ideally they would be identical but previously # any modifiers before the type would be discarded """PURE ELEMENTAL FUNCTION fun5(arg) RESULT(retval) INTEGER, INTENT(IN) :: arg INTEGER :: retval""", """FUNCTION fun6(arg) RESULT(retval) INTEGER, INTENT(IN) :: arg INTEGER, DIMENSION(10,10) :: retval""", """PURE FUNCTION outer_product(x, y) RESULT(outer_product) REAL, DIMENSION(:), INTENT(IN) :: x REAL, DIMENSION(:), INTENT(IN) :: y REAL, DIMENSION(SIZE(X), SIZE(Y)) :: outer_product""", """FUNCTION 
dlamch(cmach) RESULT(dlamch) CHARACTER :: CMACH""", """FUNCTION fun7() RESULT(val) TYPE(c_ptr) :: val""", """TYPE(c_ptr) FUNCTION c_loc(x) RESULT(c_loc)""", ] validate_hover(results, ref_results) def test_hover_spaced_keywords(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "spaced_keywords.f90" string += hover_req(file_path, 1, 45) string += hover_req(file_path, 2, 99) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = [ """REAL, DIMENSION(:, :), INTENT(IN)""", """REAL, DIMENSION( SIZE(ARG1, 1), MAXVAL([SIZE(ARG1, 2), """ """SIZE(ARG1, 1)]) ), INTENT(OUT)""", ] validate_hover(results, ref_results) def test_hover_recursive(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "recursive.f90" string += hover_req(file_path, 9, 40) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = [ """RECURSIVE SUBROUTINE recursive_assign_descending(node, vector, current_loc) TYPE(tree_inode), POINTER, INTENT(IN) :: node INTEGER, DIMENSION(:), INTENT(INOUT) :: vector INTEGER, INTENT(INOUT) :: current_loc""" ] validate_hover(results, ref_results) def test_hover_subroutine(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "subdir" / "test_submod.F90" string += hover_req(file_path, 29, 24) string += hover_req(file_path, 34, 24) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = [ """FUNCTION point_dist(a, b) RESULT(distance) TYPE(point), INTENT(IN) :: a TYPE(point), INTENT(IN) :: b REAL :: distance""", """FUNCTION is_point_equal_a(a, b) RESULT(is_point_equal_a) TYPE(point), INTENT(IN) :: a TYPE(point), INTENT(IN) :: b LOGICAL :: is_point_equal_a""", ] validate_hover(results, ref_results) def test_hover_interface_as_argument(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_diagnostic_int.f90" string += hover_req(file_path, 19, 14) errcode, results = run_request(string, fortls_args=["--sort_keywords"]) assert errcode == 0 ref_results = ( # Could be subject to change """FUNCTION foo2(f, g, h) RESULT(arg3) FUNCTION f(x) :: f FUNCTION g(x) :: g FUNCTION h(x) :: h REAL :: arg3""", ) validate_hover(results, ref_results) def test_hover_block(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "hover")}) file_path = test_dir / "hover" / "associate_block.f90" string += hover_req(file_path, 4, 17) string += hover_req(file_path, 4, 20) # string += hover_req(file_path, 10, 11) # slice of array errcode, results = run_request(string, fortls_args=["--sort_keywords", "-n", "1"]) assert errcode == 0 ref_results = ["REAL, DIMENSION(5)", "REAL"] validate_hover(results, ref_results) def test_hover_submodule_procedure(): """Test that submodule procedures and functions with modifier keywords are correctly displayed when hovering. 
""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "diag")}) file_path = test_dir / "diag" / "test_scope_overreach.f90" string += hover_req(file_path, 18, 37) string += hover_req(file_path, 23, 37) errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ """PURE RECURSIVE FUNCTION foo_sp(x) RESULT(fi) REAL(sp), INTENT(IN) :: x REAL(sp) :: fi""", """PURE RECURSIVE FUNCTION foo_dp(x) RESULT(fi) REAL(dp), INTENT(IN) :: x REAL(dp) :: fi""", ] validate_hover(results, ref_results) def test_var_type_kinds(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "parse")}) file_path = test_dir / "parse" / "test_kinds_and_dims.f90" string += hover_req(file_path, 2, 24) string += hover_req(file_path, 2, 27) string += hover_req(file_path, 3, 15) string += hover_req(file_path, 3, 19) string += hover_req(file_path, 4, 20) string += hover_req(file_path, 4, 25) string += hover_req(file_path, 5, 23) string += hover_req(file_path, 6, 25) errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ "INTEGER(kind=4)", "INTEGER(kind=4), DIMENSION(3,4)", "INTEGER*8", "INTEGER*8, DIMENSION(3,4)", "INTEGER(8)", "INTEGER(8), DIMENSION(3,4)", "REAL(kind=r15)", "REAL(kind(0.d0))", ] validate_hover(results, ref_results) def test_kind_function_result(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "parse")}) file_path = test_dir / "parse" / "test_kinds_and_dims.f90" string += hover_req(file_path, 9, 18) string += hover_req(file_path, 14, 25) errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ """FUNCTION foo(val) RESULT(r) REAL(8), INTENT(IN) :: val REAL*8 :: r""", """FUNCTION phi(val) RESULT(r) REAL(8), INTENT(IN) :: val REAL(kind=8) :: r""", ] validate_hover(results, ref_results) def test_var_type_asterisk(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "parse")}) file_path = test_dir / "parse" / "test_kinds_and_dims.f90" string += hover_req(file_path, 2 + 19, 18) string += hover_req(file_path, 2 + 19, 21) string += hover_req(file_path, 2 + 19, 29) string += hover_req(file_path, 3 + 19, 21) string += hover_req(file_path, 4 + 19, 17) string += hover_req(file_path, 5 + 19, 23) errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ "CHARACTER*17", "CHARACTER*17, DIMENSION(3,4)", "CHARACTER*17, DIMENSION(9)", "CHARACTER*(6+3)", "CHARACTER*10, DIMENSION(3,4)", "CHARACTER*(LEN(B)), DIMENSION(3,4)", ] validate_hover(results, ref_results) def test_var_name_asterisk(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "parse")}) file_path = test_dir / "parse" / "test_kinds_and_dims.f90" string += hover_req(file_path, 26, 15) string += hover_req(file_path, 26, 22) string += hover_req(file_path, 26, 34) string += hover_req(file_path, 27, 15) string += hover_req(file_path, 28, 15) string += hover_req(file_path, 29, 15) string += hover_req(file_path, 31, 24) string += hover_req(file_path, 32, 32) # string += hover_req(file_path, 33, 32) # FIXME: this is not displayed correctly errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ "CHARACTER*17", "CHARACTER*17, DIMENSION(3,4)", "CHARACTER*17, DIMENSION(9)", "CHARACTER*(6+3)", "CHARACTER*(LEN(A))", "CHARACTER*10, DIMENSION(*)", "CHARACTER(LEN=200)", "CHARACTER(KIND=4, LEN=200), DIMENSION(3,4)", # "CHARACTER(KIND=4, LEN=100), DIMENSION(3,4)", ] 
validate_hover(results, ref_results) def test_intent(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "hover" / "intent.f90" string += hover_req(file_path, 2, 31) string += hover_req(file_path, 3, 29) string += hover_req(file_path, 4, 34) string += hover_req(file_path, 5, 35) string += hover_req(file_path, 6, 35) errcode, results = run_request(string, fortls_args=["-n", "1"]) assert errcode == 0 ref_results = [ """INTEGER(4), INTENT(IN)""", """INTEGER, INTENT(OUT)""", """INTEGER(4), INTENT(INOUT)""", """INTEGER(4), INTENT(IN OUT)""", """REAL, OPTIONAL, INTENT(IN)""", ] validate_hover(results, ref_results) fortran-language-server-2.13.0+dfsg.1/test/test_server_implementation.py000066400000000000000000000055231450400537300264240ustar00rootroot00000000000000# from types import NoneType from setup_tests import path_to_uri, run_request, test_dir, write_rpc_request from fortls.json_templates import uri_json def imp_request(file, line, char): return write_rpc_request( 1, "textDocument/implementation", { "textDocument": {"uri": path_to_uri(str(file))}, "position": {"line": line, "character": char}, }, ) def check_imp_request(response: dict, references: dict): for uri, changes in response.items(): refs = references[uri] # Loop over all the changes in the current URI, instances of object for c, r in zip(changes, refs): assert c["range"] == r["range"] def create(file, line, schar, echar): return uri_json(path_to_uri(str(file)), line, schar, line, echar) def test_implementation_type_bound(): """Go to implementation of type-bound procedures""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test.f90" string += imp_request(file_path, 3, 17) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] == create(test_dir / "subdir" / "test_free.f90", 49, 11, 28) def test_implementation_intrinsics(): """Go to implementation of implicit methods is handled gracefully""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_intrinsic.f90" string += imp_request(file_path, 11, 18) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] is None def test_implementation_integer(): """Go to implementation when no implementation is present is handled gracefully""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_intrinsic.f90" string += imp_request(file_path, 20, 31) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] is None def test_implementation_empty(): """Go to implementation for empty lines is handled gracefully""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_intrinsic.f90" string += imp_request(file_path, 13, 0) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] is None def test_implementation_no_file(): """Go to implementation for empty lines is handled gracefully""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "fake.f90" string += imp_request(file_path, 13, 0) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] is None 
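The implementation, rename, and reference tests in this archive all drive fortls with raw LSP messages assembled by the setup_tests helpers (write_rpc_request, run_request). As a minimal sketch of the wire format those helpers produce, assuming the standard LSP base protocol (a Content-Length header followed by a JSON-RPC 2.0 body) and a hypothetical file URI, a textDocument/implementation request can be framed as follows; the repository's own write_rpc_request remains the authoritative version.

import json

def frame_lsp_request(request_id: int, method: str, params: dict) -> str:
    # JSON-RPC 2.0 payload as used by the Language Server Protocol.
    body = json.dumps(
        {"jsonrpc": "2.0", "id": request_id, "method": method, "params": params}
    )
    # LSP base-protocol framing: Content-Length header, blank line, then the body.
    return f"Content-Length: {len(body.encode('utf-8'))}\r\n\r\n{body}"

# Hypothetical request: implementation of the symbol at line 3, character 17
# (zero-based), mirroring the imp_request() helper above.
print(frame_lsp_request(1, "textDocument/implementation", {
    "textDocument": {"uri": "file:///tmp/example.f90"},
    "position": {"line": 3, "character": 17},
}))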
fortran-language-server-2.13.0+dfsg.1/test/test_server_messages.py000066400000000000000000000016671450400537300252130ustar00rootroot00000000000000from setup_tests import run_request, test_dir, write_rpc_request def check_msg(ref, res): assert ref["type"] == res["type"] assert ref["message"] == res["message"] # def test_config_file_non_existent(): # string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) # errcode, results = run_request(string, ["-c", "fake.json"]) # # ref = {"type": 1, "message": "Configuration file 'fake.json' not found"} # assert errcode == 0 # check_msg(ref, results[0]) def test_config_file_non_existent_options(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) errcode, results = run_request(string, ["-c", "wrong_syntax.json"]) ref = { "type": 1, "message": ( 'Error: ":2 Unexpected "," at column 18" while reading' ' "wrong_syntax.json" Configuration file' ), } assert errcode == 0 check_msg(ref, results[0]) fortran-language-server-2.13.0+dfsg.1/test/test_server_references.py000066400000000000000000000032161450400537300255150ustar00rootroot00000000000000from pathlib import Path from setup_tests import path_to_uri, run_request, test_dir, write_rpc_request def validate_refs(result_array, checks): def find_in_results(uri, sline): for (i, result) in enumerate(result_array): if (result["uri"] == uri) and (result["range"]["start"]["line"] == sline): del result_array[i] return result return None assert len(result_array) == len(checks) for check in checks: result = find_in_results(path_to_uri(check[0]), check[1]) assert result is not None assert result["range"]["start"]["character"] == check[2] assert result["range"]["end"]["character"] == check[3] def ref_req(uri: Path, ln: int, ch: int): return write_rpc_request( 2, "textDocument/references", { "textDocument": {"uri": str(uri)}, "position": {"line": ln - 1, "character": ch - 1}, }, ) def test_references(): string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += ref_req(file_path, 10, 9) errcode, results = run_request(string) assert errcode == 0 # free_path = str(test_dir / "subdir" / "test_free.f90") validate_refs( results[1], ( [str(test_dir / "test_prog.f08"), 2, 21, 27], [str(test_dir / "test_prog.f08"), 9, 5, 11], [free_path, 8, 8, 14], [free_path, 16, 9, 15], [free_path, 18, 14, 20], [free_path, 36, 6, 12], [free_path, 44, 6, 12], [free_path, 78, 6, 12], ), ) fortran-language-server-2.13.0+dfsg.1/test/test_server_rename.py000066400000000000000000000172651450400537300246540ustar00rootroot00000000000000from setup_tests import ( check_post_msg, path_to_uri, run_request, test_dir, write_rpc_request, ) def rename_request(new_name: str, file_path, ln: int, ch: int): return write_rpc_request( 1, "textDocument/rename", { "newName": new_name, "textDocument": {"uri": str(file_path)}, "position": {"line": ln, "character": ch}, }, ) def check_rename_response(response: dict, references: dict): # Loop over URI's if the change spans multiple files there will be more than 1 for uri, changes in response.items(): refs = references[uri] # Loop over all the changes in the current URI, instances of object for c, r in zip(changes, refs): assert c["range"] == r["range"] assert c["newText"] == r["newText"] def create(new_text: str, sln: int, sch: int, eln: int, ech: int): return { "range": { "start": {"line": sln, "character": sch}, "end": {"line": eln, "character": ech}, }, "newText": new_text, } def test_rename_var(): """ "Test simple 
variable rename""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += rename_request("str_rename", file_path, 5, 25) errcode, results = run_request(string) assert errcode == 0 ref = {} ref[path_to_uri(str(file_path))] = [create("str_rename", 5, 20, 5, 29)] check_rename_response(results[1]["changes"], ref) def test_rename_var_across_module(): """Test renaming objects like variables across modules works""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += rename_request("new_module_var", file_path, 26, 15) errcode, results = run_request(string) assert errcode == 0 ref = {} ref[path_to_uri(str(test_dir / "subdir" / "test_free.f90"))] = [ create("new_module_var", 32, 11, 32, 26) ] ref[path_to_uri(str(file_path))] = [create("new_module_var", 2, 44, 2, 59)] ref[path_to_uri(str(file_path))].append(create("new_module_var", 26, 8, 26, 23)) check_rename_response(results[1]["changes"], ref) def test_rename_empty(): """Test that renaming nothing will not error""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_imp_type_bound_proc.f90" string += rename_request("bar", file_path, 9, 0) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 assert results[1] is None def test_rename_member_type_ptr(): """Test that renaming type bound pointers of procedure methods rename only the pointer and not the implementation, even if the pointer and the implementation share the same name """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += rename_request("bp_rename", file_path, 18, 25) errcode, results = run_request(string) assert errcode == 0 ref = {} ref[path_to_uri(str(file_path))] = [create("bp_rename", 18, 16, 18, 26)] ref[path_to_uri(str(test_dir / "subdir" / "test_free.f90"))] = [ create("bp_rename", 15, 27, 15, 37) ] check_rename_response(results[1]["changes"], ref) def test_rename_member_type_ptr_null(): """Test renaming type bound pointers of procedure methods works when pointing to null """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += rename_request("bp_rename", file_path, 17, 25) errcode, results = run_request(string) assert errcode == 0 ref = {} ref[path_to_uri(str(file_path))] = [create("bp_rename", 17, 16, 17, 28)] ref[path_to_uri(str(test_dir / "subdir" / "test_free.f90"))] = [ create("bp_rename", 11, 43, 11, 55) ] check_rename_response(results[1]["changes"], ref) def test_rename_type_bound_proc_no_ptr(): """Test renaming type bound pointers of procedure methods works when no pointer is setup. Requesting to rename the procedure should rename, the implementation and the Method itself i.e. 
call self%foo() Requesting to rename the implementation should also rename the procedure and all the locations it is called in """ root = test_dir / "rename" string = write_rpc_request(1, "initialize", {"rootPath": str(root)}) file_path = root / "test_rename_imp_type_bound_proc.f90" # Rename the procedure name and check if implementation also renames string += rename_request("bar", file_path, 5, 23) # Rename the implementation name and check if declaration, references change string += rename_request("bar", file_path, 10, 18) errcode, results = run_request(string) assert errcode == 0 ref = {} ref[path_to_uri(str(file_path))] = [create("bar", 5, 21, 5, 24)] ref[path_to_uri(str(file_path))].append(create("bar", 10, 15, 10, 18)) ref[path_to_uri(str(file_path))].append(create("bar", 12, 18, 12, 21)) ref[path_to_uri(str(file_path))].append(create("bar", 13, 19, 13, 22)) check_rename_response(results[1]["changes"], ref) check_rename_response(results[2]["changes"], ref) def test_rename_non_existent_file(): """Test renaming type bound pointers of procedure methods works when pointing to null """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "fake.f90" string += rename_request("bar", file_path, 5, 23) errcode, results = run_request(string) assert errcode == 0 assert results[1] is None def test_rename_nested(): """Test renaming heavily nested constructs works""" string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_nested.f90" string += rename_request("bar", file_path, 6, 23) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 ref = {} ref[path_to_uri(str(file_path))] = [create("bar", 6, 23, 6, 26)] ref[path_to_uri(str(file_path))].append(create("bar", 9, 27, 9, 30)) check_rename_response(results[1]["changes"], ref) def test_rename_intrinsic(): """Test renaming an intrinsic function, while no other function exists with the same name, will throw an error """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_nested.f90" string += rename_request("bar", file_path, 8, 27) errcode, results = run_request(string, ["-n", "1"]) assert errcode == 0 check_post_msg(results[1], "Rename failed: Cannot rename intrinsics", 2) assert results[2] is None def test_rename_use_only_rename(): """Test renaming constructs of `use mod, only: val => root_val are handled correctly """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "subdir")}) file_path = test_dir / "subdir" / "test_rename.F90" string += rename_request("bar", file_path, 13, 5) errcode, results = run_request(string, ["-n", "1"]) # FIXME: to be implemented assert errcode == 0 def test_rename_skip_intrinsic(): """Test that renaming functions named the same as intrinsic functions e.g. 
size() will only rename the user defined functions """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir / "rename")}) file_path = test_dir / "rename" / "test_rename_intrinsic.f90" string += rename_request("bar", file_path, 22, 13) errcode, results = run_request(string, ["-n", "1"]) # FIXME: to be implemented assert errcode == 0 fortran-language-server-2.13.0+dfsg.1/test/test_server_signature_help.py000066400000000000000000000040321450400537300264020ustar00rootroot00000000000000from pathlib import Path from setup_tests import run_request, test_dir, write_rpc_request def sigh_request(uri: Path, line: int, char: int): return write_rpc_request( 1, "textDocument/signatureHelp", { "textDocument": {"uri": str(uri)}, "position": {"line": line, "character": char}, }, ) def validate_sigh(results, refs): assert results.get("activeParameter", -1) == refs[0] signatures = results.get("signatures") assert signatures[0].get("label") == refs[2] assert len(signatures[0].get("parameters")) == refs[1] def test_subroutine_signature_help(): """Test that the signature help is correctly resolved for all arguments and that the autocompletion is correct for the subroutine signature. """ string = write_rpc_request(1, "initialize", {"rootPath": str(test_dir)}) file_path = test_dir / "test_prog.f08" string += sigh_request(file_path, 25, 18) string += sigh_request(file_path, 25, 20) string += sigh_request(file_path, 25, 22) string += sigh_request(file_path, 25, 27) string += sigh_request(file_path, 25, 29) errcode, results = run_request(string) assert errcode == 0 sub_sig = "test_sig_Sub(arg1, arg2, opt1=opt1, opt2=opt2, opt3=opt3)" ref = ( [0, 5, sub_sig], [1, 5, sub_sig], [2, 5, sub_sig], [3, 5, sub_sig], [4, 5, sub_sig], ) assert len(ref) == len(results) - 1 for i, r in enumerate(ref): validate_sigh(results[i + 1], r) def test_intrinsics(): string = write_rpc_request( 1, "initialize", {"rootPath": str(test_dir / "signature")} ) file_path = test_dir / "signature" / "nested_sigs.f90" string += sigh_request(file_path, 8, 77) errcode, results = run_request( string, ["--hover_signature", "--use_signature_help", "-n", "1"] ) assert errcode == 0 ref = [[0, 2, "REAL(A, KIND=kind)"]] assert len(ref) == len(results) - 1 for i, r in enumerate(ref): validate_sigh(results[i + 1], r) fortran-language-server-2.13.0+dfsg.1/test/test_source/000077500000000000000000000000001450400537300227325ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/.fortls000066400000000000000000000003741450400537300242500ustar00rootroot00000000000000{ // Directories to be scanned for source files "source_dirs": [ "**/" ], // These are regular expressions, files and paths that can be ignored "excl_paths": [ "excldir/**", "./diag/", "docs", "rename", "parse" ] } fortran-language-server-2.13.0+dfsg.1/test/test_source/completion/000077500000000000000000000000001450400537300251035ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/completion/test_vis_mod_completion.f90000066400000000000000000000003721450400537300323550ustar00rootroot00000000000000module foo implicit none public :: length private integer :: len integer :: length end module foo program test_private use foo, only: length use test_vis_mod implicit none print*, some_var, length end program test_private 
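The validate_sigh helper in test_server_signature_help.py earlier in this member inspects only three things: the activeParameter index, the number of parameters in the first signature, and that signature's label. A result shaped like the dictionary below would pass those checks; the label and parameter names are copied from the test_sig_Sub reference in that file, while the dictionary itself is purely illustrative.

# Illustrative signatureHelp result; only the fields checked by validate_sigh
# are populated here.
result = {
    "activeParameter": 1,
    "signatures": [
        {
            "label": "test_sig_Sub(arg1, arg2, opt1=opt1, opt2=opt2, opt3=opt3)",
            "parameters": [
                {"label": "arg1"},
                {"label": "arg2"},
                {"label": "opt1=opt1"},
                {"label": "opt2=opt2"},
                {"label": "opt3=opt3"},
            ],
        }
    ],
}
# validate_sigh(result, [1, 5, result["signatures"][0]["label"]]) reduces to:
assert result.get("activeParameter", -1) == 1
assert len(result["signatures"][0]["parameters"]) == 5
assert result["signatures"][0]["label"] == (
    "test_sig_Sub(arg1, arg2, opt1=opt1, opt2=opt2, opt3=opt3)"
)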
fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/000077500000000000000000000000001450400537300236365ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/conf_long_lines.json000066400000000000000000000000761450400537300276720ustar00rootroot00000000000000{ "max_line_length": 80, "max_comment_line_length": 100 } fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_contains.f90000066400000000000000000000003451450400537300270350ustar00rootroot00000000000000program test_contains implicit none contains contains end program test_contains contains module test_contains2 subroutine foo() ! Err: before contains end subroutine contains end module test_contains2 fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_enum.f90000066400000000000000000000003761450400537300261670ustar00rootroot00000000000000program test_enum implicit none enum, bind(c) enumerator :: red =1, blue, black =5 enumerator yellow enumerator gold, silver, bronze enumerator :: purple enumerator :: pink, lavender endenum end program test_enum fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_external.f90000066400000000000000000000004351450400537300270410ustar00rootroot00000000000000program test_external implicit none REAL, EXTERNAL :: VAL REAL VAR_A EXTERNAL VAR_A EXTERNAL VAR_B REAL VAR_B EXTERNAL VAR_B ! throw error REAL VAR_A ! throw error EXTERNAL VAR_C end program test_external fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_forall.f90000066400000000000000000000004361450400537300264770ustar00rootroot00000000000000program test_forall implicit none integer :: i, j, dim=3, a(10) = 2 select case (dim) case(3) forall(i=1:10) a(i) = a(i) **2 forall (j=1:i) a(j) = a(j) ** 2 end forall case default call abort() end select end program test_forall fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_function.f90000066400000000000000000000002131450400537300270360ustar00rootroot00000000000000module test_functions contains subroutine foo(val) integer, intent(in) :: bar end subroutine end module test_functions fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_function_arg_list.f90000066400000000000000000000016061450400537300307310ustar00rootroot00000000000000program test_arg_names_as_keywords implicit none integer, parameter :: impure = 8 contains subroutine foo(recursive, ierr) integer, intent(in) :: recursive integer, intent(out) :: ierr print*, recursive end subroutine foo real(8) impure elemental function foo2(recursive, elemental) result(pure) integer, intent(in) :: recursive, elemental end function foo2 real( kind = impure ) pure elemental function foo3(recursive) result(pure) integer, intent(in) :: recursive end function foo3 subroutine foo4(& recursive, & ierr) integer, intent(in) :: recursive integer, intent(out) :: ierr print*, recursive end subroutine foo4 pure real(impure) function foo5(recursive) result(val) integer, intent(in) :: recursive end function foo5 end program test_arg_names_as_keywords fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_implicit_none.f90000066400000000000000000000001071450400537300300440ustar00rootroot00000000000000program test_imp implicit none end program test_imp implicit none fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_import.f90000066400000000000000000000001221450400537300265220ustar00rootroot00000000000000program test_diagnostic_import import some end program test_diagnostic_import 
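The conf_long_lines.json fixture above sets max_line_length and max_comment_line_length, and test_max_line_length in test_server_diagnostics.py asserts the warnings that result. The toy function below is not fortls's internal code; it is only a sketch of the rule those two settings express, under the assumption that ordinary source lines are measured against max_line_length and comment lines against max_comment_line_length.

def over_length_lines(lines, max_line_length=80, max_comment_line_length=100):
    # Toy check, not fortls internals: return (index, limit, length) for every
    # line that exceeds its applicable limit.
    issues = []
    for idx, raw in enumerate(lines):
        line = raw.rstrip("\n")
        is_comment = line.lstrip().startswith("!")
        limit = max_comment_line_length if is_comment else max_line_length
        if len(line) > limit:
            issues.append((idx, limit, len(line)))
    return issues

# Example: a short line, an overlong comment, and an overlong statement.
print(over_length_lines(["x = 1", "! " + "c" * 120, "y = " + "1 + " * 30 + "1"]))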
fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_lines.f90000066400000000000000000000005301450400537300263250ustar00rootroot00000000000000program test_lines implicit none character(len=123) :: val = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam sodales imperdiet dolor, sit amet venenatis magna dictum id." ! Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam sodales imperdiet dolor, sit amet venenatis magna dictum id. end program test_lines fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_scope_end_name_var.f90000066400000000000000000000002311450400537300310200ustar00rootroot00000000000000program scope_end_named_var implicit none integer :: end, endif if (.true.) then end = 10 end if end program scope_end_named_var fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_scope_overreach.f90000066400000000000000000000012201450400537300303570ustar00rootroot00000000000000module m interface module subroutine sub(arg) integer :: arg end subroutine end interface end module m submodule (m) n use, intrinsic :: iso_fortran_env, only: int8, int16, int32, int64 implicit none integer, parameter :: sp = selected_real_kind(6) integer, parameter :: dp = selected_real_kind(15) contains pure recursive module function foo_sp(x) result(fi) real(sp), intent(in) :: x real(sp) :: fi end function foo_sp pure recursive module function foo_dp(x) result(fi) real(dp), intent(in) :: x real(dp) :: fi end function foo_dp end submodule n fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_semicolon.f90000066400000000000000000000010271450400537300272050ustar00rootroot00000000000000program test_semicolon implicit none integer :: a = 1; character(len=1) :: v; real, parameter :: p = 0.1E-4; character(len=10), parameter :: str = "a;val;that" character(len=100), parameter :: str2 = "a;string;"& "that;becomes"// & ";"& &"multiline";integer& :: b;real & &,& parameter& ::& c& =& 100& &0090;real :: d;real::e;real::f print*, "one"; print*, str2 print*, a; print*, p; ! a; comment; that;contains; semi-colons end program test_semicolon fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_use_ordering.f90000066400000000000000000000004141450400537300277010ustar00rootroot00000000000000module mod_a integer, parameter :: q_a = 4 end module module mod_b use mod_a integer, parameter :: q_b = 8 end module program test_use_ordering use mod_b, only: q_b use mod_a real(q_a) :: r_a real(q_b) :: r_b end program test_use_ordering fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_variable.f90000066400000000000000000000002471450400537300270050ustar00rootroot00000000000000program test_variable integer :: val contains subroutine foo() integer :: val ! Warn: shadows parent end subroutine end program test_variable fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_visibility.f90000066400000000000000000000002141450400537300274010ustar00rootroot00000000000000program test_visibility use nonexisting_module ! Info: missing module implicit none use mod end program test_visibility public fortran-language-server-2.13.0+dfsg.1/test/test_source/diag/test_where.f90000066400000000000000000000006501450400537300263300ustar00rootroot00000000000000program test_where implicit none ! Example variables real:: A(5),B(5),C(5) A = 0.0 B = 1.0 C = [0.0, 4.0, 5.0, 10.0, 0.0] ! Oneliner WHERE(B .GT. 0.0) B = SUM(A, DIM=1) ! Simple where construct use where (C/=0) A=B/C elsewhere A=0.0 end where ! 
Named where construct named: where (C/=0) A=B/C elsewhere A=0.0 end where named end program test_where fortran-language-server-2.13.0+dfsg.1/test/test_source/docs/000077500000000000000000000000001450400537300236625ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/docs/test_doxygen.f90000066400000000000000000000023221450400537300267150ustar00rootroot00000000000000module test_doxygen implicit none contains !> @brief inserts a value into an ordered array !! !! An array "list" consisting of n ascending ordered values. The method insert a !! "new_entry" into the array. !! hint: use cshift and eo-shift !! !! @param[in,out] list a real array, size: max_size !! @param[in] n current values in the array !! @param[in] max_size size if the array !! @param[in] new_entry the value to insert subroutine insert(list, n, max_size, new_entry) real, dimension (:), intent (inout) :: list integer, intent (in) :: n, max_size real, intent (in) :: new_entry end subroutine insert !> @brief calcs the angle between two given vectors !! !! using the standard formula: !! \f$\cos \theta = \frac{ \vec v \cdot \vec w}{\abs{v}\abs{w}}\f$. !! !! @param[in] \f$v,w\f$ real vectors !! @return a real value describing the angle. 0 if \f$\abs v\f$ or \f$\abs w\f$ below a !! threshold. pure function calc_angle(v, w) result (theta) real, dimension (:), intent (in) :: v, w real :: theta end function calc_angle end module test_doxygen fortran-language-server-2.13.0+dfsg.1/test/test_source/docs/test_ford.f90000066400000000000000000000013251450400537300261740ustar00rootroot00000000000000module test_fortd implicit none contains subroutine feed_pets(cats, dogs, food, angry) !! Feeds your cats and dogs, if enough food is available. If not enough !! food is available, some of your pets will get angry. ! Arguments integer, intent(in) :: cats !! The number of cats to keep track of. integer, intent(in) :: dogs !! The number of dogs to keep track of. real, intent(inout) :: food !! The amount of pet food (in kilograms) which you have on hand. integer, intent(out) :: angry !! The number of pets angry because they weren't fed. 
return end subroutine feed_pets end module test_fortd fortran-language-server-2.13.0+dfsg.1/test/test_source/excldir/000077500000000000000000000000001450400537300243645ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/excldir/sub1/000077500000000000000000000000001450400537300252365ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/excldir/sub1/tmp.f90000066400000000000000000000013401450400537300263540ustar00rootroot00000000000000module oumods use, intrinsic :: iso_c_binding implicit integer(c_int) (i-k), integer(c_int) (m,n), & & real(c_double) (a-h), real(c_double) (l), real(c_double) (o-z) TYPE :: ex_type INTEGER :: A = 0 CONTAINS FINAL :: del_ex_type PROCEDURE :: sub => ex_sub END TYPE ex_type contains subroutine zI12(t,c,alpha,beta,r) complex(c_double_complex) c,r, x,y,z z = c*t y = exp(z) x = (2.0_c_double * cosh((z - cmplx(0._c_double,3.14159265358979324_c_double, kind(1._c_double))) & & /2._c_double )) / (c / exp((z + cmplx(0._c_double,3.14159265358979324_c_double,kind(1._c_double)))/2._c_double)) r = beta*r+alpha*((t*y - x)/c) end subroutine end module fortran-language-server-2.13.0+dfsg.1/test/test_source/excldir/sub2/000077500000000000000000000000001450400537300252375ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/excldir/sub2/fake2.f90000066400000000000000000000000001450400537300265350ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/f90_config.json000066400000000000000000000015541450400537300255550ustar00rootroot00000000000000{ "nthreads": 8, "notify_init": true, "incremental_sync": true, "sort_keywords": true, "disable_autoupdate": true, "source_dirs": ["subdir", "pp/**"], "incl_suffixes": [".FF", ".fpc", ".h", "f20"], "excl_suffixes": ["_tmp.f90", "_h5hut_tests.F90"], "excl_paths": ["excldir", "hover/**"], "autocomplete_no_prefix": true, "autocomplete_no_snippets": true, "autocomplete_name_only": true, "lowercase_intrinsics": true, "use_signature_help": true, "variable_hover": true, "hover_signature": true, "hover_language": "FortranFreeForm", "max_line_length": 80, "max_comment_line_length": 80, "disable_diagnostics": true, "pp_suffixes": [".h", ".fh"], "include_dirs": ["./include/**"], "pp_defs": { "HAVE_PETSC": "", "HAVE_ZOLTAN": "", "Mat": "type(tMat)" }, "symbol_skip_mem": true, "enable_code_actions": true } fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/000077500000000000000000000000001450400537300240555ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/associate_block.f90000066400000000000000000000005271450400537300275260ustar00rootroot00000000000000PROGRAM test_associate_block IMPLICIT NONE REAL :: A(5), B(5,5), C, III = 1 ASSOCIATE (X => A, Y => C) PRINT*, X, Y, III END ASSOCIATE ASSOCIATE (X => 1) PRINT*, X END ASSOCIATE ASSOCIATE (ARRAY => B(:,1)) ARRAY (3) = ARRAY (1) + ARRAY (2) END ASSOCIATE END PROGRAM test_associate_block fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/functions.f90000066400000000000000000000027671450400537300264210ustar00rootroot00000000000000! simple function function fun1(arg) integer, intent(in) :: arg integer :: fun1 end function fun1 ! function with type on definition, implied result integer function fun2(arg) integer, intent(in) :: arg end function fun2 ! function with return function fun3(arg) result(retval) integer, intent(in) :: arg integer :: retval end function fun3 ! 
function with type on definition and return integer function fun4(arg) result(retval) integer, intent(in) :: arg end function fun4 ! function with type on definition, return and keywords pure integer elemental function fun5(arg) result(retval) integer, intent(in) :: arg end function fun5 ! function with type on definition and return function fun6(arg) result(retval) integer, intent(in) :: arg integer, dimension(10,10) :: retval end function fun6 ! functions with complex result type pure function outer_product(x, y) real, dimension(:), intent(in) :: x, y real, dimension(size(x), size(y)) :: outer_product integer :: i, j forall (i=1:size(x)) forall (j=1:size(y)) outer_product(i, j) = x(i) * y(j) end forall end forall end function outer_product ! functions with no result type, common in interfaces function dlamch(CMACH) character :: CMACH end function dlamch ! intrinsic functions like c_loc display a return type function fun7() result(val) use, intrinsic :: iso_c_binding integer, dimension(1), target :: ar type(c_ptr) :: val val = c_loc(ar) end function fun7 fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/intent.f90000066400000000000000000000004201450400537300256720ustar00rootroot00000000000000subroutine intent(arg1, arg2, arg3, arg4, arg5) implicit none integer(4), intent(in) :: arg1 integer, intent(out) :: arg2 integer(4), intent(inout) :: arg3 integer(4), intent(in out) :: arg4 real, optional, intent(in) :: arg5 end subroutine intent fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/parameters.f90000066400000000000000000000007171450400537300265450ustar00rootroot00000000000000program params implicit none integer, parameter :: var = & 1000, & var2 = 23, var3 = & var*var2, & var4 = 123 double precision, parameter :: somevar = 23.12, some = 1e-19 logical(kind=8), parameter :: long_bool = .true. 
character(len=5), parameter :: sq_str = '12345' character(len=5), parameter :: dq_str = "12345" end program params fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/pointers.f90000066400000000000000000000000721450400537300262370ustar00rootroot00000000000000program pointers INTEGER, POINTER :: val1 end program fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/recursive.f90000066400000000000000000000015511450400537300264060ustar00rootroot00000000000000module tree type tree_inode integer :: value = 0 type (tree_inode), pointer :: left=>null() type (tree_inode), pointer :: right=>null() type (tree_inode), pointer :: parent=>null() end type tree_inode contains recursive subroutine recursive_assign_descending(node, vector, current_loc) type(tree_inode), pointer, intent(in) :: node integer, dimension(:), intent(inout) :: vector integer, intent(inout) :: current_loc if (associated(node)) then call recursive_assign_descending(node%right, vector, current_loc) vector(current_loc) = node%value current_loc = current_loc + 1 call recursive_assign_descending(node%left, vector, current_loc) end if return end subroutine recursive_assign_descending end module tree fortran-language-server-2.13.0+dfsg.1/test/test_source/hover/spaced_keywords.f90000066400000000000000000000003341450400537300275630ustar00rootroot00000000000000subroutine spaced_keywords(arg1, arg2) real, dimension (:, :), intent (in) :: arg1 real, dimension ( size(arg1, 1), maxval([size(arg1, 2), size(arg1, 1)]) ), intent (out) :: arg2 end subroutine spaced_keywords fortran-language-server-2.13.0+dfsg.1/test/test_source/include/000077500000000000000000000000001450400537300243555ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/include/empty.h000066400000000000000000000000001450400537300256520ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/parse/000077500000000000000000000000001450400537300240445ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/parse/test_incomplete_dims.f90000066400000000000000000000004531450400537300306000ustar00rootroot00000000000000 ! Tests that the parser will not break, when parsing incomplete variables ! constructs. This is particularly important for autocompletion. program test_incomplete_dims implicit none integer :: dim_val(1, 2 character :: char_val*(10 integer :: ( end program test_incomplete_dims fortran-language-server-2.13.0+dfsg.1/test/test_source/parse/test_kinds_and_dims.f90000066400000000000000000000025351450400537300303760ustar00rootroot00000000000000subroutine normal_kinds() integer, parameter :: r15 = selected_real_kind(15) integer(kind=4) :: a, b(3,4) integer*8 aa, bb(3,4) integer(8) :: aaa, bbb(3,4) real(kind=r15) :: r real(kind(0.d0)) :: rr end subroutine normal_kinds real*8 function foo(val) result(r) real(8), intent(in) :: val r = val end function foo real(kind=8) function phi(val) result(r) real(8), intent(in) :: val r = val end function phi subroutine character_len_parsing(input) ! global variable_type * length variable_name1, variable_name2,... CHARACTER*17 A, B(3,4), V(9) CHARACTER*(6+3) C CHARACTER*10D(3,4) CHARACTER*(LEN(B))DD(3,4) ! local variable_type variable_name1 * length, variable_name2 * length,... CHARACTER AA*17, BB(3,4)*17, VV(9)*17 CHARACTER CC*(6+3) CHARACTER AAA*(LEN(A)) CHARACTER INPUT(*)*10 ! explicit len and kind for characters CHARACTER(LEN=200) F CHARACTER(KIND=4, LEN=200) FF(3,4) CHARACTER(KIND=4, LEN=200) AAAA(3,4)*100 ! 
override global length with local length CHARACTER*10 BBB(3,4)*(LEN(B)) ! has the length of len(b) CHARACTER*10CCC(3,4)*(LEN(B)) ! no-space CHARACTER(KIND=4) BBBB(3,4)*(LEN(B)) ! cannot have *10(kind=4) or vice versa INTEGER((4)) INT_KIND_IMP ! FIXME: (()) trips up the regex end subroutine character_len_parsing fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/000077500000000000000000000000001450400537300233515ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/.fortls000077700000000000000000000000001450400537300272532.pp_conf.jsonustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/.pp_conf.json000066400000000000000000000004301450400537300257430ustar00rootroot00000000000000{ "lowercase_intrinsics": true, "use_signature_help": true, "variable_hover": true, "hover_signature": true, "enable_code_actions": true, "pp_suffixes": [".h", ".F90"], "incl_suffixes": [".h"], "include_dirs": ["include"], "pp_defs": { "HAVE_CONTIGUOUS": "" } } fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/include/000077500000000000000000000000001450400537300247745ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/include/petscerror.h000066400000000000000000000004151450400537300273350ustar00rootroot00000000000000#if !defined (PETSCERRORDEF_H) #define PETSCERRORDEF_H #define PETSC_ERR_MEM 55 #define PETSC_ERR_INT_OVERFLOW 84 #define PETSC_ERR_FLOP_COUNT 90 #if defined PETSC_ERR_MEM || defined PETSC_ERR_INT_OVERFLOW #define SUCCESS .true. #endif #endif fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/include/petscpc.h000066400000000000000000000005041450400537300266050ustar00rootroot00000000000000#if !defined (PETSCPCDEF_H) #define PETSCPCDEF_H #include "petscerror.h" #define PC type(tPC) #define PCType character*(80) #define ewrite(priority, format) if (priority <= 3) write((priority), format) #define ewrite2(priority, format) \ if (priority <= 3) write((priority), format) #define varVar \ 55 #endif fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/preproc.F90000066400000000000000000000005651450400537300253110ustar00rootroot00000000000000program preprocessor #include "petscpc.h" #ifdef PETSCPCDEF_H integer, parameter :: var = 1000 PCType :: tmp print*, 999, 3.14, "some", var, PETSC_ERR_MEM print*, PETSC_ERR_INT_OVERFLOW, varVar ewrite(1,*) 'Assemble EP P1 matrix and rhs sytem' ewrite2(1,*) 'Assemble EP P1 matrix and rhs sytem' print*, SUCCESS #endif end program preprocessor fortran-language-server-2.13.0+dfsg.1/test/test_source/pp/preproc_keywords.F90000066400000000000000000000002271450400537300272330ustar00rootroot00000000000000program test_preproc_keywords REAL & #ifdef HAVE_CONTIGUOUS , CONTIGUOUS & #endif , POINTER :: & var1(:), & var2(:) end program test_preproc_keywords fortran-language-server-2.13.0+dfsg.1/test/test_source/rename/000077500000000000000000000000001450400537300242015ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/rename/test_rename_imp_type_bound_proc.f90000066400000000000000000000003501450400537300331450ustar00rootroot00000000000000module mod implicit none type :: t contains procedure :: foo end type t contains subroutine foo(self) class(t), intent(in) :: self call self%foo() end subroutine foo end module mod fortran-language-server-2.13.0+dfsg.1/test/test_source/rename/test_rename_intrinsic.f90000066400000000000000000000013021450400537300311050ustar00rootroot00000000000000module test_rename_intrinsic implicit none interface 
size module procedure size_comp end interface size contains subroutine size_comp(val, ret) integer, intent(in) :: val(:) integer, intent(out) :: ret integer, dimension(5) :: fixed ret = maxval([size(val), size(fixed)]) end subroutine size_comp end module test_rename_intrinsic program driver use test_rename_intrinsic implicit none integer, dimension(10) :: val integer, dimension(5) :: tmp integer :: sz call size(val, sz) ! This is fortran_sub and should be renamed print*, size(val) ! This is an intrinsic, should be skipped in renaming end program driver fortran-language-server-2.13.0+dfsg.1/test/test_source/rename/test_rename_nested.f90000066400000000000000000000004041450400537300303670ustar00rootroot00000000000000module mod implicit none contains subroutine fi() contains subroutine phi() integer :: a(5) print*, size(a) ! this is an intrinsic end subroutine phi end subroutine fi end module mod fortran-language-server-2.13.0+dfsg.1/test/test_source/signature/000077500000000000000000000000001450400537300247335ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/signature/nested_sigs.f90000066400000000000000000000005711450400537300275650ustar00rootroot00000000000000program test_nan use, intrinsic :: iso_fortran_env, only: sp=>real32, dp=>real64, qp=>real128 use, intrinsic :: ieee_arithmetic, only: ieee_value, ieee_quiet_nan, ieee_is_nan implicit none complex(qp) :: nan_zp nan_zp = ieee_value(1.,ieee_quiet_nan) print '(A4,2X,F5.1,6X,L1,2X,Z32)','zp',real(nan_zp), ieee_is_nan(real(nan_zp)),nan_zp end program test_nan fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/000077500000000000000000000000001450400537300242225ustar00rootroot00000000000000fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_abstract.f90000066400000000000000000000003741450400537300274100ustar00rootroot00000000000000MODULE test_abstract ABSTRACT INTERFACE SUBROUTINE abs_interface(a,b) INTEGER(4), DIMENSION(3,6), INTENT(in) :: a REAL(8), INTENT(out) :: b(4) END SUBROUTINE abs_interface END INTERFACE PROCEDURE(abs_interface) :: test END MODULE test_abstract fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_fixed.f000066400000000000000000000013021450400537300265230ustar00rootroot00000000000000 double precision function myfun(n,xval) integer i,n c ********** double precision xval integer ieq1(2), ieq2(2) double precision req(2) character*(LEN=200) bob character dave*(20) equivalence (req(1),ieq1(1)) equivalence (req(2),ieq2(1)) c data req(1) /1.0000000d-16/ data req(2) /1.0000000d-308/ c myfun = xval bob(1:20) = dave do 10 i = 1, n 10 myfun = myfun + xval return c end c subroutine glob_sub(n,xval,yval) integer i,n c ********** double complex xval,yval c yval = xval do 20 i = 1, n yval = yval + xval 20 continue return c end fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_free.f90000066400000000000000000000041321450400537300265220ustar00rootroot00000000000000MODULE test_free USE, INTRINSIC :: iso_fortran_env, ONLY: error_unit IMPLICIT NONE ! ą TYPE :: scale_type REAL(8) :: val = 1.d0 END TYPE scale_type ! TYPE :: vector INTEGER(4) :: n REAL(8), POINTER, DIMENSION(:) :: v => NULL() PROCEDURE(fort_wrap), NOPASS, POINTER :: bound_nopass => NULL() CONTAINS PROCEDURE :: create => vector_create !< Doc 1 PROCEDURE :: norm => vector_norm !< Doc 2 PROCEDURE, PASS(self) :: bound_pass => bound_pass END TYPE vector ! 
TYPE, EXTENDS(vector) :: scaled_vector TYPE(scale_type) :: scale CONTAINS PROCEDURE :: set_scale => scaled_vector_set !< PROCEDURE :: norm => scaled_vector_norm !< Doc 3 END TYPE scaled_vector ! INTERFACE SUBROUTINE fort_wrap(a,b) INTEGER(4), INTENT(in) :: a REAL(8), INTENT(out) :: b END SUBROUTINE fort_wrap END INTERFACE ! LOGICAL :: module_variable CONTAINS !> Doc 4 SUBROUTINE vector_create(self, n) CLASS(vector), INTENT(inout) :: self INTEGER(4), INTENT(in) :: n !! Doc 5 self%n=n ALLOCATE(self%v(n)) self%v=0.d0 END SUBROUTINE vector_create !> Doc 6 FUNCTION vector_norm(self) RESULT(norm) CLASS(vector), INTENT(in) :: self REAL(8) :: norm norm = SQRT(DOT_PRODUCT(self%v,self%v)) END FUNCTION vector_norm !> Doc 7 SUBROUTINE scaled_vector_set(self, scale) CLASS(scaled_vector), INTENT(inout) :: self REAL(8), INTENT(in) :: scale !< Doc 8 self%scale%val = scale END SUBROUTINE scaled_vector_set !> FUNCTION scaled_vector_norm(self) RESULT(norm) CLASS(scaled_vector), INTENT(in) :: self REAL(8) :: norm norm = self%scale%val*SQRT(DOT_PRODUCT(self%v,self%v)) END FUNCTION scaled_vector_norm ! PURE REAL(8) FUNCTION unscaled_norm(self) CLASS(scaled_vector), INTENT(in) :: self ! REAL(8) :: unscaled_norm unscaled_norm = SQRT(DOT_PRODUCT(self%v,self%v)) END FUNCTION unscaled_norm ! SUBROUTINE test_sig_Sub(arg1,arg2,opt1,opt2,opt3) INTEGER, INTENT(in) :: arg1,arg2 INTEGER, OPTIONAL, INTENT(in) :: opt1,opt2,opt3 END SUBROUTINE test_sig_Sub ! SUBROUTINE bound_pass(arg1, self) INTEGER(4), INTENT(in) :: arg1 !< Doc 9 !! Doc 10 !> Doc 11 !! Doc 12 CLASS(vector), INTENT(inout) :: self self%n = arg1 END SUBROUTINE bound_pass END MODULE test_free fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_generic.f90000066400000000000000000000023261450400537300272200ustar00rootroot00000000000000MODULE test_generic TYPE :: test_gen_type CONTAINS GENERIC :: my_gen => gen1,gen2 GENERIC :: ASSIGNMENT(=) => assign1, assign2 GENERIC :: OPERATOR(+) => plusop1, plusop2 GENERIC, PRIVATE :: my_gen2 => gen3, gen4 END TYPE test_gen_type CONTAINS ! SUBROUTINE gen1(self,a,b) CLASS(test_gen_type) :: self REAL(8), INTENT(IN) :: a REAL(8), INTENT(OUT) :: b CALL self% END SUBROUTINE gen1 ! SUBROUTINE gen2(self,a,b,c) CLASS(test_gen_type) :: self REAL(8), INTENT(IN) :: a,c REAL(8), INTENT(OUT) :: b END SUBROUTINE gen2 ! SUBROUTINE assign1(outvar,invar) REAL(8) :: outvar CLASS(test_gen_type) :: invar END SUBROUTINE assign1 ! SUBROUTINE assign2(outvar,invar) LOGICAL :: outvar CLASS(test_gen_type) :: invar END SUBROUTINE assign2 ! REAL(8) FUNCTION plusop1(var1,var2) REAL(8) :: var1 CLASS(test_gen_type) :: var2 END FUNCTION plusop1 ! LOGICAL FUNCTION plusop2(var1,var2) LOGICAL :: var1 CLASS(test_gen_type) :: var2 END FUNCTION plusop2 ! SUBROUTINE gen3(self,a,b) CLASS(test_gen_type) :: self REAL(8), INTENT(IN) :: a REAL(8), INTENT(OUT) :: b CALL self% END SUBROUTINE gen3 ! SUBROUTINE gen4(self,a,b,c) CLASS(test_gen_type) :: self REAL(8), INTENT(IN) :: a,c REAL(8), INTENT(OUT) :: b END SUBROUTINE gen4 END MODULE test_generic fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_inc2.f90000066400000000000000000000000441450400537300264320ustar00rootroot00000000000000INTEGER :: val2 REAL :: cross val1 fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_inherit.f90000066400000000000000000000004471450400537300272500ustar00rootroot00000000000000MODULE test_inherit USE :: test_free, ONLY: scaled_vector IMPLICIT NONE ! 
TYPE, EXTENDS(scaled_vector) :: myvec REAL(8) :: x END TYPE myvec CONTAINS SUBROUTINE inherit_completion(self) TYPE(myvec), INTENT(INOUT) :: self self%scale%val END SUBROUTINE inherit_completion END MODULE test_inherit fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_rename.F90000066400000000000000000000005471450400537300270160ustar00rootroot00000000000000module rename_mod1 real(8) :: var1 end module rename_mod1 ! module rename_mod2 use rename_mod1, only: renamed_var1 => var1 integer :: originalname end module rename_mod2 ! subroutine test_rename_sub() use rename_mod2, only : localname => originalname, renamed_var2 => renamed_var1 implicit none ! localname = 4 renamed_var2 = 4 end subroutine test_rename_sub fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_select.f90000066400000000000000000000011061450400537300270560ustar00rootroot00000000000000MODULE test_select IMPLICIT NONE ! TYPE :: parent INTEGER(4) :: n END TYPE parent ! TYPE, EXTENDS(parent) :: child1 REAL(8) :: a END TYPE child1 ! TYPE, EXTENDS(parent) :: child2 COMPLEX(8) :: a END TYPE child2 CONTAINS ! SUBROUTINE test_select_sub(self) CLASS(parent), INTENT(inout) :: self ! Select statement with binding SELECT TYPE(this=>self) TYPE IS(child1) this%a CLASS IS(child2) this%a CLASS DEFAULT this%n END SELECT ! Select statement without binding SELECT TYPE(self) TYPE IS(child1) self%a END SELECT END SUBROUTINE test_select_sub END MODULE test_select fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_submod.F90000066400000000000000000000023751450400537300270410ustar00rootroot00000000000000module points type :: point real :: x, y end type point interface module function point_dist(a, b) result(distance) type(point), intent(in) :: a, b real :: distance end function point_dist module logical function is_point_equal_a(a, b) type(point), intent(in) :: a, b end function is_point_equal_a module subroutine is_point_equal_sub(a, b, test) type(point), intent(in) :: a, b logical, intent(out) :: test end subroutine is_point_equal_sub end interface contains logical function is_point_equal(a, b) type(point), intent(in) :: a, b is_point_equal = merge(.true., .false., a%x == b%x .and. a%y == b%y) end function is_point_equal end module points #define __PARENT_MOD__ points submodule (__PARENT_MOD__) points_a contains module function point_dist(a, b) type(point), intent(in) :: a, b distance = sqrt((a%x - b%x)**2 + (a%y - b%y)**2) end function point_dist module procedure is_point_equal_a type(point) :: c is_point_equal_a = merge(.true., .false., a%x == b%x .and. 
a%y == b%y) end procedure is_point_equal_a module procedure is_point_equal_sub type(point) :: c test = is_point_equal(a,b) end procedure is_point_equal_sub end submodule points_a fortran-language-server-2.13.0+dfsg.1/test/test_source/subdir/test_vis.f90000066400000000000000000000003031450400537300263760ustar00rootroot00000000000000module test_vis_mod implicit none private type :: some_type end type some_type integer :: some_var public some_var contains subroutine some_sub end subroutine some_sub end module test_vis_mod fortran-language-server-2.13.0+dfsg.1/test/test_source/test.f90000066400000000000000000000001761450400537300242350ustar00rootroot00000000000000PROGRAM myprog USE test_free, ONLY: scaled_vector TYPE(scaled_vector) :: myvec CALL myvec%set_scale(scale) END PROGRAM myprog fortran-language-server-2.13.0+dfsg.1/test/test_source/test_block.f08000066400000000000000000000007661450400537300254130ustar00rootroot00000000000000SUBROUTINE block_sub() INTEGER :: res0,i,j,end_var res0 = 0 add1 : BLOCK INTEGER :: res1 res1 = res0 + 1 BLOCK INTEGER :: res2,blockVar res2 = res1 + 1 blockVar = res0 + 1 END BLOCK END BLOCK add1 ! outer: DO i=1,10 DO j=1,i res0=res0+1 END DO END DO outer ! IF(res0>10)THEN i=res0 END IF ! ASSOCIATE( x=>1 ) i=i+x END ASSOCIATE ! Test variables/labels starting with "end" end_var= 1 end_label: DO i=1,3 end_var = end_var + i END DO end_label END SUBROUTINE block_sub fortran-language-server-2.13.0+dfsg.1/test/test_source/test_diagnostic_int.f90000066400000000000000000000015551450400537300273150ustar00rootroot00000000000000module test_int implicit none contains subroutine foo(f, arg2) interface function f(x) real, intent(in) :: x real :: f end function end interface integer, intent(in) :: arg2 real :: y y = 1. print*, f(y) end subroutine foo function foo2(f, g, h) result(arg3) interface function f(x) result(z) real, intent(in) :: x real :: z end function function g(x) result(z) real, intent(in) :: x real :: z end function end interface interface function h(x) result(z) real, intent(in) :: x real :: z end function h end interface real :: y real :: arg3 y = 1. arg3 = f(g(h(y))) end function foo2 end module test_int fortran-language-server-2.13.0+dfsg.1/test/test_source/test_import.f90000066400000000000000000000006351450400537300256270ustar00rootroot00000000000000module mymod implicit none private public mytype, mytype2 integer, public :: int1, int2, int3, int4, int5 type :: mytype integer :: comp end type mytype type :: mytype2 integer :: comp end type mytype2 interface subroutine sub() import int1 import mytype, int2 type(mytype) :: some end subroutine sub end interface end module mymod fortran-language-server-2.13.0+dfsg.1/test/test_source/test_inc.f90000066400000000000000000000002431450400537300250610ustar00rootroot00000000000000MODULE test_mod include "subdir/test_inc2.f90" REAL(8) :: val1 CONTAINS SUBROUTINE test_sub val2 END SUBROUTINE test_sub include 'mpi.f' END MODULE test_mod fortran-language-server-2.13.0+dfsg.1/test/test_source/test_nonintrinsic.f90000066400000000000000000000003661450400537300270330ustar00rootroot00000000000000module test_nonint_mod private integer, parameter, public :: DP = kind(0.0D0) end module test_nonint_mod program nonint use, non_intrinsic :: test_nonint_mod, only : DP implicit none real(DP) :: x x = 0.0_DP end program nonint fortran-language-server-2.13.0+dfsg.1/test/test_source/test_prog.f08000066400000000000000000000013541450400537300252620ustar00rootroot00000000000000PROGRAM test_program ! 
Here is a commonly included unicode character "–" USE test_free, ONLY: vector, scaled_vector, module_variable, test_sig_sub IMPLICIT NONE ! CHARACTER(LEN=*) :: test_str1 = "i2.2,':',i2.2", test_str2 = 'i2.2,":",i2.2' INTEGER(4) :: n,a,b,c,d REAL(8) :: x,y COMPLEX(8) :: xc,yc TYPE(vector) :: loc_vector TYPE(scaled_vector) :: stretch_vector ! y = myfun(n,x) CALL glob_sub(n,xc,yc) ! CALL loc_vector%create(n) x = loc_vector%norm() CALL loc_vector%bound_nopass(a,x) CALL loc_vector%bound_pass(n) ! CALL stretch_vector%create(n) CALL stretch_vector%set_scale(loc_vector%norm(self)) x = stretch_vector%norm() y = stretch_vector%scale%val ! CALL test_sig_Sub(a,b,opt2=c,opt3=d) PRINT*, module_variable END PROGRAM test_program fortran-language-server-2.13.0+dfsg.1/test/test_source/test_submodule.f90000066400000000000000000000003071450400537300263100ustar00rootroot00000000000000submodule( foo_module ) submodule1 implicit none contains module procedure foo1 WRITE(*,"(A)") "testing :: "// trim(a) // "::"// trim(b) end procedure foo1 end submodule submodule1 fortran-language-server-2.13.0+dfsg.1/test/test_source/wrong_syntax.json000066400000000000000000000000331450400537300263630ustar00rootroot00000000000000{ "source_dirs", "s" }