pax_global_header00006660000000000000000000000064141556762250014527gustar00rootroot0000000000000052 comment=7e4359356931255a74a82c4a4c1a2e802e2a1bcb python-lsp-server-1.3.3/000077500000000000000000000000001415567622500151545ustar00rootroot00000000000000python-lsp-server-1.3.3/.coveragerc000066400000000000000000000000371415567622500172750ustar00rootroot00000000000000[run] omit = pylsp/_version.py python-lsp-server-1.3.3/.gitattributes000066400000000000000000000000371415567622500200470ustar00rootroot00000000000000pylsp/_version.py export-subst python-lsp-server-1.3.3/.github/000077500000000000000000000000001415567622500165145ustar00rootroot00000000000000python-lsp-server-1.3.3/.github/workflows/000077500000000000000000000000001415567622500205515ustar00rootroot00000000000000python-lsp-server-1.3.3/.github/workflows/release.yml000066400000000000000000000023141415567622500227140ustar00rootroot00000000000000name: PyLS Release on: release: types: - created jobs: build: name: Linux Py${{ matrix.PYTHON_VERSION }} runs-on: ubuntu-latest env: CI: 'true' OS: 'linux' PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} strategy: fail-fast: false matrix: PYTHON_VERSION: ['3.8'] timeout-minutes: 10 steps: - uses: actions/cache@v1 with: path: ~/.cache/pip key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - uses: actions/checkout@v2 with: fetch-depth: 0 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - run: python -m pip install --upgrade pip setuptools wheel twine - name: Build and publish python-language-server env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PYLS_TOKEN }} run: | python setup.py bdist_wheel --universal python setup.py sdist python -m twine check dist/* python -m twine upload dist/* python-lsp-server-1.3.3/.github/workflows/static.yml000066400000000000000000000024171415567622500225670ustar00rootroot00000000000000name: Static code analysis on: push: branches: - develop pull_request: branches: - '*' jobs: build: name: Static code analysis runs-on: ubuntu-latest env: CI: 'true' OS: 'linux' timeout-minutes: 2 steps: - uses: actions/cache@v1 with: path: ~/.cache/pip key: static-pip-${{ hashFiles('setup.py') }} restore-keys: static-pip- - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: # TODO: check with Python 3, but need to fix the # errors first python-version: '3.6' architecture: 'x64' - run: python -m pip install --upgrade pip setuptools jsonschema - run: pip install -e .[pylint,pycodestyle,pyflakes] - name: Pylint checks run: pylint pylsp test - name: Code style checks run: pycodestyle pylsp test - name: Pyflakes checks run: pyflakes pylsp test - name: Validate JSON schema run: jsonschema pylsp/config/schema.json - name: Ensure JSON schema and Markdown docs are in sync run: | python scripts/jsonschema2md.py pylsp/config/schema.json EXPECTED_CONFIGURATION.md diff EXPECTED_CONFIGURATION.md CONFIGURATION.md python-lsp-server-1.3.3/.github/workflows/test-linux.yml000066400000000000000000000025521415567622500234140ustar00rootroot00000000000000name: Linux tests on: push: branches: - develop pull_request: branches: - '*' jobs: build: name: Linux Py${{ matrix.PYTHON_VERSION }} runs-on: ubuntu-latest env: CI: 'true' OS: 'linux' PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} strategy: fail-fast: false matrix: PYTHON_VERSION: ['3.9', '3.8', '3.7', '3.6'] timeout-minutes: 10 steps: - uses: actions/cache@v1 with: path: 
~/.cache/pip key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - name: Create Jedi environment for testing if: matrix.PYTHON_VERSION != '2.7' run: | python3 -m venv /tmp/pyenv /tmp/pyenv/bin/python -m pip install loghub - run: python -m pip install --upgrade pip setuptools - run: pip install -e .[all,test] - name: Show test environment run: pip list - run: pytest -v test/ # Enable this if SSH debugging is required # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 # if: ${{ failure() }} python-lsp-server-1.3.3/.github/workflows/test-mac.yml000066400000000000000000000025551415567622500230200ustar00rootroot00000000000000name: Mac tests on: push: branches: - develop pull_request: branches: - '*' jobs: build: name: Mac Py${{ matrix.PYTHON_VERSION }} runs-on: macos-latest env: CI: 'true' OS: 'macos' PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} strategy: fail-fast: false matrix: PYTHON_VERSION: ['3.9', '3.8', '3.7', '3.6'] timeout-minutes: 10 steps: - uses: actions/cache@v1 with: path: ~/Library/Caches/pip key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - name: Create Jedi environment for testing if: matrix.PYTHON_VERSION != '2.7' run: | python3 -m venv /tmp/pyenv /tmp/pyenv/bin/python -m pip install loghub - run: python -m pip install --upgrade pip setuptools - run: pip install -e .[all,test] - name: Show test environment run: pip list - run: pytest -v test/ # Enable this if SSH debugging is required # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 # if: ${{ failure() }} python-lsp-server-1.3.3/.github/workflows/test-win.yml000066400000000000000000000020211415567622500230410ustar00rootroot00000000000000name: Windows tests on: push: branches: - develop pull_request: branches: - '*' jobs: build: name: Win Py${{ matrix.PYTHON_VERSION }} runs-on: windows-latest env: CI: 'true' OS: 'win' PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} strategy: fail-fast: false matrix: PYTHON_VERSION: ['3.9', '3.8', '3.7', '3.6'] timeout-minutes: 10 steps: - uses: actions/cache@v1 with: path: ~\AppData\Local\pip\Cache key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - run: python -m pip install --upgrade pip setuptools - run: pip install -e .[all,test] - name: Show test environment run: pip list - run: pytest -v test/ python-lsp-server-1.3.3/.gitignore000066400000000000000000000023661415567622500171530ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # Mypy cache .mypy_cache/ # IntelliJ *.iml *.ipr *.iws .idea/ out/ # C extensions *.so # Distribution / packaging .Python env/ env3/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ pytest.xml .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # IPython Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # dotenv .env # virtualenv venv/ ENV/ # Spyder project settings .spyderproject # Rope project settings .ropeproject # JavaScript **/*.vscode/ # vim *.sw[mnopqrs] # Idea .idea/ # Merge orig files *.orig # Special files .DS_Store *.temp python-lsp-server-1.3.3/.policy.yml000066400000000000000000000052351415567622500172610ustar00rootroot00000000000000# Excavator auto-updates this file. Please contribute improvements to the central template. policy: approval: - or: - one admin or contributor has approved - two admins have approved - changelog only and contributor approval - fixing excavator - excavator only touched baseline, circle, gradle files, godel files, docker-compose-rule config or versions.props - excavator only touched config files - bots updated package.json and lock files disapproval: requires: organizations: [ "palantir" ] approval_rules: - name: one admin or contributor has approved options: allow_contributor: true requires: count: 1 admins: true - name: two admins have approved options: allow_contributor: true requires: count: 2 admins: true - name: changelog only and contributor approval options: allow_contributor: true requires: count: 1 admins: true if: only_changed_files: paths: - "changelog/@unreleased/.*\\.yml" - name: fixing excavator options: allow_contributor: true requires: count: 1 admins: true if: has_author_in: users: [ "svc-excavator-bot" ] - name: excavator only touched baseline, circle, gradle files, godel files, docker-compose-rule config or versions.props requires: count: 0 if: has_author_in: users: [ "svc-excavator-bot" ] only_changed_files: # product-dependencies.lock should never go here, to force review of all product (SLS) dependency changes # this way excavator cannot change the deployability of a service or product via auto-merge paths: - "changelog/@unreleased/.*\\.yml" - "^\\.baseline/.*$" - "^\\.circleci/.*$" - "^\\.docker-compose-rule\\.yml$" - "^.*gradle$" - "^gradle/wrapper/.*" - "^gradlew$" - "^gradlew.bat$" - "^gradle.properties$" - "^settings.gradle$" - "^godelw$" - "^godel/config/godel.properties$" - "^versions.props$" - "^versions.lock$" - name: excavator only touched config files requires: count: 0 if: has_author_in: users: [ "svc-excavator-bot" ] only_changed_files: paths: - "^\\..*.yml$" - "^\\.github/.*$" - name: bots updated package.json and lock files requires: count: 0 if: has_author_in: users: - "svc-excavator-bot" - "dependabot[bot]" only_changed_files: paths: - "^.*yarn.lock$" - "^.*package.json$" python-lsp-server-1.3.3/.pylintrc000066400000000000000000000006751415567622500170310ustar00rootroot00000000000000[FORMAT] max-line-length = 120 [MESSAGES CONTROL] enable = useless-suppression disable = duplicate-code, invalid-name, fixme, missing-docstring, protected-access, too-few-public-methods, too-many-arguments, too-many-instance-attributes, import-error, consider-using-f-string, logging-fstring-interpolation [REPORTS] reports = no [TYPECHECK] generated-members = pylsp_* cache_clear 
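# The "Static code analysis" workflow in .github/workflows/static.yml runs
# `pylint pylsp test` with this configuration, alongside separate pycodestyle
# and pyflakes checks.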
python-lsp-server-1.3.3/CHANGELOG.md000066400000000000000000000311631415567622500167710ustar00rootroot00000000000000# History of changes ## Version 1.3.3 (2021-12-13) ### Issues Closed * [Issue 123](https://github.com/python-lsp/python-lsp-server/issues/123) - Resolving completion triggers an error ([PR 125](https://github.com/python-lsp/python-lsp-server/pull/125) by [@ccordoba12](https://github.com/ccordoba12)) In this release 1 issue was closed. ### Pull Requests Merged * [PR 133](https://github.com/python-lsp/python-lsp-server/pull/133) - Fix test_syntax_error_pylint_py3 for Python 3.10, by [@ArchangeGabriel](https://github.com/ArchangeGabriel) * [PR 125](https://github.com/python-lsp/python-lsp-server/pull/125) - Fix error when resolving completion items for Rope, by [@ccordoba12](https://github.com/ccordoba12) ([123](https://github.com/python-lsp/python-lsp-server/issues/123)) In this release 2 pull requests were closed. ---- ## Version 1.3.2 (2021-11-25) ### Issues Closed * [Issue 121](https://github.com/python-lsp/python-lsp-server/issues/121) - Error on triggering completions in import context ([PR 122](https://github.com/python-lsp/python-lsp-server/pull/122) by [@ccordoba12](https://github.com/ccordoba12)) In this release 1 issue was closed. ### Pull Requests Merged * [PR 122](https://github.com/python-lsp/python-lsp-server/pull/122) - Fix formatting a log message, by [@ccordoba12](https://github.com/ccordoba12) ([121](https://github.com/python-lsp/python-lsp-server/issues/121)) In this release 1 pull request was closed. ---- ## Version 1.3.1 (2021-11-22) ### Pull Requests Merged * [PR 118](https://github.com/python-lsp/python-lsp-server/pull/118) - Fix tests for Jedi 0.18.1, by [@ccordoba12](https://github.com/ccordoba12) In this release 1 pull request was closed. ---- ## Version 1.3.0 (2021-11-22) ### New features * Create a cache for code snippets to speed up completions. ### Important changes * Option `jedi_completion.resolve_at_most_labels` was renamed to `jedi_completion.resolve_at_most` because now it controls how many labels and snippets will be resolved per request. * Option `jedi_completion.cache_labels_for` was renamed to `jedi_completion.cache_for` because now it controls the modules for which labels and snippets should be cached. * Update requirements on Pylint, flake8, pycodestyle, pyflakes and autopep8. ### Pull Requests Merged * [PR 112](https://github.com/python-lsp/python-lsp-server/pull/112) - Fix another test with Python 3.10, by [@jspricke](https://github.com/jspricke) * [PR 111](https://github.com/python-lsp/python-lsp-server/pull/111) - Use sys.executable in flake8 plugin to make tests pass on systems that don't provide a python link, by [@0-wiz-0](https://github.com/0-wiz-0) * [PR 108](https://github.com/python-lsp/python-lsp-server/pull/108) - Fix test with Python 3.10, by [@jspricke](https://github.com/jspricke) * [PR 102](https://github.com/python-lsp/python-lsp-server/pull/102) - Update requirements on flake8 and its dependencies, by [@ccordoba12](https://github.com/ccordoba12) * [PR 99](https://github.com/python-lsp/python-lsp-server/pull/99) - Adjust readme to pylsp-mypy rename, by [@chaoflow](https://github.com/chaoflow) * [PR 94](https://github.com/python-lsp/python-lsp-server/pull/94) - Unpin Pylint, by [@bnavigator](https://github.com/bnavigator) * [PR 83](https://github.com/python-lsp/python-lsp-server/pull/83) - Create a cache for snippets, by [@ccordoba12](https://github.com/ccordoba12) In this release 7 pull requests were closed. 
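As a migration aid for the renamed options above, the sketch below shows the new keys in a client settings payload. It is illustrative only: key names and defaults come from `CONFIGURATION.md`, and the nesting under `pylsp` is an assumption about the client's `workspace/didChangeConfiguration` shape.

```python
# Illustrative sketch only -- the old key names are noted in comments.
settings = {
    "pylsp": {
        "plugins": {
            "jedi_completion": {
                "resolve_at_most": 25,             # previously resolve_at_most_labels
                "cache_for": ["pandas", "numpy"],  # previously cache_labels_for
            }
        }
    }
}
```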
---- ## Version 1.2.4 (2021-10-11) ### Pull Requests Merged * [PR 96](https://github.com/python-lsp/python-lsp-server/pull/96) - Pin flake8 to be less than version 4, by [@ccordoba12](https://github.com/ccordoba12) In this release 1 pull request was closed. ---- ## Version 1.2.3 (2021-10-04) ### Pull Requests Merged * [PR 93](https://github.com/python-lsp/python-lsp-server/pull/93) - Document how to write python-lsp-server plugin + add pylsp-rope to Readme, by [@lieryan](https://github.com/lieryan) * [PR 88](https://github.com/python-lsp/python-lsp-server/pull/88) - Fix pylint test without pylsp installed, by [@jspricke](https://github.com/jspricke) In this release 2 pull requests were closed. ---- ## Version 1.2.2 (2021-09-01) ### Pull Requests Merged * [PR 78](https://github.com/python-lsp/python-lsp-server/pull/78) - Require Pylint less than 2.10, by [@ccordoba12](https://github.com/ccordoba12) * [PR 71](https://github.com/python-lsp/python-lsp-server/pull/71) - Improve how we determine if a symbol was imported from other libraries, by [@ccordoba12](https://github.com/ccordoba12) * [PR 67](https://github.com/python-lsp/python-lsp-server/pull/67) - Recognize the "I" pylint stdio message category, by [@Wuestengecko](https://github.com/Wuestengecko) * [PR 66](https://github.com/python-lsp/python-lsp-server/pull/66) - Remove temp file and ignore that kind of files, by [@ccordoba12](https://github.com/ccordoba12) In this release 4 pull requests were closed. ---- ## Version 1.2.1 (2021-08-04) ### Issues Closed * [Issue 65](https://github.com/python-lsp/python-lsp-server/issues/65) - Release v1.2.1 In this release 1 issue was closed. ### Pull Requests Merged * [PR 64](https://github.com/python-lsp/python-lsp-server/pull/64) - Catch errors when getting docstrings on _resolve_completion, by [@ccordoba12](https://github.com/ccordoba12) In this release 1 pull request was closed. ---- ## Version 1.2.0 (2021-08-01) ### New features * Implement completion item resolve requests for faster completions. * Initialize workspaces from the initialize request. ### Issues Closed * [Issue 55](https://github.com/python-lsp/python-lsp-server/issues/55) - Is emanspeaks/pyls-flake8 the preferred plugin for flake8 linting? ([PR 57](https://github.com/python-lsp/python-lsp-server/pull/57) by [@GerardoGR](https://github.com/GerardoGR)) * [Issue 48](https://github.com/python-lsp/python-lsp-server/issues/48) - Workspace folders not initialized properly ([PR 49](https://github.com/python-lsp/python-lsp-server/pull/49) by [@rchl](https://github.com/rchl)) * [Issue 24](https://github.com/python-lsp/python-lsp-server/issues/24) - Where to put structured documentation now? ([PR 51](https://github.com/python-lsp/python-lsp-server/pull/51) by [@krassowski](https://github.com/krassowski)) In this release 3 issues were closed. 
### Pull Requests Merged * [PR 62](https://github.com/python-lsp/python-lsp-server/pull/62) - Make use_document_path equal to True when getting definitions and hovers, by [@ccordoba12](https://github.com/ccordoba12) * [PR 59](https://github.com/python-lsp/python-lsp-server/pull/59) - Validate if shared_data is not None when resolving completion items, by [@ccordoba12](https://github.com/ccordoba12) * [PR 58](https://github.com/python-lsp/python-lsp-server/pull/58) - Do not call `get_signatures()` if snippets are disabled, by [@krassowski](https://github.com/krassowski) * [PR 57](https://github.com/python-lsp/python-lsp-server/pull/57) - Document internal flake8 plugin schema and configuration, by [@GerardoGR](https://github.com/GerardoGR) ([55](https://github.com/python-lsp/python-lsp-server/issues/55)) * [PR 53](https://github.com/python-lsp/python-lsp-server/pull/53) - Fix skipping imported symbols, by [@ccordoba12](https://github.com/ccordoba12) * [PR 51](https://github.com/python-lsp/python-lsp-server/pull/51) - Restore the JSON schema, add human-readable configuration, by [@krassowski](https://github.com/krassowski) ([24](https://github.com/python-lsp/python-lsp-server/issues/24)) * [PR 49](https://github.com/python-lsp/python-lsp-server/pull/49) - Initialize workspaces from the initialize request, by [@rchl](https://github.com/rchl) ([48](https://github.com/python-lsp/python-lsp-server/issues/48)) * [PR 46](https://github.com/python-lsp/python-lsp-server/pull/46) - Improve release instructions, by [@ccordoba12](https://github.com/ccordoba12) * [PR 26](https://github.com/python-lsp/python-lsp-server/pull/26) - Implement cached label resolution and label resolution limit, by [@krassowski](https://github.com/krassowski) * [PR 25](https://github.com/python-lsp/python-lsp-server/pull/25) - Feature/completion item/resolve, by [@krassowski](https://github.com/krassowski) In this release 10 pull requests were closed. ---- ## Version 1.1.0 (2021-06-25) ### New features * Add support for flake8 per-file-ignores * Add --version CLI argument and return version in InitializeResult ### Issues Closed * [Issue 30](https://github.com/python-lsp/python-lsp-server/issues/30) - pylsp_document_symbols raising TypeError from os.path.samefile ([PR 31](https://github.com/python-lsp/python-lsp-server/pull/31) by [@douglasdavis](https://github.com/douglasdavis)) * [Issue 19](https://github.com/python-lsp/python-lsp-server/issues/19) - Linter and tests are failing on due to new "consider-using-with" ([PR 20](https://github.com/python-lsp/python-lsp-server/pull/20) by [@krassowski](https://github.com/krassowski)) In this release 2 issues were closed. 
### Pull Requests Merged * [PR 44](https://github.com/python-lsp/python-lsp-server/pull/44) - Add --version CLI argument and return version in InitializeResult, by [@nemethf](https://github.com/nemethf) * [PR 42](https://github.com/python-lsp/python-lsp-server/pull/42) - Fix local timezone, by [@e-kwsm](https://github.com/e-kwsm) * [PR 38](https://github.com/python-lsp/python-lsp-server/pull/38) - Handling list merge in _utils.merge_dicts()., by [@GaetanLepage](https://github.com/GaetanLepage) * [PR 32](https://github.com/python-lsp/python-lsp-server/pull/32) - PR: Update third-party plugins in README, by [@haplo](https://github.com/haplo) * [PR 31](https://github.com/python-lsp/python-lsp-server/pull/31) - Catch a TypeError from os.path.samefile, by [@douglasdavis](https://github.com/douglasdavis) ([30](https://github.com/python-lsp/python-lsp-server/issues/30)) * [PR 28](https://github.com/python-lsp/python-lsp-server/pull/28) - Add support for flake8 per-file-ignores, by [@brandonwillard](https://github.com/brandonwillard) * [PR 20](https://github.com/python-lsp/python-lsp-server/pull/20) - PR: Address pylint's "consider-using-with" warnings, by [@krassowski](https://github.com/krassowski) ([19](https://github.com/python-lsp/python-lsp-server/issues/19)) * [PR 18](https://github.com/python-lsp/python-lsp-server/pull/18) - Fix Jedi type map (use types offered by modern Jedi), by [@krassowski](https://github.com/krassowski) In this release 8 pull requests were closed. ---- ## Version 1.0.1 (2021-04-22) ### Issues Closed * [Issue 16](https://github.com/python-lsp/python-lsp-server/issues/16) - Release v1.0.1 In this release 1 issue was closed. ### Pull Requests Merged * [PR 15](https://github.com/python-lsp/python-lsp-server/pull/15) - PR: Update pyflakes and pycodestyle dependency versions, by [@andfoy](https://github.com/andfoy) * [PR 14](https://github.com/python-lsp/python-lsp-server/pull/14) - PR: Small fix in Readme, by [@yaegassy](https://github.com/yaegassy) In this release 2 pull requests were closed. ---- ## Version 1.0.0 (2021/04/14) ### Issues Closed * [Issue 13](https://github.com/python-lsp/python-lsp-server/issues/13) - Release v1.0.0 * [Issue 4](https://github.com/python-lsp/python-lsp-server/issues/4) - Transition plan In this release 2 issues were closed. 
### Pull Requests Merged * [PR 12](https://github.com/python-lsp/python-lsp-server/pull/12) - PR: Use python-lsp-jsonrpc instead of python-jsonrpc-server, by [@andfoy](https://github.com/andfoy) * [PR 11](https://github.com/python-lsp/python-lsp-server/pull/11) - PR: Remove references to VSCode in Readme, by [@ccordoba12](https://github.com/ccordoba12) * [PR 10](https://github.com/python-lsp/python-lsp-server/pull/10) - PR: Rename namespace to pylsp and package to python-lsp-server, by [@andfoy](https://github.com/andfoy) * [PR 9](https://github.com/python-lsp/python-lsp-server/pull/9) - TST: accept folding of decorator parameters in Python 3.9, by [@bnavigator](https://github.com/bnavigator) ([8](https://github.com/python-lsp/python-lsp-server/issues/8)) * [PR 7](https://github.com/python-lsp/python-lsp-server/pull/7) - Unpin numpy, by [@bnavigator](https://github.com/bnavigator) * [PR 6](https://github.com/python-lsp/python-lsp-server/pull/6) - Rewrite README from rst to md, by [@xiaoxiae](https://github.com/xiaoxiae) * [PR 5](https://github.com/python-lsp/python-lsp-server/pull/5) - Update README.rst, by [@marimeireles](https://github.com/marimeireles) * [PR 3](https://github.com/python-lsp/python-lsp-server/pull/3) - Fix CI tests by temporarily pinning numpy; update repo paths, by [@krassowski](https://github.com/krassowski) * [PR 2](https://github.com/python-lsp/python-lsp-server/pull/2) - bump jedi compatibility: compare to Path-like object, by [@bnavigator](https://github.com/bnavigator) In this release 9 pull requests were closed. python-lsp-server-1.3.3/CONFIGURATION.md000066400000000000000000000165631415567622500175200ustar00rootroot00000000000000# Python Language Server Configuration This server can be configured using `workspace/didChangeConfiguration` method. Each configuration option is described below: | **Configuration Key** | **Type** | **Description** | **Default** |----|----|----|----| | `pylsp.configurationSources` | `array` of unique `string` items | List of configuration sources to use. | `["pycodestyle"]` | | `pylsp.plugins.flake8.config` | `string` | Path to the config file that will be the authoritative config source. | `null` | | `pylsp.plugins.flake8.enabled` | `boolean` | Enable or disable the plugin. | `false` | | `pylsp.plugins.flake8.exclude` | `array` | List of files or directories to exclude. | `null` | | `pylsp.plugins.flake8.executable` | `string` | Path to the flake8 executable. | `"flake8"` | | `pylsp.plugins.flake8.filename` | `string` | Only check for filenames matching the patterns in this list. | `null` | | `pylsp.plugins.flake8.hangClosing` | `boolean` | Hang closing bracket instead of matching indentation of opening bracket's line. | `null` | | `pylsp.plugins.flake8.ignore` | `array` | List of errors and warnings to ignore (or skip). | `null` | | `pylsp.plugins.flake8.maxLineLength` | `integer` | Maximum allowed line length for the entirety of this run. | `null` | | `pylsp.plugins.flake8.perFileIgnores` | `array` | A pairing of filenames and violation codes that defines which violations to ignore in a particular file, for example: `["file_path.py:W305,W304"]`). | `null` | | `pylsp.plugins.flake8.select` | `array` | List of errors and warnings to enable. | `null` | | `pylsp.plugins.jedi.extra_paths` | `array` | Define extra paths for jedi.Script. | `[]` | | `pylsp.plugins.jedi.env_vars` | `object` | Define environment variables for jedi.Script and Jedi.names. 
| `null` | | `pylsp.plugins.jedi.environment` | `string` | Define environment for jedi.Script and Jedi.names. | `null` | | `pylsp.plugins.jedi_completion.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_completion.include_params` | `boolean` | Auto-completes methods and classes with tabstops for each parameter. | `true` | | `pylsp.plugins.jedi_completion.include_class_objects` | `boolean` | Adds class objects as a separate completion item. | `true` | | `pylsp.plugins.jedi_completion.fuzzy` | `boolean` | Enable fuzzy when requesting autocomplete. | `false` | | `pylsp.plugins.jedi_completion.eager` | `boolean` | Resolve documentation and detail eagerly. | `false` | | `pylsp.plugins.jedi_completion.resolve_at_most` | `number` | How many labels and snippets (at most) should be resolved? | `25` | | `pylsp.plugins.jedi_completion.cache_for` | `array` of `string` items | Modules for which labels and snippets should be cached. | `["pandas", "numpy", "tensorflow", "matplotlib"]` | | `pylsp.plugins.jedi_definition.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_definition.follow_imports` | `boolean` | The goto call will follow imports. | `true` | | `pylsp.plugins.jedi_definition.follow_builtin_imports` | `boolean` | If follow_imports is True will decide if it follow builtin imports. | `true` | | `pylsp.plugins.jedi_hover.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_references.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_signature_help.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_symbols.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_symbols.all_scopes` | `boolean` | If True lists the names of all scopes instead of only the module namespace. | `true` | | `pylsp.plugins.jedi_symbols.include_import_symbols` | `boolean` | If True includes symbols imported from other libraries. | `true` | | `pylsp.plugins.mccabe.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.mccabe.threshold` | `number` | The minimum threshold that triggers warnings about cyclomatic complexity. | `15` | | `pylsp.plugins.preload.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.preload.modules` | `array` of unique `string` items | List of modules to import on startup | `null` | | `pylsp.plugins.pycodestyle.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.pycodestyle.exclude` | `array` of unique `string` items | Exclude files or directories which match these patterns. | `null` | | `pylsp.plugins.pycodestyle.filename` | `array` of unique `string` items | When parsing directories, only check filenames matching these patterns. | `null` | | `pylsp.plugins.pycodestyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | | `pylsp.plugins.pycodestyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `null` | | `pylsp.plugins.pycodestyle.hangClosing` | `boolean` | Hang closing bracket instead of matching indentation of opening bracket's line. | `null` | | `pylsp.plugins.pycodestyle.maxLineLength` | `number` | Set maximum allowed line length. | `null` | | `pylsp.plugins.pydocstyle.enabled` | `boolean` | Enable or disable the plugin. 
| `false` | | `pylsp.plugins.pydocstyle.convention` | `string` | Choose the basic list of checked errors by specifying an existing convention. | `null` | | `pylsp.plugins.pydocstyle.addIgnore` | `array` of unique `string` items | Ignore errors and warnings in addition to the specified convention. | `null` | | `pylsp.plugins.pydocstyle.addSelect` | `array` of unique `string` items | Select errors and warnings in addition to the specified convention. | `null` | | `pylsp.plugins.pydocstyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `null` | | `pylsp.plugins.pydocstyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | | `pylsp.plugins.pydocstyle.match` | `string` | Check only files that exactly match the given regular expression; default is to match files that don't start with 'test_' but end with '.py'. | `"(?!test_).*\\.py"` | | `pylsp.plugins.pydocstyle.matchDir` | `string` | Search only dirs that exactly match the given regular expression; default is to match dirs which do not begin with a dot. | `"[^\\.].*"` | | `pylsp.plugins.pyflakes.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.pylint.enabled` | `boolean` | Enable or disable the plugin. | `false` | | `pylsp.plugins.pylint.args` | `array` of non-unique `string` items | Arguments to pass to pylint. | `null` | | `pylsp.plugins.pylint.executable` | `string` | Executable to run pylint with. Enabling this will run pylint on unsaved files via stdin. Can slow down workflow. Only works with python3. | `null` | | `pylsp.plugins.rope_completion.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.rope_completion.eager` | `boolean` | Resolve documentation and detail eagerly. | `false` | | `pylsp.plugins.yapf.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.rope.extensionModules` | `string` | Builtin and c-extension modules that are allowed to be imported and inspected by rope. | `null` | | `pylsp.rope.ropeFolder` | `array` of unique `string` items | The name of the folder in which rope stores project configurations and data. Pass `null` for not using such a folder at all. | `null` | This documentation was generated from `pylsp/config/schema.json`. Please do not edit this file directly. python-lsp-server-1.3.3/LICENSE000066400000000000000000000021731415567622500161640ustar00rootroot00000000000000The MIT License (MIT) Copyright 2017-2020 Palantir Technologies, Inc. Copyright 2021 Python Language Server Contributors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-lsp-server-1.3.3/MANIFEST.in000066400000000000000000000002001415567622500167020ustar00rootroot00000000000000include README.md include versioneer.py include pylsp/_version.py include LICENSE include .pylintrc recursive-include test *.py python-lsp-server-1.3.3/README.md000066400000000000000000000115351415567622500164400ustar00rootroot00000000000000# Python LSP Server [![image](https://github.com/python-ls/python-ls/workflows/Linux%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Linux+tests%22) [![image](https://github.com/python-ls/python-ls/workflows/Mac%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Mac+tests%22) [![image](https://github.com/python-ls/python-ls/workflows/Windows%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Windows+tests%22) [![image](https://img.shields.io/github/license/python-ls/python-ls.svg)](https://github.com/python-ls/python-ls/blob/master/LICENSE) A Python 3.6+ implementation of the [Language Server Protocol](https://github.com/Microsoft/language-server-protocol). ## Installation The base language server requires [Jedi](https://github.com/davidhalter/jedi) to provide Completions, Definitions, Hover, References, Signature Help, and Symbols: ``` pip install python-lsp-server ``` If the respective dependencies are found, the following optional providers will be enabled: - [Rope](https://github.com/python-rope/rope) for Completions and renaming - [Pyflakes](https://github.com/PyCQA/pyflakes) linter to detect various errors - [McCabe](https://github.com/PyCQA/mccabe) linter for complexity checking - [pycodestyle](https://github.com/PyCQA/pycodestyle) linter for style checking - [pydocstyle](https://github.com/PyCQA/pydocstyle) linter for docstring style checking (disabled by default) - [autopep8](https://github.com/hhatto/autopep8) for code formatting - [YAPF](https://github.com/google/yapf) for code formatting (preferred over autopep8) Optional providers can be installed using the `extras` syntax. To install [YAPF](https://github.com/google/yapf) formatting for example: ``` pip install 'python-lsp-server[yapf]' ``` All optional providers can be installed using: ``` pip install 'python-lsp-server[all]' ``` If you get an error similar to `'install_requires' must be a string or list of strings` then please upgrade setuptools before trying again. ``` pip install -U setuptools ``` ### 3rd Party Plugins Installing these plugins will add extra functionality to the language server: - [pyls-flake8](https://github.com/emanspeaks/pyls-flake8/): Error checking using [flake8](https://flake8.pycqa.org/en/latest/). - [pylsp-mypy](https://github.com/Richardk2n/pylsp-mypy): [MyPy](http://mypy-lang.org/) type checking for Python >=3.6. - [pyls-isort](https://github.com/paradoxxxzero/pyls-isort): code formatting using [isort](https://github.com/PyCQA/isort) (automatic import sorting). - [python-lsp-black](https://github.com/python-lsp/python-lsp-black): code formatting using [Black](https://github.com/psf/black). - [pyls-memestra](https://github.com/QuantStack/pyls-memestra): detecting the use of deprecated APIs. 
- [pylsp-rope](https://github.com/python-rope/pylsp-rope): Extended refactoring capabilities using [Rope](https://github.com/python-rope/rope). Please see the above repositories for examples on how to write plugins for the Python LSP Server. [cookiecutter-pylsp-plugin](https://github.com/python-lsp/cookiecutter-pylsp-plugin) is a [cookiecutter](https://cookiecutter.readthedocs.io/) template for setting up a basic plugin project for python-lsp-server. It documents all the essentials you need to know to kick start your own plugin project. Please file an issue if you require assistance writing a plugin. ## Configuration Configuration is loaded from zero or more configuration sources. Currently implemented are: - pycodestyle: discovered in `~/.config/pycodestyle`, `setup.cfg`, `tox.ini` and `pycodestyle.cfg`. - flake8: discovered in `~/.config/flake8`, `setup.cfg`, `tox.ini` and `flake8.cfg` The default configuration source is pycodestyle. Change the `pylsp.configurationSources` setting to `['flake8']` in order to respect flake8 configuration instead. Overall configuration is computed first from user configuration (in home directory), overridden by configuration passed in by the language client, and then overridden by configuration discovered in the workspace. To enable pydocstyle for linting docstrings add the following setting in your LSP configuration: `"pylsp.plugins.pydocstyle.enabled": true` All configuration options are described in [`CONFIGURATION.md`](https://github.com/python-lsp/python-lsp-server/blob/develop/CONFIGURATION.md). ## LSP Server Features * Auto Completion * Code Linting * Signature Help * Go to definition * Hover * Find References * Document Symbols * Document Formatting * Code folding * Multiple workspaces ## Development To run the test suite: ```sh pip install '.[test]' && pytest ``` After adding configuration options to `schema.json`, refresh the `CONFIGURATION.md` file with ``` python scripts/jsonschema2md.py pylsp/config/schema.json CONFIGURATION.md ``` ## License This project is made available under the MIT License. python-lsp-server-1.3.3/RELEASE.md000066400000000000000000000013371415567622500165620ustar00rootroot00000000000000To release a new version of python-lsp-server: 1. git fetch upstream && git checkout upstream/master 2. Close milestone on GitHub 3. git clean -xfdi 4. Update CHANGELOG.md with `loghub python-lsp/python-lsp-server -m vX.X.X` 5. git add -A && git commit -m "Update Changelog" 6. Update release version in `_version.py` (set release version, remove 'dev0') 7. git add -A && git commit -m "Release vX.X.X" 8. python setup.py sdist 9. python setup.py bdist_wheel 10. twine check dist/* 11. twine upload dist/* 12. git tag -a vX.X.X -m "Release vX.X.X" 13. Update development version in `_version.py` (add 'dev0' and increment minor) 14. git add -A && git commit -m "Back to work" 15. git push upstream develop 16. git push upstream --tags python-lsp-server-1.3.3/pylsp/000077500000000000000000000000001415567622500163235ustar00rootroot00000000000000python-lsp-server-1.3.3/pylsp/__init__.py000066400000000000000000000004701415567622500204350ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
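#
# This module defines the PYLSP plugin namespace and the pluggy
# hookspec/hookimpl markers that third-party plugins use to hook into the
# server. Illustrative sketch of a plugin hook ("my_plugin" is a hypothetical
# name; pylsp_settings is the hook invoked from pylsp/config/config.py):
#
#     from pylsp import hookimpl
#
#     @hookimpl
#     def pylsp_settings(config):
#         return {"plugins": {"my_plugin": {"enabled": True}}}
#
# Plugins are shipped as packages exposing entry points in the "pylsp" group,
# which config.py loads through pluggy.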
import os import pluggy from ._version import __version__ PYLSP = 'pylsp' IS_WIN = os.name == 'nt' hookspec = pluggy.HookspecMarker(PYLSP) hookimpl = pluggy.HookimplMarker(PYLSP) __all__ = [__version__] python-lsp-server-1.3.3/pylsp/__main__.py000066400000000000000000000070161415567622500204210ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import argparse import logging import logging.config import sys import time try: import ujson as json except Exception: # pylint: disable=broad-except import json from .python_lsp import (PythonLSPServer, start_io_lang_server, start_tcp_lang_server) from ._version import __version__ LOG_FORMAT = "%(asctime)s {0} - %(levelname)s - %(name)s - %(message)s".format( time.localtime().tm_zone) def add_arguments(parser): parser.description = "Python Language Server" parser.add_argument( "--tcp", action="store_true", help="Use TCP server instead of stdio" ) parser.add_argument( "--host", default="127.0.0.1", help="Bind to this address" ) parser.add_argument( "--port", type=int, default=2087, help="Bind to this port" ) parser.add_argument( '--check-parent-process', action="store_true", help="Check whether parent process is still alive using os.kill(ppid, 0) " "and auto shut down language server process when parent process is not alive." "Note that this may not work on a Windows machine." ) log_group = parser.add_mutually_exclusive_group() log_group.add_argument( "--log-config", help="Path to a JSON file containing Python logging config." ) log_group.add_argument( "--log-file", help="Redirect logs to the given file instead of writing to stderr." "Has no effect if used with --log-config." ) parser.add_argument( '-v', '--verbose', action='count', default=0, help="Increase verbosity of log output, overrides log config file" ) parser.add_argument( '-V', '--version', action='version', version='%(prog)s v' + __version__ ) def main(): parser = argparse.ArgumentParser() add_arguments(parser) args = parser.parse_args() _configure_logger(args.verbose, args.log_config, args.log_file) if args.tcp: start_tcp_lang_server(args.host, args.port, args.check_parent_process, PythonLSPServer) else: stdin, stdout = _binary_stdio() start_io_lang_server(stdin, stdout, args.check_parent_process, PythonLSPServer) def _binary_stdio(): """Construct binary stdio streams (not text mode). This seems to be different for Window/Unix Python2/3, so going by: https://stackoverflow.com/questions/2850893/reading-binary-data-from-stdin """ stdin, stdout = sys.stdin.buffer, sys.stdout.buffer return stdin, stdout def _configure_logger(verbose=0, log_config=None, log_file=None): root_logger = logging.root if log_config: with open(log_config, 'r', encoding='utf-8') as f: logging.config.dictConfig(json.load(f)) else: formatter = logging.Formatter(LOG_FORMAT) if log_file: log_handler = logging.handlers.RotatingFileHandler( log_file, mode='a', maxBytes=50*1024*1024, backupCount=10, encoding=None, delay=0 ) else: log_handler = logging.StreamHandler() log_handler.setFormatter(formatter) root_logger.addHandler(log_handler) if verbose == 0: level = logging.WARNING elif verbose == 1: level = logging.INFO elif verbose >= 2: level = logging.DEBUG root_logger.setLevel(level) if __name__ == '__main__': main() python-lsp-server-1.3.3/pylsp/_utils.py000066400000000000000000000156671415567622500202130ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
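#
# Shared helpers used across the server: debounce() rate-limits expensive
# handlers, find_parents() walks upwards from a file to locate project config
# files, merge_dicts() performs the recursive settings merge, format_docstring()
# preserves indentation for LSP markdown, and clip_column() /
# position_to_jedi_linecolumn() translate LSP positions into Jedi's 1-based
# line and clipped column form.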
import functools import inspect import logging import os import pathlib import threading import jedi JEDI_VERSION = jedi.__version__ log = logging.getLogger(__name__) def debounce(interval_s, keyed_by=None): """Debounce calls to this function until interval_s seconds have passed.""" def wrapper(func): timers = {} lock = threading.Lock() @functools.wraps(func) def debounced(*args, **kwargs): sig = inspect.signature(func) call_args = sig.bind(*args, **kwargs) key = call_args.arguments[keyed_by] if keyed_by else None def run(): with lock: del timers[key] return func(*args, **kwargs) with lock: old_timer = timers.get(key) if old_timer: old_timer.cancel() timer = threading.Timer(interval_s, run) timers[key] = timer timer.start() return debounced return wrapper def find_parents(root, path, names): """Find files matching the given names relative to the given path. Args: path (str): The file path to start searching up from. names (List[str]): The file/directory names to look for. root (str): The directory at which to stop recursing upwards. Note: The path MUST be within the root. """ if not root: return [] if not os.path.commonprefix((root, path)): log.warning("Path %s not in %s", path, root) return [] # Split the relative by directory, generate all the parent directories, then check each of them. # This avoids running a loop that has different base-cases for unix/windows # e.g. /a/b and /a/b/c/d/e.py -> ['/a/b', 'c', 'd'] dirs = [root] + os.path.relpath(os.path.dirname(path), root).split(os.path.sep) # Search each of /a/b/c, /a/b, /a while dirs: search_dir = os.path.join(*dirs) existing = list(filter(os.path.exists, [os.path.join(search_dir, n) for n in names])) if existing: return existing dirs.pop() # Otherwise nothing return [] def path_to_dot_name(path): """Given a path to a module, derive its dot-separated full name.""" directory = os.path.dirname(path) module_name, _ = os.path.splitext(os.path.basename(path)) full_name = [module_name] while os.path.exists(os.path.join(directory, '__init__.py')): this_directory = os.path.basename(directory) directory = os.path.dirname(directory) full_name = [this_directory] + full_name return '.'.join(full_name) def match_uri_to_workspace(uri, workspaces): if uri is None: return None max_len, chosen_workspace = -1, None path = pathlib.Path(uri).parts for workspace in workspaces: workspace_parts = pathlib.Path(workspace).parts if len(workspace_parts) > len(path): continue match_len = 0 for workspace_part, path_part in zip(workspace_parts, path): if workspace_part == path_part: match_len += 1 if match_len > 0: if match_len > max_len: max_len = match_len chosen_workspace = workspace return chosen_workspace def list_to_string(value): return ",".join(value) if isinstance(value, list) else value def merge_dicts(dict_a, dict_b): """Recursively merge dictionary b into dictionary a. If override_nones is True, then """ def _merge_dicts_(a, b): for key in set(a.keys()).union(b.keys()): if key in a and key in b: if isinstance(a[key], dict) and isinstance(b[key], dict): yield (key, dict(_merge_dicts_(a[key], b[key]))) elif isinstance(a[key], list) and isinstance(b[key], list): yield (key, list(set(a[key] + b[key]))) elif b[key] is not None: yield (key, b[key]) else: yield (key, a[key]) elif key in a: yield (key, a[key]) elif b[key] is not None: yield (key, b[key]) return dict(_merge_dicts_(dict_a, dict_b)) def format_docstring(contents): """Python doc strings come in a number of formats, but LSP wants markdown. 
Until we can find a fast enough way of discovering and parsing each format, we can do a little better by at least preserving indentation. """ contents = contents.replace('\t', '\u00A0' * 4) contents = contents.replace(' ', '\u00A0' * 2) return contents def clip_column(column, lines, line_number): """ Normalise the position as per the LSP that accepts character positions > line length https://microsoft.github.io/language-server-protocol/specification#position """ max_column = len(lines[line_number].rstrip('\r\n')) if len(lines) > line_number else 0 return min(column, max_column) def position_to_jedi_linecolumn(document, position): """ Convert the LSP format 'line', 'character' to Jedi's 'line', 'column' https://microsoft.github.io/language-server-protocol/specification#position """ code_position = {} if position: code_position = {'line': position['line'] + 1, 'column': clip_column(position['character'], document.lines, position['line'])} return code_position if os.name == 'nt': import ctypes kernel32 = ctypes.windll.kernel32 PROCESS_QUERY_INFROMATION = 0x1000 def is_process_alive(pid): """Check whether the process with the given pid is still alive. Running `os.kill()` on Windows always exits the process, so it can't be used to check for an alive process. see: https://docs.python.org/3/library/os.html?highlight=os%20kill#os.kill Hence ctypes is used to check for the process directly via windows API avoiding any other 3rd-party dependency. Args: pid (int): process ID Returns: bool: False if the process is not alive or don't have permission to check, True otherwise. """ process = kernel32.OpenProcess(PROCESS_QUERY_INFROMATION, 0, pid) if process != 0: kernel32.CloseHandle(process) return True return False else: import errno def is_process_alive(pid): """Check whether the process with the given pid is still alive. Args: pid (int): process ID Returns: bool: False if the process is not alive or don't have permission to check, True otherwise. """ if pid < 0: return False try: os.kill(pid, 0) except OSError as e: return e.errno == errno.EPERM else: return True python-lsp-server-1.3.3/pylsp/_version.py000066400000000000000000000003271415567622500205230ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. """PyLSP versioning information.""" VERSION_INFO = (1, 3, 3) __version__ = '.'.join(map(str, VERSION_INFO)) python-lsp-server-1.3.3/pylsp/config/000077500000000000000000000000001415567622500175705ustar00rootroot00000000000000python-lsp-server-1.3.3/pylsp/config/__init__.py000066400000000000000000000001511415567622500216760ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. python-lsp-server-1.3.3/pylsp/config/config.py000066400000000000000000000127601415567622500214150ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
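#
# Config aggregates settings for a workspace from four sources (see
# Config.settings() below): plugin defaults reported through the pylsp_settings
# hook, client settings received via workspace/didChangeConfiguration,
# user-level config files (~/.config/pycodestyle, ~/.config/flake8) and
# project-level config files (setup.cfg, tox.ini, .flake8, pycodestyle.cfg).
# Plugins themselves are discovered through entry points in the "pylsp" group
# and loaded with pluggy.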
# pylint: disable=import-outside-toplevel import logging from functools import lru_cache import pkg_resources import pluggy from pylsp import _utils, hookspecs, uris, PYLSP log = logging.getLogger(__name__) # Sources of config, first source overrides next source DEFAULT_CONFIG_SOURCES = ['pycodestyle'] class Config: def __init__(self, root_uri, init_opts, process_id, capabilities): self._root_path = uris.to_fs_path(root_uri) self._root_uri = root_uri self._init_opts = init_opts self._process_id = process_id self._capabilities = capabilities self._settings = {} self._plugin_settings = {} self._config_sources = {} try: from .flake8_conf import Flake8Config self._config_sources['flake8'] = Flake8Config(self._root_path) except ImportError: pass try: from .pycodestyle_conf import PyCodeStyleConfig self._config_sources['pycodestyle'] = PyCodeStyleConfig(self._root_path) except ImportError: pass self._pm = pluggy.PluginManager(PYLSP) self._pm.trace.root.setwriter(log.debug) self._pm.enable_tracing() self._pm.add_hookspecs(hookspecs) # Pluggy will skip loading a plugin if it throws a DistributionNotFound exception. # However I don't want all plugins to have to catch ImportError and re-throw. So here we'll filter # out any entry points that throw ImportError assuming one or more of their dependencies isn't present. for entry_point in pkg_resources.iter_entry_points(PYLSP): try: entry_point.load() except ImportError as e: log.warning("Failed to load %s entry point '%s': %s", PYLSP, entry_point.name, e) self._pm.set_blocked(entry_point.name) # Load the entry points into pluggy, having blocked any failing ones self._pm.load_setuptools_entrypoints(PYLSP) for name, plugin in self._pm.list_name_plugin(): if plugin is not None: log.info("Loaded pylsp plugin %s from %s", name, plugin) for plugin_conf in self._pm.hook.pylsp_settings(config=self): self._plugin_settings = _utils.merge_dicts(self._plugin_settings, plugin_conf) self._update_disabled_plugins() @property def disabled_plugins(self): return self._disabled_plugins @property def plugin_manager(self): return self._pm @property def init_opts(self): return self._init_opts @property def root_uri(self): return self._root_uri @property def process_id(self): return self._process_id @property def capabilities(self): return self._capabilities @lru_cache(maxsize=32) def settings(self, document_path=None): """Settings are constructed from a few sources: 1. User settings, found in user's home directory 2. Plugin settings, reported by PyLS plugins 3. LSP settings, given to us from didChangeConfiguration 4. Project settings, found in config files in the current project. 
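        Sources applied later in the merge take precedence for the options they
        set, so a value found in a project-level setup.cfg or tox.ini overrides
        the same option from a user-level config file.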
Since this function is nondeterministic, it is important to call settings.cache_clear() when the config is updated """ settings = {} sources = self._settings.get('configurationSources', DEFAULT_CONFIG_SOURCES) # Plugin configuration settings = _utils.merge_dicts(settings, self._plugin_settings) # LSP configuration settings = _utils.merge_dicts(settings, self._settings) # User configuration for source_name in reversed(sources): source = self._config_sources.get(source_name) if not source: continue source_conf = source.user_config() log.debug("Got user config from %s: %s", source.__class__.__name__, source_conf) settings = _utils.merge_dicts(settings, source_conf) # Project configuration for source_name in reversed(sources): source = self._config_sources.get(source_name) if not source: continue source_conf = source.project_config(document_path or self._root_path) log.debug("Got project config from %s: %s", source.__class__.__name__, source_conf) settings = _utils.merge_dicts(settings, source_conf) log.debug("With configuration: %s", settings) return settings def find_parents(self, path, names): root_path = uris.to_fs_path(self._root_uri) return _utils.find_parents(root_path, path, names) def plugin_settings(self, plugin, document_path=None): return self.settings(document_path=document_path).get('plugins', {}).get(plugin, {}) def update(self, settings): """Recursively merge the given settings into the current settings.""" self.settings.cache_clear() self._settings = settings log.info("Updated settings to %s", self._settings) self._update_disabled_plugins() def _update_disabled_plugins(self): # All plugins default to enabled self._disabled_plugins = [ plugin for name, plugin in self.plugin_manager.list_name_plugin() if not self.settings().get('plugins', {}).get(name, {}).get('enabled', True) ] log.info("Disabled plugins: %s", self._disabled_plugins) python-lsp-server-1.3.3/pylsp/config/flake8_conf.py000066400000000000000000000040301415567622500223160ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
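#
# Maps flake8 options found in .flake8, setup.cfg or tox.ini (and in the user's
# ~/.config/flake8 on non-Windows systems) onto the corresponding pylsp plugin
# settings via the OPTIONS table below; list-valued options may be written
# comma- or newline-separated (see _parse_list_opt).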
import logging import os from pylsp._utils import find_parents from .source import ConfigSource log = logging.getLogger(__name__) CONFIG_KEY = 'flake8' PROJECT_CONFIGS = ['.flake8', 'setup.cfg', 'tox.ini'] OPTIONS = [ # mccabe ('max-complexity', 'plugins.mccabe.threshold', int), # pycodestyle ('exclude', 'plugins.pycodestyle.exclude', list), ('filename', 'plugins.pycodestyle.filename', list), ('hang-closing', 'plugins.pycodestyle.hangClosing', bool), ('ignore', 'plugins.pycodestyle.ignore', list), ('max-line-length', 'plugins.pycodestyle.maxLineLength', int), ('select', 'plugins.pycodestyle.select', list), # flake8 ('exclude', 'plugins.flake8.exclude', list), ('filename', 'plugins.flake8.filename', list), ('hang-closing', 'plugins.flake8.hangClosing', bool), ('ignore', 'plugins.flake8.ignore', list), ('max-line-length', 'plugins.flake8.maxLineLength', int), ('select', 'plugins.flake8.select', list), ('per-file-ignores', 'plugins.flake8.perFileIgnores', list), ] class Flake8Config(ConfigSource): """Parse flake8 configurations.""" def user_config(self): config_file = self._user_config_file() config = self.read_config_from_files([config_file]) return self.parse_config(config, CONFIG_KEY, OPTIONS) def _user_config_file(self): if self.is_windows: return os.path.expanduser('~\\.flake8') return os.path.join(self.xdg_home, 'flake8') def project_config(self, document_path): files = find_parents(self.root_path, document_path, PROJECT_CONFIGS) config = self.read_config_from_files(files) return self.parse_config(config, CONFIG_KEY, OPTIONS) @classmethod def _parse_list_opt(cls, string): if string.startswith("\n"): return [s.strip().rstrip(",") for s in string.split("\n") if s.strip()] return [s.strip() for s in string.split(",") if s.strip()] python-lsp-server-1.3.3/pylsp/config/pycodestyle_conf.py000066400000000000000000000022701415567622500235140ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import pycodestyle from pylsp._utils import find_parents from .source import ConfigSource CONFIG_KEY = 'pycodestyle' USER_CONFIGS = [pycodestyle.USER_CONFIG] if pycodestyle.USER_CONFIG else [] PROJECT_CONFIGS = ['pycodestyle.cfg', 'setup.cfg', 'tox.ini'] OPTIONS = [ ('exclude', 'plugins.pycodestyle.exclude', list), ('filename', 'plugins.pycodestyle.filename', list), ('hang-closing', 'plugins.pycodestyle.hangClosing', bool), ('ignore', 'plugins.pycodestyle.ignore', list), ('max-line-length', 'plugins.pycodestyle.maxLineLength', int), ('select', 'plugins.pycodestyle.select', list), ('aggressive', 'plugins.pycodestyle.aggressive', int), ] class PyCodeStyleConfig(ConfigSource): def user_config(self): config = self.read_config_from_files(USER_CONFIGS) return self.parse_config(config, CONFIG_KEY, OPTIONS) def project_config(self, document_path): files = find_parents(self.root_path, document_path, PROJECT_CONFIGS) config = self.read_config_from_files(files) return self.parse_config(config, CONFIG_KEY, OPTIONS) python-lsp-server-1.3.3/pylsp/config/schema.json000066400000000000000000000265051415567622500217330ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Python Language Server Configuration", "description": "This server can be configured using `workspace/didChangeConfiguration` method. 
Each configuration option is described below:", "type": "object", "properties": { "pylsp.configurationSources": { "type": "array", "default": ["pycodestyle"], "description": "List of configuration sources to use.", "items": { "type": "string", "enum": ["pycodestyle", "pyflakes"] }, "uniqueItems": true }, "pylsp.plugins.flake8.config": { "type": "string", "default": null, "description": "Path to the config file that will be the authoritative config source." }, "pylsp.plugins.flake8.enabled": { "type": "boolean", "default": false, "description": "Enable or disable the plugin." }, "pylsp.plugins.flake8.exclude": { "type": "array", "default": null, "description": "List of files or directories to exclude." }, "pylsp.plugins.flake8.executable": { "type": "string", "default": "flake8", "description": "Path to the flake8 executable." }, "pylsp.plugins.flake8.filename": { "type": "string", "default": null, "description": "Only check for filenames matching the patterns in this list." }, "pylsp.plugins.flake8.hangClosing": { "type": "boolean", "default": null, "description": "Hang closing bracket instead of matching indentation of opening bracket's line." }, "pylsp.plugins.flake8.ignore": { "type": "array", "default": null, "description": "List of errors and warnings to ignore (or skip)." }, "pylsp.plugins.flake8.maxLineLength": { "type": "integer", "default": null, "description": "Maximum allowed line length for the entirety of this run." }, "pylsp.plugins.flake8.perFileIgnores": { "type": "array", "default": null, "description": "A pairing of filenames and violation codes that defines which violations to ignore in a particular file, for example: `[\"file_path.py:W305,W304\"]`)." }, "pylsp.plugins.flake8.select": { "type": "array", "default": null, "description": "List of errors and warnings to enable." }, "pylsp.plugins.jedi.extra_paths": { "type": "array", "default": [], "description": "Define extra paths for jedi.Script." }, "pylsp.plugins.jedi.env_vars": { "type": "object", "default": null, "description": "Define environment variables for jedi.Script and Jedi.names." }, "pylsp.plugins.jedi.environment": { "type": "string", "default": null, "description": "Define environment for jedi.Script and Jedi.names." }, "pylsp.plugins.jedi_completion.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.jedi_completion.include_params": { "type": "boolean", "default": true, "description": "Auto-completes methods and classes with tabstops for each parameter." }, "pylsp.plugins.jedi_completion.include_class_objects": { "type": "boolean", "default": true, "description": "Adds class objects as a separate completion item." }, "pylsp.plugins.jedi_completion.fuzzy": { "type": "boolean", "default": false, "description": "Enable fuzzy when requesting autocomplete." }, "pylsp.plugins.jedi_completion.eager": { "type": "boolean", "default": false, "description": "Resolve documentation and detail eagerly." }, "pylsp.plugins.jedi_completion.resolve_at_most": { "type": "number", "default": 25, "description": "How many labels and snippets (at most) should be resolved?" }, "pylsp.plugins.jedi_completion.cache_for": { "type": "array", "items": { "type": "string" }, "default": ["pandas", "numpy", "tensorflow", "matplotlib"], "description": "Modules for which labels and snippets should be cached." }, "pylsp.plugins.jedi_definition.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." 
}, "pylsp.plugins.jedi_definition.follow_imports": { "type": "boolean", "default": true, "description": "The goto call will follow imports." }, "pylsp.plugins.jedi_definition.follow_builtin_imports": { "type": "boolean", "default": true, "description": "If follow_imports is True will decide if it follow builtin imports." }, "pylsp.plugins.jedi_hover.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.jedi_references.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.jedi_signature_help.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.jedi_symbols.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.jedi_symbols.all_scopes": { "type": "boolean", "default": true, "description": "If True lists the names of all scopes instead of only the module namespace." }, "pylsp.plugins.jedi_symbols.include_import_symbols": { "type": "boolean", "default": true, "description": "If True includes symbols imported from other libraries." }, "pylsp.plugins.mccabe.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.mccabe.threshold": { "type": "number", "default": 15, "description": "The minimum threshold that triggers warnings about cyclomatic complexity." }, "pylsp.plugins.preload.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.preload.modules": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "List of modules to import on startup" }, "pylsp.plugins.pycodestyle.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.pycodestyle.exclude": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Exclude files or directories which match these patterns." }, "pylsp.plugins.pycodestyle.filename": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "When parsing directories, only check filenames matching these patterns." }, "pylsp.plugins.pycodestyle.select": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Select errors and warnings" }, "pylsp.plugins.pycodestyle.ignore": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Ignore errors and warnings" }, "pylsp.plugins.pycodestyle.hangClosing": { "type": "boolean", "default": null, "description": "Hang closing bracket instead of matching indentation of opening bracket's line." }, "pylsp.plugins.pycodestyle.maxLineLength": { "type": "number", "default": null, "description": "Set maximum allowed line length." }, "pylsp.plugins.pydocstyle.enabled": { "type": "boolean", "default": false, "description": "Enable or disable the plugin." }, "pylsp.plugins.pydocstyle.convention": { "type": "string", "default": null, "enum": [ "pep257", "numpy" ], "description": "Choose the basic list of checked errors by specifying an existing convention." }, "pylsp.plugins.pydocstyle.addIgnore": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Ignore errors and warnings in addition to the specified convention." 
}, "pylsp.plugins.pydocstyle.addSelect": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Select errors and warnings in addition to the specified convention." }, "pylsp.plugins.pydocstyle.ignore": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Ignore errors and warnings" }, "pylsp.plugins.pydocstyle.select": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "Select errors and warnings" }, "pylsp.plugins.pydocstyle.match": { "type": "string", "default": "(?!test_).*\\.py", "description": "Check only files that exactly match the given regular expression; default is to match files that don't start with 'test_' but end with '.py'." }, "pylsp.plugins.pydocstyle.matchDir": { "type": "string", "default": "[^\\.].*", "description": "Search only dirs that exactly match the given regular expression; default is to match dirs which do not begin with a dot." }, "pylsp.plugins.pyflakes.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.pylint.enabled": { "type": "boolean", "default": false, "description": "Enable or disable the plugin." }, "pylsp.plugins.pylint.args": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": false, "description": "Arguments to pass to pylint." }, "pylsp.plugins.pylint.executable": { "type": "string", "default": null, "description": "Executable to run pylint with. Enabling this will run pylint on unsaved files via stdin. Can slow down workflow. Only works with python3." }, "pylsp.plugins.rope_completion.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.rope_completion.eager": { "type": "boolean", "default": false, "description": "Resolve documentation and detail eagerly." }, "pylsp.plugins.yapf.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.rope.extensionModules": { "type": "string", "default": null, "description": "Builtin and c-extension modules that are allowed to be imported and inspected by rope." }, "pylsp.rope.ropeFolder": { "type": "array", "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "The name of the folder in which rope stores project configurations and data. Pass `null` for not using such a folder at all." } } } python-lsp-server-1.3.3/pylsp/config/source.py000066400000000000000000000052511415567622500214450ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import configparser import logging import os import sys log = logging.getLogger(__name__) class ConfigSource: """Base class for implementing a config source.""" def __init__(self, root_path): self.root_path = root_path self.is_windows = sys.platform == 'win32' self.xdg_home = os.environ.get( 'XDG_CONFIG_HOME', os.path.expanduser('~/.config') ) def user_config(self): """Return user-level (i.e. home directory) configuration.""" raise NotImplementedError() def project_config(self, document_path): """Return project-level (i.e. 
workspace directory) configuration.""" raise NotImplementedError() @classmethod def read_config_from_files(cls, files): config = configparser.RawConfigParser() for filename in files: if os.path.exists(filename) and not os.path.isdir(filename): config.read(filename) return config @classmethod def parse_config(cls, config, key, options): """Parse the config with the given options.""" conf = {} for source, destination, opt_type in options: opt_value = cls._get_opt(config, key, source, opt_type) if opt_value is not None: cls._set_opt(conf, destination, opt_value) return conf @classmethod def _get_opt(cls, config, key, option, opt_type): """Get an option from a configparser with the given type.""" for opt_key in [option, option.replace('-', '_')]: if not config.has_option(key, opt_key): continue if opt_type == bool: return config.getboolean(key, opt_key) if opt_type == int: return config.getint(key, opt_key) if opt_type == str: return config.get(key, opt_key) if opt_type == list: return cls._parse_list_opt(config.get(key, opt_key)) raise ValueError("Unknown option type: %s" % opt_type) @classmethod def _parse_list_opt(cls, string): return [s.strip() for s in string.split(",") if s.strip()] @classmethod def _set_opt(cls, config_dict, path, value): """Set the value in the dictionary at the given path if the value is not None.""" if value is None: return if '.' not in path: config_dict[path] = value return key, rest = path.split(".", 1) if key not in config_dict: config_dict[key] = {} cls._set_opt(config_dict[key], rest, value) python-lsp-server-1.3.3/pylsp/hookspecs.py000066400000000000000000000043621415567622500207000ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. # pylint: disable=redefined-builtin, unused-argument from pylsp import hookspec @hookspec def pylsp_code_actions(config, workspace, document, range, context): pass @hookspec def pylsp_code_lens(config, workspace, document): pass @hookspec def pylsp_commands(config, workspace): """The list of command strings supported by the server. Returns: List[str]: The supported commands. 
""" @hookspec def pylsp_completions(config, workspace, document, position): pass @hookspec(firstresult=True) def pylsp_completion_item_resolve(config, workspace, document, completion_item): pass @hookspec def pylsp_definitions(config, workspace, document, position): pass @hookspec def pylsp_dispatchers(config, workspace): pass @hookspec def pylsp_document_did_open(config, workspace, document): pass @hookspec def pylsp_document_did_save(config, workspace, document): pass @hookspec def pylsp_document_highlight(config, workspace, document, position): pass @hookspec def pylsp_document_symbols(config, workspace, document): pass @hookspec(firstresult=True) def pylsp_execute_command(config, workspace, command, arguments): pass @hookspec def pylsp_experimental_capabilities(config, workspace): pass @hookspec def pylsp_folding_range(config, workspace, document): pass @hookspec(firstresult=True) def pylsp_format_document(config, workspace, document): pass @hookspec(firstresult=True) def pylsp_format_range(config, workspace, document, range): pass @hookspec(firstresult=True) def pylsp_hover(config, workspace, document, position): pass @hookspec def pylsp_initialize(config, workspace): pass @hookspec def pylsp_initialized(): pass @hookspec def pylsp_lint(config, workspace, document, is_saved): pass @hookspec def pylsp_references(config, workspace, document, position, exclude_declaration): pass @hookspec(firstresult=True) def pylsp_rename(config, workspace, document, position, new_name): pass @hookspec def pylsp_settings(config): pass @hookspec(firstresult=True) def pylsp_signature_help(config, workspace, document, position): pass python-lsp-server-1.3.3/pylsp/lsp.py000066400000000000000000000025171415567622500175000ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. """Some Language Server Protocol constants https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md """ class CompletionItemKind: Text = 1 Method = 2 Function = 3 Constructor = 4 Field = 5 Variable = 6 Class = 7 Interface = 8 Module = 9 Property = 10 Unit = 11 Value = 12 Enum = 13 Keyword = 14 Snippet = 15 Color = 16 File = 17 Reference = 18 Folder = 19 EnumMember = 20 Constant = 21 Struct = 22 Event = 23 Operator = 24 TypeParameter = 25 class DocumentHighlightKind: Text = 1 Read = 2 Write = 3 class DiagnosticSeverity: Error = 1 Warning = 2 Information = 3 Hint = 4 class InsertTextFormat: PlainText = 1 Snippet = 2 class MessageType: Error = 1 Warning = 2 Info = 3 Log = 4 class SymbolKind: File = 1 Module = 2 Namespace = 3 Package = 4 Class = 5 Method = 6 Property = 7 Field = 8 Constructor = 9 Enum = 10 Interface = 11 Function = 12 Variable = 13 Constant = 14 String = 15 Number = 16 Boolean = 17 Array = 18 class TextDocumentSyncKind: NONE = 0 FULL = 1 INCREMENTAL = 2 python-lsp-server-1.3.3/pylsp/plugins/000077500000000000000000000000001415567622500200045ustar00rootroot00000000000000python-lsp-server-1.3.3/pylsp/plugins/__init__.py000066400000000000000000000001511415567622500221120ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. python-lsp-server-1.3.3/pylsp/plugins/_resolvers.py000066400000000000000000000104771415567622500225520ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
from collections import defaultdict import logging from time import time from jedi.api.classes import Completion from pylsp import lsp log = logging.getLogger(__name__) # ---- Base class # ----------------------------------------------------------------------------- class Resolver: def __init__(self, callback, resolve_on_error, time_to_live=60 * 30): self.callback = callback self.resolve_on_error = resolve_on_error self._cache = {} self._time_to_live = time_to_live self._cache_ttl = defaultdict(set) self._clear_every = 2 # see https://github.com/davidhalter/jedi/blob/master/jedi/inference/helpers.py#L194-L202 self._cached_modules = {'pandas', 'numpy', 'tensorflow', 'matplotlib'} @property def cached_modules(self): return self._cached_modules @cached_modules.setter def cached_modules(self, new_value): self._cached_modules = set(new_value) def clear_outdated(self): now = self.time_key() to_clear = [ timestamp for timestamp in self._cache_ttl if timestamp < now ] for time_key in to_clear: for key in self._cache_ttl[time_key]: del self._cache[key] del self._cache_ttl[time_key] def time_key(self): return int(time() / self._time_to_live) def get_or_create(self, completion: Completion): if not completion.full_name: use_cache = False else: module_parts = completion.full_name.split('.') use_cache = module_parts and module_parts[0] in self._cached_modules if use_cache: key = self._create_completion_id(completion) if key not in self._cache: if self.time_key() % self._clear_every == 0: self.clear_outdated() self._cache[key] = self.resolve(completion) self._cache_ttl[self.time_key()].add(key) return self._cache[key] return self.resolve(completion) def _create_completion_id(self, completion: Completion): return ( completion.full_name, completion.module_path, completion.line, completion.column, self.time_key() ) def resolve(self, completion): try: sig = completion.get_signatures() return self.callback(completion, sig) except Exception as e: # pylint: disable=broad-except log.warning(f'Something went wrong when resolving label for {completion}: {e}') return self.resolve_on_error # ---- Label resolver # ----------------------------------------------------------------------------- def format_label(completion, sig): if sig and completion.type in ('function', 'method'): params = ', '.join(param.name for param in sig[0].params) label = '{}({})'.format(completion.name, params) return label return completion.name LABEL_RESOLVER = Resolver(callback=format_label, resolve_on_error='') # ---- Snippets resolver # ----------------------------------------------------------------------------- def format_snippet(completion, sig): if not sig: return {} snippet_completion = {} positional_args = [param for param in sig[0].params if '=' not in param.description and param.name not in {'/', '*'}] if len(positional_args) > 1: # For completions with params, we can generate a snippet instead snippet_completion['insertTextFormat'] = lsp.InsertTextFormat.Snippet snippet = completion.name + '(' for i, param in enumerate(positional_args): snippet += '${%s:%s}' % (i + 1, param.name) if i < len(positional_args) - 1: snippet += ', ' snippet += ')$0' snippet_completion['insertText'] = snippet elif len(positional_args) == 1: snippet_completion['insertTextFormat'] = lsp.InsertTextFormat.Snippet snippet_completion['insertText'] = completion.name + '($0)' else: snippet_completion['insertText'] = completion.name + '()' return snippet_completion SNIPPET_RESOLVER = Resolver(callback=format_snippet, resolve_on_error={}) 
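# Illustrative sketch, not part of upstream pylsp: for a hypothetical completion
# ``connect(host, port)`` with two positional parameters, the resolvers above
# would produce roughly the following label and snippet payloads (results are
# cached per completion and per TTL time bucket by ``Resolver``).
_EXAMPLE_RESOLVED_LABEL = 'connect(host, port)'  # hypothetical function name
_EXAMPLE_RESOLVED_SNIPPET = {
    'insertTextFormat': 2,  # lsp.InsertTextFormat.Snippet
    'insertText': 'connect(${1:host}, ${2:port})$0',
}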
python-lsp-server-1.3.3/pylsp/plugins/autopep8_format.py000066400000000000000000000051551415567622500235010ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging import pycodestyle from autopep8 import fix_code, continued_indentation as autopep8_c_i from pylsp import hookimpl log = logging.getLogger(__name__) @hookimpl(tryfirst=True) # Prefer autopep8 over YAPF def pylsp_format_document(config, document): log.info("Formatting document %s with autopep8", document) return _format(config, document) @hookimpl(tryfirst=True) # Prefer autopep8 over YAPF def pylsp_format_range(config, document, range): # pylint: disable=redefined-builtin log.info("Formatting document %s in range %s with autopep8", document, range) # First we 'round' the range up/down to full lines only range['start']['character'] = 0 range['end']['line'] += 1 range['end']['character'] = 0 # Add 1 for 1-indexing vs LSP's 0-indexing line_range = (range['start']['line'] + 1, range['end']['line'] + 1) return _format(config, document, line_range=line_range) def _format(config, document, line_range=None): options = _autopep8_config(config, document) if line_range: options['line_range'] = list(line_range) # Temporarily re-monkey-patch the continued_indentation checker - #771 del pycodestyle._checks['logical_line'][pycodestyle.continued_indentation] pycodestyle.register_check(autopep8_c_i) new_source = fix_code(document.source, options=options) # Switch it back del pycodestyle._checks['logical_line'][autopep8_c_i] pycodestyle.register_check(pycodestyle.continued_indentation) if new_source == document.source: return [] # I'm too lazy at the moment to parse diffs into TextEdit items # So let's just return the entire file... return [{ 'range': { 'start': {'line': 0, 'character': 0}, # End char 0 of the line after our document 'end': {'line': len(document.lines), 'character': 0} }, 'newText': new_source }] def _autopep8_config(config, document=None): # We user pycodestyle settings to avoid redefining things path = document.path if document is not None else None settings = config.plugin_settings('pycodestyle', document_path=path) options = { 'exclude': settings.get('exclude'), 'hang_closing': settings.get('hangClosing'), 'ignore': settings.get('ignore'), 'max_line_length': settings.get('maxLineLength'), 'select': settings.get('select'), 'aggressive': settings.get('aggressive'), } # Filter out null options return {k: v for k, v in options.items() if v} python-lsp-server-1.3.3/pylsp/plugins/definition.py000066400000000000000000000023331415567622500225070ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
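# Illustrative sketch, not part of upstream pylsp: each definition found by the
# ``pylsp_definitions`` hook below is reported as an LSP ``Location`` shaped
# roughly like this hypothetical example. Lines and characters are 0-based in
# LSP, hence the ``d.line - 1`` adjustments in the implementation.
_EXAMPLE_DEFINITION_LOCATION = {
    'uri': 'file:///tmp/example/project/app.py',  # made-up path
    'range': {
        'start': {'line': 41, 'character': 4},
        'end': {'line': 41, 'character': 4 + len('handler')},  # 'handler' is a made-up name
    },
}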
import logging from pylsp import hookimpl, uris, _utils log = logging.getLogger(__name__) @hookimpl def pylsp_definitions(config, document, position): settings = config.plugin_settings('jedi_definition') code_position = _utils.position_to_jedi_linecolumn(document, position) definitions = document.jedi_script(use_document_path=True).goto( follow_imports=settings.get('follow_imports', True), follow_builtin_imports=settings.get('follow_builtin_imports', True), **code_position) return [ { 'uri': uris.uri_with(document.uri, path=str(d.module_path)), 'range': { 'start': {'line': d.line - 1, 'character': d.column}, 'end': {'line': d.line - 1, 'character': d.column + len(d.name)}, } } for d in definitions if d.is_definition() and _not_internal_definition(d) ] def _not_internal_definition(definition): return ( definition.line is not None and definition.column is not None and definition.module_path is not None and not definition.in_builtin_module() ) python-lsp-server-1.3.3/pylsp/plugins/flake8_lint.py000066400000000000000000000134261415567622500225640ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. """Linter pluging for flake8""" import logging import os.path import re import sys from pathlib import PurePath from subprocess import PIPE, Popen from pylsp import hookimpl, lsp log = logging.getLogger(__name__) FIX_IGNORES_RE = re.compile(r'([^a-zA-Z0-9_,]*;.*(\W+||$))') @hookimpl def pylsp_settings(): # Default flake8 to disabled return {'plugins': {'flake8': {'enabled': False}}} @hookimpl def pylsp_lint(workspace, document): config = workspace._config settings = config.plugin_settings('flake8', document_path=document.path) log.debug("Got flake8 settings: %s", settings) ignores = settings.get("ignore", []) per_file_ignores = settings.get("perFileIgnores") if per_file_ignores: for path in per_file_ignores: file_pat, errors = path.split(":") if PurePath(document.path).match(file_pat): ignores.extend(errors.split(",")) opts = { 'config': settings.get('config'), 'exclude': settings.get('exclude'), 'filename': settings.get('filename'), 'hang-closing': settings.get('hangClosing'), 'ignore': ignores or None, 'max-line-length': settings.get('maxLineLength'), 'select': settings.get('select'), } # flake takes only absolute path to the config. So we should check and # convert if necessary if opts.get('config') and not os.path.isabs(opts.get('config')): opts['config'] = os.path.abspath(os.path.expanduser(os.path.expandvars( opts.get('config') ))) log.debug("using flake8 with config: %s", opts['config']) # Call the flake8 utility then parse diagnostics from stdout flake8_executable = settings.get('executable', 'flake8') args = build_args(opts) output = run_flake8(flake8_executable, args, document) return parse_stdout(document, output) def run_flake8(flake8_executable, args, document): """Run flake8 with the provided arguments, logs errors from stderr if any. """ # a quick temporary fix to deal with Atom args = [(i if not i.startswith('--ignore=') else FIX_IGNORES_RE.sub('', i)) for i in args if i is not None] # if executable looks like a path resolve it if not os.path.isfile(flake8_executable) and os.sep in flake8_executable: flake8_executable = os.path.abspath( os.path.expanduser(os.path.expandvars(flake8_executable)) ) log.debug("Calling %s with args: '%s'", flake8_executable, args) try: cmd = [flake8_executable] cmd.extend(args) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) except IOError: log.debug("Can't execute %s. 
Trying with '%s -m flake8'", flake8_executable, sys.executable) cmd = [sys.executable, '-m', 'flake8'] cmd.extend(args) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) # pylint: disable=consider-using-with (stdout, stderr) = p.communicate(document.source.encode()) if stderr: log.error("Error while running flake8 '%s'", stderr.decode()) return stdout.decode() def build_args(options): """Build arguments for calling flake8. Args: options: dictionary of argument names and their values. """ args = ['-'] # use stdin for arg_name, arg_val in options.items(): if arg_val is None: continue arg = None if isinstance(arg_val, list): arg = '--{}={}'.format(arg_name, ','.join(arg_val)) elif isinstance(arg_val, bool): if arg_val: arg = '--{}'.format(arg_name) else: arg = '--{}={}'.format(arg_name, arg_val) args.append(arg) return args def parse_stdout(document, stdout): """ Build a diagnostics from flake8's output, it should extract every result and format it into a dict that looks like this: { 'source': 'flake8', 'code': code, # 'E501' 'range': { 'start': { 'line': start_line, 'character': start_column, }, 'end': { 'line': end_line, 'character': end_column, }, }, 'message': msg, 'severity': lsp.DiagnosticSeverity.*, } Args: document: The document to be linted. stdout: output from flake8 Returns: A list of dictionaries. """ diagnostics = [] lines = stdout.splitlines() for raw_line in lines: parsed_line = re.match(r'(.*):(\d*):(\d*): (\w*) (.*)', raw_line) if not parsed_line: log.debug("Flake8 output parser can't parse line '%s'", raw_line) continue parsed_line = parsed_line.groups() if len(parsed_line) != 5: log.debug("Flake8 output parser can't parse line '%s'", raw_line) continue _, line, character, code, msg = parsed_line line = int(line) - 1 character = int(character) - 1 # show also the code in message msg = code + ' ' + msg diagnostics.append( { 'source': 'flake8', 'code': code, 'range': { 'start': { 'line': line, 'character': character }, 'end': { 'line': line, # no way to determine the column 'character': len(document.lines[line]) } }, 'message': msg, 'severity': lsp.DiagnosticSeverity.Warning, } ) return diagnostics python-lsp-server-1.3.3/pylsp/plugins/folding.py000066400000000000000000000156121415567622500220050ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
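# Illustrative sketch, not part of upstream pylsp: for a hypothetical module
#
#     def outer():        # line 0
#         if flag:        # line 1
#             return 1    # line 2
#         return 2        # line 3
#
# the ``pylsp_folding_range`` hook below would answer with 0-based ranges of
# roughly this shape; start/end characters are omitted, so clients fold each
# range to the end of its last line.
_EXAMPLE_FOLDING_RANGES = [
    {'startLine': 0, 'endLine': 3},  # the whole function body
    {'startLine': 1, 'endLine': 2},  # the if-block
]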
import re import parso import parso.python.tree as tree_nodes from pylsp import hookimpl SKIP_NODES = (tree_nodes.Module, tree_nodes.IfStmt, tree_nodes.TryStmt) IDENTATION_REGEX = re.compile(r'(\s+).+') @hookimpl def pylsp_folding_range(document): program = document.source + '\n' lines = program.splitlines() tree = parso.parse(program) ranges = __compute_folding_ranges(tree, lines) results = [] for (start_line, end_line) in ranges: start_line -= 1 end_line -= 1 # If start/end character is not defined, then it defaults to the # corresponding line last character results.append({ 'startLine': start_line, 'endLine': end_line, }) return results def __merge_folding_ranges(left, right): for start in list(left.keys()): right_start = right.pop(start, None) if right_start is not None: left[start] = max(right_start, start) left.update(right) return left def __empty_identation_stack(identation_stack, level_limits, current_line, folding_ranges): while identation_stack != []: upper_level = identation_stack.pop(0) level_start = level_limits.pop(upper_level) folding_ranges.append((level_start, current_line)) return folding_ranges def __match_identation_stack(identation_stack, level, level_limits, folding_ranges, current_line): upper_level = identation_stack.pop(0) while upper_level >= level: level_start = level_limits.pop(upper_level) folding_ranges.append((level_start, current_line)) upper_level = identation_stack.pop(0) identation_stack.insert(0, upper_level) return identation_stack, folding_ranges def __compute_folding_ranges_identation(text): lines = text.splitlines() folding_ranges = [] identation_stack = [] level_limits = {} current_level = 0 current_line = 0 while lines[current_line] == '': current_line += 1 for i, line in enumerate(lines): if i < current_line: continue i += 1 identation_match = IDENTATION_REGEX.match(line) if identation_match is not None: whitespace = identation_match.group(1) level = len(whitespace) if level > current_level: level_limits[current_level] = current_line identation_stack.insert(0, current_level) current_level = level elif level < current_level: identation_stack, folding_ranges = __match_identation_stack( identation_stack, level, level_limits, folding_ranges, current_line) current_level = level else: folding_ranges = __empty_identation_stack( identation_stack, level_limits, current_line, folding_ranges) current_level = 0 if line.strip() != '': current_line = i folding_ranges = __empty_identation_stack( identation_stack, level_limits, current_line, folding_ranges) return dict(folding_ranges) def __check_if_node_is_valid(node): valid = True if isinstance(node, tree_nodes.PythonNode): kind = node.type valid = kind not in {'decorated', 'parameters', 'dictorsetmaker', 'testlist_comp'} if kind == 'suite': if isinstance(node.parent, tree_nodes.Function): valid = False return valid def __handle_skip(stack, skip): body = stack[skip] children = [body] if hasattr(body, 'children'): children = body.children stack = stack[:skip] + children + stack[skip + 1:] node = body end_line, _ = body.end_pos return node, end_line def __handle_flow_nodes(node, end_line, stack): from_keyword = False if isinstance(node, tree_nodes.Keyword): from_keyword = True if node.value in {'if', 'elif', 'with', 'while'}: node, end_line = __handle_skip(stack, 2) elif node.value in {'except'}: first_node = stack[0] if isinstance(first_node, tree_nodes.Operator): node, end_line = __handle_skip(stack, 1) else: node, end_line = __handle_skip(stack, 2) elif node.value in {'for'}: node, end_line = 
__handle_skip(stack, 4) elif node.value in {'else'}: node, end_line = __handle_skip(stack, 1) return end_line, from_keyword, node, stack def __compute_start_end_lines(node, stack): start_line, _ = node.start_pos end_line, _ = node.end_pos modified = False end_line, from_keyword, node, stack = __handle_flow_nodes( node, end_line, stack) last_leaf = node.get_last_leaf() last_newline = isinstance(last_leaf, tree_nodes.Newline) last_operator = isinstance(last_leaf, tree_nodes.Operator) node_is_operator = isinstance(node, tree_nodes.Operator) last_operator = last_operator or not node_is_operator end_line -= 1 if isinstance(node.parent, tree_nodes.PythonNode) and not from_keyword: kind = node.type if kind in {'suite', 'atom', 'atom_expr', 'arglist'}: if len(stack) > 0: next_node = stack[0] next_line, _ = next_node.start_pos if next_line > end_line: end_line += 1 modified = True if not last_newline and not modified and not last_operator: end_line += 1 return start_line, end_line, stack def __compute_folding_ranges(tree, lines): folding_ranges = {} stack = [tree] while len(stack) > 0: node = stack.pop(0) if isinstance(node, tree_nodes.Newline): # Skip newline nodes continue if isinstance(node, tree_nodes.PythonErrorNode): # Fallback to indentation-based (best-effort) folding start_line, _ = node.start_pos start_line -= 1 padding = [''] * start_line text = '\n'.join(padding + lines[start_line:]) + '\n' identation_ranges = __compute_folding_ranges_identation(text) folding_ranges = __merge_folding_ranges( folding_ranges, identation_ranges) break if not isinstance(node, SKIP_NODES): valid = __check_if_node_is_valid(node) if valid: start_line, end_line, stack = __compute_start_end_lines( node, stack) if end_line > start_line: current_end = folding_ranges.get(start_line, -1) folding_ranges[start_line] = max(current_end, end_line) if hasattr(node, 'children'): stack = node.children + stack folding_ranges = sorted(folding_ranges.items()) return folding_ranges python-lsp-server-1.3.3/pylsp/plugins/highlight.py000066400000000000000000000017431415567622500223320ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging from pylsp import hookimpl, lsp, _utils log = logging.getLogger(__name__) @hookimpl def pylsp_document_highlight(document, position): code_position = _utils.position_to_jedi_linecolumn(document, position) usages = document.jedi_script().get_references(**code_position) def is_valid(definition): return definition.line is not None and definition.column is not None def local_to_document(definition): return not definition.module_path or str(definition.module_path) == document.path return [{ 'range': { 'start': {'line': d.line - 1, 'character': d.column}, 'end': {'line': d.line - 1, 'character': d.column + len(d.name)} }, 'kind': lsp.DocumentHighlightKind.Write if d.is_definition() else lsp.DocumentHighlightKind.Read } for d in usages if is_valid(d) and local_to_document(d)] python-lsp-server-1.3.3/pylsp/plugins/hover.py000066400000000000000000000026401415567622500215030ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
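# Illustrative sketch, not part of upstream pylsp: the ``pylsp_hover`` hook
# below answers with a signature block plus formatted docstring. For a
# hypothetical function ``greet(name)`` with a one-line docstring the result
# would look roughly like this:
_EXAMPLE_HOVER_RESULT = {
    'contents': [
        {'language': 'python', 'value': 'greet(name: str) -> str'},  # made-up signature
        'Return a friendly greeting for *name*.',                    # made-up docstring
    ],
}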
import logging from pylsp import hookimpl, _utils log = logging.getLogger(__name__) @hookimpl def pylsp_hover(document, position): code_position = _utils.position_to_jedi_linecolumn(document, position) definitions = document.jedi_script(use_document_path=True).infer(**code_position) word = document.word_at_position(position) # Find first exact matching definition definition = next((x for x in definitions if x.name == word), None) # Ensure a definition is used if only one is available # even if the word doesn't match. An example of this case is 'np' # where 'numpy' doesn't match with 'np'. Same for NumPy ufuncs if len(definitions) == 1: definition = definitions[0] if not definition: return {'contents': ''} # raw docstring returns only doc, without signature doc = _utils.format_docstring(definition.docstring(raw=True)) # Find first exact matching signature signature = next((x.to_string() for x in definition.get_signatures() if x.name == word), '') contents = [] if signature: contents.append({ 'language': 'python', 'value': signature, }) if doc: contents.append(doc) if not contents: return {'contents': ''} return {'contents': contents} python-lsp-server-1.3.3/pylsp/plugins/jedi_completion.py000066400000000000000000000174421415567622500235320ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging import os.path as osp import parso from pylsp import _utils, hookimpl, lsp from pylsp.plugins._resolvers import LABEL_RESOLVER, SNIPPET_RESOLVER log = logging.getLogger(__name__) # Map to the LSP type # > Valid values for type are ``module``, `` class ``, ``instance``, ``function``, # > ``param``, ``path``, ``keyword``, ``property`` and ``statement``. # see: https://jedi.readthedocs.io/en/latest/docs/api-classes.html#jedi.api.classes.BaseName.type _TYPE_MAP = { 'module': lsp.CompletionItemKind.Module, 'namespace': lsp.CompletionItemKind.Module, # to be added in Jedi 0.18+ 'class': lsp.CompletionItemKind.Class, 'instance': lsp.CompletionItemKind.Reference, 'function': lsp.CompletionItemKind.Function, 'param': lsp.CompletionItemKind.Variable, 'path': lsp.CompletionItemKind.File, 'keyword': lsp.CompletionItemKind.Keyword, 'property': lsp.CompletionItemKind.Property, # added in Jedi 0.18 'statement': lsp.CompletionItemKind.Variable } # Types of parso nodes for which snippet is not included in the completion _IMPORTS = ('import_name', 'import_from') # Types of parso node for errors _ERRORS = ('error_node', ) @hookimpl def pylsp_completions(config, document, position): """Get formatted completions for current code position""" # pylint: disable=too-many-locals settings = config.plugin_settings('jedi_completion', document_path=document.path) resolve_eagerly = settings.get('eager', False) code_position = _utils.position_to_jedi_linecolumn(document, position) code_position['fuzzy'] = settings.get('fuzzy', False) completions = document.jedi_script(use_document_path=True).complete(**code_position) if not completions: return None completion_capabilities = config.capabilities.get('textDocument', {}).get('completion', {}) snippet_support = completion_capabilities.get('completionItem', {}).get('snippetSupport') should_include_params = settings.get('include_params') should_include_class_objects = settings.get('include_class_objects', True) max_to_resolve = settings.get('resolve_at_most', 25) modules_to_cache_for = settings.get('cache_for', None) if modules_to_cache_for is not None: LABEL_RESOLVER.cached_modules = 
modules_to_cache_for SNIPPET_RESOLVER.cached_modules = modules_to_cache_for include_params = snippet_support and should_include_params and use_snippets(document, position) include_class_objects = snippet_support and should_include_class_objects and use_snippets(document, position) ready_completions = [ _format_completion( c, include_params, resolve=resolve_eagerly, resolve_label_or_snippet=(i < max_to_resolve) ) for i, c in enumerate(completions) ] # TODO split up once other improvements are merged if include_class_objects: for i, c in enumerate(completions): if c.type == 'class': completion_dict = _format_completion( c, False, resolve=resolve_eagerly, resolve_label_or_snippet=(i < max_to_resolve) ) completion_dict['kind'] = lsp.CompletionItemKind.TypeParameter completion_dict['label'] += ' object' ready_completions.append(completion_dict) for completion_dict in ready_completions: completion_dict['data'] = { 'doc_uri': document.uri } # most recently retrieved completion items, used for resolution document.shared_data['LAST_JEDI_COMPLETIONS'] = { # label is the only required property; here it is assumed to be unique completion['label']: (completion, data) for completion, data in zip(ready_completions, completions) } return ready_completions or None @hookimpl def pylsp_completion_item_resolve(completion_item, document): """Resolve formatted completion for given non-resolved completion""" shared_data = document.shared_data['LAST_JEDI_COMPLETIONS'].get(completion_item['label']) if shared_data: completion, data = shared_data return _resolve_completion(completion, data) return completion_item def is_exception_class(name): """ Determine if a class name is an instance of an Exception. This returns `False` if the name given corresponds with a instance of the 'Exception' class, `True` otherwise """ try: return name in [cls.__name__ for cls in Exception.__subclasses__()] except AttributeError: # Needed in case a class don't uses new-style # class definition in Python 2 return False def use_snippets(document, position): """ Determine if it's necessary to return snippets in code completions. This returns `False` if a completion is being requested on an import statement, `True` otherwise. 
""" line = position['line'] lines = document.source.split('\n', line) act_lines = [lines[line][:position['character']]] line -= 1 last_character = '' while line > -1: act_line = lines[line] if (act_line.rstrip().endswith('\\') or act_line.rstrip().endswith('(') or act_line.rstrip().endswith(',')): act_lines.insert(0, act_line) line -= 1 if act_line.rstrip().endswith('('): # Needs to be added to the end of the code before parsing # to make it valid, otherwise the node type could end # being an 'error_node' for multi-line imports that use '(' last_character = ')' else: break if '(' in act_lines[-1].strip(): last_character = ')' code = '\n'.join(act_lines).rsplit(';', maxsplit=1)[-1].strip() + last_character tokens = parso.parse(code) expr_type = tokens.children[0].type return (expr_type not in _IMPORTS and not (expr_type in _ERRORS and 'import' in code)) def _resolve_completion(completion, d): # pylint: disable=broad-except completion['detail'] = _detail(d) try: docs = _utils.format_docstring(d.docstring()) except Exception: docs = '' completion['documentation'] = docs return completion def _format_completion(d, include_params=True, resolve=False, resolve_label_or_snippet=False): completion = { 'label': _label(d, resolve_label_or_snippet), 'kind': _TYPE_MAP.get(d.type), 'sortText': _sort_text(d), 'insertText': d.name } if resolve: completion = _resolve_completion(completion, d) if d.type == 'path': path = osp.normpath(d.name) path = path.replace('\\', '\\\\') path = path.replace('/', '\\/') completion['insertText'] = path if include_params and not is_exception_class(d.name): snippet = _snippet(d, resolve_label_or_snippet) completion.update(snippet) return completion def _label(definition, resolve=False): if not resolve: return definition.name sig = LABEL_RESOLVER.get_or_create(definition) if sig: return sig return definition.name def _snippet(definition, resolve=False): if not resolve: return {} snippet = SNIPPET_RESOLVER.get_or_create(definition) return snippet def _detail(definition): try: return definition.parent().full_name or '' except AttributeError: return definition.full_name or '' def _sort_text(definition): """ Ensure builtins appear at the bottom. Description is of format : . """ # If its 'hidden', put it next last prefix = 'z{}' if definition.name.startswith('_') else 'a{}' return prefix.format(definition.name) python-lsp-server-1.3.3/pylsp/plugins/jedi_rename.py000066400000000000000000000034001415567622500226150ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging from pylsp import hookimpl, uris, _utils log = logging.getLogger(__name__) @hookimpl def pylsp_rename(config, workspace, document, position, new_name): # pylint: disable=unused-argument log.debug('Executing rename of %s to %s', document.word_at_position(position), new_name) kwargs = _utils.position_to_jedi_linecolumn(document, position) kwargs['new_name'] = new_name try: refactoring = document.jedi_script().rename(**kwargs) except NotImplementedError as exc: raise Exception('No support for renaming in Python 2/3.5 with Jedi. 
' 'Consider using the rope_rename plugin instead') from exc log.debug('Finished rename: %s', refactoring.get_diff()) changes = [] for file_path, changed_file in refactoring.get_changed_files().items(): uri = uris.from_fs_path(str(file_path)) doc = workspace.get_maybe_document(uri) changes.append({ 'textDocument': { 'uri': uri, 'version': doc.version if doc else None }, 'edits': [ { 'range': { 'start': {'line': 0, 'character': 0}, 'end': { 'line': _num_lines(changed_file.get_new_code()), 'character': 0, }, }, 'newText': changed_file.get_new_code(), } ], }) return {'documentChanges': changes} def _num_lines(file_contents): 'Count the number of lines in the given string.' return len(file_contents.splitlines()) python-lsp-server-1.3.3/pylsp/plugins/mccabe_lint.py000066400000000000000000000025261415567622500226230ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import ast import logging import mccabe from pylsp import hookimpl, lsp log = logging.getLogger(__name__) THRESHOLD = 'threshold' DEFAULT_THRESHOLD = 15 @hookimpl def pylsp_lint(config, document): threshold = config.plugin_settings('mccabe', document_path=document.path).get(THRESHOLD, DEFAULT_THRESHOLD) log.debug("Running mccabe lint with threshold: %s", threshold) try: tree = compile(document.source, document.path, "exec", ast.PyCF_ONLY_AST) except SyntaxError: # We'll let the other linters point this one out return None visitor = mccabe.PathGraphingAstVisitor() visitor.preorder(tree, visitor) diags = [] for graph in visitor.graphs.values(): if graph.complexity() >= threshold: diags.append({ 'source': 'mccabe', 'range': { 'start': {'line': graph.lineno - 1, 'character': graph.column}, 'end': {'line': graph.lineno - 1, 'character': len(document.lines[graph.lineno])}, }, 'message': 'Cyclomatic complexity too high: %s (threshold %s)' % (graph.complexity(), threshold), 'severity': lsp.DiagnosticSeverity.Warning }) return diags python-lsp-server-1.3.3/pylsp/plugins/preload_imports.py000066400000000000000000000025631415567622500235670ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging from pylsp import hookimpl log = logging.getLogger(__name__) MODULES = [ "OpenGL", "PIL", "array", "audioop", "binascii", "cPickle", "cStringIO", "cmath", "collections", "datetime", "errno", "exceptions", "gc", "imageop", "imp", "itertools", "marshal", "math", "matplotlib", "mmap", "mpmath", "msvcrt", "networkx", "nose", "nt", "numpy", "operator", "os", "os.path", "pandas", "parser", "rgbimg", "scipy", "signal", "skimage", "sklearn", "statsmodels", "strop", "sympy", "sys", "thread", "time", "wx", "xxsubtype", "zipimport", "zlib" ] @hookimpl def pylsp_settings(): # Setup default modules to preload, and rope extension modules return { 'plugins': {'preload': {'modules': MODULES}}, 'rope': {'extensionModules': MODULES} } @hookimpl def pylsp_initialize(config): for mod_name in config.plugin_settings('preload').get('modules', []): try: __import__(mod_name) log.debug("Preloaded module %s", mod_name) except Exception: # pylint: disable=broad-except # Catch any exception since not only ImportError can be raised here # For example, old versions of NumPy can cause a ValueError. 
# See spyder-ide/spyder#13985 pass python-lsp-server-1.3.3/pylsp/plugins/pycodestyle_lint.py000066400000000000000000000061611415567622500237540ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging import pycodestyle from pylsp import hookimpl, lsp try: from autopep8 import continued_indentation as autopep8_c_i except ImportError: pass else: # Check if autopep8's continued_indentation implementation # is overriding pycodestyle's and if so, re-register # the check using pycodestyle's implementation as expected if autopep8_c_i in pycodestyle._checks['logical_line']: del pycodestyle._checks['logical_line'][autopep8_c_i] pycodestyle.register_check(pycodestyle.continued_indentation) log = logging.getLogger(__name__) @hookimpl def pylsp_lint(workspace, document): config = workspace._config settings = config.plugin_settings('pycodestyle', document_path=document.path) log.debug("Got pycodestyle settings: %s", settings) opts = { 'exclude': settings.get('exclude'), 'filename': settings.get('filename'), 'hang_closing': settings.get('hangClosing'), 'ignore': settings.get('ignore'), 'max_line_length': settings.get('maxLineLength'), 'select': settings.get('select'), } kwargs = {k: v for k, v in opts.items() if v} styleguide = pycodestyle.StyleGuide(kwargs) c = pycodestyle.Checker( filename=document.uri, lines=document.lines, options=styleguide.options, report=PyCodeStyleDiagnosticReport(styleguide.options) ) c.check_all() diagnostics = c.report.diagnostics return diagnostics class PyCodeStyleDiagnosticReport(pycodestyle.BaseReport): def __init__(self, options): self.diagnostics = [] super().__init__(options=options) def error(self, line_number, offset, text, check): code = text[:4] if self._ignore_code(code): return # Don't care about expected errors or warnings if code in self.expected: return # PyCodeStyle will sometimes give you an error the line after the end of the file # e.g. no newline at end of file # In that case, the end offset should just be some number ~100 # (because why not? There's nothing to underline anyways) err_range = { 'start': {'line': line_number - 1, 'character': offset}, 'end': { # FIXME: It's a little naiive to mark until the end of the line, can we not easily do better? 'line': line_number - 1, 'character': 100 if line_number > len(self.lines) else len(self.lines[line_number - 1]) }, } self.diagnostics.append({ 'source': 'pycodestyle', 'range': err_range, 'message': text, 'code': code, # Are style errors really ever errors? 'severity': _get_severity(code) }) def _get_severity(code): # Are style errors ever really errors? if code[0] == 'E' or code[0] == 'W': return lsp.DiagnosticSeverity.Warning # If no severity is specified, why wouldn't this be informational only? return lsp.DiagnosticSeverity.Information python-lsp-server-1.3.3/pylsp/plugins/pydocstyle_lint.py000066400000000000000000000072461415567622500236140ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
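# Illustrative sketch, not part of upstream pylsp: given hypothetical plugin
# settings such as ``{'convention': 'numpy', 'addIgnore': ['D100', 'D104']}``,
# the ``pylsp_lint`` hook below builds a pydocstyle argument list roughly like:
_EXAMPLE_PYDOCSTYLE_ARGS = [
    '/tmp/example/project/module.py',  # made-up document path
    '--convention=numpy',
    '--add-ignore=D100,D104',
]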
import contextlib import logging import os import re import sys import pydocstyle from pylsp import hookimpl, lsp log = logging.getLogger(__name__) # PyDocstyle is a little verbose in debug message pydocstyle_logger = logging.getLogger(pydocstyle.utils.__name__) pydocstyle_logger.setLevel(logging.INFO) DEFAULT_MATCH_RE = pydocstyle.config.ConfigurationParser.DEFAULT_MATCH_RE DEFAULT_MATCH_DIR_RE = pydocstyle.config.ConfigurationParser.DEFAULT_MATCH_DIR_RE @hookimpl def pylsp_settings(): # Default pydocstyle to disabled return {'plugins': {'pydocstyle': {'enabled': False}}} @hookimpl def pylsp_lint(config, document): settings = config.plugin_settings('pydocstyle', document_path=document.path) log.debug("Got pydocstyle settings: %s", settings) # Explicitly passing a path to pydocstyle means it doesn't respect the --match flag, so do it ourselves filename_match_re = re.compile(settings.get('match', DEFAULT_MATCH_RE) + '$') if not filename_match_re.match(os.path.basename(document.path)): return [] # Likewise with --match-dir dir_match_re = re.compile(settings.get('matchDir', DEFAULT_MATCH_DIR_RE) + '$') if not dir_match_re.match(os.path.basename(os.path.dirname(document.path))): return [] args = [document.path] if settings.get('convention'): args.append('--convention=' + settings['convention']) if settings.get('addSelect'): args.append('--add-select=' + ','.join(settings['addSelect'])) if settings.get('addIgnore'): args.append('--add-ignore=' + ','.join(settings['addIgnore'])) elif settings.get('select'): args.append('--select=' + ','.join(settings['select'])) elif settings.get('ignore'): args.append('--ignore=' + ','.join(settings['ignore'])) log.info("Using pydocstyle args: %s", args) conf = pydocstyle.config.ConfigurationParser() with _patch_sys_argv(args): # TODO(gatesn): We can add more pydocstyle args here from our pylsp config conf.parse() # Will only yield a single filename, the document path diags = [] for filename, checked_codes, ignore_decorators in conf.get_files_to_check(): errors = pydocstyle.checker.ConventionChecker().check_source( document.source, filename, ignore_decorators=ignore_decorators ) try: for error in errors: if error.code not in checked_codes: continue diags.append(_parse_diagnostic(document, error)) except pydocstyle.parser.ParseError: # In the case we cannot parse the Python file, just continue pass log.debug("Got pydocstyle errors: %s", diags) return diags def _parse_diagnostic(document, error): lineno = error.definition.start - 1 line = document.lines[0] if document.lines else "" start_character = len(line) - len(line.lstrip()) end_character = len(line) return { 'source': 'pydocstyle', 'code': error.code, 'message': error.message, 'severity': lsp.DiagnosticSeverity.Warning, 'range': { 'start': { 'line': lineno, 'character': start_character }, 'end': { 'line': lineno, 'character': end_character } } } @contextlib.contextmanager def _patch_sys_argv(arguments): old_args = sys.argv # Preserve argv[0] since it's the executable sys.argv = old_args[0:1] + arguments try: yield finally: sys.argv = old_args python-lsp-server-1.3.3/pylsp/plugins/pyflakes_lint.py000066400000000000000000000051631415567622500232270ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
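# Illustrative sketch, not part of upstream pylsp: an unused-import warning on a
# hypothetical first line ``import os`` would be reported by the ``flake()``
# handler below as a diagnostic roughly like this. Ordinary pyflakes messages
# stay at Warning severity, while the types in PYFLAKES_ERROR_MESSAGES are
# promoted to Error.
_EXAMPLE_PYFLAKES_DIAGNOSTIC = {
    'source': 'pyflakes',
    'range': {
        'start': {'line': 0, 'character': 0},
        'end': {'line': 0, 'character': 9},  # clamped to the end of the offending line
    },
    'message': "'os' imported but unused",
    'severity': 2,  # lsp.DiagnosticSeverity.Warning
}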
from pyflakes import api as pyflakes_api, messages from pylsp import hookimpl, lsp # Pyflakes messages that should be reported as Errors instead of Warns PYFLAKES_ERROR_MESSAGES = ( messages.UndefinedName, messages.UndefinedExport, messages.UndefinedLocal, messages.DuplicateArgument, messages.FutureFeatureNotDefined, messages.ReturnOutsideFunction, messages.YieldOutsideFunction, messages.ContinueOutsideLoop, messages.BreakOutsideLoop, messages.ContinueInFinally, messages.TwoStarredExpressions, ) @hookimpl def pylsp_lint(document): reporter = PyflakesDiagnosticReport(document.lines) pyflakes_api.check(document.source.encode('utf-8'), document.path, reporter=reporter) return reporter.diagnostics class PyflakesDiagnosticReport: def __init__(self, lines): self.lines = lines self.diagnostics = [] def unexpectedError(self, _filename, msg): # pragma: no cover err_range = { 'start': {'line': 0, 'character': 0}, 'end': {'line': 0, 'character': 0}, } self.diagnostics.append({ 'source': 'pyflakes', 'range': err_range, 'message': msg, 'severity': lsp.DiagnosticSeverity.Error, }) def syntaxError(self, _filename, msg, lineno, offset, text): # We've seen that lineno and offset can sometimes be None lineno = lineno or 1 offset = offset or 0 err_range = { 'start': {'line': lineno - 1, 'character': offset}, 'end': {'line': lineno - 1, 'character': offset + len(text)}, } self.diagnostics.append({ 'source': 'pyflakes', 'range': err_range, 'message': msg, 'severity': lsp.DiagnosticSeverity.Error, }) def flake(self, message): """ Get message like :: """ err_range = { 'start': {'line': message.lineno - 1, 'character': message.col}, 'end': {'line': message.lineno - 1, 'character': len(self.lines[message.lineno - 1])}, } severity = lsp.DiagnosticSeverity.Warning for message_type in PYFLAKES_ERROR_MESSAGES: if isinstance(message, message_type): severity = lsp.DiagnosticSeverity.Error break self.diagnostics.append({ 'source': 'pyflakes', 'range': err_range, 'message': message.message % message.message_args, 'severity': severity }) python-lsp-server-1.3.3/pylsp/plugins/pylint_lint.py000066400000000000000000000241371415567622500227320ustar00rootroot00000000000000# Copyright 2018 Google LLC. # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. """Linter plugin for pylint.""" import collections import logging import sys import re from subprocess import Popen, PIPE from pylint.epylint import py_run from pylsp import hookimpl, lsp try: import ujson as json except Exception: # pylint: disable=broad-except import json log = logging.getLogger(__name__) class PylintLinter: last_diags = collections.defaultdict(list) @classmethod def lint(cls, document, is_saved, flags=''): """Plugin interface to pylsp linter. Args: document: The document to be linted. is_saved: Whether or not the file has been saved to disk. flags: Additional flags to pass to pylint. Not exposed to pylsp_lint, but used for testing. Returns: A list of dicts with the following format: { 'source': 'pylint', 'range': { 'start': { 'line': start_line, 'character': start_column, }, 'end': { 'line': end_line, 'character': end_column, }, } 'message': msg, 'severity': lsp.DiagnosticSeverity.*, } """ if not is_saved: # Pylint can only be run on files that have been saved to disk. # Rather than return nothing, return the previous list of # diagnostics. If we return an empty list, any diagnostics we'd # previously shown will be cleared until the next save. 
Instead, # continue showing (possibly stale) diagnostics until the next # save. return cls.last_diags[document.path] # py_run will call shlex.split on its arguments, and shlex.split does # not handle Windows paths (it will try to perform escaping). Turn # backslashes into forward slashes first to avoid this issue. path = document.path if sys.platform.startswith('win'): path = path.replace('\\', '/') pylint_call = '{} -f json {}'.format(path, flags) log.debug("Calling pylint with '%s'", pylint_call) json_out, err = py_run(pylint_call, return_std=True) # Get strings json_out = json_out.getvalue() err = err.getvalue() if err != '': log.error("Error calling pylint: '%s'", err) # pylint prints nothing rather than [] when there are no diagnostics. # json.loads will not parse an empty string, so just return. if not json_out.strip(): cls.last_diags[document.path] = [] return [] # Pylint's JSON output is a list of objects with the following format. # # { # "obj": "main", # "path": "foo.py", # "message": "Missing function docstring", # "message-id": "C0111", # "symbol": "missing-docstring", # "column": 0, # "type": "convention", # "line": 5, # "module": "foo" # } # # The type can be any of: # # * convention # * error # * fatal # * refactor # * warning diagnostics = [] for diag in json.loads(json_out): # pylint lines index from 1, pylsp lines index from 0 line = diag['line'] - 1 err_range = { 'start': { 'line': line, # Index columns start from 0 'character': diag['column'], }, 'end': { 'line': line, # It's possible that we're linting an empty file. Even an empty # file might fail linting if it isn't named properly. 'character': len(document.lines[line]) if document.lines else 0, }, } if diag['type'] == 'convention': severity = lsp.DiagnosticSeverity.Information elif diag['type'] == 'error': severity = lsp.DiagnosticSeverity.Error elif diag['type'] == 'fatal': severity = lsp.DiagnosticSeverity.Error elif diag['type'] == 'refactor': severity = lsp.DiagnosticSeverity.Hint elif diag['type'] == 'warning': severity = lsp.DiagnosticSeverity.Warning diagnostics.append({ 'source': 'pylint', 'range': err_range, 'message': '[{}] {}'.format(diag['symbol'], diag['message']), 'severity': severity, 'code': diag['message-id'] }) cls.last_diags[document.path] = diagnostics return diagnostics def _build_pylint_flags(settings): """Build arguments for calling pylint.""" pylint_args = settings.get('args') if pylint_args is None: return '' return ' '.join(pylint_args) @hookimpl def pylsp_settings(): # Default pylint to disabled because it requires a config # file to be useful. return {'plugins': {'pylint': { 'enabled': False, 'args': [], # disabled by default as it can slow down the workflow 'executable': None, }}} @hookimpl def pylsp_lint(config, document, is_saved): """Run pylint linter.""" settings = config.plugin_settings('pylint') log.debug("Got pylint settings: %s", settings) # pylint >= 2.5.0 is required for working through stdin and only # available with python3 if settings.get('executable') and sys.version_info[0] >= 3: flags = build_args_stdio(settings) pylint_executable = settings.get('executable', 'pylint') return pylint_lint_stdin(pylint_executable, document, flags) flags = _build_pylint_flags(settings) return PylintLinter.lint(document, is_saved, flags=flags) def build_args_stdio(settings): """Build arguments for calling pylint. 
:param settings: client settings :type settings: dict :return: arguments to path to pylint :rtype: list """ pylint_args = settings.get('args') if pylint_args is None: return [] return pylint_args def pylint_lint_stdin(pylint_executable, document, flags): """Run pylint linter from stdin. This runs pylint in a subprocess with popen. This allows passing the file from stdin and as a result run pylint on unsaved files. Can slowdown the workflow. :param pylint_executable: path to pylint executable :type pylint_executable: string :param document: document to run pylint on :type document: pylsp.workspace.Document :param flags: arguments to path to pylint :type flags: list :return: linting diagnostics :rtype: list """ pylint_result = _run_pylint_stdio(pylint_executable, document, flags) return _parse_pylint_stdio_result(document, pylint_result) def _run_pylint_stdio(pylint_executable, document, flags): """Run pylint in popen. :param pylint_executable: path to pylint executable :type pylint_executable: string :param document: document to run pylint on :type document: pylsp.workspace.Document :param flags: arguments to path to pylint :type flags: list :return: result of calling pylint :rtype: string """ log.debug("Calling %s with args: '%s'", pylint_executable, flags) try: cmd = [pylint_executable] cmd.extend(flags) cmd.extend(['--from-stdin', document.path]) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) except IOError: log.debug("Can't execute %s. Trying with 'python -m pylint'", pylint_executable) cmd = ['python', '-m', 'pylint'] cmd.extend(flags) cmd.extend(['--from-stdin', document.path]) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) # pylint: disable=consider-using-with (stdout, stderr) = p.communicate(document.source.encode()) if stderr: log.error("Error while running pylint '%s'", stderr.decode()) return stdout.decode() def _parse_pylint_stdio_result(document, stdout): """Parse pylint results. :param document: document to run pylint on :type document: pylsp.workspace.Document :param stdout: pylint results to parse :type stdout: string :return: linting diagnostics :rtype: list """ diagnostics = [] lines = stdout.splitlines() for raw_line in lines: parsed_line = re.match(r'(.*):(\d*):(\d*): (\w*): (.*)', raw_line) if not parsed_line: log.debug("Pylint output parser can't parse line '%s'", raw_line) continue parsed_line = parsed_line.groups() if len(parsed_line) != 5: log.debug("Pylint output parser can't parse line '%s'", raw_line) continue _, line, character, code, msg = parsed_line line = int(line) - 1 character = int(character) severity_map = { 'C': lsp.DiagnosticSeverity.Information, 'E': lsp.DiagnosticSeverity.Error, 'F': lsp.DiagnosticSeverity.Error, 'I': lsp.DiagnosticSeverity.Information, 'R': lsp.DiagnosticSeverity.Hint, 'W': lsp.DiagnosticSeverity.Warning, } severity = severity_map[code[0]] diagnostics.append( { 'source': 'pylint', 'code': code, 'range': { 'start': { 'line': line, 'character': character }, 'end': { 'line': line, # no way to determine the column 'character': len(document.lines[line]) - 1 } }, 'message': msg, 'severity': severity, } ) return diagnostics python-lsp-server-1.3.3/pylsp/plugins/references.py000066400000000000000000000017011415567622500224760ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
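# Illustrative sketch: what the stdout parser in pylint_lint above pulls out of
# one line of pylint's text output. The sample line is hypothetical; the regex
# and the 1-based-to-0-based conversion mirror _parse_pylint_stdio_result.
import re

_sample = "foo.py:10:4: C0111: Missing function docstring (missing-docstring)"
path, line, character, code, msg = re.match(
    r'(.*):(\d*):(\d*): (\w*): (.*)', _sample).groups()
assert (int(line) - 1, int(character)) == (9, 4)  # pylint counts lines from 1, LSP from 0
assert code[0] == 'C'                             # first letter selects the severity (Information)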
import logging from pylsp import hookimpl, uris, _utils log = logging.getLogger(__name__) @hookimpl def pylsp_references(document, position, exclude_declaration=False): code_position = _utils.position_to_jedi_linecolumn(document, position) usages = document.jedi_script().get_references(**code_position) if exclude_declaration: # Filter out if the usage is the actual declaration of the thing usages = [d for d in usages if not d.is_definition()] # Filter out builtin modules return [{ 'uri': uris.uri_with(document.uri, path=str(d.module_path)) if d.module_path else document.uri, 'range': { 'start': {'line': d.line - 1, 'character': d.column}, 'end': {'line': d.line - 1, 'character': d.column + len(d.name)} } } for d in usages if not d.in_builtin_module()] python-lsp-server-1.3.3/pylsp/plugins/rope_completion.py000066400000000000000000000120311415567622500235510ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging from rope.contrib.codeassist import code_assist, sorted_proposals from pylsp import hookimpl, lsp log = logging.getLogger(__name__) @hookimpl def pylsp_settings(): # Default rope_completion to disabled return {'plugins': {'rope_completion': {'enabled': False, 'eager': False}}} def _resolve_completion(completion, data): # pylint: disable=broad-except try: doc = data.get_doc() except Exception as e: log.debug("Failed to resolve Rope completion: %s", e) doc = "" completion['detail'] = '{0} {1}'.format(data.scope or "", data.name) completion['documentation'] = doc return completion @hookimpl def pylsp_completions(config, workspace, document, position): # pylint: disable=too-many-locals settings = config.plugin_settings('rope_completion', document_path=document.path) resolve_eagerly = settings.get('eager', False) # Rope is a bit rubbish at completing module imports, so we'll return None word = document.word_at_position({ # The -1 should really be trying to look at the previous word, but that might be quite expensive # So we only skip import completions when the cursor is one space after `import` 'line': position['line'], 'character': max(position['character'] - 1, 0), }) if word == 'import': return None offset = document.offset_at_position(position) rope_config = config.settings(document_path=document.path).get('rope', {}) rope_project = workspace._rope_project_builder(rope_config) document_rope = document._rope_resource(rope_config) try: definitions = code_assist(rope_project, document.source, offset, document_rope, maxfixes=3) except Exception as e: # pylint: disable=broad-except log.debug("Failed to run Rope code assist: %s", e) return [] definitions = sorted_proposals(definitions) new_definitions = [] for d in definitions: item = { 'label': d.name, 'kind': _kind(d), 'sortText': _sort_text(d), 'data': { 'doc_uri': document.uri } } if resolve_eagerly: item = _resolve_completion(item, d) new_definitions.append(item) # most recently retrieved completion items, used for resolution document.shared_data['LAST_ROPE_COMPLETIONS'] = { # label is the only required property; here it is assumed to be unique completion['label']: (completion, data) for completion, data in zip(new_definitions, definitions) } definitions = new_definitions return definitions or None @hookimpl def pylsp_completion_item_resolve(completion_item, document): """Resolve formatted completion for given non-resolved completion""" shared_data = document.shared_data['LAST_ROPE_COMPLETIONS'].get(completion_item['label']) if shared_data: 
completion, data = shared_data return _resolve_completion(completion, data) return completion_item def _sort_text(definition): """ Ensure builtins appear at the bottom. Description is of format : . """ if definition.name.startswith("_"): # It's a 'hidden' func, put it next last return 'z' + definition.name if definition.scope == 'builtin': return 'y' + definition.name # Else put it at the front return 'a' + definition.name def _kind(d): """ Return the LSP type """ MAP = { 'none': lsp.CompletionItemKind.Value, 'type': lsp.CompletionItemKind.Class, 'tuple': lsp.CompletionItemKind.Class, 'dict': lsp.CompletionItemKind.Class, 'dictionary': lsp.CompletionItemKind.Class, 'function': lsp.CompletionItemKind.Function, 'lambda': lsp.CompletionItemKind.Function, 'generator': lsp.CompletionItemKind.Function, 'class': lsp.CompletionItemKind.Class, 'instance': lsp.CompletionItemKind.Reference, 'method': lsp.CompletionItemKind.Method, 'builtin': lsp.CompletionItemKind.Class, 'builtinfunction': lsp.CompletionItemKind.Function, 'module': lsp.CompletionItemKind.Module, 'file': lsp.CompletionItemKind.File, 'xrange': lsp.CompletionItemKind.Class, 'slice': lsp.CompletionItemKind.Class, 'traceback': lsp.CompletionItemKind.Class, 'frame': lsp.CompletionItemKind.Class, 'buffer': lsp.CompletionItemKind.Class, 'dictproxy': lsp.CompletionItemKind.Class, 'funcdef': lsp.CompletionItemKind.Function, 'property': lsp.CompletionItemKind.Property, 'import': lsp.CompletionItemKind.Module, 'keyword': lsp.CompletionItemKind.Keyword, 'constant': lsp.CompletionItemKind.Variable, 'variable': lsp.CompletionItemKind.Variable, 'value': lsp.CompletionItemKind.Value, 'param': lsp.CompletionItemKind.Variable, 'statement': lsp.CompletionItemKind.Keyword, } return MAP.get(d.type) python-lsp-server-1.3.3/pylsp/plugins/rope_rename.py000066400000000000000000000035531415567622500226600ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging from rope.base import libutils from rope.refactor.rename import Rename from pylsp import hookimpl, uris log = logging.getLogger(__name__) @hookimpl def pylsp_settings(): # Default rope_rename to disabled return {'plugins': {'rope_rename': {'enabled': False}}} @hookimpl def pylsp_rename(config, workspace, document, position, new_name): rope_config = config.settings(document_path=document.path).get('rope', {}) rope_project = workspace._rope_project_builder(rope_config) rename = Rename( rope_project, libutils.path_to_resource(rope_project, document.path), document.offset_at_position(position) ) log.debug("Executing rename of %s to %s", document.word_at_position(position), new_name) changeset = rename.get_changes(new_name, in_hierarchy=True, docs=True) log.debug("Finished rename: %s", changeset.changes) changes = [] for change in changeset.changes: uri = uris.from_fs_path(change.resource.path) doc = workspace.get_maybe_document(uri) changes.append({ 'textDocument': { 'uri': uri, 'version': doc.version if doc else None }, 'edits': [ { 'range': { 'start': {'line': 0, 'character': 0}, 'end': { 'line': _num_lines(change.resource), 'character': 0, }, }, 'newText': change.new_contents, } ] }) return {'documentChanges': changes} def _num_lines(resource): "Count the number of lines in a `File` resource." return len(resource.read().splitlines()) python-lsp-server-1.3.3/pylsp/plugins/signature.py000066400000000000000000000036111415567622500223600ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. 
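# Illustrative sketch: pulling a parameter description out of a ":param x: ..."
# docstring line with a Sphinx-style pattern, which is the approach the
# signature-help plugin below takes. The exact group names and the sample
# docstring line here are assumptions made for the example.
import re

_SPHINX_EXAMPLE = re.compile(r"\s*:param\s+(?P<param>\w+):\s*(?P<doc>[^\n]+)")
_match = _SPHINX_EXAMPLE.match("    :param timeout: seconds to wait before giving up")
assert _match.group("param") == "timeout"
assert _match.group("doc") == "seconds to wait before giving up"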
# Copyright 2021- Python Language Server Contributors. import logging import re from pylsp import hookimpl, _utils log = logging.getLogger(__name__) SPHINX = re.compile(r"\s*:param\s+(?P\w+):\s*(?P[^\n]+)") EPYDOC = re.compile(r"\s*@param\s+(?P\w+):\s*(?P[^\n]+)") GOOGLE = re.compile(r"\s*(?P\w+).*:\s*(?P[^\n]+)") DOC_REGEX = [SPHINX, EPYDOC, GOOGLE] @hookimpl def pylsp_signature_help(document, position): code_position = _utils.position_to_jedi_linecolumn(document, position) signatures = document.jedi_script().get_signatures(**code_position) if not signatures: return {'signatures': []} s = signatures[0] # Docstring contains one or more lines of signature, followed by empty line, followed by docstring function_sig_lines = (s.docstring().split('\n\n') or [''])[0].splitlines() function_sig = ' '.join([line.strip() for line in function_sig_lines]) sig = { 'label': function_sig, 'documentation': _utils.format_docstring(s.docstring(raw=True)) } # If there are params, add those if s.params: sig['parameters'] = [{ 'label': p.name, 'documentation': _param_docs(s.docstring(), p.name) } for p in s.params] # We only return a single signature because Python doesn't allow overloading sig_info = {'signatures': [sig], 'activeSignature': 0} if s.index is not None and s.params: # Then we know which parameter we're looking at sig_info['activeParameter'] = s.index return sig_info def _param_docs(docstring, param_name): for line in docstring.splitlines(): for regex in DOC_REGEX: m = regex.match(line) if not m: continue if m.group('param') != param_name: continue return m.group('doc') or "" python-lsp-server-1.3.3/pylsp/plugins/symbols.py000066400000000000000000000172571415567622500220620ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging import os from pylsp import hookimpl from pylsp.lsp import SymbolKind log = logging.getLogger(__name__) @hookimpl def pylsp_document_symbols(config, document): # pylint: disable=broad-except # pylint: disable=too-many-nested-blocks # pylint: disable=too-many-locals # pylint: disable=too-many-branches # pylint: disable=too-many-statements symbols_settings = config.plugin_settings('jedi_symbols') all_scopes = symbols_settings.get('all_scopes', True) add_import_symbols = symbols_settings.get('include_import_symbols', True) definitions = document.jedi_names(all_scopes=all_scopes) symbols = [] exclude = set({}) redefinitions = {} while definitions != []: d = definitions.pop(0) # Skip symbols imported from other modules. if not add_import_symbols: # Skip if there's an import in the code the symbol is defined. code = d.get_line_code() if ' import ' in code or 'import ' in code: continue # Skip imported symbols comparing module names. sym_full_name = d.full_name document_dot_path = document.dot_path if sym_full_name is not None: # We assume a symbol is imported from another module to start # with. imported_symbol = True # The last element of sym_full_name is the symbol itself, so # we need to discard it to do module comparisons below. if '.' in sym_full_name: sym_module_name = sym_full_name.rpartition('.')[0] # This is necessary to display symbols in init files (the checks # below fail without it). if document_dot_path.endswith('__init__'): document_dot_path = document_dot_path.rpartition('.')[0] # document_dot_path is the module where the symbol is imported, # whereas sym_module_name is the one where it was declared. 
if sym_module_name.startswith(document_dot_path): # If sym_module_name starts with the same string as document_dot_path, # we can safely assume it was declared in the document. imported_symbol = False elif sym_module_name.split('.')[0] in document_dot_path.split('.'): # If the first module in sym_module_name is one of the modules in # document_dot_path, we need to check if sym_module_name starts # with the modules in document_dot_path. document_mods = document_dot_path.split('.') for i in range(1, len(document_mods) + 1): submod = '.'.join(document_mods[-i:]) if sym_module_name.startswith(submod): imported_symbol = False break # When there's no __init__.py next to a file or in one of its # parents, the checks above fail. However, Jedi has a nice way # to tell if the symbol was declared in the same file: if # full_name starts by __main__. if imported_symbol: if not sym_module_name.startswith('__main__'): continue try: docismodule = os.path.samefile(document.path, d.module_path) except (TypeError, FileNotFoundError): # Python 2 on Windows has no .samefile, but then these are # strings for sure docismodule = document.path == d.module_path if _include_def(d) and docismodule: tuple_range = _tuple_range(d) if tuple_range in exclude: continue kind = redefinitions.get(tuple_range, None) if kind is not None: exclude |= {tuple_range} if d.type == 'statement': if d.description.startswith('self'): kind = 'field' symbol = { 'name': d.name, 'containerName': _container(d), 'location': { 'uri': document.uri, 'range': _range(d), }, 'kind': _kind(d) if kind is None else _SYMBOL_KIND_MAP[kind], } symbols.append(symbol) if d.type == 'class': try: defined_names = list(d.defined_names()) for method in defined_names: if method.type == 'function': redefinitions[_tuple_range(method)] = 'method' elif method.type == 'statement': redefinitions[_tuple_range(method)] = 'field' else: redefinitions[_tuple_range(method)] = method.type definitions = list(defined_names) + definitions except Exception: pass return symbols def _include_def(definition): return ( # Don't tend to include parameters as symbols definition.type != 'param' and # Unused vars should also be skipped definition.name != '_' and _kind(definition) is not None ) def _container(definition): try: # Jedi sometimes fails here. parent = definition.parent() # Here we check that a grand-parent exists to avoid declaring symbols # as children of the module. 
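# (A module-level name's parent is the module itself, which has no parent of
# its own, so such names fall through below and get a containerName of None.)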
if parent.parent(): return parent.name except: # pylint: disable=bare-except return None return None def _range(definition): # This gets us more accurate end position definition = definition._name.tree_name.get_definition() (start_line, start_column) = definition.start_pos (end_line, end_column) = definition.end_pos return { 'start': {'line': start_line - 1, 'character': start_column}, 'end': {'line': end_line - 1, 'character': end_column} } def _tuple_range(definition): definition = definition._name.tree_name.get_definition() return (definition.start_pos, definition.end_pos) _SYMBOL_KIND_MAP = { 'none': SymbolKind.Variable, 'type': SymbolKind.Class, 'tuple': SymbolKind.Class, 'dict': SymbolKind.Class, 'dictionary': SymbolKind.Class, 'function': SymbolKind.Function, 'lambda': SymbolKind.Function, 'generator': SymbolKind.Function, 'class': SymbolKind.Class, 'instance': SymbolKind.Class, 'method': SymbolKind.Method, 'builtin': SymbolKind.Class, 'builtinfunction': SymbolKind.Function, 'module': SymbolKind.Module, 'file': SymbolKind.File, 'xrange': SymbolKind.Array, 'slice': SymbolKind.Class, 'traceback': SymbolKind.Class, 'frame': SymbolKind.Class, 'buffer': SymbolKind.Array, 'dictproxy': SymbolKind.Class, 'funcdef': SymbolKind.Function, 'property': SymbolKind.Property, 'import': SymbolKind.Module, 'keyword': SymbolKind.Variable, 'constant': SymbolKind.Constant, 'variable': SymbolKind.Variable, 'value': SymbolKind.Variable, 'param': SymbolKind.Variable, 'statement': SymbolKind.Variable, 'boolean': SymbolKind.Boolean, 'int': SymbolKind.Number, 'longlean': SymbolKind.Number, 'float': SymbolKind.Number, 'complex': SymbolKind.Number, 'string': SymbolKind.String, 'unicode': SymbolKind.String, 'list': SymbolKind.Array, 'field': SymbolKind.Field } def _kind(d): """ Return the VSCode Symbol Type """ return _SYMBOL_KIND_MAP.get(d.type) python-lsp-server-1.3.3/pylsp/plugins/yapf_format.py000066400000000000000000000033151415567622500226670ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import logging import os from yapf.yapflib import file_resources from yapf.yapflib.yapf_api import FormatCode from pylsp import hookimpl log = logging.getLogger(__name__) @hookimpl def pylsp_format_document(document): return _format(document) @hookimpl def pylsp_format_range(document, range): # pylint: disable=redefined-builtin # First we 'round' the range up/down to full lines only range['start']['character'] = 0 range['end']['line'] += 1 range['end']['character'] = 0 # From Yapf docs: # lines: (list of tuples of integers) A list of tuples of lines, [start, end], # that we want to format. The lines are 1-based indexed. It can be used by # third-party code (e.g., IDEs) when reformatting a snippet of code rather # than a whole file. # Add 1 for 1-indexing vs LSP's 0-indexing lines = [(range['start']['line'] + 1, range['end']['line'] + 1)] return _format(document, lines=lines) def _format(document, lines=None): new_source, changed = FormatCode( document.source, lines=lines, filename=document.filename, style_config=file_resources.GetDefaultStyleForDir( os.path.dirname(document.path) ) ) if not changed: return [] # I'm too lazy at the moment to parse diffs into TextEdit items # So let's just return the entire file... 
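# The range below runs from line 0 to one line past the end of the document,
# so the single TextEdit effectively replaces the whole buffer with the
# formatted source.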
return [{ 'range': { 'start': {'line': 0, 'character': 0}, # End char 0 of the line after our document 'end': {'line': len(document.lines), 'character': 0} }, 'newText': new_source }] python-lsp-server-1.3.3/pylsp/python_lsp.py000066400000000000000000000463611415567622500211060ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. from functools import partial import logging import os import socketserver import threading from pylsp_jsonrpc.dispatchers import MethodDispatcher from pylsp_jsonrpc.endpoint import Endpoint from pylsp_jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter from . import lsp, _utils, uris from .config import config from .workspace import Workspace from ._version import __version__ log = logging.getLogger(__name__) LINT_DEBOUNCE_S = 0.5 # 500 ms PARENT_PROCESS_WATCH_INTERVAL = 10 # 10 s MAX_WORKERS = 64 PYTHON_FILE_EXTENSIONS = ('.py', '.pyi') CONFIG_FILEs = ('pycodestyle.cfg', 'setup.cfg', 'tox.ini', '.flake8') class _StreamHandlerWrapper(socketserver.StreamRequestHandler): """A wrapper class that is used to construct a custom handler class.""" delegate = None def setup(self): super().setup() self.delegate = self.DELEGATE_CLASS(self.rfile, self.wfile) def handle(self): try: self.delegate.start() except OSError as e: if os.name == 'nt': # Catch and pass on ConnectionResetError when parent process # dies # pylint: disable=no-member, undefined-variable if isinstance(e, WindowsError) and e.winerror == 10054: pass self.SHUTDOWN_CALL() def start_tcp_lang_server(bind_addr, port, check_parent_process, handler_class): if not issubclass(handler_class, PythonLSPServer): raise ValueError('Handler class must be an instance of PythonLSPServer') def shutdown_server(check_parent_process, *args): # pylint: disable=unused-argument if check_parent_process: log.debug('Shutting down server') # Shutdown call must be done on a thread, to prevent deadlocks stop_thread = threading.Thread(target=server.shutdown) stop_thread.start() # Construct a custom wrapper class around the user's handler_class wrapper_class = type( handler_class.__name__ + 'Handler', (_StreamHandlerWrapper,), {'DELEGATE_CLASS': partial(handler_class, check_parent_process=check_parent_process), 'SHUTDOWN_CALL': partial(shutdown_server, check_parent_process)} ) server = socketserver.TCPServer((bind_addr, port), wrapper_class, bind_and_activate=False) server.allow_reuse_address = True try: server.server_bind() server.server_activate() log.info('Serving %s on (%s, %s)', handler_class.__name__, bind_addr, port) server.serve_forever() finally: log.info('Shutting down') server.server_close() def start_io_lang_server(rfile, wfile, check_parent_process, handler_class): if not issubclass(handler_class, PythonLSPServer): raise ValueError('Handler class must be an instance of PythonLSPServer') log.info('Starting %s IO language server', handler_class.__name__) server = handler_class(rfile, wfile, check_parent_process) server.start() class PythonLSPServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) 
self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super().__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def _hook(self, hook_name, doc_uri=None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" workspace = self._match_uri_to_workspace(doc_uri) doc = workspace.get_document(doc_uri) if doc_uri else None hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins) return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs) def capabilities(self): server_capabilities = { 'codeActionProvider': True, 'codeLensProvider': { 'resolveProvider': False, # We may need to make this configurable }, 'completionProvider': { 'resolveProvider': True, # We could know everything ahead of time, but this takes time to transfer 'triggerCharacters': ['.'], }, 'documentFormattingProvider': True, 'documentHighlightProvider': True, 'documentRangeFormattingProvider': True, 'documentSymbolProvider': True, 'definitionProvider': True, 'executeCommandProvider': { 'commands': flatten(self._hook('pylsp_commands')) }, 'hoverProvider': True, 'referencesProvider': True, 'renameProvider': True, 'foldingRangeProvider': True, 'signatureHelpProvider': { 'triggerCharacters': ['(', ',', '='] }, 'textDocumentSync': { 'change': lsp.TextDocumentSyncKind.INCREMENTAL, 'save': { 'includeText': True, }, 'openClose': True, }, 'workspace': { 'workspaceFolders': { 'supported': True, 'changeNotifications': True } }, 'experimental': merge( self._hook('pylsp_experimental_capabilities')) } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, workspaceFolders=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path(rootPath) if rootPath is not None else '' self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {})) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace if workspaceFolders: for folder in workspaceFolders: uri = folder['uri'] if uri == rootUri: # Already created continue workspace_config = config.Config( uri, self.config._init_opts, self.config._process_id, 
self.config._capabilities) workspace_config.update(self.config._settings) self.workspaces[uri] = Workspace( uri, self._endpoint, workspace_config) self._dispatchers = self._hook('pylsp_dispatchers') self._hook('pylsp_initialize') if self._check_parent_process and processId is not None and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive, exiting!", pid) self.m_exit() else: threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() self.watching_thread = threading.Thread(target=watch_parent_process, args=(processId,)) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return { 'capabilities': self.capabilities(), 'serverInfo': { 'name': 'pylsp', 'version': __version__, }, } def m_initialized(self, **_kwargs): self._hook('pylsp_initialized') def code_actions(self, doc_uri, range, context): return flatten(self._hook('pylsp_code_actions', doc_uri, range=range, context=context)) def code_lens(self, doc_uri): return flatten(self._hook('pylsp_code_lens', doc_uri)) def completions(self, doc_uri, position): completions = self._hook('pylsp_completions', doc_uri, position=position) return { 'isIncomplete': False, 'items': flatten(completions) } def completion_item_resolve(self, completion_item): doc_uri = completion_item.get('data', {}).get('doc_uri', None) return self._hook('pylsp_completion_item_resolve', doc_uri, completion_item=completion_item) def definitions(self, doc_uri, position): return flatten(self._hook('pylsp_definitions', doc_uri, position=position)) def document_symbols(self, doc_uri): return flatten(self._hook('pylsp_document_symbols', doc_uri)) def execute_command(self, command, arguments): return self._hook('pylsp_execute_command', command=command, arguments=arguments) def format_document(self, doc_uri): return self._hook('pylsp_format_document', doc_uri) def format_range(self, doc_uri, range): return self._hook('pylsp_format_range', doc_uri, range=range) def highlight(self, doc_uri, position): return flatten(self._hook('pylsp_document_highlight', doc_uri, position=position)) or None def hover(self, doc_uri, position): return self._hook('pylsp_hover', doc_uri, position=position) or {'contents': ''} @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') def lint(self, doc_uri, is_saved): # Since we're debounced, the document may no longer be open workspace = self._match_uri_to_workspace(doc_uri) if doc_uri in workspace.documents: workspace.publish_diagnostics( doc_uri, flatten(self._hook('pylsp_lint', doc_uri, is_saved=is_saved)) ) def references(self, doc_uri, position, exclude_declaration): return flatten(self._hook( 'pylsp_references', doc_uri, position=position, exclude_declaration=exclude_declaration )) def rename(self, doc_uri, position, new_name): return self._hook('pylsp_rename', doc_uri, position=position, new_name=new_name) def signature_help(self, doc_uri, position): return self._hook('pylsp_signature_help', doc_uri, position=position) def folding(self, doc_uri): return flatten(self._hook('pylsp_folding_range', doc_uri)) def m_completion_item__resolve(self, **completionItem): return self.completion_item_resolve(completionItem) def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = 
self._match_uri_to_workspace(textDocument['uri']) workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) self._hook('pylsp_document_did_open', textDocument['uri']) self.lint(textDocument['uri'], is_saved=True) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) for change in contentChanges: workspace.update_document( textDocument['uri'], change, version=textDocument.get('version') ) self.lint(textDocument['uri'], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument['uri'], is_saved=True) def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): return self.code_actions(textDocument['uri'], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): return self.code_lens(textDocument['uri']) def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): return self.completions(textDocument['uri'], position) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): return self.definitions(textDocument['uri'], position) def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs): return self.highlight(textDocument['uri'], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): return self.hover(textDocument['uri'], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): return self.document_symbols(textDocument['uri']) def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs): # For now we're ignoring formatting options. return self.format_document(textDocument['uri']) def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): return self.rename(textDocument['uri'], position, newName) def m_text_document__folding_range(self, textDocument=None, **_kwargs): return self.folding(textDocument['uri']) def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs): # Again, we'll ignore formatting options for now. 
return self.format_range(textDocument['uri'], range) def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): exclude_declaration = not context['includeDeclaration'] return self.references(textDocument['uri'], position, exclude_declaration) def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): return self.signature_help(textDocument['uri'], position) def m_workspace__did_change_configuration(self, settings=None): if self.config is not None: self.config.update((settings or {}).get('pylsp', {})) for workspace in self.workspaces.values(): workspace.update_config(settings) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # pylint: disable=too-many-locals if event is None: return added = event.get('added', []) removed = event.get('removed', []) for removed_info in removed: if 'uri' in removed_info: removed_uri = removed_info['uri'] self.workspaces.pop(removed_uri, None) for added_info in added: if 'uri' in added_info: added_uri = added_info['uri'] workspace_config = config.Config( added_uri, self.config._init_opts, self.config._process_id, self.config._capabilities) workspace_config.update(self.config._settings) self.workspaces[added_uri] = Workspace( added_uri, self._endpoint, workspace_config) root_workspace_removed = any(removed_info['uri'] == self.root_uri for removed_info in removed) workspace_added = len(added) > 0 and 'uri' in added[0] if root_workspace_removed and workspace_added: added_uri = added[0]['uri'] self.root_uri = added_uri new_root_workspace = self.workspaces[added_uri] self.config = new_root_workspace._config self.workspace = new_root_workspace elif root_workspace_removed: # NOTE: Removing the root workspace can only happen when the server # is closed, thus the else condition of this if can never happen. if self.workspaces: log.debug('Root workspace deleted!') available_workspaces = sorted(self.workspaces) first_workspace = available_workspaces[0] new_root_workspace = self.workspaces[first_workspace] self.root_uri = first_workspace self.config = new_root_workspace._config self.workspace = new_root_workspace # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in (changes or []): if d['uri'].endswith(PYTHON_FILE_EXTENSIONS): changed_py_files.add(d['uri']) elif d['uri'].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. 
return for workspace in self.workspaces.values(): for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False) def m_workspace__execute_command(self, command=None, arguments=None): return self.execute_command(command, arguments) def flatten(list_of_lists): return [item for lst in list_of_lists for item in lst] def merge(list_of_dicts): return {k: v for dictionary in list_of_dicts for k, v in dictionary.items()} python-lsp-server-1.3.3/pylsp/uris.py000066400000000000000000000072151415567622500176640ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. """A collection of URI utilities with logic built on the VSCode URI library. https://github.com/Microsoft/vscode-uri/blob/e59cab84f5df6265aed18ae5f43552d3eef13bb9/lib/index.ts """ import re from urllib import parse from pylsp import IS_WIN RE_DRIVE_LETTER_PATH = re.compile(r'^\/[a-zA-Z]:') def urlparse(uri): """Parse and decode the parts of a URI.""" scheme, netloc, path, params, query, fragment = parse.urlparse(uri) return ( parse.unquote(scheme), parse.unquote(netloc), parse.unquote(path), parse.unquote(params), parse.unquote(query), parse.unquote(fragment) ) def urlunparse(parts): """Unparse and encode parts of a URI.""" scheme, netloc, path, params, query, fragment = parts # Avoid encoding the windows drive letter colon if RE_DRIVE_LETTER_PATH.match(path): quoted_path = path[:3] + parse.quote(path[3:]) else: quoted_path = parse.quote(path) return parse.urlunparse(( parse.quote(scheme), parse.quote(netloc), quoted_path, parse.quote(params), parse.quote(query), parse.quote(fragment) )) def to_fs_path(uri): """Returns the filesystem path of the given URI. Will handle UNC paths and normalize windows drive letters to lower-case. Also uses the platform specific path separator. Will *not* validate the path for invalid characters and semantics. Will *not* look at the scheme of this URI. """ # scheme://netloc/path;parameters?query#fragment scheme, netloc, path, _params, _query, _fragment = urlparse(uri) if netloc and path and scheme == 'file': # unc path: file://shares/c$/far/boo value = "//{}{}".format(netloc, path) elif RE_DRIVE_LETTER_PATH.match(path): # windows drive letter: file:///C:/far/boo value = path[1].lower() + path[2:] else: # Other path value = path if IS_WIN: value = value.replace('/', '\\') return value def from_fs_path(path): """Returns a URI for the given filesystem path.""" scheme = 'file' params, query, fragment = '', '', '' path, netloc = _normalize_win_path(path) return urlunparse((scheme, netloc, path, params, query, fragment)) def uri_with(uri, scheme=None, netloc=None, path=None, params=None, query=None, fragment=None): """Return a URI with the given part(s) replaced. Parts are decoded / encoded. 
""" old_scheme, old_netloc, old_path, old_params, old_query, old_fragment = urlparse(uri) path, _netloc = _normalize_win_path(path) return urlunparse(( scheme or old_scheme, netloc or old_netloc, path or old_path, params or old_params, query or old_query, fragment or old_fragment )) def _normalize_win_path(path): netloc = '' # normalize to fwd-slashes on windows, # on other systems bwd-slaches are valid # filename character, eg /f\oo/ba\r.txt if IS_WIN: path = path.replace('\\', '/') # check for authority as used in UNC shares # or use the path as given if path[:2] == '//': idx = path.index('/', 2) if idx == -1: netloc = path[2:] else: netloc = path[2:idx] path = path[idx:] # Ensure that path starts with a slash # or that it is at least a slash if not path.startswith('/'): path = '/' + path # Normalize drive paths to lower case if RE_DRIVE_LETTER_PATH.match(path): path = path[0] + path[1].lower() + path[2:] return path, netloc python-lsp-server-1.3.3/pylsp/workspace.py000066400000000000000000000251611415567622500207000ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import io import logging import os import re import functools from threading import RLock import jedi from . import lsp, uris, _utils log = logging.getLogger(__name__) # TODO: this is not the best e.g. we capture numbers RE_START_WORD = re.compile('[A-Za-z_0-9]*$') RE_END_WORD = re.compile('^[A-Za-z_0-9]*') def lock(method): """Define an atomic region over a method.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): with self._lock: return method(self, *args, **kwargs) return wrapper class Workspace: M_PUBLISH_DIAGNOSTICS = 'textDocument/publishDiagnostics' M_APPLY_EDIT = 'workspace/applyEdit' M_SHOW_MESSAGE = 'window/showMessage' def __init__(self, root_uri, endpoint, config=None): self._config = config self._root_uri = root_uri self._endpoint = endpoint self._root_uri_scheme = uris.urlparse(self._root_uri)[0] self._root_path = uris.to_fs_path(self._root_uri) self._docs = {} # Cache jedi environments self._environments = {} # Whilst incubating, keep rope private self.__rope = None self.__rope_config = None def _rope_project_builder(self, rope_config): # pylint: disable=import-outside-toplevel from rope.base.project import Project # TODO: we could keep track of dirty files and validate only those if self.__rope is None or self.__rope_config != rope_config: rope_folder = rope_config.get('ropeFolder') self.__rope = Project(self._root_path, ropefolder=rope_folder) self.__rope.prefs.set('extension_modules', rope_config.get('extensionModules', [])) self.__rope.prefs.set('ignore_syntax_errors', True) self.__rope.prefs.set('ignore_bad_imports', True) self.__rope.validate() return self.__rope @property def documents(self): return self._docs @property def root_path(self): return self._root_path @property def root_uri(self): return self._root_uri def is_local(self): return (self._root_uri_scheme in ['', 'file']) and os.path.exists(self._root_path) def get_document(self, doc_uri): """Return a managed document if-present, else create one pointing at disk. 
See https://github.com/Microsoft/language-server-protocol/issues/177 """ return self._docs.get(doc_uri) or self._create_document(doc_uri) def get_maybe_document(self, doc_uri): return self._docs.get(doc_uri) def put_document(self, doc_uri, source, version=None): self._docs[doc_uri] = self._create_document(doc_uri, source=source, version=version) def rm_document(self, doc_uri): self._docs.pop(doc_uri) def update_document(self, doc_uri, change, version=None): self._docs[doc_uri].apply_change(change) self._docs[doc_uri].version = version def update_config(self, settings): self._config.update((settings or {}).get('pylsp', {})) for doc_uri in self.documents: self.get_document(doc_uri).update_config(settings) def apply_edit(self, edit): return self._endpoint.request(self.M_APPLY_EDIT, {'edit': edit}) def publish_diagnostics(self, doc_uri, diagnostics): self._endpoint.notify(self.M_PUBLISH_DIAGNOSTICS, params={'uri': doc_uri, 'diagnostics': diagnostics}) def show_message(self, message, msg_type=lsp.MessageType.Info): self._endpoint.notify(self.M_SHOW_MESSAGE, params={'type': msg_type, 'message': message}) def source_roots(self, document_path): """Return the source roots for the given document.""" files = _utils.find_parents(self._root_path, document_path, ['setup.py', 'pyproject.toml']) or [] return list({os.path.dirname(project_file) for project_file in files}) or [self._root_path] def _create_document(self, doc_uri, source=None, version=None): path = uris.to_fs_path(doc_uri) return Document( doc_uri, self, source=source, version=version, extra_sys_path=self.source_roots(path), rope_project_builder=self._rope_project_builder, ) class Document: def __init__(self, uri, workspace, source=None, version=None, local=True, extra_sys_path=None, rope_project_builder=None): self.uri = uri self.version = version self.path = uris.to_fs_path(uri) self.dot_path = _utils.path_to_dot_name(self.path) self.filename = os.path.basename(self.path) self.shared_data = {} self._config = workspace._config self._workspace = workspace self._local = local self._source = source self._extra_sys_path = extra_sys_path or [] self._rope_project_builder = rope_project_builder self._lock = RLock() def __str__(self): return str(self.uri) def _rope_resource(self, rope_config): # pylint: disable=import-outside-toplevel from rope.base import libutils return libutils.path_to_resource(self._rope_project_builder(rope_config), self.path) @property @lock def lines(self): return self.source.splitlines(True) @property @lock def source(self): if self._source is None: with io.open(self.path, 'r', encoding='utf-8') as f: return f.read() return self._source def update_config(self, settings): self._config.update((settings or {}).get('pylsp', {})) @lock def apply_change(self, change): """Apply a change to the document.""" text = change['text'] change_range = change.get('range') if not change_range: # The whole file has changed self._source = text return start_line = change_range['start']['line'] start_col = change_range['start']['character'] end_line = change_range['end']['line'] end_col = change_range['end']['character'] # Check for an edit occuring at the very end of the file if start_line == len(self.lines): self._source = self.source + text return new = io.StringIO() # Iterate over the existing document until we hit the edit range, # at which point we write the new text, then loop until we hit # the end of the range and continue writing. 
for i, line in enumerate(self.lines): if i < start_line: new.write(line) continue if i > end_line: new.write(line) continue if i == start_line: new.write(line[:start_col]) new.write(text) if i == end_line: new.write(line[end_col:]) self._source = new.getvalue() def offset_at_position(self, position): """Return the byte-offset pointed at by the given position.""" return position['character'] + len(''.join(self.lines[:position['line']])) def word_at_position(self, position): """Get the word under the cursor returning the start and end positions.""" if position['line'] >= len(self.lines): return '' line = self.lines[position['line']] i = position['character'] # Split word in two start = line[:i] end = line[i:] # Take end of start and start of end to find word # These are guaranteed to match, even if they match the empty string m_start = RE_START_WORD.findall(start) m_end = RE_END_WORD.findall(end) return m_start[0] + m_end[-1] @lock def jedi_names(self, all_scopes=False, definitions=True, references=False): script = self.jedi_script() return script.get_names(all_scopes=all_scopes, definitions=definitions, references=references) @lock def jedi_script(self, position=None, use_document_path=False): extra_paths = [] environment_path = None env_vars = None if self._config: jedi_settings = self._config.plugin_settings('jedi', document_path=self.path) environment_path = jedi_settings.get('environment') extra_paths = jedi_settings.get('extra_paths') or [] env_vars = jedi_settings.get('env_vars') # Drop PYTHONPATH from env_vars before creating the environment because that makes # Jedi throw an error. if env_vars is None: env_vars = os.environ.copy() env_vars.pop('PYTHONPATH', None) environment = self.get_enviroment(environment_path, env_vars=env_vars) if environment_path else None sys_path = self.sys_path(environment_path, env_vars=env_vars) + extra_paths project_path = self._workspace.root_path # Extend sys_path with document's path if requested if use_document_path: sys_path += [os.path.normpath(os.path.dirname(self.path))] kwargs = { 'code': self.source, 'path': self.path, 'environment': environment, 'project': jedi.Project(path=project_path, sys_path=sys_path), } if position: # Deprecated by Jedi to use in Script() constructor kwargs += _utils.position_to_jedi_linecolumn(self, position) return jedi.Script(**kwargs) def get_enviroment(self, environment_path=None, env_vars=None): # TODO(gatesn): #339 - make better use of jedi environments, they seem pretty powerful if environment_path is None: environment = jedi.api.environment.get_cached_default_environment() else: if environment_path in self._workspace._environments: environment = self._workspace._environments[environment_path] else: environment = jedi.api.environment.create_environment(path=environment_path, safe=False, env_vars=env_vars) self._workspace._environments[environment_path] = environment return environment def sys_path(self, environment_path=None, env_vars=None): # Copy our extra sys path # TODO: when safe to break API, use env_vars explicitly to pass to create_environment path = list(self._extra_sys_path) environment = self.get_enviroment(environment_path=environment_path, env_vars=env_vars) path.extend(environment.get_sys_path()) return path 
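# Illustrative sketch: how Document.word_at_position above reassembles the word
# under the cursor from RE_START_WORD and RE_END_WORD. The sample line and
# cursor column are made up for the example.
import re

_RE_START_WORD = re.compile('[A-Za-z_0-9]*$')
_RE_END_WORD = re.compile('^[A-Za-z_0-9]*')

_line, _col = "print(os.path.isabs)", 10      # cursor sits inside "path"
_before, _after = _line[:_col], _line[_col:]  # "print(os.p" and "ath.isabs)"
_word = _RE_START_WORD.findall(_before)[0] + _RE_END_WORD.findall(_after)[-1]
assert _word == "path"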
python-lsp-server-1.3.3/scripts/000077500000000000000000000000001415567622500166435ustar00rootroot00000000000000python-lsp-server-1.3.3/scripts/circle/000077500000000000000000000000001415567622500201045ustar00rootroot00000000000000python-lsp-server-1.3.3/scripts/circle/pypi.sh000077500000000000000000000007201415567622500214230ustar00rootroot00000000000000#!/bin/bash -e if [ -z "$CI" ]; then echo "Will only continue on CI" exit fi # build package and upload to private pypi index rm -f ~/.pypirc echo "[distutils]" >> ~/.pypirc echo "index-servers = pypi-private" >> ~/.pypirc echo "[pypi-private]" >> ~/.pypirc echo "repository=https://$PYPI_HOST" >> ~/.pypirc echo "username=$PYPI_USERNAME" >> ~/.pypirc echo "password=$PYPI_PASSWORD" >> ~/.pypirc python setup.py bdist_wheel sdist upload -r pypi-private python-lsp-server-1.3.3/scripts/jsonschema2md.py000066400000000000000000000044211415567622500217530ustar00rootroot00000000000000import json import sys from argparse import ArgumentParser, FileType def describe_array(prop: dict) -> str: extra = "" if "items" in prop: unique_qualifier = "" if "uniqueItems" in prop: unique_qualifier = "unique" if prop["uniqueItems"] else "non-unique" item_type = describe_type(prop["items"]) extra += f" of {unique_qualifier} {item_type} items" return extra def describe_number(prop: dict) -> str: extra = [] if "minimum" in prop: extra.append(f">= {prop['minimum']}") if "maximum" in prop: extra.append(f"<= {prop['maximum']}") return ",".join(extra) EXTRA_DESCRIPTORS = { "array": describe_array, "number": describe_number, } def describe_type(prop: dict) -> str: prop_type = prop["type"] label = f"`{prop_type}`" if prop_type in EXTRA_DESCRIPTORS: label += " " + EXTRA_DESCRIPTORS[prop_type](prop) if "enum" in prop: allowed_values = [f"`{value}`" for value in prop["enum"]] label += "one of: " + ", ".join(allowed_values) return label def convert_schema(schema: dict, source: str = None) -> str: lines = [ f"# {schema['title']}", schema["description"], "", "| **Configuration Key** | **Type** | **Description** | **Default** ", "|----|----|----|----|", ] for key, prop in schema["properties"].items(): description = prop.get("description", "") default = json.dumps(prop.get("default", "")) lines.append( f"| `{key}` | {describe_type(prop)} | {description} | `{default}` |" ) if source: lines.append( f"\nThis documentation was generated from `{source}`." " Please do not edit this file directly." ) # ensure empty line at the end lines.append("") return "\n".join(lines) def main(argv): parser = ArgumentParser() parser.add_argument("schema", type=FileType()) parser.add_argument("markdown", type=FileType("w+"), default=sys.stdout) arguments = parser.parse_args(argv[1:]) schema = json.loads(arguments.schema.read()) markdown = convert_schema(schema, source=arguments.schema.name) arguments.markdown.write(markdown) if __name__ == "__main__": main(sys.argv) python-lsp-server-1.3.3/setup.cfg000066400000000000000000000003671415567622500170030ustar00rootroot00000000000000[pycodestyle] ignore = E226, E722, W504 max-line-length = 120 exclude = test/plugins/.ropeproject,test/.ropeproject [tool:pytest] testpaths = test addopts = --cov-report html --cov-report term --junitxml=pytest.xml --cov pylsp --cov test python-lsp-server-1.3.3/setup.py000077500000000000000000000065401415567622500166760ustar00rootroot00000000000000#!/usr/bin/env python # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
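# Illustrative sketch: what one property of a tiny, made-up JSON schema turns
# into when run through convert_schema from scripts/jsonschema2md.py above.
# The key, description and default below are hypothetical.
_example_schema = {
    "title": "Example settings",
    "description": "A hypothetical schema used only for illustration.",
    "properties": {
        "pylsp.plugins.example.maxLineLength": {
            "type": "number",
            "minimum": 1,
            "description": "Maximum allowed line length.",
            "default": 79,
        },
    },
}
# convert_schema(_example_schema) renders a Markdown table containing the row:
# | `pylsp.plugins.example.maxLineLength` | `number` >= 1 | Maximum allowed line length. | `79` |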
import ast import os from setuptools import find_packages, setup HERE = os.path.abspath(os.path.dirname(__file__)) def get_version(module='pylsp'): """Get version.""" with open(os.path.join(HERE, module, '_version.py'), 'r') as f: data = f.read() lines = data.split('\n') for line in lines: if line.startswith('VERSION_INFO'): version_tuple = ast.literal_eval(line.split('=')[-1].strip()) version = '.'.join(map(str, version_tuple)) break return version README = open('README.md', 'r').read() install_requires = [ 'jedi>=0.17.2,<0.19.0', 'python-lsp-jsonrpc>=1.0.0', 'pluggy', 'ujson>=3.0.0', 'setuptools>=39.0.0' ] setup( name='python-lsp-server', version=get_version(), description='Python Language Server for the Language Server Protocol', long_description=README, long_description_content_type='text/markdown', url='https://github.com/python-lsp/python-lsp-server', author='Python Language Server Contributors', packages=find_packages(exclude=['contrib', 'docs', 'test', 'test.*']), install_requires=install_requires, python_requires='>=3.6', extras_require={ 'all': [ 'autopep8>=1.6.0,<1.7.0', 'flake8>=4.0.0,<4.1.0', 'mccabe>=0.6.0,<0.7.0', 'pycodestyle>=2.8.0,<2.9.0', 'pydocstyle>=2.0.0', 'pyflakes>=2.4.0,<2.5.0', 'pylint>=2.5.0', 'rope>=0.10.5', 'yapf', ], 'autopep8': ['autopep8>=1.6.0,<1.7.0'], 'flake8': ['flake8>=4.0.0,<4.1.0'], 'mccabe': ['mccabe>=0.6.0,<0.7.0'], 'pycodestyle': ['pycodestyle>=2.8.0,<2.9.0'], 'pydocstyle': ['pydocstyle>=2.0.0'], 'pyflakes': ['pyflakes>=2.4.0,<2.5.0'], 'pylint': ['pylint>=2.5.0'], 'rope': ['rope>0.10.5'], 'yapf': ['yapf'], 'test': ['pylint>=2.5.0', 'pytest', 'pytest-cov', 'coverage', 'numpy', 'pandas', 'matplotlib', 'pyqt5', 'flaky'], }, entry_points={ 'console_scripts': [ 'pylsp = pylsp.__main__:main', ], 'pylsp': [ 'autopep8 = pylsp.plugins.autopep8_format', 'folding = pylsp.plugins.folding', 'flake8 = pylsp.plugins.flake8_lint', 'jedi_completion = pylsp.plugins.jedi_completion', 'jedi_definition = pylsp.plugins.definition', 'jedi_hover = pylsp.plugins.hover', 'jedi_highlight = pylsp.plugins.highlight', 'jedi_references = pylsp.plugins.references', 'jedi_rename = pylsp.plugins.jedi_rename', 'jedi_signature_help = pylsp.plugins.signature', 'jedi_symbols = pylsp.plugins.symbols', 'mccabe = pylsp.plugins.mccabe_lint', 'preload = pylsp.plugins.preload_imports', 'pycodestyle = pylsp.plugins.pycodestyle_lint', 'pydocstyle = pylsp.plugins.pydocstyle_lint', 'pyflakes = pylsp.plugins.pyflakes_lint', 'pylint = pylsp.plugins.pylint_lint', 'rope_completion = pylsp.plugins.rope_completion', 'rope_rename = pylsp.plugins.rope_rename', 'yapf = pylsp.plugins.yapf_format' ] }, ) python-lsp-server-1.3.3/test/000077500000000000000000000000001415567622500161335ustar00rootroot00000000000000python-lsp-server-1.3.3/test/__init__.py000066400000000000000000000007011415567622500202420ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import sys import pytest from pylsp import IS_WIN IS_PY3 = sys.version_info.major == 3 unix_only = pytest.mark.skipif(IS_WIN, reason="Unix only") windows_only = pytest.mark.skipif(not IS_WIN, reason="Windows only") py3_only = pytest.mark.skipif(not IS_PY3, reason="Python3 only") py2_only = pytest.mark.skipif(IS_PY3, reason="Python2 only") python-lsp-server-1.3.3/test/conftest.py000066400000000000000000000004441415567622500203340ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
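# Illustrative sketch: a third-party plugin can register itself through the
# same "pylsp" entry-point group that setup.py above uses for the built-in
# plugins. The project name, package name and hook body below are hypothetical.
#
#   # setup.py of an external plugin
#   setup(
#       name="pylsp-myplugin",
#       install_requires=["python-lsp-server"],
#       entry_points={"pylsp": ["myplugin = pylsp_myplugin.plugin"]},
#   )
#
#   # pylsp_myplugin/plugin.py
#   from pylsp import hookimpl
#
#   @hookimpl
#   def pylsp_settings():
#       return {"plugins": {"myplugin": {"enabled": True}}}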
""" py.test configuration""" import logging from pylsp.__main__ import LOG_FORMAT logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) pytest_plugins = [ 'test.fixtures' ] python-lsp-server-1.3.3/test/fixtures.py000066400000000000000000000056271415567622500203700ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os from io import StringIO from unittest.mock import Mock import pytest from pylsp import uris from pylsp.config.config import Config from pylsp.python_lsp import PythonLSPServer from pylsp.workspace import Workspace, Document DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def main(): print sys.stdin.read() """ @pytest.fixture def pylsp(tmpdir): """ Return an initialized python LS """ ls = PythonLSPServer(StringIO, StringIO) ls.m_initialize( processId=1, rootUri=uris.from_fs_path(str(tmpdir)), initializationOptions={} ) return ls @pytest.fixture def pylsp_w_workspace_folders(tmpdir): """ Return an initialized python LS """ ls = PythonLSPServer(StringIO, StringIO) folder1 = tmpdir.mkdir('folder1') folder2 = tmpdir.mkdir('folder2') ls.m_initialize( processId=1, rootUri=uris.from_fs_path(str(folder1)), initializationOptions={}, workspaceFolders=[ { 'uri': uris.from_fs_path(str(folder1)), 'name': 'folder1' }, { 'uri': uris.from_fs_path(str(folder2)), 'name': 'folder2' } ] ) workspace_folders = [folder1, folder2] return (ls, workspace_folders) @pytest.fixture def workspace(tmpdir): """Return a workspace.""" ws = Workspace(uris.from_fs_path(str(tmpdir)), Mock()) ws._config = Config(ws.root_uri, {}, 0, {}) return ws @pytest.fixture def workspace_other_root_path(tmpdir): """Return a workspace with a root_path other than tmpdir.""" ws_path = str(tmpdir.mkdir('test123').mkdir('test456')) ws = Workspace(uris.from_fs_path(ws_path), Mock()) ws._config = Config(ws.root_uri, {}, 0, {}) return ws @pytest.fixture def config(workspace): # pylint: disable=redefined-outer-name """Return a config object.""" cfg = Config(workspace.root_uri, {}, 0, {}) cfg._plugin_settings = {'plugins': {'pylint': {'enabled': False, 'args': [], 'executable': None}}} return cfg @pytest.fixture def doc(workspace): # pylint: disable=redefined-outer-name return Document(DOC_URI, workspace, DOC) @pytest.fixture def temp_workspace_factory(workspace): # pylint: disable=redefined-outer-name ''' Returns a function that creates a temporary workspace from the files dict. The dict is in the format {"file_name": "file_contents"} ''' def fn(files): def create_file(name, content): fn = os.path.join(workspace.root_path, name) with open(fn, 'w', encoding='utf-8') as f: f.write(content) workspace.put_document(uris.from_fs_path(fn), content) for name, content in files.items(): create_file(name, content) return workspace return fn python-lsp-server-1.3.3/test/plugins/000077500000000000000000000000001415567622500176145ustar00rootroot00000000000000python-lsp-server-1.3.3/test/plugins/__init__.py000066400000000000000000000000001415567622500217130ustar00rootroot00000000000000python-lsp-server-1.3.3/test/plugins/test_autopep8_format.py000066400000000000000000000030731415567622500243450ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
from pylsp import uris from pylsp.plugins.autopep8_format import pylsp_format_document, pylsp_format_range from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """a = 123 def func(): pass """ GOOD_DOC = """A = ['hello', 'world']\n""" INDENTED_DOC = """def foo(): print('asdf', file=None ) bar = { 'foo': foo } """ CORRECT_INDENTED_DOC = """def foo(): print('asdf', file=None ) bar = {'foo': foo } """ def test_format(config, workspace): doc = Document(DOC_URI, workspace, DOC) res = pylsp_format_document(config, doc) assert len(res) == 1 assert res[0]['newText'] == "a = 123\n\n\ndef func():\n pass\n" def test_range_format(config, workspace): doc = Document(DOC_URI, workspace, DOC) def_range = { 'start': {'line': 0, 'character': 0}, 'end': {'line': 2, 'character': 0} } res = pylsp_format_range(config, doc, def_range) assert len(res) == 1 # Make sure the func is still badly formatted assert res[0]['newText'] == "a = 123\n\n\n\n\ndef func():\n pass\n" def test_no_change(config, workspace): doc = Document(DOC_URI, workspace, GOOD_DOC) assert not pylsp_format_document(config, doc) def test_hanging_indentation(config, workspace): doc = Document(DOC_URI, workspace, INDENTED_DOC) res = pylsp_format_document(config, doc) assert len(res) == 1 assert res[0]['newText'] == CORRECT_INDENTED_DOC python-lsp-server-1.3.3/test/plugins/test_completion.py000066400000000000000000000431221415567622500234000ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import math import os import sys from pathlib import Path from typing import NamedTuple, Dict import pytest from pylsp import uris, lsp from pylsp.workspace import Document from pylsp.plugins.jedi_completion import pylsp_completions as pylsp_jedi_completions from pylsp.plugins.jedi_completion import pylsp_completion_item_resolve as pylsp_jedi_completion_item_resolve from pylsp.plugins.rope_completion import pylsp_completions as pylsp_rope_completions from pylsp._utils import JEDI_VERSION PY2 = sys.version[0] == '2' LINUX = sys.platform.startswith('linux') CI = os.environ.get('CI') LOCATION = os.path.realpath( os.path.join(os.getcwd(), os.path.dirname(__file__)) ) DOC_URI = uris.from_fs_path(__file__) DOC = """import os print os.path.isabs("/tmp") def hello(): pass def _a_hello(): pass class Hello(): @property def world(self): return None def everyone(self, a, b, c=None, d=2): pass print Hello().world print Hello().every def documented_hello(): \"\"\"Sends a polite greeting\"\"\" pass """ def test_rope_import_completion(config, workspace): com_position = {'line': 0, 'character': 7} doc = Document(DOC_URI, workspace, DOC) items = pylsp_rope_completions(config, workspace, doc, com_position) assert items is None class TypeCase(NamedTuple): document: str position: dict label: str expected: lsp.CompletionItemKind TYPE_CASES: Dict[str, TypeCase] = { 'variable': TypeCase( document='test = 1\ntes', position={'line': 1, 'character': 3}, label='test', expected=lsp.CompletionItemKind.Variable ), 'function': TypeCase( document='def test():\n pass\ntes', position={'line': 2, 'character': 3}, label='test()', expected=lsp.CompletionItemKind.Function ), 'keyword': TypeCase( document='fro', position={'line': 0, 'character': 3}, label='from', expected=lsp.CompletionItemKind.Keyword ), 'file': TypeCase( document='"' + __file__[:-2].replace('"', '\\"') + '"', position={'line': 0, 'character': len(__file__) - 2}, label=Path(__file__).name + '"', 
expected=lsp.CompletionItemKind.File ), 'module': TypeCase( document='import statis', position={'line': 0, 'character': 13}, label='statistics', expected=lsp.CompletionItemKind.Module ), 'class': TypeCase( document='KeyErr', position={'line': 0, 'character': 6}, label='KeyError', expected=lsp.CompletionItemKind.Class ), 'property': TypeCase( document=( 'class A:\n' ' @property\n' ' def test(self):\n' ' pass\n' 'A().tes' ), position={'line': 4, 'character': 5}, label='test', expected=lsp.CompletionItemKind.Property ) } @pytest.mark.parametrize('case', list(TYPE_CASES.values()), ids=list(TYPE_CASES.keys())) def test_jedi_completion_type(case, config, workspace): # property support was introduced in 0.18 if case.expected == lsp.CompletionItemKind.Property and JEDI_VERSION.startswith('0.17'): return doc = Document(DOC_URI, workspace, case.document) items = pylsp_jedi_completions(config, doc, case.position) items = {i['label']: i for i in items} assert items[case.label]['kind'] == case.expected def test_jedi_completion(config, workspace): # Over 'i' in os.path.isabs(...) com_position = {'line': 1, 'character': 15} doc = Document(DOC_URI, workspace, DOC) items = pylsp_jedi_completions(config, doc, com_position) assert items labels = [i['label'] for i in items] assert 'isfile(path)' in labels # Test we don't throw with big character pylsp_jedi_completions(config, doc, {'line': 1, 'character': 1000}) def test_jedi_completion_item_resolve(config, workspace): # Over the blank line com_position = {'line': 8, 'character': 0} doc = Document(DOC_URI, workspace, DOC) config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) completions = pylsp_jedi_completions(config, doc, com_position) items = {c['label']: c for c in completions} documented_hello_item = items['documented_hello()'] assert 'documentation' not in documented_hello_item assert 'detail' not in documented_hello_item resolved_documented_hello = pylsp_jedi_completion_item_resolve( completion_item=documented_hello_item, document=doc ) assert 'Sends a polite greeting' in resolved_documented_hello['documentation'] def test_jedi_completion_with_fuzzy_enabled(config, workspace): # Over 'i' in os.path.isabs(...) config.update({'plugins': {'jedi_completion': {'fuzzy': True}}}) com_position = {'line': 1, 'character': 15} doc = Document(DOC_URI, workspace, DOC) items = pylsp_jedi_completions(config, doc, com_position) assert items expected = 'commonprefix(m)' if JEDI_VERSION == '0.18.0': expected = 'commonprefix(list)' assert items[0]['label'] == expected # Test we don't throw with big character pylsp_jedi_completions(config, doc, {'line': 1, 'character': 1000}) def test_jedi_completion_resolve_at_most(config, workspace): # Over 'i' in os.path.isabs(...) com_position = {'line': 1, 'character': 15} doc = Document(DOC_URI, workspace, DOC) # Do not resolve any labels config.update({'plugins': {'jedi_completion': {'resolve_at_most': 0}}}) items = pylsp_jedi_completions(config, doc, com_position) labels = {i['label'] for i in items} assert 'isabs' in labels # Resolve all items config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) items = pylsp_jedi_completions(config, doc, com_position) labels = {i['label'] for i in items} assert 'isfile(path)' in labels def test_rope_completion(config, workspace): # Over 'i' in os.path.isabs(...) 
com_position = {'line': 1, 'character': 15} workspace.put_document(DOC_URI, source=DOC) doc = workspace.get_document(DOC_URI) items = pylsp_rope_completions(config, workspace, doc, com_position) assert items assert items[0]['label'] == 'isabs' def test_jedi_completion_ordering(config, workspace): # Over the blank line com_position = {'line': 8, 'character': 0} doc = Document(DOC_URI, workspace, DOC) config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) completions = pylsp_jedi_completions(config, doc, com_position) items = {c['label']: c['sortText'] for c in completions} # And that 'hidden' functions come after unhidden ones assert items['hello()'] < items['_a_hello()'] def test_jedi_property_completion(config, workspace): # Over the 'w' in 'print Hello().world' com_position = {'line': 18, 'character': 15} doc = Document(DOC_URI, workspace, DOC) completions = pylsp_jedi_completions(config, doc, com_position) items = {c['label']: c['sortText'] for c in completions} # Ensure we can complete the 'world' property assert 'world' in list(items.keys())[0] def test_jedi_method_completion(config, workspace): # Over the 'y' in 'print Hello().every' com_position = {'line': 20, 'character': 19} doc = Document(DOC_URI, workspace, DOC) config.capabilities['textDocument'] = {'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) completions = pylsp_jedi_completions(config, doc, com_position) everyone_method = [completion for completion in completions if completion['label'] == 'everyone(a, b, c, d)'][0] # Ensure we only generate snippets for positional args assert everyone_method['insertTextFormat'] == lsp.InsertTextFormat.Snippet assert everyone_method['insertText'] == 'everyone(${1:a}, ${2:b})$0' # Disable param snippets config.update({'plugins': {'jedi_completion': {'include_params': False}}}) completions = pylsp_jedi_completions(config, doc, com_position) everyone_method = [completion for completion in completions if completion['label'] == 'everyone(a, b, c, d)'][0] assert 'insertTextFormat' not in everyone_method assert everyone_method['insertText'] == 'everyone' @pytest.mark.skipif(PY2 or (sys.platform.startswith('linux') and os.environ.get('CI') is not None), reason="Test in Python 3 and not on CIs on Linux because wheels don't work on them.") def test_pyqt_completion(config, workspace): # Over 'QA' in 'from PyQt5.QtWidgets import QApplication' doc_pyqt = "from PyQt5.QtWidgets import QA" com_position = {'line': 0, 'character': len(doc_pyqt)} doc = Document(DOC_URI, workspace, doc_pyqt) completions = pylsp_jedi_completions(config, doc, com_position) assert completions is not None def test_numpy_completions(config, workspace): doc_numpy = "import numpy as np; np." com_position = {'line': 0, 'character': len(doc_numpy)} doc = Document(DOC_URI, workspace, doc_numpy) items = pylsp_jedi_completions(config, doc, com_position) assert items assert any('array' in i['label'] for i in items) def test_pandas_completions(config, workspace): doc_pandas = "import pandas as pd; pd." com_position = {'line': 0, 'character': len(doc_pandas)} doc = Document(DOC_URI, workspace, doc_pandas) items = pylsp_jedi_completions(config, doc, com_position) assert items assert any('DataFrame' in i['label'] for i in items) def test_matplotlib_completions(config, workspace): doc_mpl = "import matplotlib.pyplot as plt; plt." 
com_position = {'line': 0, 'character': len(doc_mpl)} doc = Document(DOC_URI, workspace, doc_mpl) items = pylsp_jedi_completions(config, doc, com_position) assert items assert any('plot' in i['label'] for i in items) def test_snippets_completion(config, workspace): doc_snippets = 'from collections import defaultdict \na=defaultdict' com_position = {'line': 0, 'character': 35} doc = Document(DOC_URI, workspace, doc_snippets) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) completions = pylsp_jedi_completions(config, doc, com_position) assert completions[0]['insertText'] == 'defaultdict' com_position = {'line': 1, 'character': len(doc_snippets)} completions = pylsp_jedi_completions(config, doc, com_position) assert completions[0]['insertText'] == 'defaultdict($0)' assert completions[0]['insertTextFormat'] == lsp.InsertTextFormat.Snippet def test_snippets_completion_at_most(config, workspace): doc_snippets = 'from collections import defaultdict \na=defaultdict' doc = Document(DOC_URI, workspace, doc_snippets) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) config.update({'plugins': {'jedi_completion': {'resolve_at_most': 0}}}) com_position = {'line': 1, 'character': len(doc_snippets)} completions = pylsp_jedi_completions(config, doc, com_position) assert completions[0]['insertText'] == 'defaultdict' assert not completions[0].get('insertTextFormat', None) def test_completion_with_class_objects(config, workspace): doc_text = 'class FOOBAR(Object): pass\nFOOB' com_position = {'line': 1, 'character': 4} doc = Document(DOC_URI, workspace, doc_text) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': { 'include_params': True, 'include_class_objects': True, }}}) completions = pylsp_jedi_completions(config, doc, com_position) assert len(completions) == 2 assert completions[0]['label'] == 'FOOBAR' assert completions[0]['kind'] == lsp.CompletionItemKind.Class assert completions[1]['label'] == 'FOOBAR object' assert completions[1]['kind'] == lsp.CompletionItemKind.TypeParameter def test_snippet_parsing(config, workspace): doc = 'divmod' completion_position = {'line': 0, 'character': 6} doc = Document(DOC_URI, workspace, doc) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) completions = pylsp_jedi_completions(config, doc, completion_position) out = 'divmod(${1:x}, ${2:y})$0' if JEDI_VERSION == '0.18.0': out = 'divmod(${1:a}, ${2:b})$0' assert completions[0]['insertText'] == out def test_multiline_import_snippets(config, workspace): document = 'from datetime import(\n date,\n datetime)\na=date' doc = Document(DOC_URI, workspace, document) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) position = {'line': 1, 'character': 5} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'date' position = {'line': 2, 'character': 9} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'datetime' def test_multiline_snippets(config, 
workspace): document = 'from datetime import\\\n date,\\\n datetime \na=date' doc = Document(DOC_URI, workspace, document) config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) position = {'line': 1, 'character': 5} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'date' position = {'line': 2, 'character': 9} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'datetime' def test_multistatement_snippet(config, workspace): config.capabilities['textDocument'] = { 'completion': {'completionItem': {'snippetSupport': True}}} config.update({'plugins': {'jedi_completion': {'include_params': True}}}) document = 'a = 1; from datetime import date' doc = Document(DOC_URI, workspace, document) position = {'line': 0, 'character': len(document)} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'date' document = 'from math import fmod; a = fmod' doc = Document(DOC_URI, workspace, document) position = {'line': 0, 'character': len(document)} completions = pylsp_jedi_completions(config, doc, position) assert completions[0]['insertText'] == 'fmod(${1:x}, ${2:y})$0' def test_jedi_completion_extra_paths(tmpdir, workspace): # Create a tempfile with some content and pass to extra_paths temp_doc_content = ''' def spam(): pass ''' p = tmpdir.mkdir("extra_path") extra_paths = [str(p)] p = p.join("foo.py") p.write(temp_doc_content) # Content of doc to test completion doc_content = """import foo foo.s""" doc = Document(DOC_URI, workspace, doc_content) # After 'foo.s' without extra paths com_position = {'line': 1, 'character': 5} completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions is None # Update config extra paths settings = {'pylsp': {'plugins': {'jedi': {'extra_paths': extra_paths}}}} doc.update_config(settings) # After 'foo.s' with extra paths com_position = {'line': 1, 'character': 5} completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions[0]['label'] == 'spam()' @pytest.mark.skipif(PY2 or not LINUX or not CI, reason="tested on linux and python 3 only") def test_jedi_completion_environment(workspace): # Content of doc to test completion doc_content = '''import logh ''' doc = Document(DOC_URI, workspace, doc_content) # After 'import logh' with default environment com_position = {'line': 0, 'character': 11} assert os.path.isdir('/tmp/pyenv/') settings = {'pylsp': {'plugins': {'jedi': {'environment': None}}}} doc.update_config(settings) completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions is None # Update config extra environment env_path = '/tmp/pyenv/bin/python' settings = {'pylsp': {'plugins': {'jedi': {'environment': env_path}}}} doc.update_config(settings) # After 'import logh' with new environment completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions[0]['label'] == 'loghub' resolved = pylsp_jedi_completion_item_resolve(completions[0], doc) assert 'changelog generator' in resolved['documentation'].lower() def test_document_path_completions(tmpdir, workspace_other_root_path): # Create a dummy module out of the workspace's root_path and try to get # completions for it in another file placed next to it. 
module_content = ''' def foo(): pass ''' p = tmpdir.join("mymodule.py") p.write(module_content) # Content of doc to test completion doc_content = """import mymodule mymodule.f""" doc_path = str(tmpdir) + os.path.sep + 'myfile.py' doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) com_position = {'line': 1, 'character': 10} completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions[0]['label'] == 'foo()' python-lsp-server-1.3.3/test/plugins/test_definitions.py000066400000000000000000000050311415567622500235370ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os from pylsp import uris from pylsp.plugins.definition import pylsp_definitions from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """def a(): pass print a() class Directory(object): def __init__(self): self.members = dict() def add_member(self, id, name): self.members[id] = name """ def test_definitions(config, workspace): # Over 'a' in print a cursor_pos = {'line': 3, 'character': 6} # The definition of 'a' def_range = { 'start': {'line': 0, 'character': 4}, 'end': {'line': 0, 'character': 5} } doc = Document(DOC_URI, workspace, DOC) assert [{'uri': DOC_URI, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) def test_builtin_definition(config, workspace): # Over 'i' in dict cursor_pos = {'line': 8, 'character': 24} # No go-to def for builtins doc = Document(DOC_URI, workspace, DOC) assert not pylsp_definitions(config, doc, cursor_pos) def test_assignment(config, workspace): # Over 's' in self.members[id] cursor_pos = {'line': 11, 'character': 19} # The assignment of 'self.members' def_range = { 'start': {'line': 8, 'character': 13}, 'end': {'line': 8, 'character': 20} } doc = Document(DOC_URI, workspace, DOC) assert [{'uri': DOC_URI, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) def test_document_path_definitions(config, workspace_other_root_path, tmpdir): # Create a dummy module out of the workspace's root_path and try to get # a definition on it in another file placed next to it. module_content = ''' def foo(): pass ''' p = tmpdir.join("mymodule.py") p.write(module_content) # Content of doc to test definition doc_content = """from mymodule import foo""" doc_path = str(tmpdir) + os.path.sep + 'myfile.py' doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) # The range where is defined in mymodule.py def_range = { 'start': {'line': 1, 'character': 4}, 'end': {'line': 1, 'character': 7} } # The position where foo is called in myfile.py cursor_pos = {'line': 0, 'character': 24} # The uri for mymodule.py module_path = str(p) module_uri = uris.from_fs_path(module_path) assert [{'uri': module_uri, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) python-lsp-server-1.3.3/test/plugins/test_flake8_lint.py000066400000000000000000000121061415567622500234250ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
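# The definition tests above spell out zero-based LSP ranges by hand. As an
# illustration (an assumed helper, not part of the plugin), the same ranges
# can be derived from the source text:
def name_range(source, name):
    for line_no, line in enumerate(source.splitlines()):
        col = line.find(name)
        if col != -1:
            return {
                'start': {'line': line_no, 'character': col},
                'end': {'line': line_no, 'character': col + len(name)},
            }
    return None


# Matches the def_range asserted in test_definitions above.
assert name_range("def a():\n    pass", "a") == {
    'start': {'line': 0, 'character': 4},
    'end': {'line': 0, 'character': 5},
}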
import tempfile import os from unittest.mock import patch from pylsp import lsp, uris from pylsp.plugins import flake8_lint from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """import pylsp t = "TEST" def using_const(): \ta = 8 + 9 \treturn t """ def temp_document(doc_text, workspace): with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file: name = temp_file.name temp_file.write(doc_text) doc = Document(uris.from_fs_path(name), workspace) return name, doc def test_flake8_unsaved(workspace): doc = Document('', workspace, DOC) diags = flake8_lint.pylsp_lint(workspace, doc) msg = 'F841 local variable \'a\' is assigned to but never used' unused_var = [d for d in diags if d['message'] == msg][0] assert unused_var['source'] == 'flake8' assert unused_var['code'] == 'F841' assert unused_var['range']['start'] == {'line': 5, 'character': 1} assert unused_var['range']['end'] == {'line': 5, 'character': 11} assert unused_var['severity'] == lsp.DiagnosticSeverity.Warning def test_flake8_lint(workspace): try: name, doc = temp_document(DOC, workspace) diags = flake8_lint.pylsp_lint(workspace, doc) msg = 'F841 local variable \'a\' is assigned to but never used' unused_var = [d for d in diags if d['message'] == msg][0] assert unused_var['source'] == 'flake8' assert unused_var['code'] == 'F841' assert unused_var['range']['start'] == {'line': 5, 'character': 1} assert unused_var['range']['end'] == {'line': 5, 'character': 11} assert unused_var['severity'] == lsp.DiagnosticSeverity.Warning finally: os.remove(name) def test_flake8_config_param(workspace): with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: mock_instance = popen_mock.return_value mock_instance.communicate.return_value = [bytes(), bytes()] flake8_conf = '/tmp/some.cfg' workspace._config.update({'plugins': {'flake8': {'config': flake8_conf}}}) _name, doc = temp_document(DOC, workspace) flake8_lint.pylsp_lint(workspace, doc) (call_args,) = popen_mock.call_args[0] assert 'flake8' in call_args assert '--config={}'.format(flake8_conf) in call_args def test_flake8_executable_param(workspace): with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: mock_instance = popen_mock.return_value mock_instance.communicate.return_value = [bytes(), bytes()] flake8_executable = '/tmp/flake8' workspace._config.update({'plugins': {'flake8': {'executable': flake8_executable}}}) _name, doc = temp_document(DOC, workspace) flake8_lint.pylsp_lint(workspace, doc) (call_args,) = popen_mock.call_args[0] assert flake8_executable in call_args def get_flake8_cfg_settings(workspace, config_str): """Write a ``setup.cfg``, load it in the workspace, and return the flake8 settings. This function creates a ``setup.cfg``; you'll have to delete it yourself. 
""" with open(os.path.join(workspace.root_path, "setup.cfg"), "w+", encoding='utf-8') as f: f.write(config_str) workspace.update_config({"pylsp": {"configurationSources": ["flake8"]}}) return workspace._config.plugin_settings("flake8") def test_flake8_multiline(workspace): config_str = r"""[flake8] exclude = blah/, file_2.py """ doc_str = "print('hi')\nimport os\n" doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, "blah/__init__.py")) workspace.put_document(doc_uri, doc_str) flake8_settings = get_flake8_cfg_settings(workspace, config_str) assert "exclude" in flake8_settings assert len(flake8_settings["exclude"]) == 2 with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: mock_instance = popen_mock.return_value mock_instance.communicate.return_value = [bytes(), bytes()] doc = workspace.get_document(doc_uri) flake8_lint.pylsp_lint(workspace, doc) call_args = popen_mock.call_args[0][0] assert call_args == ["flake8", "-", "--exclude=blah/,file_2.py"] os.unlink(os.path.join(workspace.root_path, "setup.cfg")) def test_flake8_per_file_ignores(workspace): config_str = r"""[flake8] ignores = F403 per-file-ignores = **/__init__.py:F401,E402 test_something.py:E402, exclude = file_1.py file_2.py """ doc_str = "print('hi')\nimport os\n" doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, "blah/__init__.py")) workspace.put_document(doc_uri, doc_str) flake8_settings = get_flake8_cfg_settings(workspace, config_str) assert "perFileIgnores" in flake8_settings assert len(flake8_settings["perFileIgnores"]) == 2 assert "exclude" in flake8_settings assert len(flake8_settings["exclude"]) == 2 doc = workspace.get_document(doc_uri) res = flake8_lint.pylsp_lint(workspace, doc) assert not res os.unlink(os.path.join(workspace.root_path, "setup.cfg")) python-lsp-server-1.3.3/test/plugins/test_folding.py000066400000000000000000000114721415567622500226540ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
import sys from textwrap import dedent from pylsp import uris from pylsp.plugins.folding import pylsp_folding_range from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = dedent(""" def func(arg1, arg2, arg3, arg4, arg5, default=func( 2, 3, 4 )): return (2, 3, 4, 5) @decorator( param1, param2 ) def decorated_func(x, y, z): if x: return y elif y: return z elif x + y > z: return True else: return x class A(): def method(self, x1): def inner(): return x1 if x2: func(3, 4, 5, 6, 7) elif x3 < 2: pass else: more_complex_func(2, 3, 4, 5, 6, 8) return inner a = 2 operation = (a_large_variable_that_fills_all_space + other_embarrasingly_long_variable - 2 * 3 / 5) (a, b, c, d, e, f) = func(3, 4, 5, 6, 7, 8, 9, 10) for i in range(0, 3): i += 1 while x < i: expr = (2, 4) a = func(expr + i, arg2, arg3, arg4, arg5, var(2, 3, 4, 5)) for j in range(0, i): if i % 2 == 1: pass compren = [x for x in range(0, 3) if x == 2] with open('doc', 'r') as f: try: f / 0 except: pass finally: raise SomeException() def testC(): pass """) SYNTAX_ERR = dedent(""" def func(arg1, arg2, arg3, arg4, arg5, default=func( 2, 3, 4 )): return (2, 3, 4, 5) class A(: pass a = 2 operation = (a_large_variable_that_fills_all_space + other_embarrasingly_long_variable - 2 * 3 / (a, b, c, d, e, f) = func(3, 4, 5, 6, 7, 8, 9, 10 a = 2 for i in range(0, 3) i += 1 while x < i: expr = (2, 4) a = func(expr + i, arg2, arg3, arg4, arg5, var(2, 3, 4, 5)) for j in range(0, i): if i % 2 == 1: pass """) def test_folding(workspace): doc = Document(DOC_URI, workspace, DOC) ranges = pylsp_folding_range(doc) expected = [{'startLine': 1, 'endLine': 6}, {'startLine': 2, 'endLine': 3}, {'startLine': 5, 'endLine': 6}, {'startLine': 8, 'endLine': 11}, {'startLine': 12, 'endLine': 20}, {'startLine': 13, 'endLine': 14}, {'startLine': 15, 'endLine': 16}, {'startLine': 17, 'endLine': 18}, {'startLine': 19, 'endLine': 20}, {'startLine': 22, 'endLine': 35}, {'startLine': 23, 'endLine': 35}, {'startLine': 24, 'endLine': 25}, {'startLine': 27, 'endLine': 29}, {'startLine': 28, 'endLine': 29}, {'startLine': 30, 'endLine': 31}, {'startLine': 32, 'endLine': 34}, {'startLine': 33, 'endLine': 34}, {'startLine': 38, 'endLine': 39}, {'startLine': 41, 'endLine': 43}, {'startLine': 42, 'endLine': 43}, {'startLine': 45, 'endLine': 54}, {'startLine': 47, 'endLine': 51}, {'startLine': 49, 'endLine': 51}, {'startLine': 50, 'endLine': 51}, {'startLine': 52, 'endLine': 54}, {'startLine': 53, 'endLine': 54}, {'startLine': 56, 'endLine': 57}, {'startLine': 59, 'endLine': 65}, {'startLine': 60, 'endLine': 61}, {'startLine': 62, 'endLine': 63}, {'startLine': 64, 'endLine': 65}, {'startLine': 67, 'endLine': 68}] if sys.version_info[:2] >= (3, 9): # the argument list of the decorator is also folded in Python >= 3.9 expected.insert(4, {'startLine': 9, 'endLine': 10}) assert ranges == expected def test_folding_syntax_error(workspace): doc = Document(DOC_URI, workspace, SYNTAX_ERR) ranges = pylsp_folding_range(doc) expected = [{'startLine': 1, 'endLine': 6}, {'startLine': 2, 'endLine': 3}, {'startLine': 5, 'endLine': 6}, {'startLine': 8, 'endLine': 9}, {'startLine': 12, 'endLine': 13}, {'startLine': 15, 'endLine': 17}, {'startLine': 16, 'endLine': 17}, {'startLine': 19, 'endLine': 28}, {'startLine': 21, 'endLine': 25}, {'startLine': 23, 'endLine': 25}, {'startLine': 24, 'endLine': 25}, {'startLine': 26, 'endLine': 28}, {'startLine': 27, 'endLine': 28}] assert ranges == expected 
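# A small sanity check that could accompany the folding expectations above:
# every folding range should be non-inverted and stay inside the document.
# This helper is an illustration only, not part of the folding plugin.
def assert_well_formed_folding_ranges(ranges, num_lines):
    for fold in ranges:
        assert 0 <= fold['startLine'] < fold['endLine'] < num_lines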
python-lsp-server-1.3.3/test/plugins/test_highlight.py000066400000000000000000000030361415567622500231760ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins.highlight import pylsp_document_highlight DOC_URI = uris.from_fs_path(__file__) DOC = """a = "hello" a.startswith("b") """ def test_highlight(workspace): # Over 'a' in a.startswith cursor_pos = {'line': 1, 'character': 0} doc = Document(DOC_URI, workspace, DOC) assert pylsp_document_highlight(doc, cursor_pos) == [{ 'range': { 'start': {'line': 0, 'character': 0}, 'end': {'line': 0, 'character': 1}, }, # The first usage is Write 'kind': lsp.DocumentHighlightKind.Write }, { 'range': { 'start': {'line': 1, 'character': 0}, 'end': {'line': 1, 'character': 1}, }, # The second usage is Read 'kind': lsp.DocumentHighlightKind.Read }] SYS_DOC = '''import sys print sys.path ''' def test_sys_highlight(workspace): cursor_pos = {'line': 0, 'character': 8} doc = Document(DOC_URI, workspace, SYS_DOC) assert pylsp_document_highlight(doc, cursor_pos) == [{ 'range': { 'start': {'line': 0, 'character': 7}, 'end': {'line': 0, 'character': 10} }, 'kind': lsp.DocumentHighlightKind.Write }, { 'range': { 'start': {'line': 1, 'character': 6}, 'end': {'line': 1, 'character': 9} }, 'kind': lsp.DocumentHighlightKind.Read }] python-lsp-server-1.3.3/test/plugins/test_hover.py000066400000000000000000000055221415567622500223540ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os from pylsp import uris from pylsp.plugins.hover import pylsp_hover from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """ def main(): \"\"\"hello world\"\"\" pass """ NUMPY_DOC = """ import numpy as np np.sin """ def test_numpy_hover(workspace): # Over the blank line no_hov_position = {'line': 1, 'character': 0} # Over 'numpy' in import numpy as np numpy_hov_position_1 = {'line': 2, 'character': 8} # Over 'np' in import numpy as np numpy_hov_position_2 = {'line': 2, 'character': 17} # Over 'np' in np.sin numpy_hov_position_3 = {'line': 3, 'character': 1} # Over 'sin' in np.sin numpy_sin_hov_position = {'line': 3, 'character': 4} doc = Document(DOC_URI, workspace, NUMPY_DOC) contents = '' assert contents in pylsp_hover(doc, no_hov_position)['contents'] contents = 'NumPy\n=====\n\nProvides\n' assert contents in pylsp_hover(doc, numpy_hov_position_1)['contents'][0] contents = 'NumPy\n=====\n\nProvides\n' assert contents in pylsp_hover(doc, numpy_hov_position_2)['contents'][0] contents = 'NumPy\n=====\n\nProvides\n' assert contents in pylsp_hover(doc, numpy_hov_position_3)['contents'][0] # https://github.com/davidhalter/jedi/issues/1746 # pylint: disable=import-outside-toplevel import numpy as np if np.lib.NumpyVersion(np.__version__) < '1.20.0': contents = 'Trigonometric sine, element-wise.\n\n' assert contents in pylsp_hover( doc, numpy_sin_hov_position)['contents'][0] def test_hover(workspace): # Over 'main' in def main(): hov_position = {'line': 2, 'character': 6} # Over the blank second line no_hov_position = {'line': 1, 'character': 0} doc = Document(DOC_URI, workspace, DOC) contents = [{'language': 'python', 'value': 'main()'}, 'hello world'] assert { 'contents': contents } == pylsp_hover(doc, hov_position) assert {'contents': ''} == pylsp_hover(doc, no_hov_position) def 
test_document_path_hover(workspace_other_root_path, tmpdir): # Create a dummy module out of the workspace's root_path and try to get # a definition on it in another file placed next to it. module_content = ''' def foo(): """A docstring for foo.""" pass ''' p = tmpdir.join("mymodule.py") p.write(module_content) # Content of doc to test definition doc_content = """from mymodule import foo foo""" doc_path = str(tmpdir) + os.path.sep + 'myfile.py' doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) cursor_pos = {'line': 1, 'character': 3} contents = pylsp_hover(doc, cursor_pos)['contents'] assert contents[1] == 'A docstring for foo.' python-lsp-server-1.3.3/test/plugins/test_jedi_rename.py000066400000000000000000000050151415567622500234700ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os import sys import pytest from pylsp import uris from pylsp.plugins.jedi_rename import pylsp_rename from pylsp.workspace import Document LT_PY36 = sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 6) DOC_NAME = 'test1.py' DOC = '''class Test1(): pass class Test2(Test1): pass ''' DOC_NAME_EXTRA = 'test2.py' DOC_EXTRA = '''from test1 import Test1 x = Test1() ''' @pytest.fixture def tmp_workspace(temp_workspace_factory): return temp_workspace_factory({ DOC_NAME: DOC, DOC_NAME_EXTRA: DOC_EXTRA }) @pytest.mark.skipif(LT_PY36, reason='Jedi refactoring isnt supported on Python 2.x/3.5') def test_jedi_rename(tmp_workspace, config): # pylint: disable=redefined-outer-name # rename the `Test1` class position = {'line': 0, 'character': 6} DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME)) doc = Document(DOC_URI, tmp_workspace) result = pylsp_rename(config, tmp_workspace, doc, position, 'ShouldBeRenamed') assert len(result.keys()) == 1 changes = result.get('documentChanges') assert len(changes) == 2 assert changes[0]['textDocument']['uri'] == doc.uri assert changes[0]['textDocument']['version'] == doc.version assert changes[0].get('edits') == [ { 'range': { 'start': {'line': 0, 'character': 0}, 'end': {'line': 5, 'character': 0}, }, 'newText': 'class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n', } ] path = os.path.join(tmp_workspace.root_path, DOC_NAME_EXTRA) uri_extra = uris.from_fs_path(path) assert changes[1]['textDocument']['uri'] == uri_extra # This also checks whether documents not yet added via textDocument/didOpen # but that do need to be renamed in the project have a `null` version # number. assert changes[1]['textDocument']['version'] is None expected = 'from test1 import ShouldBeRenamed\nx = ShouldBeRenamed()\n' if os.name == 'nt': # The .write method in the temp_workspace_factory functions writes # Windows-style line-endings. expected = expected.replace('\n', '\r\n') assert changes[1].get('edits') == [ { 'range': { 'start': {'line': 0, 'character': 0}, 'end': {'line': 2, 'character': 0}}, 'newText': expected } ] python-lsp-server-1.3.3/test/plugins/test_mccabe_lint.py000066400000000000000000000023171415567622500234700ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
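# The rename tests above assert on WorkspaceEdit payloads. As an assumed,
# illustrative helper (not part of the plugin), a single edit like the ones
# produced there can be applied to a source string as follows. In those tests
# the emitted ranges begin at character 0, so a line-based splice suffices.
def apply_full_line_edit(source, edit):
    lines = source.splitlines(keepends=True)
    start = edit['range']['start']['line']
    end = edit['range']['end']['line']
    return ''.join(lines[:start]) + edit['newText'] + ''.join(lines[end:])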
from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins import mccabe_lint DOC_URI = uris.from_fs_path(__file__) DOC = """def hello(): \tpass """ DOC_SYNTAX_ERR = """def hello() \tpass""" def test_mccabe(config, workspace): old_settings = config.settings try: config.update({'plugins': {'mccabe': {'threshold': 1}}}) doc = Document(DOC_URI, workspace, DOC) diags = mccabe_lint.pylsp_lint(config, doc) assert all(d['source'] == 'mccabe' for d in diags) # One we're expecting is: msg = 'Cyclomatic complexity too high: 1 (threshold 1)' mod_import = [d for d in diags if d['message'] == msg][0] assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning assert mod_import['range']['start'] == {'line': 0, 'character': 0} assert mod_import['range']['end'] == {'line': 0, 'character': 6} finally: config._settings = old_settings def test_mccabe_syntax_error(config, workspace): doc = Document(DOC_URI, workspace, DOC_SYNTAX_ERR) assert mccabe_lint.pylsp_lint(config, doc) is None python-lsp-server-1.3.3/test/plugins/test_pycodestyle_lint.py000066400000000000000000000100531415567622500246160ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins import pycodestyle_lint DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def hello( ): \tpass print("hello" ,"world" ) import json """ def test_pycodestyle(workspace): doc = Document(DOC_URI, workspace, DOC) diags = pycodestyle_lint.pylsp_lint(workspace, doc) assert all(d['source'] == 'pycodestyle' for d in diags) # One we're expecting is: msg = 'W191 indentation contains tabs' mod_import = [d for d in diags if d['message'] == msg][0] assert mod_import['code'] == 'W191' assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning assert mod_import['range']['start'] == {'line': 3, 'character': 0} assert mod_import['range']['end'] == {'line': 3, 'character': 6} msg = 'W391 blank line at end of file' mod_import = [d for d in diags if d['message'] == msg][0] assert mod_import['code'] == 'W391' assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning assert mod_import['range']['start'] == {'line': 10, 'character': 0} assert mod_import['range']['end'] == {'line': 10, 'character': 1} msg = "E201 whitespace after '('" mod_import = [d for d in diags if d['message'] == msg][0] assert mod_import['code'] == 'E201' assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning assert mod_import['range']['start'] == {'line': 2, 'character': 10} assert mod_import['range']['end'] == {'line': 2, 'character': 14} msg = "E128 continuation line under-indented for visual indent" mod_import = [d for d in diags if d['message'] == msg][0] assert mod_import['code'] == 'E128' assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning assert mod_import['range']['start'] == {'line': 5, 'character': 1} assert mod_import['range']['end'] == {'line': 5, 'character': 10} def test_pycodestyle_config(workspace): """ Test that we load config files properly. Config files are loaded in the following order: tox.ini pep8.cfg setup.cfg pycodestyle.cfg Each overriding the values in the last. These files are first looked for in the current document's directory and then each parent directory until any one is found terminating at the workspace root. 
If any section called 'pycodestyle' exists that will be solely used and any config in a 'pep8' section will be ignored """ doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, 'test.py')) workspace.put_document(doc_uri, DOC) doc = workspace.get_document(doc_uri) # Make sure we get a warning for 'indentation contains tabs' diags = pycodestyle_lint.pylsp_lint(workspace, doc) assert [d for d in diags if d['code'] == 'W191'] content = { 'setup.cfg': ('[pycodestyle]\nignore = W191, E201, E128', True), 'tox.ini': ('', False) } for conf_file, (content, working) in list(content.items()): # Now we'll add config file to ignore it with open(os.path.join(workspace.root_path, conf_file), 'w+', encoding='utf-8') as f: f.write(content) workspace._config.settings.cache_clear() # And make sure we don't get any warnings diags = pycodestyle_lint.pylsp_lint(workspace, doc) assert len([d for d in diags if d['code'] == 'W191']) == (0 if working else 1) assert len([d for d in diags if d['code'] == 'E201']) == (0 if working else 1) assert [d for d in diags if d['code'] == 'W391'] os.unlink(os.path.join(workspace.root_path, conf_file)) # Make sure we can ignore via the PYLS config as well workspace._config.update({'plugins': {'pycodestyle': {'ignore': ['W191', 'E201']}}}) # And make sure we only get one warning diags = pycodestyle_lint.pylsp_lint(workspace, doc) assert not [d for d in diags if d['code'] == 'W191'] assert not [d for d in diags if d['code'] == 'E201'] assert [d for d in diags if d['code'] == 'W391'] python-lsp-server-1.3.3/test/plugins/test_pydocstyle_lint.py000066400000000000000000000032751415567622500244610ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins import pydocstyle_lint DOC_URI = uris.from_fs_path(os.path.join(os.path.dirname(__file__), "pydocstyle.py")) TEST_DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def hello(): \tpass import json """ def test_pydocstyle(config, workspace): doc = Document(DOC_URI, workspace, DOC) diags = pydocstyle_lint.pylsp_lint(config, doc) assert all(d['source'] == 'pydocstyle' for d in diags) # One we're expecting is: assert diags[0] == { 'code': 'D100', 'message': 'D100: Missing docstring in public module', 'severity': lsp.DiagnosticSeverity.Warning, 'range': { 'start': {'line': 0, 'character': 0}, 'end': {'line': 0, 'character': 11}, }, 'source': 'pydocstyle' } def test_pydocstyle_test_document(config, workspace): # The default --match argument excludes test_* documents. doc = Document(TEST_DOC_URI, workspace, "") diags = pydocstyle_lint.pylsp_lint(config, doc) assert not diags def test_pydocstyle_empty_source(config, workspace): doc = Document(DOC_URI, workspace, "") diags = pydocstyle_lint.pylsp_lint(config, doc) assert diags[0]['message'] == 'D100: Missing docstring in public module' assert len(diags) == 1 def test_pydocstyle_invalid_source(config, workspace): doc = Document(DOC_URI, workspace, "bad syntax") diags = pydocstyle_lint.pylsp_lint(config, doc) # We're unable to parse the file, so can't get any pydocstyle diagnostics assert not diags python-lsp-server-1.3.3/test/plugins/test_pyflakes_lint.py000066400000000000000000000034511415567622500240740ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
import sys from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins import pyflakes_lint DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def hello(): \tpass import json """ DOC_SYNTAX_ERR = """def hello() pass """ DOC_UNDEFINED_NAME_ERR = "a = b" DOC_ENCODING = """# encoding=utf-8 import sys """ def test_pyflakes(workspace): doc = Document(DOC_URI, workspace, DOC) diags = pyflakes_lint.pylsp_lint(doc) # One we're expecting is: msg = '\'sys\' imported but unused' unused_import = [d for d in diags if d['message'] == msg][0] assert unused_import['range']['start'] == {'line': 0, 'character': 0} assert unused_import['severity'] == lsp.DiagnosticSeverity.Warning def test_syntax_error_pyflakes(workspace): doc = Document(DOC_URI, workspace, DOC_SYNTAX_ERR) diag = pyflakes_lint.pylsp_lint(doc)[0] if sys.version_info[:2] >= (3, 10): assert diag['message'] == "expected ':'" else: assert diag['message'] == 'invalid syntax' assert diag['range']['start'] == {'line': 0, 'character': 12} assert diag['severity'] == lsp.DiagnosticSeverity.Error def test_undefined_name_pyflakes(workspace): doc = Document(DOC_URI, workspace, DOC_UNDEFINED_NAME_ERR) diag = pyflakes_lint.pylsp_lint(doc)[0] assert diag['message'] == 'undefined name \'b\'' assert diag['range']['start'] == {'line': 0, 'character': 4} assert diag['severity'] == lsp.DiagnosticSeverity.Error def test_unicode_encoding(workspace): doc = Document(DOC_URI, workspace, DOC_ENCODING) diags = pyflakes_lint.pylsp_lint(doc) assert len(diags) == 1 assert diags[0]['message'] == '\'sys\' imported but unused' python-lsp-server-1.3.3/test/plugins/test_pylint_lint.py000066400000000000000000000123671415567622500236030ustar00rootroot00000000000000# Copyright 2018 Google LLC. # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
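# For comparison with the plugin wrapper exercised above, pyflakes can also be
# driven through its public API. A hedged sketch, with stream handling kept
# deliberately simple:
import io

from pyflakes.api import check
from pyflakes.reporter import Reporter


def count_pyflakes_warnings(source, filename='<string>'):
    out, err = io.StringIO(), io.StringIO()
    # check() reports through the Reporter and returns the number of warnings.
    return check(source, filename, Reporter(out, err))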
import contextlib import os import sys import tempfile from test import py2_only, py3_only, IS_PY3 from pylsp import lsp, uris from pylsp.workspace import Document from pylsp.plugins import pylint_lint DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def hello(): \tpass import json """ DOC_SYNTAX_ERR = """def hello() pass """ @contextlib.contextmanager def temp_document(doc_text, workspace): try: with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file: name = temp_file.name temp_file.write(doc_text) yield Document(uris.from_fs_path(name), workspace) finally: os.remove(name) def write_temp_doc(document, contents): with open(document.path, 'w', encoding='utf-8') as temp_file: temp_file.write(contents) def test_pylint(config, workspace): with temp_document(DOC, workspace) as doc: diags = pylint_lint.pylsp_lint(config, doc, True) msg = '[unused-import] Unused import sys' unused_import = [d for d in diags if d['message'] == msg][0] assert unused_import['range']['start'] == {'line': 0, 'character': 0} assert unused_import['severity'] == lsp.DiagnosticSeverity.Warning if IS_PY3: # test running pylint in stdin config.plugin_settings('pylint')['executable'] = 'pylint' diags = pylint_lint.pylsp_lint(config, doc, True) msg = 'Unused import sys (unused-import)' unused_import = [d for d in diags if d['message'] == msg][0] assert unused_import['range']['start'] == { 'line': 0, 'character': 0, } assert unused_import['severity'] == lsp.DiagnosticSeverity.Warning @py3_only def test_syntax_error_pylint_py3(config, workspace): with temp_document(DOC_SYNTAX_ERR, workspace) as doc: diag = pylint_lint.pylsp_lint(config, doc, True)[0] if sys.version_info[:2] >= (3, 10): assert diag['message'].count("[syntax-error] expected ':'") else: assert diag['message'].startswith('[syntax-error] invalid syntax') # Pylint doesn't give column numbers for invalid syntax. assert diag['range']['start'] == {'line': 0, 'character': 12} assert diag['severity'] == lsp.DiagnosticSeverity.Error # test running pylint in stdin config.plugin_settings('pylint')['executable'] = 'pylint' diag = pylint_lint.pylsp_lint(config, doc, True)[0] if sys.version_info[:2] >= (3, 10): assert diag['message'].count("expected ':'") else: assert diag['message'].startswith('invalid syntax') # Pylint doesn't give column numbers for invalid syntax. assert diag['range']['start'] == {'line': 0, 'character': 12} assert diag['severity'] == lsp.DiagnosticSeverity.Error @py2_only def test_syntax_error_pylint_py2(config, workspace): with temp_document(DOC_SYNTAX_ERR, workspace) as doc: diag = pylint_lint.pylsp_lint(config, doc, True)[0] assert diag['message'].startswith('[syntax-error] invalid syntax') # Pylint doesn't give column numbers for invalid syntax. assert diag['range']['start'] == {'line': 0, 'character': 0} assert diag['severity'] == lsp.DiagnosticSeverity.Error def test_lint_free_pylint(config, workspace): # Can't use temp_document because it might give us a file that doesn't # match pylint's naming requirements. We should be keeping this file clean # though, so it works for a test of an empty lint. assert not pylint_lint.pylsp_lint( config, Document(uris.from_fs_path(__file__), workspace), True) def test_lint_caching(workspace): # Pylint can only operate on files, not in-memory contents. We cache the # diagnostics after a run so we can continue displaying them until the file # is saved again. 
# # We use PylintLinter.lint directly here rather than pylsp_lint so we can # pass --disable=invalid-name to pylint, since we want a temporary file but # need to ensure that pylint doesn't give us invalid-name when our temp # file has capital letters in its name. flags = '--disable=invalid-name' with temp_document(DOC, workspace) as doc: # Start with a file with errors. diags = pylint_lint.PylintLinter.lint(doc, True, flags) assert diags # Fix lint errors and write the changes to disk. Run the linter in the # in-memory mode to check the cached diagnostic behavior. write_temp_doc(doc, '') assert pylint_lint.PylintLinter.lint(doc, False, flags) == diags # Now check the on-disk behavior. assert not pylint_lint.PylintLinter.lint(doc, True, flags) # Make sure the cache was properly cleared. assert not pylint_lint.PylintLinter.lint(doc, False, flags) def test_per_file_caching(config, workspace): # Ensure that diagnostics are cached per-file. with temp_document(DOC, workspace) as doc: assert pylint_lint.pylsp_lint(config, doc, True) assert not pylint_lint.pylsp_lint( config, Document(uris.from_fs_path(__file__), workspace), False) python-lsp-server-1.3.3/test/plugins/test_references.py000066400000000000000000000046251415567622500233550ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os import pytest from pylsp import uris from pylsp.workspace import Document from pylsp.plugins.references import pylsp_references DOC1_NAME = 'test1.py' DOC2_NAME = 'test2.py' DOC1 = """class Test1(): pass """ DOC2 = """from test1 import Test1 try: Test1() except UnicodeError: pass """ @pytest.fixture def tmp_workspace(temp_workspace_factory): return temp_workspace_factory({ DOC1_NAME: DOC1, DOC2_NAME: DOC2, }) def test_references(tmp_workspace): # pylint: disable=redefined-outer-name # Over 'Test1' in class Test1(): position = {'line': 0, 'character': 8} DOC1_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC1_NAME)) doc1 = Document(DOC1_URI, tmp_workspace) refs = pylsp_references(doc1, position) # Definition, the import and the instantiation assert len(refs) == 3 # Briefly check excluding the definitions (also excludes imports, only counts uses) no_def_refs = pylsp_references(doc1, position, exclude_declaration=True) assert len(no_def_refs) == 1 # Make sure our definition is correctly located doc1_ref = [u for u in refs if u['uri'] == DOC1_URI][0] assert doc1_ref['range']['start'] == {'line': 0, 'character': 6} assert doc1_ref['range']['end'] == {'line': 0, 'character': 11} # Make sure our import is correctly located doc2_import_ref = [u for u in refs if u['uri'] != DOC1_URI][0] assert doc2_import_ref['range']['start'] == {'line': 0, 'character': 18} assert doc2_import_ref['range']['end'] == {'line': 0, 'character': 23} doc2_usage_ref = [u for u in refs if u['uri'] != DOC1_URI][1] assert doc2_usage_ref['range']['start'] == {'line': 3, 'character': 4} assert doc2_usage_ref['range']['end'] == {'line': 3, 'character': 9} def test_references_builtin(tmp_workspace): # pylint: disable=redefined-outer-name # Over 'UnicodeError': position = {'line': 4, 'character': 7} doc2_uri = uris.from_fs_path(os.path.join(str(tmp_workspace.root_path), DOC2_NAME)) doc2 = Document(doc2_uri, tmp_workspace) refs = pylsp_references(doc2, position) assert len(refs) >= 1 expected = {'start': {'line': 4, 'character': 7}, 'end': {'line': 4, 'character': 19}} ranges = [r['range'] for r in refs] assert expected in ranges 
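# Sketch of how a client might organise the reference locations returned by
# pylsp_references above, grouping ranges by document URI (illustrative only).
from collections import defaultdict


def group_references_by_uri(refs):
    grouped = defaultdict(list)
    for ref in refs:
        grouped[ref['uri']].append(ref['range'])
    return dict(grouped)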
python-lsp-server-1.3.3/test/plugins/test_rope_rename.py000066400000000000000000000025541415567622500235270ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os import pytest from pylsp import uris from pylsp.plugins.rope_rename import pylsp_rename from pylsp.workspace import Document DOC_NAME = "test1.py" DOC = """class Test1(): pass class Test2(Test1): pass """ @pytest.fixture def tmp_workspace(temp_workspace_factory): return temp_workspace_factory({DOC_NAME: DOC}) def test_rope_rename(tmp_workspace, config): # pylint: disable=redefined-outer-name position = {"line": 0, "character": 6} DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME)) doc = Document(DOC_URI, tmp_workspace) result = pylsp_rename(config, tmp_workspace, doc, position, "ShouldBeRenamed") assert len(result.keys()) == 1 changes = result.get("documentChanges") assert len(changes) == 1 changes = changes[0] # Note that this test differs from test_jedi_rename, because rope does not # seem to modify files that haven't been opened with textDocument/didOpen. assert changes.get("edits") == [ { "range": { "start": {"line": 0, "character": 0}, "end": {"line": 5, "character": 0}, }, "newText": "class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n", } ] python-lsp-server-1.3.3/test/plugins/test_signature.py000066400000000000000000000047731415567622500232410ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import pytest from pylsp import uris from pylsp.plugins import signature from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """import sys def main(param1, param2): \"\"\" Main docstring Args: param1 (str): Docs for param1 \"\"\" raise Exception() main( """ MULTI_LINE_DOC = """import sys def main(param1=None, param2=None, param3=None, param4=None, param5=None, param6=None, param7=None, param8=None): \"\"\" Main docstring Args: param1 (str): Docs for param1 \"\"\" raise Exception() main( """ def test_no_signature(workspace): # Over blank line sig_position = {'line': 9, 'character': 0} doc = Document(DOC_URI, workspace, DOC) sigs = signature.pylsp_signature_help(doc, sig_position)['signatures'] assert not sigs def test_signature(workspace): # Over '( ' in main( sig_position = {'line': 10, 'character': 5} doc = Document(DOC_URI, workspace, DOC) sig_info = signature.pylsp_signature_help(doc, sig_position) sigs = sig_info['signatures'] assert len(sigs) == 1 assert sigs[0]['label'] == 'main(param1, param2)' assert sigs[0]['parameters'][0]['label'] == 'param1' assert sigs[0]['parameters'][0]['documentation'] == 'Docs for param1' assert sig_info['activeParameter'] == 0 def test_multi_line_signature(workspace): # Over '( ' in main( sig_position = {'line': 17, 'character': 5} doc = Document(DOC_URI, workspace, MULTI_LINE_DOC) sig_info = signature.pylsp_signature_help(doc, sig_position) sigs = sig_info['signatures'] assert len(sigs) == 1 assert sigs[0]['label'] == ( 'main(param1=None, param2=None, param3=None, param4=None, ' 'param5=None, param6=None, param7=None, param8=None)' ) assert sigs[0]['parameters'][0]['label'] == 'param1' assert sigs[0]['parameters'][0]['documentation'] == 'Docs for param1' assert sig_info['activeParameter'] == 0 @pytest.mark.parametrize('regex,doc', [ (signature.SPHINX, " :param test: parameter docstring"), (signature.EPYDOC, " @param test: parameter docstring"), 
(signature.GOOGLE, " test (str): parameter docstring") ]) def test_docstring_params(regex, doc): m = regex.match(doc) assert m.group('param') == "test" assert m.group('doc') == "parameter docstring" python-lsp-server-1.3.3/test/plugins/test_symbols.py000066400000000000000000000055341415567622500227240ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. import os import sys import pytest from pylsp import uris from pylsp.plugins.symbols import pylsp_document_symbols from pylsp.lsp import SymbolKind from pylsp.workspace import Document PY2 = sys.version[0] == '2' LINUX = sys.platform.startswith('linux') CI = os.environ.get('CI') DOC_URI = uris.from_fs_path(__file__) DOC = """import sys a = 'hello' class B: def __init__(self): x = 2 self.y = x def main(x): y = 2 * x return y """ def helper_check_symbols_all_scope(symbols): # All eight symbols (import sys, a, B, __init__, x, y, main, y) assert len(symbols) == 8 def sym(name): return [s for s in symbols if s['name'] == name][0] # Check we have some sane mappings to VSCode constants assert sym('a')['kind'] == SymbolKind.Variable assert sym('B')['kind'] == SymbolKind.Class assert sym('__init__')['kind'] == SymbolKind.Method assert sym('main')['kind'] == SymbolKind.Function # Not going to get too in-depth here else we're just testing Jedi assert sym('a')['location']['range']['start'] == {'line': 2, 'character': 0} def test_symbols(config, workspace): doc = Document(DOC_URI, workspace, DOC) config.update({'plugins': {'jedi_symbols': {'all_scopes': False}}}) symbols = pylsp_document_symbols(config, doc) # All four symbols (import sys, a, B, main) # y is not in the root scope, it shouldn't be returned assert len(symbols) == 5 def sym(name): return [s for s in symbols if s['name'] == name][0] # Check we have some sane mappings to VSCode constants assert sym('a')['kind'] == SymbolKind.Variable assert sym('B')['kind'] == SymbolKind.Class assert sym('main')['kind'] == SymbolKind.Function # Not going to get too in-depth here else we're just testing Jedi assert sym('a')['location']['range']['start'] == {'line': 2, 'character': 0} # Ensure that the symbol range spans the whole definition assert sym('main')['location']['range']['start'] == {'line': 9, 'character': 0} assert sym('main')['location']['range']['end'] == {'line': 12, 'character': 0} def test_symbols_all_scopes(config, workspace): doc = Document(DOC_URI, workspace, DOC) symbols = pylsp_document_symbols(config, doc) helper_check_symbols_all_scope(symbols) @pytest.mark.skipif(PY2 or not LINUX or not CI, reason="tested on linux and python 3 only") def test_symbols_all_scopes_with_jedi_environment(workspace): doc = Document(DOC_URI, workspace, DOC) # Update config extra environment env_path = '/tmp/pyenv/bin/python' settings = {'pylsp': {'plugins': {'jedi': {'environment': env_path}}}} doc.update_config(settings) symbols = pylsp_document_symbols(doc._config, doc) helper_check_symbols_all_scope(symbols) python-lsp-server-1.3.3/test/plugins/test_yapf_format.py000066400000000000000000000031121415567622500235310ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
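# The expected strings below rely on yapf's default formatting style;
# test_config_file shows how a project-local .style.yapf (here with a small
# column_limit) changes the result.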
from pylsp import uris from pylsp.plugins.yapf_format import pylsp_format_document, pylsp_format_range from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """A = [ 'h', 'w', 'a' ] B = ['h', 'w'] """ GOOD_DOC = """A = ['hello', 'world']\n""" def test_format(workspace): doc = Document(DOC_URI, workspace, DOC) res = pylsp_format_document(doc) assert len(res) == 1 assert res[0]['newText'] == "A = ['h', 'w', 'a']\n\nB = ['h', 'w']\n" def test_range_format(workspace): doc = Document(DOC_URI, workspace, DOC) def_range = { 'start': {'line': 0, 'character': 0}, 'end': {'line': 4, 'character': 10} } res = pylsp_format_range(doc, def_range) assert len(res) == 1 # Make sure B is still badly formatted assert res[0]['newText'] == "A = ['h', 'w', 'a']\n\nB = ['h',\n\n\n'w']\n" def test_no_change(workspace): doc = Document(DOC_URI, workspace, GOOD_DOC) assert not pylsp_format_document(doc) def test_config_file(tmpdir, workspace): # a config file in the same directory as the source file will be used conf = tmpdir.join('.style.yapf') conf.write('[style]\ncolumn_limit = 14') src = tmpdir.join('test.py') doc = Document(uris.from_fs_path(src.strpath), workspace, DOC) # A was split on multiple lines because of column_limit from config file assert pylsp_format_document(doc)[0]['newText'] == "A = [\n 'h', 'w',\n 'a'\n]\n\nB = ['h', 'w']\n" python-lsp-server-1.3.3/test/test_document.py000066400000000000000000000057301415567622500213670ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. from test.fixtures import DOC_URI, DOC from pylsp.workspace import Document def test_document_props(doc): assert doc.uri == DOC_URI assert doc.source == DOC def test_document_lines(doc): assert len(doc.lines) == 4 assert doc.lines[0] == 'import sys\n' def test_document_source_unicode(workspace): document_mem = Document(DOC_URI, workspace, 'my source') document_disk = Document(DOC_URI, workspace) assert isinstance(document_mem.source, type(document_disk.source)) def test_offset_at_position(doc): assert doc.offset_at_position({'line': 0, 'character': 8}) == 8 assert doc.offset_at_position({'line': 1, 'character': 5}) == 16 assert doc.offset_at_position({'line': 2, 'character': 0}) == 12 assert doc.offset_at_position({'line': 2, 'character': 4}) == 16 assert doc.offset_at_position({'line': 4, 'character': 0}) == 51 def test_word_at_position(doc): """ Return the position under the cursor (or last in line if past the end) """ # import sys assert doc.word_at_position({'line': 0, 'character': 8}) == 'sys' # Past end of import sys assert doc.word_at_position({'line': 0, 'character': 1000}) == 'sys' # Empty line assert doc.word_at_position({'line': 1, 'character': 5}) == '' # def main(): assert doc.word_at_position({'line': 2, 'character': 0}) == 'def' # Past end of file assert doc.word_at_position({'line': 4, 'character': 0}) == '' def test_document_empty_edit(workspace): doc = Document('file:///uri', workspace, '') doc.apply_change({ 'range': { 'start': {'line': 0, 'character': 0}, 'end': {'line': 0, 'character': 0} }, 'text': 'f' }) assert doc.source == 'f' def test_document_line_edit(workspace): doc = Document('file:///uri', workspace, 'itshelloworld') doc.apply_change({ 'text': 'goodbye', 'range': { 'start': {'line': 0, 'character': 3}, 'end': {'line': 0, 'character': 8} } }) assert doc.source == 'itsgoodbyeworld' def test_document_multiline_edit(workspace): old = [ "def hello(a, b):\n", " print a\n", " print b\n" ] doc = 
Document('file:///uri', workspace, ''.join(old))
    doc.apply_change({'text': 'print a, b', 'range': {
        'start': {'line': 1, 'character': 4},
        'end': {'line': 2, 'character': 11}
    }})
    assert doc.lines == [
        "def hello(a, b):\n",
        "    print a, b\n"
    ]


def test_document_end_of_file_edit(workspace):
    old = [
        "print 'a'\n",
        "print 'b'\n"
    ]
    doc = Document('file:///uri', workspace, ''.join(old))

    doc.apply_change({'text': 'o', 'range': {
        'start': {'line': 2, 'character': 0},
        'end': {'line': 2, 'character': 0}
    }})

    assert doc.lines == [
        "print 'a'\n",
        "print 'b'\n",
        "o",
    ]
python-lsp-server-1.3.3/test/test_language_server.py000066400000000000000000000102441415567622500227160ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc.
# Copyright 2021- Python Language Server Contributors.

import os
import time
import multiprocessing
import sys
from threading import Thread

from flaky import flaky
from pylsp_jsonrpc.exceptions import JsonRpcMethodNotFound
import pytest

from pylsp.python_lsp import start_io_lang_server, PythonLSPServer

CALL_TIMEOUT = 10
RUNNING_IN_CI = bool(os.environ.get('CI'))


def start_client(client):
    client.start()


class _ClientServer:
    """ A class to set up a client/server pair """
    def __init__(self, check_parent_process=False):
        # Client to Server pipe
        csr, csw = os.pipe()
        # Server to client pipe
        scr, scw = os.pipe()

        if os.name == 'nt':
            ParallelKind = Thread
        else:
            if sys.version_info[:2] >= (3, 8):
                ParallelKind = multiprocessing.get_context("fork").Process
            else:
                ParallelKind = multiprocessing.Process

        self.process = ParallelKind(target=start_io_lang_server, args=(
            os.fdopen(csr, 'rb'), os.fdopen(scw, 'wb'), check_parent_process, PythonLSPServer
        ))
        self.process.start()

        self.client = PythonLSPServer(os.fdopen(scr, 'rb'), os.fdopen(csw, 'wb'), start_io_lang_server)
        self.client_thread = Thread(target=start_client, args=[self.client])
        self.client_thread.daemon = True
        self.client_thread.start()


@pytest.fixture
def client_server():
    """ A fixture that sets up a client/server pair and shuts down the server

    This client/server pair does not support checking parent process aliveness
    """
    client_server_pair = _ClientServer()

    yield client_server_pair.client

    shutdown_response = client_server_pair.client._endpoint.request('shutdown').result(timeout=CALL_TIMEOUT)
    assert shutdown_response is None
    client_server_pair.client._endpoint.notify('exit')


@pytest.fixture
def client_exited_server():
    """ A fixture that sets up a client/server pair that supports checking parent
    process aliveness and asserts the server has already exited
    """
    client_server_pair = _ClientServer(True)

    # yield client_server_pair.client
    yield client_server_pair

    assert client_server_pair.process.is_alive() is False


@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(sys.platform == 'darwin', reason='Too flaky on Mac')
def test_initialize(client_server):  # pylint: disable=redefined-outer-name
    response = client_server._endpoint.request('initialize', {
        'rootPath': os.path.dirname(__file__),
        'initializationOptions': {}
    }).result(timeout=CALL_TIMEOUT)
    assert 'capabilities' in response


@flaky(max_runs=10, min_passes=1)
@pytest.mark.skipif(not sys.platform.startswith('linux'), reason='Skipped on win and flaky on mac')
def test_exit_with_parent_process_died(client_exited_server):  # pylint: disable=redefined-outer-name
    # language server should have already exited before responding
    lsp_server, mock_process = client_exited_server.client, client_exited_server.process
    # with pytest.raises(Exception):
    lsp_server._endpoint.request('initialize', {
'processId': mock_process.pid, 'rootPath': os.path.dirname(__file__), 'initializationOptions': {} }).result(timeout=CALL_TIMEOUT) mock_process.terminate() time.sleep(CALL_TIMEOUT) assert not client_exited_server.client_thread.is_alive() @flaky(max_runs=10, min_passes=1) @pytest.mark.skipif(sys.platform.startswith('linux'), reason='Fails on linux') def test_not_exit_without_check_parent_process_flag(client_server): # pylint: disable=redefined-outer-name response = client_server._endpoint.request('initialize', { 'processId': 1234, 'rootPath': os.path.dirname(__file__), 'initializationOptions': {} }).result(timeout=CALL_TIMEOUT) assert 'capabilities' in response @flaky(max_runs=10, min_passes=1) @pytest.mark.skipif(RUNNING_IN_CI, reason='This test is hanging on CI') def test_missing_message(client_server): # pylint: disable=redefined-outer-name with pytest.raises(JsonRpcMethodNotFound): client_server._endpoint.request('unknown_method').result(timeout=CALL_TIMEOUT) python-lsp-server-1.3.3/test/test_uris.py000066400000000000000000000027551415567622500205370ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. from test import unix_only, windows_only import pytest from pylsp import uris @unix_only @pytest.mark.parametrize('uri,path', [ ('file:///foo/bar#frag', '/foo/bar'), ('file:/foo/bar#frag', '/foo/bar'), ('file:/foo/space%20%3Fbar#frag', '/foo/space ?bar'), ]) def test_to_fs_path(uri, path): assert uris.to_fs_path(uri) == path @windows_only @pytest.mark.parametrize('uri,path', [ ('file:///c:/far/boo', 'c:\\far\\boo'), ('file:///C:/far/boo', 'c:\\far\\boo'), ('file:///C:/far/space%20%3Fboo', 'c:\\far\\space ?boo'), ]) def test_win_to_fs_path(uri, path): assert uris.to_fs_path(uri) == path @unix_only @pytest.mark.parametrize('path,uri', [ ('/foo/bar', 'file:///foo/bar'), ('/foo/space ?bar', 'file:///foo/space%20%3Fbar'), ]) def test_from_fs_path(path, uri): assert uris.from_fs_path(path) == uri @windows_only @pytest.mark.parametrize('path,uri', [ ('c:\\far\\boo', 'file:///c:/far/boo'), ('C:\\far\\space ?boo', 'file:///c:/far/space%20%3Fboo') ]) def test_win_from_fs_path(path, uri): assert uris.from_fs_path(path) == uri @pytest.mark.parametrize('uri,kwargs,new_uri', [ ('file:///foo/bar', {'path': '/baz/boo'}, 'file:///baz/boo'), ('file:///D:/hello%20world.py', {'path': 'D:/hello universe.py'}, 'file:///d:/hello%20universe.py') ]) def test_uri_with(uri, kwargs, new_uri): assert uris.uri_with(uri, **kwargs) == new_uri python-lsp-server-1.3.3/test/test_utils.py000066400000000000000000000044441415567622500207120ustar00rootroot00000000000000# Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
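# The debounce tests below exercise pylsp._utils.debounce. A minimal sketch of
# such a decorator is given here for orientation only; the real implementation
# lives in pylsp/_utils.py and may differ (it also supports a keyed_by
# argument, as used below).
import threading


def _debounce_sketch(interval_s):
    """Collapse bursts of calls with identical arguments into one delayed call."""
    def wrapper(func):
        timers = {}

        def debounced(*args, **kwargs):
            # Key pending timers by the call arguments so that repeated calls
            # with the same arguments reset the timer instead of stacking up.
            key = (args, tuple(sorted(kwargs.items())))
            pending = timers.get(key)
            if pending is not None:
                pending.cancel()
            timer = threading.Timer(interval_s, func, args, kwargs)
            timers[key] = timer
            timer.start()
        return debounced
    return wrapper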
import time from unittest import mock from flaky import flaky from pylsp import _utils @flaky(max_runs=6, min_passes=1) def test_debounce(): interval = 0.1 obj = mock.Mock() @_utils.debounce(0.1) def call_m(): obj() assert not obj.mock_calls call_m() call_m() call_m() assert not obj.mock_calls time.sleep(interval * 2) assert len(obj.mock_calls) == 1 call_m() time.sleep(interval * 2) assert len(obj.mock_calls) == 2 @flaky(max_runs=6, min_passes=1) def test_debounce_keyed_by(): interval = 0.1 obj = mock.Mock() @_utils.debounce(0.1, keyed_by='key') def call_m(key): obj(key) assert not obj.mock_calls call_m(1) call_m(2) call_m(3) assert not obj.mock_calls time.sleep(interval * 2) obj.assert_has_calls([ mock.call(1), mock.call(2), mock.call(3), ], any_order=True) assert len(obj.mock_calls) == 3 call_m(1) call_m(1) call_m(1) time.sleep(interval * 2) assert len(obj.mock_calls) == 4 def test_list_to_string(): assert _utils.list_to_string("string") == "string" assert _utils.list_to_string(["a", "r", "r", "a", "y"]) == "a,r,r,a,y" def test_find_parents(tmpdir): subsubdir = tmpdir.ensure_dir("subdir", "subsubdir") path = subsubdir.ensure("path.py") test_cfg = tmpdir.ensure("test.cfg") assert _utils.find_parents(tmpdir.strpath, path.strpath, ["test.cfg"]) == [test_cfg.strpath] def test_merge_dicts(): assert _utils.merge_dicts( {'a': True, 'b': {'x': 123, 'y': {'hello': 'world'}}}, {'a': False, 'b': {'y': [], 'z': 987}} ) == {'a': False, 'b': {'x': 123, 'y': [], 'z': 987}} def test_clip_column(): assert _utils.clip_column(0, [], 0) == 0 assert _utils.clip_column(2, ['123'], 0) == 2 assert _utils.clip_column(3, ['123'], 0) == 3 assert _utils.clip_column(5, ['123'], 0) == 3 assert _utils.clip_column(0, ['\n', '123'], 0) == 0 assert _utils.clip_column(1, ['\n', '123'], 0) == 0 assert _utils.clip_column(2, ['123\n', '123'], 0) == 2 assert _utils.clip_column(3, ['123\n', '123'], 0) == 3 assert _utils.clip_column(4, ['123\n', '123'], 1) == 3 python-lsp-server-1.3.3/test/test_workspace.py000066400000000000000000000244271415567622500215530ustar00rootroot00000000000000# Copyright 2017 Palantir Technologies, Inc. 
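# These tests cover the workspace document registry, multi-root workspace
# folder handling (workspace/didChangeWorkspaceFolders) and per-workspace
# configuration loading.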
import os import pathlib import pytest from pylsp import uris DOC_URI = uris.from_fs_path(__file__) def path_as_uri(path): return pathlib.Path(os.path.abspath(path)).as_uri() def test_local(pylsp): """ Since the workspace points to the test directory """ assert pylsp.workspace.is_local() def test_put_document(pylsp): pylsp.workspace.put_document(DOC_URI, 'content') assert DOC_URI in pylsp.workspace._docs def test_get_document(pylsp): pylsp.workspace.put_document(DOC_URI, 'TEXT') assert pylsp.workspace.get_document(DOC_URI).source == 'TEXT' def test_get_missing_document(tmpdir, pylsp): source = 'TEXT' doc_path = tmpdir.join("test_document.py") doc_path.write(source) doc_uri = uris.from_fs_path(str(doc_path)) assert pylsp.workspace.get_document(doc_uri).source == 'TEXT' def test_rm_document(pylsp): pylsp.workspace.put_document(DOC_URI, 'TEXT') assert pylsp.workspace.get_document(DOC_URI).source == 'TEXT' pylsp.workspace.rm_document(DOC_URI) assert pylsp.workspace.get_document(DOC_URI)._source is None @pytest.mark.parametrize('metafiles', [('setup.py',), ('pyproject.toml',), ('setup.py', 'pyproject.toml')]) def test_non_root_project(pylsp, metafiles): repo_root = os.path.join(pylsp.workspace.root_path, 'repo-root') os.mkdir(repo_root) project_root = os.path.join(repo_root, 'project-root') os.mkdir(project_root) for metafile in metafiles: with open(os.path.join(project_root, metafile), 'w+', encoding='utf-8') as f: f.write('# ' + metafile) test_uri = uris.from_fs_path(os.path.join(project_root, 'hello/test.py')) pylsp.workspace.put_document(test_uri, 'assert True') test_doc = pylsp.workspace.get_document(test_uri) assert project_root in test_doc.sys_path() def test_root_project_with_no_setup_py(pylsp): """Default to workspace root.""" workspace_root = pylsp.workspace.root_path test_uri = uris.from_fs_path(os.path.join(workspace_root, 'hello/test.py')) pylsp.workspace.put_document(test_uri, 'assert True') test_doc = pylsp.workspace.get_document(test_uri) assert workspace_root in test_doc.sys_path() def test_multiple_workspaces_from_initialize(pylsp_w_workspace_folders): pylsp, workspace_folders = pylsp_w_workspace_folders assert len(pylsp.workspaces) == 2 folders_uris = [uris.from_fs_path(str(folder)) for folder in workspace_folders] for folder_uri in folders_uris: assert folder_uri in pylsp.workspaces assert folders_uris[0] == pylsp.root_uri # Create file in the first workspace folder. file1 = workspace_folders[0].join('file1.py') file1.write('import os') msg1 = { 'uri': path_as_uri(str(file1)), 'version': 1, 'text': 'import os' } pylsp.m_text_document__did_open(textDocument=msg1) assert msg1['uri'] in pylsp.workspace._docs assert msg1['uri'] in pylsp.workspaces[folders_uris[0]]._docs # Create file in the second workspace folder. 
file2 = workspace_folders[1].join('file2.py') file2.write('import sys') msg2 = { 'uri': path_as_uri(str(file2)), 'version': 1, 'text': 'import sys' } pylsp.m_text_document__did_open(textDocument=msg2) assert msg2['uri'] not in pylsp.workspace._docs assert msg2['uri'] in pylsp.workspaces[folders_uris[1]]._docs def test_multiple_workspaces(tmpdir, pylsp): workspace1_dir = tmpdir.mkdir('workspace1') workspace2_dir = tmpdir.mkdir('workspace2') file1 = workspace1_dir.join('file1.py') file2 = workspace2_dir.join('file1.py') file1.write('import os') file2.write('import sys') msg = { 'uri': path_as_uri(str(file1)), 'version': 1, 'text': 'import os' } pylsp.m_text_document__did_open(textDocument=msg) assert msg['uri'] in pylsp.workspace._docs added_workspaces = [{'uri': path_as_uri(str(x))} for x in (workspace1_dir, workspace2_dir)] event = {'added': added_workspaces, 'removed': []} pylsp.m_workspace__did_change_workspace_folders(event) for workspace in added_workspaces: assert workspace['uri'] in pylsp.workspaces workspace1_uri = added_workspaces[0]['uri'] assert msg['uri'] not in pylsp.workspace._docs assert msg['uri'] in pylsp.workspaces[workspace1_uri]._docs msg = { 'uri': path_as_uri(str(file2)), 'version': 1, 'text': 'import sys' } pylsp.m_text_document__did_open(textDocument=msg) workspace2_uri = added_workspaces[1]['uri'] assert msg['uri'] in pylsp.workspaces[workspace2_uri]._docs event = {'added': [], 'removed': [added_workspaces[0]]} pylsp.m_workspace__did_change_workspace_folders(event) assert workspace1_uri not in pylsp.workspaces def test_multiple_workspaces_wrong_removed_uri(pylsp, tmpdir): workspace = {'uri': str(tmpdir.mkdir('Test123'))} event = {'added': [], 'removed': [workspace]} pylsp.m_workspace__did_change_workspace_folders(event) assert workspace['uri'] not in pylsp.workspaces def test_root_workspace_changed(pylsp, tmpdir): test_uri = str(tmpdir.mkdir('Test123')) pylsp.root_uri = test_uri pylsp.workspace._root_uri = test_uri workspace1 = {'uri': test_uri} workspace2 = {'uri': str(tmpdir.mkdir('NewTest456'))} event = {'added': [workspace2], 'removed': [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert workspace2['uri'] == pylsp.workspace._root_uri assert workspace2['uri'] == pylsp.root_uri def test_root_workspace_not_changed(pylsp, tmpdir): # removed uri != root_uri test_uri_1 = str(tmpdir.mkdir('Test12')) pylsp.root_uri = test_uri_1 pylsp.workspace._root_uri = test_uri_1 workspace1 = {'uri': str(tmpdir.mkdir('Test1234'))} workspace2 = {'uri': str(tmpdir.mkdir('NewTest456'))} event = {'added': [workspace2], 'removed': [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert test_uri_1 == pylsp.workspace._root_uri assert test_uri_1 == pylsp.root_uri # empty 'added' list test_uri_2 = str(tmpdir.mkdir('Test123')) new_root_uri = workspace2['uri'] pylsp.root_uri = test_uri_2 pylsp.workspace._root_uri = test_uri_2 workspace1 = {'uri': test_uri_2} event = {'added': [], 'removed': [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == pylsp.workspace._root_uri assert new_root_uri == pylsp.root_uri # empty 'removed' list event = {'added': [workspace1], 'removed': []} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == pylsp.workspace._root_uri assert new_root_uri == pylsp.root_uri # 'added' list has no 'uri' workspace2 = {'TESTuri': 'Test1234'} event = {'added': [workspace2], 'removed': [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == 
pylsp.workspace._root_uri
    assert new_root_uri == pylsp.root_uri


def test_root_workspace_removed(tmpdir, pylsp):
    workspace1_dir = tmpdir.mkdir('workspace1')
    workspace2_dir = tmpdir.mkdir('workspace2')
    root_uri = pylsp.root_uri

    # Add workspaces to the server
    added_workspaces = [{'uri': path_as_uri(str(x))} for x in (workspace1_dir, workspace2_dir)]
    event = {'added': added_workspaces, 'removed': []}
    pylsp.m_workspace__did_change_workspace_folders(event)

    # Remove the root workspace
    removed_workspaces = [{'uri': root_uri}]
    event = {'added': [], 'removed': removed_workspaces}
    pylsp.m_workspace__did_change_workspace_folders(event)

    # Assert that the first of the workspaces (in alphabetical order) is now
    # the root workspace
    assert pylsp.root_uri == path_as_uri(str(workspace1_dir))
    assert pylsp.workspace._root_uri == path_as_uri(str(workspace1_dir))


@pytest.mark.skipif(os.name == 'nt', reason="Fails on Windows")
def test_workspace_loads_pycodestyle_config(pylsp, tmpdir):
    workspace1_dir = tmpdir.mkdir('Test123')
    pylsp.root_uri = str(workspace1_dir)
    pylsp.workspace._root_uri = str(workspace1_dir)

    # Test that project settings are loaded
    workspace2_dir = tmpdir.mkdir('NewTest456')
    cfg = workspace2_dir.join("pycodestyle.cfg")
    cfg.write(
        "[pycodestyle]\n"
        "max-line-length = 1000"
    )

    workspace1 = {'uri': str(workspace1_dir)}
    workspace2 = {'uri': str(workspace2_dir)}
    event = {'added': [workspace2], 'removed': [workspace1]}
    pylsp.m_workspace__did_change_workspace_folders(event)

    settings = pylsp.workspaces[str(workspace2_dir)]._config.settings()
    assert settings['plugins']['pycodestyle']['maxLineLength'] == 1000

    # Test that project settings prevail over server ones.
    server_settings = {'pylsp': {'plugins': {'pycodestyle': {'maxLineLength': 10}}}}
    pylsp.m_workspace__did_change_configuration(server_settings)
    assert settings['plugins']['pycodestyle']['maxLineLength'] == 1000

    # Test switching to another workspace with different settings
    workspace3_dir = tmpdir.mkdir('NewTest789')
    cfg1 = workspace3_dir.join("pycodestyle.cfg")
    cfg1.write(
        "[pycodestyle]\n"
        "max-line-length = 20"
    )

    workspace3 = {'uri': str(workspace3_dir)}
    event = {'added': [workspace3], 'removed': [workspace2]}
    pylsp.m_workspace__did_change_workspace_folders(event)

    settings = pylsp.workspaces[str(workspace3_dir)]._config.settings()
    assert settings['plugins']['pycodestyle']['maxLineLength'] == 20


def test_settings_of_added_workspace(pylsp, tmpdir):
    test_uri = str(tmpdir.mkdir('Test123'))
    pylsp.root_uri = test_uri
    pylsp.workspace._root_uri = test_uri

    # Set some settings for the server.
    server_settings = {'pylsp': {'plugins': {'jedi': {'environment': '/usr/bin/python3'}}}}
    pylsp.m_workspace__did_change_configuration(server_settings)

    # Create a new workspace.
    workspace1 = {'uri': str(tmpdir.mkdir('NewTest456'))}
    event = {'added': [workspace1]}
    pylsp.m_workspace__did_change_workspace_folders(event)

    # Assert settings are inherited from the server config.
    workspace1_object = pylsp.workspaces[workspace1['uri']]
    workspace1_jedi_settings = workspace1_object._config.plugin_settings('jedi')
    assert workspace1_jedi_settings == server_settings['pylsp']['plugins']['jedi']
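# Taken together, the last two tests pin down the configuration precedence the
# assertions above rely on: a project-level config file (pycodestyle.cfg here)
# wins over settings pushed via workspace/didChangeConfiguration, while a newly
# added workspace folder starts out inheriting the server-wide settings.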