././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4914992 portalocker-3.1.1/0000755000076500000240000000000014734777047012740 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1719096506.0 portalocker-3.1.1/.coveragerc0000644000076500000240000000050614635652272015052 0ustar00rickstaff[report] ignore_errors = True fail_under = 100 exclude_lines = pragma: no cover def __repr__ if self.debug: if settings.DEBUG raise AssertionError raise NotImplementedError if 0: if __name__ == .__main__.: typing.Protocol omit = portalocker/redis.py [run] source = src branch = True ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4485645 portalocker-3.1.1/.github/0000755000076500000240000000000014734777047014300 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1594933114.0 portalocker-3.1.1/.github/FUNDING.yml0000644000076500000240000000001613704137572016077 0ustar00rickstaffgithub: wolph ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/.github/dependabot.yml0000644000076500000240000000066214734754243017125 0ustar00rickstaffversion: 2 updates: - package-ecosystem: github-actions directory: / target-branch: master labels: - "meta: CI" schedule: interval: monthly groups: actions: patterns: - "*" - package-ecosystem: pip directory: / target-branch: master labels: - "meta: deps" schedule: interval: monthly groups: actions: patterns: - "*" ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1735654951.451137 portalocker-3.1.1/.github/workflows/0000755000076500000240000000000014734777047016335 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645348.0 portalocker-3.1.1/.github/workflows/lint.yml0000644000076500000240000000217214734754244020022 0ustar00rickstaffname: lint on: push: pull_request: env: FORCE_COLOR: 1 concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: lint: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ['3.9', '3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Python version run: python --version - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel mypy python -m pip install -e '.[tests]' - name: Linting with pyright uses: jakebailey/pyright-action@v2 with: path: portalocker portalocker_tests - name: Linting with ruff uses: jpetrucciani/ruff-check@main with: extra-args: portalocker portalocker_tests - name: Linting with mypy run: | python -m mypy portalocker portalocker_tests ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645348.0 portalocker-3.1.1/.github/workflows/python-package.yml0000644000076500000240000000317614734754244021773 0ustar00rickstaffname: test on: push: branches: [ develop, master ] pull_request: branches: [ develop ] env: FORCE_COLOR: 1 concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: # Run os specific tests on the slower OS X/Windows machines windows_osx: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ['3.9', '3.10', '3.11', '3.12'] os: ['macos-latest', 'windows-latest'] steps: - uses: 
actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Python version run: python --version - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel python -m pip install -e ".[tests]" - name: Test with pytest run: python -m pytest # Run all tests including Redis on Linux linux: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ['3.9', '3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v4 - name: Start Redis uses: supercharge/redis-github-action@1.8.0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Python version run: python --version - name: Install dependencies run: | python -m pip install tox - name: Test with pytest run: tox -p all ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645348.0 portalocker-3.1.1/.github/workflows/stale.yml0000644000076500000240000000064614734754244020170 0ustar00rickstaffname: Close stale issues and pull requests on: workflow_dispatch: schedule: - cron: '0 0 * * *' # Run every day at midnight jobs: stale: runs-on: ubuntu-latest steps: - uses: actions/stale@v9 with: days-before-stale: 30 days-before-pr-stale: -1 exempt-issue-labels: in-progress,help-wanted,pinned,security,enhancement exempt-all-pr-assignees: true ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1398781121.0 portalocker-3.1.1/.gitignore0000644000076500000240000000006212327732301014702 0ustar00rickstaffbuild/ locked_file dist htmlcov *.egg-info .cache ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/.pre-commit-config.yaml0000644000076500000240000000356114734754243017217 0ustar00rickstaffci: autoupdate_branch: "master" autoupdate_commit_msg: "⬆️ update pre-commit hooks" skip: - basedpyright repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - id: check-added-large-files - id: check-ast - id: check-case-conflict - id: check-docstring-first - id: check-executables-have-shebangs - id: check-illegal-windows-names - id: check-json - id: check-merge-conflict - id: check-shebang-scripts-are-executable - id: check-symlinks - id: check-toml - id: check-vcs-permalinks - id: check-xml - id: check-yaml - id: debug-statements - id: destroyed-symlinks - id: detect-aws-credentials args: [--allow-missing-credentials] - id: detect-private-key - id: fix-byte-order-marker - id: forbid-submodules - id: name-tests-test args: [--pytest-test-first] - id: no-commit-to-branch args: [--branch, master] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - repo: https://github.com/igorshubovych/markdownlint-cli rev: v0.43.0 hooks: - id: markdownlint - repo: https://github.com/executablebooks/mdformat rev: 0.7.21 hooks: - id: mdformat additional_dependencies: - mdformat-gfm - mdformat-gfm-alerts - repo: https://github.com/crate-ci/typos rev: v1.28.4 hooks: - id: typos - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.8.4 hooks: - id: ruff args: [--fix, --show-fixes] types_or: [python, pyi] - id: ruff-format types_or: [python, pyi] - repo: local hooks: - id: basedpyright name: basedpyright entry: uv run --no-sync --locked basedpyright language: system types_or: [python, pyi] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611419668.0 
portalocker-3.1.1/CHANGELOG.rst0000644000076500000240000000256314003050024014727 0ustar00rickstaffFor newer changes please look at the comments for the Git tags: https://github.com/WoLpH/portalocker/tags For more details the commit log for the master branch could be useful: https://github.com/WoLpH/portalocker/commits/master 1.5: * Moved tests to prevent collisions with other packages 1.4: * Added optional file open parameters 1.3: * Improved documentation * Added file handle to locking exceptions 1.2: * Added signed releases and tags to PyPI and Git 1.1: * Added support for Python 3.6+ * Using real time to calculate timeout 1.0: * Complete code refactor. - Splitting of code in logical classes - 100% test coverage and change in API behaviour - The default behavior of the `Lock` class has changed to append instead of write/truncate. 0.6: * Added msvcrt support for Windows 0.5: * Python 3 support 0.4: * Fixing a few bugs, added coveralls support, switched to py.test and added 100% test coverage. - Fixing exception thrown when fail_when_locked is true - Fixing exception "Lock object has no attribute '_release_lock'" when fail_when_locked is true due to the call to Lock._release_lock() which fails because _release_lock is not defined. 0.3: * Now actually returning the file descriptor from the `Lock` class 0.2: * Added `Lock` class to help prevent cache race conditions 0.1: * Initial release ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1666106348.0 portalocker-3.1.1/LICENSE0000644000076500000240000000266414323541754013741 0ustar00rickstaffCopyright 2022 Rick van Hattem Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1573518153.0 portalocker-3.1.1/MANIFEST.in0000644000076500000240000000014213562375511014457 0ustar00rickstaffinclude CHANGELOG.rst include README.rst include LICENSE recursive-include portalocker_tests *.py ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1735654951.490666 portalocker-3.1.1/PKG-INFO0000644000076500000240000002062514734777047014042 0ustar00rickstaffMetadata-Version: 2.1 Name: portalocker Version: 3.1.1 Summary: Wraps the portalocker recipe for easy usage Author-email: Rick van Hattem License: BSD-3-Clause Project-URL: bugs, https://github.com/wolph/portalocker/issues Project-URL: documentation, https://portalocker.readthedocs.io/en/latest/ Project-URL: repository, https://github.com/wolph/portalocker/ Keywords: locking,locks,with,statement,windows,linux,unix Platform: any Classifier: Development Status :: 5 - Production/Stable Classifier: Development Status :: 6 - Mature Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Natural Language :: English Classifier: Operating System :: MacOS :: MacOS X Classifier: Operating System :: MacOS Classifier: Operating System :: Microsoft :: MS-DOS Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Microsoft Classifier: Operating System :: POSIX :: BSD :: FreeBSD Classifier: Operating System :: POSIX :: BSD Classifier: Operating System :: POSIX :: Linux Classifier: Operating System :: POSIX :: SunOS/Solaris Classifier: Operating System :: POSIX Classifier: Operating System :: Unix Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: IronPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Programming Language :: Python :: Implementation Classifier: Programming Language :: Python Classifier: Topic :: Education :: Testing Classifier: Topic :: Office/Business Classifier: Topic :: Other/Nonlisted Topic Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: System :: Monitoring Classifier: Typing :: Typed Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: pywin32>=226; platform_system == "Windows" Provides-Extra: docs Requires-Dist: sphinx>=1.7.1; extra == "docs" Provides-Extra: tests Requires-Dist: pytest>=5.4.1; extra == "tests" Requires-Dist: pytest-cov>=2.8.1; extra == "tests" Requires-Dist: pytest-timeout>=2.1.0; extra == "tests" Requires-Dist: sphinx>=6.0.0; extra == "tests" Requires-Dist: pytest-mypy>=0.8.0; extra == "tests" Requires-Dist: types-redis; extra == "tests" Requires-Dist: redis; extra == "tests" Requires-Dist: pytest-rerunfailures>=15.0; extra == "tests" Provides-Extra: redis Requires-Dist: redis; extra == "redis" ############################################ portalocker - Cross-platform locking library ############################################ .. 
image:: https://github.com/WoLpH/portalocker/actions/workflows/python-package.yml/badge.svg?branch=master
    :alt: Linux Test Status
    :target: https://github.com/WoLpH/portalocker/actions/

.. image:: https://ci.appveyor.com/api/projects/status/mgqry98hgpy4prhh?svg=true
    :alt: Windows Tests Status
    :target: https://ci.appveyor.com/project/WoLpH/portalocker

.. image:: https://coveralls.io/repos/WoLpH/portalocker/badge.svg?branch=master
    :alt: Coverage Status
    :target: https://coveralls.io/r/WoLpH/portalocker?branch=master

Overview
--------

Portalocker is a library that provides an easy API for file locking.

An important detail to note is that on Linux and Unix systems the locks are
advisory by default. Mandatory file locking can be enabled on Linux by passing
the `-o mand` option to the mount command, but this is generally not
recommended. For more information about the subject:

- https://en.wikipedia.org/wiki/File_locking
- http://stackoverflow.com/questions/39292051/portalocker-does-not-seem-to-lock
- https://stackoverflow.com/questions/12062466/mandatory-file-lock-on-linux

The module is currently maintained by Rick van Hattem. The project resides at
https://github.com/WoLpH/portalocker. Bugs and feature requests can be
submitted there; patches are also very welcome.

Security contact information
----------------------------

To report a security vulnerability, please use the `Tidelift security
contact `_. Tidelift will coordinate the fix and disclosure.

Redis Locks
-----------

This library now features a lock based on Redis which supports locks across
multiple threads and processes, and even distributed locks across multiple
computers.

As opposed to most Redis locking systems, which are based on key/value pairs,
this is an extremely reliable lock built on the Redis pubsub system. The big
advantage is that if the connection is killed due to network issues, a
crashing process or anything else, the lock is released immediately instead of
waiting for a lock timeout.

First make sure you have everything installed correctly:

::

    pip install "portalocker[redis]"

Usage is really easy:

::

    import portalocker

    lock = portalocker.RedisLock('some_lock_channel_name')

    with lock:
        print('do something here')

The API is essentially identical to the other ``Lock`` classes, so in addition
to the ``with`` statement you can also use ``lock.acquire(...)``.

Python 2
--------

Python 2 was supported in versions before Portalocker 2.0. If you are still
using Python 2, you can run this to install:

::

    pip install "portalocker<2"

Tips
----

On some networked filesystems it may be necessary to force an `os.fsync()`
before closing the file so the data is actually written before another client
reads it. Effectively this comes down to:

::

   with portalocker.Lock('some_file', 'rb+', timeout=60) as fh:
       # do what you need to do
       ...

       # flush and sync to filesystem
       fh.flush()
       os.fsync(fh.fileno())

Links
-----

* Documentation - http://portalocker.readthedocs.org/en/latest/
* Source - https://github.com/WoLpH/portalocker
* Bug reports - https://github.com/WoLpH/portalocker/issues
* Package homepage - https://pypi.python.org/pypi/portalocker
* My blog - http://w.wol.ph/

Examples
--------

To make sure your cache generation scripts don't race, use the `Lock` class:

>>> import portalocker
>>> with portalocker.Lock('somefile', timeout=1) as fh:
...     print('writing some stuff to my cache...', file=fh)
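
The ``Lock`` class takes a few more arguments than just the filename and
timeout: the changelog above mentions optional file open parameters and a
``fail_when_locked`` flag, for example. The snippet below is a minimal sketch
of how those could be combined; the chosen values and the exact exception
raised are assumptions here, so check the API documentation for the
authoritative behaviour:

::

    import portalocker

    try:
        # fail_when_locked=True is expected to raise AlreadyLocked right away
        # when another process already holds the lock, instead of retrying
        # until the timeout expires.
        with portalocker.Lock(
            'somefile',
            mode='a',
            timeout=5,
            fail_when_locked=True,
        ) as fh:
            fh.write('appending a line while holding the lock\n')
            fh.flush()
    except portalocker.exceptions.AlreadyLocked:
        print('somefile is locked by another process')
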
To customize opening and locking, a manual approach is also possible:

>>> import portalocker
>>> file = open('somefile', 'r+')
>>> portalocker.lock(file, portalocker.LockFlags.EXCLUSIVE)
>>> file.seek(12)
>>> file.write('foo')
>>> file.close()

Explicit unlocking is not needed in most cases, but omitting it has been known
to cause issues:
https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/42#issuecomment-601108266

If needed, it can be done through:

>>> portalocker.unlock(file)

Do note that your data might still be in a buffer, so it may not be available
to other readers until you `flush()` or `close()` the file.

To create a cross-platform bounded semaphore across multiple processes you can
use the `BoundedSemaphore` class, which functions somewhat similarly to
`threading.BoundedSemaphore`:

>>> import portalocker
>>> n = 2
>>> timeout = 0.1

>>> semaphore_a = portalocker.BoundedSemaphore(n, timeout=timeout)
>>> semaphore_b = portalocker.BoundedSemaphore(n, timeout=timeout)
>>> semaphore_c = portalocker.BoundedSemaphore(n, timeout=timeout)

>>> semaphore_a.acquire()
>>> semaphore_b.acquire()
>>> semaphore_c.acquire()
Traceback (most recent call last):
  ...
portalocker.exceptions.AlreadyLocked

More examples can be found in the `tests `_.

Versioning
----------

This library follows `Semantic Versioning `_.

Changelog
---------

Every release has a ``git tag`` with a commit message for the tag explaining
what was added and/or changed. The list of tags/releases including the commit
messages can be found here: https://github.com/WoLpH/portalocker/releases

License
-------

See the `LICENSE `_ file.

portalocker-3.1.1/README.rst

############################################
portalocker - Cross-platform locking library
############################################

.. image:: https://github.com/WoLpH/portalocker/actions/workflows/python-package.yml/badge.svg?branch=master
    :alt: Linux Test Status
    :target: https://github.com/WoLpH/portalocker/actions/

.. image:: https://ci.appveyor.com/api/projects/status/mgqry98hgpy4prhh?svg=true
    :alt: Windows Tests Status
    :target: https://ci.appveyor.com/project/WoLpH/portalocker

.. image:: https://coveralls.io/repos/WoLpH/portalocker/badge.svg?branch=master
    :alt: Coverage Status
    :target: https://coveralls.io/r/WoLpH/portalocker?branch=master

Overview
--------

Portalocker is a library that provides an easy API for file locking.

An important detail to note is that on Linux and Unix systems the locks are
advisory by default. Mandatory file locking can be enabled on Linux by passing
the `-o mand` option to the mount command, but this is generally not
recommended. For more information about the subject:

- https://en.wikipedia.org/wiki/File_locking
- http://stackoverflow.com/questions/39292051/portalocker-does-not-seem-to-lock
- https://stackoverflow.com/questions/12062466/mandatory-file-lock-on-linux

The module is currently maintained by Rick van Hattem. The project resides at
https://github.com/WoLpH/portalocker. Bugs and feature requests can be
submitted there; patches are also very welcome.

Security contact information
----------------------------

To report a security vulnerability, please use the `Tidelift security
contact `_. Tidelift will coordinate the fix and disclosure.

Redis Locks
-----------

This library now features a lock based on Redis which supports locks across
multiple threads and processes, and even distributed locks across multiple
computers.

As opposed to most Redis locking systems, which are based on key/value pairs,
this is an extremely reliable lock built on the Redis pubsub system. The big
advantage is that if the connection is killed due to network issues, a
crashing process or anything else, the lock is released immediately instead of
waiting for a lock timeout.

First make sure you have everything installed correctly:

::

    pip install "portalocker[redis]"

Usage is really easy:

::

    import portalocker

    lock = portalocker.RedisLock('some_lock_channel_name')

    with lock:
        print('do something here')

The API is essentially identical to the other ``Lock`` classes, so in addition
to the ``with`` statement you can also use ``lock.acquire(...)``.
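
For example, the same Redis lock can be used without the ``with`` statement.
This is a minimal sketch of the ``acquire()``/``release()`` form; it assumes a
reachable Redis server, just like the example above:

::

    import portalocker

    lock = portalocker.RedisLock('some_lock_channel_name')

    lock.acquire()
    try:
        print('do something here')
    finally:
        # Release the lock explicitly so other clients can acquire it,
        # mirroring what leaving the with-block would do.
        lock.release()
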
Python 2
--------

Python 2 was supported in versions before Portalocker 2.0. If you are still
using Python 2, you can run this to install:

::

    pip install "portalocker<2"

Tips
----

On some networked filesystems it may be necessary to force an `os.fsync()`
before closing the file so the data is actually written before another client
reads it. Effectively this comes down to:

::

   with portalocker.Lock('some_file', 'rb+', timeout=60) as fh:
       # do what you need to do
       ...

       # flush and sync to filesystem
       fh.flush()
       os.fsync(fh.fileno())

Links
-----

* Documentation - http://portalocker.readthedocs.org/en/latest/
* Source - https://github.com/WoLpH/portalocker
* Bug reports - https://github.com/WoLpH/portalocker/issues
* Package homepage - https://pypi.python.org/pypi/portalocker
* My blog - http://w.wol.ph/

Examples
--------

To make sure your cache generation scripts don't race, use the `Lock` class:

>>> import portalocker
>>> with portalocker.Lock('somefile', timeout=1) as fh:
...     print('writing some stuff to my cache...', file=fh)

To customize opening and locking, a manual approach is also possible:

>>> import portalocker
>>> file = open('somefile', 'r+')
>>> portalocker.lock(file, portalocker.LockFlags.EXCLUSIVE)
>>> file.seek(12)
>>> file.write('foo')
>>> file.close()

Explicit unlocking is not needed in most cases, but omitting it has been known
to cause issues:
https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/42#issuecomment-601108266

If needed, it can be done through:

>>> portalocker.unlock(file)

Do note that your data might still be in a buffer, so it may not be available
to other readers until you `flush()` or `close()` the file.

To create a cross-platform bounded semaphore across multiple processes you can
use the `BoundedSemaphore` class, which functions somewhat similarly to
`threading.BoundedSemaphore`:

>>> import portalocker
>>> n = 2
>>> timeout = 0.1

>>> semaphore_a = portalocker.BoundedSemaphore(n, timeout=timeout)
>>> semaphore_b = portalocker.BoundedSemaphore(n, timeout=timeout)
>>> semaphore_c = portalocker.BoundedSemaphore(n, timeout=timeout)

>>> semaphore_a.acquire()
>>> semaphore_b.acquire()
>>> semaphore_c.acquire()
Traceback (most recent call last):
  ...
portalocker.exceptions.AlreadyLocked

More examples can be found in the `tests `_.

Versioning
----------

This library follows `Semantic Versioning `_.

Changelog
---------

Every release has a ``git tag`` with a commit message for the tag explaining
what was added and/or changed.
The list of tags/releases including the commit messages can be found here: https://github.com/WoLpH/portalocker/releases License ------- See the `LICENSE `_ file. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/appveyor.yml0000644000076500000240000000106514734754243015323 0ustar00rickstaff# What Python version is installed where: # http://www.appveyor.com/docs/installed-software#python image: - Visual Studio 2022 environment: matrix: - TOXENV: py39 - TOXENV: py310 - TOXENV: py311 - TOXENV: py312 - TOXENV: py313 install: - powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex" # - py -m pip install -U tox setuptools wheel # - py -m pip install -Ue ".[tests]" build: false # Not a C# project, build stuff at the test step instead. test_script: - dir - C:\Users\appveyor\.local\bin\uvx.exe tox ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1735654951.462707 portalocker-3.1.1/docs/0000755000076500000240000000000014734777047013670 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/Makefile0000664000076500000240000001272012304411736015311 0ustar00rickstaff# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." 
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PythonUtils.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PythonUtils.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/PythonUtils" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PythonUtils" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4641602 portalocker-3.1.1/docs/_theme/0000755000076500000240000000000014734777047015131 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/LICENSE0000644000076500000240000000355412304411736016122 0ustar00rickstaffModifications: Copyright (c) 2012 Rick van Hattem. Original Projects: Copyright (c) 2010 Kenneth Reitz. 
Copyright (c) 2010 by Armin Ronacher. Some rights reserved. Redistribution and use in source and binary forms of the theme, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. We kindly ask you to only use these themes in an unmodified manner just for Flask and Flask-related products, not for unrelated projects. If you like the visual style and want to use it for your own projects, please consider making some larger changes to the themes (such as changing font faces, sizes, colors or margins). THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/flask_theme_support.py0000644000076500000240000001141712304411736021542 0ustar00rickstaff# flasky extensions. flasky pygments style based on tango style from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal class FlaskyStyle(Style): background_color = "#f8f8f8" default_style = "" styles = { # No corresponding class for the following: # Text: "", # class: '' Whitespace: "underline #f8f8f8", # class: 'w' Error: "#a40000 border:#ef2929", # class: 'err' Other: "#000000", # class 'x' Comment: "italic #8f5902", # class: 'c' Comment.Preproc: "noitalic", # class: 'cp' Keyword: "bold #004461", # class: 'k' Keyword.Constant: "bold #004461", # class: 'kc' Keyword.Declaration: "bold #004461", # class: 'kd' Keyword.Namespace: "bold #004461", # class: 'kn' Keyword.Pseudo: "bold #004461", # class: 'kp' Keyword.Reserved: "bold #004461", # class: 'kr' Keyword.Type: "bold #004461", # class: 'kt' Operator: "#582800", # class: 'o' Operator.Word: "bold #004461", # class: 'ow' - like keywords Punctuation: "bold #000000", # class: 'p' # because special names such as Name.Class, Name.Function, etc. # are not recognized as such later in the parsing, we choose them # to look the same as ordinary variables. 
Name: "#000000", # class: 'n' Name.Attribute: "#c4a000", # class: 'na' - to be revised Name.Builtin: "#004461", # class: 'nb' Name.Builtin.Pseudo: "#3465a4", # class: 'bp' Name.Class: "#000000", # class: 'nc' - to be revised Name.Constant: "#000000", # class: 'no' - to be revised Name.Decorator: "#888", # class: 'nd' - to be revised Name.Entity: "#ce5c00", # class: 'ni' Name.Exception: "bold #cc0000", # class: 'ne' Name.Function: "#000000", # class: 'nf' Name.Property: "#000000", # class: 'py' Name.Label: "#f57900", # class: 'nl' Name.Namespace: "#000000", # class: 'nn' - to be revised Name.Other: "#000000", # class: 'nx' Name.Tag: "bold #004461", # class: 'nt' - like a keyword Name.Variable: "#000000", # class: 'nv' - to be revised Name.Variable.Class: "#000000", # class: 'vc' - to be revised Name.Variable.Global: "#000000", # class: 'vg' - to be revised Name.Variable.Instance: "#000000", # class: 'vi' - to be revised Number: "#990000", # class: 'm' Literal: "#000000", # class: 'l' Literal.Date: "#000000", # class: 'ld' String: "#4e9a06", # class: 's' String.Backtick: "#4e9a06", # class: 'sb' String.Char: "#4e9a06", # class: 'sc' String.Doc: "italic #8f5902", # class: 'sd' - like a comment String.Double: "#4e9a06", # class: 's2' String.Escape: "#4e9a06", # class: 'se' String.Heredoc: "#4e9a06", # class: 'sh' String.Interpol: "#4e9a06", # class: 'si' String.Other: "#4e9a06", # class: 'sx' String.Regex: "#4e9a06", # class: 'sr' String.Single: "#4e9a06", # class: 's1' String.Symbol: "#4e9a06", # class: 'ss' Generic: "#000000", # class: 'g' Generic.Deleted: "#a40000", # class: 'gd' Generic.Emph: "italic #000000", # class: 'ge' Generic.Error: "#ef2929", # class: 'gr' Generic.Heading: "bold #000080", # class: 'gh' Generic.Inserted: "#00A000", # class: 'gi' Generic.Output: "#888", # class: 'go' Generic.Prompt: "#745334", # class: 'gp' Generic.Strong: "bold #000000", # class: 'gs' Generic.Subheading: "bold #800080", # class: 'gu' Generic.Traceback: "bold #a40000", # class: 'gt' } ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4657717 portalocker-3.1.1/docs/_theme/wolph/0000755000076500000240000000000014734777047016262 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/wolph/layout.html0000644000076500000240000000110612304411736020440 0ustar00rickstaff{%- extends "basic/layout.html" %} {%- block extrahead %} {{ super() }} {% if theme_touch_icon %} {% endif %} {% endblock %} {%- block relbar2 %}{% endblock %} {%- block footer %} {%- endblock %} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/wolph/relations.html0000644000076500000240000000111612304411736021124 0ustar00rickstaff
Related Topics
././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1735654951.467641 portalocker-3.1.1/docs/_theme/wolph/static/0000755000076500000240000000000014734777047017551 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/wolph/static/flasky.css_t0000644000076500000240000001560612304411736022064 0ustar00rickstaff/* * flasky.css_t * ~~~~~~~~~~~~ * * :copyright: Copyright 2010 by Armin Ronacher. Modifications by Kenneth Reitz. * :license: Flask Design License, see LICENSE for details. */ {% set page_width = '940px' %} {% set sidebar_width = '220px' %} @import url("basic.css"); /* -- page layout ----------------------------------------------------------- */ body { font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro'; font-size: 17px; background-color: white; color: #000; margin: 0; padding: 0; } div.document { width: {{ page_width }}; margin: 30px auto 0 auto; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 {{ sidebar_width }}; } div.sphinxsidebar { width: {{ sidebar_width }}; } hr { border: 1px solid #B1B4B6; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 0 30px; } img.floatingflask { padding: 0 0 10px 10px; float: right; } div.footer { width: {{ page_width }}; margin: 20px auto 30px auto; font-size: 14px; color: #888; text-align: right; } div.footer a { color: #888; } div.related { display: none; } div.sphinxsidebar a { color: #444; text-decoration: none; border-bottom: 1px dotted #999; } div.sphinxsidebar a:hover { border-bottom: 1px solid #999; } div.sphinxsidebar { font-size: 14px; line-height: 1.5; } div.sphinxsidebarwrapper { padding: 0px 10px; } div.sphinxsidebarwrapper p.logo { padding: 0 0 20px 0; margin: 0; text-align: center; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: 'Garamond', 'Georgia', serif; color: #555; font-size: 24px; font-weight: normal; margin: 0 0 5px 0; padding: 0; } div.sphinxsidebar h4 { font-size: 20px; } div.sphinxsidebar h3 a { color: #444; } div.sphinxsidebar p.logo a, div.sphinxsidebar h3 a, div.sphinxsidebar p.logo a:hover, div.sphinxsidebar h3 a:hover { border: none; } div.sphinxsidebar p { color: #555; margin: 10px 0; } div.sphinxsidebar ul { margin: 10px 0; padding: 0; color: #000; } div.sphinxsidebar input[type="text"] { width: 160px!important; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: 'Georgia', serif; font-size: 1em; } /* -- body styles ----------------------------------------------------------- */ a { color: #004B6B; text-decoration: underline; } a:hover { color: #6D4100; text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: 'Garamond', 'Georgia', serif; font-weight: normal; margin: 30px 0px 10px 0px; padding: 0; } div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } div.body h2 { font-size: 180%; } div.body h3 { font-size: 150%; } div.body h4 { font-size: 130%; } div.body h5 { font-size: 100%; } div.body h6 { font-size: 100%; } a.headerlink { color: #ddd; padding: 0 4px; text-decoration: none; } a.headerlink:hover { color: #444; background: #eaeaea; } div.body p, div.body dd, div.body li { line-height: 1.4em; } div.admonition { background: #fafafa; margin: 20px -30px; padding: 10px 30px; border-top: 1px solid #ccc; border-bottom: 1px solid #ccc; } div.admonition tt.xref, div.admonition a tt { border-bottom: 1px solid #fafafa; } dd 
div.admonition { margin-left: -60px; padding-left: 60px; } div.admonition p.admonition-title { font-family: 'Garamond', 'Georgia', serif; font-weight: normal; font-size: 24px; margin: 0 0 10px 0; padding: 0; line-height: 1; } div.admonition p.last { margin-bottom: 0; } div.highlight { background-color: white; } dt:target, .highlight { background: #FAF3E8; } div.note { background-color: #eee; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; } div.topic { background-color: #eee; } p.admonition-title { display: inline; } p.admonition-title:after { content: ":"; } pre, tt { font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 0.9em; } img.screenshot { } tt.descname, tt.descclassname { font-size: 0.95em; } tt.descname { padding-right: 0.08em; } img.screenshot { -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils { border: 1px solid #888; -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils td, table.docutils th { border: 1px solid #888; padding: 0.25em 0.7em; } table.field-list, table.footnote { border: none; -moz-box-shadow: none; -webkit-box-shadow: none; box-shadow: none; } table.footnote { margin: 15px 0; width: 100%; border: 1px solid #eee; background: #fdfdfd; font-size: 0.9em; } table.footnote + table.footnote { margin-top: -15px; border-top: none; } table.field-list th { padding: 0 0.8em 0 0; } table.field-list td { padding: 0; } table.footnote td.label { width: 0px; padding: 0.3em 0 0.3em 0.5em; } table.footnote td { padding: 0.3em 0.5em; } dl { margin: 0; padding: 0; } dl dd { margin-left: 30px; } blockquote { margin: 0 0 0 30px; padding: 0; } ul, ol { margin: 10px 0 10px 30px; padding: 0; } pre { background: #eee; padding: 7px 30px; margin: 15px -30px; line-height: 1.3em; } dl pre, blockquote pre, li pre { margin-left: -60px; padding-left: 60px; } dl dl pre { margin-left: -90px; padding-left: 90px; } tt { background-color: #ecf0f3; color: #222; /* padding: 1px 2px; */ } tt.xref, a tt { background-color: #FBFBFB; border-bottom: 1px solid white; } a.reference { text-decoration: none; border-bottom: 1px dotted #004B6B; } a.reference:hover { border-bottom: 1px solid #6D4100; } a.footnote-reference { text-decoration: none; font-size: 0.7em; vertical-align: top; border-bottom: 1px dotted #004B6B; } a.footnote-reference:hover { border-bottom: 1px solid #6D4100; } a:hover tt { background: #EEE; } /* scrollbars */ ::-webkit-scrollbar { width: 6px; height: 6px; } ::-webkit-scrollbar-button:start:decrement, ::-webkit-scrollbar-button:end:increment { display: block; height: 10px; } ::-webkit-scrollbar-button:vertical:increment { background-color: #fff; } ::-webkit-scrollbar-track-piece { background-color: #eee; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:vertical { height: 50px; background-color: #ccc; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:horizontal { width: 50px; background-color: #ccc; -webkit-border-radius: 3px; } /* misc. */ .revsys-inline { display: none!important; } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/wolph/static/small_flask.css0000644000076500000240000000172012304411736022530 0ustar00rickstaff/* * small_flask.css_t * ~~~~~~~~~~~~~~~~~ * * :copyright: Copyright 2010 by Armin Ronacher. * :license: Flask Design License, see LICENSE for details. 
*/ body { margin: 0; padding: 20px 30px; } div.documentwrapper { float: none; background: white; } div.sphinxsidebar { display: block; float: none; width: 102.5%; margin: 50px -30px -20px -30px; padding: 10px 20px; background: #333; color: white; } div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, div.sphinxsidebar h3 a { color: white; } div.sphinxsidebar a { color: #aaa; } div.sphinxsidebar p.logo { display: none; } div.document { width: 100%; margin: 0; } div.related { display: block; margin: 0; padding: 10px 0 20px 0; } div.related ul, div.related ul li { margin: 0; padding: 0; } div.footer { display: none; } div.bodywrapper { margin: 0; } div.body { min-height: 0; padding: 0; } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/_theme/wolph/theme.conf0000644000076500000240000000017212304411736020210 0ustar00rickstaff[theme] inherit = basic stylesheet = flasky.css pygments_style = flask_theme_support.FlaskyStyle [options] touch_icon = ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1731931928.0 portalocker-3.1.1/docs/conf.py0000644000076500000240000002544114716627430015163 0ustar00rickstaff# # Documentation build configuration file, created by # sphinx-quickstart on Thu Feb 27 20:00:23 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys import datetime # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) from portalocker import __about__ as metadata # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = metadata.__package_name__.replace('-', ' ').capitalize() copyright = '{}, {}'.format( datetime.date.today().year, metadata.__author__, ) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = metadata.__version__ # The full version, including alpha/beta/rc tags. release = metadata.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = 'py:obj' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'wolph' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['_theme'] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
#html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = metadata.__package_name__ + '-doc' # -- Options for LaTeX output --------------------------------------------- # latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [( 'index', '%s.tex' % metadata.__package_name__, '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(), metadata.__author__, 'manual', )] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [( 'index', metadata.__package_name__, '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(), [metadata.__author__], 1, )] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [( 'index', metadata.__package_name__, '%s Documentation' % metadata.__package_name__.replace('-', ' ').capitalize(), metadata.__author__, metadata.__package_name__, metadata.__description__, 'Miscellaneous', )] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = metadata.__package_name__.replace('-', ' ').capitalize() epub_author = metadata.__author__ epub_publisher = metadata.__author__ epub_copyright = copyright # The HTML theme for the epub output. Since the default themes are not optimized # for small screen space, using the same theme for HTML and epub output is # usually not wise. This defaults to 'epub', a theme designed to save visual # space. #epub_theme = 'epub' # The language of the text. 
It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files that should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the PIL. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. #epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = dict( python=('http://docs.python.org/3/', None), ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611419668.0 portalocker-3.1.1/docs/index.rst0000644000076500000240000000043014003050024015466 0ustar00rickstaffWelcome to portalocker's documentation! ======================================== .. include :: ../README.rst Contents: .. toctree:: :maxdepth: 4 portalocker tests license Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1475007998.0 portalocker-3.1.1/docs/license.rst0000644000076500000240000000006012772552776016040 0ustar00rickstaffLicense ======= .. literalinclude:: ../LICENSE ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1393693662.0 portalocker-3.1.1/docs/make.bat0000664000076500000240000001176212304411736015263 0ustar00rickstaff@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. 
changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PythonUtils.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PythonUtils.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. 
goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674080367.0 portalocker-3.1.1/docs/portalocker.constants.rst0000644000076500000240000000033614362070157020745 0ustar00rickstaffportalocker.constants module ============================ .. automodule:: portalocker.constants :members: :private-members: :special-members: :inherited-members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1520447429.0 portalocker-3.1.1/docs/portalocker.exceptions.rst0000644000076500000240000000023513250027705021104 0ustar00rickstaffportalocker.exceptions module ============================= .. automodule:: portalocker.exceptions :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1520447429.0 portalocker-3.1.1/docs/portalocker.portalocker.rst0000644000076500000240000000024013250027705021244 0ustar00rickstaffportalocker.portalocker module ============================== .. automodule:: portalocker.portalocker :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1622034804.0 portalocker-3.1.1/docs/portalocker.redis.rst0000644000076500000240000000021314053444564020035 0ustar00rickstaffportalocker.redis module ======================== .. automodule:: portalocker.redis :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674080369.0 portalocker-3.1.1/docs/portalocker.rst0000644000076500000240000000047514362070161016731 0ustar00rickstaffportalocker package =================== Submodules ---------- .. toctree:: portalocker.redis portalocker.constants portalocker.exceptions portalocker.portalocker portalocker.utils Module contents --------------- .. automodule:: portalocker :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/docs/portalocker.types.rst0000644000076500000240000000021314734754243020077 0ustar00rickstaffportalocker.types module ======================== .. automodule:: portalocker.types :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1520447429.0 portalocker-3.1.1/docs/portalocker.utils.rst0000644000076500000240000000021613250027705020062 0ustar00rickstaffportalocker.utils module ======================== .. automodule:: portalocker.utils :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1545012397.0 portalocker-3.1.1/docs/requirements.txt0000644000076500000240000000002013405602255017122 0ustar00rickstaff-e.[docs,tests] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1585014375.0 portalocker-3.1.1/docs/tests.rst0000644000076500000240000000100113636263147015543 0ustar00rickstafftests package ============= Module contents --------------- .. automodule:: portalocker_tests.tests :members: :private-members: :special-members: :inherited-members: :undoc-members: :show-inheritance: .. 
automodule:: portalocker_tests.test_combined :members: :undoc-members: :show-inheritance: .. automodule:: portalocker_tests.temporary_file_lock :members: :private-members: :special-members: :inherited-members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4754212 portalocker-3.1.1/portalocker/0000755000076500000240000000000014734777047015265 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654944.0 portalocker-3.1.1/portalocker/__about__.py0000644000076500000240000000034614734777040017541 0ustar00rickstaff__package_name__ = 'portalocker' __author__ = 'Rick van Hattem' __email__ = 'wolph@wol.ph' __version__ = '3.1.1' __description__ = """Wraps the portalocker recipe for easy usage""" __url__ = 'https://github.com/WoLpH/portalocker' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654944.0 portalocker-3.1.1/portalocker/__init__.py0000644000076500000240000000403014734777040017364 0ustar00rickstafffrom . import __about__, constants, exceptions, portalocker from .utils import ( BoundedSemaphore, Lock, RLock, TemporaryFileLock, open_atomic, ) try: # pragma: no cover from .redis import RedisLock except ImportError: # pragma: no cover RedisLock = None # type: ignore[assignment,misc] #: The package name on Pypi __package_name__ = __about__.__package_name__ #: Current author and maintainer, view the git history for the previous ones __author__ = __about__.__author__ #: Current author's email address __email__ = __about__.__email__ #: Version number __version__ = '3.1.1' #: Package description for Pypi __description__ = __about__.__description__ #: Package homepage __url__ = __about__.__url__ #: Exception thrown when the file is already locked by someone else AlreadyLocked = exceptions.AlreadyLocked #: Exception thrown if an error occurred during locking LockException = exceptions.LockException #: Lock a file. Note that this is an advisory lock on Linux/Unix systems lock = portalocker.lock #: Unlock a file unlock = portalocker.unlock #: Place an exclusive lock. #: Only one process may hold an exclusive lock for a given file at a given #: time. LOCK_EX: constants.LockFlags = constants.LockFlags.EXCLUSIVE #: Place a shared lock. #: More than one process may hold a shared lock for a given file at a given #: time. LOCK_SH: constants.LockFlags = constants.LockFlags.SHARED #: Acquire the lock in a non-blocking fashion. LOCK_NB: constants.LockFlags = constants.LockFlags.NON_BLOCKING #: Remove an existing lock held by this process. 
LOCK_UN: constants.LockFlags = constants.LockFlags.UNBLOCK #: Locking flags enum LockFlags = constants.LockFlags #: Locking utility class to automatically handle opening with timeouts and #: context wrappers __all__ = [ 'LOCK_EX', 'LOCK_NB', 'LOCK_SH', 'LOCK_UN', 'AlreadyLocked', 'BoundedSemaphore', 'Lock', 'LockException', 'LockFlags', 'RLock', 'RedisLock', 'TemporaryFileLock', 'lock', 'open_atomic', 'unlock', ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654944.0 portalocker-3.1.1/portalocker/__main__.py0000644000076500000240000000724314734777040017356 0ustar00rickstafffrom __future__ import annotations import argparse import logging import os import pathlib import re import typing base_path = pathlib.Path(__file__).parent.parent src_path = base_path / 'portalocker' dist_path = base_path / 'dist' _default_output_path = base_path / 'dist' / 'portalocker.py' _NAMES_RE = re.compile(r'(?P[^()]+)$') _RELATIVE_IMPORT_RE = re.compile( r'^from \.(?P.*?) import (?P\(?)(?P[^()]+)$', ) _USELESS_ASSIGNMENT_RE = re.compile(r'^(?P\w+) = \1\n$') _TEXT_TEMPLATE = """''' {} ''' """ logger = logging.getLogger(__name__) def main(argv: typing.Sequence[str] | None = None) -> None: parser = argparse.ArgumentParser() subparsers = parser.add_subparsers(required=True) combine_parser = subparsers.add_parser( 'combine', help='Combine all Python files into a single unified `portalocker.py` ' 'file for easy distribution', ) combine_parser.add_argument( '--output-file', '-o', type=argparse.FileType('w'), default=str(_default_output_path), ) combine_parser.set_defaults(func=combine) args = parser.parse_args(argv) args.func(args) def _read_file( path: pathlib.Path, seen_files: set[pathlib.Path], ) -> typing.Iterator[str]: if path in seen_files: return names: set[str] = set() seen_files.add(path) paren = False from_ = None for line in path.open(): if '__future__' in line: continue if paren: if ')' in line: line = line.split(')', 1)[1] paren = False continue match = _NAMES_RE.match(line) else: match = _RELATIVE_IMPORT_RE.match(line) if match: if not paren: paren = bool(match.group('paren')) from_ = match.group('from') if from_: names.add(from_) yield from _read_file(src_path / f'{from_}.py', seen_files) else: for name in match.group('names').split(','): name = name.strip() names.add(name) yield from _read_file(src_path / f'{name}.py', seen_files) else: yield _clean_line(line, names) def _clean_line(line: str, names: set[str]) -> str: # Replace `some_import.spam` with `spam` if names: joined_names = '|'.join(names) line = re.sub(rf'\b({joined_names})\.', '', line) # Replace useless assignments (e.g. `spam = spam`) return _USELESS_ASSIGNMENT_RE.sub('', line) def combine(args: argparse.Namespace) -> None: output_file = args.output_file pathlib.Path(output_file.name).parent.mkdir(parents=True, exist_ok=True) # We're handling this separately because it has to be the first import. output_file.write('from __future__ import annotations\n') output_file.write( _TEXT_TEMPLATE.format((base_path / 'README.rst').read_text()), ) output_file.write( _TEXT_TEMPLATE.format((base_path / 'LICENSE').read_text()), ) seen_files: set[pathlib.Path] = set() for line in _read_file(src_path / '__init__.py', seen_files): output_file.write(line) output_file.flush() output_file.close() logger.info(f'Wrote combined file to {output_file.name}') # Run black and ruff if available. If not then just run the file. 
os.system(f'black {output_file.name}') os.system(f'ruff format {output_file.name}') os.system(f'ruff check --fix --fix-only {output_file.name}') os.system(f'python3 {output_file.name}') if __name__ == '__main__': logging.basicConfig(level=logging.INFO) main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker/constants.py0000644000076500000240000000217114734754243017645 0ustar00rickstaff""" Locking constants Lock types: - `EXCLUSIVE` exclusive lock - `SHARED` shared lock Lock flags: - `NON_BLOCKING` non-blocking Manually unlock, only needed internally - `UNBLOCK` unlock """ import enum import os # The actual tests will execute the code anyhow so the following code can # safely be ignored from the coverage tests if os.name == 'nt': # pragma: no cover import msvcrt #: exclusive lock LOCK_EX = 0x1 #: shared lock LOCK_SH = 0x2 #: non-blocking LOCK_NB = 0x4 #: unlock LOCK_UN = msvcrt.LK_UNLCK # type: ignore[attr-defined] elif os.name == 'posix': # pragma: no cover import fcntl #: exclusive lock LOCK_EX = fcntl.LOCK_EX #: shared lock LOCK_SH = fcntl.LOCK_SH #: non-blocking LOCK_NB = fcntl.LOCK_NB #: unlock LOCK_UN = fcntl.LOCK_UN else: # pragma: no cover raise RuntimeError('PortaLocker only defined for nt and posix platforms') class LockFlags(enum.IntFlag): #: exclusive lock EXCLUSIVE = LOCK_EX #: shared lock SHARED = LOCK_SH #: non-blocking NON_BLOCKING = LOCK_NB #: unlock UNBLOCK = LOCK_UN ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1731931928.0 portalocker-3.1.1/portalocker/exceptions.py0000644000076500000240000000076314716627430020014 0ustar00rickstaffimport typing from portalocker import types class BaseLockException(Exception): # noqa: N818 # Error codes: LOCK_FAILED = 1 def __init__( self, *args: typing.Any, fh: typing.Union[types.IO, None, int] = None, **kwargs: typing.Any, ) -> None: self.fh = fh Exception.__init__(self, *args) class LockException(BaseLockException): pass class AlreadyLocked(LockException): pass class FileToLarge(LockException): pass ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker/portalocker.py0000644000076500000240000001303014734754243020152 0ustar00rickstafffrom __future__ import annotations import os import typing from . import constants, exceptions, types # Alias for readability. Due to import recursion issues we cannot do: # from .constants import LockFlags LockFlags = constants.LockFlags class HasFileno(typing.Protocol): def fileno(self) -> int: ... 
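# Illustration (a sketch of assumed usage, based on the comments where
# ``LOCKER`` is assigned in the POSIX branch below): the module-level
# ``LOCKER`` callable can be replaced with any callable that accepts
# ``(file_or_fd, flags)``, for example ``fcntl.lockf`` instead of the
# default ``fcntl.flock``:
#
#     import fcntl
#     import portalocker.portalocker
#
#     portalocker.portalocker.LOCKER = fcntl.lockf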
LOCKER: typing.Callable[[int | HasFileno, int], typing.Any] | None = None if os.name == 'nt': # pragma: no cover import msvcrt import pywintypes import win32con import win32file import winerror __overlapped = pywintypes.OVERLAPPED() def lock(file_: types.IO | int, flags: LockFlags) -> None: # Windows locking does not support locking through `fh.fileno()` so # we cast it to make mypy and pyright happy file_ = typing.cast(types.IO, file_) mode = 0 if flags & LockFlags.NON_BLOCKING: mode |= win32con.LOCKFILE_FAIL_IMMEDIATELY if flags & LockFlags.EXCLUSIVE: mode |= win32con.LOCKFILE_EXCLUSIVE_LOCK # Save the old position so we can go back to that position but # still lock from the beginning of the file savepos = file_.tell() if savepos: file_.seek(0) os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore[attr-defined] try: win32file.LockFileEx(os_fh, mode, 0, -0x10000, __overlapped) except pywintypes.error as exc_value: # error: (33, 'LockFileEx', 'The process cannot access the file # because another process has locked a portion of the file.') if exc_value.winerror == winerror.ERROR_LOCK_VIOLATION: raise exceptions.AlreadyLocked( exceptions.LockException.LOCK_FAILED, exc_value.strerror, fh=file_, ) from exc_value else: # Q: Are there exceptions/codes we should be dealing with # here? raise finally: if savepos: file_.seek(savepos) def unlock(file_: types.IO) -> None: try: savepos = file_.tell() if savepos: file_.seek(0) os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore[attr-defined] try: win32file.UnlockFileEx( os_fh, 0, -0x10000, __overlapped, ) except pywintypes.error as exc: if exc.winerror != winerror.ERROR_NOT_LOCKED: # Q: Are there exceptions/codes we should be # dealing with here? raise finally: if savepos: file_.seek(savepos) except OSError as exc: raise exceptions.LockException( exceptions.LockException.LOCK_FAILED, exc.strerror, fh=file_, ) from exc elif os.name == 'posix': # pragma: no cover import errno import fcntl # The locking implementation. # Expected values are either fcntl.flock() or fcntl.lockf(), # but any callable that matches the syntax will be accepted. LOCKER = fcntl.flock # pyright: ignore[reportConstantRedefinition] def lock(file: int | types.IO, flags: LockFlags) -> None: # type: ignore[misc] assert LOCKER is not None, 'We need a locking function in `LOCKER` ' # Locking with NON_BLOCKING without EXCLUSIVE or SHARED enabled # results in an error if (flags & LockFlags.NON_BLOCKING) and not flags & ( LockFlags.SHARED | LockFlags.EXCLUSIVE ): raise RuntimeError( 'When locking in non-blocking mode the SHARED ' 'or EXCLUSIVE flag must be specified as well', ) try: LOCKER(file, flags) except OSError as exc_value: # Python can use one of several different exception classes to # represent timeout (most likely is BlockingIOError and IOError), # but these errors may also represent other failures. On some # systems, `IOError is OSError` which means checking for either # IOError or OSError can mask other errors. # The safest check is to catch OSError (from which the others # inherit) and check the errno (which should be EACCESS or EAGAIN # according to the spec). if exc_value.errno in (errno.EACCES, errno.EAGAIN): # A timeout exception, wrap this so the outer code knows to try # again (if it wants to). raise exceptions.AlreadyLocked( exc_value, fh=file, ) from exc_value else: # Something else went wrong; don't wrap this so we stop # immediately. 
raise exceptions.LockException( exc_value, fh=file, ) from exc_value except EOFError as exc_value: # On NFS filesystems, flock can raise an EOFError raise exceptions.LockException( exc_value, fh=file, ) from exc_value def unlock(file: types.IO) -> None: # type: ignore[misc] assert LOCKER is not None, 'We need a locking function in `LOCKER` ' LOCKER(file.fileno(), LockFlags.UNBLOCK) else: # pragma: no cover raise RuntimeError('PortaLocker only defined for nt and posix platforms') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1616897939.0 portalocker-3.1.1/portalocker/py.typed0000644000076500000240000000000014027763623016740 0ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker/redis.py0000644000076500000240000002073214734754243016742 0ustar00rickstaff# pyright: reportUnknownMemberType=false from __future__ import annotations import _thread import json import logging import random import time import typing import redis from . import exceptions, utils logger = logging.getLogger(__name__) DEFAULT_UNAVAILABLE_TIMEOUT = 1 DEFAULT_THREAD_SLEEP_TIME = 0.1 class PubSubWorkerThread(redis.client.PubSubWorkerThread): def run(self) -> None: try: super().run() except Exception: # pragma: no cover _thread.interrupt_main() raise class RedisLock(utils.LockBase): """ An extremely reliable Redis lock based on pubsub with a keep-alive thread As opposed to most Redis locking systems based on key/value pairs, this locking method is based on the pubsub system. The big advantage is that if the connection gets killed due to network issues, crashing processes or otherwise, it will still immediately unlock instead of waiting for a lock timeout. To make sure both sides of the lock know about the connection state it is recommended to set the `health_check_interval` when creating the redis connection.. Args: channel: the redis channel to use as locking key. connection: an optional redis connection if you already have one or if you need to specify the redis connection timeout: timeout when trying to acquire a lock check_interval: check interval while waiting fail_when_locked: after the initial lock failed, return an error or lock the file. This does not wait for the timeout. thread_sleep_time: sleep time between fetching messages from redis to prevent a busy/wait loop. In the case of lock conflicts this increases the time it takes to resolve the conflict. This should be smaller than the `check_interval` to be useful. unavailable_timeout: If the conflicting lock is properly connected this should never exceed twice your redis latency. Note that this will increase the wait time possibly beyond your `timeout` and is always executed if a conflict arises. redis_kwargs: The redis connection arguments if no connection is given. The `DEFAULT_REDIS_KWARGS` are used as default, if you want to override these you need to explicitly specify a value (e.g. 
`health_check_interval=0`) """ redis_kwargs: dict[str, typing.Any] thread: PubSubWorkerThread | None channel: str timeout: float connection: redis.client.Redis[str] | None pubsub: redis.client.PubSub | None = None close_connection: bool DEFAULT_REDIS_KWARGS: typing.ClassVar[dict[str, typing.Any]] = dict( health_check_interval=10, decode_responses=True, ) def __init__( self, channel: str, connection: redis.client.Redis[str] | None = None, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = False, thread_sleep_time: float = DEFAULT_THREAD_SLEEP_TIME, unavailable_timeout: float = DEFAULT_UNAVAILABLE_TIMEOUT, redis_kwargs: dict[str, typing.Any] | None = None, ) -> None: # We don't want to close connections given as an argument self.close_connection = not connection self.thread = None self.channel = channel self.connection = connection self.thread_sleep_time = thread_sleep_time self.unavailable_timeout = unavailable_timeout self.redis_kwargs = redis_kwargs or dict() for key, value in self.DEFAULT_REDIS_KWARGS.items(): self.redis_kwargs.setdefault(key, value) super().__init__( timeout=timeout, check_interval=check_interval, fail_when_locked=fail_when_locked, ) def get_connection(self) -> redis.client.Redis[str]: if not self.connection: self.connection = redis.client.Redis(**self.redis_kwargs) return self.connection def channel_handler(self, message: dict[str, str]) -> None: if message.get('type') != 'message': # pragma: no cover return raw_data = message.get('data') if not raw_data: return try: data = json.loads(raw_data) except TypeError: # pragma: no cover logger.debug('TypeError while parsing: %r', message) return assert self.connection is not None self.connection.publish(data['response_channel'], str(time.time())) @property def client_name(self) -> str: return f'{self.channel}-lock' def acquire( # type: ignore[override] self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> RedisLock: timeout = utils.coalesce(timeout, self.timeout, 0.0) check_interval = utils.coalesce( check_interval, self.check_interval, 0.0, ) fail_when_locked = utils.coalesce( fail_when_locked, self.fail_when_locked, ) assert not self.pubsub, 'This lock is already active' connection = self.get_connection() timeout_generator = self._timeout_generator(timeout, check_interval) for _ in timeout_generator: # pragma: no branch subscribers = connection.pubsub_numsub(self.channel)[0][1] if subscribers: logger.debug( 'Found %d lock subscribers for %s', subscribers, self.channel, ) if self.check_or_kill_lock( connection, self.unavailable_timeout, ): # pragma: no branch continue else: # pragma: no cover subscribers = 0 # Note: this should not be changed to an elif because the if # above can still end up here if not subscribers: connection.client_setname(self.client_name) self.pubsub = connection.pubsub() self.pubsub.subscribe(**{self.channel: self.channel_handler}) self.thread = PubSubWorkerThread( self.pubsub, sleep_time=self.thread_sleep_time, ) self.thread.start() subscribers = connection.pubsub_numsub(self.channel)[0][1] if subscribers == 1: # pragma: no branch return self else: # pragma: no cover # Race condition, let's try again self.release() if fail_when_locked: # pragma: no cover raise exceptions.AlreadyLocked(exceptions) raise exceptions.AlreadyLocked(exceptions) def check_or_kill_lock( self, connection: redis.client.Redis[str], timeout: float, ) -> bool | None: # Random channel name to get messages back from the 
lock response_channel = f'{self.channel}-{random.random()}' pubsub = connection.pubsub() pubsub.subscribe(response_channel) connection.publish( self.channel, json.dumps( dict( response_channel=response_channel, message='ping', ), ), ) check_interval = min(self.thread_sleep_time, timeout / 10) for _ in self._timeout_generator( timeout, check_interval, ): # pragma: no branch if pubsub.get_message(timeout=check_interval): pubsub.close() return True for client_ in connection.client_list('pubsub'): # pragma: no cover if client_.get('name') == self.client_name: logger.warning('Killing unavailable redis client: %r', client_) connection.client_kill_filter( # pyright: ignore client_.get('id'), ) return None def release(self) -> None: if self.thread: # pragma: no branch self.thread.stop() self.thread.join() self.thread = None time.sleep(0.01) if self.pubsub: # pragma: no branch self.pubsub.unsubscribe(self.channel) self.pubsub.close() self.pubsub = None def __del__(self) -> None: self.release() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1731931928.0 portalocker-3.1.1/portalocker/types.py0000644000076500000240000000322714716627430016775 0ustar00rickstafffrom __future__ import annotations import pathlib import typing from typing import Union # fmt: off Mode = typing.Literal[ # Text modes # Read text 'r', 'rt', 'tr', # Write text 'w', 'wt', 'tw', # Append text 'a', 'at', 'ta', # Exclusive creation text 'x', 'xt', 'tx', # Read and write text 'r+', '+r', 'rt+', 'r+t', '+rt', 'tr+', 't+r', '+tr', # Write and read text 'w+', '+w', 'wt+', 'w+t', '+wt', 'tw+', 't+w', '+tw', # Append and read text 'a+', '+a', 'at+', 'a+t', '+at', 'ta+', 't+a', '+ta', # Exclusive creation and read text 'x+', '+x', 'xt+', 'x+t', '+xt', 'tx+', 't+x', '+tx', # Universal newline support 'U', 'rU', 'Ur', 'rtU', 'rUt', 'Urt', 'trU', 'tUr', 'Utr', # Binary modes # Read binary 'rb', 'br', # Write binary 'wb', 'bw', # Append binary 'ab', 'ba', # Exclusive creation binary 'xb', 'bx', # Read and write binary 'rb+', 'r+b', '+rb', 'br+', 'b+r', '+br', # Write and read binary 'wb+', 'w+b', '+wb', 'bw+', 'b+w', '+bw', # Append and read binary 'ab+', 'a+b', '+ab', 'ba+', 'b+a', '+ba', # Exclusive creation and read binary 'xb+', 'x+b', '+xb', 'bx+', 'b+x', '+bx', # Universal newline support in binary mode 'rbU', 'rUb', 'Urb', 'brU', 'bUr', 'Ubr', ] Filename = Union[str, pathlib.Path] IO: typing.TypeAlias = Union[ # type: ignore[name-defined] typing.IO[str], typing.IO[bytes], ] class FileOpenKwargs(typing.TypedDict): buffering: int | None encoding: str | None errors: str | None newline: str | None closefd: bool | None opener: typing.Callable[[str, int], int] | None ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker/utils.py0000644000076500000240000004355314734754243017002 0ustar00rickstafffrom __future__ import annotations import abc import atexit import contextlib import logging import os import pathlib import random import tempfile import time import typing import warnings from . 
import constants, exceptions, portalocker, types from .types import Filename, Mode logger = logging.getLogger(__name__) DEFAULT_TIMEOUT = 5 DEFAULT_CHECK_INTERVAL = 0.25 DEFAULT_FAIL_WHEN_LOCKED = False LOCK_METHOD = constants.LockFlags.EXCLUSIVE | constants.LockFlags.NON_BLOCKING __all__ = [ 'Lock', 'open_atomic', ] def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any: """Simple coalescing function that returns the first value that is not equal to the `test_value`. Or `None` if no value is valid. Usually this means that the last given value is the default value. Note that the `test_value` is compared using an identity check (i.e. `value is not test_value`) so changing the `test_value` won't work for all values. >>> coalesce(None, 1) 1 >>> coalesce() >>> coalesce(0, False, True) 0 >>> coalesce(0, False, True, test_value=0) False # This won't work because of the `is not test_value` type testing: >>> coalesce([], dict(spam='eggs'), test_value=[]) [] """ return next((arg for arg in args if arg is not test_value), None) @contextlib.contextmanager def open_atomic( filename: Filename, binary: bool = True, ) -> typing.Iterator[types.IO]: """Open a file for atomic writing. Instead of locking this method allows you to write the entire file and move it to the actual location. Note that this makes the assumption that a rename is atomic on your platform which is generally the case but not a guarantee. http://docs.python.org/library/os.html#os.rename >>> filename = 'test_file.txt' >>> if os.path.exists(filename): ... os.remove(filename) >>> with open_atomic(filename) as fh: ... written = fh.write(b'test') >>> assert os.path.exists(filename) >>> os.remove(filename) >>> import pathlib >>> path_filename = pathlib.Path('test_file.txt') >>> with open_atomic(path_filename) as fh: ... written = fh.write(b'test') >>> assert path_filename.exists() >>> path_filename.unlink() """ # `pathlib.Path` cast in case `path` is a `str` path: pathlib.Path if isinstance(filename, pathlib.Path): path = filename else: path = pathlib.Path(filename) assert not path.exists(), f'{path!r} exists' # Create the parent directory if it doesn't exist path.parent.mkdir(parents=True, exist_ok=True) with tempfile.NamedTemporaryFile( mode=(binary and 'wb') or 'w', dir=str(path.parent), delete=False, ) as temp_fh: yield temp_fh temp_fh.flush() os.fsync(temp_fh.fileno()) try: os.rename(temp_fh.name, path) finally: with contextlib.suppress(Exception): os.remove(temp_fh.name) class LockBase(abc.ABC): # pragma: no cover #: timeout when trying to acquire a lock timeout: float #: check interval while waiting for `timeout` check_interval: float #: skip the timeout and immediately fail if the initial lock fails fail_when_locked: bool def __init__( self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> None: self.timeout = coalesce(timeout, DEFAULT_TIMEOUT) self.check_interval = coalesce(check_interval, DEFAULT_CHECK_INTERVAL) self.fail_when_locked = coalesce( fail_when_locked, DEFAULT_FAIL_WHEN_LOCKED, ) @abc.abstractmethod def acquire( self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> typing.IO[typing.AnyStr]: ... 
def _timeout_generator( self, timeout: float | None, check_interval: float | None, ) -> typing.Iterator[int]: f_timeout = coalesce(timeout, self.timeout, 0.0) f_check_interval = coalesce(check_interval, self.check_interval, 0.0) yield 0 i = 0 start_time = time.perf_counter() while start_time + f_timeout > time.perf_counter(): i += 1 yield i # Take low lock checks into account to stay within the interval since_start_time = time.perf_counter() - start_time time.sleep(max(0.001, (i * f_check_interval) - since_start_time)) @abc.abstractmethod def release(self) -> None: ... def __enter__(self) -> typing.IO[typing.AnyStr]: return self.acquire() def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: typing.Any, # Should be typing.TracebackType ) -> bool | None: self.release() return None def __delete__(self, instance: LockBase) -> None: instance.release() class Lock(LockBase): """Lock manager with built-in timeout Args: filename: filename mode: the open mode, 'a' or 'ab' should be used for writing. When mode contains `w` the file will be truncated to 0 bytes. timeout: timeout when trying to acquire a lock check_interval: check interval while waiting fail_when_locked: after the initial lock failed, return an error or lock the file. This does not wait for the timeout. **file_open_kwargs: The kwargs for the `open(...)` call fail_when_locked is useful when multiple threads/processes can race when creating a file. If set to true than the system will wait till the lock was acquired and then return an AlreadyLocked exception. Note that the file is opened first and locked later. So using 'w' as mode will result in truncate _BEFORE_ the lock is checked. """ fh: types.IO | None filename: str mode: str truncate: bool timeout: float check_interval: float fail_when_locked: bool flags: constants.LockFlags file_open_kwargs: dict[str, typing.Any] def __init__( self, filename: Filename, mode: Mode = 'a', timeout: float | None = None, check_interval: float = DEFAULT_CHECK_INTERVAL, fail_when_locked: bool = DEFAULT_FAIL_WHEN_LOCKED, flags: constants.LockFlags = LOCK_METHOD, **file_open_kwargs: typing.Any, ) -> None: if 'w' in mode: truncate = True mode = typing.cast(Mode, mode.replace('w', 'a')) else: truncate = False if timeout is None: timeout = DEFAULT_TIMEOUT elif not (flags & constants.LockFlags.NON_BLOCKING): warnings.warn( 'timeout has no effect in blocking mode', stacklevel=1, ) self.fh = None self.filename = str(filename) self.mode = mode self.truncate = truncate self.flags = flags self.file_open_kwargs = file_open_kwargs super().__init__(timeout, check_interval, fail_when_locked) def acquire( self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> typing.IO[typing.AnyStr]: """Acquire the locked filehandle""" fail_when_locked = coalesce(fail_when_locked, self.fail_when_locked) if ( not (self.flags & constants.LockFlags.NON_BLOCKING) and timeout is not None ): warnings.warn( 'timeout has no effect in blocking mode', stacklevel=1, ) # If we already have a filehandle, return it fh = self.fh if fh: # Due to type invariance we need to cast the type return typing.cast(typing.IO[typing.AnyStr], fh) # Get a new filehandler fh = self._get_fh() def try_close() -> None: # pragma: no cover # Silently try to close the handle if possible, ignore all issues if fh is not None: with contextlib.suppress(Exception): fh.close() exception = None # Try till the timeout has passed for _ in self._timeout_generator(timeout, 
check_interval): exception = None try: # Try to lock fh = self._get_lock(fh) break except exceptions.LockException as exc: # Python will automatically remove the variable from memory # unless you save it in a different location exception = exc # We already tried to the get the lock # If fail_when_locked is True, stop trying if fail_when_locked: try_close() raise exceptions.AlreadyLocked(exception) from exc except Exception as exc: # Something went wrong with the locking mechanism. # Wrap in a LockException and re-raise: try_close() raise exceptions.LockException(exc) from exc # Wait a bit if exception: try_close() # We got a timeout... reraising raise exception # Prepare the filehandle (truncate if needed) fh = self._prepare_fh(fh) self.fh = fh return typing.cast(typing.IO[typing.AnyStr], fh) def __enter__(self) -> typing.IO[typing.AnyStr]: return self.acquire() def release(self) -> None: """Releases the currently locked file handle""" if self.fh: portalocker.unlock(self.fh) self.fh.close() self.fh = None def _get_fh(self) -> types.IO: """Get a new filehandle""" return typing.cast( types.IO, open( # noqa: SIM115 self.filename, self.mode, **self.file_open_kwargs, ), ) def _get_lock(self, fh: types.IO) -> types.IO: """ Try to lock the given filehandle returns LockException if it fails""" portalocker.lock(fh, self.flags) return fh def _prepare_fh(self, fh: types.IO) -> types.IO: """ Prepare the filehandle for usage If truncate is a number, the file will be truncated to that amount of bytes """ if self.truncate: fh.seek(0) fh.truncate(0) return fh class RLock(Lock): """ A reentrant lock, functions in a similar way to threading.RLock in that it can be acquired multiple times. When the corresponding number of release() calls are made the lock will finally release the underlying file lock. """ def __init__( self, filename: Filename, mode: Mode = 'a', timeout: float = DEFAULT_TIMEOUT, check_interval: float = DEFAULT_CHECK_INTERVAL, fail_when_locked: bool = False, flags: constants.LockFlags = LOCK_METHOD, ) -> None: super().__init__( filename, mode, timeout, check_interval, fail_when_locked, flags, ) self._acquire_count = 0 def acquire( self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> typing.IO[typing.AnyStr]: fh: typing.IO[typing.AnyStr] if self._acquire_count >= 1: fh = typing.cast(typing.IO[typing.AnyStr], self.fh) else: fh = super().acquire(timeout, check_interval, fail_when_locked) self._acquire_count += 1 assert fh is not None return fh def release(self) -> None: if self._acquire_count == 0: raise exceptions.LockException( 'Cannot release more times than acquired', ) if self._acquire_count == 1: super().release() self._acquire_count -= 1 class TemporaryFileLock(Lock): def __init__( self, filename: str = '.lock', timeout: float = DEFAULT_TIMEOUT, check_interval: float = DEFAULT_CHECK_INTERVAL, fail_when_locked: bool = True, flags: constants.LockFlags = LOCK_METHOD, ) -> None: Lock.__init__( self, filename=filename, mode='w', timeout=timeout, check_interval=check_interval, fail_when_locked=fail_when_locked, flags=flags, ) atexit.register(self.release) def release(self) -> None: Lock.release(self) if os.path.isfile(self.filename): # pragma: no branch os.unlink(self.filename) class BoundedSemaphore(LockBase): """ Bounded semaphore to prevent too many parallel processes from running This method is deprecated because multiple processes that are completely unrelated could end up using the same semaphore. 
To prevent this, use `NamedBoundedSemaphore` instead. The `NamedBoundedSemaphore` is a drop-in replacement for this class. >>> semaphore = BoundedSemaphore(2, directory='') >>> str(semaphore.get_filenames()[0]) 'bounded_semaphore.00.lock' >>> str(sorted(semaphore.get_random_filenames())[1]) 'bounded_semaphore.01.lock' """ lock: Lock | None def __init__( self, maximum: int, name: str = 'bounded_semaphore', filename_pattern: str = '{name}.{number:02d}.lock', directory: str = tempfile.gettempdir(), timeout: float | None = DEFAULT_TIMEOUT, check_interval: float | None = DEFAULT_CHECK_INTERVAL, fail_when_locked: bool | None = True, ) -> None: self.maximum = maximum self.name = name self.filename_pattern = filename_pattern self.directory = directory self.lock: Lock | None = None super().__init__( timeout=timeout, check_interval=check_interval, fail_when_locked=fail_when_locked, ) if not name or name == 'bounded_semaphore': warnings.warn( '`BoundedSemaphore` without an explicit `name` ' 'argument is deprecated, use NamedBoundedSemaphore', DeprecationWarning, stacklevel=1, ) def get_filenames(self) -> typing.Sequence[pathlib.Path]: return [self.get_filename(n) for n in range(self.maximum)] def get_random_filenames(self) -> typing.Sequence[pathlib.Path]: filenames = list(self.get_filenames()) random.shuffle(filenames) return filenames def get_filename(self, number: int) -> pathlib.Path: return pathlib.Path(self.directory) / self.filename_pattern.format( name=self.name, number=number, ) def acquire( # type: ignore[override] self, timeout: float | None = None, check_interval: float | None = None, fail_when_locked: bool | None = None, ) -> Lock | None: assert not self.lock, 'Already locked' filenames = self.get_filenames() for n in self._timeout_generator(timeout, check_interval): # pragma: logger.debug('trying lock (attempt %d) %r', n, filenames) # no branch if self.try_lock(filenames): # pragma: no branch return self.lock # pragma: no cover if fail_when_locked := coalesce( fail_when_locked, self.fail_when_locked, ): raise exceptions.AlreadyLocked() return None def try_lock(self, filenames: typing.Sequence[Filename]) -> bool: filename: Filename for filename in filenames: logger.debug('trying lock for %r', filename) self.lock = Lock(filename, fail_when_locked=True) try: self.lock.acquire() except exceptions.AlreadyLocked: self.lock = None else: logger.debug('locked %r', filename) return True return False def release(self) -> None: # pragma: no cover if self.lock is not None: self.lock.release() self.lock = None class NamedBoundedSemaphore(BoundedSemaphore): """ Bounded semaphore to prevent too many parallel processes from running It's also possible to specify a timeout when acquiring the lock to wait for a resource to become available. This is very similar to `threading.BoundedSemaphore` but works across multiple processes and across multiple operating systems. Because this works across multiple processes it's important to give the semaphore a name. This name is used to create the lock files. If you don't specify a name, a random name will be generated. This means that you can't use the same semaphore in multiple processes unless you pass the semaphore object to the other processes. 
>>> semaphore = NamedBoundedSemaphore(2, name='test') >>> str(semaphore.get_filenames()[0]) '...test.00.lock' >>> semaphore = NamedBoundedSemaphore(2) >>> 'bounded_semaphore' in str(semaphore.get_filenames()[0]) True """ def __init__( self, maximum: int, name: str | None = None, filename_pattern: str = '{name}.{number:02d}.lock', directory: str = tempfile.gettempdir(), timeout: float | None = DEFAULT_TIMEOUT, check_interval: float | None = DEFAULT_CHECK_INTERVAL, fail_when_locked: bool | None = True, ) -> None: if name is None: name = f'bounded_semaphore.{random.randint(0, 1000000):d}' super().__init__( maximum, name, filename_pattern, directory, timeout, check_interval, fail_when_locked, ) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4853418 portalocker-3.1.1/portalocker.egg-info/0000755000076500000240000000000014734777047016757 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654951.0 portalocker-3.1.1/portalocker.egg-info/PKG-INFO0000644000076500000240000002062514734777047020061 0ustar00rickstaffMetadata-Version: 2.1 Name: portalocker Version: 3.1.1 Summary: Wraps the portalocker recipe for easy usage Author-email: Rick van Hattem License: BSD-3-Clause Project-URL: bugs, https://github.com/wolph/portalocker/issues Project-URL: documentation, https://portalocker.readthedocs.io/en/latest/ Project-URL: repository, https://github.com/wolph/portalocker/ Keywords: locking,locks,with,statement,windows,linux,unix Platform: any Classifier: Development Status :: 5 - Production/Stable Classifier: Development Status :: 6 - Mature Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Natural Language :: English Classifier: Operating System :: MacOS :: MacOS X Classifier: Operating System :: MacOS Classifier: Operating System :: Microsoft :: MS-DOS Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Microsoft Classifier: Operating System :: POSIX :: BSD :: FreeBSD Classifier: Operating System :: POSIX :: BSD Classifier: Operating System :: POSIX :: Linux Classifier: Operating System :: POSIX :: SunOS/Solaris Classifier: Operating System :: POSIX Classifier: Operating System :: Unix Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: IronPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Programming Language :: Python :: Implementation Classifier: Programming Language :: Python Classifier: Topic :: Education :: Testing Classifier: Topic :: Office/Business Classifier: Topic :: Other/Nonlisted Topic Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: System :: Monitoring Classifier: Typing :: Typed Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: pywin32>=226; platform_system == "Windows" Provides-Extra: docs Requires-Dist: sphinx>=1.7.1; extra == "docs" Provides-Extra: tests Requires-Dist: pytest>=5.4.1; 
extra == "tests" Requires-Dist: pytest-cov>=2.8.1; extra == "tests" Requires-Dist: pytest-timeout>=2.1.0; extra == "tests" Requires-Dist: sphinx>=6.0.0; extra == "tests" Requires-Dist: pytest-mypy>=0.8.0; extra == "tests" Requires-Dist: types-redis; extra == "tests" Requires-Dist: redis; extra == "tests" Requires-Dist: pytest-rerunfailures>=15.0; extra == "tests" Provides-Extra: redis Requires-Dist: redis; extra == "redis" ############################################ portalocker - Cross-platform locking library ############################################ .. image:: https://github.com/WoLpH/portalocker/actions/workflows/python-package.yml/badge.svg?branch=master :alt: Linux Test Status :target: https://github.com/WoLpH/portalocker/actions/ .. image:: https://ci.appveyor.com/api/projects/status/mgqry98hgpy4prhh?svg=true :alt: Windows Tests Status :target: https://ci.appveyor.com/project/WoLpH/portalocker .. image:: https://coveralls.io/repos/WoLpH/portalocker/badge.svg?branch=master :alt: Coverage Status :target: https://coveralls.io/r/WoLpH/portalocker?branch=master Overview -------- Portalocker is a library to provide an easy API to file locking. An important detail to note is that on Linux and Unix systems the locks are advisory by default. By specifying the `-o mand` option to the mount command it is possible to enable mandatory file locking on Linux. This is generally not recommended however. For more information about the subject: - https://en.wikipedia.org/wiki/File_locking - http://stackoverflow.com/questions/39292051/portalocker-does-not-seem-to-lock - https://stackoverflow.com/questions/12062466/mandatory-file-lock-on-linux The module is currently maintained by Rick van Hattem . The project resides at https://github.com/WoLpH/portalocker . Bugs and feature requests can be submitted there. Patches are also very welcome. Security contact information ------------------------------------------------------------------------------ To report a security vulnerability, please use the `Tidelift security contact `_. Tidelift will coordinate the fix and disclosure. Redis Locks ----------- This library now features a lock based on Redis which allows for locks across multiple threads, processes and even distributed locks across multiple computers. It is an extremely reliable Redis lock that is based on pubsub. As opposed to most Redis locking systems based on key/value pairs, this locking method is based on the pubsub system. The big advantage is that if the connection gets killed due to network issues, crashing processes or otherwise, it will still immediately unlock instead of waiting for a lock timeout. First make sure you have everything installed correctly: :: pip install "portalocker[redis]" Usage is really easy: :: import portalocker lock = portalocker.RedisLock('some_lock_channel_name') with lock: print('do something here') The API is essentially identical to the other ``Lock`` classes so in addition to the ``with`` statement you can also use ``lock.acquire(...)``. Python 2 -------- Python 2 was supported in versions before Portalocker 2.0. If you are still using Python 2, you can run this to install: :: pip install "portalocker<2" Tips ---- On some networked filesystems it might be needed to force a `os.fsync()` before closing the file so it's actually written before another client reads the file. Effectively this comes down to: :: with portalocker.Lock('some_file', 'rb+', timeout=60) as fh: # do what you need to do ... 
# flush and sync to filesystem fh.flush() os.fsync(fh.fileno()) Links ----- * Documentation - http://portalocker.readthedocs.org/en/latest/ * Source - https://github.com/WoLpH/portalocker * Bug reports - https://github.com/WoLpH/portalocker/issues * Package homepage - https://pypi.python.org/pypi/portalocker * My blog - http://w.wol.ph/ Examples -------- To make sure your cache generation scripts don't race, use the `Lock` class: >>> import portalocker >>> with portalocker.Lock('somefile', timeout=1) as fh: ... print('writing some stuff to my cache...', file=fh) To customize the opening and locking a manual approach is also possible: >>> import portalocker >>> file = open('somefile', 'r+') >>> portalocker.lock(file, portalocker.LockFlags.EXCLUSIVE) >>> file.seek(12) >>> file.write('foo') >>> file.close() Explicitly unlocking is not needed in most cases but omitting it has been known to cause issues: https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/42#issuecomment-601108266 If needed, it can be done through: >>> portalocker.unlock(file) Do note that your data might still be in a buffer so it is possible that your data is not available until you `flush()` or `close()`. To create a cross platform bounded semaphore across multiple processes you can use the `BoundedSemaphore` class which functions somewhat similar to `threading.BoundedSemaphore`: >>> import portalocker >>> n = 2 >>> timeout = 0.1 >>> semaphore_a = portalocker.BoundedSemaphore(n, timeout=timeout) >>> semaphore_b = portalocker.BoundedSemaphore(n, timeout=timeout) >>> semaphore_c = portalocker.BoundedSemaphore(n, timeout=timeout) >>> semaphore_a.acquire() >>> semaphore_b.acquire() >>> semaphore_c.acquire() Traceback (most recent call last): ... portalocker.exceptions.AlreadyLocked More examples can be found in the `tests `_. Versioning ---------- This library follows `Semantic Versioning `_. Changelog --------- Every release has a ``git tag`` with a commit message for the tag explaining what was added and/or changed. The list of tags/releases including the commit messages can be found here: https://github.com/WoLpH/portalocker/releases License ------- See the `LICENSE `_ file. 
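As a further illustration of the ``lock.acquire(...)`` form mentioned in the
Redis Locks section above, a minimal sketch, assuming a reachable local Redis
server (the channel name is arbitrary):

::

    import portalocker

    lock = portalocker.RedisLock('some_lock_channel_name')
    lock.acquire(timeout=5, fail_when_locked=True)
    try:
        print('doing work while holding the distributed lock')
    finally:
        lock.release()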
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654951.0 portalocker-3.1.1/portalocker.egg-info/SOURCES.txt0000644000076500000240000000277614734777047020657 0ustar00rickstaff.coveragerc .gitignore .pre-commit-config.yaml CHANGELOG.rst LICENSE MANIFEST.in README.rst appveyor.yml pyproject.toml pytest.ini ruff.toml sourcery.yaml tox.toml .github/FUNDING.yml .github/dependabot.yml .github/workflows/lint.yml .github/workflows/python-package.yml .github/workflows/stale.yml docs/Makefile docs/conf.py docs/index.rst docs/license.rst docs/make.bat docs/portalocker.constants.rst docs/portalocker.exceptions.rst docs/portalocker.portalocker.rst docs/portalocker.redis.rst docs/portalocker.rst docs/portalocker.types.rst docs/portalocker.utils.rst docs/requirements.txt docs/tests.rst docs/_theme/LICENSE docs/_theme/flask_theme_support.py docs/_theme/wolph/layout.html docs/_theme/wolph/relations.html docs/_theme/wolph/theme.conf docs/_theme/wolph/static/flasky.css_t docs/_theme/wolph/static/small_flask.css portalocker/__about__.py portalocker/__init__.py portalocker/__main__.py portalocker/constants.py portalocker/exceptions.py portalocker/portalocker.py portalocker/py.typed portalocker/redis.py portalocker/types.py portalocker/utils.py portalocker.egg-info/PKG-INFO portalocker.egg-info/SOURCES.txt portalocker.egg-info/dependency_links.txt portalocker.egg-info/requires.txt portalocker.egg-info/top_level.txt portalocker_tests/__init__.py portalocker_tests/conftest.py portalocker_tests/mypy.ini portalocker_tests/requirements.txt portalocker_tests/temporary_file_lock.py portalocker_tests/test_combined.py portalocker_tests/test_redis.py portalocker_tests/test_semaphore.py portalocker_tests/tests.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654951.0 portalocker-3.1.1/portalocker.egg-info/dependency_links.txt0000644000076500000240000000000114734777047023025 0ustar00rickstaff ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654951.0 portalocker-3.1.1/portalocker.egg-info/requires.txt0000644000076500000240000000034014734777047021354 0ustar00rickstaff [:platform_system == "Windows"] pywin32>=226 [docs] sphinx>=1.7.1 [redis] redis [tests] pytest>=5.4.1 pytest-cov>=2.8.1 pytest-timeout>=2.1.0 sphinx>=6.0.0 pytest-mypy>=0.8.0 types-redis redis pytest-rerunfailures>=15.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654951.0 portalocker-3.1.1/portalocker.egg-info/top_level.txt0000644000076500000240000000001414734777047021504 0ustar00rickstaffportalocker ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4846504 portalocker-3.1.1/portalocker_tests/0000755000076500000240000000000014734777047016507 5ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654943.0 portalocker-3.1.1/portalocker_tests/__init__.py0000644000076500000240000000000014734777037020605 0ustar00rickstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker_tests/conftest.py0000644000076500000240000000156414734754243020705 0ustar00rickstaffimport contextlib import logging import multiprocessing import random import pytest from portalocker import utils logger = logging.getLogger(__name__) @pytest.fixture(scope='function') def tmpfile(tmp_path): filename = tmp_path / str(random.random())[2:] yield str(filename) with 
contextlib.suppress(PermissionError): filename.unlink(missing_ok=True) def pytest_sessionstart(session): # Force spawning the process so we don't accidentally inherit locks. # I'm not a 100% certain this will work correctly unfortunately... there # is some potential for breaking tests multiprocessing.set_start_method('spawn') @pytest.fixture(autouse=True) def reduce_timeouts(monkeypatch): "For faster testing we reduce the timeouts." monkeypatch.setattr(utils, 'DEFAULT_TIMEOUT', 0.1) monkeypatch.setattr(utils, 'DEFAULT_CHECK_INTERVAL', 0.05) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1611419668.0 portalocker-3.1.1/portalocker_tests/mypy.ini0000644000076500000240000000013014003050024020140 0ustar00rickstaff[mypy] warn_return_any = True warn_unused_configs = True ignore_missing_imports = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1563050062.0 portalocker-3.1.1/portalocker_tests/requirements.txt0000644000076500000240000000001313512440116021736 0ustar00rickstaff-e.[tests] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1694861733.0 portalocker-3.1.1/portalocker_tests/temporary_file_lock.py0000644000076500000240000000040014501304645023062 0ustar00rickstaffimport os import portalocker def test_temporary_file_lock(tmpfile): with portalocker.TemporaryFileLock(tmpfile): pass assert not os.path.isfile(tmpfile) lock = portalocker.TemporaryFileLock(tmpfile) lock.acquire() del lock ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker_tests/test_combined.py0000644000076500000240000000105614734754243021673 0ustar00rickstaffimport sys from portalocker import __main__ def test_combined(tmpdir): output_file = tmpdir.join('combined.py') __main__.main(['combine', '--output-file', output_file.strpath]) print(output_file) # noqa: T201 print('#################') # noqa: T201 print(output_file.read()) # noqa: T201 print('#################') # noqa: T201 sys.path.append(output_file.dirname) # Combined is being generated above but linters won't understand that import combined # pyright: ignore[reportMissingImports] assert combined ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker_tests/test_redis.py0000644000076500000240000000505214734754243021221 0ustar00rickstaffimport _thread import logging import random import time import pytest import portalocker from portalocker import redis, utils from redis import client, exceptions logger = logging.getLogger(__name__) try: client.Redis().ping() except (exceptions.ConnectionError, ConnectionRefusedError): pytest.skip('Unable to connect to redis', allow_module_level=True) @pytest.fixture(autouse=True) def set_redis_timeouts(monkeypatch): monkeypatch.setattr(utils, 'DEFAULT_TIMEOUT', 0.0001) monkeypatch.setattr(utils, 'DEFAULT_CHECK_INTERVAL', 0.0005) monkeypatch.setattr(redis, 'DEFAULT_UNAVAILABLE_TIMEOUT', 0.01) monkeypatch.setattr(redis, 'DEFAULT_THREAD_SLEEP_TIME', 0.001) monkeypatch.setattr(_thread, 'interrupt_main', lambda: None) def test_redis_lock() -> None: channel = str(random.random()) lock_a: redis.RedisLock = redis.RedisLock(channel) lock_a.acquire(fail_when_locked=True) time.sleep(0.01) lock_b = redis.RedisLock(channel) try: with pytest.raises(portalocker.AlreadyLocked): lock_b.acquire(fail_when_locked=True) finally: lock_a.release() if lock_a.connection is not None: lock_a.connection.close() 
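# A minimal sketch (assumed usage mirroring the tests around it; the function
# name is illustrative): passing an existing connection to RedisLock. The lock
# does not close connections it was given, so we close it ourselves afterwards.
def test_redis_lock_with_existing_connection() -> None:
    connection: client.Redis[str] = client.Redis(decode_responses=True)
    channel = str(random.random())
    lock = redis.RedisLock(channel, connection=connection)
    with lock:
        time.sleep(0.01)
    connection.close()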
@pytest.mark.parametrize('timeout', [None, 0, 0.001]) @pytest.mark.parametrize('check_interval', [None, 0, 0.0005]) def test_redis_lock_timeout(timeout, check_interval): connection: client.Redis[str] = client.Redis(decode_responses=True) channel = str(random.random()) lock_a = redis.RedisLock(channel) lock_a.acquire(timeout=timeout, check_interval=check_interval) lock_b = redis.RedisLock(channel, connection=connection) with pytest.raises(portalocker.AlreadyLocked): try: lock_b.acquire(timeout=timeout, check_interval=check_interval) finally: lock_a.release() if lock_a.connection is not None: lock_a.connection.close() def test_redis_lock_context() -> None: channel = str(random.random()) lock_a = redis.RedisLock(channel, fail_when_locked=True) with lock_a: time.sleep(0.01) lock_b = redis.RedisLock(channel, fail_when_locked=True) with pytest.raises(portalocker.AlreadyLocked), lock_b: pass def test_redis_relock() -> None: channel = str(random.random()) lock_a = redis.RedisLock(channel, fail_when_locked=True) with lock_a: time.sleep(0.01) with pytest.raises(AssertionError): lock_a.acquire() time.sleep(0.01) lock_a.release() if __name__ == '__main__': test_redis_lock() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1731900250.0 portalocker-3.1.1/portalocker_tests/test_semaphore.py0000644000076500000240000000173714716531532022076 0ustar00rickstaffimport random import pytest import portalocker from portalocker import utils @pytest.mark.parametrize('timeout', [None, 0, 0.001]) @pytest.mark.parametrize('check_interval', [None, 0, 0.0005]) def test_bounded_semaphore(timeout, check_interval, monkeypatch): n = 2 name: str = str(random.random()) monkeypatch.setattr(utils, 'DEFAULT_TIMEOUT', 0.0001) monkeypatch.setattr(utils, 'DEFAULT_CHECK_INTERVAL', 0.0005) semaphore_a = portalocker.BoundedSemaphore(n, name=name, timeout=timeout) semaphore_b = portalocker.BoundedSemaphore(n, name=name, timeout=timeout) semaphore_c = portalocker.BoundedSemaphore(n, name=name, timeout=timeout) semaphore_a.acquire(timeout=timeout) semaphore_b.acquire() with pytest.raises(portalocker.AlreadyLocked): semaphore_c.acquire(check_interval=check_interval, timeout=timeout) semaphore_c.acquire( check_interval=check_interval, timeout=timeout, fail_when_locked=False, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/portalocker_tests/tests.py0000644000076500000240000003301714734754243020220 0ustar00rickstaffimport dataclasses import math import multiprocessing import os import sys import time import typing import pytest import portalocker import portalocker.portalocker from portalocker import LockFlags, exceptions, utils if os.name == 'posix': import fcntl LOCKERS = [ fcntl.flock, fcntl.lockf, ] else: LOCKERS = [None] # type: ignore[list-item] @pytest.fixture def locker(request, monkeypatch): monkeypatch.setattr(portalocker.portalocker, 'LOCKER', request.param) return request.param def test_exceptions(tmpfile): with open(tmpfile, 'a') as a, open(tmpfile, 'a') as b: # Lock exclusive non-blocking lock_flags = portalocker.LOCK_EX | portalocker.LOCK_NB # First lock file a portalocker.lock(a, lock_flags) # Now see if we can lock file b with pytest.raises(portalocker.LockException): portalocker.lock(b, lock_flags) def test_utils_base(): class Test(utils.LockBase): pass def test_with_timeout(tmpfile): # Open the file 2 times with pytest.raises(portalocker.AlreadyLocked): with portalocker.Lock(tmpfile, timeout=0.1) as fh: print('writing some 
stuff to my cache...', file=fh) with portalocker.Lock( tmpfile, timeout=0.1, mode='wb', fail_when_locked=True, ): pass print('writing more stuff to my cache...', file=fh) def test_without_timeout(tmpfile): # Open the file 2 times with pytest.raises(portalocker.LockException): with portalocker.Lock(tmpfile, timeout=None) as fh: print('writing some stuff to my cache...', file=fh) with portalocker.Lock(tmpfile, timeout=None, mode='w'): pass print('writing more stuff to my cache...', file=fh) def test_without_fail(tmpfile): # Open the file 2 times with pytest.raises(portalocker.LockException): with portalocker.Lock(tmpfile, timeout=0.1) as fh: print('writing some stuff to my cache...', file=fh) lock = portalocker.Lock(tmpfile, timeout=0.1) lock.acquire(check_interval=0.05, fail_when_locked=False) def test_simple(tmpfile): with open(tmpfile, 'w') as fh: fh.write('spam and eggs') with open(tmpfile, 'r+') as fh: portalocker.lock(fh, portalocker.LOCK_EX) fh.seek(13) fh.write('foo') # Make sure we didn't overwrite the original text fh.seek(0) assert fh.read(13) == 'spam and eggs' portalocker.unlock(fh) def test_truncate(tmpfile): with open(tmpfile, 'w') as fh: fh.write('spam and eggs') with portalocker.Lock(tmpfile, mode='a+') as fh: # Make sure we didn't overwrite the original text fh.seek(0) assert fh.read(13) == 'spam and eggs' with portalocker.Lock(tmpfile, mode='w+') as fh: # Make sure we truncated the file assert fh.read() == '' def test_class(tmpfile): lock = portalocker.Lock(tmpfile) lock2 = portalocker.Lock(tmpfile, fail_when_locked=False, timeout=0.01) with lock: lock.acquire() with pytest.raises(portalocker.LockException), lock2: pass with lock2: pass def test_acquire_release(tmpfile): lock = portalocker.Lock(tmpfile) lock2 = portalocker.Lock(tmpfile, fail_when_locked=False) lock.acquire() # acquire lock when nobody is using it with pytest.raises(portalocker.LockException): # another party should not be able to acquire the lock lock2.acquire(timeout=0.01) # re-acquiring a held lock is a no-op lock.acquire() lock.release() # release the lock lock.release() # second release does nothing def test_rlock_acquire_release_count(tmpfile): lock = portalocker.RLock(tmpfile) # Acquire twice h = lock.acquire() assert not h.closed lock.acquire() assert not h.closed # Release twice lock.release() assert not h.closed lock.release() assert h.closed def test_rlock_acquire_release(tmpfile): lock = portalocker.RLock(tmpfile) lock2 = portalocker.RLock(tmpfile, fail_when_locked=False) lock.acquire() # acquire lock when nobody is using it with pytest.raises(portalocker.LockException): # another party should not be able to acquire the lock lock2.acquire(timeout=0.01) # Now acquire again lock.acquire() lock.release() # release the lock lock.release() # second release does nothing def test_release_unacquired(tmpfile): with pytest.raises(portalocker.LockException): portalocker.RLock(tmpfile).release() def test_exclusive(tmpfile): text_0 = 'spam and eggs' with open(tmpfile, 'w') as fh: fh.write(text_0) with open(tmpfile) as fh: portalocker.lock(fh, portalocker.LOCK_EX | portalocker.LOCK_NB) # Make sure we can't read the locked file with ( pytest.raises(portalocker.LockException), open( tmpfile, 'r+', ) as fh2, ): portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB) assert fh2.read() == text_0 # Make sure we can't write the locked file with ( pytest.raises(portalocker.LockException), open( tmpfile, 'w+', ) as fh2, ): portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB) fh2.write('surprise and
fear') # Make sure we can explicitly unlock the file portalocker.unlock(fh) def test_shared(tmpfile): with open(tmpfile, 'w') as fh: fh.write('spam and eggs') with open(tmpfile) as f: portalocker.lock(f, portalocker.LOCK_SH | portalocker.LOCK_NB) # Make sure we can read the locked file with open(tmpfile) as fh2: portalocker.lock(fh2, portalocker.LOCK_SH | portalocker.LOCK_NB) assert fh2.read() == 'spam and eggs' # Make sure we can't write the locked file with ( pytest.raises(portalocker.LockException), open( tmpfile, 'w+', ) as fh2, ): portalocker.lock(fh2, portalocker.LOCK_EX | portalocker.LOCK_NB) fh2.write('surprise and fear') # Make sure we can explicitly unlock the file portalocker.unlock(f) @pytest.mark.parametrize('locker', LOCKERS, indirect=True) def test_blocking_timeout(tmpfile, locker): flags = LockFlags.SHARED with pytest.warns(UserWarning): with portalocker.Lock(tmpfile, 'a+', timeout=5, flags=flags): pass lock = portalocker.Lock(tmpfile, 'a+', flags=flags) with pytest.warns(UserWarning): lock.acquire(timeout=5) @pytest.mark.skipif( os.name == 'nt', reason='Windows uses an entirely different lock mechanism', ) @pytest.mark.parametrize('locker', LOCKERS, indirect=True) def test_nonblocking(tmpfile, locker): with open(tmpfile, 'w') as fh, pytest.raises(RuntimeError): portalocker.lock(fh, LockFlags.NON_BLOCKING) def shared_lock(filename, **kwargs): with portalocker.Lock( filename, timeout=0.1, fail_when_locked=False, flags=LockFlags.SHARED | LockFlags.NON_BLOCKING, ): time.sleep(0.2) return True def shared_lock_fail(filename, **kwargs): with portalocker.Lock( filename, timeout=0.1, fail_when_locked=True, flags=LockFlags.SHARED | LockFlags.NON_BLOCKING, ): time.sleep(0.2) return True def exclusive_lock(filename, **kwargs): with portalocker.Lock( filename, timeout=0.1, fail_when_locked=False, flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING, ): time.sleep(0.2) return True @dataclasses.dataclass(order=True) class LockResult: exception_class: typing.Union[type[Exception], None] = None exception_message: typing.Union[str, None] = None exception_repr: typing.Union[str, None] = None def lock( filename: str, fail_when_locked: bool, flags: LockFlags, timeout: float = 0.1, keep_locked: float = 0.05, ) -> LockResult: # Returns a case of True, False or FileNotFound # https://thedailywtf.com/articles/what_is_truth_0x3f_ # But seriously, the exception properties cannot be safely pickled so we # only return string representations of the exception properties try: with portalocker.Lock( filename, timeout=timeout, fail_when_locked=fail_when_locked, flags=flags, ): time.sleep(keep_locked) return LockResult() except Exception as exception: # The exceptions cannot be pickled so we cannot return them through # multiprocessing return LockResult( type(exception), str(exception), repr(exception), ) @pytest.mark.parametrize('fail_when_locked', [True, False]) @pytest.mark.skipif( 'pypy' in sys.version.lower(), reason='pypy3 does not support the multiprocessing test', ) @pytest.mark.flaky(reruns=5, reruns_delay=1) def test_shared_processes(tmpfile, fail_when_locked): flags = LockFlags.SHARED | LockFlags.NON_BLOCKING print() print(f'{fail_when_locked=}, {flags=}, {os.name=}, {LOCKERS=}') with multiprocessing.Pool(processes=2) as pool: args = tmpfile, fail_when_locked, flags results = pool.starmap_async(lock, 2 * [args]) # sourcery skip: no-loop-in-tests for result in results.get(timeout=1.5): print(f'{result=}') # sourcery skip: no-conditionals-in-tests if result.exception_class is not None: raise
result.exception_class assert result == LockResult() @pytest.mark.parametrize('fail_when_locked', [True, False]) @pytest.mark.parametrize('locker', LOCKERS, indirect=True) # Skip pypy3 @pytest.mark.skipif( 'pypy' in sys.version.lower(), reason='pypy3 does not support the multiprocessing test', ) @pytest.mark.flaky(reruns=5, reruns_delay=1) def test_exclusive_processes( tmpfile: str, fail_when_locked: bool, locker: typing.Callable[..., typing.Any], ) -> None: flags = LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING print('Locking', tmpfile, fail_when_locked, locker) with multiprocessing.Pool(processes=2) as pool: # Submit tasks individually result_a = pool.apply_async(lock, [tmpfile, fail_when_locked, flags]) result_b = pool.apply_async(lock, [tmpfile, fail_when_locked, flags]) try: a = result_a.get(timeout=1.2) # Wait for 'a' with timeout except multiprocessing.TimeoutError: a = None print(f'{a=}') print(repr(a)) try: # Lower timeout since we already waited with `a` b = result_b.get(timeout=0.6) # Wait for 'b' with timeout except multiprocessing.TimeoutError: b = None print(f'{b=}') print(repr(b)) assert a or b # Make sure a is always filled if a is None: b, a = a, b # make pyright happy assert a is not None if b: # make pyright happy assert b is not None assert not a.exception_class or not b.exception_class assert issubclass( a.exception_class or b.exception_class, # type: ignore[arg-type] portalocker.LockException, ) else: assert not a.exception_class @pytest.mark.skipif( os.name == 'nt', reason='Locking on Windows requires a file object', ) @pytest.mark.parametrize('locker', LOCKERS, indirect=True) def test_lock_fileno(tmpfile, locker): with open(tmpfile, 'a+') as a: with open(tmpfile, 'a+') as b: # Lock shared non-blocking flags = LockFlags.SHARED | LockFlags.NON_BLOCKING # First lock file a portalocker.lock(a, flags) # Now see if we can lock using fileno() portalocker.lock(b.fileno(), flags) @pytest.mark.skipif( os.name != 'posix', reason='Only posix systems have different lockf behaviour', ) @pytest.mark.parametrize('locker', LOCKERS, indirect=True) def test_locker_mechanism(tmpfile, locker): """Can we switch the locking mechanism?""" # We can test for flock vs lockf based on their different behaviour re. # locking the same file. with portalocker.Lock(tmpfile, 'a+', flags=LockFlags.EXCLUSIVE): # If we have lockf(), we can get another lock on the same file from the same process.
if locker is fcntl.lockf: portalocker.Lock( tmpfile, 'r+', flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING, ).acquire(timeout=0.1) # But with other lock methods we can't else: with pytest.raises(portalocker.LockException): portalocker.Lock( tmpfile, 'r+', flags=LockFlags.EXCLUSIVE | LockFlags.NON_BLOCKING, ).acquire(timeout=0.1) def test_exception(monkeypatch, tmpfile): """Do we stop immediately if the locking fails, even with a timeout?""" def patched_lock(*args, **kwargs): raise ValueError('Test exception') monkeypatch.setattr('portalocker.utils.portalocker.lock', patched_lock) lock = portalocker.Lock(tmpfile, 'w', timeout=math.inf) with pytest.raises(exceptions.LockException): lock.acquire() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735654944.0 portalocker-3.1.1/pyproject.toml0000644000076500000240000000767614734777040015665 0ustar00rickstaff[build-system] build-backend = 'setuptools.build_meta' requires = ['setuptools', 'setuptools-scm'] [project] name = 'portalocker' dynamic = ['version'] authors = [{ name = 'Rick van Hattem', email = 'wolph@wol.ph' }] license = { text = 'BSD-3-Clause' } description = 'Wraps the portalocker recipe for easy usage' keywords = ['locking', 'locks', 'with', 'statement', 'windows', 'linux', 'unix'] readme = 'README.rst' classifiers = [ 'Development Status :: 5 - Production/Stable', 'Development Status :: 6 - Mature', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Operating System :: MacOS', 'Operating System :: Microsoft :: MS-DOS', 'Operating System :: Microsoft :: Windows', 'Operating System :: Microsoft', 'Operating System :: POSIX :: BSD :: FreeBSD', 'Operating System :: POSIX :: BSD', 'Operating System :: POSIX :: Linux', 'Operating System :: POSIX :: SunOS/Solaris', 'Operating System :: POSIX', 'Operating System :: Unix', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: IronPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Programming Language :: Python :: Implementation', 'Programming Language :: Python', 'Topic :: Education :: Testing', 'Topic :: Office/Business', 'Topic :: Other/Nonlisted Topic', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Libraries', 'Topic :: System :: Monitoring', 'Typing :: Typed', ] requires-python = '>=3.9' dependencies = [ 'pywin32>=226; platform_system == "Windows"', ] [project.urls] bugs = 'https://github.com/wolph/portalocker/issues' documentation = 'https://portalocker.readthedocs.io/en/latest/' repository = 'https://github.com/wolph/portalocker/' [project.optional-dependencies] docs = ['sphinx>=1.7.1'] tests = [ 'pytest>=5.4.1', 'pytest-cov>=2.8.1', 'pytest-timeout>=2.1.0', 'sphinx>=6.0.0', 'pytest-mypy>=0.8.0', 'types-redis', 'redis', 'pytest-rerunfailures>=15.0', ] redis = ['redis'] [tool.setuptools] platforms = ['any'] include-package-data = false [tool.setuptools.dynamic] version = { attr = 'portalocker.__about__.__version__' } [tool.setuptools.packages.find] include = ['portalocker'] [tool.setuptools.package-data] portalocker = ['py.typed', 
'msvcrt.pyi'] [tool.black] line-length = 79 skip-string-normalization = true [tool.codespell] skip = '*/htmlcov,./docs/_build,*.asc' [tool.pyright] include = ['portalocker', 'portalocker_tests'] exclude = ['dist/*'] strict = ['portalocker'] [tool.mypy] python_version = '3.9' strict = true warn_return_any = true warn_unused_configs = true warn_unused_ignores = false packages = ['portalocker', 'portalocker_tests'] ignore_missing_imports = true check_untyped_defs = true exclude = ['dist', 'docs', '.venv', 'venv'] enable_error_code = ['ignore-without-code', 'truthy-bool', 'redundant-expr'] warn_unreachable = true [[tool.mypy.overrides]] module = ['portalocker_tests.*'] disallow_untyped_defs = false [dependency-groups] dev = [ 'portalocker[tests]', ] [tool.ruff] src = ['portalocker', 'portalocker_tests'] include = ['portalocker/**/*.py', 'portalocker_tests/**/*.py'] [tool.repo-review] ignore = [ 'PY004', # no /docs 'PY007', # tox configured in tox.toml 'PP301', # pytest is irrelevant 'PC111', # no blacken-docs because markdown has no code 'PC140', # manual typecheck pre-commit hooks 'PC170', # no pygrep-hooks because no rST 'RTD', # no RTD ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1657362565.0 portalocker-3.1.1/pytest.ini0000644000076500000240000000034214262254205014746 0ustar00rickstaff[pytest] python_files = portalocker_tests/*.py addopts = --ignore setup.py --ignore portalocker/_*.py --doctest-modules --cov portalocker --cov-report term-missing --cov-report html timeout = 20 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/ruff.toml0000644000076500000240000000613314734754243014573 0ustar00rickstaff# We keep the ruff configuration separate so it can easily be shared across # all projects target-version = 'py39' src = ['portalocker'] exclude = [ 'docs', '.tox', # Ignore local test files/directories/old-stuff 'test.py', '*_old.py', ] line-length = 79 [lint] ignore = [ 'A001', # Variable {name} is shadowing a Python builtin 'A002', # Argument {name} is shadowing a Python builtin 'A003', # Class attribute {name} is shadowing a Python builtin 'B023', # function-uses-loop-variable 'B024', # `FormatWidgetMixin` is an abstract base class, but it has no abstract methods 'D205', # blank-line-after-summary 'D212', # multi-line-summary-first-line 'RET505', # Unnecessary `else` after `return` statement 'TRY003', # Avoid specifying long messages outside the exception class 'RET507', # Unnecessary `elif` after `continue` statement 'C405', # Unnecessary {obj_type} literal (rewrite as a set literal) 'C406', # Unnecessary {obj_type} literal (rewrite as a dict literal) 'C408', # Unnecessary {obj_type} call (rewrite as a literal) 'SIM114', # Combine `if` branches using logical `or` operator 'RET506', # Unnecessary `else` after `raise` statement 'Q001', # Remove bad quotes 'Q002', # Remove bad quotes 'FA100', # Missing `from __future__ import annotations`, but uses `typing.Optional` 'COM812', # Missing trailing comma in a list 'ISC001', # String concatenation with implicit str conversion 'SIM108', # Ternary operators are not always more readable 'RUF100', # Unused noqa directives. 
Due to multiple Python versions, we need to keep them ] select = [ 'A', # flake8-builtins 'ASYNC', # flake8 async checker 'B', # flake8-bugbear 'C4', # flake8-comprehensions 'C90', # mccabe 'COM', # flake8-commas ## Require docstrings for all public methods, would be good to enable at some point # 'D', # pydocstyle 'E', # pycodestyle error ('W' for warning) 'F', # pyflakes 'FA', # flake8-future-annotations 'I', # isort 'ICN', # flake8-import-conventions 'INP', # flake8-no-pep420 'ISC', # flake8-implicit-str-concat 'N', # pep8-naming 'NPY', # NumPy-specific rules 'PERF', # perflint, 'PIE', # flake8-pie 'Q', # flake8-quotes 'RET', # flake8-return 'RUF', # Ruff-specific rules 'SIM', # flake8-simplify 'T20', # flake8-print 'TD', # flake8-todos 'TRY', # tryceratops 'UP', # pyupgrade ] [lint.per-file-ignores] 'portalocker_tests/tests.py' = ['SIM115', 'SIM117', 'T201'] [lint.pydocstyle] convention = 'google' ignore-decorators = [ 'typing.overload', 'typing.override', ] [lint.isort] case-sensitive = true combine-as-imports = true force-wrap-aliases = true [lint.flake8-quotes] docstring-quotes = 'single' inline-quotes = 'single' multiline-quotes = 'single' [format] line-ending = 'lf' indent-style = 'space' quote-style = 'single' docstring-code-format = true skip-magic-trailing-comma = false exclude = [ '__init__.py', ] [lint.pycodestyle] max-line-length = 79 [lint.flake8-pytest-style] mark-parentheses = true ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1735654951.4916365 portalocker-3.1.1/setup.cfg0000644000076500000240000000004614734777047014561 0ustar00rickstaff[egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1694861733.0 portalocker-3.1.1/sourcery.yaml0000644000076500000240000000003514501304645015453 0ustar00rickstaffignore: - portalocker_tests././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735645347.0 portalocker-3.1.1/tox.toml0000644000076500000240000000243614734754243014445 0ustar00rickstaffenv_list = [ 'py39', 'py310', 'py311', 'py312', 'pypy3', 'docs', 'mypy', 'pyright', 'ruff', 'repo-review', 'codespell', ] skip_missing_interpreters = true [env_run_base] pass_env = ['FORCE_COLOR'] commands = [['pytest', '{posargs}']] extras = ['tests', 'redis'] [env.mypy] commands = [['mypy']] [env.pyright] deps = ['pyright'] commands = [['pyright']] [env.ruff] deps = ['ruff'] commands = [['ruff', 'check'], ['ruff', 'format', '--check']] [env.docs] extras = ['docs'] allowlist_externals = ['rm', 'cd', 'mkdir'] commands = [ [ 'rm', '-f', 'docs/modules.rst', ], [ 'mkdir', '-p', 'docs/_static', ], [ 'sphinx-apidoc', '-e', '-o', 'docs/', 'portalocker', ], [ 'rm', '-f', 'docs/modules.rst', ], [ 'sphinx-build', '-b', 'html', '-d', 'docs/_build/doctrees', 'docs', 'docs/_build/html', '{posargs}', ], ] [env.repo-review] basepython = ['py312'] deps = ['sp-repo-review[cli]', 'validate-pyproject'] commands = [['repo-review']] [env.codespell] commands = [['codespell']] deps = ['codespell', 'tomli'] skip_install = true command = 'codespell'
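The configuration above (pytest, tox, ruff) drives the test suite for the file-locking API this archive ships. A minimal sketch of the library's most common pattern, portalocker.Lock used as a context manager; the filename 'example.txt' and the message are placeholders chosen for illustration:

import portalocker

# Hold an exclusive advisory lock on 'example.txt' while appending; waits up
# to 5 seconds for a competing holder before raising a LockException.
with portalocker.Lock('example.txt', 'a', timeout=5) as fh:
    fh.write('hello from a locked writer\n')
    fh.flush()  # flush while the lock is still held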