pax_global_header00006660000000000000000000000064141657172630014526gustar00rootroot0000000000000052 comment=0dcb5db513d47e6b62cf11bf7f0234d84abef2fa path-16.3.0/000077500000000000000000000000001416571726300125515ustar00rootroot00000000000000path-16.3.0/.coveragerc000066400000000000000000000001671416571726300146760ustar00rootroot00000000000000[run] omit = # leading `*/` for pytest-dev/pytest-cov#456 */.tox/* path/py37compat.py [report] show_missing = True path-16.3.0/.dockerignore000066400000000000000000000000051416571726300152200ustar00rootroot00000000000000.tox path-16.3.0/.editorconfig000066400000000000000000000003301416571726300152220ustar00rootroot00000000000000root = true [*] charset = utf-8 indent_style = tab indent_size = 4 insert_final_newline = true end_of_line = lf [*.py] indent_style = space max_line_length = 88 [*.{yml,yaml}] indent_style = space indent_size = 2 path-16.3.0/.flake8000066400000000000000000000002101416571726300137150ustar00rootroot00000000000000[flake8] max-line-length = 88 # jaraco/skeleton#34 max-complexity = 10 extend-ignore = # Black creates whitespace before colon E203 path-16.3.0/.github/000077500000000000000000000000001416571726300141115ustar00rootroot00000000000000path-16.3.0/.github/FUNDING.yml000066400000000000000000000000241416571726300157220ustar00rootroot00000000000000tidelift: pypi/path path-16.3.0/.github/dependabot.yml000066400000000000000000000002241416571726300167370ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" allow: - dependency-type: "all" path-16.3.0/.github/workflows/000077500000000000000000000000001416571726300161465ustar00rootroot00000000000000path-16.3.0/.github/workflows/main.yml000066400000000000000000000021001416571726300176060ustar00rootroot00000000000000name: tests on: [push, pull_request] jobs: test: strategy: matrix: python: - 3.7 - 3.9 - "3.10" platform: - ubuntu-latest - macos-latest - windows-latest runs-on: ${{ matrix.platform }} 
steps: - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - name: Install tox run: | python -m pip install tox - name: Run tests run: tox release: needs: test if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v2 with: python-version: "3.10" - name: Install tox run: | python -m pip install tox - name: Release run: tox -e release env: TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} path-16.3.0/.gitignore000066400000000000000000000001171416571726300145400ustar00rootroot00000000000000__pycache__ *.pyc *.egg-info *.egg .eggs/ MANIFEST build dist .tox docs/_build path-16.3.0/.pre-commit-config.yaml000066400000000000000000000001211416571726300170240ustar00rootroot00000000000000repos: - repo: https://github.com/psf/black rev: 20.8b1 hooks: - id: black path-16.3.0/.readthedocs.yml000066400000000000000000000001171416571726300156360ustar00rootroot00000000000000version: 2 python: install: - path: . extra_requirements: - docs path-16.3.0/CHANGES.rst000066400000000000000000000342741416571726300143650ustar00rootroot00000000000000v16.3.0 ------- - Require Python 3.7 or later. - #205: test_listdir_other_encoding now automatically skips itself on file systems where it's not appropriate. v16.2.0 ------- - Deprecated passing bytes to ``write_text``. Instead, users should call ``write_bytes``. v16.1.0 ------- - #204: Improved test coverage across the package to 99%, fixing bugs in uncovered code along the way. v16.0.0 ------- - #200: ``TempDir`` context now cleans up unconditionally, even if an exception occurs. v15.1.2 ------- - #199: Fixed broken link in README. v15.1.1 ------- - Refreshed package metadata. v15.1.0 ------- - Added ``ExtantPath`` and ``ExtantFile`` objects that raise errors when they reference a non-existent path or file. 
v15.0.1 ------- - Refreshed package metadata. v15.0.0 ------- - Removed ``__version__`` property. To determine the version, use ``importlib.metadata.version('path')``. v14.0.1 ------- - Fixed regression on Python 3.7 and earlier where ``lru_cache`` did not support a user function. v14.0.0 ------- - Removed ``namebase`` property. Use ``stem`` instead. - Removed ``update`` parameter on method to ``Path.merge_tree``. Instead, to only copy newer files, provide a wrapped ``copy`` function, as described in the doc string. - Removed ``FastPath``. Just use ``Path``. - Removed ``path.CaseInsensitivePattern``. Instead use ``path.matchers.CaseInsensitive``. - Removed ``path.tempdir``. Use ``path.TempDir``. - #154: Added ``Traversal`` class and support for customizing the behavior of a ``Path.walk``. v13.3.0 ------- - #186: Fix test failures on Python 3.8 on Windows by relying on ``realpath()`` instead of ``readlink()``. - #189: ``realpath()`` now honors symlinks on Python 3.7 and earlier, approximating the behavior found on Python 3.8. - #187: ``lines()`` no longer relies on the deprecated ``.text()``. v13.2.0 ------- - Require Python 3.6 or later. v13.1.0 ------- - #170: Added ``read_text`` and ``read_bytes`` methods to align with ``pathlib`` behavior. Deprecated ``text`` method. If you require newline normalization of ``text``, use ``jaraco.text.normalize_newlines(Path.read_text())``. v13.0.0 ------- - #169: Renamed package from ``path.py`` to ``path``. The docs make reference to a pet name "path pie" for easier discovery. v12.5.0 ------- - #195: Project now depends on ``path``. v12.4.0 ------- - #169: Project now depends on ``path < 13.2``. - Fixed typo in README. v12.3.0 ------- - #169: Project is renamed to simply ``path``. This release of ``path.py`` simply depends on ``path < 13.1``. v12.2.0 ------- - #169: Moved project at GitHub from ``jaraco/path.py`` to ``jaraco/path``. v12.1.0 ------- - #171: Fixed exception in ``rmdir_p`` when target is not empty. 
- #174: Rely on ``importlib.metadata`` on Python 3.8. v12.0.2 ------- - Refreshed package metadata. 12.0.1 ------ - #166: Removed 'universal' wheel support. 12.0 --- - #148: Dropped support for Python 2.7 and 3.4. - Moved 'path' into a package. 11.5.2 ------ - #163: Corrected 'pymodules' typo in package declaration. 11.5.1 ------ - Minor packaging refresh. 11.5.0 ------ - #156: Re-wrote the handling of pattern matches for ``listdir``, ``walk``, and related methods, allowing the pattern to be a more complex object. This approach drastically simplifies the code and obviates the ``CaseInsensitivePattern`` and ``FastPath`` classes. Now the main ``Path`` class should be as performant as ``FastPath`` and case-insensitive matches can be readily constructed using the new ``path.matchers.CaseInsensitive`` class. 11.4.1 ------ - #153: Skip intermittently failing performance test on Python 2. 11.4.0 ------ - #130: Path.py now supports non-decodable filenames on Linux and Python 2, leveraging the `backports.os `_ package (as an optional dependency). Currently, only ``listdir`` is patched, but other ``os`` primitives may be patched similarly in the ``patch_for_linux_python2`` function. - #141: For merge_tree, instead of relying on the deprecated distutils module, implement merge_tree explicitly. The ``update`` parameter is deprecated, instead superseded by a ``copy_function`` parameter and an ``only_newer`` wrapper for any copy function. 11.3.0 ------ - #151: No longer use two techniques for splitting lines. Instead, unconditionally rely on io.open for universal newlines support and always use splitlines. 11.2.0 ------ - #146: Rely on `importlib_metadata `_ instead of setuptools/pkg_resources to load the version of the module. Added tests ensuring a <100ms import time for the ``path`` module. This change adds an explicit dependency on the importlib_metadata package, but the project still supports copying of the ``path.py`` module without any dependencies. 
11.1.0 ------ - #143, #144: Add iglob method. - #142, #145: Rename ``tempdir`` to ``TempDir`` and declare it as part of ``__all__``. Retain ``tempdir`` for compatibility for now. - #145: ``TempDir.__enter__`` no longer returns the ``TempDir`` instance, but instead returns a ``Path`` instance, suitable for entering to change the current working directory. 11.0.1 ------ - #136: Fixed test failures on BSD. - Refreshed package metadata. 11.0 ---- - Drop support for Python 3.3. 10.6 ---- - Renamed ``namebase`` to ``stem`` to match API of pathlib. Kept ``namebase`` as a deprecated alias for compatibility. - Added new ``with_suffix`` method, useful for renaming the extension on a Path:: orig = Path('mydir/mypath.bat') renamed = orig.rename(orig.with_suffix('.cmd')) 10.5 ---- - Packaging refresh and readme updates. 10.4 ---- - #130: Removed surrogate_escape handler as it's no longer used. 10.3.1 ------ - #124: Fixed ``rmdir_p`` raising ``FileNotFoundError`` when directory does not exist on Windows. 10.3 ---- - #115: Added a new performance-optimized implementation for listdir operations, optimizing ``listdir``, ``walk``, ``walkfiles``, ``walkdirs``, and ``fnmatch``, presented as the ``FastPath`` class. Please direct feedback on this implementation to the ticket, especially if the performance benefits justify it replacing the default ``Path`` class. 10.2 ---- - Symlink no longer requires the ``newlink`` parameter and will default to the basename of the target in the current working directory. 10.1 ---- - #123: Implement ``Path.__fspath__`` per PEP 519. 10.0 ---- - Once again as in 8.0 remove deprecated ``path.path``. 9.1 --- - #121: Removed workaround for #61 added in 5.2. ``path.py`` now only supports file system paths that can be effectively decoded to text. It is the responsibility of the system implementer to ensure that filenames on the system are decodeable by ``sys.getfilesystemencoding()``. 
9.0 --- - Drop support for Python 2.6 and 3.2 as integration dependencies (pip) no longer support these versions. 8.3 --- - Merge with latest skeleton, adding badges and test runs by default under tox instead of pytest-runner. - Documentation is no longer hosted with PyPI. 8.2.1 ----- - #112: Update Travis CI usage to only deploy on Python 3.5. 8.2 --- - Refreshed project metadata based on `jaraco's project skeleton `_. - Releases are now automatically published via Travis-CI. - #111: More aggressively trap errors when importing ``pkg_resources``. 8.1.2 ----- - #105: By using unicode literals, avoid errors rendering the backslash in __get_owner_windows. 8.1.1 ----- - #102: Reluctantly restored reference to path.path in ``__all__``. 8.1 --- - #102: Restored ``path.path`` with a DeprecationWarning. 8.0 --- Removed ``path.path``. Clients must now refer to the canonical name, ``path.Path`` as introduced in 6.2. 7.7 --- - #88: Added support for resolving certain directories on a system to platform-friendly locations using the `appdirs `_ library. The ``Path.special`` method returns an ``SpecialResolver`` instance that will resolve a path in a scope (i.e. 'site' or 'user') and class (i.e. 'config', 'cache', 'data'). For example, to create a config directory for "My App":: config_dir = Path.special("My App").user.config.makedirs_p() ``config_dir`` will exist in a user context and will be in a suitable platform-friendly location. As ``path.py`` does not currently have any dependencies, and to retain that expectation for a compatible upgrade path, ``appdirs`` must be installed to avoid an ImportError when invoking ``special``. - #88: In order to support "multipath" results, where multiple paths are returned in a single, ``os.pathsep``-separated string, a new class MultiPath now represents those special results. This functionality is experimental and may change. Feedback is invited. 7.6.2 ----- - Re-release of 7.6.1 without unintended feature. 
7.6.1 ----- - #101: Supress error when `path.py` is not present as a distribution. 7.6 --- - #100: Add ``merge_tree`` method for merging two existing directory trees. - Uses `setuptools_scm `_ for version management. 7.5 --- - #97: ``__rdiv__`` and ``__rtruediv__`` are now defined. 7.4 --- - #93: chown now appears in docs and raises NotImplementedError if ``os.chown`` isn't present. - #92: Added compatibility support for ``.samefile`` on platforms without ``os.samefile``. 7.3 --- - #91: Releases now include a universal wheel. 7.2 --- - In chmod, added support for multiple symbolic masks (separated by commas). - In chmod, fixed issue in setting of symbolic mask with '=' where unreferenced permissions were cleared. 7.1 --- - #23: Added support for symbolic masks to ``.chmod``. 7.0 --- - The ``open`` method now uses ``io.open`` and supports all of the parameters to that function. ``open`` will always raise an ``OSError`` on failure, even on Python 2. - Updated ``write_text`` to support additional newline patterns. - The ``text`` method now always returns text (never bytes), and thus requires an encoding parameter be supplied if the default encoding is not sufficient to decode the content of the file. 6.2 --- - ``path`` class renamed to ``Path``. The ``path`` name remains as an alias for compatibility. 6.1 --- - ``chown`` now accepts names in addition to numeric IDs. 6.0 --- - Drop support for Python 2.5. Python 2.6 or later required. - Installation now requires setuptools. 5.3 --- - Allow arbitrary callables to be passed to path.walk ``errors`` parameter. Enables workaround for issues such as #73 and #56. 5.2 --- - #61: path.listdir now decodes filenames from os.listdir when loading characters from a file. On Python 3, the behavior is unchanged. 
On Python 2, the behavior will now mimick that of Python 3, attempting to decode all filenames and paths using the encoding indicated by ``sys.getfilesystemencoding()``, and escaping any undecodable characters using the 'surrogateescape' handler. 5.1 --- - #53: Added ``path.in_place`` for editing files in place. 5.0 --- - ``path.fnmatch`` now takes an optional parameter ``normcase`` and this parameter defaults to self.module.normcase (using case normalization most pertinent to the path object itself). Note that this change means that any paths using a custom ntpath module on non-Windows systems will have different fnmatch behavior. Before:: # on Unix >>> p = path('Foo') >>> p.module = ntpath >>> p.fnmatch('foo') False After:: # on any OS >>> p = path('Foo') >>> p.module = ntpath >>> p.fnmatch('foo') True To maintain the original behavior, either don't define the 'module' for the path or supply explicit normcase function:: >>> p.fnmatch('foo', normcase=os.path.normcase) # result always varies based on OS, same as fnmatch.fnmatch For most use-cases, the default behavior should remain the same. - Issue #50: Methods that accept patterns (``listdir``, ``files``, ``dirs``, ``walk``, ``walkdirs``, ``walkfiles``, and ``fnmatch``) will now use a ``normcase`` attribute if it is present on the ``pattern`` parameter. The path module now provides a ``CaseInsensitivePattern`` wrapper for strings suitable for creating case-insensitive patterns for those methods. 4.4 --- - Issue #44: _hash method would open files in text mode, producing invalid results on Windows. Now files are opened in binary mode, producing consistent results. - Issue #47: Documentation is dramatically improved with Intersphinx links to the Python os.path functions and documentation for all methods and properties. 4.3 --- - Issue #32: Add ``chdir`` and ``cd`` methods. 4.2 --- - ``open()`` now passes all positional and keyword arguments through to the underlying ``builtins.open`` call. 
4.1 --- - Native Python 2 and Python 3 support without using 2to3 during the build process. 4.0 --- - Added a ``chunks()`` method to a allow quick iteration over pieces of a file at a given path. - Issue #28: Fix missing argument to ``samefile``. - Initializer no longer enforces `isinstance basestring` for the source object. Now any object that supplies ``__unicode__`` can be used by a ``path`` (except None). Clients that depend on a ValueError being raised for ``int`` and other non-string objects should trap these types internally. - Issue #30: ``chown`` no longer requires both uid and gid to be provided and will not mutate the ownership if nothing is provided. 3.2 --- - Issue #22: ``__enter__`` now returns self. 3.1 --- - Issue #20: `relpath` now supports a "start" parameter to match the signature of `os.path.relpath`. 3.0 --- - Minimum Python version is now 2.5. 2.6 --- - Issue #5: Implemented `path.tempdir`, which returns a path object which is a temporary directory and context manager for cleaning up the directory. - Issue #12: One can now construct path objects from a list of strings by simply using path.joinpath. For example:: path.joinpath('a', 'b', 'c') # or path.joinpath(*path_elements) 2.5 --- - Issue #7: Add the ability to do chaining of operations that formerly only returned None. - Issue #4: Raise a TypeError when constructed from None. path-16.3.0/Dockerfile000066400000000000000000000002471416571726300145460ustar00rootroot00000000000000from ubuntu:bionic RUN apt update RUN apt install -y python python-pip git RUN python -m pip install tox RUN mkdir /app ENV LANG=C.UTF-8 WORKDIR /app COPY . . CMD tox path-16.3.0/LICENSE000066400000000000000000000020321416571726300135530ustar00rootroot00000000000000Copyright Jason R. 
Coombs Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. path-16.3.0/MANIFEST.in000066400000000000000000000000161416571726300143040ustar00rootroot00000000000000include *.rst path-16.3.0/README.rst000066400000000000000000000130351416571726300142420ustar00rootroot00000000000000.. image:: https://img.shields.io/pypi/v/path.svg :target: `PyPI link`_ .. image:: https://img.shields.io/pypi/pyversions/path.svg :target: `PyPI link`_ .. _PyPI link: https://pypi.org/project/path .. image:: https://github.com/jaraco/path/workflows/tests/badge.svg :target: https://github.com/jaraco/path/actions?query=workflow%3A%22tests%22 :alt: tests .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: Code style: Black .. image:: https://readthedocs.org/projects/path/badge/?version=latest :target: https://path.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/skeleton-2021-informational :target: https://blog.jaraco.com/skeleton .. 
image:: https://tidelift.com/badges/package/pypi/path :target: https://tidelift.com/subscription/pkg/pypi-path?utm_source=pypi-path&utm_medium=readme ``path`` (aka path pie, formerly ``path.py``) implements path objects as first-class entities, allowing common operations on files to be invoked on those path objects directly. For example: .. code-block:: python from path import Path d = Path("/home/guido/bin") for f in d.files("*.py"): f.chmod(0o755) # Globbing for f in d.files("*.py"): f.chmod("u+rwx") # Changing the working directory: with Path("somewhere"): # cwd in now `somewhere` ... # Concatenate paths with / foo_txt = Path("bar") / "foo.txt" Path pie is `hosted at Github `_. Find `the documentation here `_. Guides and Testimonials ======================= Yasoob wrote the Python 101 `Writing a Cleanup Script `_ based on ``path``. Advantages ========== Python 3.4 introduced `pathlib `_, which shares many characteristics with ``path``. In particular, it provides an object encapsulation for representing filesystem paths. One may have imagined ``pathlib`` would supersede ``path``. But the implementation and the usage quickly diverge, and ``path`` has several advantages over ``pathlib``: - ``path`` implements ``Path`` objects as a subclass of ``str``, and as a result these ``Path`` objects may be passed directly to other APIs that expect simple text representations of paths, whereas with ``pathlib``, one must first cast values to strings before passing them to APIs unaware of ``pathlib``. This shortcoming was `addressed by PEP 519 `_, in Python 3.6. - ``path`` goes beyond exposing basic functionality of a path and exposes commonly-used behaviors on a path, providing methods like ``rmtree`` (from shlib) and ``remove_p`` (remove a file if it exists). - As a PyPI-hosted package, ``path`` is free to iterate faster than a stdlib package. Contributions are welcome and encouraged. 
- ``path`` provides a uniform abstraction over its Path object, freeing the implementer to subclass it readily. One cannot subclass a ``pathlib.Path`` to add functionality, but must subclass ``Path``, ``PosixPath``, and ``WindowsPath``, even if one only wishes to add a ``__dict__`` to the subclass instances. ``path`` instead allows the ``Path.module`` object to be overridden by subclasses, defaulting to the ``os.path``. Even advanced uses of ``path.Path`` that subclass the model do not need to be concerned with OS-specific nuances. This path project has the explicit aim to provide compatibility with ``pathlib`` objects where possible, such that a ``path.Path`` object is a drop-in replacement for ``pathlib.Path*`` objects. This project welcomes contributions to improve that compatibility where it's lacking. Alternatives ============ In addition to `pathlib `_, the `pylib project `_ implements a `LocalPath `_ class, which shares some behaviors and interfaces with ``path``. Development =========== To install a development version, use the Github links to clone or download a snapshot of the latest code. Alternatively, if you have git installed, you may be able to use ``pip`` to install directly from the repository:: pip install git+https://github.com/jaraco/path.git Testing ======= Tests are invoked with `tox `_. After having installed tox, simply invoke ``tox`` in a checkout of the repo to invoke the tests. Tests are also run in continuous integration. See the badges above for links to the CI runs. Releasing ========= Tagged releases are automatically published to PyPI by Azure Pipelines, assuming the tests pass. Origins ======= The ``path.py`` project was initially released in 2003 by Jason Orendorff and has been continuously developed and supported by several maintainers over the years. For Enterprise ============== Available as part of the Tidelift Subscription. 
This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. `Learn more `_. Security Contact ================ To report a security vulnerability, please use the `Tidelift security contact `_. Tidelift will coordinate the fix and disclosure. path-16.3.0/docs/000077500000000000000000000000001416571726300135015ustar00rootroot00000000000000path-16.3.0/docs/Makefile000066400000000000000000000151551416571726300151500ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = -W SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." 
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pathpy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pathpy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/pathpy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pathpy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." path-16.3.0/docs/api.rst000066400000000000000000000002551416571726300150060ustar00rootroot00000000000000=== API === .. important:: The documented methods' signatures are not always correct. See :class:`path.Path`. .. 
automodule:: path :members: :undoc-members: path-16.3.0/docs/conf.py000066400000000000000000000020611416571726300147770ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker'] master_doc = "index" pygments_style = "sphinx" link_files = { '../CHANGES.rst': dict( using=dict(GH='https://github.com'), replace=[ dict( pattern=r'(Issue #|\B#)(?P\d+)', url='{package_url}/issues/{issue}', ), dict( pattern=r'(?m:^((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n)', with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', ), dict( pattern=r'PEP[- ](?P\d+)', url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', ), ], ) } # Be strict about any broken references: nitpicky = True # Include Python intersphinx mapping to prevent failures # jaraco/skeleton#51 extensions += ['sphinx.ext.intersphinx'] intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), } extensions += ['jaraco.tidelift'] path-16.3.0/docs/history.rst000066400000000000000000000001211416571726300157260ustar00rootroot00000000000000:tocdepth: 1 .. _changes: History ******* .. include:: ../CHANGES (links).rst path-16.3.0/docs/index.rst000066400000000000000000000006051416571726300153430ustar00rootroot00000000000000Welcome to |project| documentation! =================================== .. toctree:: :maxdepth: 1 api history .. tidelift-referral-banner:: Thanks to Mahan Marwat for transferring the ``path`` name on Read The Docs from `path `_ to this project. 
Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` path-16.3.0/mypy.ini000066400000000000000000000000451416571726300142470ustar00rootroot00000000000000[mypy] ignore_missing_imports = True path-16.3.0/path/000077500000000000000000000000001416571726300135055ustar00rootroot00000000000000path-16.3.0/path/__init__.py000066400000000000000000001400061416571726300156170ustar00rootroot00000000000000""" Path Pie Implements ``path.Path`` - An object representing a path to a file or directory. Example:: from path import Path d = Path('/home/guido/bin') # Globbing for f in d.files('*.py'): f.chmod(0o755) # Changing the working directory: with Path("somewhere"): # cwd in now `somewhere` ... # Concatenate paths with / foo_txt = Path("bar") / "foo.txt" """ import sys import warnings import os import fnmatch import glob import shutil import hashlib import errno import tempfile import functools import re import contextlib import io import importlib import itertools with contextlib.suppress(ImportError): import win32security with contextlib.suppress(ImportError): import pwd with contextlib.suppress(ImportError): import grp from . import matchers from . import masks from . import classes from .py37compat import best_realpath, lru_cache __all__ = ['Path', 'TempDir'] LINESEPS = ['\r\n', '\r', '\n'] U_LINESEPS = LINESEPS + ['\u0085', '\u2028', '\u2029'] B_NEWLINE = re.compile('|'.join(LINESEPS).encode()) U_NEWLINE = re.compile('|'.join(U_LINESEPS)) B_NL_END = re.compile(B_NEWLINE.pattern + b'$') U_NL_END = re.compile(U_NEWLINE.pattern + '$') class TreeWalkWarning(Warning): pass class Traversal: """ Wrap a walk result to customize the traversal. `follow` is a function that takes an item and returns True if that item should be followed and False otherwise. 
For example, to avoid traversing into directories that begin with `.`: >>> traverse = Traversal(lambda dir: not dir.startswith('.')) >>> items = list(traverse(Path('.').walk())) Directories beginning with `.` will appear in the results, but their children will not. >>> dot_dir = next(item for item in items if item.isdir() and item.startswith('.')) >>> any(item.parent == dot_dir for item in items) False """ def __init__(self, follow): self.follow = follow def __call__(self, walker): traverse = None while True: try: item = walker.send(traverse) except StopIteration: return yield item traverse = functools.partial(self.follow, item) class Path(str): """ Represents a filesystem path. For documentation on individual methods, consult their counterparts in :mod:`os.path`. Some methods are additionally included from :mod:`shutil`. The functions are linked directly into the class namespace such that they will be bound to the Path instance. For example, ``Path(src).copy(target)`` is equivalent to ``shutil.copy(src, target)``. Therefore, when referencing the docs for these methods, assume `src` references `self`, the Path instance. """ module = os.path """ The path module to use for path operations. .. seealso:: :mod:`os.path` """ def __init__(self, other=''): if other is None: raise TypeError("Invalid initial value for path: None") with contextlib.suppress(AttributeError): self._validate() @classmethod @lru_cache def using_module(cls, module): subclass_name = cls.__name__ + '_' + module.__name__ bases = (cls,) ns = {'module': module} return type(subclass_name, bases, ns) @classes.ClassProperty @classmethod def _next_class(cls): """ What class should be used to construct new instances from this class """ return cls # --- Special Python methods. def __repr__(self): return '%s(%s)' % (type(self).__name__, super(Path, self).__repr__()) # Adding a Path and a string yields a Path. 
def __add__(self, more): return self._next_class(super(Path, self).__add__(more)) def __radd__(self, other): return self._next_class(other.__add__(self)) # The / operator joins Paths. def __div__(self, rel): """fp.__div__(rel) == fp / rel == fp.joinpath(rel) Join two path components, adding a separator character if needed. .. seealso:: :func:`os.path.join` """ return self._next_class(self.module.join(self, rel)) # Make the / operator work even when true division is enabled. __truediv__ = __div__ # The / operator joins Paths the other way around def __rdiv__(self, rel): """fp.__rdiv__(rel) == rel / fp Join two path components, adding a separator character if needed. .. seealso:: :func:`os.path.join` """ return self._next_class(self.module.join(rel, self)) # Make the / operator work even when true division is enabled. __rtruediv__ = __rdiv__ def __enter__(self): self._old_dir = self.getcwd() os.chdir(self) return self def __exit__(self, *_): os.chdir(self._old_dir) @classmethod def getcwd(cls): """Return the current working directory as a path object. .. seealso:: :func:`os.getcwd` """ return cls(os.getcwd()) # # --- Operations on Path strings. def abspath(self): """.. seealso:: :func:`os.path.abspath`""" return self._next_class(self.module.abspath(self)) def normcase(self): """.. seealso:: :func:`os.path.normcase`""" return self._next_class(self.module.normcase(self)) def normpath(self): """.. seealso:: :func:`os.path.normpath`""" return self._next_class(self.module.normpath(self)) def realpath(self): """.. seealso:: :func:`os.path.realpath`""" realpath = best_realpath(self.module) return self._next_class(realpath(self)) def expanduser(self): """.. seealso:: :func:`os.path.expanduser`""" return self._next_class(self.module.expanduser(self)) def expandvars(self): """.. seealso:: :func:`os.path.expandvars`""" return self._next_class(self.module.expandvars(self)) def dirname(self): """.. 
seealso:: :attr:`parent`, :func:`os.path.dirname`""" return self._next_class(self.module.dirname(self)) def basename(self): """.. seealso:: :attr:`name`, :func:`os.path.basename`""" return self._next_class(self.module.basename(self)) def expand(self): """Clean up a filename by calling :meth:`expandvars()`, :meth:`expanduser()`, and :meth:`normpath()` on it. This is commonly everything needed to clean up a filename read from a configuration file, for example. """ return self.expandvars().expanduser().normpath() @property def stem(self): """The same as :meth:`name`, but with one file extension stripped off. >>> Path('/home/guido/python.tar.gz').stem 'python.tar' """ base, ext = self.module.splitext(self.name) return base @property def ext(self): """The file extension, for example ``'.py'``.""" f, ext = self.module.splitext(self) return ext def with_suffix(self, suffix): """Return a new path with the file suffix changed (or added, if none) >>> Path('/home/guido/python.tar.gz').with_suffix(".foo") Path('/home/guido/python.tar.foo') >>> Path('python').with_suffix('.zip') Path('python.zip') >>> Path('filename.ext').with_suffix('zip') Traceback (most recent call last): ... ValueError: Invalid suffix 'zip' """ if not suffix.startswith('.'): raise ValueError("Invalid suffix {suffix!r}".format(**locals())) return self.stripext() + suffix @property def drive(self): """The drive specifier, for example ``'C:'``. This is always empty on systems that don't use drive specifiers. """ drive, r = self.module.splitdrive(self) return self._next_class(drive) parent = property( dirname, None, None, """ This path's parent directory, as a new Path object. For example, ``Path('/usr/local/lib/libpython.so').parent == Path('/usr/local/lib')`` .. seealso:: :meth:`dirname`, :func:`os.path.dirname` """, ) name = property( basename, None, None, """ The name of this file or directory without the full path. For example, ``Path('/usr/local/lib/libpython.so').name == 'libpython.so'`` .. 
seealso:: :meth:`basename`, :func:`os.path.basename` """, ) def splitpath(self): """Return two-tuple of ``.parent``, ``.name``. .. seealso:: :attr:`parent`, :attr:`name`, :func:`os.path.split` """ parent, child = self.module.split(self) return self._next_class(parent), child def splitdrive(self): """Return two-tuple of ``.drive`` and rest without drive. Split the drive specifier from this path. If there is no drive specifier, :samp:`{p.drive}` is empty, so the return value is simply ``(Path(''), p)``. This is always the case on Unix. .. seealso:: :func:`os.path.splitdrive` """ drive, rel = self.module.splitdrive(self) return self._next_class(drive), self._next_class(rel) def splitext(self): """Return two-tuple of ``.stripext()`` and ``.ext``. Split the filename extension from this path and return the two parts. Either part may be empty. The extension is everything from ``'.'`` to the end of the last path segment. This has the property that if ``(a, b) == p.splitext()``, then ``a + b == p``. .. seealso:: :func:`os.path.splitext` """ filename, ext = self.module.splitext(self) return self._next_class(filename), ext def stripext(self): """Remove one file extension from the path. For example, ``Path('/home/guido/python.tar.gz').stripext()`` returns ``Path('/home/guido/python.tar')``. """ return self.splitext()[0] @classes.multimethod def joinpath(cls, first, *others): """ Join first to zero or more :class:`Path` components, adding a separator character (:samp:`{first}.module.sep`) if needed. Returns a new instance of :samp:`{first}._next_class`. .. seealso:: :func:`os.path.join` """ return cls._next_class(cls.module.join(first, *others)) def splitall(self): r"""Return a list of the path components in this path. The first item in the list will be a Path. Its value will be either :data:`os.curdir`, :data:`os.pardir`, empty, or the root directory of this path (for example, ``'/'`` or ``'C:\\'``). The other items in the list will be strings. 
``Path.joinpath(*result)`` will yield the original path. >>> Path('/foo/bar/baz').splitall() [Path('/'), 'foo', 'bar', 'baz'] """ return list(self._parts()) def parts(self): """ >>> Path('/foo/bar/baz').parts() (Path('/'), 'foo', 'bar', 'baz') """ return tuple(self._parts()) def _parts(self): return reversed(tuple(self._parts_iter())) def _parts_iter(self): loc = self while loc != os.curdir and loc != os.pardir: prev = loc loc, child = prev.splitpath() if loc == prev: break yield child yield loc def relpath(self, start='.'): """Return this path as a relative path, based from `start`, which defaults to the current working directory. """ cwd = self._next_class(start) return cwd.relpathto(self) def relpathto(self, dest): """Return a relative path from `self` to `dest`. If there is no relative path from `self` to `dest`, for example if they reside on different drives in Windows, then this returns ``dest.abspath()``. """ origin = self.abspath() dest = self._next_class(dest).abspath() orig_list = origin.normcase().splitall() # Don't normcase dest! We want to preserve the case. dest_list = dest.splitall() if orig_list[0] != self.module.normcase(dest_list[0]): # Can't get here from there. return dest # Find the location where the two paths start to differ. i = 0 for start_seg, dest_seg in zip(orig_list, dest_list): if start_seg != self.module.normcase(dest_seg): break i += 1 # Now i is the point where the two paths diverge. # Need a certain number of "os.pardir"s to work up # from the origin to the point of divergence. segments = [os.pardir] * (len(orig_list) - i) # Need to add the diverging part of dest_list. segments += dest_list[i:] if len(segments) == 0: # If they happen to be identical, use os.curdir. relpath = os.curdir else: relpath = self.module.join(*segments) return self._next_class(relpath) # --- Listing, searching, walking, and matching def listdir(self, match=None): """List of items in this directory. 
Use :meth:`files` or :meth:`dirs` instead if you want a listing of just files or just subdirectories. The elements of the list are Path objects. With the optional `match` argument, a callable, only return items whose names match the given pattern. .. seealso:: :meth:`files`, :meth:`dirs` """ match = matchers.load(match) return list(filter(match, (self / child for child in os.listdir(self)))) def dirs(self, *args, **kwargs): """List of this directory's subdirectories. The elements of the list are Path objects. This does not walk recursively into subdirectories (but see :meth:`walkdirs`). Accepts parameters to :meth:`listdir`. """ return [p for p in self.listdir(*args, **kwargs) if p.isdir()] def files(self, *args, **kwargs): """List of the files in self. The elements of the list are Path objects. This does not walk into subdirectories (see :meth:`walkfiles`). Accepts parameters to :meth:`listdir`. """ return [p for p in self.listdir(*args, **kwargs) if p.isfile()] def walk(self, match=None, errors='strict'): """Iterator over files and subdirs, recursively. The iterator yields Path objects naming each child item of this directory and its descendants. This requires that ``D.isdir()``. This performs a depth-first traversal of the directory tree. Each directory is returned just before all its children. The `errors=` keyword argument controls behavior when an error occurs. The default is ``'strict'``, which causes an exception. Other allowed values are ``'warn'`` (which reports the error via :func:`warnings.warn()`), and ``'ignore'``. `errors` may also be an arbitrary callable taking a msg parameter. 
""" errors = Handlers._resolve(errors) match = matchers.load(match) try: childList = self.listdir() except Exception as exc: errors(f"Unable to list directory '{self}': {exc}") return for child in childList: traverse = None if match(child): traverse = yield child traverse = traverse or child.isdir try: do_traverse = traverse() except Exception as exc: errors(f"Unable to access '{child}': {exc}") continue if do_traverse: for item in child.walk(errors=errors, match=match): yield item def walkdirs(self, *args, **kwargs): """Iterator over subdirs, recursively.""" return (item for item in self.walk(*args, **kwargs) if item.isdir()) def walkfiles(self, *args, **kwargs): """Iterator over files, recursively.""" return (item for item in self.walk(*args, **kwargs) if item.isfile()) def fnmatch(self, pattern, normcase=None): """Return ``True`` if `self.name` matches the given `pattern`. `pattern` - A filename pattern with wildcards, for example ``'*.py'``. If the pattern contains a `normcase` attribute, it is applied to the name and path prior to comparison. `normcase` - (optional) A function used to normalize the pattern and filename before matching. Defaults to normcase from ``self.module``, :func:`os.path.normcase`. .. seealso:: :func:`fnmatch.fnmatch` """ default_normcase = getattr(pattern, 'normcase', self.module.normcase) normcase = normcase or default_normcase name = normcase(self.name) pattern = normcase(pattern) return fnmatch.fnmatchcase(name, pattern) def glob(self, pattern): """Return a list of Path objects that match the pattern. `pattern` - a path relative to this directory, with wildcards. For example, ``Path('/users').glob('*/bin/*')`` returns a list of all the files users have in their :file:`bin` directories. .. seealso:: :func:`glob.glob` .. note:: Glob is **not** recursive, even when using ``**``. To do recursive globbing see :func:`walk`, :func:`walkdirs` or :func:`walkfiles`. 
""" cls = self._next_class return [cls(s) for s in glob.glob(self / pattern)] def iglob(self, pattern): """Return an iterator of Path objects that match the pattern. `pattern` - a path relative to this directory, with wildcards. For example, ``Path('/users').iglob('*/bin/*')`` returns an iterator of all the files users have in their :file:`bin` directories. .. seealso:: :func:`glob.iglob` .. note:: Glob is **not** recursive, even when using ``**``. To do recursive globbing see :func:`walk`, :func:`walkdirs` or :func:`walkfiles`. """ cls = self._next_class return (cls(s) for s in glob.iglob(self / pattern)) # # --- Reading or writing an entire file at once. def open(self, *args, **kwargs): """Open this file and return a corresponding file object. Keyword arguments work as in :func:`io.open`. If the file cannot be opened, an :class:`OSError` is raised. """ return io.open(self, *args, **kwargs) def bytes(self): """Open this file, read all bytes, return them as a string.""" with self.open('rb') as f: return f.read() def chunks(self, size, *args, **kwargs): """Returns a generator yielding chunks of the file, so it can be read piece by piece with a simple for loop. Any argument you pass after `size` will be passed to :meth:`open`. :example: >>> hash = hashlib.md5() >>> for chunk in Path("CHANGES.rst").chunks(8192, mode='rb'): ... hash.update(chunk) This will read the file by chunks of 8192 bytes. """ with self.open(*args, **kwargs) as f: for chunk in iter(lambda: f.read(size) or None, None): yield chunk def write_bytes(self, bytes, append=False): """Open this file and write the given bytes to it. Default behavior is to overwrite any existing file. Call ``p.write_bytes(bytes, append=True)`` to append instead. """ with self.open('ab' if append else 'wb') as f: f.write(bytes) def read_text(self, encoding=None, errors=None): r"""Open this file, read it in, return the content as a string. Optional parameters are passed to :meth:`open`. .. 
seealso:: :meth:`lines` """ with self.open(encoding=encoding, errors=errors) as f: return f.read() def read_bytes(self): r"""Return the contents of this file as bytes.""" with self.open(mode='rb') as f: return f.read() def text(self, encoding=None, errors='strict'): r"""Legacy function to read text. Converts all newline sequences to ``\n``. """ warnings.warn(".text is deprecated; use read_text", DeprecationWarning) return U_NEWLINE.sub('\n', self.read_text(encoding, errors)) def write_text( self, text, encoding=None, errors='strict', linesep=os.linesep, append=False ): r"""Write the given text to this file. The default behavior is to overwrite any existing file; to append instead, use the `append=True` keyword argument. There are two differences between :meth:`write_text` and :meth:`write_bytes`: newline handling and Unicode handling. See below. Parameters: `text` - str/bytes - The text to be written. `encoding` - str - The text encoding used. `errors` - str - How to handle Unicode encoding errors. Default is ``'strict'``. See ``help(unicode.encode)`` for the options. Ignored if `text` isn't a Unicode string. `linesep` - keyword argument - str/unicode - The sequence of characters to be used to mark end-of-line. The default is :data:`os.linesep`. Specify ``None`` to use newlines unmodified. `append` - keyword argument - bool - Specifies what to do if the file already exists (``True``: append to the end of it; ``False``: overwrite it). The default is ``False``. --- Newline handling. ``write_text()`` converts all standard end-of-line sequences (``'\n'``, ``'\r'``, and ``'\r\n'``) to your platform's default end-of-line sequence (see :data:`os.linesep`; on Windows, for example, the end-of-line marker is ``'\r\n'``). To override the platform's default, pass the `linesep=` keyword argument. To preserve the newlines as-is, pass ``linesep=None``. 
This handling applies to Unicode text and bytes, except with Unicode, additional non-ASCII newlines are recognized: ``\x85``, ``\r\x85``, and ``\u2028``. --- Unicode If `text` isn't Unicode, then apart from newline handling, the bytes are written verbatim to the file. The `encoding` and `errors` arguments are not used and must be omitted. If `text` is Unicode, it is first converted to :func:`bytes` using the specified `encoding` (or the default encoding if `encoding` isn't specified). The `errors` argument applies only to this conversion. """ if isinstance(text, str): if linesep is not None: text = U_NEWLINE.sub(linesep, text) bytes = text.encode(encoding or sys.getdefaultencoding(), errors) else: warnings.warn( "Writing bytes in write_text is deprecated", DeprecationWarning, stacklevel=1, ) assert encoding is None if linesep is not None: text = B_NEWLINE.sub(linesep.encode(), text) bytes = text self.write_bytes(bytes, append=append) def lines(self, encoding=None, errors=None, retain=True): r"""Open this file, read all lines, return them in a list. Optional arguments: `encoding` - The Unicode encoding (or character set) of the file. The default is ``None``, meaning use ``locale.getpreferredencoding()``. `errors` - How to handle Unicode errors; see `open `_ for the options. Default is ``None`` meaning "strict". `retain` - If ``True`` (default), retain newline characters, but translate all newline characters to ``\n``. If ``False``, newline characters are omitted. .. seealso:: :meth:`text` """ text = U_NEWLINE.sub('\n', self.read_text(encoding, errors)) return text.splitlines(retain) def write_lines( self, lines, encoding=None, errors='strict', linesep=os.linesep, append=False ): r"""Write the given lines of text to this file. By default this overwrites any existing file at this path. This puts a platform-specific newline sequence on every line. See `linesep` below. `lines` - A list of strings. `encoding` - A Unicode encoding to use. 
This applies only if `lines` contains any Unicode strings. `errors` - How to handle errors in Unicode encoding. This also applies only to Unicode strings. linesep - The desired line-ending. This line-ending is applied to every line. If a line already has any standard line ending (``'\r'``, ``'\n'``, ``'\r\n'``, ``u'\x85'``, ``u'\r\x85'``, ``u'\u2028'``), that will be stripped off and this will be used instead. The default is os.linesep, which is platform-dependent (``'\r\n'`` on Windows, ``'\n'`` on Unix, etc.). Specify ``None`` to write the lines as-is, like ``.writelines`` on a file object. Use the keyword argument ``append=True`` to append lines to the file. The default is to overwrite the file. .. warning :: When you use this with Unicode data, if the encoding of the existing data in the file is different from the encoding you specify with the `encoding=` parameter, the result is mixed-encoding data, which can really confuse someone trying to read the file later. """ with self.open('ab' if append else 'wb') as f: for line in lines: isUnicode = isinstance(line, str) if linesep is not None: pattern = U_NL_END if isUnicode else B_NL_END line = pattern.sub('', line) + linesep if isUnicode: line = line.encode(encoding or sys.getdefaultencoding(), errors) f.write(line) def read_md5(self): """Calculate the md5 hash for this file. This reads through the entire file. .. seealso:: :meth:`read_hash` """ return self.read_hash('md5') def _hash(self, hash_name): """Returns a hash object for the file at the current path. `hash_name` should be a hash algo name (such as ``'md5'`` or ``'sha1'``) that's available in the :mod:`hashlib` module. """ m = hashlib.new(hash_name) for chunk in self.chunks(8192, mode="rb"): m.update(chunk) return m def read_hash(self, hash_name): """Calculate given hash for this file. List of supported hashes can be obtained from :mod:`hashlib` package. This reads the entire file. .. 
seealso:: :meth:`hashlib.hash.digest` """ return self._hash(hash_name).digest() def read_hexhash(self, hash_name): """Calculate given hash for this file, returning hexdigest. List of supported hashes can be obtained from :mod:`hashlib` package. This reads the entire file. .. seealso:: :meth:`hashlib.hash.hexdigest` """ return self._hash(hash_name).hexdigest() # --- Methods for querying the filesystem. # N.B. On some platforms, the os.path functions may be implemented in C # (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get # bound. Playing it safe and wrapping them all in method calls. def isabs(self): """ >>> Path('.').isabs() False .. seealso:: :func:`os.path.isabs` """ return self.module.isabs(self) def exists(self): """.. seealso:: :func:`os.path.exists`""" return self.module.exists(self) def isdir(self): """.. seealso:: :func:`os.path.isdir`""" return self.module.isdir(self) def isfile(self): """.. seealso:: :func:`os.path.isfile`""" return self.module.isfile(self) def islink(self): """.. seealso:: :func:`os.path.islink`""" return self.module.islink(self) def ismount(self): """ >>> Path('.').ismount() False .. seealso:: :func:`os.path.ismount` """ return self.module.ismount(self) def samefile(self, other): """.. seealso:: :func:`os.path.samefile`""" return self.module.samefile(self, other) def getatime(self): """.. seealso:: :attr:`atime`, :func:`os.path.getatime`""" return self.module.getatime(self) atime = property( getatime, None, None, """ Last access time of the file. >>> Path('.').atime > 0 True .. seealso:: :meth:`getatime`, :func:`os.path.getatime` """, ) def getmtime(self): """.. seealso:: :attr:`mtime`, :func:`os.path.getmtime`""" return self.module.getmtime(self) mtime = property( getmtime, None, None, """ Last modified time of the file. .. seealso:: :meth:`getmtime`, :func:`os.path.getmtime` """, ) def getctime(self): """.. 
seealso:: :attr:`ctime`, :func:`os.path.getctime`""" return self.module.getctime(self) ctime = property( getctime, None, None, """ Creation time of the file. .. seealso:: :meth:`getctime`, :func:`os.path.getctime` """, ) def getsize(self): """.. seealso:: :attr:`size`, :func:`os.path.getsize`""" return self.module.getsize(self) size = property( getsize, None, None, """ Size of the file, in bytes. .. seealso:: :meth:`getsize`, :func:`os.path.getsize` """, ) def access(self, *args, **kwargs): """ Return does the real user have access to this path. >>> Path('.').access(os.F_OK) True .. seealso:: :func:`os.access` """ return os.access(self, *args, **kwargs) def stat(self): """ Perform a ``stat()`` system call on this path. >>> Path('.').stat() os.stat_result(...) .. seealso:: :meth:`lstat`, :func:`os.stat` """ return os.stat(self) def lstat(self): """ Like :meth:`stat`, but do not follow symbolic links. >>> Path('.').lstat() == Path('.').stat() True .. seealso:: :meth:`stat`, :func:`os.lstat` """ return os.lstat(self) def __get_owner_windows(self): # pragma: nocover r""" Return the name of the owner of this file or directory. Follow symbolic links. Return a name of the form ``DOMAIN\User Name``; may be a group. .. seealso:: :attr:`owner` """ desc = win32security.GetFileSecurity( self, win32security.OWNER_SECURITY_INFORMATION ) sid = desc.GetSecurityDescriptorOwner() account, domain, typecode = win32security.LookupAccountSid(None, sid) return domain + '\\' + account def __get_owner_unix(self): # pragma: nocover """ Return the name of the owner of this file or directory. Follow symbolic links. .. 
seealso:: :attr:`owner` """ st = self.stat() return pwd.getpwuid(st.st_uid).pw_name def __get_owner_not_implemented(self): # pragma: nocover raise NotImplementedError("Ownership not available on this platform.") get_owner = ( __get_owner_windows if 'win32security' in globals() else __get_owner_unix if 'pwd' in globals() else __get_owner_not_implemented ) owner = property( get_owner, None, None, """ Name of the owner of this file or directory. .. seealso:: :meth:`get_owner`""", ) if hasattr(os, 'statvfs'): def statvfs(self): """Perform a ``statvfs()`` system call on this path. .. seealso:: :func:`os.statvfs` """ return os.statvfs(self) if hasattr(os, 'pathconf'): def pathconf(self, name): """.. seealso:: :func:`os.pathconf`""" return os.pathconf(self, name) # # --- Modifying operations on files and directories def utime(self, *args, **kwargs): """Set the access and modified times of this file. .. seealso:: :func:`os.utime` """ os.utime(self, *args, **kwargs) return self def chmod(self, mode): """ Set the mode. May be the new mode (os.chmod behavior) or a `symbolic mode `_. .. seealso:: :func:`os.chmod` """ if isinstance(mode, str): mask = masks.compound(mode) mode = mask(self.stat().st_mode) os.chmod(self, mode) return self if hasattr(os, 'chown'): def chown(self, uid=-1, gid=-1): """ Change the owner and group by names or numbers. .. seealso:: :func:`os.chown` """ def resolve_uid(uid): return uid if isinstance(uid, int) else pwd.getpwnam(uid).pw_uid def resolve_gid(gid): return gid if isinstance(gid, int) else grp.getgrnam(gid).gr_gid os.chown(self, resolve_uid(uid), resolve_gid(gid)) return self def rename(self, new): """.. seealso:: :func:`os.rename`""" os.rename(self, new) return self._next_class(new) def renames(self, new): """.. seealso:: :func:`os.renames`""" os.renames(self, new) return self._next_class(new) # # --- Create/delete operations on directories def mkdir(self, mode=0o777): """.. 
seealso:: :func:`os.mkdir`""" os.mkdir(self, mode) return self def mkdir_p(self, mode=0o777): """Like :meth:`mkdir`, but does not raise an exception if the directory already exists.""" with contextlib.suppress(FileExistsError): self.mkdir(mode) return self def makedirs(self, mode=0o777): """.. seealso:: :func:`os.makedirs`""" os.makedirs(self, mode) return self def makedirs_p(self, mode=0o777): """Like :meth:`makedirs`, but does not raise an exception if the directory already exists.""" with contextlib.suppress(FileExistsError): self.makedirs(mode) return self def rmdir(self): """.. seealso:: :func:`os.rmdir`""" os.rmdir(self) return self def rmdir_p(self): """Like :meth:`rmdir`, but does not raise an exception if the directory is not empty or does not exist.""" suppressed = FileNotFoundError, FileExistsError, DirectoryNotEmpty with contextlib.suppress(suppressed): with DirectoryNotEmpty.translate(): self.rmdir() return self def removedirs(self): """.. seealso:: :func:`os.removedirs`""" os.removedirs(self) return self def removedirs_p(self): """Like :meth:`removedirs`, but does not raise an exception if the directory is not empty or does not exist.""" with contextlib.suppress(FileExistsError, DirectoryNotEmpty): with DirectoryNotEmpty.translate(): self.removedirs() return self # --- Modifying operations on files def touch(self): """Set the access/modified times of this file to the current time. Create the file if it does not exist. """ fd = os.open(self, os.O_WRONLY | os.O_CREAT, 0o666) os.close(fd) os.utime(self, None) return self def remove(self): """.. seealso:: :func:`os.remove`""" os.remove(self) return self def remove_p(self): """Like :meth:`remove`, but does not raise an exception if the file does not exist.""" with contextlib.suppress(FileNotFoundError): self.unlink() return self unlink = remove unlink_p = remove_p # --- Links def link(self, newpath): """Create a hard link at `newpath`, pointing to this file. .. 
seealso:: :func:`os.link` """ os.link(self, newpath) return self._next_class(newpath) def symlink(self, newlink=None): """Create a symbolic link at `newlink`, pointing here. If newlink is not supplied, the symbolic link will assume the name self.basename(), creating the link in the cwd. .. seealso:: :func:`os.symlink` """ if newlink is None: newlink = self.basename() os.symlink(self, newlink) return self._next_class(newlink) def readlink(self): """Return the path to which this symbolic link points. The result may be an absolute or a relative path. .. seealso:: :meth:`readlinkabs`, :func:`os.readlink` """ return self._next_class(os.readlink(self)) def readlinkabs(self): """Return the path to which this symbolic link points. The result is always an absolute path. .. seealso:: :meth:`readlink`, :func:`os.readlink` """ p = self.readlink() return p if p.isabs() else (self.parent / p).abspath() # High-level functions from shutil # These functions will be bound to the instance such that # Path(name).copy(target) will invoke shutil.copy(name, target) copyfile = shutil.copyfile copymode = shutil.copymode copystat = shutil.copystat copy = shutil.copy copy2 = shutil.copy2 copytree = shutil.copytree if hasattr(shutil, 'move'): move = shutil.move rmtree = shutil.rmtree def rmtree_p(self): """Like :meth:`rmtree`, but does not raise an exception if the directory does not exist.""" with contextlib.suppress(FileNotFoundError): self.rmtree() return self def chdir(self): """.. seealso:: :func:`os.chdir`""" os.chdir(self) cd = chdir def merge_tree( self, dst, symlinks=False, *, copy_function=shutil.copy2, ignore=lambda dir, contents: [], ): """ Copy entire contents of self to dst, overwriting existing contents in dst with those in self. Pass ``symlinks=True`` to copy symbolic links as links. Accepts a ``copy_function``, similar to copytree. To avoid overwriting newer files, supply a copy function wrapped in ``only_newer``. 
For example:: src.merge_tree(dst, copy_function=only_newer(shutil.copy2)) """ dst = self._next_class(dst) dst.makedirs_p() sources = self.listdir() _ignored = ignore(self, [item.name for item in sources]) def ignored(item): return item.name in _ignored for source in itertools.filterfalse(ignored, sources): dest = dst / source.name if symlinks and source.islink(): target = source.readlink() target.symlink(dest) elif source.isdir(): source.merge_tree( dest, symlinks=symlinks, copy_function=copy_function, ignore=ignore, ) else: copy_function(source, dest) self.copystat(dst) # # --- Special stuff from os if hasattr(os, 'chroot'): def chroot(self): # pragma: nocover """.. seealso:: :func:`os.chroot`""" os.chroot(self) if hasattr(os, 'startfile'): def startfile(self, *args, **kwargs): # pragma: nocover """.. seealso:: :func:`os.startfile`""" os.startfile(self, *args, **kwargs) return self # in-place re-writing, courtesy of Martijn Pieters # http://www.zopatista.com/python/2013/11/26/inplace-file-rewriting/ @contextlib.contextmanager def in_place( self, mode='r', buffering=-1, encoding=None, errors=None, newline=None, backup_extension=None, ): """ A context in which a file may be re-written in-place with new content. Yields a tuple of :samp:`({readable}, {writable})` file objects, where `writable` replaces `readable`. If an exception occurs, the old file is restored, removing the written data. Mode *must not* use ``'w'``, ``'a'``, or ``'+'``; only read-only-modes are allowed. A :exc:`ValueError` is raised on invalid modes. For example, to add line numbers to a file:: p = Path(filename) assert p.isfile() with p.in_place() as (reader, writer): for number, line in enumerate(reader, 1): writer.write('{0:3}: '.format(number))) writer.write(line) Thereafter, the file at `filename` will have line numbers in it. 
""" if set(mode).intersection('wa+'): raise ValueError('Only read-only file modes can be used') # move existing file to backup, create new file with same permissions # borrowed extensively from the fileinput module backup_fn = self + (backup_extension or os.extsep + 'bak') backup_fn.remove_p() self.rename(backup_fn) readable = io.open( backup_fn, mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, ) try: perm = os.fstat(readable.fileno()).st_mode except OSError: writable = self.open( 'w' + mode.replace('r', ''), buffering=buffering, encoding=encoding, errors=errors, newline=newline, ) else: os_mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC os_mode |= getattr(os, 'O_BINARY', 0) fd = os.open(self, os_mode, perm) writable = io.open( fd, "w" + mode.replace('r', ''), buffering=buffering, encoding=encoding, errors=errors, newline=newline, ) with contextlib.suppress(OSError, AttributeError): self.chmod(perm) try: yield readable, writable except Exception: # move backup back readable.close() writable.close() self.remove_p() backup_fn.rename(self) raise else: readable.close() writable.close() finally: backup_fn.remove_p() @classes.ClassProperty @classmethod def special(cls): """ Return a SpecialResolver object suitable referencing a suitable directory for the relevant platform for the given type of content. For example, to get a user config directory, invoke: dir = Path.special().user.config Uses the `appdirs `_ to resolve the paths in a platform-friendly way. To create a config directory for 'My App', consider: dir = Path.special("My App").user.config.makedirs_p() If the ``appdirs`` module is not installed, invocation of special will raise an ImportError. 
""" return functools.partial(SpecialResolver, cls) class DirectoryNotEmpty(OSError): @staticmethod @contextlib.contextmanager def translate(): try: yield except OSError as exc: if exc.errno == errno.ENOTEMPTY: raise DirectoryNotEmpty(*exc.args) from exc raise def only_newer(copy_func): """ Wrap a copy function (like shutil.copy2) to return the dst if it's newer than the source. """ @functools.wraps(copy_func) def wrapper(src, dst, *args, **kwargs): is_newer_dst = dst.exists() and dst.getmtime() >= src.getmtime() if is_newer_dst: return dst return copy_func(src, dst, *args, **kwargs) return wrapper class ExtantPath(Path): """ >>> ExtantPath('.') ExtantPath('.') >>> ExtantPath('does-not-exist') Traceback (most recent call last): OSError: does-not-exist does not exist. """ def _validate(self): if not self.exists(): raise OSError(f"{self} does not exist.") class ExtantFile(Path): """ >>> ExtantFile('.') Traceback (most recent call last): FileNotFoundError: . does not exist as a file. >>> ExtantFile('does-not-exist') Traceback (most recent call last): FileNotFoundError: does-not-exist does not exist as a file. 
""" def _validate(self): if not self.isfile(): raise FileNotFoundError(f"{self} does not exist as a file.") class SpecialResolver: class ResolverScope: def __init__(self, paths, scope): self.paths = paths self.scope = scope def __getattr__(self, class_): return self.paths.get_dir(self.scope, class_) def __init__(self, path_class, *args, **kwargs): appdirs = importlib.import_module('appdirs') vars(self).update( path_class=path_class, wrapper=appdirs.AppDirs(*args, **kwargs) ) def __getattr__(self, scope): return self.ResolverScope(self, scope) def get_dir(self, scope, class_): """ Return the callable function from appdirs, but with the result wrapped in self.path_class """ prop_name = '{scope}_{class_}_dir'.format(**locals()) value = getattr(self.wrapper, prop_name) MultiPath = Multi.for_class(self.path_class) return MultiPath.detect(value) class Multi: """ A mix-in for a Path which may contain multiple Path separated by pathsep. """ @classmethod def for_class(cls, path_cls): name = 'Multi' + path_cls.__name__ return type(name, (cls, path_cls), {}) @classmethod def detect(cls, input): if os.pathsep not in input: cls = cls._next_class return cls(input) def __iter__(self): return iter(map(self._next_class, self.split(os.pathsep))) @classes.ClassProperty @classmethod def _next_class(cls): """ Multi-subclasses should use the parent class """ return next(class_ for class_ in cls.__mro__ if not issubclass(class_, Multi)) class TempDir(Path): """ A temporary directory via :func:`tempfile.mkdtemp`, and constructed with the same parameters that you can use as a context manager. For example: >>> with TempDir() as d: ... d.isdir() and isinstance(d, Path) True The directory is deleted automatically. >>> d.isdir() False .. 
class Handlers:
    """
    Error-handling strategies for tree walking: each handler receives the
    warning message and decides whether to re-raise, warn, or ignore.
    """

    def strict(msg):
        # Re-raise the exception currently being handled.
        raise

    def warn(msg):
        warnings.warn(msg, TreeWalkWarning)

    def ignore(msg):
        pass

    @classmethod
    def _resolve(cls, param):
        """
        Map ``param`` — a handler name ('strict', 'warn', 'ignore') or a
        callable — to the handler callable; raise ValueError otherwise.
        """
        recognized = callable(param) or param in vars(Handlers)
        if not recognized:
            raise ValueError("invalid errors parameter")
        return vars(cls).get(param, param)
def simple(mode):
    """
    Convert a Unix chmod symbolic mode like ``'ugo+rwx'`` to a function
    suitable for applying to a mask to affect that change.

    >>> mask = simple('ugo+rwx')
    >>> mask(0o554) == 0o777
    True

    >>> simple('go-x')(0o777) == 0o766
    True

    >>> simple('o-x')(0o445) == 0o444
    True

    >>> simple('a+x')(0) == 0o111
    True

    >>> simple('a=rw')(0o057) == 0o666
    True

    >>> simple('u=x')(0o666) == 0o166
    True

    >>> simple('g=')(0o157) == 0o107
    True

    >>> simple('gobbledeegook')
    Traceback (most recent call last):
    ValueError: ('Unrecognized symbolic mode', 'gobbledeegook')
    """
    # parse the symbolic mode
    # Named groups are required below by parsed.group('who'/'op'/'what');
    # restore them here (they had been stripped from the pattern).
    parsed = re.match('(?P<who>[ugoa]+)(?P<op>[-+=])(?P<what>[rwx]*)$', mode)
    if not parsed:
        raise ValueError("Unrecognized symbolic mode", mode)

    # generate a mask representing the specified permission
    spec_map = dict(r=4, w=2, x=1)
    specs = (spec_map[perm] for perm in parsed.group('what'))
    spec = functools.reduce(operator.or_, specs, 0)

    # now apply spec to each subject in who
    shift_map = dict(u=6, g=3, o=0)
    # 'a' (all) is shorthand for user, group, and other
    who = parsed.group('who').replace('a', 'ugo')
    masks = (spec << shift_map[subj] for subj in who)
    mask = functools.reduce(operator.or_, masks)

    op = parsed.group('op')

    # if op is -, invert the mask
    if op == '-':
        mask ^= 0o777

    # if op is =, retain extant values for unreferenced subjects
    if op == '=':
        masks = (0o7 << shift_map[subj] for subj in who)
        retain = functools.reduce(operator.or_, masks) ^ 0o777

    op_map = {
        '+': operator.or_,
        '-': operator.and_,
        '=': lambda mask, target: target & retain ^ mask,
    }
    return functools.partial(op_map[op], mask)
""" return ( Pattern(param) if isinstance(param, str) else param if param is not None else Null() ) class Base: pass class Null(Base): def __call__(self, path): return True class Pattern(Base): def __init__(self, pattern): self.pattern = pattern def get_pattern(self, normcase): try: return self._pattern except AttributeError: pass self._pattern = normcase(self.pattern) return self._pattern def __call__(self, path): normcase = getattr(self, 'normcase', path.module.normcase) pattern = self.get_pattern(normcase) return fnmatch.fnmatchcase(normcase(path.name), pattern) class CaseInsensitive(Pattern): """ A Pattern with a ``'normcase'`` property, suitable for passing to :meth:`listdir`, :meth:`dirs`, :meth:`files`, :meth:`walk`, :meth:`walkdirs`, or :meth:`walkfiles` to match case-insensitive. For example, to get all files ending in .py, .Py, .pY, or .PY in the current directory:: from path import Path, matchers Path('.').files(matchers.CaseInsensitive('*.py')) """ normcase = staticmethod(ntpath.normcase) path-16.3.0/path/py37compat.py000066400000000000000000000112301416571726300160620ustar00rootroot00000000000000import functools import os def best_realpath(module): """ Given a path module (i.e. ntpath, posixpath), determine the best 'realpath' function to use for best future compatibility. """ needs_backport = module.realpath is module.abspath return realpath_backport if needs_backport else module.realpath # backport taken from jaraco.windows 5 def realpath_backport(path): if isinstance(path, str): prefix = '\\\\?\\' unc_prefix = prefix + 'UNC' new_unc_prefix = '\\' cwd = os.getcwd() else: prefix = b'\\\\?\\' unc_prefix = prefix + b'UNC' new_unc_prefix = b'\\' cwd = os.getcwdb() had_prefix = path.startswith(prefix) path, ok = _resolve_path(cwd, path, {}) # The path returned by _getfinalpathname will always start with \\?\ - # strip off that prefix unless it was already provided on the original # path. 
if not had_prefix: # For UNC paths, the prefix will actually be \\?\UNC - handle that # case as well. if path.startswith(unc_prefix): path = new_unc_prefix + path[len(unc_prefix) :] elif path.startswith(prefix): path = path[len(prefix) :] return path def _resolve_path(path, rest, seen): # noqa: C901 # Windows normalizes the path before resolving symlinks; be sure to # follow the same behavior. rest = os.path.normpath(rest) if isinstance(rest, str): sep = '\\' else: sep = b'\\' if os.path.isabs(rest): drive, rest = os.path.splitdrive(rest) path = drive + sep rest = rest[1:] while rest: name, _, rest = rest.partition(sep) new_path = os.path.join(path, name) if path else name if os.path.exists(new_path): if not rest: # The whole path exists. Resolve it using the OS. path = os.path._getfinalpathname(new_path) else: # The OS can resolve `new_path`; keep traversing the path. path = new_path elif not os.path.lexists(new_path): # `new_path` does not exist on the filesystem at all. Use the # OS to resolve `path`, if it exists, and then append the # remainder. if os.path.exists(path): path = os.path._getfinalpathname(path) rest = os.path.join(name, rest) if rest else name return os.path.join(path, rest), True else: # We have a symbolic link that the OS cannot resolve. Try to # resolve it ourselves. # On Windows, symbolic link resolution can be partially or # fully disabled [1]. The end result of a disabled symlink # appears the same as a broken symlink (lexists() returns True # but exists() returns False). And in both cases, the link can # still be read using readlink(). Call stat() and check the # resulting error code to ensure we don't circumvent the # Windows symbolic link restrictions. # [1] https://technet.microsoft.com/en-us/library/cc754077.aspx try: os.stat(new_path) except OSError as e: # WinError 1463: The symbolic link cannot be followed # because its type is disabled. 
def lru_cache(user_function):
    """
    Support for lru_cache(user_function)

    Shim allowing the bare-decorator form (``@lru_cache`` without
    parentheses, native in Python 3.8+) on Python 3.7.
    """
    decorator = functools.lru_cache()
    return decorator(user_function)
Coombs maintainer_email = jaraco@jaraco.com description = A module wrapper for os.path long_description = file:README.rst url = https://github.com/jaraco/path classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only Operating System :: OS Independent Topic :: Software Development :: Libraries :: Python Modules [options] packages = find_namespace: include_package_data = true python_requires = >=3.7 install_requires = [options.packages.find] exclude = build* dist* docs* tests* [options.extras_require] testing = # upstream pytest >= 6 pytest-checkdocs >= 2.4 pytest-flake8 pytest-black >= 0.3.7; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-cov pytest-mypy; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 1.0.1 # local appdirs packaging pywin32; platform_system == "Windows" # required for checkdocs on README.rst pygments docs = # upstream sphinx jaraco.packaging >= 8.2 rst.linker >= 1.9 jaraco.tidelift >= 1.4 # local [options.entry_points] path-16.3.0/setup.py000066400000000000000000000001341416571726300142610ustar00rootroot00000000000000#!/usr/bin/env python import setuptools if __name__ == "__main__": setuptools.setup() path-16.3.0/test_path.py000066400000000000000000001266171416571726300151330ustar00rootroot00000000000000""" Tests for the path module. This suite runs on Linux, macOS, and Windows. To extend the platform support, just add appropriate pathnames for your platform (os.name) in each place where the p() function is called. Then report the result. If you can't get the test to run at all on your platform, there's probably a bug -- please report the issue in the issue tracker. TestScratchDir.test_touch() takes a while to run. It sleeps a few seconds to allow some time to pass between calls to check the modify time on files. 
""" import io import os import sys import shutil import time import types import ntpath import posixpath import textwrap import platform import importlib import datetime import subprocess import re import contextlib import stat import pytest import path from path import Path from path import TempDir from path import matchers from path import SpecialResolver from path import Multi def os_choose(**choices): """Choose a value from several possible values, based on os.name""" return choices[os.name] class TestBasics: def test_relpath(self): root = Path(os_choose(nt='C:\\', posix='/')) foo = root / 'foo' quux = foo / 'quux' bar = foo / 'bar' boz = bar / 'Baz' / 'Boz' up = Path(os.pardir) # basics assert root.relpathto(boz) == Path('foo') / 'bar' / 'Baz' / 'Boz' assert bar.relpathto(boz) == Path('Baz') / 'Boz' assert quux.relpathto(boz) == up / 'bar' / 'Baz' / 'Boz' assert boz.relpathto(quux) == up / up / up / 'quux' assert boz.relpathto(bar) == up / up # Path is not the first element in concatenation assert root.relpathto(boz) == 'foo' / Path('bar') / 'Baz' / 'Boz' # x.relpathto(x) == curdir assert root.relpathto(root) == os.curdir assert boz.relpathto(boz) == os.curdir # Make sure case is properly noted (or ignored) assert boz.relpathto(boz.normcase()) == os.curdir # relpath() cwd = Path(os.getcwd()) assert boz.relpath() == cwd.relpathto(boz) if os.name == 'nt': # pragma: nocover # Check relpath across drives. 
d = Path('D:\\') assert d.relpathto(boz) == boz def test_construction_from_none(self): """ """ with pytest.raises(TypeError): Path(None) def test_construction_from_int(self): """ Path class will construct a path as a string of the number """ assert Path(1) == '1' def test_string_compatibility(self): """Test compatibility with ordinary strings.""" x = Path('xyzzy') assert x == 'xyzzy' assert x == str('xyzzy') # sorting items = [Path('fhj'), Path('fgh'), 'E', Path('d'), 'A', Path('B'), 'c'] items.sort() assert items == ['A', 'B', 'E', 'c', 'd', 'fgh', 'fhj'] # Test p1/p1. p1 = Path("foo") p2 = Path("bar") assert p1 / p2 == os_choose(nt='foo\\bar', posix='foo/bar') def test_properties(self): # Create sample path object. f = Path( os_choose( nt='C:\\Program Files\\Python\\Lib\\xyzzy.py', posix='/usr/local/python/lib/xyzzy.py', ) ) # .parent nt_lib = 'C:\\Program Files\\Python\\Lib' posix_lib = '/usr/local/python/lib' expected = os_choose(nt=nt_lib, posix=posix_lib) assert f.parent == expected # .name assert f.name == 'xyzzy.py' assert f.parent.name == os_choose(nt='Lib', posix='lib') # .ext assert f.ext == '.py' assert f.parent.ext == '' # .drive assert f.drive == os_choose(nt='C:', posix='') def test_methods(self): # .abspath() assert Path(os.curdir).abspath() == os.getcwd() # .getcwd() cwd = Path.getcwd() assert isinstance(cwd, Path) assert cwd == os.getcwd() def test_explicit_module(self): """ The user may specify an explicit path module to use. """ nt_ok = Path.using_module(ntpath)(r'foo\bar\baz') posix_ok = Path.using_module(posixpath)(r'foo/bar/baz') posix_wrong = Path.using_module(posixpath)(r'foo\bar\baz') assert nt_ok.dirname() == r'foo\bar' assert posix_ok.dirname() == r'foo/bar' assert posix_wrong.dirname() == '' assert nt_ok / 'quux' == r'foo\bar\baz\quux' assert posix_ok / 'quux' == r'foo/bar/baz/quux' def test_explicit_module_classes(self): """ Multiple calls to path.using_module should produce the same class. 
""" nt_path = Path.using_module(ntpath) assert nt_path is Path.using_module(ntpath) assert nt_path.__name__ == 'Path_ntpath' def test_joinpath_on_instance(self): res = Path('foo') foo_bar = res.joinpath('bar') assert foo_bar == os_choose(nt='foo\\bar', posix='foo/bar') def test_joinpath_to_nothing(self): res = Path('foo') assert res.joinpath() == res def test_joinpath_on_class(self): "Construct a path from a series of strings" foo_bar = Path.joinpath('foo', 'bar') assert foo_bar == os_choose(nt='foo\\bar', posix='foo/bar') def test_joinpath_fails_on_empty(self): "It doesn't make sense to join nothing at all" with pytest.raises(TypeError): Path.joinpath() def test_joinpath_returns_same_type(self): path_posix = Path.using_module(posixpath) res = path_posix.joinpath('foo') assert isinstance(res, path_posix) res2 = res.joinpath('bar') assert isinstance(res2, path_posix) assert res2 == 'foo/bar' def test_radd_string(self): res = 'foo' + Path('bar') assert res == Path('foobar') def test_fspath(self): os.fspath(Path('foobar')) def test_normpath(self): assert Path('foo//bar').normpath() == os.path.normpath('foo//bar') def test_expandvars(self, monkeypatch): monkeypatch.setitem(os.environ, 'sub', 'value') val = '$sub/$(sub)' assert Path(val).expandvars() == os.path.expandvars(val) assert 'value' in Path(val).expandvars() def test_expand(self): val = 'foobar' expected = os.path.normpath(os.path.expanduser(os.path.expandvars(val))) assert Path(val).expand() == expected def test_splitdrive(self): val = Path.using_module(ntpath)(r'C:\bar') drive, rest = val.splitdrive() assert drive == 'C:' assert rest == r'\bar' assert isinstance(rest, Path) def test_relpathto(self): source = Path.using_module(ntpath)(r'C:\foo') dest = Path.using_module(ntpath)(r'D:\bar') assert source.relpathto(dest) == dest def test_walk_errors(self): start = Path('/does-not-exist') items = list(start.walk(errors='ignore')) assert not items def test_walk_child_error(self, tmpdir): def 
simulate_access_denied(item): if item.name == 'sub1': raise OSError("Access denied") p = Path(tmpdir) (p / 'sub1').makedirs_p() items = path.Traversal(simulate_access_denied)(p.walk(errors='ignore')) assert list(items) == [p / 'sub1'] def test_read_md5(self, tmpdir): target = Path(tmpdir) / 'some file' target.write_text('quick brown fox and lazy dog') assert target.read_md5() == b's\x15\rPOW\x7fYk\xa8\x8e\x00\x0b\xd7G\xf9' def test_read_hexhash(self, tmpdir): target = Path(tmpdir) / 'some file' target.write_text('quick brown fox and lazy dog') assert target.read_hexhash('md5') == '73150d504f577f596ba88e000bd747f9' @pytest.mark.skipif("not hasattr(os, 'statvfs')") def test_statvfs(self): Path('.').statvfs() @pytest.mark.skipif("not hasattr(os, 'pathconf')") def test_pathconf(self): assert isinstance(Path('.').pathconf(1), int) def test_utime(self, tmpdir): tmpfile = Path(tmpdir) / 'file' tmpfile.touch() new_time = (time.time() - 600,) * 2 assert Path(tmpfile).utime(new_time).stat().st_atime == new_time[0] def test_chmod_str(self, tmpdir): tmpfile = Path(tmpdir) / 'file' tmpfile.touch() tmpfile.chmod('o-r') is_windows = platform.system() == 'Windows' assert is_windows or not (tmpfile.stat().st_mode & stat.S_IROTH) @pytest.mark.skipif("not hasattr(Path, 'chown')") def test_chown(self, tmpdir): tmpfile = Path(tmpdir) / 'file' tmpfile.touch() tmpfile.chown(os.getuid(), os.getgid()) import pwd name = pwd.getpwuid(os.getuid()).pw_name tmpfile.chown(name) def test_renames(self, tmpdir): tmpfile = Path(tmpdir) / 'file' tmpfile.touch() tmpfile.renames(Path(tmpdir) / 'foo' / 'alt') def test_mkdir_p(self, tmpdir): Path(tmpdir).mkdir_p() def test_removedirs_p(self, tmpdir): dir = Path(tmpdir) / 'somedir' dir.mkdir() (dir / 'file').touch() (dir / 'sub').mkdir() dir.removedirs_p() assert dir.isdir() assert (dir / 'file').isfile() # TODO: shouldn't sub get removed? 
# assert not (dir / 'sub').isdir() class TestReadWriteText: @pytest.mark.filterwarnings('ignore:Writing bytes in write_text') def test_read_write(self, tmpdir): file = path.Path(tmpdir) / 'filename' file.write_text('hello world') assert file.read_text() == 'hello world' assert file.read_bytes() == b'hello world' file.write_text(b'hello world') class TestPerformance: @staticmethod def get_command_time(cmd): args = [sys.executable, '-m', 'timeit', '-n', '1', '-r', '1', '-u', 'usec'] + [ cmd ] res = subprocess.check_output(args, universal_newlines=True) dur = re.search(r'(\d+) usec per loop', res).group(1) return datetime.timedelta(microseconds=int(dur)) def test_import_time(self, monkeypatch): """ Import should take less than some limit. Run tests in a subprocess to isolate from test suite overhead. """ limit = datetime.timedelta(milliseconds=20) baseline = self.get_command_time('pass') measure = self.get_command_time('import path') duration = measure - baseline assert duration < limit class TestOwnership: def test_get_owner(self): Path('/').get_owner() class TestLinks: def test_link(self, tmpdir): target = Path(tmpdir) / 'target' target.write_text('hello', encoding='utf-8') link = target.link(Path(tmpdir) / 'link') assert link.read_text() == 'hello' def test_symlink_none(self, tmpdir): root = Path(tmpdir) with root: file = (Path('dir').mkdir() / 'file').touch() file.symlink() assert Path('file').isfile() def test_readlinkabs_passthrough(self, tmpdir): link = Path(tmpdir) / 'link' Path('foo').abspath().symlink(link) link.readlinkabs() == Path('foo').abspath() def test_readlinkabs_rendered(self, tmpdir): link = Path(tmpdir) / 'link' Path('foo').symlink(link) link.readlinkabs() == Path(tmpdir) / 'foo' class TestSymbolicLinksWalk: def test_skip_symlinks(self, tmpdir): root = Path(tmpdir) sub = root / 'subdir' sub.mkdir() sub.symlink(root / 'link') (sub / 'file').touch() assert len(list(root.walk())) == 4 skip_links = path.Traversal( lambda item: item.isdir() and not 
item.islink(), ) assert len(list(skip_links(root.walk()))) == 3 class TestSelfReturn: """ Some methods don't necessarily return any value (e.g. makedirs, makedirs_p, rename, mkdir, touch, chroot). These methods should return self anyhow to allow methods to be chained. """ def test_makedirs_p(self, tmpdir): """ Path('foo').makedirs_p() == Path('foo') """ p = Path(tmpdir) / "newpath" ret = p.makedirs_p() assert p == ret def test_makedirs_p_extant(self, tmpdir): p = Path(tmpdir) ret = p.makedirs_p() assert p == ret def test_rename(self, tmpdir): p = Path(tmpdir) / "somefile" p.touch() target = Path(tmpdir) / "otherfile" ret = p.rename(target) assert target == ret def test_mkdir(self, tmpdir): p = Path(tmpdir) / "newdir" ret = p.mkdir() assert p == ret def test_touch(self, tmpdir): p = Path(tmpdir) / "empty file" ret = p.touch() assert p == ret @pytest.mark.skipif("not hasattr(Path, 'chroot')") def test_chroot(monkeypatch): results = [] monkeypatch.setattr(os, 'chroot', results.append) Path().chroot() assert results == [''] @pytest.mark.skipif("not hasattr(Path, 'startfile')") def test_startfile(monkeypatch): results = [] monkeypatch.setattr(os, 'startfile', results.append) Path().startfile() assert results == [''] class TestScratchDir: """ Tests that run in a temporary directory (does not test TempDir class) """ def test_context_manager(self, tmpdir): """Can be used as context manager for chdir.""" d = Path(tmpdir) subdir = d / 'subdir' subdir.makedirs() old_dir = os.getcwd() with subdir: assert os.getcwd() == os.path.realpath(subdir) assert os.getcwd() == old_dir def test_touch(self, tmpdir): # NOTE: This test takes a long time to run (~10 seconds). # It sleeps several seconds because on Windows, the resolution # of a file's mtime and ctime is about 2 seconds. # # atime isn't tested because on Windows the resolution of atime # is something like 24 hours. 
threshold = 1 d = Path(tmpdir) f = d / 'test.txt' t0 = time.time() - threshold f.touch() t1 = time.time() + threshold assert f.exists() assert f.isfile() assert f.size == 0 assert t0 <= f.mtime <= t1 if hasattr(os.path, 'getctime'): ct = f.ctime assert t0 <= ct <= t1 time.sleep(threshold * 2) fobj = open(f, 'ab') fobj.write('some bytes'.encode('utf-8')) fobj.close() time.sleep(threshold * 2) t2 = time.time() - threshold f.touch() t3 = time.time() + threshold assert t0 <= t1 < t2 <= t3 # sanity check assert f.exists() assert f.isfile() assert f.size == 10 assert t2 <= f.mtime <= t3 if hasattr(os.path, 'getctime'): ct2 = f.ctime if platform.system() == 'Windows': # pragma: nocover # On Windows, "ctime" is CREATION time assert ct == ct2 assert ct2 < t2 else: assert ( # ctime is unchanged ct == ct2 or # ctime is approximately the mtime ct2 == pytest.approx(f.mtime, 0.001) ) def test_listing(self, tmpdir): d = Path(tmpdir) assert d.listdir() == [] f = 'testfile.txt' af = d / f assert af == os.path.join(d, f) af.touch() try: assert af.exists() assert d.listdir() == [af] # .glob() assert d.glob('testfile.txt') == [af] assert d.glob('test*.txt') == [af] assert d.glob('*.txt') == [af] assert d.glob('*txt') == [af] assert d.glob('*') == [af] assert d.glob('*.html') == [] assert d.glob('testfile') == [] # .iglob matches .glob but as an iterator. 
assert list(d.iglob('*')) == d.glob('*') assert isinstance(d.iglob('*'), types.GeneratorType) finally: af.remove() # Try a test with 20 files files = [d / ('%d.txt' % i) for i in range(20)] for f in files: fobj = open(f, 'w') fobj.write('some text\n') fobj.close() try: files2 = d.listdir() files.sort() files2.sort() assert files == files2 finally: for f in files: with contextlib.suppress(Exception): f.remove() @pytest.fixture def bytes_filename(self, tmpdir): name = r'r\xe9\xf1emi'.encode('latin-1') base = str(tmpdir).encode('ascii') try: with open(os.path.join(base, name), 'wb'): pass except Exception as exc: raise pytest.skip(f"Invalid encodings disallowed {exc}") return name def test_listdir_other_encoding(self, tmpdir, bytes_filename): # pragma: nocover """ Some filesystems allow non-character sequences in path names. ``.listdir`` should still function in this case. See issue #61 for details. """ # first demonstrate that os.listdir works assert os.listdir(str(tmpdir).encode('ascii')) # now try with path results = Path(tmpdir).listdir() (res,) = results assert isinstance(res, Path) assert len(res.basename()) == len(bytes_filename) def test_makedirs(self, tmpdir): d = Path(tmpdir) # Placeholder file so that when removedirs() is called, # it doesn't remove the temporary directory itself. tempf = d / 'temp.txt' tempf.touch() try: foo = d / 'foo' boz = foo / 'bar' / 'baz' / 'boz' boz.makedirs() try: assert boz.isdir() finally: boz.removedirs() assert not foo.exists() assert d.exists() foo.mkdir(0o750) boz.makedirs(0o700) try: assert boz.isdir() finally: boz.removedirs() assert not foo.exists() assert d.exists() finally: os.remove(tempf) def assertSetsEqual(self, a, b): ad = {} for i in a: ad[i] = None bd = {} for i in b: bd[i] = None assert ad == bd def test_shutil(self, tmpdir): # Note: This only tests the methods exist and do roughly what # they should, neglecting the details as they are shutil's # responsibility. 
d = Path(tmpdir) testDir = d / 'testdir' testFile = testDir / 'testfile.txt' testA = testDir / 'A' testCopy = testA / 'testcopy.txt' testLink = testA / 'testlink.txt' testB = testDir / 'B' testC = testB / 'C' testCopyOfLink = testC / testA.relpathto(testLink) # Create test dirs and a file testDir.mkdir() testA.mkdir() testB.mkdir() f = open(testFile, 'w') f.write('x' * 10000) f.close() # Test simple file copying. testFile.copyfile(testCopy) assert testCopy.isfile() assert testFile.bytes() == testCopy.bytes() # Test copying into a directory. testCopy2 = testA / testFile.name testFile.copy(testA) assert testCopy2.isfile() assert testFile.bytes() == testCopy2.bytes() # Make a link for the next test to use. testFile.symlink(testLink) # Test copying directory tree. testA.copytree(testC) assert testC.isdir() self.assertSetsEqual( testC.listdir(), [testC / testCopy.name, testC / testFile.name, testCopyOfLink], ) assert not testCopyOfLink.islink() # Clean up for another try. testC.rmtree() assert not testC.exists() # Copy again, preserving symlinks. testA.copytree(testC, True) assert testC.isdir() self.assertSetsEqual( testC.listdir(), [testC / testCopy.name, testC / testFile.name, testCopyOfLink], ) if hasattr(os, 'symlink'): assert testCopyOfLink.islink() assert testCopyOfLink.realpath() == testFile # Clean up. 
testDir.rmtree() assert not testDir.exists() self.assertList(d.listdir(), []) def assertList(self, listing, expected): assert sorted(listing) == sorted(expected) def test_patterns(self, tmpdir): d = Path(tmpdir) names = ['x.tmp', 'x.xtmp', 'x2g', 'x22', 'x.txt'] dirs = [d, d / 'xdir', d / 'xdir.tmp', d / 'xdir.tmp' / 'xsubdir'] for e in dirs: if not e.isdir(): e.makedirs() for name in names: (e / name).touch() self.assertList(d.listdir('*.tmp'), [d / 'x.tmp', d / 'xdir.tmp']) self.assertList(d.files('*.tmp'), [d / 'x.tmp']) self.assertList(d.dirs('*.tmp'), [d / 'xdir.tmp']) self.assertList( d.walk(), [e for e in dirs if e != d] + [e / n for e in dirs for n in names] ) self.assertList(d.walk('*.tmp'), [e / 'x.tmp' for e in dirs] + [d / 'xdir.tmp']) self.assertList(d.walkfiles('*.tmp'), [e / 'x.tmp' for e in dirs]) self.assertList(d.walkdirs('*.tmp'), [d / 'xdir.tmp']) encodings = 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16' @pytest.mark.parametrize("encoding", encodings) def test_unicode(self, tmpdir, encoding): """Test that path works with the specified encoding, which must be capable of representing the entire range of Unicode codepoints. """ d = Path(tmpdir) p = d / 'unicode.txt' givenLines = [ 'Hello world\n', '\u0d0a\u0a0d\u0d15\u0a15\r\n', '\u0d0a\u0a0d\u0d15\u0a15\x85', '\u0d0a\u0a0d\u0d15\u0a15\u2028', '\r', 'hanging', ] given = ''.join(givenLines) expectedLines = [ 'Hello world\n', '\u0d0a\u0a0d\u0d15\u0a15\n', '\u0d0a\u0a0d\u0d15\u0a15\n', '\u0d0a\u0a0d\u0d15\u0a15\n', '\n', 'hanging', ] clean = ''.join(expectedLines) stripped = [line.replace('\n', '') for line in expectedLines] # write bytes manually to file with io.open(p, 'wb') as strm: strm.write(given.encode(encoding)) # test all 3 path read-fully functions, including # path.lines() in unicode mode. 
assert p.bytes() == given.encode(encoding) with pytest.deprecated_call(): assert p.text(encoding) == clean assert p.lines(encoding) == expectedLines assert p.lines(encoding, retain=False) == stripped # If this is UTF-16, that's enough. # The rest of these will unfortunately fail because append=True # mode causes an extra BOM to be written in the middle of the file. # UTF-16 is the only encoding that has this problem. if encoding == 'UTF-16': return # Write Unicode to file using path.write_text(). # This test doesn't work with a hanging line. cleanNoHanging = clean + '\n' p.write_text(cleanNoHanging, encoding) p.write_text(cleanNoHanging, encoding, append=True) # Check the result. expectedBytes = 2 * cleanNoHanging.replace('\n', os.linesep).encode(encoding) expectedLinesNoHanging = expectedLines[:] expectedLinesNoHanging[-1] += '\n' assert p.bytes() == expectedBytes with pytest.deprecated_call(): assert p.text(encoding) == 2 * cleanNoHanging assert p.lines(encoding) == 2 * expectedLinesNoHanging assert p.lines(encoding, retain=False) == 2 * stripped # Write Unicode to file using path.write_lines(). # The output in the file should be exactly the same as last time. p.write_lines(expectedLines, encoding) p.write_lines(stripped, encoding, append=True) # Check the result. assert p.bytes() == expectedBytes # Now: same test, but using various newline sequences. # If linesep is being properly applied, these will be converted # to the platform standard newline sequence. p.write_lines(givenLines, encoding) p.write_lines(givenLines, encoding, append=True) # Check the result. assert p.bytes() == expectedBytes # Same test, using newline sequences that are different # from the platform default. 
def testLinesep(eol): p.write_lines(givenLines, encoding, linesep=eol) p.write_lines(givenLines, encoding, linesep=eol, append=True) expected = 2 * cleanNoHanging.replace('\n', eol).encode(encoding) assert p.bytes() == expected testLinesep('\n') testLinesep('\r') testLinesep('\r\n') testLinesep('\x0d\x85') # Again, but with linesep=None. p.write_lines(givenLines, encoding, linesep=None) p.write_lines(givenLines, encoding, linesep=None, append=True) # Check the result. expectedBytes = 2 * given.encode(encoding) assert p.bytes() == expectedBytes with pytest.deprecated_call(): assert p.text(encoding) == 2 * clean expectedResultLines = expectedLines[:] expectedResultLines[-1] += expectedLines[0] expectedResultLines += expectedLines[1:] assert p.lines(encoding) == expectedResultLines def test_chunks(self, tmpdir): p = (TempDir() / 'test.txt').touch() txt = "0123456789" size = 5 p.write_text(txt) for i, chunk in enumerate(p.chunks(size)): assert chunk == txt[i * size : i * size + size] assert i == len(txt) / size - 1 def test_samefile(self, tmpdir): f1 = (TempDir() / '1.txt').touch() f1.write_text('foo') f2 = (TempDir() / '2.txt').touch() f1.write_text('foo') f3 = (TempDir() / '3.txt').touch() f1.write_text('bar') f4 = TempDir() / '4.txt' f1.copyfile(f4) assert os.path.samefile(f1, f2) == f1.samefile(f2) assert os.path.samefile(f1, f3) == f1.samefile(f3) assert os.path.samefile(f1, f4) == f1.samefile(f4) assert os.path.samefile(f1, f1) == f1.samefile(f1) def test_rmtree_p(self, tmpdir): d = Path(tmpdir) sub = d / 'subfolder' sub.mkdir() (sub / 'afile').write_text('something') sub.rmtree_p() assert not sub.exists() def test_rmtree_p_nonexistent(self, tmpdir): d = Path(tmpdir) sub = d / 'subfolder' assert not sub.exists() sub.rmtree_p() def test_rmdir_p_exists(self, tmpdir): """ Invocation of rmdir_p on an existant directory should remove the directory. 
""" d = Path(tmpdir) sub = d / 'subfolder' sub.mkdir() sub.rmdir_p() assert not sub.exists() def test_rmdir_p_nonexistent(self, tmpdir): """ A non-existent file should not raise an exception. """ d = Path(tmpdir) sub = d / 'subfolder' assert not sub.exists() sub.rmdir_p() def test_rmdir_p_sub_sub_dir(self, tmpdir): """ A non-empty folder should not raise an exception. """ d = Path(tmpdir) sub = d / 'subfolder' sub.mkdir() subsub = sub / 'subfolder' subsub.mkdir() sub.rmdir_p() class TestMergeTree: @pytest.fixture(autouse=True) def testing_structure(self, tmpdir): self.test_dir = Path(tmpdir) self.subdir_a = self.test_dir / 'A' self.test_file = self.subdir_a / 'testfile.txt' self.test_link = self.subdir_a / 'testlink.txt' self.subdir_b = self.test_dir / 'B' self.subdir_a.mkdir() self.subdir_b.mkdir() with open(self.test_file, 'w') as f: f.write('x' * 10000) self.test_file.symlink(self.test_link) def check_link(self): target = Path(self.subdir_b / self.test_link.name) check = target.islink if hasattr(os, 'symlink') else target.isfile assert check() def test_with_nonexisting_dst_kwargs(self): self.subdir_a.merge_tree(self.subdir_b, symlinks=True) assert self.subdir_b.isdir() expected = set( (self.subdir_b / self.test_file.name, self.subdir_b / self.test_link.name) ) assert set(self.subdir_b.listdir()) == expected self.check_link() def test_with_nonexisting_dst_args(self): self.subdir_a.merge_tree(self.subdir_b, True) assert self.subdir_b.isdir() expected = set( (self.subdir_b / self.test_file.name, self.subdir_b / self.test_link.name) ) assert set(self.subdir_b.listdir()) == expected self.check_link() def test_with_existing_dst(self): self.subdir_b.rmtree() self.subdir_a.copytree(self.subdir_b, True) self.test_link.remove() test_new = self.subdir_a / 'newfile.txt' test_new.touch() with open(self.test_file, 'w') as f: f.write('x' * 5000) self.subdir_a.merge_tree(self.subdir_b, True) assert self.subdir_b.isdir() expected = set( ( self.subdir_b / self.test_file.name, 
self.subdir_b / self.test_link.name, self.subdir_b / test_new.name, ) ) assert set(self.subdir_b.listdir()) == expected self.check_link() assert len(Path(self.subdir_b / self.test_file.name).bytes()) == 5000 def test_copytree_parameters(self): """ merge_tree should accept parameters to copytree, such as 'ignore' """ ignore = shutil.ignore_patterns('testlink*') self.subdir_a.merge_tree(self.subdir_b, ignore=ignore) assert self.subdir_b.isdir() assert self.subdir_b.listdir() == [self.subdir_b / self.test_file.name] def test_only_newer(self): """ merge_tree should accept a copy_function in which only newer files are copied and older files do not overwrite newer copies in the dest. """ target = self.subdir_b / 'testfile.txt' target.write_text('this is newer') self.subdir_a.merge_tree( self.subdir_b, copy_function=path.only_newer(shutil.copy2) ) assert target.read_text() == 'this is newer' class TestChdir: def test_chdir_or_cd(self, tmpdir): """tests the chdir or cd method""" d = Path(str(tmpdir)) cwd = d.getcwd() # ensure the cwd isn't our tempdir assert str(d) != str(cwd) # now, we're going to chdir to tempdir d.chdir() # we now ensure that our cwd is the tempdir assert str(d.getcwd()) == str(tmpdir) # we're resetting our path d = Path(cwd) # we ensure that our cwd is still set to tempdir assert str(d.getcwd()) == str(tmpdir) # we're calling the alias cd method d.cd() # now, we ensure cwd isn'r tempdir assert str(d.getcwd()) == str(cwd) assert str(d.getcwd()) != str(tmpdir) class TestSubclass: def test_subclass_produces_same_class(self): """ When operations are invoked on a subclass, they should produce another instance of that subclass. 
""" class PathSubclass(Path): pass p = PathSubclass('/foo') subdir = p / 'bar' assert isinstance(subdir, PathSubclass) class TestTempDir: def test_constructor(self): """ One should be able to readily construct a temporary directory """ d = TempDir() assert isinstance(d, path.Path) assert d.exists() assert d.isdir() d.rmdir() assert not d.exists() def test_next_class(self): """ It should be possible to invoke operations on a TempDir and get Path classes. """ d = TempDir() sub = d / 'subdir' assert isinstance(sub, path.Path) d.rmdir() def test_context_manager(self): """ One should be able to use a TempDir object as a context, which will clean up the contents after. """ d = TempDir() res = d.__enter__() assert res == path.Path(d) (d / 'somefile.txt').touch() assert not isinstance(d / 'somefile.txt', TempDir) d.__exit__(None, None, None) assert not d.exists() def test_context_manager_using_with(self): """ The context manager will allow using the with keyword and provide a temporary directory that will be deleted after that. 
""" with TempDir() as d: assert d.isdir() assert not d.isdir() def test_cleaned_up_on_interrupt(self): with contextlib.suppress(KeyboardInterrupt): with TempDir() as d: raise KeyboardInterrupt() assert not d.exists() class TestUnicode: @pytest.fixture(autouse=True) def unicode_name_in_tmpdir(self, tmpdir): # build a snowman (dir) in the temporary directory Path(tmpdir).joinpath('☃').mkdir() def test_walkdirs_with_unicode_name(self, tmpdir): for res in Path(tmpdir).walkdirs(): pass class TestPatternMatching: def test_fnmatch_simple(self): p = Path('FooBar') assert p.fnmatch('Foo*') assert p.fnmatch('Foo[ABC]ar') def test_fnmatch_custom_mod(self): p = Path('FooBar') p.module = ntpath assert p.fnmatch('foobar') assert p.fnmatch('FOO[ABC]AR') def test_fnmatch_custom_normcase(self): def normcase(path): return path.upper() p = Path('FooBar') assert p.fnmatch('foobar', normcase=normcase) assert p.fnmatch('FOO[ABC]AR', normcase=normcase) def test_listdir_simple(self): p = Path('.') assert len(p.listdir()) == len(os.listdir('.')) def test_listdir_empty_pattern(self): p = Path('.') assert p.listdir('') == [] def test_listdir_patterns(self, tmpdir): p = Path(tmpdir) (p / 'sub').mkdir() (p / 'File').touch() assert p.listdir('s*') == [p / 'sub'] assert len(p.listdir('*')) == 2 def test_listdir_custom_module(self, tmpdir): """ Listdir patterns should honor the case sensitivity of the path module used by that Path class. """ always_unix = Path.using_module(posixpath) p = always_unix(tmpdir) (p / 'sub').mkdir() (p / 'File').touch() assert p.listdir('S*') == [] always_win = Path.using_module(ntpath) p = always_win(tmpdir) assert p.listdir('S*') == [p / 'sub'] assert p.listdir('f*') == [p / 'File'] def test_listdir_case_insensitive(self, tmpdir): """ Listdir patterns should honor the case sensitivity of the path module used by that Path class. 
""" p = Path(tmpdir) (p / 'sub').mkdir() (p / 'File').touch() assert p.listdir(matchers.CaseInsensitive('S*')) == [p / 'sub'] assert p.listdir(matchers.CaseInsensitive('f*')) == [p / 'File'] assert p.files(matchers.CaseInsensitive('S*')) == [] assert p.dirs(matchers.CaseInsensitive('f*')) == [] def test_walk_case_insensitive(self, tmpdir): p = Path(tmpdir) (p / 'sub1' / 'foo').makedirs_p() (p / 'sub2' / 'foo').makedirs_p() (p / 'sub1' / 'foo' / 'bar.Txt').touch() (p / 'sub2' / 'foo' / 'bar.TXT').touch() (p / 'sub2' / 'foo' / 'bar.txt.bz2').touch() files = list(p.walkfiles(matchers.CaseInsensitive('*.txt'))) assert len(files) == 2 assert p / 'sub2' / 'foo' / 'bar.TXT' in files assert p / 'sub1' / 'foo' / 'bar.Txt' in files class TestInPlace: reference_content = textwrap.dedent( """ The quick brown fox jumped over the lazy dog. """.lstrip() ) reversed_content = textwrap.dedent( """ .god yzal eht revo depmuj xof nworb kciuq ehT """.lstrip() ) alternate_content = textwrap.dedent( """ Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. 
""".lstrip() ) @classmethod def create_reference(cls, tmpdir): p = Path(tmpdir) / 'document' with p.open('w') as stream: stream.write(cls.reference_content) return p def test_line_by_line_rewrite(self, tmpdir): doc = self.create_reference(tmpdir) # reverse all the text in the document, line by line with doc.in_place() as (reader, writer): for line in reader: r_line = ''.join(reversed(line.strip())) + '\n' writer.write(r_line) with doc.open() as stream: data = stream.read() assert data == self.reversed_content def test_exception_in_context(self, tmpdir): doc = self.create_reference(tmpdir) with pytest.raises(RuntimeError) as exc: with doc.in_place() as (reader, writer): writer.write(self.alternate_content) raise RuntimeError("some error") assert "some error" in str(exc.value) with doc.open() as stream: data = stream.read() assert 'Lorem' not in data assert 'lazy dog' in data def test_write_mode_invalid(self, tmpdir): with pytest.raises(ValueError): with (Path(tmpdir) / 'document').in_place(mode='w'): pass class TestSpecialPaths: @pytest.fixture(autouse=True, scope='class') def appdirs_installed(cls): pytest.importorskip('appdirs') @pytest.fixture def feign_linux(self, monkeypatch): monkeypatch.setattr("platform.system", lambda: "Linux") monkeypatch.setattr("sys.platform", "linux") monkeypatch.setattr("os.pathsep", ":") # remove any existing import of appdirs, as it sets up some # state during import. 
sys.modules.pop('appdirs') def test_basic_paths(self): appdirs = importlib.import_module('appdirs') expected = appdirs.user_config_dir() assert SpecialResolver(Path).user.config == expected expected = appdirs.site_config_dir() assert SpecialResolver(Path).site.config == expected expected = appdirs.user_config_dir('My App', 'Me') assert SpecialResolver(Path, 'My App', 'Me').user.config == expected def test_unix_paths(self, tmpdir, monkeypatch, feign_linux): fake_config = tmpdir / '_config' monkeypatch.setitem(os.environ, 'XDG_CONFIG_HOME', str(fake_config)) expected = str(tmpdir / '_config') assert SpecialResolver(Path).user.config == expected def test_unix_paths_fallback(self, tmpdir, monkeypatch, feign_linux): "Without XDG_CONFIG_HOME set, ~/.config should be used." fake_home = tmpdir / '_home' monkeypatch.delitem(os.environ, 'XDG_CONFIG_HOME', raising=False) monkeypatch.setitem(os.environ, 'HOME', str(fake_home)) expected = Path('~/.config').expanduser() assert SpecialResolver(Path).user.config == expected def test_property(self): assert isinstance(Path.special().user.config, Path) assert isinstance(Path.special().user.data, Path) assert isinstance(Path.special().user.cache, Path) def test_other_parameters(self): """ Other parameters should be passed through to appdirs function. 
""" res = Path.special(version="1.0", multipath=True).site.config assert isinstance(res, Path) def test_multipath(self, feign_linux, monkeypatch, tmpdir): """ If multipath is provided, on Linux return the XDG_CONFIG_DIRS """ fake_config_1 = str(tmpdir / '_config1') fake_config_2 = str(tmpdir / '_config2') config_dirs = os.pathsep.join([fake_config_1, fake_config_2]) monkeypatch.setitem(os.environ, 'XDG_CONFIG_DIRS', config_dirs) res = Path.special(multipath=True).site.config assert isinstance(res, Multi) assert fake_config_1 in res assert fake_config_2 in res assert '_config1' in str(res) def test_reused_SpecialResolver(self): """ Passing additional args and kwargs to SpecialResolver should be passed through to each invocation of the function in appdirs. """ appdirs = importlib.import_module('appdirs') adp = SpecialResolver(Path, version="1.0") res = adp.user.config expected = appdirs.user_config_dir(version="1.0") assert res == expected class TestMultiPath: def test_for_class(self): """ Multi.for_class should return a subclass of the Path class provided. """ cls = Multi.for_class(Path) assert issubclass(cls, Path) assert issubclass(cls, Multi) expected_name = 'Multi' + Path.__name__ assert cls.__name__ == expected_name def test_detect_no_pathsep(self): """ If no pathsep is provided, multipath detect should return an instance of the parent class with no Multi mix-in. """ path = Multi.for_class(Path).detect('/foo/bar') assert isinstance(path, Path) assert not isinstance(path, Multi) def test_detect_with_pathsep(self): """ If a pathsep appears in the input, detect should return an instance of a Path with the Multi mix-in. """ inputs = '/foo/bar', '/baz/bing' input = os.pathsep.join(inputs) path = Multi.for_class(Path).detect(input) assert isinstance(path, Multi) def test_iteration(self): """ Iterating over a MultiPath should yield instances of the parent class. 
""" inputs = '/foo/bar', '/baz/bing' input = os.pathsep.join(inputs) path = Multi.for_class(Path).detect(input) items = iter(path) first = next(items) assert first == '/foo/bar' assert isinstance(first, Path) assert not isinstance(first, Multi) assert next(items) == '/baz/bing' assert path == input def test_no_dependencies(): """ Path pie guarantees that the path module can be transplanted into an environment without any dependencies. """ cmd = [sys.executable, '-S', '-c', 'import path'] subprocess.check_call(cmd) class TestHandlers: @staticmethod def run_with_handler(handler): try: raise ValueError() except Exception: handler("Something unexpected happened") def test_raise(self): handler = path.Handlers._resolve('strict') with pytest.raises(ValueError): self.run_with_handler(handler) def test_warn(self): handler = path.Handlers._resolve('warn') with pytest.warns(path.TreeWalkWarning): self.run_with_handler(handler) def test_ignore(self): handler = path.Handlers._resolve('ignore') self.run_with_handler(handler) def test_invalid_handler(self): with pytest.raises(ValueError): path.Handlers._resolve('raise') path-16.3.0/tox.ini000066400000000000000000000013341416571726300140650ustar00rootroot00000000000000[tox] envlist = python minversion = 3.2 # https://github.com/jaraco/skeleton/issues/6 tox_pip_extensions_ext_venv_update = true toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = commands = pytest {posargs} usedevelop = True extras = testing [testenv:docs] extras = docs testing changedir = docs commands = python -m sphinx -W --keep-going . {toxinidir}/build/html [testenv:release] skip_install = True deps = build twine>=3 jaraco.develop>=7.1 passenv = TWINE_PASSWORD GITHUB_TOKEN setenv = TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)" python -m build python -m twine upload dist/* python -m jaraco.develop.create-github-release