pax_global_header00006660000000000000000000000064144433101200014502gustar00rootroot0000000000000052 comment=82dc93ceffacdabab24c4e5909df08feea11ef50 nvchecker-2.12/000077500000000000000000000000001444331012000133765ustar00rootroot00000000000000nvchecker-2.12/.github/000077500000000000000000000000001444331012000147365ustar00rootroot00000000000000nvchecker-2.12/.github/workflows/000077500000000000000000000000001444331012000167735ustar00rootroot00000000000000nvchecker-2.12/.github/workflows/codeql-analysis.yml000066400000000000000000000050321444331012000226060ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. name: "CodeQL" on: push: branches: [master] pull_request: # The branches below must be a subset of the branches above branches: [master] schedule: - cron: '0 18 * * 5' jobs: analyze: name: Analyze runs-on: ubuntu-latest strategy: fail-fast: false matrix: # Override automatic language detection by changing the below list # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] language: ['python'] # Learn more... # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection steps: - name: Checkout repository uses: actions/checkout@v3 with: # We must fetch at least the immediate parents so that if this is # a pull request then we can checkout the head. fetch-depth: 2 # If this run was triggered by a pull request event, then checkout # the head of the pull request instead of the merge commit. - run: git checkout HEAD^2 if: ${{ github.event_name == 'pull_request' }} # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # queries: ./path/to/local/query, your-org/your-repo/queries@main # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v2 # â„šī¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl # âœī¸ If the Autobuild fails above, remove it and uncomment the following three lines # and modify them (or add more) to build your code if your project # uses a compiled language #- run: | # make bootstrap # make release - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 nvchecker-2.12/.github/workflows/mypy.yaml000066400000000000000000000021101444331012000206470ustar00rootroot00000000000000name: run mypy on: [push, pull_request] jobs: build: name: run mypy runs-on: ubuntu-latest steps: - name: Checkout Code uses: actions/checkout@v3 - name: Setup Cache uses: actions/cache@v3 env: cache-name: cache-pipmypy with: path: ~/.cache/pip key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('setup.py') }} restore-keys: | ${{ runner.os }}-${{ env.cache-name }}- ${{ runner.os }}-cache-pip- - name: Install deps run: pip3 install -U tornado pytest pytest-asyncio pytest-httpbin flaky structlog tomli platformdirs aiohttp httpx mypy awesomeversion - name: Run mypy for --install-types run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests continue-on-error: true - name: Install types run: PATH=$HOME/.local/bin:$PATH yes | mypy --install-types - name: Run mypy run: PATH=$HOME/.local/bin:$PATH 
mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests nvchecker-2.12/.github/workflows/tests.yaml000066400000000000000000000043161444331012000210250ustar00rootroot00000000000000name: run tests on: [push, pull_request] jobs: tests: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" # pypy fails in some cases but we don't care much about that # with github actions we can't mark some jobs to not affect the overall # conclusion so we have to omit "allow-failure" tests. # See https://github.com/actions/toolkit/issues/399 # - pypy-3.7 deps: - tornado pycurl - aiohttp - tornado - httpx[http2]>=0.14.0 exclude: # Python 3.7 has a bug with openssl 3.x: https://bugs.python.org/issue43788 # https://github.com/lilydjwg/nvchecker/actions/runs/4524633969/jobs/7968599431 - python-version: "3.7" deps: tornado steps: - name: Checkout code uses: actions/checkout@v3 - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Cache uses: actions/cache@v3 env: cache-name: cache-pip with: path: ~/.cache/pip key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('pyproject.toml', 'setup.cfg') }} restore-keys: | ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}- ${{ runner.os }}-${{ env.cache-name }}- - name: Install pycurl deps if: ${{ contains(matrix.deps, 'pycurl') }} run: | sudo apt update sudo apt install -y libcurl4-openssl-dev # werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673 - name: Install Python deps run: pip install -U ${{ matrix.deps }} pytest pytest-asyncio pytest-httpbin flaky structlog tomli platformdirs lxml 'werkzeug<2.1' awesomeversion - name: Decrypt keys env: KEY: ${{ secrets.KEY }} run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi - name: Run pytest 
run: if [[ -f keyfile.toml ]]; then KEYFILE=keyfile.toml pytest; else pytest; fi nvchecker-2.12/.gitignore000066400000000000000000000001611444331012000153640ustar00rootroot00000000000000*.egg-info/ __pycache__/ /build/ /dist/ .cache/ .eggs/ *.pyc *.pyo .travis.pub .pytest_cache/ .tox/ keyfile.toml nvchecker-2.12/.typos.toml000066400000000000000000000000431444331012000155240ustar00rootroot00000000000000[default.extend-words] mis = "mis" nvchecker-2.12/LICENSE000066400000000000000000000021131444331012000144000ustar00rootroot00000000000000MIT License Copyright (c) 2013-2017 lilydjwg , et al. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. nvchecker-2.12/README.rst000066400000000000000000000033651444331012000150740ustar00rootroot00000000000000**nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released. This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch. .. 
image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master :alt: Test Status :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22 .. image:: https://badge.fury.io/py/nvchecker.svg :alt: PyPI version :target: https://badge.fury.io/py/nvchecker .. image:: https://readthedocs.org/projects/nvchecker/badge/?version=latest :target: https://nvchecker.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status | .. image:: https://repology.org/badge/vertical-allrepos/nvchecker.svg :alt: Packaging status :target: https://repology.org/metapackage/nvchecker/versions .. contents:: :local: Dependency ---------- - Python 3.7+ - Python library: structlog, platformdirs, tomli (on Python < 3.11) - One of these Python library combinations (ordered by preference): * tornado + pycurl * aiohttp * httpx with http2 support (experimental; only latest version is supported) * tornado - All commands used in your software version configuration files Install and Run --------------- To install:: pip3 install nvchecker To use the latest code, you can also clone this repository and run:: pip install . To see available options:: nvchecker --help Run with one or more software version files:: nvchecker -c config_file You normally will like to specify some "version record files"; see below. Documentation ------------- For detailed documentation, see `https://nvchecker.readthedocs.io/en/latest/ `_. nvchecker-2.12/docs/000077500000000000000000000000001444331012000143265ustar00rootroot00000000000000nvchecker-2.12/docs/.gitignore000066400000000000000000000000101444331012000163050ustar00rootroot00000000000000_build/ nvchecker-2.12/docs/Makefile000066400000000000000000000013611444331012000157670ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . 
BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile man man: $(BUILDDIR)/man/nvchecker.1 $(BUILDDIR)/man/nvchecker.1: usage.rst mkdir -p $(BUILDDIR)/man ./myrst2man.py $< > $@ # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) nvchecker-2.12/docs/api.rst000066400000000000000000000014551444331012000156360ustar00rootroot00000000000000``nvchecker.api`` --- The source plugin API =========================================== .. automodule:: nvchecker.api :members: :imported-members: :undoc-members: .. py:data:: session :type: nvchecker.httpclient.base.BaseSession The object to send out HTTP requests, respecting various options in the configuration entry. .. automodule:: nvchecker.httpclient.base :members: BaseSession, Response :undoc-members: .. autodata:: nvchecker.api.proxy .. autodata:: nvchecker.api.user_agent .. autodata:: nvchecker.api.tries .. autodata:: nvchecker.api.verify_cert .. py:data:: nvchecker.api.entry_waiter :type: contextvars.ContextVar This :class:`ContextVar ` contains an :class:`EntryWaiter ` instance for waiting on other entries. nvchecker-2.12/docs/conf.py000066400000000000000000000022771444331012000156350ustar00rootroot00000000000000import os import sys sys.path.insert(0, os.path.abspath("..")) import nvchecker master_doc = "index" project = "nvchecker" copyright = "lilydjwg, et al." 
version = release = nvchecker.__version__ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", ] primary_domain = "py" default_role = "py:obj" autodoc_member_order = "bysource" autoclass_content = "both" autodoc_inherit_docstrings = False # Without this line sphinx includes a copy of object.__init__'s docstring # on any class that doesn't define __init__. # https://bitbucket.org/birkenfeld/sphinx/issue/1337/autoclass_content-both-uses-object__init__ autodoc_docstring_signature = False intersphinx_mapping = {"python": ("https://docs.python.org/3.8/", None)} on_rtd = os.environ.get("READTHEDOCS", None) == "True" # On RTD we can't import sphinx_rtd_theme, but it will be applied by # default anyway. This block will use the same theme when building locally # as on RTD. if not on_rtd: import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_theme_options = { 'collapse_navigation': False, } nvchecker-2.12/docs/index.rst000066400000000000000000000006761444331012000162000ustar00rootroot00000000000000.. nvchecker documentation master file, created by sphinx-quickstart on Thu Sep 3 00:19:02 2020. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to nvchecker's documentation! ===================================== .. toctree:: :maxdepth: 2 usage plugin api Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` nvchecker-2.12/docs/make.bat000066400000000000000000000014331444331012000157340ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd nvchecker-2.12/docs/myrst2man.py000077500000000000000000000040341444331012000166400ustar00rootroot00000000000000#!/usr/bin/python3 import time import locale import os import sys try: locale.setlocale(locale.LC_ALL, '') except: pass sys.path.insert(0, '..') import nvchecker from docutils.core import publish_cmdline, default_description from docutils import nodes from docutils.writers import manpage from docutils.parsers.rst import roles def ref_role( role, rawtext, text, lineno, inliner, options={}, content=[], ): node = nodes.reference(rawtext, text.title(), **options) return [node], [] def doc_role( role, rawtext, text, lineno, inliner, options={}, content=[], ): node = nodes.reference(rawtext, text, **options) return [node], [] roles.register_local_role('ref', ref_role) roles.register_local_role('doc', doc_role) class MyTranslator(manpage.Translator): def visit_image(self, node): raise nodes.SkipNode def visit_topic(self, node): self.body.append('\n') raise nodes.SkipNode def visit_title(self, node): try: super().visit_title(node) except nodes.SkipNode: if self.section_level == 0: self._docinfo['title'] = 'nvchecker' self._docinfo['subtitle'] = 'New version checker for software releases' self._docinfo['title_upper'] = 'nvchecker'.upper() self._docinfo['manual_section'] = '1' # Make the generated man page reproducible. 
Based on the patch from # https://sourceforge.net/p/docutils/patches/132/#5333 source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') if source_date_epoch: self._docinfo['date'] = time.strftime('%Y-%m-%d', time.gmtime(int(source_date_epoch))) else: self._docinfo['date'] = time.strftime('%Y-%m-%d') self._docinfo['version'] = nvchecker.__version__ raise class MyWriter(manpage.Writer): def __init__(self): super().__init__() self.translator_class = MyTranslator def main(): description = ("Generates plain unix manual documents. " + default_description) publish_cmdline(writer=MyWriter(), description=description) if __name__ == '__main__': main() nvchecker-2.12/docs/plugin.rst000066400000000000000000000067061444331012000163670ustar00rootroot00000000000000How to develop a source plugin for nvchecker ============================================ .. contents:: :local: Source plugins enable nvchecker to discover software version strings in additional ways. Where to put the plugins ------------------------ They are Python modules put in any directories named ``nvchecker_source`` in ``sys.path``. This is called namespace packages introduced by `PEP 420 `_. For local use, ``~/.local/lib/pythonX.Y/site-packages/nvchecker_source`` is a good place, or you can define the ``PYTHONPATH`` environment variable and put nvchecker source plugins there inside a ``nvchecker_source`` directory. Plugins are referenced by their names in the configuration file (``source = "xxx"``). If multiple plugins have the same name, the first one in ``sys.path`` will be used. How to write a simple plugin ---------------------------- For simple situations, you need to define an async function with the following signature:: async def get_version( name: str, conf: Entry, *, cache: AsyncCache, keymanager: KeyManager, **kwargs, ) -> VersionResult: ... Those types are imported from :mod:`nvchecker.api`. ``name`` is the table keys in the configuration file, and ``conf`` is a dict of the content of that table. 
You should not modify this dict. ``cache`` is an :class:`AsyncCache ` object that caches results for you. Every plugin has its own ``cache`` object so that cache keys won't conflict. ``keymanager`` is a :class:`KeyManager ` object that you can call :meth:`.get_key(name) ` to get the key (token) from the keyfile. There may be additional keyword arguments in the future so ``**kwargs`` should be used. If you want to send an HTTP request, it's preferred to use :meth: `cache.get_json ` or the :data: `nvchecker.api.session` object. It will use the auto-selected HTTP backend and handle the ``proxy`` option automatically. For details about these objects, see :mod:`the API documentation `, or take existing source plugins as examples. How to write a more powerful plugin ----------------------------------- You may want more control in your source plugin, e.g. to do batch requests. To do this, you provide a class instead:: class Worker(BaseWorker): async def run(self) -> None: ... You will have the following in the attributes:: token_q: Queue[bool], result_q: Queue[RawResult], tasks: List[Tuple[str, Entry]], keymanager: KeyManager, You are expected to process :attr:`tasks ` and put results in :attr:`result_q `. See ``nvchecker_source/none.py`` for the simplest example, and ``nvchecker_source/aur.py`` for a complete, batching example. For details about these objects, see :mod:`the API documentation `. You can also receive a configuration section from the configuration as ``__config__.source.SOURCE_NAME``, where ``SOURCE_NAME`` is what your plugin is called. This can be used to specify a mirror site for your plugin to use, e.g. the ``npm`` plugin accepts the following config:: [__config__.source.npm] registry = "https://registry.npm.taobao.org" When such a configuration exists for your plugin, you need to define a function named ``configure`` to receive it:: def configure(config): '''use the "config" dict in some way''' ... 
nvchecker-2.12/docs/requirements.txt000066400000000000000000000002101444331012000176030ustar00rootroot00000000000000tomli structlog platformdirs tornado>=6 sphinx>=3.2 # <5 has strange bottom margins for p, and no list indicators sphinx-rtd-theme>=0.5 nvchecker-2.12/docs/usage.rst000066400000000000000000000723311444331012000161720ustar00rootroot00000000000000Usage of nvchecker commands =========================== **nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released. This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch. .. image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master :alt: Test Status :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22 .. image:: https://badge.fury.io/py/nvchecker.svg :alt: PyPI version :target: https://badge.fury.io/py/nvchecker .. contents:: :local: Dependency ---------- - Python 3.7+ - Python library: structlog, platformdirs, tomli (on Python < 3.11) - One of these Python library combinations (ordered by preference): * tornado + pycurl * aiohttp * httpx with http2 support (experimental; only latest version is supported) * tornado - All commands used in your software version configuration files Install and Run --------------- To install:: pip3 install nvchecker To use the latest code, you can also clone this repository and run:: python3 setup.py install To see available options:: nvchecker --help Run with one or more software version files:: nvchecker -c config_file.toml A simple config file may look like: .. code-block:: toml [nvchecker] source = "github" github = "lilydjwg/nvchecker" [python-toml] source = "pypi" pypi = "toml" You normally will like to specify some "version record files"; see below. JSON logging ~~~~~~~~~~~~ With ``--logger=json`` or ``--logger=both``, you can get a structured logging for programmatically consuming. 
You can use ``--json-log-fd=FD`` to specify the file descriptor to send logs to (take care to do line buffering). The logging level option (``-l`` or ``--logging``) doesn't take effect with this. The JSON log is one JSON string per line. The following documented events and fields are stable, undocumented ones may change without notice. event=updated An update is detected. Fields ``name``, ``old_version`` and ``version`` are available. ``old_version`` maybe ``null``. event=up-to-date There is no update. Fields ``name`` and ``version`` are available. event=no-result No version is detected. There may be an error. Fields ``name`` is available. level=error There is an error. Fields ``name`` and ``exc_info`` may be available to give further information. Upgrade from 1.x version ~~~~~~~~~~~~~~~~~~~~~~~~ There are several backward-incompatible changes from the previous 1.x version. 1. Version 2.x requires Python 3.7+ to run. 2. The command syntax changes a bit. You need to use a ``-c`` switch to specify your software version configuration file (or use the default). 3. The configuration file format has been changed from ini to `toml`_. You can use the ``nvchecker-ini2toml`` script to convert your old configuration files. However, comments and formatting will be lost, and some options may not be converted correctly. 4. Several options have been renamed. ``max_concurrent`` to ``max_concurrency``, and all option names have their ``-`` be replaced with ``_``. 5. All software configuration tables need a ``source`` option to specify which source is to be used rather than being figured out from option names in use. This enables additional source plugins to be discovered. 6. The version record files have been changed to use JSON format (the old format will be converted on writing). 7. The ``vcs`` source is removed. (It's available inside `lilac `_ at the moment.) A ``git`` source is provided. 8. ``include_tags_pattern`` and ``ignored_tags`` are removed. 
Use :ref:`list options` instead. Version Record Files -------------------- Version record files record which version of the software you know or is available. They are a simple JSON object mapping software names to known versions. The ``nvtake`` Command ~~~~~~~~~~~~~~~~~~~~~~ This command helps to manage version record files. It reads both old and new version record files, and a list of names given on the commandline. It then update the versions of those names in the old version record file. This helps when you have known (and processed) some of the updated software, but not all. You can tell nvchecker that via this command instead of editing the file by hand. This command will help most if you specify where you version record files are in your config file. See below for how to use a config file. The ``nvcmp`` Command ~~~~~~~~~~~~~~~~~~~~~ This command compares the ``newver`` file with the ``oldver`` one and prints out any differences as updates, e.g.:: $ nvcmp -c sample_source.toml Sparkle Test App None -> 2.0 test 0.0 -> 0.1 Configuration Files ------------------- The software version source files are in `toml`_ format. The *key name* is the name of the software. Following fields are used to tell nvchecker how to determine the current version of that software. See `sample_source.toml `_ for an example. Configuration Table ~~~~~~~~~~~~~~~~~~~ A special table named ``__config__`` provides some configuration options. Relative path are relative to the source files, and ``~`` and environmental variables are expanded. Currently supported options are: oldver Specify a version record file containing the old version info. newver Specify a version record file to store the new version info. proxy The HTTP proxy to use. The format is ``proto://host:port``, e.g. ``http://localhost:8087``. Different backends have different level support for this, e.g. with ``pycurl`` you can use ``socks5h://host:port`` proxies. max_concurrency Max number of concurrent jobs. Default: 20. 
http_timeout Time in seconds to wait for HTTP requests. Default: 20. keyfile Specify a toml config file containing key (token) information. This file should contain a ``keys`` table, mapping key names to key values. See specific source for the key name(s) to use. Sample ``keyfile.toml``: .. code-block:: toml [keys] # https://github.com/settings/tokens # scope: repo -> public_repo github = "ghp_" Global Options ~~~~~~~~~~~~~~ The following options apply to every check sources. You can use them in any item in your configuration file. prefix Strip the prefix string if the version string starts with it. Otherwise the version string is returned as-is. from_pattern, to_pattern Both are Python-compatible regular expressions. If ``from_pattern`` is found in the version string, it will be replaced with ``to_pattern``. If ``from_pattern`` is not found, the version string remains unchanged and no error is emitted. missing_ok Suppress warnings and errors if a version checking module finds nothing. Currently only ``regex`` supports it. proxy The HTTP proxy to use. The format is ``proto://host:port``, e.g. ``http://localhost:8087``. Different backends have different level support for this, e.g. with ``pycurl`` you can use ``socks5h://host:port`` proxies. Set it to ``""`` (empty string) to override the global setting. This only works when the source implementation uses the builtin HTTP client, and doesn't work with the ``aur`` source because it's batched (however the global proxy config still applies). user_agent The user agent string to use for HTTP requests. tries Try specified times when a network error occurs. Default is ``1``. This only works when the source implementation uses the builtin HTTP client. httptoken A personal authorization token used to fetch the url with the ``Authorization`` header. The type of token depends on the authorization required. - For Bearer token set \: ``Bearer `` - For Basic token set \: ``Basic `` In the keyfile add ``httptoken_{name}`` token. 
verify_cert Whether to verify the HTTPS certificate or not. Default is ``true``. If both ``prefix`` and ``from_pattern``/``to_pattern`` are used, ``from_pattern``/``to_pattern`` are ignored. If you want to strip the prefix and then do something special, just use ``from_pattern``/``to_pattern``. For example, the transformation of ``v1_1_0`` => ``1.1.0`` can be achieved with ``from_pattern = 'v(\d+)_(\d+)_(\d+)'`` and ``to_pattern = '\1.\2.\3'``. (Note that in TOML it's easiler to write regexes in single quotes so you don't need to escape ``\``.) .. _list options: List Options ~~~~~~~~~~~~ The following options apply to sources that return a list. See individual source tables to determine whether they are supported. include_regex Only consider version strings that match the given regex. The whole string should match the regex. Be sure to use ``.*`` when you mean it! exclude_regex Don't consider version strings that match the given regex. The whole string should match the regex. Be sure to use ``.*`` when you mean it! This option has higher precedence that ``include_regex``; that is, if matched by this one, it's excluded even it's also matched by ``include_regex``. sort_version_key Sort the version string using this key function. Choose among ``parse_version``, ``vercmp`` and ``awesomeversion``. Default value is ``parse_version``. ``parse_version`` uses an old version of ``pkg_resources.parse_version``. ``vercmp`` uses ``pyalpm.vercmp``. ``awesomeversion`` uses `awesomeversion `_. ignored Version strings that are explicitly ignored, separated by whitespace. This can be useful to avoid some known mis-named versions, so newer ones won't be "overridden" by the old broken ones. Search in a Webpage ~~~~~~~~~~~~~~~~~~~ :: source = "regex" Search through a specific webpage for the version string. This type of version finding has these fields: url The URL of the webpage to fetch. encoding (*Optional*) The character encoding of the webpage, if ``latin1`` is not appropriate. 
regex A regular expression used to find the version string. It can have zero or one capture group. The capture group or the whole match is the version string. When multiple version strings are found, the maximum of those is chosen. post_data (*Optional*) When present, a ``POST`` request (instead of a ``GET``) will be used. The value should be a string containing the full body of the request. The encoding of the string can be specified using the ``post_data_type`` option. post_data_type (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/x-www-form-urlencoded``. This source supports :ref:`list options`. Search in an HTTP header ~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "httpheader" Send an HTTP request and search through a specific header. url The URL of the HTTP request. header (*Optional*) The header to look at. Default is ``Location``. Another useful header is ``Content-Disposition``. regex A regular expression used to find the version string. It can have zero or one capture group. The capture group or the whole match is the version string. When multiple version strings are found, the maximum of those is chosen. method (*Optional*) The HTTP method to use. Default is ``HEAD``. follow_redirects (*Optional*) Whether to follow 3xx HTTP redirects. Default is ``false``. If you are looking at a ``Location`` header, you shouldn't change this. Search with an HTML Parser ~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "htmlparser" Send an HTTP request and search through the body a specific xpath. url The URL of the HTTP request. xpath An xpath expression used to find the version string. post_data (*Optional*) When present, a ``POST`` request (instead of a ``GET``) will be used. The value should be a string containing the full body of the request. The encoding of the string can be specified using the ``post_data_type`` option. post_data_type (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). 
By default, this is ``application/x-www-form-urlencoded``. .. note:: An additional dependency "lxml" is required. You can use ``pip install 'nvchecker[htmlparser]'``. Find with a Command ~~~~~~~~~~~~~~~~~~~ :: source = "cmd" Use a shell command line to get the version. The output is striped first, so trailing newlines do not bother. cmd The command line to use. This will run with the system's standard shell (i.e. ``/bin/sh``). Check AUR ~~~~~~~~~ :: source = "aur" Check `Arch User Repository `_ for updates. Per-item proxy setting doesn't work for this because several items will be batched into one request. aur The package name in AUR. If empty, use the name of software (the *table name*). strip_release Strip the release part. use_last_modified Append last modified time to the version. Check GitHub ~~~~~~~~~~~~ :: source = "github" Check `GitHub `_ for updates. The version returned is in date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release`` or ``use_max_tag`` is used. See below. github The github repository, with author, e.g. ``lilydjwg/nvchecker``. branch Which branch to track? Default: the repository's default. path Only commits containing this file path will be returned. use_latest_release Set this to ``true`` to check for the latest release on GitHub. GitHub releases are not the same with git tags. You'll see big version names and descriptions in the release page for such releases, e.g. `zfsonlinux/zfs's `_, and those small ones like `nvchecker's `_ are only git tags that should use ``use_max_tag`` below. Will return the release name instead of date. use_latest_tag Set this to ``true`` to check for the latest tag on GitHub. This requires a token because it's using the v4 GraphQL API. query When ``use_latest_tag`` is ``true``, this sets a query for the tag. The exact matching method is not documented by GitHub. use_max_tag Set this to ``true`` to check for the max tag on GitHub. 
Unlike ``use_latest_release``, this option includes both annotated tags and lightweight ones, and return the largest one sorted by the ``sort_version_key`` option. Will return the tag name instead of date. token A personal authorization token used to call the API. An authorization token may be needed in order to use ``use_latest_tag`` or to request more frequently than anonymously. To set an authorization token, you can set: - a key named ``github`` in the keyfile - the token option This source supports :ref:`list options` when ``use_max_tag`` is set. Check Gitea ~~~~~~~~~~~ :: source = "gitea" Check `Gitea `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. gitea The gitea repository, with author, e.g. ``gitea/tea``. branch Which branch to track? Default: the repository's default. use_max_tag Set this to ``true`` to check for the max tag on Gitea. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. host Hostname for self-hosted Gitea instance. token Gitea authorization token used to call the API. To set an authorization token, you can set: - a key named ``gitea_{host}`` in the keyfile, where ``host`` is all-lowercased host name - the token option This source supports :ref:`list options` when ``use_max_tag`` is set. Check BitBucket ~~~~~~~~~~~~~~~ :: source = "bitbucket" Check `BitBucket `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. bitbucket The bitbucket repository, with author, e.g. ``lilydjwg/dotvim``. branch Which branch to track? Default: the repository's default. use_max_tag Set this to ``true`` to check for the max tag on BitBucket. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. 
use_sorted_tags If ``true``, tags are queried and sorted according to the ``query`` and ``sort`` keys. Will return the tag name instead of the date. query A query string use to filter tags when ``use_sorted_tags`` set (see `here `__ for examples). The string does not need to be escaped. sort A field used to sort the tags when ``use_sorted_tags`` is set (see `here `__ for examples). Defaults to ``-target.date`` (sorts tags in descending order by date). max_page How many pages do we search for the max tag? Default is 3. This works when ``use_max_tag`` is set. This source supports :ref:`list options` when ``use_max_tag`` or ``use_sorted_tags`` is set. Check GitLab ~~~~~~~~~~~~ :: source = "gitlab" Check `GitLab `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. gitlab The gitlab repository, with author, e.g. ``Deepin/deepin-music``. branch Which branch to track? use_max_tag Set this to ``true`` to check for the max tag on GitLab. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. host Hostname for self-hosted GitLab instance. token GitLab authorization token used to call the API. To set an authorization token, you can set: - a key named ``gitlab_{host}`` in the keyfile, where ``host`` is all-lowercased host name - the token option This source supports :ref:`list options` when ``use_max_tag`` is set. Check PyPI ~~~~~~~~~~ :: source = "pypi" Check `PyPI `_ for updates. pypi The name used on PyPI, e.g. ``PySide``. use_pre_release Whether to accept pre release. Default is false. .. note:: An additional dependency "packaging" is required. You can use ``pip install 'nvchecker[pypi]'``. Check RubyGems ~~~~~~~~~~~~~~ :: source = "gems" Check `RubyGems `_ for updates. gems The name used on RubyGems, e.g. ``sass``. This source supports :ref:`list options`. 
Check NPM Registry ~~~~~~~~~~~~~~~~~~ :: source = "npm" Check `NPM Registry `_ for updates. npm The name used on NPM Registry, e.g. ``coffee-script``. To configure which registry to query, a source plugin option is available. You can specify like this:: [__config__.source.npm] registry = "https://registry.npm.taobao.org" Check Hackage ~~~~~~~~~~~~~ :: source = "hackage" Check `Hackage `_ for updates. hackage The name used on Hackage, e.g. ``pandoc``. Check CPAN ~~~~~~~~~~ :: source = "cpan" Check `MetaCPAN `_ for updates. cpan The name used on CPAN, e.g. ``YAML``. Check CRAN ~~~~~~~~~~ :: source = "cran" Check `CRAN `_ for updates. cran The name used on CRAN, e.g. ``xml2``. Check Packagist ~~~~~~~~~~~~~~~ :: source = "packagist" Check `Packagist `_ for updates. packagist The name used on Packagist, e.g. ``monolog/monolog``. Check crates.io ~~~~~~~~~~~~~~~ :: source = "cratesio" Check `crates.io `_ for updates. cratesio The crate name on crates.io, e.g. ``tokio``. Check Local Pacman Database ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "pacman" This is used when you run ``nvchecker`` on an Arch Linux system and the program always keeps up with a package in your configured repositories for `Pacman`_. pacman The package name to reference to. strip_release Strip the release part. Check Arch Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "archpkg" This enables you to track the update of `Arch Linux official packages `_, without needing of pacman and an updated local Pacman databases. archpkg Name of the Arch Linux package. strip_release Strip the release part, only return part before ``-``. provided Instead of the package version, return the version this package provides. Its value is what the package provides, and ``strip_release`` takes effect too. This is best used with libraries. 
Check Debian Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "debianpkg" This enables you to track the update of `Debian Linux official packages `_, without needing of apt and an updated local APT database. debianpkg Name of the Debian Linux source package. suite Name of the Debian release (jessie, wheezy, etc, defaults to sid) strip_release Strip the release part. Check Ubuntu Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "ubuntupkg" This enables you to track the update of `Ubuntu Linux official packages `_, without needing of apt and an updated local APT database. ubuntupkg Name of the Ubuntu Linux source package. suite Name of the Ubuntu release (xenial, zesty, etc, defaults to None, which means no limit on suite) strip_release Strip the release part. Check Repology ~~~~~~~~~~~~~~ :: source = "repology" This enables you to track updates from `Repology `_ (repology.org). repology Name of the ``project`` to check. repo Check the version in this repo. This field is required. subrepo Check the version in this subrepo. This field is optional. When omitted all subrepos are queried. This source supports :ref:`list options`. Check Anitya ~~~~~~~~~~~~ :: source = "anitya" This enables you to track updates from `Anitya `_ (release-monitoring.org). anitya ``distro/package``, where ``distro`` can be a lot of things like "fedora", "arch linux", "gentoo", etc. ``package`` is the package name of the chosen distribution. Check Android SDK ~~~~~~~~~~~~~~~~~ :: source = "android_sdk" This enables you to track updates of Android SDK packages listed in ``sdkmanager --list``. android_sdk The package path prefix. This value is matched against the ``path`` attribute in all nodes in an SDK manifest XML. The first match is used for version comparisons. repo Should be one of ``addon`` or ``package``. Packages in ``addon2-1.xml`` use ``addon`` and packages in ``repository2-1.xml`` use ``package``. 
channel Choose the target channel from one of ``stable``, ``beta``, ``dev`` or ``canary``. This option also accepts a comma-separated list to pick from multiple channels. For example, the latest unstable version is picked with ``beta,dev,canary``. The default is ``stable``. host_os Choose the target OS for the tracked package from one of ``linux``, ``macosx``, ``windows``. The default is ``linux``. For OS-independent packages (e.g., Java JARs), this field is ignored. This source supports :ref:`list options`. Check Sparkle framework ~~~~~~~~~~~~~~~~~~~~~~~ :: source = "sparkle" This enables you to track updates of macOS applications which using `Sparkle framework `_. sparkle The url of the sparkle appcast. Check Pagure ~~~~~~~~~~~~ :: source = "pagure" This enables you to check updates from `Pagure `_. pagure The project name, optionally with a namespace. host Hostname of alternative instance like src.fedoraproject.org. This source returns tags and supports :ref:`list options`. Check APT repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "apt" This enables you to track the update of an arbitrary APT repository, without needing of apt and an updated local APT database. pkg Name of the APT binary package. srcpkg Name of the APT source package. mirror URL of the repository. suite Name of the APT repository release (jessie, wheezy, etc) repo Name of the APT repository (main, contrib, etc, defaults to main) arch Architecture of the repository (i386, amd64, etc, defaults to amd64) strip_release Strip the release part. Note that either pkg or srcpkg needs to be specified (but not both) or the item name will be used as pkg. Check Git repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "git" This enables you to check tags or branch commits of an arbitrary git repository, also useful for scenarios like a github project having too many tags. git URL of the Git repository. use_commit Return a commit hash instead of tags. 
branch When ``use_commit`` is true, return the commit on the specified branch instead of the default one. When this source returns tags (``use_commit`` is not true) it supports :ref:`list options`. Check container registry ~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "container" This enables you to check tags of images on a container registry like Docker. container The path for the container image. For official Docker images, use namespace ``library/`` (e.g. ``library/python``). registry The container registry host. Default: ``docker.io`` ``registry`` and ``container`` are the host and the path used in the pull command. Note that the ``docker`` command allows omitting some parts of the container name while this plugin requires the full name. If the host part is omitted, use ``docker.io``, and if there is no slash in the path, prepend ``library/`` to the path. Here are some examples: +----------------------------------------------+-----------+--------------------------+ | Pull command | registry | container | +==============================================+===========+==========================+ | docker pull quay.io/prometheus/node-exporter | quay.io | prometheus/node-exporter | +----------------------------------------------+-----------+--------------------------+ | docker pull nvidia/cuda | docker.io | nvidia/cuda | +----------------------------------------------+-----------+--------------------------+ | docker pull python | docker.io | library/python | +----------------------------------------------+-----------+--------------------------+ This source returns tags and supports :ref:`list options`. Check ALPM database ~~~~~~~~~~~~~~~~~~~ :: source = "alpm" Check package updates in a local ALPM database. alpm Name of the package. repo Name of the package repository in which the package resides. If not provided, nvchecker will use ``repos`` value, see below. 
repos An array of possible repositories in which the package may reside in, nvchecker will use the first repository which contains the package. If not provided, ``core``, ``extra``, ``community`` and ``multilib`` will be used, in that order. dbpath Path to the ALPM database directory. Default: ``/var/lib/pacman``. You need to update the database yourself. strip_release Strip the release part, only return the part before ``-``. provided Instead of the package version, return the version this package provides. Its value is what the package provides, and ``strip_release`` takes effect too. This is best used with libraries. .. note:: An additional dependency "pyalpm" is required. Check ALPM files database ~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "alpmfiles" Search package files in a local ALPM files database. The package does not need to be installed. This can be useful for checking shared library versions if a package does not list them in its ``provides``. pkgname Name of the package. filename Regular expression for the file path. If it contains one matching group, that group is returned. Otherwise return the whole file path. Paths do not have an initial slash. For example, ``usr/lib/libuv\\.so\\.([^.]+)`` matches the major shared library version of libuv. repo Name of the package repository in which the package resides. If not provided, search all repositories. strip_dir Strip directory from the path before matching. Defaults to ``false``. dbpath Path to the ALPM database directory. Default: ``/var/lib/pacman``. You need to update the database yourself with ``pacman -Fy``. Check Open Vsx ~~~~~~~~~~~~~~~ :: source = "openvsx" Check `Open Vsx `_ for updates. openvsx The extension's Unique Identifier on open-vsx.org, e.g. ``ritwickdey.LiveServer``. Check Visual Studio Code Marketplace ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "vsmarketplace" Check `Visual Studio Code Marketplace `_ for updates. 
vsmarketplace The extension's Unique Identifier on marketplace.visualstudio.com/vscode, e.g. ``ritwickdey.LiveServer``. Combine others' results ~~~~~~~~~~~~~~~~~~~~~~~ :: source = "combiner" This source can combine results from other entries. from A list of entry names to wait results for. format A format string to combine the results into the final string. Example: .. code-block:: toml [entry-1] source = "cmd" cmd = "echo 1" [entry-2] source = "cmd" cmd = "echo 2" [entry-3] source = "combiner" from = ["entry-1", "entry-2"] format = "$1-$2" Manually updating ~~~~~~~~~~~~~~~~~ :: source = "manual" This enables you to manually specify the version (maybe because you want to approve each release before it gets to the script). manual The version string. Extending ~~~~~~~~~ It's possible to extend the supported sources by writing plugins. See :doc:`plugin` for documentation. .. _Pacman: https://wiki.archlinux.org/title/Pacman .. _toml: https://toml.io/ nvchecker-2.12/keyfile.toml.enc000066400000000000000000000003461444331012000164720ustar00rootroot00000000000000Salted__ĮßKĘ]å¯õ´›äķ(¯J×ũēŒv4/ÆáôLԍĩ(Ķ“O|ôNTĖ’ĄO @”Ŧ” ×ÛǍ8—īû‡Jz cČOüžË˛úäk›dˇ—æ/œ÷ÛZ)Q}Ūá4×ĩËŌW§@í*´ ÃWš%ĸˇBäđo¤¨wDA›Ú\W´ĩĪ$:ؖ ߅´ĩ9ČßŊ0"1yE53ĢUÍwÅ"â0j!ĩéâœq^ ŽëŊŧJ$oÜÛnąhņ—u`ÅÚ0GŨÛôËNHĩXŽŧl53‘×D ˙€Œnvchecker-2.12/mypy.ini000066400000000000000000000007011444331012000150730ustar00rootroot00000000000000[mypy] warn_unused_configs = True warn_redundant_casts = True warn_unused_ignores = True show_error_context = True show_column_numbers = True no_implicit_optional = True [mypy-structlog] ignore_missing_imports = True [mypy-pyalpm] ignore_missing_imports = True [mypy-flaky] ignore_missing_imports = True [mypy-pytest_httpbin] ignore_missing_imports = True [mypy-lxml] ignore_missing_imports = True [mypy-tomllib] ignore_missing_imports = True 
nvchecker-2.12/nvchecker/000077500000000000000000000000001444331012000153465ustar00rootroot00000000000000nvchecker-2.12/nvchecker/__init__.py000066400000000000000000000001451444331012000174570ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2023 lilydjwg , et al. __version__ = '2.12' nvchecker-2.12/nvchecker/__main__.py000077500000000000000000000063721444331012000174530ustar00rootroot00000000000000#!/usr/bin/env python3 # MIT licensed # Copyright (c) 2013-2022 lilydjwg , et al. from __future__ import annotations import sys import argparse import asyncio from typing import Coroutine, Tuple from pathlib import Path import structlog from . import core from .util import VersData, RawResult, KeyManager, EntryWaiter from .ctxvars import proxy as ctx_proxy logger = structlog.get_logger(logger_name=__name__) def main() -> None: parser = argparse.ArgumentParser(description='New version checker for software') parser.add_argument('-k', '--keyfile', metavar='FILE', type=str, help='use specified keyfile (override the one in configuration file)') parser.add_argument('-t', '--tries', default=1, type=int, metavar='N', help='try N times when network errors occur') parser.add_argument('--failures', action='store_true', help='exit with code 3 if failures / errors happen during checking') parser.add_argument('-e', '--entry', type=str, help='only execute on specified entry (useful for debugging)') core.add_common_arguments(parser) args = parser.parse_args() if core.process_common_arguments(args): return try: entries, options = core.load_file( args.file, use_keymanager=not bool(args.keyfile)) if args.entry: if args.entry not in entries: sys.exit('Specified entry not found in config') entries = {args.entry: entries[args.entry]} if args.keyfile: keymanager = KeyManager(Path(args.keyfile)) else: keymanager = options.keymanager except core.FileLoadError as e: sys.exit(str(e)) if options.proxy is not None: ctx_proxy.set(options.proxy) task_sem = 
asyncio.Semaphore(options.max_concurrency) result_q: asyncio.Queue[RawResult] = asyncio.Queue() dispatcher = core.setup_httpclient( options.max_concurrency, options.httplib, options.http_timeout, ) entry_waiter = EntryWaiter() try: futures = dispatcher.dispatch( entries, task_sem, result_q, keymanager, entry_waiter, args.tries, options.source_configs, ) except ModuleNotFoundError as e: sys.exit(f'Error: {e}') if options.ver_files is not None: oldvers = core.read_verfile(options.ver_files[0]) else: oldvers = {} result_coro = core.process_result(oldvers, result_q, entry_waiter) runner_coro = core.run_tasks(futures) if sys.version_info >= (3, 10): # Python 3.10 has deprecated asyncio.get_event_loop newvers, has_failures = asyncio.run(run(result_coro, runner_coro)) else: # Python < 3.10 will create an eventloop when asyncio.Queue is initialized newvers, has_failures = asyncio.get_event_loop().run_until_complete(run(result_coro, runner_coro)) if options.ver_files is not None: core.write_verfile(options.ver_files[1], newvers) if args.failures and has_failures: sys.exit(3) async def run( result_coro: Coroutine[None, None, Tuple[VersData, bool]], runner_coro: Coroutine[None, None, None], ) -> Tuple[VersData, bool]: result_fu = asyncio.create_task(result_coro) runner_fu = asyncio.create_task(runner_coro) await runner_fu result_fu.cancel() return await result_fu if __name__ == '__main__': main() nvchecker-2.12/nvchecker/api.py000066400000000000000000000006031444331012000164700ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020 lilydjwg , et al. 
from .httpclient import session, TemporaryError, HTTPError from .util import ( Entry, BaseWorker, RawResult, VersionResult, AsyncCache, KeyManager, GetVersionError, EntryWaiter, ) from .sortversion import sort_version_keys from .ctxvars import tries, proxy, user_agent, httptoken, entry_waiter, verify_cert nvchecker-2.12/nvchecker/core.py000066400000000000000000000260271444331012000166570ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. from __future__ import annotations import os import sys import asyncio from asyncio import Queue import logging import argparse from typing import ( Tuple, NamedTuple, Optional, List, Union, cast, Dict, Awaitable, Sequence, Any, TYPE_CHECKING, ) import types from pathlib import Path from importlib import import_module import re import contextvars import json import structlog if TYPE_CHECKING: import tomli as tomllib else: try: import tomllib except ModuleNotFoundError: import tomli as tomllib import platformdirs from .lib import nicelogger from . import slogconf from .util import ( Entry, Entries, KeyManager, RawResult, Result, VersData, FunctionWorker, GetVersionError, FileLoadError, EntryWaiter, ) from . import __version__ from .sortversion import sort_version_keys from .ctxvars import tries as ctx_tries from .ctxvars import entry_waiter as ctx_entry_waiter from . 
import httpclient logger = structlog.get_logger(logger_name=__name__) def get_default_config() -> str: confdir = platformdirs.user_config_dir(appname='nvchecker') file = os.path.join(confdir, 'nvchecker.toml') return file def add_common_arguments(parser: argparse.ArgumentParser) -> None: parser.add_argument('-l', '--logging', choices=('debug', 'info', 'warning', 'error'), default='info', help='logging level (default: info)') parser.add_argument('--logger', default='pretty', choices=['pretty', 'json', 'both'], help='select which logger to use') parser.add_argument('--json-log-fd', metavar='FD', type=lambda fd: os.fdopen(int(fd), mode='w'), help='specify fd to send json logs to. stdout by default') parser.add_argument('-V', '--version', action='store_true', help='show version and exit') default_config = get_default_config() parser.add_argument('-c', '--file', metavar='FILE', type=str, default=default_config, help=f'software version configuration file [default: {default_config}]') def process_common_arguments(args: argparse.Namespace) -> bool: '''return True if should stop''' processors = [ slogconf.exc_info, slogconf.filter_exc, ] logger_factory = None if args.logger in ['pretty', 'both']: slogconf.fix_logging() nicelogger.enable_pretty_logging( getattr(logging, args.logging.upper())) processors.append(slogconf.stdlib_renderer) if args.logger == 'pretty': logger_factory=structlog.PrintLoggerFactory( file=open(os.devnull, 'w'), ) processors.append(slogconf.null_renderer) if args.logger in ['json', 'both']: processors.extend([ structlog.processors.format_exc_info, slogconf.json_renderer, ]) if logger_factory is None: logfile = args.json_log_fd or sys.stdout logger_factory = structlog.PrintLoggerFactory(file=logfile) structlog.configure( processors = processors, logger_factory = logger_factory, ) if args.version: progname = os.path.basename(sys.argv[0]) print(f'{progname} v{__version__}') return True return False def safe_overwrite(fname: str, data: Union[bytes, str], 
*, method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None: # FIXME: directory has no read perm # FIXME: symlinks and hard links tmpname = fname + '.tmp' # if not using "with", write can fail without exception with open(tmpname, mode, encoding=encoding) as f: getattr(f, method)(data) # see also: https://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/ f.flush() os.fsync(f.fileno()) # if the above write failed (because disk is full etc), the old data should be kept os.rename(tmpname, fname) def read_verfile(file: Path) -> VersData: try: with open(file) as f: data = f.read() except FileNotFoundError: return {} try: v = json.loads(data) except json.decoder.JSONDecodeError: # old format v = {} for l in data.splitlines(): name, ver = l.rstrip().split(None, 1) v[name] = ver return v def write_verfile(file: Path, versions: VersData) -> None: # sort and indent to make it friendly to human and git data = json.dumps( dict(sorted(versions.items())), indent=2, ensure_ascii=False, ) + '\n' safe_overwrite(str(file), data) class Options(NamedTuple): ver_files: Optional[Tuple[Path, Path]] max_concurrency: int proxy: Optional[str] keymanager: KeyManager source_configs: Dict[str, Dict[str, Any]] httplib: Optional[str] http_timeout: int def load_file( file: str, *, use_keymanager: bool, ) -> Tuple[Entries, Options]: try: with open(file, 'rb') as f: config = tomllib.load(f) except (OSError, tomllib.TOMLDecodeError) as e: raise FileLoadError('version configuration file', file, e) ver_files: Optional[Tuple[Path, Path]] = None keymanager = KeyManager(None) source_configs = {} if '__config__' in config: c = config.pop('__config__') d = Path(file).parent if 'oldver' in c and 'newver' in c: oldver_s = os.path.expandvars( os.path.expanduser(c.get('oldver'))) oldver = d / oldver_s newver_s = os.path.expandvars( os.path.expanduser(c.get('newver'))) newver = d / newver_s ver_files = oldver, newver if use_keymanager: keyfile = c.get('keyfile') if keyfile: keyfile_s = 
os.path.expandvars( os.path.expanduser(c.get('keyfile'))) keyfile = d / keyfile_s keymanager = KeyManager(keyfile) if 'source' in c: source_configs = c['source'] max_concurrency = c.get('max_concurrency', 20) proxy = c.get('proxy') httplib = c.get('httplib', None) http_timeout = c.get('http_timeout', 20) else: max_concurrency = 20 proxy = None httplib = None http_timeout = 20 return cast(Entries, config), Options( ver_files, max_concurrency, proxy, keymanager, source_configs, httplib, http_timeout, ) def setup_httpclient( max_concurrency: int = 20, httplib: Optional[str] = None, http_timeout: int = 20, ) -> Dispatcher: httplib_ = httplib or httpclient.find_best_httplib() httpclient.setup( httplib_, max_concurrency, http_timeout) return Dispatcher() class Dispatcher: def dispatch( self, entries: Entries, task_sem: asyncio.Semaphore, result_q: Queue[RawResult], keymanager: KeyManager, entry_waiter: EntryWaiter, tries: int, source_configs: Dict[str, Dict[str, Any]], ) -> List[asyncio.Future]: mods: Dict[str, Tuple[types.ModuleType, List]] = {} ctx_tries.set(tries) ctx_entry_waiter.set(entry_waiter) root_ctx = contextvars.copy_context() for name, entry in entries.items(): source = entry.get('source', 'none') if source not in mods: mod = import_module('nvchecker_source.' 
+ source) tasks: List[Tuple[str, Entry]] = [] mods[source] = mod, tasks config = source_configs.get(source) if config and getattr(mod, 'configure'): mod.configure(config) else: tasks = mods[source][1] tasks.append((name, entry)) ret = [] for mod, tasks in mods.values(): if hasattr(mod, 'Worker'): worker_cls = mod.Worker else: worker_cls = FunctionWorker ctx = root_ctx.copy() worker = ctx.run( worker_cls, task_sem, result_q, tasks, keymanager, ) if worker_cls is FunctionWorker: func = mod.get_version ctx.run(worker.initialize, func) ret.append(ctx.run(worker._run_maynot_raise)) return ret def substitute_version( version: str, conf: Entry, ) -> str: ''' Substitute the version string via defined rules in the configuration file. See README.rst#global-options for details. ''' prefix = conf.get('prefix') if prefix: if version.startswith(prefix): version = version[len(prefix):] return version from_pattern = conf.get('from_pattern') if from_pattern: to_pattern = conf.get('to_pattern') if to_pattern is None: raise ValueError("from_pattern exists but to_pattern doesn't") return re.sub(from_pattern, to_pattern, version) # No substitution rules found. Just return the original version string. 
return version def apply_list_options( versions: List[str], conf: Entry, ) -> Optional[str]: pattern = conf.get('include_regex') if pattern: re_pat = re.compile(pattern) versions = [x for x in versions if re_pat.fullmatch(x)] pattern = conf.get('exclude_regex') if pattern: re_pat = re.compile(pattern) versions = [x for x in versions if not re_pat.fullmatch(x)] ignored = set(conf.get('ignored', '').split()) if ignored: versions = [x for x in versions if x not in ignored] if not versions: return None sort_version_key = sort_version_keys[ conf.get("sort_version_key", "parse_version")] versions.sort(key=sort_version_key) # type: ignore return versions[-1] def _process_result(r: RawResult) -> Union[Result, Exception]: version = r.version conf = r.conf name = r.name if isinstance(version, GetVersionError): kw = version.kwargs kw['name'] = name logger.error(version.msg, **kw) return version elif isinstance(version, Exception): logger.error('unexpected error happened', name=r.name, exc_info=r.version) return version elif isinstance(version, list): version_str = apply_list_options(version, conf) else: version_str = version if version_str: version_str = version_str.replace('\n', ' ') try: version_str = substitute_version(version_str, conf) return Result(name, version_str, conf) except (ValueError, re.error) as e: logger.exception('error occurred in version substitutions', name=name) return e else: return ValueError('no version returned') def check_version_update( oldvers: VersData, name: str, version: str, ) -> None: oldver = oldvers.get(name, None) if not oldver or oldver != version: logger.info('updated', name=name, version=version, old_version=oldver) else: logger.debug('up-to-date', name=name, version=version) async def process_result( oldvers: VersData, result_q: Queue[RawResult], entry_waiter: EntryWaiter, ) -> Tuple[VersData, bool]: ret = {} has_failures = False try: while True: r = await result_q.get() try: r1 = _process_result(r) except Exception as e: 
logger.exception('error processing result', result=r) r1 = e if isinstance(r1, Exception): entry_waiter.set_exception(r.name, r1) has_failures = True continue check_version_update(oldvers, r1.name, r1.version) entry_waiter.set_result(r1.name, r1.version) ret[r1.name] = r1.version except asyncio.CancelledError: return ret, has_failures async def run_tasks( futures: Sequence[Awaitable[None]] ) -> None: for fu in asyncio.as_completed(futures): await fu nvchecker-2.12/nvchecker/ctxvars.py000066400000000000000000000012371444331012000174150ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020 lilydjwg , et al. from __future__ import annotations from contextvars import ContextVar from typing import Optional, TYPE_CHECKING from . import __version__ DEFAULT_USER_AGENT = f'lilydjwg/nvchecker {__version__}' if TYPE_CHECKING: from .util import EntryWaiter tries = ContextVar('tries', default=1) proxy: ContextVar[Optional[str]] = ContextVar('proxy', default=None) user_agent = ContextVar('user_agent', default=DEFAULT_USER_AGENT) httptoken = ContextVar('httptoken', default=None) entry_waiter: ContextVar[EntryWaiter] = ContextVar('entry_waiter') verify_cert = ContextVar('verify_cert', default=True) nvchecker-2.12/nvchecker/httpclient/000077500000000000000000000000001444331012000175245ustar00rootroot00000000000000nvchecker-2.12/nvchecker/httpclient/__init__.py000066400000000000000000000021411444331012000216330ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. 
from typing import Optional from .base import TemporaryError, HTTPError class Proxy: _obj = None def set_obj(self, obj): super().__setattr__('_obj', obj) def __getattr__(self, name): return getattr(self._obj, name) def __setattr__(self, name, value): return setattr(self._obj, name, value) session = Proxy() def setup( which: Optional[str] = None, concurreny: int = 20, timeout: int = 20, ) -> None: if which is None: which = find_best_httplib() m = __import__( '%s_httpclient' % which, globals(), locals(), level=1) session.set_obj(m.session) session.setup(concurreny, timeout) def find_best_httplib() -> str: try: import tornado, pycurl # connection reuse, http/2 which = 'tornado' except ImportError: try: import aiohttp which = 'aiohttp' # connection reuse except ImportError: try: import httpx which = 'httpx' except ImportError: import tornado which = 'tornado' # fallback return which nvchecker-2.12/nvchecker/httpclient/aiohttp_httpclient.py000066400000000000000000000043471444331012000240140ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. 
import asyncio from typing import Optional, Dict import structlog import aiohttp from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] logger = structlog.get_logger(logger_name=__name__) connector = aiohttp.TCPConnector(limit=20) class AiohttpSession(BaseSession): session = None def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: self._concurreny = concurreny self._timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: if self.session is None: # need to create in async context self.session = aiohttp.ClientSession( connector = aiohttp.TCPConnector(limit=self._concurreny), timeout = aiohttp.ClientTimeout(total=self._timeout), trust_env = True, ) kwargs = { 'headers': headers, 'params': params, 'allow_redirects': follow_redirects, } if not verify_cert: kwargs['ssl'] = False if proxy is not None: kwargs['proxy'] = proxy if body is not None: # Make sure all backends have the same default encoding for post data. 
if 'Content-Type' not in headers: headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'} kwargs['headers'] = headers kwargs['data'] = body.encode() elif json is not None: kwargs['json'] = json try: logger.debug('send request', method=method, url=url, kwargs=kwargs) res = await self.session.request( method, url, **kwargs) except ( asyncio.TimeoutError, aiohttp.ClientConnectorError, ) as e: raise TemporaryError(599, repr(e), e) err_cls: Optional[type] = None if res.status >= 500: err_cls = TemporaryError elif res.status >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls(res.status, res.reason, res) body = await res.content.read() return Response(res.headers, body) session = AiohttpSession() nvchecker-2.12/nvchecker/httpclient/base.py000066400000000000000000000057121444331012000210150ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2019-2020 lilydjwg , et al. import structlog from typing import Optional, Dict, Mapping import json as _json from ..ctxvars import tries, proxy, user_agent, httptoken, verify_cert logger = structlog.get_logger(logger_name=__name__) class Response: '''The response of an HTTP request. .. py:attribute:: body :type: bytes .. 
py:attribute:: headers :type: Mapping[str, str] ''' def __init__( self, headers: Mapping[str, str], body: bytes, ) -> None: self.headers = headers self.body = body def json(self): '''Convert response content to JSON.''' return _json.loads(self.body.decode('utf-8')) class BaseSession: '''The base class for different HTTP backend.''' def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: pass async def head(self, *args, **kwargs): '''Shortcut for ``HEAD`` request.''' return await self.request( method='HEAD', *args, **kwargs) async def get(self, *args, **kwargs): '''Shortcut for ``GET`` request.''' return await self.request( method='GET', *args, **kwargs) async def post(self, *args, **kwargs): '''Shortcut for ``POST`` request.''' return await self.request( method='POST', *args, **kwargs) async def request( self, url: str, *, method: str, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, ) -> Response: t = tries.get() p = proxy.get() ua = user_agent.get() httpt = httptoken.get() verify = verify_cert.get() headers = headers.copy() headers.setdefault('User-Agent', ua) if httpt is not None: headers.setdefault('Authorization', httpt) for i in range(1, t+1): try: return await self.request_impl( url, method = method, headers = headers, params = params, follow_redirects = follow_redirects, json = json, body = body, proxy = p or None, verify_cert = verify, ) except TemporaryError as e: if i == t: raise else: logger.warning('temporary error, retrying', tries = i, exc_info = e) continue raise Exception('should not reach') async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: ''':meta private:''' raise NotImplementedError class BaseHTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message 
self.response = response class TemporaryError(BaseHTTPError): '''A temporary error (e.g. network error) happens.''' class HTTPError(BaseHTTPError): ''' An HTTP 4xx error happens ''' nvchecker-2.12/nvchecker/httpclient/httpx_httpclient.py000066400000000000000000000040131444331012000235010ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020 lilydjwg , et al. from typing import Dict, Optional, Tuple import httpx from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] class HttpxSession(BaseSession): def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: self.clients: Dict[Tuple[Optional[str], bool], httpx.AsyncClient] = {} self.timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: client = self.clients.get((proxy, verify_cert)) if not client: client = httpx.AsyncClient( timeout = httpx.Timeout(self.timeout, pool=None), http2 = True, proxies = {'all://': proxy}, verify = verify_cert, ) self.clients[(proxy, verify_cert)] = client try: if body is not None: # Make sure all backends have the same default encoding for post data. 
if 'Content-Type' not in headers: headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'} body = body.encode() r = await client.request( method, url, json = json, content = body, headers = headers, follow_redirects = follow_redirects, params = params, ) err_cls: Optional[type] = None if r.status_code >= 500: err_cls = TemporaryError elif r.status_code >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls( r.status_code, r.reason_phrase, r, ) except httpx.TransportError as e: raise TemporaryError(599, repr(e), e) body = await r.aread() return Response(r.headers, body) async def aclose(self): for client in self.clients.values(): await client.aclose() del self.clients session = HttpxSession() nvchecker-2.12/nvchecker/httpclient/tornado_httpclient.py000066400000000000000000000045111444331012000240030ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. import json as _json from urllib.parse import urlencode from typing import Optional, Dict, Any from tornado.httpclient import AsyncHTTPClient, HTTPRequest try: import pycurl except ImportError: pycurl = None # type: ignore from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] HTTP2_AVAILABLE = None if pycurl else False def try_use_http2(curl): global HTTP2_AVAILABLE if HTTP2_AVAILABLE is None: try: curl.setopt(pycurl.HTTP_VERSION, 4) HTTP2_AVAILABLE = True except pycurl.error: HTTP2_AVAILABLE = False elif HTTP2_AVAILABLE: curl.setopt(pycurl.HTTP_VERSION, 4) class TornadoSession(BaseSession): def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: impl: Optional[str] if pycurl: impl = "tornado.curl_httpclient.CurlAsyncHTTPClient" else: impl = None AsyncHTTPClient.configure( impl, max_clients = concurreny) self.timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, 
verify_cert: bool = True, ) -> Response: kwargs: Dict[str, Any] = { 'method': method, 'headers': headers, 'request_timeout': self.timeout, 'follow_redirects': follow_redirects, 'validate_cert': verify_cert, } if body: # By default the content type is already 'application/x-www-form-urlencoded' kwargs['body'] = body elif json: kwargs['body'] = _json.dumps(json) kwargs['prepare_curl_callback'] = try_use_http2 if proxy: host, port = proxy.rsplit(':', 1) kwargs['proxy_host'] = host kwargs['proxy_port'] = int(port) if params: q = urlencode(params) url += '?' + q r = HTTPRequest(url, **kwargs) res = await AsyncHTTPClient().fetch( r, raise_error=False) err_cls: Optional[type] = None if res.code >= 500: err_cls = TemporaryError elif res.code >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls( res.code, res.reason, res ) return Response(res.headers, res.body) session = TornadoSession() nvchecker-2.12/nvchecker/lib/000077500000000000000000000000001444331012000161145ustar00rootroot00000000000000nvchecker-2.12/nvchecker/lib/README.md000066400000000000000000000002741444331012000173760ustar00rootroot00000000000000This directory contains code from other places: * `nicelogger.py`: from my [winterpy](https://github.com/lilydjwg/winterpy) * `packaging_version.py`: from python-packaging 20.9, modified nvchecker-2.12/nvchecker/lib/__init__.py000066400000000000000000000000001444331012000202130ustar00rootroot00000000000000nvchecker-2.12/nvchecker/lib/nicelogger.py000066400000000000000000000070621444331012000206110ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2017 lilydjwg , et al. ''' A Tornado-inspired logging formatter, with displayed time with millisecond accuracy FYI: pyftpdlib also has a Tornado-style logger. 
''' import sys import time import logging class Colors: def __init__(self, color=None): if color is None: color = support_color() if color: import curses curses.setupterm() if sys.hexversion < 0x30203f0: fg_color = str(curses.tigetstr("setaf") or curses.tigetstr("setf") or "", "ascii") else: fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" self.blue = str(curses.tparm(fg_color, 4), "ascii") self.yellow = str(curses.tparm(fg_color, 3), "ascii") self.green = str(curses.tparm(fg_color, 2), "ascii") self.red = str(curses.tparm(fg_color, 1), "ascii") self.bright_red = str(curses.tparm(fg_color, 9), "ascii") self.normal = str(curses.tigetstr("sgr0"), "ascii") else: self.blue = self.yellow = self.green = self.red = self.bright_red = self.normal = "" class TornadoLogFormatter(logging.Formatter): def __init__(self, color, *args, **kwargs): super().__init__(*args, **kwargs) self._color = color if color: colors = Colors(color=color) self._colors = { logging.DEBUG: colors.blue, logging.INFO: colors.green, logging.WARNING: colors.yellow, logging.ERROR: colors.red, logging.CRITICAL: colors.bright_red, } self._normal = colors.normal def format(self, record): try: record.message = record.getMessage() except Exception as e: record.message = "Bad message (%r): %r" % (e, record.__dict__) record.asctime = time.strftime( "%m-%d %H:%M:%S", self.converter(record.created)) prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \ record.__dict__ if self._color: prefix = (self._colors.get(record.levelno, self._normal) + prefix + self._normal) formatted = prefix + " " + record.message formatted += ''.join( ' %s=%s' % (k, v) for k, v in record.__dict__.items() if k not in { 'levelname', 'asctime', 'module', 'lineno', 'args', 'message', 'filename', 'exc_info', 'exc_text', 'created', 'funcName', 'processName', 'process', 'msecs', 'relativeCreated', 'thread', 'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info', }) if record.exc_info: if 
not record.exc_text: record.exc_text = self.formatException(record.exc_info) if record.exc_text: formatted = formatted.rstrip() + "\n" + record.exc_text return formatted.replace("\n", "\n ") def support_color(stream=sys.stderr): if stream.isatty(): try: import curses curses.setupterm() if curses.tigetnum("colors") > 0: return True except: import traceback traceback.print_exc() return False def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None): ''' handler: specify a handler instead of default StreamHandler color: boolean, force color to be on / off. Default to be on only when ``handler`` isn't specified and the term supports color ''' logger = logging.getLogger() if handler is None: h = logging.StreamHandler() else: h = handler if color is None and handler is None: color = support_color() formatter = TornadoLogFormatter(color=color) h.setLevel(level) h.setFormatter(formatter) logger.setLevel(level) logger.addHandler(h) nvchecker-2.12/nvchecker/lib/packaging_version.py000066400000000000000000000417511444331012000221670ustar00rootroot00000000000000# This file comes from python-packaging 20.9 and is modified # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. 
from __future__ import annotations import collections import itertools import re from typing import TYPE_CHECKING class InfinityType(object): def __repr__(self): # type: () -> str return "Infinity" def __hash__(self): # type: () -> int return hash(repr(self)) def __lt__(self, other): # type: (object) -> bool return False def __le__(self, other): # type: (object) -> bool return False def __eq__(self, other): # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): # type: (object) -> bool return True def __ge__(self, other): # type: (object) -> bool return True def __neg__(self): # type: (object) -> NegativeInfinityType return NegativeInfinity Infinity = InfinityType() class NegativeInfinityType(object): def __repr__(self): # type: () -> str return "-Infinity" def __hash__(self): # type: () -> int return hash(repr(self)) def __lt__(self, other): # type: (object) -> bool return True def __le__(self, other): # type: (object) -> bool return True def __eq__(self, other): # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): # type: (object) -> bool return False def __ge__(self, other): # type: (object) -> bool return False def __neg__(self): # type: (object) -> InfinityType return Infinity NegativeInfinity = NegativeInfinityType() if TYPE_CHECKING: # pragma: no cover from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union InfiniteTypes = Union[InfinityType, NegativeInfinityType] PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] SubLocalType = Union[InfiniteTypes, int, str] LocalType = Union[ NegativeInfinityType, Tuple[ Union[ SubLocalType, Tuple[SubLocalType, str], Tuple[NegativeInfinityType, SubLocalType], ], ..., ], ] CmpKey = Tuple[ int, Tuple[int, ...], PrePostDevType, 
PrePostDevType, PrePostDevType, LocalType ] LegacyCmpKey = Tuple[int, Tuple[str, ...]] VersionComparisonMethod = Callable[ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool ] __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] _Version = collections.namedtuple( "_Version", ["epoch", "release", "dev", "pre", "post", "local"] ) def parse(version): # type: (str) -> Union[LegacyVersion, Version] """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is a valid PEP 440 version or a legacy version. """ try: return Version(version) except InvalidVersion: return LegacyVersion(version) class InvalidVersion(ValueError): """ An invalid version was found, users should refer to PEP 440. """ class _BaseVersion(object): _key = None # type: Union[CmpKey, LegacyCmpKey] def __hash__(self): # type: () -> int return hash(self._key) # Please keep the duplicated `isinstance` check # in the six comparisons hereunder # unless you find a way to avoid adding overhead function calls. 
def __lt__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key < other._key def __le__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key <= other._key def __eq__(self, other): # type: (object) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key == other._key def __ge__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key >= other._key def __gt__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key > other._key def __ne__(self, other): # type: (object) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key != other._key class LegacyVersion(_BaseVersion): def __init__(self, version): # type: (str) -> None self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): # type: () -> str return self._version def __repr__(self): # type: () -> str return "".format(repr(str(self))) @property def public(self): # type: () -> str return self._version @property def base_version(self): # type: () -> str return self._version @property def epoch(self): # type: () -> int return -1 @property def release(self): # type: () -> None return None @property def pre(self): # type: () -> None return None @property def post(self): # type: () -> None return None @property def dev(self): # type: () -> None return None @property def local(self): # type: () -> None return None @property def is_prerelease(self): # type: () -> bool return False @property def is_postrelease(self): # type: () -> bool return False @property def is_devrelease(self): # type: () -> bool return False _legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) _legacy_version_replacement_map = { "pre": "c", "preview": "c", 
"-": "final-", "rc": "c", "dev": "@", } def _parse_version_parts(s): # type: (str) -> Iterator[str] for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) if not part or part == ".": continue if part[:1] in "0123456789": # pad for numeric comparison yield part.zfill(8) else: yield "*" + part # ensure that alpha/beta/candidate are before final yield "*final" def _legacy_cmpkey(version): # type: (str) -> LegacyCmpKey # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, # as before all PEP 440 versions. epoch = -1 # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. parts = [] # type: List[str] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag if part < "*final": while parts and parts[-1] == "*final-": parts.pop() # remove trailing zeros from each series of numeric parts while parts and parts[-1] == "00000000": parts.pop() parts.append(part) return epoch, tuple(parts) # Deliberately not anchored to the start and end of the string, to make it # easier for 3rd party code to reuse VERSION_PATTERN = r""" v? (?: (?:(?P[0-9]+)!)? # epoch (?P[0-9]+(?:\.[0-9]+)*) # release segment (?P
                                          # pre-release
            [-_\.]?
            (?P(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P[0-9]+)?
        )?
        (?P                                         # post release
            (?:-(?P[0-9]+))
            |
            (?:
                [-_\.]?
                (?Ppost|rev|r)
                [-_\.]?
                (?P[0-9]+)?
            )
        )?
        (?P                                          # dev release
            [-_\.]?
            (?Pdev)
            [-_\.]?
            (?P[0-9]+)?
        )?
    )
    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # type: (str) -> None

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # type: () -> str
        return "".format(repr(str(self)))

    def __str__(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        # type: () -> int
        _epoch = self._version.epoch  # type: int
        return _epoch

    @property
    def release(self):
        # type: () -> Tuple[int, ...]
        _release = self._version.release  # type: Tuple[int, ...]
        return _release

    @property
    def pre(self):
        # type: () -> Optional[Tuple[str, int]]
        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
        return _pre

    @property
    def post(self):
        # type: () -> Optional[Tuple[str, int]]
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        # type: () -> Optional[Tuple[str, int]]
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        # type: () -> Optional[str]
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        # type: () -> str
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        # type: () -> bool
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        # type: () -> bool
        return self.post is not None

    @property
    def is_devrelease(self):
        # type: () -> bool
        return self.dev is not None

    @property
    def major(self):
        # type: () -> int
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        # type: () -> int
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        # type: () -> int
        return self.release[2] if len(self.release) >= 3 else 0


def _parse_letter_version(
    letter,  # type: str
    number,  # type: Union[str, bytes, SupportsInt]
):
    # type: (...) -> Optional[Tuple[str, int]]

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local):
    # type: (str) -> Optional[LocalType]
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch,  # type: int
    release,  # type: Tuple[int, ...]
    pre,  # type: Optional[Tuple[str, int]]
    post,  # type: Optional[Tuple[str, int]]
    dev,  # type: Optional[Tuple[str, int]]
    local,  # type: Optional[Tuple[SubLocalType]]
):
    # type: (...) -> CmpKey

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre = NegativeInfinity  # type: PrePostDevType
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post = NegativeInfinity  # type: PrePostDevType

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev = Infinity  # type: PrePostDevType

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local = NegativeInfinity  # type: LocalType
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
nvchecker-2.12/nvchecker/slogconf.py000066400000000000000000000063521444331012000175400ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2018 lilydjwg , et al.

import logging
import os
import io
import traceback
import sys

import structlog

from .httpclient import TemporaryError

def _console_msg(event):
  evt = event['event']
  if evt == 'up-to-date':
    msg = 'up-to-date, version %s' % event['version']
    del event['version']
  elif evt == 'updated':
    if event.get('old_version'):
      msg = 'updated from %(old_version)s to %(version)s' % event
    else:
      msg = 'updated to %(version)s' % event
    del event['version'], event['old_version']
  else:
    msg = evt

  if 'name' in event:
    msg = f"{event['name']}: {msg}"
    del event['name']

  event['msg'] = msg

  return event

def exc_info(logger, level, event):
  if level == 'exception':
    event['exc_info'] = True
  return event

def filter_exc(logger, level, event):
  exc_info = event.get('exc_info')
  if not exc_info:
    return event

  if exc_info is True:
    exc = sys.exc_info()[1]
  else:
    exc = exc_info

  if isinstance(exc, TemporaryError):
    if exc.code == 599: # network issues
      del event['exc_info']
  event['error'] = exc
  return event

def stdlib_renderer(logger, level, event):
  # return event unchanged for further processing
  std_event = _console_msg(event.copy())
  try:
    logger = logging.getLogger(std_event.pop('logger_name'))
  except KeyError:
    logger = logging.getLogger()
  msg = std_event.pop('msg', std_event.pop('event'))
  exc_info = std_event.pop('exc_info', None)
  if 'error' in std_event:
    std_event['error'] = repr(std_event['error'])
  getattr(logger, level)(
    msg, exc_info = exc_info, extra=std_event,
  )
  return event

_renderer = structlog.processors.JSONRenderer(ensure_ascii=False)
def json_renderer(logger, level, event):
  event['level'] = level
  return _renderer(logger, level, event)

def null_renderer(logger, level, event):
  return ''

class _Logger(logging.Logger):
  _my_srcfile = os.path.normcase(
    stdlib_renderer.__code__.co_filename)

  _structlog_dir = os.path.dirname(structlog.__file__)

  def findCaller(self, stack_info=False, stacklevel=1):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = logging.currentframe()
    #On some versions of IronPython, currentframe() returns None if
    #IronPython isn't run with -X:Frames.
    if f is not None:
      f = f.f_back
    orig_f = f
    while f and stacklevel > 1:
      f = f.f_back
      stacklevel -= 1
    if not f:
      f = orig_f
    rv = "(unknown file)", 0, "(unknown function)", None
    while hasattr(f, "f_code"):
      co = f.f_code
      filename = os.path.normcase(co.co_filename)
      if filename in [logging._srcfile, self._my_srcfile] \
         or filename.startswith(self._structlog_dir):
        f = f.f_back
        continue
      sinfo = None
      if stack_info:
        sio = io.StringIO()
        sio.write('Stack (most recent call last):\n')
        traceback.print_stack(f, file=sio)
        sinfo = sio.getvalue()
        if sinfo[-1] == '\n':
          sinfo = sinfo[:-1]
        sio.close()
      rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
      break
    return rv

def fix_logging():
  logging.setLoggerClass(_Logger)

nvchecker-2.12/nvchecker/sortversion.py000066400000000000000000000016111444331012000203140ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 lilydjwg , et al.

'''
Sort versions using deprecated pkg_resource / packaging.parse_version or pyalpm.vercmp
'''

__all__ = ["sort_version_keys"]

from .lib.packaging_version import parse as parse_version

try:
  import pyalpm
  from functools import cmp_to_key
  # wrap pyalpm's cmp-style comparator into a sort key function
  vercmp = cmp_to_key(pyalpm.vercmp)
  vercmp_available = True
except ImportError:
  # fallback stub: keeps the name defined so dict lookups succeed,
  # but fails loudly only when actually used
  def vercmp(k):
    raise NotImplementedError("Using vercmp but pyalpm can not be imported!")
  vercmp_available = False

try:
  from awesomeversion import AwesomeVersion
  awesomeversion_available = True
except ImportError:
  # same pattern as above: defer the failure to call time
  def AwesomeVersion(k): # type: ignore
    raise NotImplementedError("Using awesomeversion but it can not be imported!")
  awesomeversion_available = False

# maps the configured sort method name to a version key function
sort_version_keys = {
  "parse_version": parse_version,
  "vercmp": vercmp,
  "awesomeversion": AwesomeVersion,
}
nvchecker-2.12/nvchecker/tools.py000066400000000000000000000131251444331012000170620ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import sys
import argparse
import structlog
import json

from . import core

logger = structlog.get_logger(logger_name=__name__)

def take() -> None:
  '''Entry point for ``nvtake``: record version updates as "taken".

  Copies selected entries (or all with --all) from the newver file into
  the oldver file, so subsequent runs no longer report them as updates.
  The previous oldver file is kept as a ``~``-suffixed backup.
  '''
  parser = argparse.ArgumentParser(description='update version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('--all', action='store_true',
                      help='take all updates')
  parser.add_argument('--ignore-nonexistent', action='store_true',
                      help='ignore nonexistent names')
  parser.add_argument('names', metavar='NAME', nargs='*',
                      help='software name to be updated. use NAME=VERSION to update '
                           'to a specific version instead of the new version.')
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  if args.all:
    oldvers.update(newvers)
  else:
    for name in args.names:
      if "=" in name:
        # split only on the first '=' so versions containing '=' survive
        name, newver = name.split("=", 1)
        oldvers[name] = newver
      else:
        try:
          oldvers[name] = newvers[name]
        except KeyError:
          if args.ignore_nonexistent:
            logger.warning('nonexistent in newver, ignored', name=name)
            continue

          logger.critical(
            "doesn't exist in 'newver' set.", name=name,
          )
          sys.exit(2)

  # keep a backup of the previous oldver file as "<name>~"
  try:
    oldverf.rename(
      oldverf.with_name(oldverf.name + '~'),
    )
  except FileNotFoundError:
    pass
  core.write_verfile(oldverf, oldvers)

def cmp() -> None:
  '''Entry point for ``nvcmp``: compare the oldver and newver records.

  Prints the differences (or JSON with --json), optionally only names
  (--quiet), including unchanged entries (--all), and can signal updates
  via exit status 4 (--exit-status).
  '''
  parser = argparse.ArgumentParser(description='compare version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('-j', '--json', action='store_true',
                      help='Output JSON array of dictionaries with {name, newver, oldver, [delta]} '
                           '(or array of names if --quiet)')
  parser.add_argument('-q', '--quiet', action='store_true',
                      help="Quiet mode, output only the names.")
  parser.add_argument('-a', '--all', action='store_true',
                      help="Include unchanged versions.")
  parser.add_argument('-s', '--sort',
                      choices=('parse_version', 'vercmp', 'awesomeversion', 'none'),
                      default='parse_version',
                      help='Version compare method to backwards the arrow '
                           '(default: parse_version)')
  parser.add_argument('-n', '--newer', action='store_true',
                      help='Shows only the newer ones according to --sort.')
  parser.add_argument('--exit-status', action='store_true',
                      help="exit with status 4 if there are updates")
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  differences = []

  for name, newver in sorted(newvers.items()):  # accumulate differences
    oldver = oldvers.get(name, None)

    diff = {
      'name': name,
      'oldver': oldver,
      'newver': newver
    }

    # classify the change: equal / new / old / added / gone
    if oldver is not None and newver is not None:
      if oldver == newver:
        diff['delta'] = 'equal'

      elif args.sort == "none":
        diff['delta'] = 'new'  # assume it's a new version if we're not comparing

      else:
        # imported lazily: the chosen comparator may need optional deps
        from .sortversion import sort_version_keys
        version = sort_version_keys[args.sort]

        if version(oldver) > version(newver): # type: ignore
          if args.newer:
            continue  # don't store this diff
          diff['delta'] = 'old'
        else:
          diff['delta'] = 'new'

    elif oldver is None:
      diff['delta'] = 'added'

    elif newver is None:
      if args.newer:
        continue  # don't store this diff
      diff['delta'] = 'gone'

    if args.all or diff['delta'] != 'equal':
      differences.append(diff)

  if args.json:
    if args.quiet:
      print(json.dumps([diff['name'] for diff in differences], separators=(',', ':')))
    else:
      print(json.dumps(differences, sort_keys=True, separators=(',', ':')))

  elif args.quiet:
    for diff in differences:
      print(diff['name'])

  else:
    from .lib.nicelogger import Colors, support_color
    c = Colors(support_color(sys.stdout))

    # per-delta arrow symbol and color used for the old version
    diffstyles = {
      'new': {
        'symbol': '->',
        'oldc': c.red
      },
      'old': {
        'symbol': f'{c.red}<-{c.normal}',
        'oldc': c.red
      },
      'added': {
        'symbol': '++',
        'oldc': c.red
      },
      'gone': {
        'symbol': f'{c.red}--{c.normal}',
        'oldc': c.green
      },
      'equal': {
        'symbol': '==',
        'oldc': c.green
      }
    }

    for diff in differences:
      style = diffstyles[diff.get('delta', 'equal')] # type: ignore # mypy has issues with this line

      print(f'{diff["name"]} {style["oldc"]}{diff["oldver"]}{c.normal} {style["symbol"]} {c.green}{diff["newver"]}{c.normal}')

  if args.exit_status and any(
    diff.get('delta') != 'equal' for diff in differences
  ):
    sys.exit(4)
nvchecker-2.12/nvchecker/util.py000066400000000000000000000176011444331012000167020ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

from __future__ import annotations

import asyncio
from asyncio import Queue
from typing import (
  Dict, Optional, List, NamedTuple, Union,
  Any, Tuple, Callable, Coroutine, Hashable,
  TYPE_CHECKING,
)
from pathlib import Path
import contextvars
import abc

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import structlog

from .httpclient import session
from .ctxvars import tries as ctx_tries
from .ctxvars import proxy as ctx_proxy
from .ctxvars import user_agent as ctx_ua
from .ctxvars import httptoken as ctx_httpt
from .ctxvars import verify_cert as ctx_verify_cert

logger = structlog.get_logger(logger_name=__name__)

Entry = Dict[str, Any]
Entry.__doc__ = '''The configuration `dict` for an entry.'''
Entries = Dict[str, Entry]
VersData = Dict[str, str]
VersionResult = Union[None, str, List[str], Exception]
VersionResult.__doc__ = '''The result of a `get_version` check.

* `None` - No version found.
* `str` - A single version string is found.
* `List[str]` - Multiple version strings are found. :ref:`list options` will be applied.
* `Exception` - An error occurred.
'''

class FileLoadError(Exception):
  '''Raised when a configuration or key file cannot be read or parsed.

  ``kind`` names the file's role (e.g. "keyfile"), ``filename`` is its
  path, and ``exc`` is the underlying exception.
  '''
  def __init__(self, kind, filename, exc):
    self.kind = kind
    self.filename = filename
    self.exc = exc

  def __str__(self):
    return 'failed to load {} {!r}: {}'.format(
      self.kind, self.filename, self.exc)

class KeyManager:
  '''Manages data in the keyfile.

  Loads the ``keys`` table from the given TOML file; a ``None`` file
  yields an empty key set.
  '''
  def __init__(
    self, file: Optional[Path],
  ) -> None:
    keys = {}
    if file is not None:
      try:
        with file.open('rb') as f:
          keys = tomllib.load(f)['keys']
      except (OSError, tomllib.TOMLDecodeError) as e:
        # surface a uniform error for unreadable / malformed keyfiles
        raise FileLoadError('keyfile', str(file), e)
    self.keys = keys

  def get_key(self, name: str) -> Optional[str]:
    '''Get the named key (token) in the keyfile.'''
    return self.keys.get(name)

class EntryWaiter:
  '''Lets one entry await the version result produced by another entry.'''

  def __init__(self) -> None:
    # name -> shared future resolved by set_result / set_exception
    self._waiting: Dict[str, asyncio.Future] = {}

  async def wait(self, name: str) -> str:
    '''Wait on the ``name`` entry and return its result (the version string)'''
    try:
      fu = self._waiting[name]
    except KeyError:
      fu = self._waiting[name] = asyncio.Future()
    return await fu

  def set_result(self, name: str, value: str) -> None:
    # resolve the future only if someone registered interest in this name
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_result(value)

  def set_exception(self, name: str, e: Exception) -> None:
    # propagate a failure to all waiters of this name
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_exception(e)

class RawResult(NamedTuple):
  '''The unprocessed result from a check.'''
  name: str
  version: VersionResult
  conf: Entry

# document the fields on the generated NamedTuple class
RawResult.name.__doc__ = 'The name (table name) of the entry.'
RawResult.version.__doc__ = 'The result from the check.'
RawResult.conf.__doc__ = 'The entry configuration (table content) of the entry.'

class Result(NamedTuple):
  '''Like `RawResult`, but ``version`` is a single version string.'''
  name: str
  version: str
  conf: Entry

class BaseWorker:
  '''The base class for defining `Worker` classes for source plugins.

  .. py:attribute:: task_sem
      :type: asyncio.Semaphore

      This is the rate-limiting semaphore. Workers should acquire it while doing one unit of work.

  .. py:attribute:: result_q
      :type: Queue[RawResult]

      Results should be put into this queue.

  .. py:attribute:: tasks
      :type: List[Tuple[str, Entry]]

      A list of tasks for the `Worker` to complete. Every task consists of
      a tuple for the task name (table name in the configuration file) and the
      content of that table (as a `dict`).

  .. py:attribute:: keymanager
      :type: KeyManager

      The `KeyManager` for retrieving keys from the keyfile.
  '''
  def __init__(
    self,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    keymanager: KeyManager,
  ) -> None:
    # store collaborators; subclasses use them from run()
    self.task_sem = task_sem
    self.result_q = result_q
    self.keymanager = keymanager
    self.tasks = tasks

  @abc.abstractmethod
  async def run(self) -> None:
    '''Run the `tasks`. Subclasses should implement this method.'''
    raise NotImplementedError

  async def _run_maynot_raise(self) -> None:
    # wrapper used by the scheduler: log instead of propagating, so a
    # misbehaving worker can't tear down the whole check run
    try:
      await self.run()
    except Exception:
      # don't let an exception tear down the whole process
      logger.exception('exception raised by Worker.run')

class AsyncCache:
  '''A cache for use with async functions.'''
  # key -> cached value, or a pending asyncio future while being computed
  cache: Dict[Hashable, Any]
  lock: asyncio.Lock

  def __init__(self) -> None:
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(
    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
  ) -> Any:
    # key is ('_jsonurl', url, sorted header pairs); only url/headers used
    _, url, headers = key
    res = await session.get(url, headers=dict(headers))
    return res.json()

  async def get_json(
    self, url: str, *,
    headers: Dict[str, str] = {},
  ) -> Any:
    '''Get specified ``url`` and return the response content as JSON.

    The returned data will be cached for reuse.
    '''
    # headers dict is only read, so the mutable default is safe here;
    # sorting the items makes the key order-independent and hashable
    key = '_jsonurl', url, tuple(sorted(headers.items()))
    return await self.get(
      key , self._get_json) # type: ignore

  async def get(
    self,
    key: Hashable,
    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
  ) -> Any:
    '''Run async ``func`` and cache its return value by ``key``.

    The ``key`` should be hashable, and the function will be called with it as
    its sole argument. For multiple simultaneous calls with the same key, only
    one will actually be called, and others will wait and return the same
    (cached) value.
    '''
    # hold the lock only while inspecting/updating the dict, so the
    # actual fetch runs without blocking unrelated keys
    async with self.lock:
      cached = self.cache.get(key)
      if cached is None:
        # not cached yet: start the task; concurrent callers will find
        # the pending future below and await it
        coro = func(key)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

    # NOTE(review): a func that legitimately returns None is
    # indistinguishable from "not cached" and will be re-run on every
    # call — confirm callers never cache None.
    if asyncio.isfuture(cached): # pending
      return await cached
    elif cached is not None: # cached
      return cached
    else: # not cached
      r = await fu
      # replace the future with its result for later fast-path hits
      self.cache[key] = r
      return r

if TYPE_CHECKING:
  from typing_extensions import Protocol
  class GetVersionFunc(Protocol):
    '''Signature of a source plugin's ``get_version`` function.'''
    async def __call__(
      self,
      name: str, conf: Entry,
      *,
      cache: AsyncCache,
      keymanager: KeyManager,
    ) -> VersionResult:
      ...
else:
  # at runtime the Protocol isn't needed; any callable is accepted
  GetVersionFunc = Any

class FunctionWorker(BaseWorker):
  # Worker that runs a plugin's get_version function for each task.
  func: GetVersionFunc
  cache: AsyncCache

  def initialize(self, func: GetVersionFunc) -> None:
    # called after construction to attach the plugin function and a
    # per-worker cache shared by all of its entries
    self.func = func
    self.cache = AsyncCache()

  async def run(self) -> None:
    futures = []
    for name, entry in self.tasks:
      # each task gets its own context copy so per-entry context
      # variables (tries, proxy, ...) don't leak between entries
      ctx = contextvars.copy_context()
      fu = ctx.run(self.run_one, name, entry)
      futures.append(fu)

    for fu2 in asyncio.as_completed(futures):
      await fu2

  async def run_one(
    self, name: str, entry: Entry,
  ) -> None:
    # Run one entry: set per-entry context variables, call the plugin
    # function under the rate-limiting semaphore, and queue the result.
    # Exceptions become results instead of propagating.
    assert self.func is not None

    tries = entry.get('tries', None)
    if tries is not None:
      ctx_tries.set(tries)
    proxy = entry.get('proxy', None)
    if proxy is not None:
      ctx_proxy.set(proxy)
    ua = entry.get('user_agent', None)
    if ua is not None:
      ctx_ua.set(ua)
    httpt = entry.get('httptoken', None)
    if httpt is None:
      # fall back to the keyfile entry named "httptoken_<name>"
      httpt = self.keymanager.get_key('httptoken_'+name)
    if httpt is not None:
      ctx_httpt.set(httpt)
    verify_cert = entry.get('verify_cert', None)
    if verify_cert is not None:
      ctx_verify_cert.set(verify_cert)

    try:
      async with self.task_sem:
        version = await self.func(
          name, entry,
          cache = self.cache,
          keymanager = self.keymanager,
        )
      await self.result_q.put(RawResult(name, version, entry))
    except Exception as e:
      await self.result_q.put(RawResult(name, e, entry))

class GetVersionError(Exception):
  '''An error occurred while getting version information.

  Raise this when a known bad situation happens.

  :param msg: The error message.
  :param kwargs: Arbitrary additional context for the error.
  '''
  def __init__(self, msg: str, **kwargs: Any) -> None:
    self.msg = msg
    self.kwargs = dict(kwargs)
nvchecker-2.12/nvchecker_source/000077500000000000000000000000001444331012000167265ustar00rootroot00000000000000nvchecker-2.12/nvchecker_source/alpm.py000066400000000000000000000023051444331012000202310ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 DDoSolitary , et al.

from nvchecker.api import GetVersionError
from pyalpm import Handle


async def open_db(info):
  # AsyncCache loader keyed by (dbpath, repo): register the named sync
  # database and return (handle, db) — the handle is kept so the db
  # object it owns stays valid.
  dbpath, repo = info
  handle = Handle('/', dbpath)
  db = handle.register_syncdb(repo, 0)
  return handle, db


async def get_version(name, conf, *, cache, **kwargs):
  # Look up a package version in local ALPM (pacman) sync databases.
  pkgname = conf.get('alpm', name)
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  repo = conf.get('repo')
  if repo is None:
    # no repo configured: search the standard repositories in order
    repos = conf.get('repos') or ['core', 'extra', 'community', 'multilib']
  else:
    repos = [repo]

  for repo in repos:
    # open_db returns (handle, db); only the db is needed here
    db = (await cache.get((dbpath, repo), open_db))[1]
    pkg = db.get_pkg(pkgname)
    if pkg is not None:
      break

  if pkg is None:
    raise GetVersionError('package not found in the ALPM database')
  if provided is None:
    version = pkg.version
  else:
    # look the version up among "name=version" provides entries
    provides = dict(x.split('=', 1) for x in pkg.provides if '=' in x)
    version = provides.get(provided)
    if version is None:
      raise GetVersionError('provides element not found')
  if strip_release:
    # drop the pkgrel part after the first '-'
    version = version.split('-', 1)[0]
  return version
nvchecker-2.12/nvchecker_source/alpmfiles.py000066400000000000000000000027711444331012000212630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.

from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
import re
from typing import Tuple, List

from nvchecker.api import GetVersionError

async def get_files(info: Tuple[str, str]) -> List[str]:
  '''Return the file list of ``pkg`` via ``pacman -Flq``.

  Used as an AsyncCache loader keyed by (dbpath, pkg); raises
  GetVersionError with the captured output when pacman fails.
  '''
  dbpath, pkg = info
  # there's no pyalpm bindings for the file databases
  cmd = ['pacman', '-Flq', '--dbpath', dbpath, pkg]

  p = await create_subprocess_exec(*cmd, stdout = PIPE, stderr = PIPE)
  stdout, stderr = await p.communicate()

  if p.returncode == 0:
    return stdout.decode().splitlines()
  else:
    raise GetVersionError(
      'pacman failed to get file list',
      pkg = pkg,
      cmd = cmd,
      stdout = stdout.decode(errors='replace'),
      stderr = stderr.decode(errors='replace'),
      returncode = p.returncode,
    )

async def get_version(name, conf, *, cache, **kwargs):
  '''Extract a version from a matching file name in a package's file list.'''
  pkg = conf['pkgname']
  repo = conf.get('repo')
  if repo is not None:
    pkg = f'{repo}/{pkg}'
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  regex = re.compile(conf['filename'])
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')
  strip_dir = conf.get('strip_dir', False)

  files = await cache.get((dbpath, pkg), get_files)

  for path in files:
    candidate = path.rsplit('/', 1)[-1] if strip_dir else path
    m = regex.fullmatch(candidate)
    if m is None:
      continue
    # with one capture group, return it; otherwise the whole file name
    return m.group(1) if regex.groups else candidate

  raise GetVersionError('no file matches specified regex')
nvchecker-2.12/nvchecker_source/android_sdk.py000066400000000000000000000042041444331012000215610ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017,2020 Chih-Hsuan Yen 

import os
import re
from xml.etree import ElementTree

from nvchecker.api import session

_ANDROID_REPO_MANIFESTS = {
  'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
  'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}

# See  tags in Android SDK XML manifests
_CHANNEL_MAP = {
  'stable': 'channel-0',
  'beta': 'channel-1',
  'dev': 'channel-2',
  'canary': 'channel-3',
}

async def _get_repo_manifest(repo):
  # Download and parse the XML manifest for the given repo kind
  # ('addon' or 'package'); used as an AsyncCache loader.
  repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]

  res = await session.get(repo_xml_url)
  data = res.body.decode('utf-8')

  repo_manifest = ElementTree.fromstring(data)
  return repo_manifest

async def get_version(name, conf, *, cache, **kwargs):
  '''Find versions of an Android SDK package in Google's repo manifest.

  Matches remotePackage entries whose path starts with the configured
  prefix, filters by release channel and host OS, and builds a version
  string from the revision element plus any trailing release number
  ("-rN") in the archive file name.
  '''
  repo = conf['repo']
  pkg_path_prefix = conf['android_sdk']
  channels = [_CHANNEL_MAP[channel]
              for channel in conf.get('channel', 'stable').split(',')]

  repo_manifest = await cache.get(repo, _get_repo_manifest)

  versions = []

  for pkg in repo_manifest.findall('.//remotePackage'):
    if not pkg.attrib['path'].startswith(pkg_path_prefix):
      continue
    # Element.find returns None when the child is absent; skip such
    # packages instead of crashing with AttributeError
    channelRef = pkg.find('./channelRef')
    if channelRef is None or channelRef.attrib['ref'] not in channels:
      continue
    for archive in pkg.findall('./archives/archive'):
      host_os = archive.find('./host-os')
      if host_os is not None and host_os.text != conf.get('host_os', 'linux'):
        continue
      url_node = archive.find('./complete/url')
      if url_node is None or url_node.text is None:
        continue  # malformed archive entry; ignore it
      archive_url = url_node.text
      # revision
      rev = pkg.find('./revision')
      rev_strs = []
      if rev is not None:
        for part in ('major', 'minor', 'micro'):
          part_node = rev.find('./' + part)
          if part_node is not None:
            rev_strs.append(part_node.text)
      # release number, e.g. "...-r2.zip" -> "r2"
      filename, ext = os.path.splitext(archive_url)
      rel_str = filename.rsplit('-')[-1]
      mobj = re.match(r'r\d+', rel_str)
      if mobj:
        rev_strs.append(rel_str)
      versions.append('.'.join(rev_strs))
      # A package suitable for the target host OS is found - skip remaining
      break

  return versions
nvchecker-2.12/nvchecker_source/anitya.py000066400000000000000000000004661444331012000205730ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2017-2020 lilydjwg , et al.

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  '''Fetch the latest version from release-monitoring.org (Anitya).'''
  project = conf.get('anitya')
  data = await cache.get_json(URL.format(pkg=project))
  return data['version']
nvchecker-2.12/nvchecker_source/apt.py000066400000000000000000000102251444331012000200640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

from __future__ import annotations

import re
import asyncio
from typing import Dict, Tuple
import itertools
import functools
from collections import defaultdict

from nvchecker.api import (
  session, GetVersionError,
  VersionResult, Entry, AsyncCache, KeyManager,
)

APT_RELEASE_URL = "%s/dists/%s/Release"
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"
APT_PACKAGES_URL = "%s/dists/%s/%s"
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")

DpkgVersion = Tuple[int, str, str]

def parse_version(s: str) -> DpkgVersion:
  '''Parse a Debian version string into (epoch, upstream_version, revision).

  A missing epoch defaults to 0; a missing revision defaults to ''.
  '''
  try:
    epoch_str, rest = s.split(':', 1)
  except ValueError:
    epoch = 0
    rest = s
  else:
    epoch = int(epoch_str)

  try:
    # Debian policy: the revision is everything after the LAST hyphen,
    # since the upstream version may itself contain hyphens.
    ver, rev = rest.rsplit('-', 1)
  except ValueError:
    ver = rest
    rev = ''

  return epoch, ver, rev

def _compare_part(a: str, b: str) -> int:
  sa = re.split(r'(\d+)', a)
  sb = re.split(r'(\d+)', b)
  for idx, (pa, pb) in enumerate(itertools.zip_longest(sa, sb)):
    if pa is None:
      return -1
    elif pb is None:
      return 1

    if idx % 2 == 1:
      ret = int(pa) - int(pb)
      if ret != 0:
        return ret
    else:
      if pa < pb:
        return -1
      elif pa > pb:
        return 1

  return 0

def compare_version_parsed(a: DpkgVersion, b: DpkgVersion) -> int:
  '''Compare two parsed Debian versions; sign of the result like cmp().'''
  if a[0] != b[0]:
    return a[0] - b[0]
  # compare upstream version, then revision, with the same algorithm
  for part_a, part_b in zip(a[1:], b[1:]):
    result = _compare_part(part_a, part_b)
    if result != 0:
      return result
  return 0

def compare_version(a: str, b: str) -> int:
  '''Compare two Debian version strings (see compare_version_parsed).'''
  return compare_version_parsed(parse_version(a), parse_version(b))

def _decompress_data(url: str, data: bytes) -> str:
  if url.endswith(".xz"):
    import lzma
    data = lzma.decompress(data)
  elif url.endswith(".gz"):
    import gzip
    data = gzip.decompress(data)

  return data.decode('utf-8')

async def get_url(url: str) -> str:
  # Fetch url and decompress in a thread-pool executor so the
  # (potentially large) decompression doesn't block the event loop.
  res = await session.get(url)
  data = res.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, _decompress_data,
    url, data)

async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str]]:
  '''Parse an APT Packages file into two maps: binary package name ->
  highest version and source package name -> highest version.'''
  cache, url = key
  apt_packages = await cache.get(url, get_url) # type: ignore

  pkg_map = defaultdict(list)
  srcpkg_map = defaultdict(list)

  # stanza parsing: remember the current Package/Source fields and
  # flush them when the Version field arrives
  pkg = None
  srcpkg = None
  for line in apt_packages.split('\n'):
    if line.startswith("Package: "):
      pkg = line[9:]
    elif line.startswith("Source: "):
      srcpkg = line[8:]
    elif line.startswith("Version: "):
      version = line[9:]
      if pkg is not None:
        pkg_map[pkg].append(version)
      if srcpkg is not None:
        srcpkg_map[srcpkg].append(version)
      pkg = srcpkg = None

  # keep only the highest version per package (dpkg-style comparison)
  pkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in pkg_map.items()}
  srcpkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in srcpkg_map.items()}

  return pkg_map_max, srcpkg_map_max

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Get a package version from an APT repository.

  Looks up either a binary package (``pkg``) or a source package
  (``srcpkg``); defaults to a binary package named after the entry.
  '''
  srcpkg = conf.get('srcpkg')
  pkg = conf.get('pkg')
  mirror = conf['mirror']
  suite = conf['suite']
  repo = conf.get('repo', 'main')
  arch = conf.get('arch', 'amd64')
  strip_release = conf.get('strip_release', False)

  if srcpkg and pkg:
    raise GetVersionError('Setting both srcpkg and pkg is ambiguous')
  elif not srcpkg and not pkg:
    pkg = name

  # find which Packages file (and compression) the Release file lists
  apt_release = await cache.get(
    APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
  for suffix in APT_PACKAGES_SUFFIX_PREFER:
    packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
    if " " + packages_path in apt_release:
      break
  else:
    raise GetVersionError('Packages file not found in APT repository')

  pkg_map, srcpkg_map = await cache.get(
    (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore

  if pkg and pkg in pkg_map:
    version = pkg_map[pkg]
  elif srcpkg and srcpkg in srcpkg_map:
    version = srcpkg_map[srcpkg]
  else:
    raise GetVersionError('package not found in APT repository')

  if strip_release:
    # drop the Debian revision after the first '-'
    version = version.split("-")[0]
  return version
nvchecker-2.12/nvchecker_source/archpkg.py000066400000000000000000000017011444331012000207160ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import session, GetVersionError

URL = 'https://www.archlinux.org/packages/search/json/'

async def request(pkg):
  # query the official Arch package search API by exact package name
  res = await session.get(URL, params={"name": pkg})
  return res.json()

async def get_version(name, conf, *, cache, **kwargs):
  '''Get the version of an official Arch Linux package.

  With ``provided``, returns the version the package provides for that
  name; with ``strip_release``, drops the pkgrel suffix.
  '''
  pkg = conf.get('archpkg') or name
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  data = await cache.get(pkg, request)

  if not data['results']:
    raise GetVersionError('Arch package not found')

  # prefer non-testing repositories; report clearly instead of IndexError
  candidates = [r for r in data['results'] if r['repo'] != 'testing']
  if not candidates:
    raise GetVersionError('Arch package only found in testing repository')
  r = candidates[0]

  if provided:
    provides = dict(x.split('=', 1) for x in r['provides'] if '=' in x)
    version = provides.get(provided, None)
    if version is None:
      # previously fell through and crashed on None.split below
      raise GetVersionError('provides element not found')
    if strip_release:
      version = version.split('-', 1)[0]
  elif strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return version
nvchecker-2.12/nvchecker_source/aur.py000066400000000000000000000054311444331012000200720ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import asyncio
from datetime import datetime, timezone
from typing import Iterable, Dict, List, Tuple, Any, Optional

from nvchecker.api import (
  session, GetVersionError, VersionResult,
  Entry, BaseWorker, RawResult,
)

AUR_URL = 'https://aur.archlinux.org/rpc/'

class AurResults:
  '''Caches AUR RPC "info" results across batches.

  A name maps to its info dict, or to None when the AUR returned no
  result for it (negative results are cached too).
  '''
  cache: Dict[str, Optional[Dict[str, Any]]]

  def __init__(self) -> None:
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Optional[Dict[str, Any]]]:
    '''Fetch info for the given names, reusing cached results.'''
    params = [('v', '5'), ('type', 'info')]
    # only request names that haven't been seen yet
    params.extend(('arg[]', name) for name in aurnames
                  if name not in self.cache)
    res = await session.get(AUR_URL, params=params)
    data = res.json()
    new_results = {r['Name']: r for r in data['results']}

    cache = self.cache
    cache.update(new_results)
    # names absent from the response don't exist on the AUR; cache None
    cache.update(
      (name, None)
      for name in set(aurnames) - new_results.keys()
    )

    return {name: cache[name] for name in aurnames
            if name in cache}

class Worker(BaseWorker):
  '''Batching worker for the AUR source: the RPC interface limits how
  many names one request may carry, so tasks are queried in batches.'''
  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    tasks = self.tasks
    # ceil-divide the tasks into batches of batch_size
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    aur_results = AurResults()

    ret = []
    for i in range(n_batch):
      s = i * self.batch_size
      batch = tasks[s : s+self.batch_size]
      fu = self._run_batch(batch, aur_results)
      ret.append(fu)

    await asyncio.gather(*ret)

  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    # map every task name to its entry so results can be paired back up
    task_by_name: Dict[str, Entry] = dict(self.tasks)

    async with self.task_sem:
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)

async def _run_batch_impl(
  batch: List[Tuple[str, Entry]],
  aur_results: AurResults,
) -> Dict[str, VersionResult]:
  '''Resolve one batch of AUR entries to versions.

  Missing packages map to a GetVersionError result; use_last_modified
  appends the last-modified UTC timestamp; strip_release drops the part
  after the last hyphen.
  '''
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await aur_results.get_multiple(aurnames)

  ret: Dict[str, VersionResult] = {}

  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip_release', False)

    result = results.get(aurname)

    if result is None:
      ret[name] = GetVersionError('AUR upstream not found')
      continue

    version = result['Version']
    if use_last_modified:
      # datetime.utcfromtimestamp is deprecated since Python 3.12;
      # an aware UTC datetime formats to the identical string
      dt = datetime.fromtimestamp(result['LastModified'], timezone.utc)
      version += '-' + dt.strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = version

  return ret

nvchecker-2.12/nvchecker_source/bitbucket.py000066400000000000000000000036621444331012000212630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from typing import Any, List
from urllib.parse import urlencode

from nvchecker.api import VersionResult, Entry, AsyncCache

# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-refs/#api-repositories-workspace-repo-slug-refs-tags-get
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache,
  **kwargs: Any,
) -> VersionResult:
  '''Get versions from a Bitbucket repository.

  Three modes: newest commit date on a branch (default), tags sorted by
  the API (use_sorted_tags), or all tags across pages (use_max_tag).
  '''
  repo = conf['bitbucket']
  br = conf.get('branch', '')
  use_max_tag = conf.get('use_max_tag', False)
  use_sorted_tags = conf.get('use_sorted_tags', False)

  if use_sorted_tags or use_max_tag:
    # request only the fields we need to keep responses small
    parameters = {'fields': 'values.name,next'}

    if use_sorted_tags:
      parameters['sort'] = conf.get('sort', '-target.date')
      if 'query' in conf:
        parameters['q'] = conf['query']

  if use_sorted_tags:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    # already sorted server-side; the first page suffices
    version = await _get_tags(url, max_page=1, cache=cache)

  elif use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    max_page = conf.get('max_page', 3)
    version = await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    data = await cache.get_json(url)

    # newest commit date as the version, e.g. "2023-06-01T..." -> 20230601
    version = data['values'][0]['date'].split('T', 1)[0].replace('-', '')

  return version

async def _get_tags(
  url: str, *,
  max_page: int,
  cache: AsyncCache,
) -> List[str]:
  '''Collect tag names from the API, following pagination up to max_page.'''
  tags: List[str] = []

  for _ in range(max_page):
    data = await cache.get_json(url)
    tags.extend(entry['name'] for entry in data['values'])
    if 'next' not in data:
      break
    url = data['next']

  return tags

nvchecker-2.12/nvchecker_source/cmd.py000066400000000000000000000017511444331012000200470ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import asyncio

import structlog

from nvchecker.api import GetVersionError

logger = structlog.get_logger(logger_name=__name__)

async def run_cmd(cmd: str) -> str:
  '''Run ``cmd`` through the shell and return its stripped stdout.

  Raises GetVersionError when the command exits non-zero or prints
  nothing.
  '''
  logger.debug('running cmd', cmd=cmd)
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  # NOTE(review): stdout is decoded as latin1 (never raises) while stderr
  # uses the default codec with errors replaced — confirm this asymmetry
  # is intentional.
  output_s = output.strip().decode('latin1')
  error_s = error.strip().decode(errors='replace')
  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  elif not output_s:
    raise GetVersionError(
      'command exited without output',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  else:
    return output_s

async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''Return the configured command's output; identical commands are
  deduplicated through the cache.'''
  return await cache.get(conf['cmd'], run_cmd)
nvchecker-2.12/nvchecker_source/combiner.py000066400000000000000000000010331444331012000210730ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import asyncio
import string

from nvchecker.api import entry_waiter

class CombineFormat(string.Template):
  # Template placeholders are 1-based indices ($1, $2, ...) referring to
  # the entries listed in the 'from' option.
  idpattern = '[0-9]+'

async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''Combine the resolved versions of other entries with a template.

  Waits for every entry named in 'from', then substitutes their versions
  into 'format' as $1, $2, ... in listed order.
  '''
  template = CombineFormat(conf['format'])
  sources = conf['from']
  waiter = entry_waiter.get()
  results = await asyncio.gather(
    *(waiter.wait(entry) for entry in sources)
  )
  mapping = {}
  for index, version in enumerate(results, 1):
    mapping[str(index)] = version
  return template.substitute(mapping)
nvchecker-2.12/nvchecker_source/container.py000066400000000000000000000062251444331012000212670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen 

from typing import Dict, List, NamedTuple, Optional, Tuple
from urllib.request import parse_http_list
from urllib.parse import urljoin

from nvchecker.api import session, HTTPError

class AuthInfo(NamedTuple):
  # OAuth2 'service' parameter; some registries (e.g. ghcr.io) omit it.
  service: Optional[str]
  # Token endpoint URL taken from the WWW-Authenticate 'realm' parameter.
  realm: str

def parse_www_authenticate_header(header: str) -> Tuple[str, Dict[str, str]]:
  '''
  Split a WWW-Authenticate header (as used by container-registry OAuth2)
  into its scheme and a parameter mapping. This is NOT RFC-compliant!

  Simplified from http.parse_www_authenticate_header in Werkzeug (BSD license)
  '''
  scheme, params_blob = header.split(None, 1)
  params: Dict[str, str] = {}
  for part in parse_http_list(params_blob):
    key, raw = part.split('=', 1)
    # strip surrounding double quotes, if any
    if raw.startswith('"') and raw.endswith('"'):
      raw = raw[1:-1]
    params[key] = raw
  return scheme, params

# Inspired by https://stackoverflow.com/a/51921869
# Reference: https://github.com/containers/image/blob/v5.6.0/docker/docker_client.go

class UnsupportedAuthenticationError(NotImplementedError):
  # Raised when a registry does not use Bearer authentication (or needs
  # none at all) — the only scheme this source implements.
  def __init__(self):
    super().__init__('Only Bearer authentication supported for now')

async def get_registry_auth_info(registry_host: str) -> AuthInfo:
  '''Probe a registry's /v2/ endpoint and extract its OAuth2 token endpoint.

  The registry is expected to answer 401 with a Bearer WWW-Authenticate
  challenge; any other outcome raises UnsupportedAuthenticationError
  (which is a NotImplementedError, so it is not swallowed by the
  HTTPError handler below).
  '''
  auth_service = auth_realm = None

  try:
    await session.get(f'https://{registry_host}/v2/')
    raise UnsupportedAuthenticationError  # No authentication needed
  except HTTPError as e:
    if e.code != 401:
      raise

    auth_type, auth_info = parse_www_authenticate_header(e.response.headers['WWW-Authenticate'])
    if auth_type.lower() != 'bearer':
      raise UnsupportedAuthenticationError

    # Although 'service' is needed as per https://docs.docker.com/registry/spec/auth/token/,
    # ghcr.io (GitHub container registry) does not provide it
    auth_service = auth_info.get('service')
    auth_realm = auth_info['realm']

    return AuthInfo(auth_service, auth_realm)

async def get_container_tags(info: Tuple[str, str, AuthInfo]) -> List[str]:
  '''Fetch all tags of an image, following Link-header pagination.

  info: (image path, registry host, auth info from get_registry_auth_info).
  '''
  image_path, registry_host, auth_info = info

  # First obtain a pull-scoped Bearer token from the auth endpoint.
  auth_params = {
    'scope': f'repository:{image_path}:pull',
  }
  if auth_info.service:
    auth_params['service'] = auth_info.service
  res = await session.get(auth_info.realm, params=auth_params)
  token = res.json()['token']

  tags = []
  url = f'https://{registry_host}/v2/{image_path}/tags/list'

  # Page through the tag list; a Link response header signals more pages.
  while True:
    res = await session.get(url, headers={
      'Authorization': f'Bearer {token}',
      'Accept': 'application/json',
    })
    tags += res.json()['tags']
    link = res.headers.get('Link')
    if link is None:
      break
    else:
      # The Link value may be relative; resolve against the current URL.
      url = urljoin(url, parse_next_link(link))

  return tags

def parse_next_link(value: str) -> str:
  '''Extract the URL from a Link header value of the form
  ``<url>; rel="next"``.

  Raises ValueError when the value does not have that exact shape.
  (Previously a value without the leading '<' silently lost its first
  character instead of being rejected.)
  '''
  ending = '>; rel="next"'
  if value.startswith('<') and value.endswith(ending):
    return value[1:-len(ending)]
  else:
    raise ValueError(value)

async def get_version(name, conf, *, cache, **kwargs):
  '''List all tags of a container image from its registry.'''
  image_path = conf.get('container', name)
  registry_host = conf.get('registry', 'docker.io')
  # docker.io is only a front-end; API requests go to registry-1.docker.io
  if registry_host == 'docker.io':
    registry_host = 'registry-1.docker.io'

  auth_info = await cache.get(registry_host, get_registry_auth_info)

  return await cache.get(
    (image_path, registry_host, auth_info), get_container_tags)
nvchecker-2.12/nvchecker_source/cpan.py000066400000000000000000000004721444331012000202240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'

async def get_version(name, conf, *, cache, **kwargs):
  '''Fetch the latest release version of a CPAN distribution.'''
  dist = conf.get('cpan', name)
  data = await cache.get_json(CPAN_URL % dist)
  # the API may return a number; always hand back a string
  return str(data['version'])

nvchecker-2.12/nvchecker_source/cran.py000066400000000000000000000013061444331012000202230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.

from nvchecker.api import session, GetVersionError

CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
VERSION_FIELD = 'Version: '

async def request(pkg):
  '''Download the raw DESCRIPTION file of a CRAN package.'''
  res = await session.get(CRAN_URL % pkg)
  return res.body.decode('utf-8', errors='ignore')

async def get_version(name, conf, *, cache, **kwargs):
  '''Extract the Version field from a CRAN package's DESCRIPTION file.

  Raises GetVersionError when no Version line is present.
  '''
  package = conf.get('cran', name)

  desc = await cache.get(package, request)

  for line in desc.splitlines():
    if line.startswith(VERSION_FIELD):
      return line[len(VERSION_FIELD):]

  raise GetVersionError('Invalid DESCRIPTION file')
nvchecker-2.12/nvchecker_source/cratesio.py000066400000000000000000000005431444331012000211130ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

API_URL = 'https://crates.io/api/v1/crates/%s'

async def get_version(name, conf, *, cache, **kwargs):
  '''Return the newest non-yanked version of a crate on crates.io.'''
  crate = conf.get('cratesio') or name
  data = await cache.get_json(API_URL % crate)
  # versions are listed newest first; skip yanked releases
  usable = [entry['num'] for entry in data['versions'] if not entry['yanked']]
  return usable[0]
nvchecker-2.12/nvchecker_source/debianpkg.py000066400000000000000000000013541444331012000212270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from nvchecker.api import GetVersionError

URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'

async def get_version(name, conf, *, cache, **kwargs):
  '''Look up a source package's version via the Debian sources API.

  conf keys: debianpkg (package name, defaults to entry name), suite
  (defaults to 'sid'), strip_release (drop the Debian revision).
  '''
  pkg = conf.get('debianpkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite') or "sid"
  data = await cache.get_json(URL % {"pkgname": pkg, "suite": suite})

  if not data.get('versions'):
    raise GetVersionError('Debian package not found')

  full_version = data['versions'][0]['version']
  if strip_release:
    return full_version.split("-")[0]
  return full_version
nvchecker-2.12/nvchecker_source/gems.py000066400000000000000000000004721444331012000202360ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  '''List all published versions of a gem from rubygems.org.'''
  gem = conf.get('gems', name)
  data = await cache.get_json(GEMS_URL % gem)
  return [entry['number'] for entry in data]
nvchecker-2.12/nvchecker_source/git.py000066400000000000000000000012411444331012000200610ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

from .cmd import run_cmd

async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''List a remote git repo's tags, or — with use_commit — the commit
  hash of a branch (or HEAD).'''
  git = conf['git']

  if conf.get('use_commit', False):
    branch = conf.get('branch')
    ref = 'HEAD' if branch is None else 'refs/heads/' + branch
    cmd = f"git ls-remote {git} {ref}"
    data = await cache.get(cmd, run_cmd)
    # each output line is '<sha>\t<ref>'; keep only the hash
    return data.split(None, 1)[0]

  cmd = f"git ls-remote --tags --refs {git}"
  data = await cache.get(cmd, run_cmd)
  return [line.split("refs/tags/")[1] for line in data.splitlines()]
nvchecker-2.12/nvchecker_source/gitea.py000066400000000000000000000023451444331012000203750ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from __future__ import annotations

import urllib.parse

# %s placeholders: (host, URL-quoted repo path)
GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
) -> VersionResult:
  '''Check a Gitea repository.

  Returns all tag names when use_max_tag is set; otherwise the date
  (YYYYMMDD) of the newest commit, optionally restricted to a branch.
  '''
  repo = urllib.parse.quote(conf['gitea'])
  br = conf.get('branch')
  host = conf.get('host', 'gitea.com')
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITEA_MAX_TAG % (host, repo)
  else:
    url = GITEA_URL % (host, repo)
    if br:
      url += '?sha=' + br

  # Token from the entry config, falling back to the keyfile entry
  # for this host.
  token = conf.get('token')
  if token is None:
    token = keymanager.get_key('gitea_' + host.lower())

  headers = {'Authorization': f'token {token}'} if token else {}

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [tag["name"] for tag in data]
  return data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', '')
nvchecker-2.12/nvchecker_source/github.py000066400000000000000000000075321444331012000205710ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import time
from urllib.parse import urlencode
from typing import Tuple

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  TemporaryError, session, GetVersionError,
)

# Module-level structured logger for this source.
logger = structlog.get_logger(logger_name=__name__)

# %s placeholder: 'owner/repo'
GITHUB_URL = 'https://api.github.com/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.github.com/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.github.com/repos/%s/git/refs/tags'
GITHUB_GRAPHQL_URL = 'https://api.github.com/graphql'

async def get_version(name, conf, **kwargs):
  # Delegate to the real implementation; on a temporary HTTP error,
  # check_ratelimit either logs (rate limit exhausted) or re-raises.
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)

# GraphQL query returning the single most recent tag (ordered by tag
# commit date) that matches an optional search query. The doubled braces
# survive str.format; {name}/{owner}/{query} are filled in below.
QUERY_LATEST_TAG = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    refs(refPrefix: "refs/tags/", first: 1,
         query: "{query}",
         orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
      edges {{
        node {{
          name
        }}
      }}
    }}
  }}
}}
'''

async def get_latest_tag(key: Tuple[str, str, str]) -> str:
  '''Return the newest tag of a GitHub repo via the GraphQL API.

  key: (owner/repo, tag search query, API token — required for GraphQL).
  Raises GetVersionError when no matching tag exists.
  '''
  repo, query, token = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_TAG.format(
    owner = owner,
    name = reponame,
    query = query,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['refs']['edges']
  if not refs:
    raise GetVersionError('no tag found')

  return refs[0]['node']['name']

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Dispatch to one of the GitHub version strategies, by conf flag:

    use_latest_tag     -> newest tag via GraphQL (token required)
    use_latest_release -> tag name of the latest release
    use_max_tag        -> list of all tag names
    (default)          -> newest commit date as YYYYMMDD.HHMMSS,
                          optionally limited to a branch and/or path
  '''
  repo = conf['github']

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key('github')

  use_latest_tag = conf.get('use_latest_tag', False)
  if use_latest_tag:
    if not token:
      raise GetVersionError('token not given but it is required')

    query = conf.get('query', '')
    return await cache.get((repo, query, token), get_latest_tag) # type: ignore

  br = conf.get('branch')
  path = conf.get('path')
  use_latest_release = conf.get('use_latest_release', False)
  use_max_tag = conf.get('use_max_tag', False)
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % repo
  elif use_max_tag:
    url = GITHUB_MAX_TAG % repo
  else:
    url = GITHUB_URL % repo
    parameters = {}
    if br:
      parameters['sha'] = br
    if path:
      parameters['path'] = path
    url += '?' + urlencode(parameters)
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
  }
  if token:
    headers['Authorization'] = f'token {token}'

  data = await cache.get_json(url, headers = headers)

  if use_max_tag:
    # refs look like 'refs/tags/<name>'; keep only the tag name
    tags = [ref['ref'].split('/', 2)[-1] for ref in data]
    if not tags:
      raise GetVersionError('No tag found in upstream repository.')
    return tags

  if use_latest_release:
    if 'tag_name' not in data:
      raise GetVersionError('No release found in upstream repository.')
    version = data['tag_name']

  else:
    # YYYYMMDD.HHMMSS
    version = data[0]['commit']['committer']['date'] \
        .rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')

  return version

def check_ratelimit(exc, name):
  '''Swallow exc with a log message if it was caused by GitHub rate
  limiting; re-raise it otherwise.

  Must be called from an ``except`` block — the bare ``raise`` re-raises
  the active exception.
  '''
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    # default 0 avoids int(None) if the reset header is absent
    reset = int(res.headers.get('X-RateLimit-Reset', 0))
    logger.error(f'rate limited, resetting at {time.ctime(reset)}. '
                  'Or get an API token to increase the allowance if not yet',
                 name = name,
                 reset = reset)
  else:
    raise
nvchecker-2.12/nvchecker_source/gitlab.py000066400000000000000000000035361444331012000205510ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  TemporaryError,
)

# %s placeholders: (host, URL-quoted project path)
GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  # Delegate to the real implementation; on a temporary HTTP error,
  # check_ratelimit either logs (rate limit exhausted) or re-raises.
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Check a GitLab project.

  Returns all tag names when use_max_tag is set; otherwise the creation
  date (YYYYMMDD) of the newest commit, optionally limited to a branch.
  '''
  repo = urllib.parse.quote_plus(conf['gitlab'])
  branch = conf.get('branch')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo)
    if branch:
      url += '?ref_name=%s' % branch

  # Token from the entry config, falling back to the keyfile entry
  # for this host.
  token = conf.get('token')
  if token is None:
    token = keymanager.get_key('gitlab_' + host.lower())

  headers = {"PRIVATE-TOKEN": token} if token else {}

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [tag["name"] for tag in data]
  return data[0]['created_at'].split('T', 1)[0].replace('-', '')

def check_ratelimit(exc, name):
  '''Swallow exc with a log message if it was caused by GitLab rate
  limiting; re-raise it otherwise.

  Must be called from an ``except`` block — the bare ``raise`` re-raises
  the active exception.
  '''
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('gitlab rate limited. Wait some time '
                 'or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise
nvchecker-2.12/nvchecker_source/hackage.py000066400000000000000000000005051444331012000206630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'

async def get_version(name, conf, *, cache, **kwargs):
  '''Return the newest preferred ("normal") version of a Hackage package.'''
  pkg = conf.get('hackage', name)
  data = await cache.get_json(HACKAGE_URL % pkg)
  return data['normal-version'][0]

nvchecker-2.12/nvchecker_source/htmlparser.py000066400000000000000000000022771444331012000214710ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Ypsilik , et al.
# Copyright (c) 2013-2020 lilydjwg , et al.

from lxml import html, etree

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  # The whole entry conf (in hashable form) is the cache key, so
  # identical entries share a single fetch.
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  '''Fetch a page (GET, or POST when post_data is set), parse it as HTML
  and return the strings matched by the configured xpath.

  With missing_ok, an xpath lookup that raises ValueError yields an
  empty result instead of an error. Raises GetVersionError for a bad
  xpath expression or (without missing_ok) a failed lookup.
  '''
  conf = dict(info)

  encoding = conf.get('encoding')
  parser = html.HTMLParser(encoding=encoding)
  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
        'Content-Type': conf.get('post_data_type', 'application/x-www-form-urlencoded')
      })
  doc = html.fromstring(res.body, base_url=conf['url'], parser=parser)

  try:
    els = doc.xpath(conf.get('xpath'))
  except ValueError:
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    # missing_ok: treat the failed lookup as "no versions found".
    # (Previously control fell through with `els` unbound → NameError.)
    els = []
  except etree.XPathEvalError as e:
    raise GetVersionError('bad xpath', exc_info=e)

  version = [
    str(el)
    if isinstance(el, str)
    else str(el.text_content())
    for el in els
  ]
  return version
nvchecker-2.12/nvchecker_source/httpheader.py000066400000000000000000000017431444331012000214350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  # The whole entry conf (in hashable form) is the cache key, so
  # identical entries share a single request.
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  '''Extract version strings from an HTTP response header.

  Issues a request (HEAD by default, redirects not followed unless
  follow_redirects is set) and applies the configured regex to the
  chosen header (Location by default), returning all matches.
  '''
  conf = dict(info)
  url = conf['url']
  header = conf.get('header', 'Location')
  follow_redirects = conf.get('follow_redirects', False)
  method = conf.get('method', 'HEAD')

  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  res = await session.request(
    url,
    method = method,
    follow_redirects = follow_redirects,
  )

  header_value = res.headers.get(header)
  if not header_value:
    raise GetVersionError('header %s not found or is empty' % header)

  try:
    version = regex.findall(header_value)
  except ValueError:
    # NOTE(review): re.findall does not raise ValueError for a compiled
    # pattern; this handler looks unreachable — confirm before removing.
    raise GetVersionError('version string not found.')
  return version
nvchecker-2.12/nvchecker_source/manual.py000066400000000000000000000002561444331012000205600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

async def get_version(name, conf, **kwargs):
  '''Return the manually pinned version string, or None when it is empty.

  NOTE(review): a missing 'manual' key becomes the literal string 'None'
  via str(None); behavior preserved as-is.
  '''
  value = str(conf.get('manual')).strip()
  return value or None
nvchecker-2.12/nvchecker_source/none.py000066400000000000000000000006531444331012000202430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

from __future__ import annotations

from nvchecker.api import (
  BaseWorker, GetVersionError, RawResult,
)

class Worker(BaseWorker):
  # Placeholder worker for entries without a 'source': every assigned
  # task immediately yields a GetVersionError result.
  async def run(self) -> None:
    exc = GetVersionError('no source specified')
    async with self.task_sem:
      for name, conf in self.tasks:
        await self.result_q.put(
          RawResult(name, exc, conf))
nvchecker-2.12/nvchecker_source/npm.py000066400000000000000000000013441444331012000200740ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import json
import re
from nvchecker.api import session

NPM_URL = 'https://registry.npmjs.org/%s'

def configure(config):
  '''Override the registry URL template from the source-level config
  ('registry' key); trailing slashes are normalized away.'''
  global NPM_URL
  registry = config.get('registry')
  if registry:
    NPM_URL = f'{registry.rstrip("/")}/%s'

async def get_first_1k(url):
  '''Fetch only the first KiB of the abbreviated registry metadata —
  enough to contain the dist-tags object near the start.'''
  headers = {
    "Accept": "application/vnd.npm.install-v1+json",
    "Range": "bytes=0-1023",
  }
  res = await session.get(url, headers=headers)
  return res.body

async def get_version(name, conf, *, cache, **kwargs):
  # Parse the dist-tags object out of the first KiB of metadata and
  # return the 'latest' tag.
  key = conf.get('npm', name)
  data = await cache.get(NPM_URL % key, get_first_1k)

  # NOTE(review): if "dist-tags" is not within the first 1024 bytes,
  # re.search returns None and this raises AttributeError — confirm
  # whether that case warrants an explicit error.
  dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
  return dist_tags['latest']
nvchecker-2.12/nvchecker_source/openvsx.py000066400000000000000000000006531444331012000210060ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

API_URL = 'https://open-vsx.org/api/%s/%s'

async def get_version(name, conf, *, cache, **kwargs):
  '''Fetch the latest version of an Open VSX extension.

  The extension id has the form 'publisher.name'.
  '''
  ext_id = conf.get('openvsx') or name
  parts = ext_id.split('.')
  publisher = parts[0]
  extension = parts[1]
  data = await cache.get_json(API_URL % (publisher, extension))
  return data['version']
nvchecker-2.12/nvchecker_source/packagist.py000066400000000000000000000007751444331012000212570ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

PACKAGIST_URL = 'https://packagist.org/packages/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  '''Return the most recently released version of a Packagist package,
  ignoring the rolling "dev-master" entry.

  Implicitly returns None when no eligible version exists.
  '''
  pkg = conf.get('packagist', name)
  data = await cache.get_json(PACKAGIST_URL % pkg)

  versions = {}
  for version, details in data["package"]['versions'].items():
    if version != "dev-master":
      versions[version] = details

  if versions:
    return max(versions, key=lambda v: versions[v]["time"])
nvchecker-2.12/nvchecker_source/pacman.py000066400000000000000000000007771444331012000205520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker_source import cmd

async def get_version(name, conf, **kwargs):
  '''Query the local pacman sync database for a package's version.

  Delegates to the cmd source with a pipeline that extracts the Version
  field; with strip_release, the pkgrel suffix is dropped.
  '''
  referree = conf.get('pacman') or name
  conf['cmd'] = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % referree
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    return version.rsplit('-', 1)[0]
  return version
nvchecker-2.12/nvchecker_source/pagure.py000066400000000000000000000011341444331012000205620ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
)

# %s placeholders: (host, project path)
PAGURE_URL = 'https://%s/api/0/%s/git/tags'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Return all git tags of a Pagure project (pagure.io by default).'''
  repo = conf['pagure']
  host = conf.get('host', "pagure.io")

  url = PAGURE_URL % (host, repo)

  data = await cache.get_json(url)
  version = data["tags"]
  return version
nvchecker-2.12/nvchecker_source/pypi.py000066400000000000000000000010211444331012000202530ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 lilydjwg , et al.

from packaging.version import Version

async def get_version(name, conf, *, cache, **kwargs):
  '''Fetch a package's version from the PyPI JSON API.

  By default returns the current release reported by PyPI; with
  use_pre_release, returns the highest version across all releases
  (packaging.version ordering), pre-releases included.
  '''
  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  data = await cache.get_json('https://pypi.org/pypi/{}/json'.format(package))

  if not use_pre_release:
    return data['info']['version']

  all_versions = sorted(data['releases'].keys(), key = Version)
  return all_versions[-1]
nvchecker-2.12/nvchecker_source/regex.py000066400000000000000000000020371444331012000204140ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  '''Match version strings on a web page with a user-supplied regex.

  The regex may have at most one capture group; all matches are
  returned. Raises GetVersionError for a bad regex, a multi-group
  regex, or (without missing_ok) when nothing matches.
  '''
  try:
    pattern = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)
  if pattern.groups > 1:
    raise GetVersionError('multi-group regex')

  # Everything affecting the request participates in the cache key.
  cache_key = (
    conf['url'],
    conf.get('encoding', 'latin1'),
    conf.get('post_data'),
    conf.get('post_data_type', 'application/x-www-form-urlencoded'),
  )
  body = await cache.get(cache_key, get_url)

  found = pattern.findall(body)
  if not found and not conf.get('missing_ok', False):
    raise GetVersionError('version string not found.')
  return found

async def get_url(info):
  '''Fetch the page body and decode it with the configured encoding.

  info: (url, encoding, post_data, post_data_type); a POST request is
  made when post_data is not None, otherwise a plain GET.
  '''
  url, encoding, post_data, post_data_type = info

  if post_data is None:
    res = await session.get(url)
  else:
    res = await session.post(url, body = post_data, headers = {
      'Content-Type': post_data_type,
    })
  body = res.body.decode(encoding)
  return body
nvchecker-2.12/nvchecker_source/repology.py000066400000000000000000000016001444331012000211350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019 lilydjwg , et al.

from nvchecker.api import GetVersionError

API_URL = 'https://repology.org/api/v1/project/{}'

async def get_version(name, conf, *, cache, **kwargs):
  '''Return the versions of a project as packaged in one repository
  (and optionally one subrepo) known to Repology.

  Raises GetVersionError when 'repo' is missing or nothing matches.
  '''
  project = conf.get('repology') or name
  repo = conf.get('repo')
  subrepo = conf.get('subrepo')
  if not repo:
    raise GetVersionError('repo field is required for repology source')

  data = await cache.get_json(API_URL.format(project))

  matching = [pkg for pkg in data if pkg['repo'] == repo]
  if not matching:
    raise GetVersionError('package is not found', repo=repo)

  if subrepo:
    matching = [pkg for pkg in matching if pkg.get('subrepo') == subrepo]
    if not matching:
        raise GetVersionError('package is not found in subrepo',
                              repo=repo, subrepo=subrepo)

  return [pkg['version'] for pkg in matching]
nvchecker-2.12/nvchecker_source/sparkle.py000066400000000000000000000020521444331012000207400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei 

from xml.etree import ElementTree

from nvchecker.api import session

# XML namespace used by Sparkle appcast feeds.
NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'


async def get_version(name, conf, *, cache, **kwargs):
  # The appcast URL itself serves as the cache key.
  sparkle = conf['sparkle']
  return await cache.get(sparkle, get_version_impl)


async def get_version_impl(sparkle):
  '''Read the newest item of a Sparkle appcast feed.

  Combines the human-readable short version and the build number as
  "short-build" when both exist and differ; if they look swapped
  (short version all digits, build number not), they are exchanged
  first. Returns None when neither attribute is present.
  '''
  res = await session.get(sparkle)
  root = ElementTree.fromstring(res.body)
  # NOTE(review): if the feed lacks channel/item/enclosure this is None
  # and the .get calls below raise AttributeError — confirm.
  item = root.find('./channel/item[1]/enclosure')

  version_string = item.get(f'{{{NAMESPACE}}}shortVersionString')
  build_number = item.get(f'{{{NAMESPACE}}}version')

  if (version_string and version_string.isdigit()) and (
    build_number and not build_number.isdigit()
  ):
    version_string, build_number = build_number, version_string

  version = []

  if version_string:
    version.append(version_string)
  if build_number and (build_number not in version):
    version.append(build_number)

  return '-'.join(version) if version else None
nvchecker-2.12/nvchecker_source/ubuntupkg.py000066400000000000000000000023361444331012000213300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from nvchecker.api import GetVersionError

URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'

async def get_version(name, conf, *, cache, **kwargs):
  '''Find the currently published version of an Ubuntu source package
  via the Launchpad API.

  conf keys: ubuntupkg (package name, defaults to the entry name),
  suite (restrict to one distro series, e.g. 'jammy'), strip_release
  (drop the Debian revision after the first '-').

  Raises GetVersionError when no published release is found.
  '''
  pkg = conf.get('ubuntupkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite')
  url = URL % pkg

  if suite:
    suite = "https://api.launchpad.net/1.0/ubuntu/" + suite

  releases = []

  # Walk the paginated collection until a published (and, if requested,
  # suite-matching) release turns up or pages run out.
  while not releases:
    data = await cache.get_json(url)

    if not data.get('entries'):
      raise GetVersionError('Ubuntu package not found')

    releases = [r for r in data["entries"] if r["status"] == "Published"]

    if suite:
      releases = [r for r in releases if r["distro_series_link"] == suite]

    if "next_collection_link" not in data:
      break

    url = data["next_collection_link"]

  if not releases:
    # (an unreachable `return` that followed this raise was removed)
    raise GetVersionError('Ubuntu package not found')

  if strip_release:
    version = releases[0]['source_package_version'].split("-")[0]
  else:
    version = releases[0]['source_package_version']

  return version
nvchecker-2.12/nvchecker_source/vsmarketplace.py000066400000000000000000000022741444331012000221460ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  TemporaryError, session, GetVersionError,
)

API_URL = 'https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery'

HEADERS = {
  'Accept': 'application/json;api-version=6.1-preview.1',
  'Content-Type': 'application/json'
}

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  '''Query the Visual Studio Marketplace gallery for an extension's
  latest version. The extension id has the form 'publisher.name'.
  '''
  name = conf.get('vsmarketplace') or name

  # Gallery query: criteria filterType 8 = target product,
  # 7 = extension name, 12 = exclusion flags; 'flags' selects which
  # metadata fields the response includes.
  q = {
    'filters': [
      {
        'criteria': [
          {
            'filterType': 8,
            'value': 'Microsoft.VisualStudio.Code'
          },
          {
            'filterType': 7,
            'value': name
          },
          {
            'filterType': 12,
            'value': '4096'
          }
        ],
        'pageNumber': 1,
        'pageSize': 2,
        'sortBy': 0,
        'sortOrder': 0
      }
    ],
    'assetTypes': [],
    'flags': 946
  }

  res = await session.post(
    API_URL,
    headers = HEADERS,
    json = q,
  )
  j = res.json()

  # First result, first extension, newest version.
  version = j['results'][0]['extensions'][0]['versions'][0]['version']
  return version
nvchecker-2.12/pyproject.toml000066400000000000000000000004141444331012000163110ustar00rootroot00000000000000[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[tool.pytest.ini_options]
# addopts = -n auto
asyncio_mode = "strict"

# build and upload
# rm -rf dist && python -m build --no-isolation && twine check dist/* && twine upload -s dist/*
nvchecker-2.12/sample_config.toml000066400000000000000000000015151444331012000171030ustar00rootroot00000000000000[__config__]
oldver = "old_ver.json"
newver = "new_ver.json"

[vim]
source = "regex"
regex = "7\\.3\\.\\d+"
url = "http://ftp.vim.org/pub/vim/patches/7.3/"

[google-chrome]
source = "cmd"
cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''

[fbcat]
source = "aur"

[winterpy]
source = "github"
github = "lilydjwg/winterpy"

[nvchecker]
source = "github"
github = "lilydjwg/nvchecker"

[ssed]
source = "regex"
regex = "The current version is ([\\d.]+)\\."
url = "http://sed.sourceforge.net/grabbag/ssed/"
proxy = "http://localhost:8087"

[PySide]
source = "pypi"
pypi = "PySide"

[test]
source = "manual"
manual = "0.1"

["Sparkle Test App"]
source = "sparkle"
sparkle = "https://sparkle-project.org/files/sparkletestcast.xml"
nvchecker-2.12/scripts/000077500000000000000000000000001444331012000150655ustar00rootroot00000000000000nvchecker-2.12/scripts/README.rst000066400000000000000000000000451444331012000165530ustar00rootroot00000000000000Additional scripts may help someone.
nvchecker-2.12/scripts/nvchecker-ini2toml000077500000000000000000000037151444331012000205240ustar00rootroot00000000000000#!/usr/bin/python3
# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

import argparse

import configparser
import toml

# Order in which 1.x handler keys are recognized; the first key present
# in a section becomes the 2.x 'source' value.
_handler_precedence = (
  'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
  'gems', 'pacman',
  'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
  'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
  'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)

# Options converted with configparser's boolean parsing.
BOOL_KEYS = [
  'strip_release', 'use_last_modified',
  'use_latest_release', 'use_latest_tag',
  'use_max_tag', 'use_pre_release',
]

# Options converted to integers.
INT_KEYS = [
  'max_page',
]

def main():
  '''Convert an nvchecker 1.x ini configuration file to the 2.x toml
  format, retyping known boolean/integer options along the way.'''
  parser = argparse.ArgumentParser(description='convert 1.x ini file to 2.x toml file')
  parser.add_argument('ini', type=argparse.FileType(),
                      help='the old ini file')
  parser.add_argument('toml', type=argparse.FileType(mode='w'),
                      # fixed: previously said "the new ini file"
                      help='the new toml file')
  args = parser.parse_args()

  old = configparser.ConfigParser(
    dict_type=dict, allow_no_value=True, interpolation=None,
  )
  old.read_file(args.ini)

  # __config__ is carried over, with max_concurrent renamed.
  if '__config__' in old:
    c = old['__config__']
    newconf = dict(c)
    x = newconf.pop('max_concurrent', None)
    if x is not None:
      newconf['max_concurrency'] = x
    confs = {'__config__': newconf}
  else:
    confs = {}

  for section in old.sections():
    if section == '__config__':
      continue

    conf = old[section]
    newconf = {}

    # Pick the 2.x source type from the first recognized handler key.
    for key in _handler_precedence:
      if key not in conf:
        continue
      newconf['source'] = key
      if conf.get(key):
        newconf[key] = conf.get(key)
      break

    dconf = dict(conf)

    for k, v in dconf.items():
      # 1.x allowed dashes in option names; 2.x uses underscores.
      if '-' in k:
        k = k.replace('-', '_')

      if k in BOOL_KEYS:
        newconf[k] = conf.getboolean(k)
      elif k in INT_KEYS:
        newconf[k] = conf.getint(k)
      elif v != '':
        newconf[k] = v

    confs[section] = newconf

  toml.dump(confs, args.toml)
  args.toml.flush()

if __name__ == '__main__':
  main()
nvchecker-2.12/scripts/nvchecker-notify000077500000000000000000000031611444331012000202720ustar00rootroot00000000000000#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2020,2022 lilydjwg , et al.

'''
A simple wrapper to show desktop notifications while running nvchecker.
'''

import os
import subprocess
import json

import gi
try:
  gi.require_version('Notify', '0.8')
except ValueError:
  gi.require_version('Notify', '0.7')
from gi.repository import Notify

def get_args():
  """Parse command-line options for the notify wrapper.

  Returns an argparse.Namespace with:
    file    -- optional path of the nvchecker configuration file (-c)
    keyfile -- optional path of a keyfile overriding the configured one (-k)
  """
  import argparse
  parser = argparse.ArgumentParser(description='show desktop notifications while running nvchecker')
  parser.add_argument('-c', '--file',
                      metavar='FILE', type=str,
                      help='software version configuration file if not default')
  parser.add_argument('-k', '--keyfile',
                      metavar='FILE', type=str,
                      help='use specified keyfile (override the one in configuration file)')
  return parser.parse_args()

def main():
  """Run nvchecker and show a desktop notification for each update.

  Spawns nvchecker with its JSON log stream pointed at the write end of a
  pipe, reads "updated" events from the read end, and accumulates them in
  a single reusable libnotify notification.

  Raises subprocess.CalledProcessError if nvchecker exits non-zero.
  """
  args = get_args()

  Notify.init('nvchecker')
  # One Notification object is reused so each update replaces the popup
  # text instead of stacking new notifications.
  notif = Notify.Notification()
  updates = []

  rfd, wfd = os.pipe()
  cmd = [
    'nvchecker', '--logger', 'both', '--json-log-fd', str(wfd),
  ]
  if args.file:
    cmd.extend(['-c', args.file])
  if args.keyfile:
    cmd.extend(['-k', args.keyfile])

  # pass_fds keeps the pipe's write end open in the child across exec.
  process = subprocess.Popen(cmd, pass_fds=(wfd,))
  # Close our copy of the write end so the read loop below sees EOF once
  # the child exits.
  os.close(wfd)

  output = os.fdopen(rfd)
  for l in output:
    # Each line is one JSON-encoded structlog event from nvchecker.
    j = json.loads(l)
    event = j['event']
    if event == 'updated':
      updates.append('%(name)s updated to version %(version)s' % j)
      notif.update('nvchecker', '\n'.join(updates))
      notif.show()

  ret = process.wait()
  if ret != 0:
    raise subprocess.CalledProcessError(ret, cmd)

if __name__ == '__main__':
  main()
nvchecker-2.12/scripts/nvtake.bash_completion000066400000000000000000000010651444331012000214470ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

# Bash completion for nvtake: complete with the package names reported by
# `nvcmp -q`. The result is cached in the (deliberately non-local) variables
# _nvtake_completion_cache / _nvchecker_conf_mtime_cached and invalidated
# whenever the mtime of any file under ~/.config/nvchecker changes.
_nvtake() {
    local cur _nvchecker_conf_mtime
    _init_completion || return

    # Concatenated mtimes of all config files; any change busts the cache.
    _nvchecker_conf_mtime="$(stat -c %Y $HOME/.config/nvchecker/*)"
    if [ -z "$_nvtake_completion_cache" -o "$_nvchecker_conf_mtime" != "$_nvchecker_conf_mtime_cached" ]; then
        _nvtake_completion_cache="$(nvcmp -q)"
        _nvchecker_conf_mtime_cached="$_nvchecker_conf_mtime"
    fi

    COMPREPLY=( $(compgen -W "$_nvtake_completion_cache" -- "$cur") )
} &&
    complete -F _nvtake nvtake
nvchecker-2.12/setup.cfg000066400000000000000000000034031444331012000152170ustar00rootroot00000000000000# The complex upload command:
# rm -rf dist && python -m build --sdist && twine check dist/* && twine upload -s dist/*

[metadata]
name = nvchecker
version = attr: nvchecker.__version__
author = lilydjwg
author_email = lilydjwg@gmail.com
description = New version checker for software
license = MIT
keywords = new, version, build, check
url = https://github.com/lilydjwg/nvchecker
long_description = file: README.rst
long_description_content_type = text/x-rst
platforms = any

classifiers =
  Development Status :: 5 - Production/Stable
  Environment :: Console
  Intended Audience :: Developers
  Intended Audience :: System Administrators
  License :: OSI Approved :: MIT License
  Operating System :: OS Independent
  Programming Language :: Python
  Programming Language :: Python :: 3
  Programming Language :: Python :: 3 :: Only
  Programming Language :: Python :: 3.7
  Programming Language :: Python :: 3.8
  Programming Language :: Python :: 3.9
  Programming Language :: Python :: 3.10
  Topic :: Internet
  Topic :: Internet :: WWW/HTTP
  Topic :: Software Development
  Topic :: System :: Archiving :: Packaging
  Topic :: System :: Software Distribution
  Topic :: Utilities

[options]
zip_safe = True

packages = find_namespace:
install_requires =
  setuptools; python_version<"3.8"
  tomli; python_version<"3.11"
  structlog
  platformdirs
  tornado>=6
  pycurl
scripts =
  scripts/nvchecker-ini2toml
  scripts/nvchecker-notify

[options.packages.find]
exclude = tests, build*, docs*

[options.extras_require]
vercmp =
  pyalpm
awesomeversion =
  awesomeversion
pypi =
  packaging
htmlparser =
  lxml

[options.entry_points]
console_scripts =
  nvchecker = nvchecker.__main__:main
  nvtake = nvchecker.tools:take
  nvcmp = nvchecker.tools:cmp

[flake8]
ignore = E111, E302, E501
nvchecker-2.12/tests/000077500000000000000000000000001444331012000145405ustar00rootroot00000000000000nvchecker-2.12/tests/__init__.py000066400000000000000000000001131444331012000166440ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

nvchecker-2.12/tests/conftest.py000066400000000000000000000057541444331012000167520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

import asyncio
import structlog
import os
from pathlib import Path
from typing import TYPE_CHECKING

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import pytest
import pytest_asyncio

from nvchecker import core
from nvchecker import __main__ as main
from nvchecker.util import Entries, VersData, RawResult

use_keyfile = False

async def run(
  entries: Entries, max_concurrency: int = 20,
) -> VersData:
  """Dispatch the given entries through nvchecker core and return new versions.

  entries         -- mapping of entry name to its source configuration
  max_concurrency -- cap on concurrently running check tasks

  Uses the KEYFILE environment variable as the keyfile path only when the
  module-level `use_keyfile` flag is set (see the `keyfile` fixture below).
  """
  task_sem = asyncio.Semaphore(max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  keyfile = os.environ.get('KEYFILE')
  if use_keyfile and keyfile:
    filepath = Path(keyfile)
    keymanager = core.KeyManager(filepath)
  else:
    keymanager = core.KeyManager(None)

  dispatcher = core.setup_httpclient()
  entry_waiter = core.EntryWaiter()
  futures = dispatcher.dispatch(
    entries, task_sem, result_q,
    keymanager, entry_waiter, 1, {},
  )

  # Start with no known old versions; collect results as they arrive.
  oldvers: VersData = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter)
  runner_coro = core.run_tasks(futures)

  vers, _has_failures = await main.run(result_coro, runner_coro)
  return vers

@pytest_asyncio.fixture(scope="module")
async def get_version():
  """Fixture: check a single entry and return its detected version (or None)."""
  async def __call__(name, config):
    entries = {name: config}
    newvers = await run(entries)
    return newvers.get(name)

  return __call__

@pytest_asyncio.fixture(scope="module")
async def run_str():
  """Fixture: run a TOML config string with one entry, return its version."""
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    # Single-entry config: return the sole resulting version value.
    return newvers.popitem()[1]

  return __call__

@pytest_asyncio.fixture(scope="module")
async def run_str_multi():
  """Fixture: run a TOML config string, return the full name→version mapping."""
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    return newvers

  return __call__

# A single loop shared by the whole test session (see fixture docstring).
loop = asyncio.new_event_loop()
@pytest.fixture(scope="session")
def event_loop(request):
  """Override pytest-asyncio's event_loop fixture,
     Don't create an instance of the default event loop for each test case.
     We need the same ioloop across tests for the aiohttp support.
  """
  yield loop

@pytest.fixture(scope="session", autouse=True)
def raise_on_logger_msg():
  """Turn structlog warning/error events into exceptions so tests fail loudly."""
  def proc(logger, method_name, event_dict):
    if method_name in ('warning', 'error'):
      if 'exc_info' in event_dict:
        exc = event_dict['exc_info']
        if isinstance(exc, Exception):
          raise exc
        else: # exc_info=True
          # Re-raise the exception currently being handled.
          raise
      # These event prefixes are expected noise and must not fail tests.
      if not event_dict['event'].startswith(('rate limited', 'no-result')):
        raise RuntimeError(event_dict['event'])
    return event_dict['event']

  structlog.configure([proc])

def pytest_configure(config):
  # register an additional marker
  # 'needs_net' marks tests that require Internet access so they can be
  # deselected in offline runs.
  config.addinivalue_line(
    'markers', 'needs_net: mark test to require Internet access',
  )

@pytest.fixture
def keyfile():
  """Fixture: enable keyfile usage in run() while the test executes.

  Skips the test when the KEYFILE environment variable is not set; otherwise
  flips the module-level `use_keyfile` flag on for the duration of the test.
  """
  global use_keyfile
  if 'KEYFILE' not in os.environ:
    pytest.skip('KEYFILE not set')
    return

  use_keyfile = True
  yield
  use_keyfile = False
nvchecker-2.12/tests/test_alpm.py000066400000000000000000000053161444331012000171070ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 DDoSolitary , et al.

import pathlib
import shutil
import subprocess
import tempfile

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('makepkg') is None, reason='requires makepkg command'),
  pytest.mark.skipif(shutil.which('repo-add') is None, reason='requires repo-add command')
]

global temp_dir, db_path


def setup_module(module):
  global temp_dir, db_path
  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  pkg_path = temp_path / 'test-pkg'
  pkg_path.mkdir()
  with (pkg_path / 'PKGBUILD').open('w') as f:
    f.write(
      'pkgname=test-pkg\n'
      'pkgver=1.2.3\n'
      'pkgrel=4\n'
      'arch=(any)\n'
      'provides=("test-provides=5.6-7" "test-provides-unversioned")\n'
      'options=(!debug)\n'
    )
  subprocess.check_call(['makepkg', '--nosign'], cwd=pkg_path)
  pkg_file = subprocess.check_output(['makepkg', '--packagelist'], cwd=pkg_path, text=True).strip()
  db_path = pkg_path / 'test-db'
  db_path.mkdir()
  repo_path = db_path / 'sync'
  repo_path.mkdir()
  subprocess.check_call([
    'repo-add',
    repo_path / 'test-repo.db.tar.gz',
    pkg_path / pkg_file
  ])


def teardown_module(module):
  temp_dir.cleanup()


async def test_alpm(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo'
  }) == '1.2.3-4'


async def test_alpm_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'strip_release': True
  }) == '1.2.3'


async def test_alpm_provided(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides'
  }) == '5.6-7'


async def test_alpm_provided_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides',
    'strip_release': True
  }) == '5.6'


async def test_alpm_missing_repo(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'wrong-repo'
    })


async def test_alpm_missing_pkg(get_version):
  with pytest.raises(RuntimeError):
    await get_version('wrong-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo'
    })


async def test_alpm_missing_provides(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo',
      'provided': 'wrong-provides'
    })
nvchecker-2.12/tests/test_alpmfiles.py000066400000000000000000000013041444331012000201230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.

import shutil

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('pacman') is None, reason='requires pacman command'),
]

async def test_alpmfiles(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'libuv',
    'filename': 'usr/lib/libuv\\.so\\.([^.]+)',
  }) == '1'

async def test_alpmfiles_strip(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'glibc',
    'repo': 'core',
    'filename': 'libc\\.so\\.[^.]+',
    'strip_dir': True,
    'dbpath': '/var/lib/pacman',
  }) == 'libc.so.6'
nvchecker-2.12/tests/test_android_sdk.py000066400000000000000000000032231444331012000204320ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Chih-Hsuan Yen 

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_android_addon(get_version):
    assert await get_version("android-google-play-apk-expansion", {
        "source": "android_sdk",
        "android_sdk": "extras;google;market_apk_expansion",
        "repo": "addon",
    }) == "1.r03"

async def test_android_package(get_version):
    assert await get_version("android-sdk-cmake", {
        "source": "android_sdk",
        "android_sdk": "cmake;",
        "repo": "package",
    }) == "3.22.1"


async def test_android_package_channel(get_version):
    assert await get_version("android-sdk-cmake", {
        "source": "android_sdk",
        "android_sdk": "ndk;",
        "repo": "package",
        "channel": "beta,dev,canary",
    }) == "25.0.8528842"

async def test_android_list(get_version):
    assert await get_version("android-sdk-cmake-older", {
        "source": "android_sdk",
        "android_sdk": "cmake;",
        "repo": "package",
        "include_regex": r"3\.10.*",
    }) == "3.10.2"

async def test_android_package_os(get_version):
    # Regression fix: the `assert` keyword was missing, so the comparison
    # result was silently discarded and this test could never fail.
    assert await get_version("android-usb-driver", {
        "source": "android_sdk",
        "android_sdk": "extras;google;usb_driver",
        "repo": "addon",
        "host_os": "windows"
    }) == "13"

async def test_android_package_os_missing(get_version):
    # Regression fix: the `assert` keyword was missing, making this a no-op;
    # also compare with `is None` rather than `== None` (PEP 8).
    assert await get_version("android-usb-driver", {
        "source": "android_sdk",
        "android_sdk": "extras;google;usb_driver",
        "repo": "addon",
        "host_os": "linux"
    }) is None
nvchecker-2.12/tests/test_anitya.py000066400000000000000000000006251444331012000174410ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

import re

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_anitya(get_version):
  version = await get_version("shutter", {
    "source": "anitya",
    "anitya": "fedora/shutter",
  })
  assert re.match(r"[0-9.]+", version)
nvchecker-2.12/tests/test_apt.py000066400000000000000000000024461444331012000167430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@flaky(max_runs=10)
async def test_apt(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
    }) == "0.1.7-1"

@flaky(max_runs=10)
async def test_apt_srcpkg(get_version):
    ver = await get_version("test", {
        "source": "apt",
        "srcpkg": "golang-github-dataence-porter2",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
    })
    assert ver.startswith("0.0~git20150829.56e4718-")

@flaky(max_runs=10)
async def test_apt_strip_release(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
        "strip_release": 1,
    }) == "0.1.7"

@flaky(max_runs=10)
async def test_apt_deepin(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "https://community-packages.deepin.com/deepin",
        "suite": "apricot",
    }) == "0.1.6-1"

nvchecker-2.12/tests/test_archpkg.py000066400000000000000000000015431444331012000175730ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@flaky
async def test_archpkg(get_version):
    assert await get_version("base", {
        "source": "archpkg",
    }) == "3-1"

@flaky
async def test_archpkg_strip_release(get_version):
    assert await get_version("base", {
        "source": "archpkg",
        "strip_release": True,
    }) == "3"

@flaky
async def test_archpkg_provided(get_version):
    assert await get_version("dbus", {
        "source": "archpkg",
        "provided": "libdbus-1.so",
    }) == "3-64"

@flaky
async def test_archpkg_provided_strip(get_version):
    assert await get_version("jsoncpp", {
        "source": "archpkg",
        "provided": "libjsoncpp.so",
        "strip_release": True,
    }) == "25"

nvchecker-2.12/tests/test_aur.py000066400000000000000000000015201444331012000167360ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import os

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net,
              pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
                                 reason="fail too often")]

@flaky(max_runs=10)
async def test_aur(get_version):
    assert await get_version("ssed", {
        "source": "aur",
    }) == "3.62-2"

@flaky(max_runs=10)
async def test_aur_strip_release(get_version):
    assert await get_version("ssed", {
        "source": "aur",
        "strip_release": 1,
    }) == "3.62"

@flaky(max_runs=10)
async def test_aur_use_last_modified(get_version):
    assert await get_version("ssed", {
        "source": "aur",
        'use_last_modified': True,
    }) == "3.62-2-20150725052412"
nvchecker-2.12/tests/test_bitbucket.py000066400000000000000000000026631444331012000201340ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_bitbucket(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
    }) == "20150303"

async def test_bitbucket_max_tag(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_max_tag": True,
    }) == "1.7.0"

async def test_bitbucket_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_max_tag": True,
        "ignored": "1.6.0 1.7.0",
    }) == "v1.5"

async def test_bitbucket_sorted_tags(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_sorted_tags": True,
    }) == "1.7.0"

    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_sorted_tags": True,
        "query": 'name~"v"',
    }) == "v1.5"

    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "berkeleylab/gasnet",
        "use_sorted_tags": True,
        "query": 'name~"CVS/BERKELEY_UPC" AND name!~"rc"',
        "prefix": "CVS/BERKELEY_UPC_",
    }) == "2_18_0"
nvchecker-2.12/tests/test_cache.py000066400000000000000000000005521444331012000172160ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_cache(run_str_multi):
  conf = r'''
[cache-1]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"

[cache-2]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"
'''

  r = await run_str_multi(conf)
  assert r['cache-1'] == r['cache-2']
nvchecker-2.12/tests/test_cmd.py000066400000000000000000000012521444331012000167140ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import time
import pytest
pytestmark = pytest.mark.asyncio

async def test_cmd(get_version):
    assert await get_version("example", {
        "source": "cmd",
        "cmd": "echo Meow",
    }) == "Meow"

async def test_cmd_complex(get_version):
    assert await get_version("example", {
        "source": "cmd",
        "cmd": "echo Meow | sed 's/meow/woof/i'",
    }) == "woof"

async def test_cmd_with_percent(run_str):
    test_conf = '''\
[example]
source = "cmd"
cmd = "date +%Y-%m-%d"'''
    date = await run_str(test_conf)
    expected = time.strftime('%Y-%m-%d')
    assert date == expected

nvchecker-2.12/tests/test_combiner.py000066400000000000000000000006441444331012000177530ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_combiner(run_str_multi):
  conf = r'''
[entry-1]
source = "cmd"
cmd = "echo 1"

[entry-2]
source = "cmd"
cmd = "echo 2"

[entry-3]
source = "combiner"
from = ["entry-1", "entry-2", "entry-2"]
format = "$1-$2-$3"
'''

  r = await run_str_multi(conf)
  assert r['entry-3'] == '1-2-2'
nvchecker-2.12/tests/test_container.py000066400000000000000000000010721444331012000201330ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen 

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_container(get_version):
  assert await get_version("hello-world", {
    "source": "container",
    "container": "library/hello-world",
    "include_regex": "linux",
  }) == "linux"

async def test_container_paging(get_version):
  assert await get_version("prometheus-operator", {
    "source": "container",
    "registry": "quay.io",
    "container": "redhattraining/hello-world-nginx",
  }) == "v1.0"
nvchecker-2.12/tests/test_cpan.py000066400000000000000000000004511444331012000170720ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cpan(get_version):
    assert await get_version("POE-Component-Server-HTTPServer", {
        "source": "cpan",
    }) == "0.9.2"
nvchecker-2.12/tests/test_cran.py000066400000000000000000000004351444331012000170760ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cran(get_version):
    assert await get_version("xml2", {
        "source": "cran",
    }) == "1.3.4"
nvchecker-2.12/tests/test_cratesio.py000066400000000000000000000004311444331012000177600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cratesio(get_version):
    assert await get_version("example", {
        "source": "cratesio",
    }) == "0.1.0"
nvchecker-2.12/tests/test_debianpkg.py000066400000000000000000000014621444331012000201000ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@flaky(max_runs=10)
async def test_debianpkg(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
    }) == "0.1.7-1"

@flaky(max_runs=10)
async def test_debianpkg_strip_release(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
        "strip_release": 1,
    }) == "0.1.7"

@flaky(max_runs=10)
async def test_debianpkg_suite(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
        "suite": "buster",
    }) == "0.1.6-1"
nvchecker-2.12/tests/test_gems.py000066400000000000000000000004211444331012000171010ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gems(get_version):
    assert await get_version("example", {
        "source": "gems",
    }) == "1.0.2"
nvchecker-2.12/tests/test_git.py000066400000000000000000000015721444331012000167410ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_git(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
    }) == "v1.1.1"

async def test_git_commit(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
        "use_commit": True,
    }) == "ddd0f15ae83993f5cb66a927a28673882e99100b"

async def test_git_commit_branch(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
        "use_commit": True,
        "branch": "with-executables",
    }) == "6b8dc4a827797aa025ff6b8f425e583858a10d4f"
nvchecker-2.12/tests/test_gitea.py000066400000000000000000000012201444331012000172350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net]

@flaky(max_runs=10)
async def test_gitea(get_version):
    ver = await get_version("example", {
        "source": "gitea",
        "gitea": "gitea/tea"})
    assert len(ver) == 8
    assert ver.isdigit()

@flaky(max_runs=10)
async def test_gitea_max_tag_with_include(get_version):
    assert await get_version("example", {
        "source": "gitea",
        "gitea": "gitea/tea",
        "use_max_tag": True,
        "include_regex": r'v0\.3.*',
    }) == "v0.3.1"
nvchecker-2.12/tests/test_github.py000066400000000000000000000044651444331012000174440ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import re

import pytest

pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net,
              pytest.mark.usefixtures('keyfile')]

async def test_github(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
    }) == "20140122.012101"

async def test_github_default_not_master(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "MariaDB/server",
    }) is not None

async def test_github_latest_release(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_latest_release": True,
    }) == "release3"

async def test_github_max_tag(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_tag": True,
    }) == "second_release"

async def test_github_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_tag": True,
        "ignored": "second_release release3",
    }) == "first_release"

async def test_github_with_path(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "petronny/ReleaseTestRepo",
        "path": "test_directory",
    }) == "20140122.012101"

async def test_github_with_path_and_branch(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "petronny/ReleaseTestRepo",
        "branch": "test",
        "path": "test_directory/test_directory",
    }) == "20190128.113201"

async def test_github_max_tag_with_include(get_version):
    version = await get_version("example", {
        "source": "github",
        "github": "EFForg/https-everywhere",
        "use_max_tag": True,
        "include_regex": r"chrome-\d.*",
    })
    assert re.match(r'chrome-[\d.]+', version)

async def test_github_latest_tag(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_latest_tag": True,
    }) == "release3"

nvchecker-2.12/tests/test_gitlab.py000066400000000000000000000024541444331012000174200ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gitlab(get_version):
    ver = await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
    })
    assert len(ver) == 8
    assert ver.isdigit()

async def test_gitlab_blm(get_version):
    # repo with a custom main branch
    ver = await get_version("example", {
        "source": "gitlab",
        "gitlab": "asus-linux/asusctl",
    })
    assert len(ver) == 8
    assert ver.isdigit()

async def test_gitlab_max_tag(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
    }) == "v1.1.1"

async def test_gitlab_max_tag_with_include(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
        "include_regex": r'v1\.0.*',
    }) == "v1.0.0"

async def test_gitlab_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
        "ignored": "v1.1.0 v1.1.1",
    }) == "v1.0.0"

nvchecker-2.12/tests/test_hackage.py000066400000000000000000000005101444331012000175300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@flaky(max_runs=10)
async def test_hackage(get_version):
    assert await get_version("sessions", {
        "source": "hackage",
    }) == "2008.7.18"
nvchecker-2.12/tests/test_htmlparser.py000066400000000000000000000015141444331012000203330ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 ypsilik , et al.

import pytest

lxml_available = True
try:
  import lxml
except ImportError:
  lxml_available = False

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.needs_net,
  pytest.mark.skipif(not lxml_available, reason="needs lxml"),
]

async def test_xpath_ok(get_version):
    ver = await get_version("aur", {
        "source": "htmlparser",
        "url": "https://aur.archlinux.org/",
        "xpath": '//div[@id="footer"]/p[1]/a/text()',
    })
    assert ver.startswith('v')
    assert '.' in ver

async def test_xpath_element(get_version):
    ver = await get_version("aur", {
        "source": "htmlparser",
        "url": "https://aur.archlinux.org/",
        "xpath": '//div[@id="footer"]/p[1]/a',
    })
    assert ver.startswith('v')
    assert '.' in ver

nvchecker-2.12/tests/test_httpheader.py000066400000000000000000000014541444331012000203050ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import pytest
import pytest_httpbin
assert pytest_httpbin # for pyflakes

pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_redirection(get_version):
    assert await get_version("unifiedremote", {
        "source": "httpheader",
        "url": "https://www.unifiedremote.com/download/linux-x64-deb",
        "regex": r'urserver-([\d.]+).deb',
    }) is not None

async def test_get_version_withtoken(get_version, httpbin):
    assert await get_version("unifiedremote", {
        "source": "httpheader",
        "url": httpbin.url + "/basic-auth/username/superpassword",
        "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
        "header": "server",
        "regex": r'([0-9.]+)*',
    }) is not None
nvchecker-2.12/tests/test_manual.py000066400000000000000000000004251444331012000174270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_manual(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "Meow",
    }) == "Meow"
nvchecker-2.12/tests/test_npm.py000066400000000000000000000004171444331012000167450ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_npm(get_version):
    assert await get_version("example", {
        "source": "npm",
    }) == "0.0.0"
nvchecker-2.12/tests/test_openvsx.py000066400000000000000000000004751444331012000176610ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_openvsx(get_version):
    assert await get_version("usernamehw.indent-one-space", {
        "source": "openvsx",
    }) == "0.3.0"
nvchecker-2.12/tests/test_packagist.py000066400000000000000000000004651444331012000201240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_packagist(get_version):
    """packagist source reports the package version from packagist.org."""
    version = await get_version("butterfly/example-web-application", {"source": "packagist"})
    assert version == "1.2.0"
nvchecker-2.12/tests/test_pacman.py000066400000000000000000000013441444331012000174120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pathlib
import shutil
import pytest
# Run async; skip the whole module unless a usable pacman setup exists:
# the pacman binary must be on PATH and the core sync database present,
# otherwise the queries below cannot succeed.
pytestmark = [pytest.mark.asyncio,
              pytest.mark.skipif(shutil.which("pacman") is None,
                                 reason="requires pacman command"),
              pytest.mark.skipif(not pathlib.Path("/var/lib/pacman/sync/core.db").exists(),
                                 reason="requires synced pacman databases")]

async def test_pacman(get_version):
    """pacman source returns the full version-release string for 'base'."""
    version = await get_version("base", {"source": "pacman"})
    assert version == "3-1"

async def test_pacman_strip_release(get_version):
    """strip_release drops the pkgrel suffix from the pacman version."""
    conf = {
        "source": "pacman",
        "strip_release": 1,
    }
    version = await get_version("base", conf)
    assert version == "3"
nvchecker-2.12/tests/test_pagure.py000066400000000000000000000014641444331012000174410ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pagure(get_version):
    """pagure source reports the newest tag of the test project."""
    conf = {
        "source": "pagure",
        "pagure": "nvchecker-test",
    }
    assert await get_version("example", conf) == "0.2"

async def test_pagure_with_ignored(get_version):
    """Versions listed in 'ignored' are skipped, exposing the next newest tag."""
    conf = {
        "source": "pagure",
        "pagure": "nvchecker-test",
        "ignored": "0.2",
    }
    assert await get_version("example", conf) == "0.1"

async def test_pagure_with_alternative_host(get_version):
    """'host' points at a non-default pagure instance; include_regex filters tags."""
    conf = {
        "source": "pagure",
        "pagure": "rpms/glibc",
        "host": "src.fedoraproject.org",
        "include_regex": r"F-\d+-start",
    }
    assert await get_version("example", conf) == "F-13-start"
nvchecker-2.12/tests/test_pypi.py000066400000000000000000000011711444331012000171320ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pypi(get_version):
    """pypi source reports the latest release of the 'example' project."""
    version = await get_version("example", {"source": "pypi"})
    assert version == "0.1.0"

async def test_pypi_release(get_version):
    """An explicit 'pypi' key selects the PyPI project name to query."""
    conf = {
        "source": "pypi",
        "pypi": "example-test-package",
    }
    assert await get_version("example-test-package", conf) == "1.0.0"

async def test_pypi_pre_release(get_version):
    """use_pre_release makes alpha/beta releases eligible as the latest version."""
    conf = {
        "source": "pypi",
        "use_pre_release": 1,
    }
    assert await get_version("example-test-package", conf) == "1.0.1a1"
nvchecker-2.12/tests/test_regex.py000066400000000000000000000103601444331012000172630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import base64

import pytest
import pytest_httpbin
assert pytest_httpbin # for pyflakes

pytestmark = pytest.mark.asyncio

def base64_encode(s):
    """Return the base64 encoding of text ``s`` as an ASCII string."""
    raw = s.encode('utf-8')
    return base64.b64encode(raw).decode('ascii')

async def test_regex_httpbin_default_user_agent(get_version, httpbin):
    """Without an override, requests identify themselves as lilydjwg/nvchecker."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/get",
        "regex": r'"User-Agent":\s*"([^"]+)"',
    }
    ua = await get_version("example", conf)
    assert ua.startswith("lilydjwg/nvchecker")

async def test_regex_httpbin_user_agent(get_version, httpbin):
    """A configured user_agent is sent verbatim in the request headers."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/get",
        "regex": r'"User-Agent":\s*"(\w+)"',
        "user_agent": "Meow",
    }
    assert await get_version("example", conf) == "Meow"

async def test_regex(get_version, httpbin):
    """The first capture group of 'regex' against the page body is the version."""
    page = httpbin.url + "/base64/" + base64_encode("version 1.12 released")
    conf = {
        "source": "regex",
        "url": page,
        "regex": r'version ([0-9.]+)',
    }
    assert await get_version("example", conf) == "1.12"

async def test_missing_ok(get_version, httpbin):
    """With missing_ok set, a non-matching regex yields None instead of an error."""
    page = httpbin.url + "/base64/" + base64_encode("something not there")
    result = await get_version("example", {
        "source": "regex",
        "url": page,
        "regex": "foobar",
        "missing_ok": True,
    })
    assert result is None

async def test_missing(get_version, httpbin):
    """Without missing_ok, a non-matching regex raises RuntimeError."""
    page = httpbin.url + "/base64/" + base64_encode("something not there")
    with pytest.raises(RuntimeError):
        await get_version("example", {
            "source": "regex",
            "url": page,
            "regex": "foobar",
        })

async def test_multi_group(get_version, httpbin):
    """A regex with more than one capture group is rejected with RuntimeError."""
    page = httpbin.url + "/base64/" + base64_encode("1.2")
    with pytest.raises(RuntimeError):
        await get_version("example", {
            "source": "regex",
            "url": page,
            "regex": r"(\d+)\.(\d+)",
        })

async def test_regex_with_tokenBasic(get_version, httpbin):
    """A Basic httptoken authenticates against the basic-auth endpoint."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/basic-auth/username/superpassword",
        "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
        "regex": r'"user":"([a-w]+)"',
    }
    assert await get_version("example", conf) == "username"

async def test_regex_with_tokenBearer(get_version, httpbin):
    """A Bearer httptoken is echoed back by httpbin's /bearer endpoint."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/bearer",
        "httptoken": "Bearer username:password",
        "regex": r'"token":"([a-w]+):.*"',
    }
    assert await get_version("example", conf) == "username"

async def test_regex_no_verify_ssl(get_version, httpbin_secure):
    """verify_cert=False lets the fetch succeed despite a self-signed cert."""
    page = httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released")
    conf = {
        "source": "regex",
        "url": page,
        "regex": r'version ([0-9.]+)',
        "verify_cert": False,
    }
    assert await get_version("example", conf) == "1.12"

async def test_regex_bad_ssl(get_version, httpbin_secure):
    """Fetching from a server with an untrusted (self-signed) certificate must fail.

    httpbin_secure serves over TLS with a self-signed certificate and
    verify_cert is left at its default, so the request is expected to
    raise. The original try/except/else dance is replaced with the
    idiomatic pytest.raises, which fails the test automatically when no
    exception occurs.
    """
    with pytest.raises(Exception):
        await get_version("example", {
            "source": "regex",
            "url": httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released"),
            "regex": r'version ([0-9.]+)',
        })

async def test_regex_post(get_version, httpbin):
    """post_data is submitted as a form body and echoed back for matching."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/post",
        "regex": r'"ABCDEF":\s*"(\w+)"',
        "post_data": "ABCDEF=234&CDEFG=xyz"
    }
    assert await get_version("example", conf) == "234"

async def test_regex_post2(get_version, httpbin):
    """The second form field from post_data is extractable as well."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/post",
        "regex": r'"CDEFG":\s*"(\w+)"',
        "post_data": "ABCDEF=234&CDEFG=xyz"
    }
    assert await get_version("example", conf) == "xyz"

async def test_regex_post_json(get_version, httpbin):
    """post_data_type=application/json sends the body as JSON, not a form."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/post",
        "regex": r'"ABCDEF":\s*(\w+)',
        "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
        "post_data_type": "application/json"
    }
    assert await get_version("example", conf) == "234"

async def test_regex_post_json2(get_version, httpbin):
    """A string-valued key in the JSON post body is extractable too."""
    conf = {
        "source": "regex",
        "url": httpbin.url + "/post",
        "regex": r'"CDEFG":\s*"(\w+)"',
        "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
        "post_data_type": "application/json"
    }
    assert await get_version("example", conf) == "xyz"
nvchecker-2.12/tests/test_repology.py000066400000000000000000000017761444331012000200240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net]

async def test_repology(get_version):
  """repology source reports the version packaged in the given repo."""
  conf = {
        "source": "repology",
        "repo": "aur",
  }
  assert await get_version("ssed", conf) == "3.62"

async def test_repology_subrepo(get_version):
  """'subrepo' narrows the lookup to one sub-repository of the repo."""
  conf = {
        "source": "repology",
        "repo": "fedora_32",
        "subrepo": "release"
  }
  assert await get_version("asciiquarium", conf) == "1.1"

async def test_repology_bad_subrepo(get_version):
  """A subrepo that doesn't carry the package must raise a clear error.

  The original try/except formulation silently passed when no exception
  was raised at all (as long as None came back); pytest.raises makes the
  expected failure mode explicit and mandatory.
  """
  with pytest.raises(RuntimeError, match="package is not found in subrepo"):
    await get_version("asciiquarium", {
            "source": "repology",
            "repo": "fedora_32",
            "subrepo": "badsubrepo"
    })

async def test_repology_no_repo(get_version):
  """Omitting the mandatory 'repo' field must raise a clear error.

  As with test_repology_bad_subrepo, the original try/except let the
  test pass when no exception occurred; pytest.raises requires the
  RuntimeError and checks its message.
  """
  with pytest.raises(RuntimeError, match="repo field is required"):
    await get_version("ssed", {
        "source": "repology",
    })
nvchecker-2.12/tests/test_simplerun.py000066400000000000000000000006531444331012000201730ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 lilydjwg , et al.

import sys
import tempfile
import subprocess

def test_simple_run():
  '''make sure the tool as a whole can run the simplest check'''
  # Minimal TOML config: one entry checked via the "cmd" source.
  config = '''\
[t]
source = "cmd"
cmd = "echo 1"
'''
  with tempfile.NamedTemporaryFile(mode='w') as f:
    f.write(config)
    f.flush()
    cmdline = [sys.executable, '-m', 'nvchecker', '-c', f.name]
    subprocess.check_call(cmdline)

nvchecker-2.12/tests/test_sortversion.py000066400000000000000000000012531444331012000205470ustar00rootroot00000000000000import pytest

from nvchecker.sortversion import (
  parse_version,
  vercmp, vercmp_available,
  AwesomeVersion, awesomeversion_available,
)

def test_parse_version():
  """parse_version orders plain releases above pre/suffixed tags sensibly."""
  lower, higher = parse_version("v6.0"), parse_version("6.1")
  assert lower < higher
  assert parse_version("v6.1-stable") < parse_version("v6.0")

@pytest.mark.skipif(not vercmp_available,
                    reason="needs pyalpm")
def test_vercmp():
  """pyalpm's vercmp orders a -stable suffixed tag after the base version."""
  lhs = vercmp("v6.0")
  rhs = vercmp("v6.1-stable")
  assert lhs < rhs

@pytest.mark.skipif(not awesomeversion_available,
                    reason="needs awesomeversion")
def test_awesomeversion():
  """AwesomeVersion compares releases and beta pre-releases correctly."""
  assert AwesomeVersion("6.1") > AwesomeVersion("v6.0")
  assert AwesomeVersion("v6.0b0") < AwesomeVersion("v6.0")

nvchecker-2.12/tests/test_sparkle.py000066400000000000000000000010451444331012000176120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei 

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_sparkle(get_version):
    assert await get_version('example', {
        'source': 'sparkle',
        'sparkle': (
            'https://raw.githubusercontent.com/sparkle-project/Sparkle/'
            'f453625573fc9a251760b65c74df59023b1471c1/Tests/Resources/'
            'testlocalizedreleasenotesappcast.xml'
        ),
    }) == '6.0'
nvchecker-2.12/tests/test_substitute.py000066400000000000000000000027221444331012000203670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_substitute_prefix(get_version):
    """'prefix' strips a leading marker from the raw version."""
    conf = {
        "source": "manual",
        "manual": "v1.0",
        "prefix": "v",
    }
    assert await get_version("example", conf) == "1.0"

async def test_substitute_prefix_missing_ok(get_version):
    """A version without the configured prefix passes through unchanged."""
    conf = {
        "source": "manual",
        "manual": "1.0",
        "prefix": "v",
    }
    assert await get_version("example", conf) == "1.0"

async def test_substitute_regex(get_version):
    """from_pattern/to_pattern rewrite the version via regex substitution."""
    conf = {
        "source": "manual",
        "manual": "r15c",
        "from_pattern": r"r(\d+)([a-z])",
        "to_pattern": r"r\1.\2",
    }
    assert await get_version("example", conf) == "r15.c"

async def test_substitute_regex_missing_ok(get_version):
    """A version not matching from_pattern passes through unchanged."""
    conf = {
        "source": "manual",
        "manual": "r15",
        "from_pattern": r"r(\d+)([a-z])",
        "to_pattern": r"r\1.\2",
    }
    assert await get_version("example", conf) == "r15"

async def test_substitute_regex_empty_to_pattern(get_version):
    """An empty to_pattern deletes the matched portion of the version."""
    conf = {
        "source": "manual",
        "manual": "15-debian",
        "from_pattern": r"-\w+$",
        "to_pattern": r"",
    }
    assert await get_version("example", conf) == "15"

async def test_substitute_prefix_has_higher_priority(get_version):
    """When both are given, 'prefix' wins over from_pattern/to_pattern."""
    conf = {
        "source": "manual",
        "manual": "r15",
        "prefix": "r",
        "from_pattern": r"r(\d+)",
        "to_pattern": r"R\1",
    }
    assert await get_version("example", conf) == "15"
nvchecker-2.12/tests/test_ubuntupkg.py000066400000000000000000000017221444331012000201770ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from flaky import flaky
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@flaky
async def test_ubuntupkg(get_version):
    """ubuntupkg source reports the current Ubuntu package version."""
    version = await get_version("sigrok-firmware-fx2lafw", {"source": "ubuntupkg"})
    assert version == "0.1.7-1"

@flaky
async def test_ubuntupkg_strip_release(get_version):
    """strip_release removes the Debian-style revision suffix."""
    conf = {
        "source": "ubuntupkg",
        "strip_release": True,
    }
    assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7"

@flaky
async def test_ubuntupkg_suite(get_version):
    """'suite' pins the lookup to a specific Ubuntu release (xenial)."""
    conf = {
        "source": "ubuntupkg",
        "suite": "xenial",
    }
    assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.2-1"

@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
    """Suite lookup still works when Launchpad results span multiple pages."""
    conf = {
        "source": "ubuntupkg",
        "suite": "xenial",
    }
    assert await get_version("ffmpeg", conf) == "7:2.8.17-0ubuntu0.1"
nvchecker-2.12/tests/test_vsmarketplace.py000066400000000000000000000005111444331012000210070ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_vsmarketplace(get_version):
    """vsmarketplace source reports the extension version from the VS Marketplace."""
    version = await get_version("usernamehw.indent-one-space", {"source": "vsmarketplace"})
    assert version == "1.0.0"
nvchecker-2.12/tox.ini000066400000000000000000000004261444331012000147130ustar00rootroot00000000000000[tox]
isolated_build = True
# you may find `tox --skip-missing-interpreters=true` helpful.
envlist = py3{7,8,9,10}

[testenv]
usedevelop = false
deps =
  pytest
  pytest-asyncio
  pytest-httpbin
  flaky
extras =
  htmlparser
passenv = KEYFILE
commands = pytest -r fEs {posargs}