pax_global_header00006660000000000000000000000064146271751140014522gustar00rootroot0000000000000052 comment=09012bd759a92d0ba3ea07ba217f27848a2ff4a1 upstream-ontologist-0.1.37/000077500000000000000000000000001462717511400156515ustar00rootroot00000000000000upstream-ontologist-0.1.37/.flake8000066400000000000000000000002201462717511400170160ustar00rootroot00000000000000[flake8] extend-ignore = E203, E266, E501, W293, W291, W503, B950, B903, B905 max-line-length = 88 max-complexity = 18 select = B,C,E,F,W,T4,B9 upstream-ontologist-0.1.37/.github/000077500000000000000000000000001462717511400172115ustar00rootroot00000000000000upstream-ontologist-0.1.37/.github/FUNDING.yml000066400000000000000000000000231462717511400210210ustar00rootroot00000000000000--- github: jelmer upstream-ontologist-0.1.37/.github/dependabot.yml000066400000000000000000000007141462717511400220430ustar00rootroot00000000000000# Please see the documentation for all configuration options: # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "weekly" - package-ecosystem: "github-actions" directory: "/" schedule: interval: weekly - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" upstream-ontologist-0.1.37/.github/workflows/000077500000000000000000000000001462717511400212465ustar00rootroot00000000000000upstream-ontologist-0.1.37/.github/workflows/cargo-publish.yaml000066400000000000000000000010511462717511400246660ustar00rootroot00000000000000on: push: tags: - 'v*' # Push events to every tag not containing / workflow_dispatch: name: Publish jobs: publish: name: Publish runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v2 - name: Install stable toolchain uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - run: cargo publish --token ${CRATES_TOKEN} env: CRATES_TOKEN: ${{ secrets.CRATES_TOKEN }} upstream-ontologist-0.1.37/.github/workflows/disperse.yml000066400000000000000000000002741462717511400236120ustar00rootroot00000000000000--- name: Disperse configuration "on": - push jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: jelmer/action-disperse-validate@v1 upstream-ontologist-0.1.37/.github/workflows/pythonpackage.yml000066400000000000000000000044711462717511400246340ustar00rootroot00000000000000--- name: Python package "on": push: pull_request: schedule: - cron: '0 6 * * *' # Daily 6AM UTC build jobs: python: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macos-latest] python-version: [3.8, 3.9, "3.10", "3.11", "3.12"] fail-fast: false steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install pre-dependencies run: | python -m pip install -U pip "setuptools>=60" setuptools-rust - name: Install dependencies (Linux) run: | sudo apt install libxml2-dev libxslt1-dev if: "matrix.os == 'ubuntu-latest'" - name: Install dependencies (Mac OS X) run: | brew install pcre if: "matrix.os == 'macos-latest'" - name: Install dependencies run: | python -m pip install \ '.[readme,cargo,pyproject,debian_watch,debian_rules,debian_changelog,setup.cfg]' python setup.py build_ext -i if: "matrix.os == 'ubuntu-latest'" - name: Install dependencies run: | python -m pip install \ '.[readme,cargo,pyproject,debian_watch,debian_rules,debian_changelog,setup.cfg]' python setup.py build_ext -i if: "matrix.os == 'macos-latest'" - name: Install breezy run: | python -m pip install breezy if: "matrix.os != 'windows-latest'" - name: Style checks (ruff) run: | python -m pip install --upgrade pip python -m pip install --upgrade ".[dev]" python -m ruff check . python -m ruff format --check . continue-on-error: true - name: Style checks (yamllint) run: | python -m pip install --upgrade yamllint yamllint . continue-on-error: true - name: Typing checks run: | pip install -U mypy types-docutils types-Markdown types-toml \ types-beautifulsoup4 python -m mypy upstream_ontologist/ tests/ continue-on-error: true - name: Test suite run run: | python -m unittest tests.test_suite env: PYTHONHASHSEED: random upstream-ontologist-0.1.37/.github/workflows/rust.yml000066400000000000000000000011211462717511400227610ustar00rootroot00000000000000--- name: Rust on: push: pull_request: env: CARGO_TERM_COLOR: always jobs: rust: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] fail-fast: false steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install breezy run: pip install breezy - name: Build run: cargo build --verbose - name: Run tests run: cargo test --verbose upstream-ontologist-0.1.37/.github/workflows/wheels.yaml000066400000000000000000000036211462717511400234230ustar00rootroot00000000000000--- name: Build Python Wheels on: push: pull_request: schedule: - cron: "0 6 * * *" # Daily 6AM UTC build jobs: build: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macos-latest] fail-fast: true steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 - name: Install native dependencies (Ubuntu) run: sudo apt-get update && sudo apt-get install -y libgpgme-dev if: "matrix.os == 'ubuntu-latest'" - name: set up rust if: matrix.os != 'ubuntu' uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - name: Install native dependencies (MacOS) run: brew install swig gpgme if: "matrix.os == 'macos-latest'" - name: Install dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel cibuildwheel - name: Install gpg on supported platforms run: pip install -U gpg if: "matrix.os != 'windows-latest'" - name: Set up QEMU uses: docker/setup-qemu-action@v3 if: "matrix.os == 'ubuntu-latest'" - name: Build wheels run: python -m cibuildwheel --output-dir wheelhouse - name: Upload wheels uses: actions/upload-artifact@v2 with: path: ./wheelhouse/*.whl publish: runs-on: ubuntu-latest needs: build if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') permissions: id-token: write environment: name: pypi url: https://pypi.org/p/upstream-ontologist steps: - uses: actions/setup-python@v5 - name: Download wheels uses: actions/download-artifact@v2 with: path: dist - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 upstream-ontologist-0.1.37/.gitignore000066400000000000000000000002571462717511400176450ustar00rootroot00000000000000*~ __pycache__ .mypy_cache MANIFEST build dist upstream_ontologist.egg-info .eggs .tox .coverage htmlcov target *.swp src/vcs.rs upstream_ontologist/_upstream_ontologist.*.so upstream-ontologist-0.1.37/.yamllint000066400000000000000000000000551462717511400175030ustar00rootroot00000000000000--- extends: default ignore: | testdata/ upstream-ontologist-0.1.37/AUTHORS000066400000000000000000000000431462717511400167160ustar00rootroot00000000000000Jelmer Vernooij upstream-ontologist-0.1.37/CODE_OF_CONDUCT.md000066400000000000000000000064231462717511400204550ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at team@dulwich.io. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq upstream-ontologist-0.1.37/Cargo.lock000066400000000000000000003404151462717511400175650ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "addr2line" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] [[package]] name = "adler" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", "zerocopy", ] [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "ansi_term" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ "winapi", ] [[package]] name = "anstream" version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" [[package]] name = "anstyle-parse" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", "windows-sys 0.52.0", ] [[package]] name = "anyhow" version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arbitrary" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" dependencies = [ "derive_arbitrary", ] [[package]] name = "arc-swap" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayvec" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "ascii-canvas" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" dependencies = [ "term", ] [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi 0.1.19", "libc", "winapi", ] [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", ] [[package]] name = "base64" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bit-set" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] [[package]] name = "bit-vec" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] name = "bitmaps" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" dependencies = [ "typenum", ] [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "breezyshim" version = "0.1.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "887d1f2ab8fe13cd15d20a55414bb4d239a30d544b7db2e919256d5b6aa0d516" dependencies = [ "chrono", "ctor", "debversion 0.3.1", "lazy-regex", "lazy_static", "pyo3", "pyo3-filelike", "serde", "tempfile", "url", ] [[package]] name = "bstr" version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "bytesize" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" [[package]] name = "bzip2" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" dependencies = [ "bzip2-sys", "libc", ] [[package]] name = "bzip2-sys" version = "0.1.11+1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" dependencies = [ "cc", "libc", "pkg-config", ] [[package]] name = "cargo" version = "0.63.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d092a7c3e3aaa66469b2233b58c0bf330419dad9c423165f2b9cf1c57dc9f2e" dependencies = [ "anyhow", "atty", "bytesize", "cargo-platform", "cargo-util", "clap 3.2.25", "crates-io", "crossbeam-utils", "curl", "curl-sys", "env_logger 0.9.3", "filetime", "flate2", "fwdansi", "git2", "git2-curl", "glob", "hex", "home", "humantime", "ignore", "im-rc", "indexmap 1.9.3", "itertools", "jobserver", "lazy_static", "lazycell", "libc", "libgit2-sys", "log", "memchr", "num_cpus", "opener", "os_info", "pathdiff", "percent-encoding", "rustc-workspace-hack", "rustfix", "semver", "serde", "serde_ignored", "serde_json", "shell-escape", "strip-ansi-escapes", "tar", "tempfile", "termcolor", "toml_edit 0.14.4", "unicode-width", "unicode-xid", "url", "walkdir", "winapi", ] [[package]] name = "cargo-platform" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" dependencies = [ "serde", ] [[package]] name = "cargo-util" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77042b5b585f701f1cfb4b6b12ebc02b9b0cefbc8dcce235906b6bf376d4245d" dependencies = [ "anyhow", "core-foundation", "filetime", "hex", "jobserver", "libc", "miow", "same-file", "sha2", "shell-escape", "tempfile", "tracing", "walkdir", "windows-sys 0.48.0", ] [[package]] name = "cc" version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" dependencies = [ "jobserver", "libc", "once_cell", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "charset" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18e9079d1a12a2cc2bffb5db039c43661836ead4082120d5844f02555aca2d46" dependencies = [ "base64 0.13.1", "encoding_rs", ] [[package]] name = "chrono" version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", "windows-targets 0.52.5", ] [[package]] name = "chumsky" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ "hashbrown 0.14.5", "stacker", ] [[package]] name = "clap" version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "atty", "bitflags 1.3.2", "clap_lex 0.2.4", "indexmap 1.9.3", "strsim", "termcolor", "textwrap", ] [[package]] name = "clap" version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" dependencies = [ "anstream", "anstyle", "clap_lex 0.6.0", "strsim", "terminal_size", ] [[package]] name = "clap_derive" version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "clap_lex" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" dependencies = [ "os_str_bytes", ] [[package]] name = "clap_lex" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "colorchoice" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] name = "combine" version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "memchr", ] [[package]] name = "configparser" version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec6d3da8e550377a85339063af6e3735f4b1d9392108da4e083a1b3b9820288" [[package]] name = "const-random" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" dependencies = [ "const-random-macro", ] [[package]] name = "const-random-macro" version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ "getrandom", "once_cell", "tiny-keccak", ] [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crates-io" version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b4a87459133b2e708195eaab34be55039bc30e0d120658bd40794bb00b6328d" dependencies = [ "anyhow", "curl", "percent-encoding", "serde", "serde_json", "url", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-deque" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crunchy" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "cssparser" version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b3df4f93e5fbbe73ec01ec8d3f68bba73107993a5b1e7519273c32db9b0d5be" dependencies = [ "cssparser-macros", "dtoa-short", "itoa", "phf 0.11.2", "smallvec", ] [[package]] name = "cssparser-macros" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", "syn 2.0.66", ] [[package]] name = "csv" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" dependencies = [ "csv-core", "itoa", "ryu", "serde", ] [[package]] name = "csv-core" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" dependencies = [ "memchr", ] [[package]] name = "ctor" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" dependencies = [ "quote", "syn 2.0.66", ] [[package]] name = "curl" version = "0.4.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", "windows-sys 0.52.0", ] [[package]] name = "curl-sys" version = "0.4.72+curl-8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea" dependencies = [ "cc", "libc", "libnghttp2-sys", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", "windows-sys 0.52.0", ] [[package]] name = "data-encoding" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" [[package]] name = "deb822-lossless" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3af14237edb89bb9e614af13b05930dc15f7a9ea9c1df6fa5cf2838c3999121" dependencies = [ "regex", "rowan", "serde", ] [[package]] name = "debbugs" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c97610f1921c853f2461e39c9d6aaa4639d060994348bd45d12204049432fca" dependencies = [ "debversion 0.2.2", "lazy-regex", "log", "mailparse 0.14.1", "maplit", "reqwest 0.11.27", "tokio", "xmltree", ] [[package]] name = "debcargo" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33de5c25ee2509db4150fd4e9bb76a5ae7d28adca9aea7c68138548a0227d30b" dependencies = [ "ansi_term", "anyhow", "cargo", "chrono", "clap 4.4.18", "env_logger 0.9.3", "filetime", "flate2", "git2", "glob", "itertools", "log", "regex", "semver", "serde", "serde_derive", "tar", "tempfile", "textwrap", "toml 0.5.11", "walkdir", ] [[package]] name = "debian-changelog" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71be2258e81c0c1f83f5e87340a11bf5c2336b3424fb2a7c7aa1be5a00c32577" dependencies = [ "chrono", "debversion 0.3.1", "lazy-regex", "log", "rowan", "textwrap", "whoami", ] [[package]] name = "debian-control" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd28ccdcb873a3fe3c07392a5e1df998d7720e4b58d48b1c6037c146c738980b" dependencies = [ "deb822-lossless", "debversion 0.2.2", "regex", "rowan", "url", ] [[package]] name = "debian-copyright" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d8236df464b440a9c11653a54f50a02d1a59d5c91bbc49bfb58a85397a6b919" dependencies = [ "deb822-lossless", "debversion 0.2.2", "regex", ] [[package]] name = "debian-watch" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "506bd868882fe7b6d5dbaaeb001174aa01fb267563f019ceaa6f31a8b6e9639f" dependencies = [ "debversion 0.3.1", "m_lexer", "rowan", "url", ] [[package]] name = "debversion" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e65a0a572fa10f34b89addac251b7c8f40266606ee5847d769ab8db4d56ca11b" dependencies = [ "lazy-regex", ] [[package]] name = "debversion" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07366429d82d50c6393c1e9c03645ea5e2f529d4bce483b4b5a54312b86e0f25" dependencies = [ "lazy-regex", "pyo3", ] [[package]] name = "deranged" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", ] [[package]] name = "derivative" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "derive_arbitrary" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "derive_more" version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "dirs-next" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ "cfg-if", "dirs-sys-next", ] [[package]] name = "dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", "redox_users", "winapi", ] [[package]] name = "displaydoc" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "distro-info" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef12237f2ced990e453ec0b69230752e73be0a357817448c50a62f8bbbe0ca71" dependencies = [ "chrono", "csv", "failure", ] [[package]] name = "dlv-list" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" dependencies = [ "const-random", ] [[package]] name = "dtoa" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" [[package]] name = "dtoa-short" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" dependencies = [ "dtoa", ] [[package]] name = "ego-tree" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591" [[package]] name = "either" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" [[package]] name = "ena" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" dependencies = [ "log", ] [[package]] name = "encoding_rs" version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] [[package]] name = "env_filter" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" dependencies = [ "log", "regex", ] [[package]] name = "env_logger" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" dependencies = [ "atty", "humantime", "log", "regex", "termcolor", ] [[package]] name = "env_logger" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" dependencies = [ "anstream", "anstyle", "env_filter", "humantime", "log", ] [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "failure" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" dependencies = [ "backtrace", "failure_derive", ] [[package]] name = "failure_derive" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "synstructure", ] [[package]] name = "faster-hex" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" [[package]] name = "fastrand" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "filetime" version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", "redox_syscall 0.4.1", "windows-sys 0.52.0", ] [[package]] name = "fixedbitset" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "libz-sys", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foreign-types" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fs-err" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" dependencies = [ "autocfg", ] [[package]] name = "futf" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" dependencies = [ "mac", "new_debug_unreachable", ] [[package]] name = "futures-channel" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-io" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-sink" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-core", "futures-io", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "fwdansi" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08c1f5787fe85505d1f7777268db5103d80a7a374d2316a7ce262e57baf8f208" dependencies = [ "memchr", "termcolor", ] [[package]] name = "fxhash" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" dependencies = [ "byteorder", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getopts" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" dependencies = [ "unicode-width", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "gimli" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "git2" version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0155506aab710a86160ddb504a480d2964d7ab5b9e62419be69e0032bc5931c" dependencies = [ "bitflags 1.3.2", "libc", "libgit2-sys", "log", "openssl-probe", "openssl-sys", "url", ] [[package]] name = "git2-curl" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ee51709364c341fbb6fe2a385a290fb9196753bdde2fc45447d27cd31b11b13" dependencies = [ "curl", "git2", "log", "url", ] [[package]] name = "gix-actor" version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d69c59d392c7e6c94385b6fd6089d6df0fe945f32b4357687989f3aee253cd7f" dependencies = [ "bstr", "gix-date", "gix-utils", "itoa", "thiserror", "winnow 0.6.9", ] [[package]] name = "gix-config" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53fafe42957e11d98e354a66b6bd70aeea00faf2f62dd11164188224a507c840" dependencies = [ "bstr", "gix-config-value", "gix-features", "gix-glob", "gix-path", "gix-ref", "gix-sec", "memchr", "once_cell", "smallvec", "thiserror", "unicode-bom", "winnow 0.6.9", ] [[package]] name = "gix-config-value" version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbd06203b1a9b33a78c88252a625031b094d9e1b647260070c25b09910c0a804" dependencies = [ "bitflags 2.5.0", "bstr", "gix-path", "libc", "thiserror", ] [[package]] name = "gix-date" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "367ee9093b0c2b04fd04c5c7c8b6a1082713534eab537597ae343663a518fa99" dependencies = [ "bstr", "itoa", "thiserror", "time", ] [[package]] name = "gix-features" version = "0.38.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac7045ac9fe5f9c727f38799d002a7ed3583cd777e3322a7c4b43e3cf437dc69" dependencies = [ "gix-hash", "gix-trace", "gix-utils", "libc", "prodash", "sha1_smol", "walkdir", ] [[package]] name = "gix-fs" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3338ff92a2164f5209f185ec0cd316f571a72676bb01d27e22f2867ba69f77a" dependencies = [ "fastrand", "gix-features", "gix-utils", ] [[package]] name = "gix-glob" version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a29ad0990cf02c48a7aac76ed0dbddeb5a0d070034b83675cc3bbf937eace4" dependencies = [ "bitflags 2.5.0", "bstr", "gix-features", "gix-path", ] [[package]] name = "gix-hash" version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f93d7df7366121b5018f947a04d37f034717e113dcf9ccd85c34b58e57a74d5e" dependencies = [ "faster-hex", "thiserror", ] [[package]] name = "gix-lock" version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bc7fe297f1f4614774989c00ec8b1add59571dc9b024b4c00acb7dedd4e19d" dependencies = [ "gix-tempfile", "gix-utils", "thiserror", ] [[package]] name = "gix-object" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fe2dc4a41191c680c942e6ebd630c8107005983c4679214fdb1007dcf5ae1df" dependencies = [ "bstr", "gix-actor", "gix-date", "gix-features", "gix-hash", "gix-utils", "gix-validate", "itoa", "smallvec", "thiserror", "winnow 0.6.9", ] [[package]] name = "gix-path" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23623cf0f475691a6d943f898c4d0b89f5c1a2a64d0f92bce0e0322ee6528783" dependencies = [ "bstr", "gix-trace", "home", "once_cell", "thiserror", ] [[package]] name = "gix-ref" version = "0.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3394a2997e5bc6b22ebc1e1a87b41eeefbcfcff3dbfa7c4bd73cb0ac8f1f3e2e" dependencies = [ "gix-actor", "gix-date", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-path", "gix-tempfile", "gix-utils", "gix-validate", "memmap2", "thiserror", "winnow 0.6.9", ] [[package]] name = "gix-sec" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fddc27984a643b20dd03e97790555804f98cf07404e0e552c0ad8133266a79a1" dependencies = [ "bitflags 2.5.0", "gix-path", "libc", "windows-sys 0.52.0", ] [[package]] name = "gix-tempfile" version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3b0e276cd08eb2a22e9f286a4f13a222a01be2defafa8621367515375644b99" dependencies = [ "gix-fs", "libc", "once_cell", "parking_lot", "tempfile", ] [[package]] name = "gix-trace" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f924267408915fddcd558e3f37295cc7d6a3e50f8bd8b606cee0808c3915157e" [[package]] name = "gix-utils" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35192df7fd0fa112263bad8021e2df7167df4cc2a6e6d15892e1e55621d3d4dc" dependencies = [ "fastrand", "unicode-normalization", ] [[package]] name = "gix-validate" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82c27dd34a49b1addf193c92070bcbf3beaf6e10f16a78544de6372e146a0acf" dependencies = [ "bstr", "thiserror", ] [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata", "regex-syntax 0.8.3", ] [[package]] name = "h2" version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", "http 0.2.12", "indexmap 2.2.6", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "home" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "html5ever" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", "markup5ever 0.11.0", "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "html5ever" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4" dependencies = [ "log", "mac", "markup5ever 0.12.1", "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "http" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", "itoa", ] [[package]] name = "http" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", "itoa", ] [[package]] name = "http-body" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http 0.2.12", "pin-project-lite", ] [[package]] name = "http-body" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ "bytes", "http 1.1.0", ] [[package]] name = "http-body-util" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" dependencies = [ "bytes", "futures-core", "http 1.1.0", "http-body 1.0.0", "pin-project-lite", ] [[package]] name = "httparse" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", "h2", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", "socket2", "tokio", "tower-service", "tracing", "want", ] [[package]] name = "hyper" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.0", "httparse", "itoa", "pin-project-lite", "smallvec", "tokio", "want", ] [[package]] name = "hyper-rustls" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" dependencies = [ "futures-util", "http 1.1.0", "hyper 1.3.1", "hyper-util", "rustls", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", ] [[package]] name = "hyper-tls" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", "hyper 0.14.28", "native-tls", "tokio", "tokio-native-tls", ] [[package]] name = "hyper-tls" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", "hyper 1.3.1", "hyper-util", "native-tls", "tokio", "tokio-native-tls", "tower-service", ] [[package]] name = "hyper-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.0", "hyper 1.3.1", "pin-project-lite", "socket2", "tokio", "tower", "tower-service", "tracing", ] [[package]] name = "iana-time-zone" version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "idna" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", ] [[package]] name = "ignore" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata", "same-file", "walkdir", "winapi-util", ] [[package]] name = "im-rc" version = "15.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" dependencies = [ "bitmaps", "rand_core", "rand_xoshiro", "sized-chunks", "typenum", "version_check", ] [[package]] name = "indexmap" version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", ] [[package]] name = "indexmap" version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown 0.14.5", "serde", ] [[package]] name = "indoc" version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "ipnet" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-terminal" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ "hermit-abi 0.3.9", "libc", "windows-sys 0.52.0", ] [[package]] name = "is_terminal_polyfill" version = "1.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" [[package]] name = "itertools" version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] [[package]] name = "kstring" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747" dependencies = [ "static_assertions", ] [[package]] name = "lalrpop" version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" dependencies = [ "ascii-canvas", "bit-set", "diff", "ena", "is-terminal", "itertools", "lalrpop-util", "petgraph", "regex", "regex-syntax 0.6.29", "string_cache", "term", "tiny-keccak", "unicode-xid", ] [[package]] name = "lalrpop-util" version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" dependencies = [ "regex", ] [[package]] name = "lazy-regex" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d12be4595afdf58bd19e4a9f4e24187da2a66700786ff660a418e9059937a4c" dependencies = [ "lazy-regex-proc_macros", "once_cell", "regex", ] [[package]] name = "lazy-regex-proc_macros" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44bcd58e6c97a7fcbaffcdc95728b393b8d98933bfadad49ed4097845b57ef0b" dependencies = [ "proc-macro2", "quote", "regex", "syn 2.0.66", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lazycell" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libgit2-sys" version = "0.13.5+1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51e5ea06c26926f1002dd553fded6cfcdc9784c1f60feeb58368b4d9b07b6dba" dependencies = [ "cc", "libc", "libssh2-sys", "libz-sys", "openssl-sys", "pkg-config", ] [[package]] name = "libnghttp2-sys" version = "0.1.10+1.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" dependencies = [ "cc", "libc", ] [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.5.0", "libc", ] [[package]] name = "libssh2-sys" version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b094a36eb4b8b8c8a7b4b8ae43b2944502be3e59cd87687595cf6b0a71b3f4ca" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", ] [[package]] name = "libz-sys" version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "m_lexer" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7e51ebf91162d585a5bae05e4779efc4a276171cb880d61dd6fab11c98467a7" dependencies = [ "regex", ] [[package]] name = "mac" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "mailparse" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d096594926cab442e054e047eb8c1402f7d5b2272573b97ba68aa40629f9757" dependencies = [ "charset", "data-encoding", "quoted_printable", ] [[package]] name = "mailparse" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3da03d5980411a724e8aaf7b61a7b5e386ec55a7fb49ee3d0ff79efc7e5e7c7e" dependencies = [ "charset", "data-encoding", "quoted_printable", ] [[package]] name = "makefile-lossless" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd5e0a2d9a68e4c19c655158e8d6cbf8bd64524254747e66f8215a2e58c0d102" dependencies = [ "log", "rowan", ] [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ "log", "phf 0.10.1", "phf_codegen 0.10.0", "string_cache", "string_cache_codegen", "tendril", ] [[package]] name = "markup5ever" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" dependencies = [ "log", "phf 0.11.2", "phf_codegen 0.11.2", "string_cache", "string_cache_codegen", "tendril", ] [[package]] name = "markup5ever_rcdom" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9521dd6750f8e80ee6c53d65e2e4656d7de37064f3a7a5d2d11d05df93839c2" dependencies = [ "html5ever 0.26.0", "markup5ever 0.11.0", "tendril", "xml5ever", ] [[package]] name = "memchr" version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "memmap2" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" dependencies = [ "libc", ] [[package]] name = "memoffset" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] [[package]] name = "mime" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" dependencies = [ "adler", ] [[package]] name = "mio" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "wasi", "windows-sys 0.48.0", ] [[package]] name = "miow" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" dependencies = [ "windows-sys 0.48.0", ] [[package]] name = "native-tls" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework", "security-framework-sys", "tempfile", ] [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ "hermit-abi 0.3.9", "libc", ] [[package]] name = "num_enum" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "num_threads" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] [[package]] name = "object" version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opam-file-rs" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dc9fde26706c9170630772dd86981d874e9a3107cc456c811e1ee234e0c4863" dependencies = [ "lalrpop", "lalrpop-util", "thiserror", ] [[package]] name = "opener" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "293c15678e37254c15bd2f092314abb4e51d7fdde05c2021279c12631b54f005" dependencies = [ "bstr", "winapi", ] [[package]] name = "openssl" version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ "bitflags 2.5.0", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-macros", "openssl-sys", ] [[package]] name = "openssl-macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" version = "0.9.102" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "ordered-multimap" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" dependencies = [ "dlv-list", "hashbrown 0.14.5", ] [[package]] name = "os_info" version = "3.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" dependencies = [ "log", "serde", "windows-sys 0.52.0", ] [[package]] name = "os_str_bytes" version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] name = "parking_lot" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall 0.5.1", "smallvec", "windows-targets 0.52.5", ] [[package]] name = "pathdiff" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" [[package]] name = "pep440_rs" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca0a570e7ec9171250cac57614e901f62408094b54b3798bb920d3cf0d4a0e09" dependencies = [ "once_cell", "serde", "unicode-width", "unscanny", ] [[package]] name = "pep508_rs" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581c27e97a3f38c5d691962af7da93c2672b5227d59cf165b87a9b1fd53dd724" dependencies = [ "derivative", "once_cell", "pep440_rs", "regex", "serde", "thiserror", "unicode-width", "url", "urlencoding", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", "indexmap 2.2.6", ] [[package]] name = "phf" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared 0.10.0", ] [[package]] name = "phf" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ "phf_macros", "phf_shared 0.11.2", ] [[package]] name = "phf_codegen" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" dependencies = [ "phf_generator 0.10.0", "phf_shared 0.10.0", ] [[package]] name = "phf_codegen" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" dependencies = [ "phf_generator 0.11.2", "phf_shared 0.11.2", ] [[package]] name = "phf_generator" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared 0.10.0", "rand", ] [[package]] name = "phf_generator" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared 0.11.2", "rand", ] [[package]] name = "phf_macros" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" dependencies = [ "phf_generator 0.11.2", "phf_shared 0.11.2", "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ "siphasher", ] [[package]] name = "phf_shared" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ "siphasher", ] [[package]] name = "pin-project" version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "pin-project-lite" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "portable-atomic" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "precomputed-hash" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "proc-macro-crate" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ "toml_edit 0.21.1", ] [[package]] name = "proc-macro2" version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "744a264d26b88a6a7e37cbad97953fa233b94d585236310bcbc88474b4092d79" [[package]] name = "psm" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" dependencies = [ "cc", ] [[package]] name = "pulldown-cmark" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8746739f11d39ce5ad5c2520a9b75285310dbfe78c541ccf832d38615765aec0" dependencies = [ "bitflags 2.5.0", "getopts", "memchr", "pulldown-cmark-escape", "unicase", ] [[package]] name = "pulldown-cmark-escape" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" [[package]] name = "pyo3" version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" dependencies = [ "cfg-if", "indoc", "libc", "memoffset", "parking_lot", "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", "serde", "unindent", ] [[package]] name = "pyo3-build-config" version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" dependencies = [ "once_cell", "target-lexicon", ] [[package]] name = "pyo3-ffi" version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" dependencies = [ "libc", "pyo3-build-config", ] [[package]] name = "pyo3-filelike" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd3b0fe5c172055cfa8daddd9fab48dc7a4ebb532bd5dd8a87a40fac0a570fa3" dependencies = [ "pyo3", ] [[package]] name = "pyo3-log" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c09c2b349b6538d8a73d436ca606dab6ce0aaab4dad9e6b7bdd57a4f556c3bc3" dependencies = [ "arc-swap", "log", "pyo3", ] [[package]] name = "pyo3-macros" version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", "syn 2.0.66", ] [[package]] name = "pyo3-macros-backend" version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" dependencies = [ "heck", "proc-macro2", "pyo3-build-config", "quote", "syn 2.0.66", ] [[package]] name = "pyproject-toml" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef7061023bcb58a0fc4a4bbe9819c13b0dca7c2abc14da14f5ecc1532ab3a36a" dependencies = [ "indexmap 2.2.6", "pep440_rs", "pep508_rs", "serde", "toml 0.8.13", ] [[package]] name = "python-pkginfo" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4320ca452fe003f8a07afb8e30c315bbd813ae8105f454ddefebf15a24021e1f" dependencies = [ "flate2", "fs-err", "mailparse 0.15.0", "rfc2047-decoder", "tar", "thiserror", "zip", ] [[package]] name = "quote" version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] [[package]] name = "quoted_printable" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79ec282e887b434b68c18fe5c121d38e72a5cf35119b59e54ec5b992ea9c8eb0" [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] [[package]] name = "rand_xoshiro" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ "rand_core", ] [[package]] name = "redox_syscall" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_syscall" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" dependencies = [ "bitflags 2.5.0", ] [[package]] name = "redox_users" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ "getrandom", "libredox", "thiserror", ] [[package]] name = "regex" version = "1.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax 0.8.3", ] [[package]] name = "regex-automata" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", "regex-syntax 0.8.3", ] [[package]] name = "regex-syntax" version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "reqwest" version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "base64 0.21.7", "bytes", "encoding_rs", "futures-core", "futures-util", "h2", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.28", "hyper-tls 0.5.0", "ipnet", "js-sys", "log", "mime", "native-tls", "once_cell", "percent-encoding", "pin-project-lite", "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "winreg 0.50.0", ] [[package]] name = "reqwest" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", "http 1.1.0", "http-body 1.0.0", "http-body-util", "hyper 1.3.1", "hyper-rustls", "hyper-tls 0.6.0", "hyper-util", "ipnet", "js-sys", "log", "mime", "native-tls", "once_cell", "percent-encoding", "pin-project-lite", "rustls", "rustls-pemfile 2.1.2", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", "tokio-native-tls", "tokio-rustls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "webpki-roots", "winreg 0.52.0", ] [[package]] name = "rfc2047-decoder" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e90a668c463c412c3118ae1883e18b53d812c349f5af7a06de3ba4bb0c17cc73" dependencies = [ "base64 0.21.7", "charset", "chumsky", "memchr", "quoted_printable", "thiserror", ] [[package]] name = "ring" version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", "getrandom", "libc", "spin", "untrusted", "windows-sys 0.52.0", ] [[package]] name = "rowan" version = "0.15.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" dependencies = [ "countme", "hashbrown 0.14.5", "memoffset", "rustc-hash", "text-size", ] [[package]] name = "rust-ini" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d625ed57d8f49af6cfa514c42e1a71fadcff60eb0b1c517ff82fe41aa025b41" dependencies = [ "cfg-if", "ordered-multimap", "trim-in-place", ] [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-workspace-hack" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc71d2faa173b74b232dedc235e3ee1696581bb132fc116fa3626d6151a1a8fb" [[package]] name = "rustfix" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ecd2853d9e26988467753bd9912c3a126f642d05d229a4b53f5752ee36c56481" dependencies = [ "anyhow", "log", "serde", "serde_json", ] [[package]] name = "rustix" version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.5.0", "errno", "libc", "linux-raw-sys", "windows-sys 0.52.0", ] [[package]] name = "rustls" version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" dependencies = [ "log", "ring", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] [[package]] name = "rustls-pemfile" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ "base64 0.21.7", ] [[package]] name = "rustls-pemfile" version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" dependencies = [ "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" [[package]] name = "rustls-webpki" version = "0.102.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" dependencies = [ "ring", "rustls-pki-types", "untrusted", ] [[package]] name = "rustversion" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "schannel" version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scraper" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b80b33679ff7a0ea53d37f3b39de77ea0c75b12c5805ac43ec0c33b3051af1b" dependencies = [ "ahash", "cssparser", "ego-tree", "getopts", "html5ever 0.26.0", "once_cell", "selectors", "tendril", ] [[package]] name = "security-framework" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ "bitflags 2.5.0", "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework-sys" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "select" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f9da09dc3f4dfdb6374cbffff7a2cffcec316874d4429899eefdc97b3b94dcd" dependencies = [ "bit-set", "html5ever 0.26.0", "markup5ever_rcdom", ] [[package]] name = "selectors" version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06" dependencies = [ "bitflags 2.5.0", "cssparser", "derive_more", "fxhash", "log", "new_debug_unreachable", "phf 0.10.1", "phf_codegen 0.10.0", "precomputed-hash", "servo_arc", "smallvec", ] [[package]] name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" dependencies = [ "serde", ] [[package]] name = "serde" version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "serde_ignored" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8e319a36d1b52126a0d608f24e93b2d81297091818cd70625fcf50a15d84ddf" dependencies = [ "serde", ] [[package]] name = "serde_json" version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "itoa", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ "serde", ] [[package]] name = "serde_urlencoded" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap 2.2.6", "itoa", "ryu", "serde", "unsafe-libyaml", ] [[package]] name = "servo_arc" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d036d71a959e00c77a63538b90a6c2390969f9772b096ea837205c6bd0491a44" dependencies = [ "stable_deref_trait", ] [[package]] name = "sha1_smol" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shell-escape" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "sized-chunks" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" dependencies = [ "bitmaps", "typenum", ] [[package]] name = "slab" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "stacker" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" dependencies = [ "cc", "cfg-if", "libc", "psm", "winapi", ] [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" dependencies = [ "new_debug_unreachable", "once_cell", "parking_lot", "phf_shared 0.10.0", "precomputed-hash", "serde", ] [[package]] name = "string_cache_codegen" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" dependencies = [ "phf_generator 0.10.0", "phf_shared 0.10.0", "proc-macro2", "quote", ] [[package]] name = "strip-ansi-escapes" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "011cbb39cf7c1f62871aea3cc46e5817b0937b49e9447370c93cacbe93a766d8" dependencies = [ "vte", ] [[package]] name = "strsim" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "subtle" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "syn" version = "2.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "synstructure" version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "unicode-xid", ] [[package]] name = "system-configuration" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "tar" version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" dependencies = [ "filetime", "libc", "xattr", ] [[package]] name = "target-lexicon" version = "0.12.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" [[package]] name = "tempfile" version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand", "rustix", "windows-sys 0.52.0", ] [[package]] name = "tendril" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" dependencies = [ "futf", "mac", "utf-8", ] [[package]] name = "term" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ "dirs-next", "rustversion", "winapi", ] [[package]] name = "termcolor" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] name = "terminal_size" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ "rustix", "windows-sys 0.48.0", ] [[package]] name = "text-size" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" dependencies = [ "smawk", "unicode-linebreak", "unicode-width", ] [[package]] name = "thiserror" version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "time" version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", "libc", "num-conv", "num_threads", "powerfmt", "serde", "time-core", "time-macros", ] [[package]] name = "time-core" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", ] [[package]] name = "tiny-keccak" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" dependencies = [ "crunchy", ] [[package]] name = "tinyvec" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes", "libc", "mio", "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "tokio-native-tls" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-rustls" version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ "rustls", "rustls-pki-types", "tokio", ] [[package]] name = "tokio-util" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", ] [[package]] name = "toml" version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" dependencies = [ "serde", ] [[package]] name = "toml" version = "0.8.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4e43f8cc456c9704c851ae29c67e17ef65d2c30017c17a9765b89c382dc8bba" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit 0.22.13", ] [[package]] name = "toml_datetime" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5376256e44f2443f8896ac012507c19a012df0fe8758b55246ae51a2279db51f" dependencies = [ "combine", "indexmap 1.9.3", "itertools", "kstring", "serde", ] [[package]] name = "toml_edit" version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ "indexmap 2.2.6", "toml_datetime", "winnow 0.5.40", ] [[package]] name = "toml_edit" version = "0.22.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c127785850e8c20836d49732ae6abfa47616e60bf9d9f57c43c250361a9db96c" dependencies = [ "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", "winnow 0.6.9", ] [[package]] name = "tower" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", "pin-project", "pin-project-lite", "tokio", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-service" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "tracing-core" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", ] [[package]] name = "trim-in-place" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicase" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] [[package]] name = "unicode-bidi" version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-bom" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" [[package]] name = "unicode-xid" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unindent" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "unscanny" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47" [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "upstream-ontologist" version = "0.1.37" dependencies = [ "breezyshim", "chrono", "clap 4.4.18", "configparser", "debbugs", "debcargo", "debian-changelog", "debian-control", "debian-copyright", "debian-watch", "distro-info", "env_logger 0.11.3", "gix-config", "html5ever 0.27.0", "lazy-regex", "lazy_static", "log", "mailparse 0.15.0", "makefile-lossless", "maplit", "opam-file-rs", "percent-encoding", "pulldown-cmark", "pyo3", "pyproject-toml", "python-pkginfo", "regex", "reqwest 0.12.4", "rust-ini", "scraper", "select", "serde", "serde_json", "serde_yaml", "shlex", "tempfile", "textwrap", "toml 0.8.13", "url", "xml", "xmltree", ] [[package]] name = "upstream-ontologist-py" version = "0.1.37" dependencies = [ "log", "pyo3", "pyo3-log", "reqwest 0.12.4", "serde_json", "upstream-ontologist", "url", ] [[package]] name = "url" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", ] [[package]] name = "urlencoding" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" [[package]] name = "utf-8" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8parse" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "vte" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cbce692ab4ca2f1f3047fcf732430249c0e971bfdd2b234cf2c47ad93af5983" dependencies = [ "arrayvec", "utf8parse", "vte_generate_state_changes", ] [[package]] name = "vte_generate_state_changes" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ "proc-macro2", "quote", ] [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "want" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ "try-lock", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn 2.0.66", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if", "js-sys", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "web-sys" version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "webpki-roots" version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3de34ae270483955a94f4b21bdaaeb83d508bb84a01435f393818edb0012009" dependencies = [ "rustls-pki-types", ] [[package]] name = "whoami" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" dependencies = [ "redox_syscall 0.4.1", "wasite", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets 0.52.5", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.5", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ "windows_aarch64_gnullvm 0.52.5", "windows_aarch64_msvc 0.52.5", "windows_i686_gnu 0.52.5", "windows_i686_gnullvm", "windows_i686_msvc 0.52.5", "windows_x86_64_gnu 0.52.5", "windows_x86_64_gnullvm 0.52.5", "windows_x86_64_msvc 0.52.5", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" [[package]] name = "windows_i686_gnullvm" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] [[package]] name = "winnow" version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86c949fede1d13936a99f14fafd3e76fd642b556dd2ce96287fbe2e0151bfac6" dependencies = [ "memchr", ] [[package]] name = "winreg" version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] [[package]] name = "winreg" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] [[package]] name = "xattr" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", "linux-raw-sys", "rustix", ] [[package]] name = "xml" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ede1c99c55b4b3ad0349018ef0eccbe954ce9c342334410707ee87177fcf2ab4" dependencies = [ "xml-rs", ] [[package]] name = "xml-rs" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "791978798f0597cfc70478424c2b4fdc2b7a8024aaff78497ef00f24ef674193" [[package]] name = "xml5ever" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" dependencies = [ "log", "mac", "markup5ever 0.11.0", ] [[package]] name = "xmltree" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" dependencies = [ "xml-rs", ] [[package]] name = "zerocopy" version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ "proc-macro2", "quote", "syn 2.0.66", ] [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zip" version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cc23c04387f4da0374be4533ad1208cbb091d5c11d070dfef13676ad6497164" dependencies = [ "arbitrary", "bzip2", "crc32fast", "crossbeam-utils", "displaydoc", "flate2", "indexmap 2.2.6", "num_enum", "thiserror", "time", ] upstream-ontologist-0.1.37/Cargo.toml000066400000000000000000000056641462717511400176140ustar00rootroot00000000000000[package] name = "upstream-ontologist" version = { workspace = true } authors = [ "Jelmer Vernooij ",] edition = "2021" license = "GPL-2.0+" description = "tracking of upstream project metadata" repository = "https://github.com/jelmer/upstream-ontologist.git" homepage = "https://github.com/jelmer/upstream-ontologist" default-run = "guess-upstream-metadata" [workspace.package] version = "0.1.37" [dependencies] log = "0.4" shlex = "1.1.0" serde_json = "1.0" lazy_static = "1" regex = "1" url = "2" xmltree = "0.10" configparser = "3" serde_yaml = "0.9" percent-encoding = "2" html5ever = ">=0.26" chrono = "0.4" textwrap = ">=0.16" lazy-regex = "3" breezyshim = "0.1.92" debian-watch = { version = "0.2.1", optional = true } debian-changelog = { version = "0.1.8", optional = true } debbugs = "0.1.0" clap = { version = "4,<4.5", features = ["derive", "env"], optional = true } maplit = "1.0.2" env_logger = { version = "0.11", optional = true } xml = "0.8.10" makefile-lossless = { version = "0.1.2", optional = true } debian-copyright = { version = "0.1.2", optional = true } debian-control = { version = "0.1.6", optional = true } select = "0.6.0" pulldown-cmark = "0.11" debcargo = { version = "2.6.1", optional = true } scraper = "0.19.0" [workspace] members = [ "upstream-ontologist-py",] [features] default = ["git-config", "launchpad", "opam", "dist-ini", "cargo", "r-description", "pyproject-toml", "python-pkginfo", "default-tls", "debian", "pyo3"] default-tls = ["reqwest/default-tls"] git-config = ["dep:gix-config"] launchpad = ["dep:distro-info"] opam = ["dep:opam-file-rs"] dist-ini = ["dep:rust-ini"] cargo = ["dep:toml"] r-description = ["dep:mailparse"] pyproject-toml = ["dep:pyproject-toml"] python-pkginfo = ["dep:python-pkginfo"] rustls-tls = ["reqwest/rustls-tls"] debcargo = ["dep:debcargo"] debian = ["debcargo", "dep:debian-watch", "dep:makefile-lossless", "dep:debian-changelog", "dep:debian-control", "dep:debian-copyright"] pyo3 = [] cli = ["dep:clap", "dep:env_logger"] [lib] [dev-dependencies] tempfile = "3.8.1" [dependencies.pyo3] workspace = true [dependencies.reqwest] version = "^0.12" features = [ "blocking", "json",] default-features = false [dependencies.rust-ini] version = "0.21" optional = true [dependencies.serde] version = "1.0" features = [ "derive",] [dependencies.opam-file-rs] version = "0.1" optional = true [dependencies.gix-config] version = ">=0.20" optional = true [dependencies.distro-info] version = "0.4" optional = true [dependencies.toml] version = ">=0.5" optional = true [dependencies.mailparse] version = "0.15" optional = true [dependencies.pyproject-toml] version = "0.11" optional = true [dependencies.python-pkginfo] version = ">=0.5" optional = true [workspace.dependencies] pyo3 = "0.20" pyo3-log = "0.8" [[bin]] name = "autodoap" required-features = ["cli"] [[bin]] name = "autocodemeta" required-features = ["cli"] [[bin]] name = "guess-upstream-metadata" required-features = ["cli"] upstream-ontologist-0.1.37/LICENSE000066400000000000000000000432541462717511400166660ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. upstream-ontologist-0.1.37/MANIFEST.in000066400000000000000000000004241462717511400174070ustar00rootroot00000000000000include CODE_OF_CONDUCT.md include README.md include SECURITY.md include AUTHORS recursive-include tests *.py README README.md description README.rst include Cargo.toml upstream-ontologist-py/Cargo.toml recursive-include src *.rs recursive-include upstream-ontologist-py *.rs upstream-ontologist-0.1.37/Makefile000066400000000000000000000006231462717511400173120ustar00rootroot00000000000000.PHONY: build check unittest coverage coverage-html typing build: python3 setup.py build_ext -i check:: unittest-py unittest-py: build python3 -m unittest tests.test_suite cargo-test: cargo test check:: cargo-test coverage: build python3 -m coverage run -m unittest tests.test_suite coverage-html: coverage python3 -m coverage html check:: typing typing: mypy upstream_ontologist/ tests/ upstream-ontologist-0.1.37/README.md000066400000000000000000000106711462717511400171350ustar00rootroot00000000000000Upstream Ontologist =================== The upstream ontologist provides a common interface for finding metadata about upstream software projects. It will gather information from any sources available, prioritize data that it has higher confidence in as well as report the confidence for each of the bits of metadata. The ontologist originated in Debian and the currently reported metadata fields are loosely based on [DEP-12](https://dep-team.pages.debian.net/deps/dep12), but it is meant to be distribution-agnostic. Provided Fields --------------- Standard fields: * ``Homepage``: homepage URL * ``Name``: human name of the upstream project * ``Contact``: contact address of some sort of the upstream (e-mail, mailing list URL) * ``Repository``: VCS URL * ``Repository-Browse``: Web URL for viewing the VCS * ``Bug-Database``: Bug database URL (for web viewing, generally) * ``Bug-Submit``: URL to use to submit new bugs (either on the web or an e-mail address) * ``Screenshots``: List of URLs with screenshots * ``Archive``: Archive used - e.g. SourceForge * ``Security-Contact``: e-mail or URL with instructions for reporting security issues * ``Documentation``: Link to documentation on the web Extensions for upstream-ontologist, not defined in DEP-12: * ``SourceForge-Project``: sourceforge project name * ``Wiki``: Wiki URL * ``Summary``: one-line description of the project * ``Description``: longer description of the project * ``License``: Single line license (e.g. "GPL 2.0") * ``Copyright``: List of copyright holders * ``Version``: Current upstream version * ``Security-MD``: URL to markdown file with security policy * ``Author``: List of people who contributed to the project * ``Maintainer``: The maintainer of the project * ``Funding``: URL to more information about funding Supported Data Sources ---------------------- At the moment, the ontologist can read metadata from the following upstream data sources: * Python package metadata (PKG-INFO, setup.py, setup.cfg, pyproject.timl) * [package.json](https://docs.npmjs.com/cli/v7/configuring-npm/package-json) * [composer.json](https://getcomposer.org/doc/04-schema.md) * [package.xml](https://pear.php.net/manual/en/guide.developers.package2.dependencies.php) * Perl package metadata (dist.ini, META.json, META.yml, Makefile.PL) * [Perl POD files](https://perldoc.perl.org/perlpod) * GNU configure files * [R DESCRIPTION files](https://r-pkgs.org/description.html) * [Rust Cargo.toml](https://doc.rust-lang.org/cargo/reference/manifest.html) * [Maven pom.xml](https://maven.apache.org/pom.html) * [metainfo.xml](https://www.freedesktop.org/software/appstream/docs/chap-Metadata.html) * [.git/config](https://git-scm.com/docs/git-config) * SECURITY.md * [DOAP](https://github.com/ewilderj/doap) * [Haskell cabal files](https://cabal.readthedocs.io/en/3.4/cabal-package.html) * [go.mod](https://golang.org/doc/modules/gomod-ref) * [ruby gemspec files](https://guides.rubygems.org/specification-reference/) * [nuspec files](https://docs.microsoft.com/en-us/nuget/reference/nuspec) * [OPAM files](https://opam.ocaml.org/doc/Manual.html#Package-definitions) * Debian packaging metadata (debian/watch, debian/control, debian/rules, debian/get-orig-source.sh, debian/copyright, debian/patches) * Dart's [pubspec.yaml](https://dart.dev/tools/pub/pubspec) * meson.build It will also scan README and INSTALL for possible upstream repository URLs (and will attempt to verify that those match the local repository). In addition to local files, it can also consult external directories using their APIs: * [GitHub](https://github.com/) * [SourceForge](https://sourceforge.net/) * [repology](https://www.repology.org/) * [Launchpad](https://launchpad.net/) * [PECL](https://pecl.php.net/) * [AUR](https://aur.archlinux.org/) Example Usage ------------- The easiest way to use the upstream ontologist is by invoking the ``guess-upstream-metadata`` command in a software project: ```console $ guess-upstream-metadata ~/src/dulwich Security-MD: https://github.com/dulwich/dulwich/tree/HEAD/SECURITY.md Name: dulwich Version: 0.20.15 Bug-Database: https://github.com/dulwich/dulwich/issues Repository: https://www.dulwich.io/code/ Summary: Python Git Library Bug-Submit: https://github.com/dulwich/dulwich/issues/new ``` Alternatively, there is a Python API. There are also ``autocodemeta`` and ``autodoap`` commands that can generate output in the [codemeta](https://codemeta.github.io/) and [DOAP](https://github.com/ewilderj/doap) formats, respectively. upstream-ontologist-0.1.37/SECURITY.md000066400000000000000000000004561462717511400174470ustar00rootroot00000000000000# Security Policy ## Supported Versions upstream-ontologist is still under heavy development. Only the latest version is security supported. ## Reporting a Vulnerability Please report security issues by e-mail to jelmer@jelmer.uk, ideally PGP encrypted to the key at https://jelmer.uk/D729A457.asc upstream-ontologist-0.1.37/disperse.conf000066400000000000000000000006011462717511400203330ustar00rootroot00000000000000# See https://github.com/jelmer/disperse name: "upstream-ontologist" timeout_days: 5 tag_name: "v$VERSION" update_version { path: "Cargo.toml" match: "^version = \"(.*)\"" new_line: "version = \"$VERSION\"" } update_version { path: "upstream_ontologist/__init__.py" match: "^version_string = \"(.*)\"" new_line: "version_string = \"$VERSION\"" } update_manpages: "man/*.1" upstream-ontologist-0.1.37/docs/000077500000000000000000000000001462717511400166015ustar00rootroot00000000000000upstream-ontologist-0.1.37/docs/vcs.md000066400000000000000000000001761462717511400177220ustar00rootroot00000000000000Version control URLs are reported as two-tuples: * vcs family * URL TODO(jelmer): subpath handling TODO(jelmer): branches upstream-ontologist-0.1.37/man/000077500000000000000000000000001462717511400164245ustar00rootroot00000000000000upstream-ontologist-0.1.37/man/autodoap.1000066400000000000000000000023471462717511400203300ustar00rootroot00000000000000.TH AUTODOAP 1 'September 2023' 'autodoap 0.1.36' 'User Commands' .SH NAME autodoap \- automatically write DOAP files for upstream projects .SH DESCRIPTION autodoap [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) information for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). Data is written to standard out in DOAP. .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. .TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.1.37/man/guess-upstream-metadata.1000066400000000000000000000023331462717511400232510ustar00rootroot00000000000000.TH GUESS-UPSTREAM-METADATA 1 'September 2023' 'guess-upstream-metadata 0.1.36' 'User Commands' .SH NAME guess-upstream-metadata \- guess upstream package metadata .SH DESCRIPTION guess\-upstream\-metadata [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. .TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.1.37/pyproject.toml000066400000000000000000000065571462717511400206020ustar00rootroot00000000000000[build-system] requires = ["setuptools>=61.2", "setuptools-rust"] build-backend = "setuptools.build_meta" [tool.mypy] warn_redundant_casts = true warn_unused_configs = true check_untyped_defs = true ignore_missing_imports = false [[tool.mypy.overrides]] module = [ # No type hints yet "ruamel.*", "setuptools.*", "pcre.*", "lxml.*", "breezy.*", ] ignore_missing_imports = true [project] name = "upstream-ontologist" authors = [{name = "Jelmer Vernooij", email = "jelmer@jelmer.uk"}] maintainers = [{name = "Jelmer Vernooij", email = "jelmer@jelmer.uk"}] description = "tracking of upstream project metadata" requires-python = ">= 3.7" dependencies = [ "python_debian", "ruamel.yaml", "breezy>=3.3.0", ] dynamic = ["version"] [project.readme] file = "README.md" content-type = "text/markdown" [project.urls] Homepage = "https://github.com/jelmer/upstream-ontologist" Repository = "https://github.com/jelmer/upstream-ontologist.git" [project.optional-dependencies] debian_changelog = [ "httplib2>=0.7.8", "python_debian", ] homepage = ["bs4"] readme = [ "docutils", "lxml", "bs4", "pygments", ] "setup.cfg" = ["setuptools"] testing = ["breezy>=3.3.0"] dev = ["ruff==0.4.5"] [project.scripts] guess-upstream-metadata = "upstream_ontologist.__main__:main" [tool.setuptools] packages = [ "upstream_ontologist", ] include-package-data = false [tool.setuptools.package-data] upstream_ontologist = ["py.typed"] [tool.setuptools.dynamic] version = {attr = "upstream_ontologist.version_string"} [tool.ruff] target-version = "py37" [tool.ruff.lint] ignore = [ "ANN001", "ANN002", "ANN003", "ANN101", # missing-type-self "ANN102", "ANN201", "ANN202", "ANN204", "ANN205", "ANN206", "D100", "D101", "D102", "D103", "D104", "D105", "D107", "D204", "D205", "D417", "E501", # line too long "E741", # ambiguous variable name ] select = [ "ANN", "D", "E", "F", "I", "UP", ] [tool.ruff.lint.pydocstyle] convention = "google" [tool.cibuildwheel] before-build = "pip install -U setuptools-rust && rustup default stable && rustup show" environment = {PATH="$HOME/.cargo/bin:$PATH"} # breezyshim needs to embed python, which pypy doesn't support skip = "pp*-* *musllinux*" [tool.cibuildwheel.linux] before-build = "if command -v yum; then yum -y install openssl-devel; fi && if command -v apk; then apk add openssl-dev pkgconfig; fi && pip install -U setuptools-rust && curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain=stable --profile=minimal -y && rustup show" [tool.cibuildwheel.macos] macos = "x86_64 arm64 universal2" before-build = "brew install openssl && pip install -U setuptools-rust && curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain=stable --profile=minimal -y && rustup show" environment = {MACOSX_DEPLOYMENT_TARGET=14.0} [[tool.cibuildwheel.overrides]] select = "*-macosx_arm64" before-build = "pip install -U setuptools-rust && curl https://sh.rustup.rs -sSf | sh -s -- --profile=minimal -y && rustup target add aarch64-apple-darwin" [tool.cibuildwheel.windows] before-build = "vcpkg install openssl && pip install -U setuptools-rust && curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain=stable --profile=minimal -y && rustup show" environment = {CMAKE_TOOLCHAIN_FILE="C:/vcpkg/scripts/buildsystems/vcpkg.cmake"} upstream-ontologist-0.1.37/setup.py000077500000000000000000000006701462717511400173710ustar00rootroot00000000000000#!/usr/bin/python3 from setuptools import setup from setuptools_rust import Binding, RustExtension setup( rust_extensions=[ RustExtension( "upstream_ontologist._upstream_ontologist", "upstream-ontologist-py/Cargo.toml", binding=Binding.PyO3, features=["rustls-tls", "extension-module"], ), ], data_files=[("share/man/man1", ["man/guess-upstream-metadata.1"])], ) upstream-ontologist-0.1.37/src/000077500000000000000000000000001462717511400164405ustar00rootroot00000000000000upstream-ontologist-0.1.37/src/bin/000077500000000000000000000000001462717511400172105ustar00rootroot00000000000000upstream-ontologist-0.1.37/src/bin/autocodemeta.rs000066400000000000000000000111071462717511400222300ustar00rootroot00000000000000use clap::Parser; use serde::Serialize; use std::collections::HashSet; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; #[derive(Serialize, Default)] struct SoftwareSourceCode { name: Option, version: Option, #[serde(rename = "codeRepository")] code_repository: Option, #[serde(rename = "issueTracker")] issue_tracker: Option, license: Option, description: Option, // TODO(jelmer): Support setting contIntegration // TODO(jelmer): Support keywords // TODO(jelmer): Support funder // TODO(jelmer): Support funding // TODO(jelmer): Support creation date // TODO(jelmer): Support first release date // TODO(jelmer): Support unique identifier // TODO(jelmer): Support runtime platform // TODO(jelmer): Support other software requirements // TODO(jelmer): Support operating system // TODO(jelmer): Support development status // TODO(jelmer): Support reference publication // TODO(jelmer): Support part of // TODO(jelmer): Support Author #[serde(rename = "downloadUrl")] download_url: Option, #[serde(rename = "relatedLink")] related_link: HashSet, } fn valid_spdx_identifier(name: &str) -> bool { name.chars() .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '+') } fn codemeta_file_from_upstream_info(data: Vec) -> SoftwareSourceCode { let mut result = SoftwareSourceCode { ..Default::default() }; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { result.name = Some(n); } UpstreamDatum::Homepage(h) => { result.related_link.insert(h); } UpstreamDatum::Description(d) => { result.description = Some(d); } UpstreamDatum::Download(d) => { result.download_url = Some(d); } UpstreamDatum::MailingList(ml) => { result.related_link.insert(ml); } UpstreamDatum::BugDatabase(bd) => { result.issue_tracker = Some(bd); } UpstreamDatum::Screenshots(us) => { for u in us { result.related_link.insert(u); } } UpstreamDatum::Wiki(r) => { result.related_link.insert(r); } UpstreamDatum::Repository(r) => { result.code_repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { result.related_link.insert(r); } UpstreamDatum::License(l) => { if valid_spdx_identifier(&l) { result.license = Some(format!("https://spdx.org/licenses/{}", l)); } } UpstreamDatum::Version(v) => { result.version = Some(v); } UpstreamDatum::Documentation(a) => { result.related_link.insert(a); } _ => {} } } result } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory for metadata #[clap(long)] consult_external_directory: bool, } fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); pyo3::prepare_freethreaded_python(); let path = args.path.canonicalize().unwrap(); let upstream_info = upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .unwrap(); let codemeta = codemeta_file_from_upstream_info(upstream_info.into()); std::io::stdout() .write_all(serde_json::to_string_pretty(&codemeta).unwrap().as_bytes()) .unwrap(); } upstream-ontologist-0.1.37/src/bin/autodoap.rs000066400000000000000000000224011462717511400213710ustar00rootroot00000000000000use clap::Parser; use maplit::hashmap; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; use xmltree::{Element, Namespace, XMLNode}; const DOAP_NS: &str = "http://usefulinc.com/ns/doap"; const RDF_NS: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns"; const FOAF_NS: &str = "http://xmlns.com/foaf/0.1/"; fn rdf_resource(namespace: &Namespace, url: String) -> XMLNode { XMLNode::Element(Element { prefix: Some("rdf".to_string()), namespaces: Some(namespace.clone()), namespace: Some(RDF_NS.to_string()), name: "resource".to_string(), attributes: hashmap! {"rdf:resource".to_string() => url}, children: vec![], }) } fn doap_file_from_upstream_info(data: Vec) -> Element { let mut namespace = Namespace::empty(); namespace.put("doap", DOAP_NS); namespace.put("rdf", RDF_NS); namespace.put("foaf", FOAF_NS); let mut repository = None; let mut repository_browse = None; let mut children = vec![]; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "name".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(n)], })); } UpstreamDatum::Homepage(h) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "homepage".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, h)], })); } UpstreamDatum::Summary(s) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "shortdesc".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(s)], })); } UpstreamDatum::Description(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "description".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(d)], })); } UpstreamDatum::Download(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "download-page".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, d)], })); } UpstreamDatum::MailingList(ml) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "mailing-list".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, ml)], })); } UpstreamDatum::BugDatabase(bd) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "bug-database".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, bd)], })); } UpstreamDatum::Screenshots(us) => { for u in us { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "screenshots".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, u)], })); } } UpstreamDatum::SecurityContact(sc) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "security-contact".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, sc)], })); } UpstreamDatum::Wiki(r) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "wiki".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } UpstreamDatum::Repository(r) => { repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { repository_browse = Some(r); } _ => {} } } if repository.is_some() || repository_browse.is_some() { let mut git_repo_el = Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "GitRepository".to_string(), attributes: hashmap! {}, children: vec![], }; if let Some(r) = repository { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "location".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } if let Some(b) = repository_browse { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "browse".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, b)], })); } children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "repository".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Element(git_repo_el)], })); } Element { prefix: Some("doap".to_string()), namespaces: Some(namespace), namespace: Some(DOAP_NS.to_string()), name: "Project".to_string(), attributes: hashmap! {}, children, } } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory #[clap(long)] consult_external_directory: bool, } fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); pyo3::prepare_freethreaded_python(); let path = args.path.canonicalize().unwrap(); let upstream_info = upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .unwrap(); let el = doap_file_from_upstream_info(upstream_info.into()); use xmltree::EmitterConfig; let config = EmitterConfig::new() .perform_indent(true) .normalize_empty_elements(true); el.write_with_config(&mut std::io::stdout(), config) .unwrap(); } upstream-ontologist-0.1.37/src/bin/guess-upstream-metadata.rs000066400000000000000000000107061462717511400243240ustar00rootroot00000000000000use clap::Parser; use std::io::Write; use std::path::PathBuf; #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Whether to enable trace logging #[clap(long)] trace: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Scan for metadata rather than printing results #[clap(long)] scan: bool, /// Scan specified homepage rather than current directory #[clap(long)] from_homepage: Option, /// Pull in external (not maintained by upstream) directory data #[clap(long)] consult_external_directory: bool, } fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.trace { log::LevelFilter::Trace } else if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); pyo3::prepare_freethreaded_python(); if let Some(from_homepage) = args.from_homepage { for d in upstream_ontologist::homepage::guess_from_homepage(&from_homepage).unwrap() { println!( "{}: {:?} - certainty {} (from {:?})", d.datum.field(), d.datum, d.certainty .map_or_else(|| "unknown".to_string(), |d| d.to_string()), d.origin ); } } else if args.scan { for entry in upstream_ontologist::guess_upstream_info( &args.path.canonicalize().unwrap(), Some(args.trust), ) { let entry = entry.unwrap(); println!( "{}: {:?} - certainty {}{}", entry.datum.field(), entry.datum, entry .certainty .map_or("unknown".to_string(), |c| c.to_string()), entry .origin .map_or_else(|| "".to_string(), |o| format!(" (from {:?})", o)) ); } } else { let metadata = match upstream_ontologist::guess_upstream_metadata( &args.path.canonicalize().unwrap(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) { Ok(m) => m, Err(upstream_ontologist::ProviderError::ParseError(e)) => { eprintln!("Error parsing metadata: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::IoError(e)) => { eprintln!("I/O Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::Other(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::HttpJsonError(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::ExtrapolationLimitExceeded(l)) => { eprintln!("Extraoplation limit exceeded: {}", l); std::process::exit(1); } Err(upstream_ontologist::ProviderError::Python(e)) => { eprintln!("Python error: {}", e); if args.debug { pyo3::Python::with_gil(|py| { if let Some(tb) = e.traceback(py) { for line in tb.format().unwrap().lines() { eprintln!("{}", line); } } else { panic!("No traceback"); } }); } std::process::exit(1); } }; let out = serde_yaml::to_value(&metadata).unwrap(); std::io::stdout() .write_all(serde_yaml::to_string(&out).unwrap().as_bytes()) .unwrap(); } } upstream-ontologist-0.1.37/src/debian.rs000066400000000000000000000042321462717511400202310ustar00rootroot00000000000000pub fn debian_to_upstream_version(version: &str) -> &str { // Drop debian-specific modifiers from an upstream version string. version.split("+dfsg").next().unwrap_or_default() } pub fn upstream_name_to_debian_source_name(mut upstream_name: &str) -> String { if let Some((_, _, abbrev)) = lazy_regex::regex_captures!(r"^(.{10,})\((.*)\)", upstream_name) { upstream_name = abbrev; } // Remove "GNU " prefix if upstream_name.starts_with("GNU ") { upstream_name = &upstream_name["GNU ".len()..]; } // Convert to lowercase and replace characters upstream_name .to_lowercase() .replace(['_', ' ', '/'], "-") } pub fn upstream_package_to_debian_source_name(package: &crate::UpstreamPackage) -> String { if package.family == "rust" { return format!("rust-{}", package.name.to_lowercase()); } else if package.family == "perl" { return format!("lib{}-perl", package.name.to_lowercase().replace("::", "-")); } else if package.family == "node" { return format!("node-{}", package.name.to_lowercase()); } // If family is not rust, perl, or node, call upstream_name_to_debian_source_name upstream_name_to_debian_source_name(package.name.as_str()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_gnu() { assert_eq!("lala", upstream_name_to_debian_source_name("GNU Lala")); } #[test] fn test_abbrev() { assert_eq!( "mun", upstream_name_to_debian_source_name("Made Up Name (MUN)") ); } } pub fn upstream_package_to_debian_binary_name(package: &crate::UpstreamPackage) -> String { if package.family == "rust" { return format!("rust-{}", package.name.to_lowercase()); } else if package.family == "perl" { return format!("lib{}-perl", package.name.to_lowercase().replace("::", "-")); } else if package.family == "node" { return format!("node-{}", package.name.to_lowercase()); } // TODO(jelmer) package.name.to_lowercase().replace('_', "-") } pub fn valid_debian_package_name(name: &str) -> bool { lazy_regex::regex_is_match!("[a-z0-9][a-z0-9+-.]+", name) } upstream-ontologist-0.1.37/src/extrapolate.rs000066400000000000000000000442641462717511400213500ustar00rootroot00000000000000use crate::{Certainty, UpstreamDatum, UpstreamDatumWithMetadata}; use crate::{ProviderError, UpstreamMetadata}; use log::warn; const DEFAULT_ITERATION_LIMIT: usize = 10; struct Extrapolation { from_fields: &'static [&'static str], to_fields: &'static [&'static str], cb: fn(&mut UpstreamMetadata, bool) -> Result, ProviderError>, } fn extrapolate_repository_from_homepage( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => return { warn!("Homepage field is not a URL"); Ok(vec![]) } }; if let Some(repo) = crate::vcs::guess_repo_from_url(&url, Some(net_access)) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some( std::cmp::min(homepage.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: homepage.origin.clone(), }); } Ok(ret) } fn extrapolate_homepage_from_repository_browse( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let browse_url = upstream_metadata.get("Repository-Browse").unwrap(); let url = match browse_url.datum.to_url() { Some(url) => url, None => return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } }; // Some hosting sites are commonly used as Homepage // TODO(jelmer): Maybe check that there is a README file that // can serve as index? let forge = crate::find_forge(&url, Some(net_access)); if forge.is_some() && forge.unwrap().repository_browse_can_be_homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(browse_url.datum.as_str().unwrap().to_string()), certainty: Some( std::cmp::min(browse_url.certainty, Some(Certainty::Possible)) .unwrap_or(Certainty::Possible), ), origin: browse_url.origin.clone(), }); } Ok(ret) } fn copy_bug_db_field( upstream_metadata: &mut UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_bug_db = upstream_metadata.get("Bugs-Database").unwrap(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(old_bug_db.datum.as_str().unwrap().to_string()), certainty: old_bug_db.certainty, origin: old_bug_db.origin.clone(), }); upstream_metadata.remove("Bugs-Database"); Ok(ret) } fn extrapolate_repository_from_bug_db( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)); Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_repository_browse_from_repository( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Repository field is not a URL"); Ok(vec![]) } }; let browse_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: None, }, Some(net_access), ); Ok(if let Some(browse_url) = browse_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_repository_from_repository_browse( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository-Browse").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)); Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_bug_database_from_repository( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Repository field is not a URL"); Ok(vec![]) } }; Ok( if let Some(bug_db_url) = crate::guess_bug_database_url_from_repo_url( &url, Some(net_access), ) { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }, ) } fn extrapolate_bug_submit_from_bug_db( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } }; let bug_submit_url = crate::bug_submit_url_from_bug_database_url( &url, Some(net_access), ); Ok(if let Some(bug_submit_url) = bug_submit_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(bug_submit_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_bug_db_from_bug_submit( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Submit").unwrap(); let old_value_url = match old_value.datum.to_url() { Some(url) => url, None => return Ok(vec![]), }; let bug_db_url = crate::bug_database_url_from_bug_submit_url(&old_value_url, Some(net_access)); Ok(if let Some(bug_db_url) = bug_db_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_repository_from_download( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Download").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Download field is not a URL"); Ok(vec![]) } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)); Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } fn extrapolate_name_from_repository( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => return { warn!("Repository field is not a URL"); Ok(vec![]) } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)); if let Some(repo) = repo { let parsed: url::Url = repo.parse().unwrap(); let name = parsed.path_segments().unwrap().last().unwrap(); let name = name.strip_suffix(".git").unwrap_or(name); if !name.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }); } } Ok(ret) } fn extrapolate_security_contact_from_security_md( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let repository_url = upstream_metadata.get("Repository").unwrap(); let security_md_path = upstream_metadata.get("Security-MD").unwrap(); let url = match repository_url.datum.to_url() { Some(url) => url, None => return { warn!("Repository field is not a URL"); Ok(vec![]) } }; let security_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: security_md_path.datum.as_str().map(|x| x.to_string()), }, Some(net_access), ); Ok(if let Some(security_url) = security_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityContact(security_url.to_string()), certainty: std::cmp::min(repository_url.certainty, security_md_path.certainty), origin: repository_url.origin.clone(), }] } else { vec![] }) } fn extrapolate_contact_from_maintainer( upstream_metadata: &mut UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let maintainer = upstream_metadata.get("Maintainer").unwrap(); Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(maintainer.datum.as_person().unwrap().to_string()), certainty: maintainer.certainty, origin: maintainer.origin.clone(), }]) } fn consult_homepage( upstream_metadata: &mut UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { if !net_access { return Ok(vec![]); } let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => return { warn!("Homepage field is not a URL"); Ok(vec![]) } }; let mut ret = vec![]; for mut entry in crate::homepage::guess_from_homepage(&url)? { entry.certainty = std::cmp::min(homepage.certainty, entry.certainty); ret.push(entry); } Ok(ret) } const EXTRAPOLATIONS: &[Extrapolation] = &[ Extrapolation { from_fields: &["Homepage"], to_fields: &["Repository"], cb: extrapolate_repository_from_homepage, }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Homepage"], cb: extrapolate_homepage_from_repository_browse, }, Extrapolation { from_fields: &["Bugs-Database"], to_fields: &["Bug-Database"], cb: copy_bug_db_field, }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Repository"], cb: extrapolate_repository_from_bug_db, }, Extrapolation { from_fields: &["Repository"], to_fields: &["Repository-Browse"], cb: extrapolate_repository_browse_from_repository, }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Repository"], cb: extrapolate_repository_from_repository_browse, }, Extrapolation { from_fields: &["Repository"], to_fields: &["Bug-Database"], cb: extrapolate_bug_database_from_repository, }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Bug-Submit"], cb: extrapolate_bug_submit_from_bug_db, }, Extrapolation { from_fields: &["Bug-Submit"], to_fields: &["Bug-Database"], cb: extrapolate_bug_db_from_bug_submit, }, Extrapolation { from_fields: &["Download"], to_fields: &["Repository"], cb: extrapolate_repository_from_download, }, Extrapolation { from_fields: &["Repository"], to_fields: &["Name"], cb: extrapolate_name_from_repository, }, Extrapolation { from_fields: &["Repository", "Security-MD"], to_fields: &["Security-Contact"], cb: extrapolate_security_contact_from_security_md, }, Extrapolation { from_fields: &["Maintainer"], to_fields: &["Contact"], cb: extrapolate_contact_from_maintainer, }, Extrapolation { from_fields: &["Homepage"], to_fields: &["Bug-Database", "Repository"], cb: consult_homepage, }, ]; pub fn extrapolate_fields( upstream_metadata: &mut UpstreamMetadata, net_access: bool, iteration_limit: Option, ) -> Result<(), ProviderError> { let iteration_limit = iteration_limit.unwrap_or(DEFAULT_ITERATION_LIMIT); let mut changed = true; let mut iterations = 0; while changed { changed = false; iterations += 1; if iterations > iteration_limit { return Err(ProviderError::ExtrapolationLimitExceeded(iteration_limit)); } for extrapolation in EXTRAPOLATIONS { let from_fields = extrapolation.from_fields; let to_fields = extrapolation.to_fields; let cb = extrapolation.cb; let from_values = from_fields .iter() .map(|f| upstream_metadata.get(f)) .collect::>(); if !from_values.iter().all(|v| v.is_some()) { log::trace!( "Not enough values for extrapolation from {:?} to {:?}", from_fields, to_fields ); continue; } let from_values = from_values .iter() .map(|v| v.unwrap().clone()) .collect::>(); let from_certainties = from_fields .iter() .map(|f| upstream_metadata.get(f).unwrap().certainty) .collect::>(); let from_certainty = *from_certainties.iter().min().unwrap(); let old_to_values: std::collections::HashMap<_, _> = to_fields .iter() .filter_map(|f| upstream_metadata.get(f).map(|v| (f, v.clone()))) .collect(); assert!(old_to_values.values().all(|v| v.certainty.is_some())); // If any of the to_fields already exist in old_to_values with a better or same // certainty, then we don't need to extrapolate. if to_fields.iter().all(|f| { old_to_values .get(f) .map(|v| v.certainty >= from_certainty) .unwrap_or(false) }) { log::trace!( "Not extrapolating from {:?} to {:?} because of certainty ({:?} >= {:?})", from_fields, to_fields, old_to_values .values() .map(|v| v.certainty) .collect::>(), from_certainty ); continue; } let extra_upstream_metadata = cb(upstream_metadata, net_access)?; let changes = upstream_metadata.update(extra_upstream_metadata.into_iter()); if !changes.is_empty() { log::debug!( "Extrapolating ({:?} ⇒ {:?}) from ({:?})", old_to_values .iter() .map(|(k, v)| format!("{}: {}", k, v.datum)) .collect::>(), changes .iter() .map(|d| format!("{}: {}", d.datum.field(), d.datum)) .collect::>(), from_values .iter() .map(|v| format!( "{}: {} ({})", v.datum.field(), v.datum, v.certainty .map_or_else(|| "unknown".to_string(), |c| c.to_string()) )) .collect::>() ); changed = true; } } } Ok(()) } upstream-ontologist-0.1.37/src/homepage.rs000066400000000000000000000042731462717511400206010ustar00rootroot00000000000000use crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use reqwest::blocking::Client; use reqwest::header::USER_AGENT; use scraper::{Html, Selector}; pub fn guess_from_homepage( url: &url::Url, ) -> Result, ProviderError> { let client = Client::new(); let response = client .get(url.clone()) .header(USER_AGENT, crate::USER_AGENT) .send()?; let body = response.text()?; Ok(guess_from_page(&body, url)) } fn guess_from_page(text: &str, basehref: &url::Url) -> Vec { let fragment = Html::parse_document(text); let selector = Selector::parse("a").unwrap(); let mut result = Vec::new(); for element in fragment.select(&selector) { if let Some(href) = element.value().attr("href") { let labels: Vec = vec![ element.value().attr("aria-label").unwrap_or("").to_string(), element.text().collect::(), ]; for label in labels.iter().filter(|&label| !label.is_empty()) { match label.to_lowercase().as_str() { "github" | "git" | "repository" | "github repository" => { result.push(UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository( basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } "github bug tracking" | "bug tracker" => { result.push(UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository( basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } _ => {} } } } } result } upstream-ontologist-0.1.37/src/lib.rs000066400000000000000000003535051462717511400175670ustar00rootroot00000000000000use lazy_regex::regex; use log::{debug, error, warn}; use percent_encoding::utf8_percent_encode; use pyo3::exceptions::{PyRuntimeError, PyTypeError, PyValueError}; use pyo3::prelude::*; use pyo3::types::PyDict; use reqwest::header::HeaderMap; use serde::ser::SerializeSeq; use std::str::FromStr; use std::fs::File; use std::io::Read; use std::path::{Path, PathBuf}; use url::Url; static USER_AGENT: &str = concat!("upstream-ontologist/", env!("CARGO_PKG_VERSION")); // Too aggressive? const DEFAULT_URLLIB_TIMEOUT: u64 = 3; pub mod debian; pub mod extrapolate; pub mod homepage; pub mod providers; pub mod readme; pub mod vcs; pub mod vcs_command; #[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)] pub enum Certainty { Possible, Likely, Confident, Certain, } #[derive(Clone, Debug, PartialEq, Eq)] pub enum Origin { Path(PathBuf), Url(url::Url), Other(String), } impl std::fmt::Display for Origin { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Origin::Path(path) => write!(f, "{}", path.display()), Origin::Url(url) => write!(f, "{}", url), Origin::Other(s) => write!(f, "{}", s), } } } impl From<&std::path::Path> for Origin { fn from(path: &std::path::Path) -> Self { Origin::Path(path.to_path_buf()) } } impl From for Origin { fn from(path: std::path::PathBuf) -> Self { Origin::Path(path) } } impl From for Origin { fn from(url: url::Url) -> Self { Origin::Url(url) } } impl ToPyObject for Origin { fn to_object(&self, py: Python) -> PyObject { match self { Origin::Path(path) => path.to_str().unwrap().to_object(py), Origin::Url(url) => url.to_string().to_object(py), Origin::Other(s) => s.to_object(py), } } } impl IntoPy for Origin { fn into_py(self, py: Python) -> PyObject { match self { Origin::Path(path) => path.to_str().unwrap().to_object(py), Origin::Url(url) => url.to_string().to_object(py), Origin::Other(s) => s.to_object(py), } } } impl FromPyObject<'_> for Origin { fn extract(ob: &PyAny) -> PyResult { if let Ok(path) = ob.extract::() { Ok(Origin::Path(path)) } else if let Ok(s) = ob.extract::() { Ok(Origin::Other(s)) } else { Err(PyTypeError::new_err("expected str or Path")) } } } impl FromStr for Certainty { type Err = String; fn from_str(s: &str) -> Result { match s { "certain" => Ok(Certainty::Certain), "confident" => Ok(Certainty::Confident), "likely" => Ok(Certainty::Likely), "possible" => Ok(Certainty::Possible), _ => Err(format!("unknown certainty: {}", s)), } } } impl ToString for Certainty { fn to_string(&self) -> String { match self { Certainty::Certain => "certain".to_string(), Certainty::Confident => "confident".to_string(), Certainty::Likely => "likely".to_string(), Certainty::Possible => "possible".to_string(), } } } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for Certainty { fn extract(ob: &PyAny) -> PyResult { let o = ob.extract::<&str>()?; o.parse().map_err(PyValueError::new_err) } } #[derive(Default, Clone, Debug, PartialEq, Eq, serde::Deserialize, serde::Serialize)] pub struct Person { pub name: Option, pub email: Option, pub url: Option, } impl std::fmt::Display for Person { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name.as_ref().unwrap_or(&"".to_string()))?; if let Some(email) = &self.email { write!(f, " <{}>", email)?; } if let Some(url) = &self.url { write!(f, " ({})", url)?; } Ok(()) } } impl From<&str> for Person { fn from(text: &str) -> Self { let mut text = text.replace(" at ", "@"); text = text.replace(" -at- ", "@"); text = text.replace(" -dot- ", "."); text = text.replace("[AT]", "@"); if text.contains('(') && text.ends_with(')') { if let Some((p1, p2)) = text[..text.len() - 1].split_once('(') { if p2.starts_with("https://") || p2.starts_with("http://") { let url = p2.to_string(); if let Some((name, email)) = parseaddr(p1) { Person { name: Some(name), email: Some(email), url: Some(url), } } else { Person { name: Some(p1.to_string()), url: Some(url), ..Default::default() } } } else if p2.contains('@') { Person { name: Some(p1.to_string()), email: Some(p2.to_string()), ..Default::default() } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('<') { if let Some((name, email)) = parseaddr(text.as_str()) { return Person { name: Some(name), email: Some(email), ..Default::default() }; } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('@') && !text.contains(' ') { return Person { email: Some(text), ..Default::default() }; } else { Person { name: Some(text), ..Default::default() } } } } #[cfg(test)] mod person_tests { use super::*; #[test] fn test_from_str() { assert_eq!( Person::from("Foo Bar "), Person { name: Some("Foo Bar".to_string()), email: Some("foo@example.com".to_string()), url: None } ); assert_eq!( Person::from("Foo Bar"), Person { name: Some("Foo Bar".to_string()), email: None, url: None } ); assert_eq!( Person::from("foo@example.com"), Person { name: None, email: Some("foo@example.com".to_string()), url: None } ); } } impl ToPyObject for Person { fn to_object(&self, py: Python) -> PyObject { let m = PyModule::import(py, "upstream_ontologist").unwrap(); let person_cls = m.getattr("Person").unwrap(); person_cls .call1((self.name.as_ref(), self.email.as_ref(), self.url.as_ref())) .unwrap() .into_py(py) } } fn parseaddr(text: &str) -> Option<(String, String)> { let re = regex!(r"(.*?)\s*<([^<>]+)>"); if let Some(captures) = re.captures(text) { let name = captures.get(1).map(|m| m.as_str().trim().to_string()); let email = captures.get(2).map(|m| m.as_str().trim().to_string()); if let (Some(name), Some(email)) = (name, email) { return Some((name, email)); } } None } impl FromPyObject<'_> for Person { fn extract(ob: &'_ PyAny) -> PyResult { let name = ob.getattr("name")?.extract::>()?; let email = ob.getattr("email")?.extract::>()?; let url = ob.getattr("url")?.extract::>()?; Ok(Person { name, email, url }) } } #[derive(Clone, Debug, PartialEq, Eq)] pub enum UpstreamDatum { /// Name of the project Name(String), /// URL to project homepage Homepage(String), /// URL to the project's source code repository Repository(String), /// URL to browse the project's source code repository RepositoryBrowse(String), /// Long description of the project Description(String), /// Short summary of the project (one line) Summary(String), /// License name or SPDX identifier License(String), /// List of authors Author(Vec), /// List of maintainers Maintainer(Person), /// URL of the project's issue tracker BugDatabase(String), /// URL to submit a new bug BugSubmit(String), /// URL to the project's contact page or email address Contact(String), /// Cargo crate name CargoCrate(String), /// Name of the security page name SecurityMD(String), /// URL to the security page or email address SecurityContact(String), /// Last version of the project Version(String), /// List of keywords Keywords(Vec), /// Copyright notice Copyright(String), /// URL to the project's documentation Documentation(String), /// Go import path GoImportPath(String), /// URL to the project's download page Download(String), /// URL to the project's wiki Wiki(String), /// URL to the project's mailing list MailingList(String), /// SourceForge project name SourceForgeProject(String), Archive(String), /// URL to a demo instance Demo(String), /// PHP PECL package name PeclPackage(String), /// URL to the funding page Funding(String), /// URL to the changelog Changelog(String), /// Haskell package name HaskellPackage(String), /// Debian ITP (Intent To Package) bug number DebianITP(i32), /// List of URLs to screenshots Screenshots(Vec), /// Name of registry Registry(Vec<(String, String)>), /// Recommended way to cite the software CiteAs(String), /// Link for donations (e.g. Paypal, Libera, etc) Donation(String), /// Link to a life instance of the webservice Webservice(String), } #[derive(Clone, PartialEq, Eq, Debug)] pub struct UpstreamDatumWithMetadata { pub datum: UpstreamDatum, pub origin: Option, pub certainty: Option, } fn known_bad_url(value: &str) -> bool { if value.contains("${") { return true; } false } impl UpstreamDatum { pub fn field(&self) -> &'static str { match self { UpstreamDatum::Summary(..) => "Summary", UpstreamDatum::Description(..) => "Description", UpstreamDatum::Name(..) => "Name", UpstreamDatum::Homepage(..) => "Homepage", UpstreamDatum::Repository(..) => "Repository", UpstreamDatum::RepositoryBrowse(..) => "Repository-Browse", UpstreamDatum::License(..) => "License", UpstreamDatum::Author(..) => "Author", UpstreamDatum::BugDatabase(..) => "Bug-Database", UpstreamDatum::BugSubmit(..) => "Bug-Submit", UpstreamDatum::Contact(..) => "Contact", UpstreamDatum::CargoCrate(..) => "Cargo-Crate", UpstreamDatum::SecurityMD(..) => "Security-MD", UpstreamDatum::SecurityContact(..) => "Security-Contact", UpstreamDatum::Version(..) => "Version", UpstreamDatum::Keywords(..) => "Keywords", UpstreamDatum::Maintainer(..) => "Maintainer", UpstreamDatum::Copyright(..) => "Copyright", UpstreamDatum::Documentation(..) => "Documentation", UpstreamDatum::GoImportPath(..) => "Go-Import-Path", UpstreamDatum::Download(..) => "Download", UpstreamDatum::Wiki(..) => "Wiki", UpstreamDatum::MailingList(..) => "MailingList", UpstreamDatum::SourceForgeProject(..) => "SourceForge-Project", UpstreamDatum::Archive(..) => "Archive", UpstreamDatum::Demo(..) => "Demo", UpstreamDatum::PeclPackage(..) => "Pecl-Package", UpstreamDatum::HaskellPackage(..) => "Haskell-Package", UpstreamDatum::Funding(..) => "Funding", UpstreamDatum::Changelog(..) => "Changelog", UpstreamDatum::DebianITP(..) => "Debian-ITP", UpstreamDatum::Screenshots(..) => "Screenshots", UpstreamDatum::Registry(..) => "Registry", UpstreamDatum::CiteAs(..) => "Cite-As", UpstreamDatum::Donation(..) => "Donation", UpstreamDatum::Webservice(..) => "Webservice", } } pub fn as_str(&self) -> Option<&str> { match self { UpstreamDatum::Name(s) => Some(s), UpstreamDatum::Homepage(s) => Some(s), UpstreamDatum::Repository(s) => Some(s), UpstreamDatum::RepositoryBrowse(s) => Some(s), UpstreamDatum::Description(s) => Some(s), UpstreamDatum::Summary(s) => Some(s), UpstreamDatum::License(s) => Some(s), UpstreamDatum::BugDatabase(s) => Some(s), UpstreamDatum::BugSubmit(s) => Some(s), UpstreamDatum::Contact(s) => Some(s), UpstreamDatum::CargoCrate(s) => Some(s), UpstreamDatum::SecurityMD(s) => Some(s), UpstreamDatum::SecurityContact(s) => Some(s), UpstreamDatum::Version(s) => Some(s), UpstreamDatum::Documentation(s) => Some(s), UpstreamDatum::GoImportPath(s) => Some(s), UpstreamDatum::Download(s) => Some(s), UpstreamDatum::Wiki(s) => Some(s), UpstreamDatum::MailingList(s) => Some(s), UpstreamDatum::SourceForgeProject(s) => Some(s), UpstreamDatum::Archive(s) => Some(s), UpstreamDatum::Demo(s) => Some(s), UpstreamDatum::PeclPackage(s) => Some(s), UpstreamDatum::HaskellPackage(s) => Some(s), UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(c) => Some(c), UpstreamDatum::Funding(f) => Some(f), UpstreamDatum::Changelog(c) => Some(c), UpstreamDatum::Screenshots(..) => None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::CiteAs(c) => Some(c), UpstreamDatum::Registry(_) => None, UpstreamDatum::Donation(d) => Some(d), UpstreamDatum::Webservice(w) => Some(w), } } pub fn to_url(&self) -> Option { match self { UpstreamDatum::Name(..) => None, UpstreamDatum::Homepage(s) => Some(s.parse().ok()?), UpstreamDatum::Repository(s) => Some(s.parse().ok()?), UpstreamDatum::RepositoryBrowse(s) => Some(s.parse().ok()?), UpstreamDatum::Description(..) => None, UpstreamDatum::Summary(..) => None, UpstreamDatum::License(..) => None, UpstreamDatum::BugDatabase(s) => Some(s.parse().ok()?), UpstreamDatum::BugSubmit(s) => Some(s.parse().ok()?), UpstreamDatum::Contact(..) => None, UpstreamDatum::CargoCrate(s) => Some(s.parse().ok()?), UpstreamDatum::SecurityMD(..) => None, UpstreamDatum::SecurityContact(..) => None, UpstreamDatum::Version(..) => None, UpstreamDatum::Documentation(s) => Some(s.parse().ok()?), UpstreamDatum::GoImportPath(_s) => None, UpstreamDatum::Download(s) => Some(s.parse().ok()?), UpstreamDatum::Wiki(s) => Some(s.parse().ok()?), UpstreamDatum::MailingList(s) => Some(s.parse().ok()?), UpstreamDatum::SourceForgeProject(s) => Some(s.parse().ok()?), UpstreamDatum::Archive(s) => Some(s.parse().ok()?), UpstreamDatum::Demo(s) => Some(s.parse().ok()?), UpstreamDatum::PeclPackage(_s) => None, UpstreamDatum::HaskellPackage(_s) => None, UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(..) => None, UpstreamDatum::Funding(s) => Some(s.parse().ok()?), UpstreamDatum::Changelog(s) => Some(s.parse().ok()?), UpstreamDatum::Screenshots(..) => None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::Registry(_r) => None, UpstreamDatum::CiteAs(_c) => None, UpstreamDatum::Donation(_d) => None, UpstreamDatum::Webservice(w) => Some(w.parse().ok()?), } } pub fn as_person(&self) -> Option<&Person> { match self { UpstreamDatum::Maintainer(p) => Some(p), _ => None, } } pub fn known_bad_guess(&self) -> bool { match self { UpstreamDatum::BugDatabase(s) | UpstreamDatum::BugSubmit(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("bugzilla.gnome.org") { return true; } if url.host_str() == Some("bugs.freedesktop.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Repository(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("anongit.kde.org") { return true; } if url.host_str() == Some("git.gitorious.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Homepage(s) => { let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("pypi.org") { return true; } if url.host_str() == Some("rubygems.org") { return true; } } UpstreamDatum::RepositoryBrowse(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("cgit.kde.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Author(authors) => { for a in authors { if let Some(name) = &a.name { let lc = name.to_lowercase(); if lc.contains("unknown") { return true; } if lc.contains("maintainer") { return true; } if lc.contains("contributor") { return true; } } } } UpstreamDatum::Name(s) => { let lc = s.to_lowercase(); if lc.contains("unknown") { return true; } if lc == "package" { return true; } } UpstreamDatum::Version(s) => { let lc = s.to_lowercase(); if ["devel", "unknown"].contains(&lc.as_str()) { return true; } } _ => {} } false } } impl std::fmt::Display for UpstreamDatum { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { UpstreamDatum::Name(s) => write!(f, "Name: {}", s), UpstreamDatum::Homepage(s) => write!(f, "Homepage: {}", s), UpstreamDatum::Repository(s) => write!(f, "Repository: {}", s), UpstreamDatum::RepositoryBrowse(s) => write!(f, "RepositoryBrowse: {}", s), UpstreamDatum::Description(s) => write!(f, "Description: {}", s), UpstreamDatum::Summary(s) => write!(f, "Summary: {}", s), UpstreamDatum::License(s) => write!(f, "License: {}", s), UpstreamDatum::BugDatabase(s) => write!(f, "BugDatabase: {}", s), UpstreamDatum::BugSubmit(s) => write!(f, "BugSubmit: {}", s), UpstreamDatum::Contact(s) => write!(f, "Contact: {}", s), UpstreamDatum::CargoCrate(s) => write!(f, "CargoCrate: {}", s), UpstreamDatum::SecurityMD(s) => write!(f, "SecurityMD: {}", s), UpstreamDatum::SecurityContact(s) => write!(f, "SecurityContact: {}", s), UpstreamDatum::Version(s) => write!(f, "Version: {}", s), UpstreamDatum::Documentation(s) => write!(f, "Documentation: {}", s), UpstreamDatum::GoImportPath(s) => write!(f, "GoImportPath: {}", s), UpstreamDatum::Download(s) => write!(f, "Download: {}", s), UpstreamDatum::Wiki(s) => write!(f, "Wiki: {}", s), UpstreamDatum::MailingList(s) => write!(f, "MailingList: {}", s), UpstreamDatum::SourceForgeProject(s) => write!(f, "SourceForgeProject: {}", s), UpstreamDatum::Archive(s) => write!(f, "Archive: {}", s), UpstreamDatum::Demo(s) => write!(f, "Demo: {}", s), UpstreamDatum::PeclPackage(s) => write!(f, "PeclPackage: {}", s), UpstreamDatum::Author(authors) => { write!( f, "Author: {}", authors .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } UpstreamDatum::Maintainer(maintainer) => { write!(f, "Maintainer: {}", maintainer) } UpstreamDatum::Keywords(keywords) => { write!( f, "Keywords: {}", keywords .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } UpstreamDatum::Copyright(s) => { write!(f, "Copyright: {}", s) } UpstreamDatum::Funding(s) => { write!(f, "Funding: {}", s) } UpstreamDatum::Changelog(s) => { write!(f, "Changelog: {}", s) } UpstreamDatum::DebianITP(s) => { write!(f, "DebianITP: {}", s) } UpstreamDatum::HaskellPackage(p) => { write!(f, "HaskellPackage: {}", p) } UpstreamDatum::Screenshots(s) => { write!(f, "Screenshots: {}", s.join(", ")) } UpstreamDatum::Registry(r) => { write!(f, "Registry:")?; for (k, v) in r { write!(f, " - Name: {}", k)?; write!(f, " Entry: {}", v)?; } Ok(()) } UpstreamDatum::CiteAs(c) => { write!(f, "Cite-As: {}", c) } UpstreamDatum::Donation(d) => { write!(f, "Donation: {}", d) } UpstreamDatum::Webservice(w) => { write!(f, "Webservice: {}", w) } } } } impl serde::ser::Serialize for UpstreamDatum { fn serialize(&self, serializer: S) -> Result { match self { UpstreamDatum::Name(s) => serializer.serialize_str(s), UpstreamDatum::Homepage(s) => serializer.serialize_str(s), UpstreamDatum::Repository(s) => serializer.serialize_str(s), UpstreamDatum::RepositoryBrowse(s) => serializer.serialize_str(s), UpstreamDatum::Description(s) => serializer.serialize_str(s), UpstreamDatum::Summary(s) => serializer.serialize_str(s), UpstreamDatum::License(s) => serializer.serialize_str(s), UpstreamDatum::BugDatabase(s) => serializer.serialize_str(s), UpstreamDatum::BugSubmit(s) => serializer.serialize_str(s), UpstreamDatum::Contact(s) => serializer.serialize_str(s), UpstreamDatum::CargoCrate(s) => serializer.serialize_str(s), UpstreamDatum::SecurityMD(s) => serializer.serialize_str(s), UpstreamDatum::SecurityContact(s) => serializer.serialize_str(s), UpstreamDatum::Version(s) => serializer.serialize_str(s), UpstreamDatum::Documentation(s) => serializer.serialize_str(s), UpstreamDatum::GoImportPath(s) => serializer.serialize_str(s), UpstreamDatum::Download(s) => serializer.serialize_str(s), UpstreamDatum::Wiki(s) => serializer.serialize_str(s), UpstreamDatum::MailingList(s) => serializer.serialize_str(s), UpstreamDatum::SourceForgeProject(s) => serializer.serialize_str(s), UpstreamDatum::Archive(s) => serializer.serialize_str(s), UpstreamDatum::Demo(s) => serializer.serialize_str(s), UpstreamDatum::PeclPackage(s) => serializer.serialize_str(s), UpstreamDatum::Author(authors) => { let mut seq = serializer.serialize_seq(Some(authors.len()))?; for a in authors { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Maintainer(maintainer) => maintainer.serialize(serializer), UpstreamDatum::Keywords(keywords) => { let mut seq = serializer.serialize_seq(Some(keywords.len()))?; for a in keywords { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Copyright(s) => serializer.serialize_str(s), UpstreamDatum::Funding(s) => serializer.serialize_str(s), UpstreamDatum::Changelog(s) => serializer.serialize_str(s), UpstreamDatum::DebianITP(s) => serializer.serialize_i32(*s), UpstreamDatum::HaskellPackage(p) => serializer.serialize_str(p), UpstreamDatum::Screenshots(s) => { let mut seq = serializer.serialize_seq(Some(s.len()))?; for s in s { seq.serialize_element(s)?; } seq.end() } UpstreamDatum::CiteAs(c) => serializer.serialize_str(c), UpstreamDatum::Registry(r) => { let mut l = serializer.serialize_seq(Some(r.len()))?; for (k, v) in r { let mut m = serde_yaml::Mapping::new(); m.insert( serde_yaml::Value::String("Name".to_string()), serde_yaml::to_value(k).unwrap(), ); m.insert( serde_yaml::Value::String("Entry".to_string()), serde_yaml::to_value(v).unwrap(), ); l.serialize_element(&m)?; } l.end() } UpstreamDatum::Donation(d) => serializer.serialize_str(d), UpstreamDatum::Webservice(w) => serializer.serialize_str(w), } } } pub struct UpstreamMetadata(Vec); impl UpstreamMetadata { pub fn new() -> Self { UpstreamMetadata(Vec::new()) } pub fn from_data(data: Vec) -> Self { Self(data) } pub fn mut_items(&mut self) -> &mut Vec { &mut self.0 } pub fn iter(&self) -> impl Iterator { self.0.iter() } pub fn mut_iter(&mut self) -> impl Iterator { self.0.iter_mut() } pub fn get(&self, field: &str) -> Option<&UpstreamDatumWithMetadata> { self.0.iter().find(|d| d.datum.field() == field) } pub fn get_mut(&mut self, field: &str) -> Option<&mut UpstreamDatumWithMetadata> { self.0.iter_mut().find(|d| d.datum.field() == field) } pub fn insert(&mut self, datum: UpstreamDatumWithMetadata) { self.0.push(datum); } pub fn contains_key(&self, field: &str) -> bool { self.get(field).is_some() } pub fn discard_known_bad(&mut self) { self.0.retain(|d| !d.datum.known_bad_guess()); } pub fn update( &mut self, new_items: impl Iterator, ) -> Vec { update_from_guesses(&mut self.0, new_items) } pub fn remove(&mut self, field: &str) -> Option { let index = self.0.iter().position(|d| d.datum.field() == field)?; Some(self.0.remove(index)) } } impl Default for UpstreamMetadata { fn default() -> Self { UpstreamMetadata::new() } } impl Iterator for UpstreamMetadata { type Item = UpstreamDatumWithMetadata; fn next(&mut self) -> Option { self.0.pop() } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata(v) } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata( v.into_iter() .map(|d| UpstreamDatumWithMetadata { datum: d, certainty: None, origin: None, }) .collect(), ) } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0 } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0.into_iter().map(|d| d.datum).collect() } } impl serde::ser::Serialize for UpstreamMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { let mut map = serde_yaml::Mapping::new(); for datum in &self.0 { map.insert( serde_yaml::Value::String(datum.datum.field().to_string()), serde_yaml::to_value(datum).unwrap(), ); } map.serialize(serializer) } } impl ToPyObject for UpstreamDatumWithMetadata { fn to_object(&self, py: Python) -> PyObject { let m = PyModule::import(py, "upstream_ontologist.guess").unwrap(); let cls = m.getattr("UpstreamDatum").unwrap(); let (field, py_datum) = self .datum .to_object(py) .extract::<(String, PyObject)>(py) .unwrap(); let kwargs = pyo3::types::PyDict::new(py); kwargs .set_item("certainty", self.certainty.map(|x| x.to_string())) .unwrap(); kwargs.set_item("origin", self.origin.as_ref()).unwrap(); let datum = cls.call((field, py_datum), Some(kwargs)).unwrap(); datum.to_object(py) } } impl serde::ser::Serialize for UpstreamDatumWithMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { UpstreamDatum::serialize(&self.datum, serializer) } } pub trait UpstreamDataProvider { fn provide( path: &std::path::Path, trust_package: bool, ) -> dyn Iterator; } #[derive(Debug)] pub enum HTTPJSONError { HTTPError(reqwest::Error), Error { url: reqwest::Url, status: u16, response: reqwest::blocking::Response, }, } impl std::fmt::Display for HTTPJSONError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { HTTPJSONError::HTTPError(e) => write!(f, "{}", e), HTTPJSONError::Error { url, status, response: _, } => write!(f, "HTTP error {} for {}:", status, url,), } } } pub fn load_json_url( http_url: &Url, timeout: Option, ) -> Result { let timeout = timeout.unwrap_or(std::time::Duration::from_secs(DEFAULT_URLLIB_TIMEOUT)); let mut headers = HeaderMap::new(); headers.insert(reqwest::header::USER_AGENT, USER_AGENT.parse().unwrap()); headers.insert(reqwest::header::ACCEPT, "application/json".parse().unwrap()); if let Some(hostname) = http_url.host_str() { if hostname == "github.com" || hostname == "raw.githubusercontent.com" { if let Ok(token) = std::env::var("GITHUB_TOKEN") { headers.insert( reqwest::header::WWW_AUTHENTICATE, format!("Bearer {}", token).parse().unwrap(), ); } } } let client = reqwest::blocking::Client::builder() .timeout(timeout) .default_headers(headers) .build() .map_err(HTTPJSONError::HTTPError)?; let http_url: reqwest::Url = Into::::into(http_url.clone()).parse().unwrap(); let request = client .get(http_url) .build() .map_err(HTTPJSONError::HTTPError)?; let response = client.execute(request).map_err(HTTPJSONError::HTTPError)?; if !response.status().is_success() { return Err(HTTPJSONError::Error { url: response.url().clone(), status: response.status().as_u16(), response, }); } let json_contents: serde_json::Value = response.json().map_err(HTTPJSONError::HTTPError)?; Ok(json_contents) } fn xmlparse_simplify_namespaces(path: &Path, namespaces: &[&str]) -> Option { let namespaces = namespaces .iter() .map(|ns| format!("{{{}{}}}", ns, ns)) .collect::>(); let mut f = std::fs::File::open(path).unwrap(); let mut buf = Vec::new(); f.read_to_end(&mut buf).ok()?; let mut tree = xmltree::Element::parse(std::io::Cursor::new(buf)).ok()?; simplify_namespaces(&mut tree, &namespaces); Some(tree) } fn simplify_namespaces(element: &mut xmltree::Element, namespaces: &[String]) { use xmltree::XMLNode; element.prefix = None; if let Some(namespace) = namespaces.iter().find(|&ns| element.name.starts_with(ns)) { element.name = element.name[namespace.len()..].to_string(); } for child in &mut element.children { if let XMLNode::Element(ref mut child_element) = child { simplify_namespaces(child_element, namespaces); } } } pub enum CanonicalizeError { InvalidUrl(Url, String), Unverifiable(Url, String), RateLimited(Url), } pub fn check_url_canonical(url: &Url) -> Result { if url.scheme() != "http" && url.scheme() != "https" { return Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unsupported scheme {}", url.scheme()), )); } let timeout = std::time::Duration::from_secs(DEFAULT_URLLIB_TIMEOUT); let mut headers = reqwest::header::HeaderMap::new(); headers.insert( reqwest::header::USER_AGENT, USER_AGENT.parse().expect("valid user agent"), ); let client = reqwest::blocking::Client::builder() .default_headers(headers) .timeout(timeout) .build() .map_err(|e| CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e)))?; let response = client .get(url.clone()) .send() .map_err(|e| CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e)))?; match response.status() { status if status.is_success() => Ok(response.url().clone()), status if status == reqwest::StatusCode::TOO_MANY_REQUESTS => { Err(CanonicalizeError::RateLimited(url.clone())) } status if status == reqwest::StatusCode::NOT_FOUND => Err(CanonicalizeError::InvalidUrl( url.clone(), format!("Not found: {}", response.status()), )), status if status.is_server_error() => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Server down: {}", response.status()), )), _ => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unknown HTTP error {}", response.status()), )), } } pub fn with_path_segments(url: &Url, path_segments: &[&str]) -> Result { let mut url = url.clone(); url.path_segments_mut()? .clear() .extend(path_segments.iter()); Ok(url) } pub trait Forge: Send + Sync { fn repository_browse_can_be_homepage(&self) -> bool; fn name(&self) -> &'static str; fn bug_database_url_from_bug_submit_url(&self, _url: &Url) -> Option { None } fn bug_submit_url_from_bug_database_url(&self, _url: &Url) -> Option { None } fn check_bug_database_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } fn bug_database_from_issue_url(&self, _url: &Url) -> Option { None } fn bug_database_url_from_repo_url(&self, _url: &Url) -> Option { None } fn repo_url_from_merge_request_url(&self, _url: &Url) -> Option { None } fn extend_metadata( &self, _metadata: &mut Vec, _project: &str, _max_certainty: Option, ) { } } pub struct GitHub; impl GitHub { pub fn new() -> Self { Self } } impl Forge for GitHub { fn name(&self) -> &'static str { "GitHub" } fn repository_browse_can_be_homepage(&self) -> bool { true } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 && path_elements.len() != 4 { return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); Some(with_path_segments(&url, &path_elements[0..3]).unwrap()) } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); url.path_segments_mut().unwrap().push("new"); Some(url) } fn check_bug_database_canonical(&self, url: &Url) -> Result { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } if path_elements[2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } let api_url = Url::parse(&format!( "https://api.github.com/repos/{}/{}", path_elements[0], path_elements[1] )) .unwrap(); let response = match reqwest::blocking::get(api_url) { Ok(response) => response, Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => { return Err(CanonicalizeError::InvalidUrl( url.clone(), format!("Project does not exist {}", e), )); } Err(e) if e.status() == Some(reqwest::StatusCode::FORBIDDEN) => { // Probably rate limited warn!("Unable to verify bug database URL {}: {}", url, e); return Err(CanonicalizeError::RateLimited(url.clone())); } Err(e) => { return Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), )); } }; let data = response.json::().map_err(|e| { CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), ) })?; if data["has_issues"].as_bool() != Some(true) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project does not have issues enabled".to_string(), )); } if data.get("archived").unwrap_or(&serde_json::Value::Null) == &serde_json::Value::Bool(true) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project is archived".to_string(), )); } let mut url = Url::parse(data["html_url"].as_str().ok_or_else(|| { CanonicalizeError::Unverifiable( url.clone(), "Unable to verify bug database URL: no html_url".to_string(), ) })?) .map_err(|e| { CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), ) })?; url.set_scheme("https").expect("valid scheme"); url.path_segments_mut() .expect("path segments") .push("issues"); Ok(url) } fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { let mut path_segments = url.path_segments().unwrap().collect::>(); path_segments.pop(); let db_url = with_path_segments(url, &path_segments).unwrap(); let mut canonical_db_url = self.check_bug_database_canonical(&db_url)?; canonical_db_url.set_scheme("https").expect("valid scheme"); canonical_db_url .path_segments_mut() .expect("path segments") .push("new"); Ok(canonical_db_url) } fn bug_database_from_issue_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[1] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").unwrap(); Some(with_path_segments(&url, &path_elements[0..3]).unwrap()) } fn bug_database_url_from_repo_url(&self, url: &Url) -> Option { let mut path = url .path_segments() .into_iter() .take(2) .flatten() .collect::>(); path[1] = path[1].strip_suffix(".git").unwrap_or(path[1]); path.push("issues"); let mut url = url.clone(); url.set_scheme("https").unwrap(); Some(with_path_segments(&url, path.as_slice()).unwrap()) } fn repo_url_from_merge_request_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[1] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); Some(with_path_segments(&url, &path_elements[0..2]).unwrap()) } } static DEFAULT_ASCII_SET: percent_encoding::AsciiSet = percent_encoding::CONTROLS .add(b'/') .add(b'?') .add(b'#') .add(b'%'); pub struct GitLab; impl GitLab { pub fn new() -> Self { Self } } impl Forge for GitLab { fn name(&self) -> &'static str { "GitLab" } fn repository_browse_can_be_homepage(&self) -> bool { true } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { let mut path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 { return None; } if path_elements[path_elements.len() - 2] != "issues" { return None; } if path_elements[path_elements.len() - 1] != "new" { path_elements.pop(); } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 3]).unwrap()) } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 { return None; } if path_elements[path_elements.len() - 1] != "issues" { return None; } let mut url = url.clone(); url.path_segments_mut().expect("path segments").push("new"); Some(url) } fn check_bug_database_canonical(&self, url: &Url) -> Result { let host = url .host() .ok_or_else(|| CanonicalizeError::InvalidUrl(url.clone(), "no host".to_string()))?; let mut path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 1] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } path_elements.pop(); let proj = path_elements.join("/"); let proj_segment = utf8_percent_encode(proj.as_str(), &DEFAULT_ASCII_SET); let api_url = Url::parse(&format!( "https://{}/api/v4/projects/{}", host, proj_segment )) .map_err(|_| { CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with invalid project path".to_string(), ) })?; match load_json_url(&api_url, None) { Ok(data) => { // issues_enabled is only provided when the user is authenticated, // so if we're not then we just fall back to checking the canonical URL let issues_enabled = data .get("issues_enabled") .unwrap_or(&serde_json::Value::Null); if issues_enabled.as_bool() == Some(false) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project does not have issues enabled".to_string(), )); } let mut canonical_url = Url::parse(data["web_url"].as_str().unwrap()).unwrap(); canonical_url .path_segments_mut() .unwrap() .extend(&["-", "issues"]); if issues_enabled.as_bool() == Some(true) { return Ok(canonical_url); } check_url_canonical(&canonical_url) } Err(HTTPJSONError::Error { status, .. }) if status == reqwest::StatusCode::NOT_FOUND => { Err(CanonicalizeError::InvalidUrl( url.clone(), "Project not found".to_string(), )) } Err(e) => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {:?}", e), )), } } fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } if path_elements[path_elements.len() - 1] != "new" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } let db_url = with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap(); let mut canonical_db_url = self.check_bug_database_canonical(&db_url)?; canonical_db_url .path_segments_mut() .expect("valid segments") .push("new"); Ok(canonical_db_url) } fn bug_database_from_issue_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" || path_elements[path_elements.len() - 1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap()) } fn bug_database_url_from_repo_url(&self, url: &Url) -> Option { let mut url = url.clone(); let last = url .path_segments() .expect("valid segments") .last() .unwrap() .to_string(); url.path_segments_mut() .unwrap() .pop() .push(last.trim_end_matches(".git")) .push("issues"); Some(url) } fn repo_url_from_merge_request_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 3 || path_elements[path_elements.len() - 2] != "merge_requests" || path_elements[path_elements.len() - 1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 2]).unwrap()) } } pub fn guess_from_travis_yml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_yaml::Value = serde_yaml::from_str(&contents).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(go_import_path) = data.get("go_import_path") { if let Some(go_import_path) = go_import_path.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } pub fn guess_from_environment() -> std::result::Result, ProviderError> { let mut results = Vec::new(); if let Ok(url) = std::env::var("UPSTREAM_BRANCH_URL") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Certain), origin: Some(Origin::Other("environment".to_string())), }); } Ok(results) } fn find_datum<'a>( metadata: &'a [UpstreamDatumWithMetadata], field: &str, ) -> Option<&'a UpstreamDatumWithMetadata> { metadata.iter().find(|d| d.datum.field() == field) } fn set_datum(metadata: &mut Vec, datum: UpstreamDatumWithMetadata) { if let Some(idx) = metadata .iter() .position(|d| d.datum.field() == datum.datum.field()) { metadata[idx] = datum; } else { metadata.push(datum); } } pub fn update_from_guesses( metadata: &mut Vec, new_items: impl Iterator, ) -> Vec { let mut changed = vec![]; for datum in new_items { let current_datum = find_datum(metadata, datum.datum.field()); if current_datum.is_none() || datum.certainty > current_datum.unwrap().certainty { changed.push(datum.clone()); set_datum(metadata, datum); } } changed } fn possible_fields_missing( upstream_metadata: &[UpstreamDatumWithMetadata], fields: &[&str], _field_certainty: Certainty, ) -> bool { for field in fields { match find_datum(upstream_metadata, field) { Some(datum) if datum.certainty != Some(Certainty::Certain) => return true, None => return true, _ => (), } } false } fn extend_from_external_guesser( metadata: &mut Vec, max_certainty: Option, supported_fields: &[&str], new_items: impl Fn() -> Vec, ) { if max_certainty.is_some() && !possible_fields_missing(metadata, supported_fields, max_certainty.unwrap()) { return; } let new_items = new_items() .into_iter() .map(|item| UpstreamDatumWithMetadata { datum: item, certainty: max_certainty, origin: None, }); update_from_guesses(metadata, new_items); } pub struct SourceForge; impl SourceForge { pub fn new() -> Self { Self } } impl Forge for SourceForge { fn name(&self) -> &'static str { "SourceForge" } fn repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { let mut segments = url.path_segments()?; if segments.next() != Some("p") { return None; } let project = segments.next()?; if segments.next() != Some("bugs") { return None; } with_path_segments(url, &["p", project, "bugs"]).ok() } fn extend_metadata( &self, metadata: &mut Vec, project: &str, max_certainty: Option, ) { let subproject = find_datum(metadata, "Name").and_then(|f| match f.datum { UpstreamDatum::Name(ref name) => Some(name.to_string()), _ => None, }); extend_from_external_guesser( metadata, max_certainty, &["Homepage", "Name", "Repository", "Bug-Database"], || guess_from_sf(project, subproject.as_deref()), ) } } pub struct Launchpad; impl Launchpad { pub fn new() -> Self { Self } } impl Forge for Launchpad { fn name(&self) -> &'static str { "launchpad" } fn repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project]).ok() } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project, "+filebug"]).ok() } } pub fn find_forge(url: &Url, net_access: Option) -> Option> { if url.host_str()? == "sourceforge.net" { return Some(Box::new(SourceForge::new())); } if url.host_str()?.ends_with(".launchpad.net") { return Some(Box::new(Launchpad::new())); } if url.host_str()? == "github.com" { return Some(Box::new(GitHub::new())); } if vcs::is_gitlab_site(url.host_str()?, net_access) { return Some(Box::new(GitLab::new())); } None } pub fn check_bug_database_canonical( url: &Url, net_access: Option, ) -> Result { if let Some(forge) = find_forge(url, net_access) { forge .bug_database_url_from_bug_submit_url(url) .ok_or(CanonicalizeError::Unverifiable( url.clone(), "no bug database URL found".to_string(), )) } else { Err(CanonicalizeError::Unverifiable( url.clone(), "unknown forge".to_string(), )) } } pub fn bug_submit_url_from_bug_database_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access) { forge.bug_submit_url_from_bug_database_url(url) } else { None } } pub fn bug_database_url_from_bug_submit_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access) { forge.bug_database_url_from_bug_submit_url(url) } else { None } } #[test] fn test_bug_database_url_from_bug_submit_url() { let url = Url::parse("https://bugs.launchpad.net/bugs/+filebug").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None).unwrap(), Url::parse("https://bugs.launchpad.net/bugs").unwrap() ); let url = Url::parse("https://github.com/dulwich/dulwich/issues/new").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None).unwrap(), Url::parse("https://github.com/dulwich/dulwich/issues").unwrap() ); let url = Url::parse("https://sourceforge.net/p/dulwich/bugs/new").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None).unwrap(), Url::parse("https://sourceforge.net/p/dulwich/bugs").unwrap() ); } pub fn guess_bug_database_url_from_repo_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access) { forge.bug_database_url_from_repo_url(url) } else { None } } pub fn repo_url_from_merge_request_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access) { forge.repo_url_from_merge_request_url(url) } else { None } } pub fn bug_database_from_issue_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access) { forge.bug_database_from_issue_url(url) } else { None } } pub fn check_bug_submit_url_canonical( url: &Url, net_access: Option, ) -> Result { if let Some(forge) = find_forge(url, net_access) { forge .bug_submit_url_from_bug_database_url(url) .ok_or(CanonicalizeError::Unverifiable( url.clone(), "no bug submit URL found".to_string(), )) } else { Err(CanonicalizeError::Unverifiable( url.clone(), "unknown forge".to_string(), )) } } fn sf_git_extract_url(page: &str) -> Option { use select::document::Document; use select::predicate::Attr; let soup = Document::from(page); let el = soup.find(Attr("id", "access_url")).next(); el?; let el = el.unwrap(); el.attr("value")?; let value = el.attr("value").unwrap(); let access_command: Vec<&str> = value.split(' ').collect(); if access_command.len() < 3 || access_command[..2] != ["git", "clone"] { return None; } Some(access_command[2].to_string()) } pub fn get_sf_metadata(project: &str) -> Option { let url = format!("https://sourceforge.net/rest/p/{}", project); match load_json_url(&Url::parse(url.as_str()).unwrap(), None) { Ok(data) => Some(data), Err(HTTPJSONError::Error { status, .. }) if status == reqwest::StatusCode::NOT_FOUND => { None } r => panic!("Unexpected result from {}: {:?}", url, r), } } pub fn guess_from_sf(sf_project: &str, subproject: Option<&str>) -> Vec { let mut results = Vec::new(); match get_sf_metadata(sf_project) { Some(data) => { if let Some(name) = data.get("name") { results.push(UpstreamDatum::Name(name.to_string())); } if let Some(external_homepage) = data.get("external_homepage") { results.push(UpstreamDatum::Homepage(external_homepage.to_string())); } if let Some(preferred_support_url) = data.get("preferred_support_url") { let preferred_support_url = Url::parse(preferred_support_url.as_str().unwrap()) .expect("preferred_support_url is not a valid URL"); match check_bug_database_canonical(&preferred_support_url, Some(true)) { Ok(canonical_url) => { results.push(UpstreamDatum::BugDatabase(canonical_url.to_string())); } Err(_) => { results.push(UpstreamDatum::BugDatabase( preferred_support_url.to_string(), )); } } } let vcs_names = ["hg", "git", "svn", "cvs", "bzr"]; let mut vcs_tools: Vec<(&str, Option<&str>, &str)> = data.get("tools").map_or_else(Vec::new, |tools| { tools .as_array() .unwrap() .iter() .filter(|tool| { vcs_names.contains(&tool.get("name").unwrap().as_str().unwrap()) }) .map(|tool| { ( tool.get("name").map_or("", |n| n.as_str().unwrap()), tool.get("mount_label").map(|l| l.as_str().unwrap()), tool.get("url").map_or("", |u| u.as_str().unwrap()), ) }) .collect::, &str)>>() }); if vcs_tools.len() > 1 { vcs_tools.retain(|tool| { if let Some(url) = tool.2.strip_suffix('/') { !["www", "homepage"].contains(&url.rsplit('/').next().unwrap_or("")) } else { true } }); } if vcs_tools.len() > 1 && subproject.is_some() { let new_vcs_tools: Vec<(&str, Option<&str>, &str)> = vcs_tools .iter() .filter(|tool| tool.1 == subproject) .cloned() .collect(); if !new_vcs_tools.is_empty() { vcs_tools = new_vcs_tools; } } if vcs_tools.iter().any(|tool| tool.0 == "cvs") { vcs_tools.retain(|tool| tool.0 != "cvs"); } if vcs_tools.len() == 1 { let (kind, _, url) = vcs_tools[0]; match kind { "git" => { let url = format!("https://sourceforge.net/{}", url); let client = reqwest::blocking::Client::new(); let response = client .head(url) .header("User-Agent", USER_AGENT) .send() .unwrap(); let url = sf_git_extract_url(&response.text().unwrap()); if let Some(url) = url { results.push(UpstreamDatum::Repository(url)); } } "svn" => { let url = format!("https://svn.code.sf.net/{}", url); results.push(UpstreamDatum::Repository(url)); } "hg" => { let url = format!("https://hg.code.sf.net/{}", url); results.push(UpstreamDatum::Repository(url)); } "cvs" => { let url = format!( "cvs+pserver://anonymous@{}.cvs.sourceforge.net/cvsroot/{}", sf_project, url.strip_suffix('/') .unwrap_or("") .rsplit('/') .nth(1) .unwrap_or("") ); results.push(UpstreamDatum::Repository(url)); } "bzr" => { // TODO: Implement Bazaar (BZR) handling } _ => { error!("Unknown VCS kind: {}", kind); } } } else if vcs_tools.len() > 1 { warn!("Multiple possible VCS URLs found"); } } None => { debug!("No SourceForge metadata found for {}", sf_project); } } results } pub fn extract_sf_project_name(url: &str) -> Option { let projects_regex = regex!(r"https?://sourceforge\.net/(projects|p)/([^/]+)"); if let Some(captures) = projects_regex.captures(url) { return captures.get(2).map(|m| m.as_str().to_string()); } let sf_regex = regex!(r"https?://(.*).(sf|sourceforge).(net|io)/.*"); if let Some(captures) = sf_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } pub fn extract_pecl_package_name(url: &str) -> Option { let pecl_regex = regex!(r"https?://pecl\.php\.net/package/(.*)"); if let Some(captures) = pecl_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } pub fn extract_hackage_package(url: &str) -> Option { let hackage_regex = regex!(r"https?://hackage\.haskell\.org/package/([^/]+)/.*"); if let Some(captures) = hackage_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } /// Obtain metadata from a URL related to the project pub fn metadata_from_url(url: &str, origin: &Origin) -> Vec { let mut results = Vec::new(); if let Some(sf_project) = extract_sf_project_name(url) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::SourceForgeProject(sf_project), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("SourceForge".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(pecl_package) = extract_pecl_package_name(url) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::PeclPackage(pecl_package), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Pecl".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(haskell_package) = extract_hackage_package(url) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::HaskellPackage(haskell_package), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Hackage".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } results } pub fn get_repology_metadata(srcname: &str, repo: Option<&str>) -> Option { let repo = repo.unwrap_or("debian_unstable"); let url = format!( "https://repology.org/tools/project-by?repo={}&name_type=srcname' '&target_page=api_v1_project&name={}", repo, srcname ); match load_json_url(&Url::parse(url.as_str()).unwrap(), None) { Ok(json) => Some(json), Err(HTTPJSONError::Error { status, .. }) if status == 404 => None, Err(e) => { debug!("Failed to load repology metadata: {:?}", e); None } } } pub fn guess_from_path( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let basename = path.file_name().and_then(|s| s.to_str()); let mut ret = Vec::new(); if let Some(basename_str) = basename { let re = regex!(r"(.*)-([0-9.]+)"); if let Some(captures) = re.captures(basename_str) { if let Some(name) = captures.get(1) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(version) = captures.get(2) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } else { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(basename_str.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(ret) } impl FromPyObject<'_> for UpstreamDatum { fn extract(obj: &PyAny) -> PyResult { let (field, val): (String, &PyAny) = if let Ok((field, val)) = obj.extract::<(String, &PyAny)>() { (field, val) } else if let Ok(datum) = obj.getattr("datum") { let field = datum.getattr("field")?.extract::()?; let val = datum.getattr("value")?; (field, val) } else if obj.hasattr("field")? && obj.hasattr("value")? { let field = obj.getattr("field")?.extract::()?; let val = obj.getattr("value")?; (field, val) } else { return Err(PyTypeError::new_err(( format!("Expected a tuple of (field, value) or an object with field and value attributesm, found {:?}", obj), ))); }; match field.as_str() { "Name" => Ok(UpstreamDatum::Name(val.extract::()?)), "Version" => Ok(UpstreamDatum::Version(val.extract::()?)), "Homepage" => Ok(UpstreamDatum::Homepage(val.extract::()?)), "Bug-Database" => Ok(UpstreamDatum::BugDatabase(val.extract::()?)), "Bug-Submit" => Ok(UpstreamDatum::BugSubmit(val.extract::()?)), "Contact" => Ok(UpstreamDatum::Contact(val.extract::()?)), "Repository" => Ok(UpstreamDatum::Repository(val.extract::()?)), "Repository-Browse" => Ok(UpstreamDatum::RepositoryBrowse(val.extract::()?)), "License" => Ok(UpstreamDatum::License(val.extract::()?)), "Description" => Ok(UpstreamDatum::Description(val.extract::()?)), "Summary" => Ok(UpstreamDatum::Summary(val.extract::()?)), "Cargo-Crate" => Ok(UpstreamDatum::CargoCrate(val.extract::()?)), "Security-MD" => Ok(UpstreamDatum::SecurityMD(val.extract::()?)), "Security-Contact" => Ok(UpstreamDatum::SecurityContact(val.extract::()?)), "Keywords" => Ok(UpstreamDatum::Keywords(val.extract::>()?)), "Copyright" => Ok(UpstreamDatum::Copyright(val.extract::()?)), "Documentation" => Ok(UpstreamDatum::Documentation(val.extract::()?)), "Go-Import-Path" => Ok(UpstreamDatum::GoImportPath(val.extract::()?)), "Download" => Ok(UpstreamDatum::Download(val.extract::()?)), "Wiki" => Ok(UpstreamDatum::Wiki(val.extract::()?)), "MailingList" => Ok(UpstreamDatum::MailingList(val.extract::()?)), "Funding" => Ok(UpstreamDatum::Funding(val.extract::()?)), "SourceForge-Project" => { Ok(UpstreamDatum::SourceForgeProject(val.extract::()?)) } "Archive" => Ok(UpstreamDatum::Archive(val.extract::()?)), "Demo" => Ok(UpstreamDatum::Demo(val.extract::()?)), "Pecl-Package" => Ok(UpstreamDatum::PeclPackage(val.extract::()?)), "Haskell-Package" => Ok(UpstreamDatum::HaskellPackage(val.extract::()?)), "Author" => Ok(UpstreamDatum::Author(val.extract::>()?)), "Maintainer" => Ok(UpstreamDatum::Maintainer(val.extract::()?)), "Changelog" => Ok(UpstreamDatum::Changelog(val.extract::()?)), "Screenshots" => Ok(UpstreamDatum::Screenshots(val.extract::>()?)), "Cite-As" => Ok(UpstreamDatum::CiteAs(val.extract::()?)), "Registry" => { let v = val.extract::>()?; let mut registry = Vec::new(); for item in v { let d = item.extract::<&PyDict>()?; let name = d.get_item("Name")?.unwrap().extract::()?; let entry = d.get_item("Entry")?.unwrap().extract::()?; registry.push((name, entry)); } Ok(UpstreamDatum::Registry(registry)) } "Donation" => Ok(UpstreamDatum::Donation(val.extract::()?)), "Webservice" => Ok(UpstreamDatum::Webservice(val.extract::()?)), _ => Err(PyRuntimeError::new_err(format!("Unknown field: {}", field))), } } } impl ToPyObject for UpstreamDatum { fn to_object(&self, py: Python) -> PyObject { ( self.field().to_string(), match self { UpstreamDatum::Name(n) => n.into_py(py), UpstreamDatum::Version(v) => v.into_py(py), UpstreamDatum::Contact(c) => c.into_py(py), UpstreamDatum::Summary(s) => s.into_py(py), UpstreamDatum::License(l) => l.into_py(py), UpstreamDatum::Homepage(h) => h.into_py(py), UpstreamDatum::Description(d) => d.into_py(py), UpstreamDatum::BugDatabase(b) => b.into_py(py), UpstreamDatum::BugSubmit(b) => b.into_py(py), UpstreamDatum::Repository(r) => r.into_py(py), UpstreamDatum::RepositoryBrowse(r) => r.into_py(py), UpstreamDatum::SecurityMD(s) => s.into_py(py), UpstreamDatum::SecurityContact(s) => s.into_py(py), UpstreamDatum::CargoCrate(c) => c.into_py(py), UpstreamDatum::Keywords(ks) => ks.to_object(py), UpstreamDatum::Copyright(c) => c.into_py(py), UpstreamDatum::Documentation(a) => a.into_py(py), UpstreamDatum::GoImportPath(ip) => ip.into_py(py), UpstreamDatum::Archive(a) => a.into_py(py), UpstreamDatum::Demo(d) => d.into_py(py), UpstreamDatum::Maintainer(m) => m.to_object(py), UpstreamDatum::Author(a) => a.to_object(py), UpstreamDatum::Wiki(w) => w.into_py(py), UpstreamDatum::Download(d) => d.into_py(py), UpstreamDatum::MailingList(m) => m.into_py(py), UpstreamDatum::SourceForgeProject(m) => m.into_py(py), UpstreamDatum::PeclPackage(p) => p.into_py(py), UpstreamDatum::Funding(p) => p.into_py(py), UpstreamDatum::Changelog(c) => c.into_py(py), UpstreamDatum::HaskellPackage(p) => p.into_py(py), UpstreamDatum::DebianITP(i) => i.into_py(py), UpstreamDatum::Screenshots(s) => s.to_object(py), UpstreamDatum::CiteAs(s) => s.to_object(py), UpstreamDatum::Registry(r) => r .iter() .map(|(name, entry)| { let dict = PyDict::new(py); dict.set_item("Name", name).unwrap(); dict.set_item("Entry", entry).unwrap(); dict.into() }) .collect::>() .to_object(py), UpstreamDatum::Donation(d) => d.to_object(py), UpstreamDatum::Webservice(w) => w.to_object(py), }, ) .to_object(py) } } impl FromPyObject<'_> for UpstreamDatumWithMetadata { fn extract(obj: &PyAny) -> PyResult { let certainty = obj.getattr("certainty")?.extract::>()?; let origin = obj.getattr("origin")?.extract::>()?; let datum = if obj.hasattr("datum")? { obj.getattr("datum")?.extract::() } else { obj.extract::() }?; Ok(UpstreamDatumWithMetadata { datum, certainty: certainty.map(|s| s.parse().unwrap()), origin, }) } } #[derive(Debug)] pub enum ProviderError { ParseError(String), IoError(std::io::Error), Other(String), HttpJsonError(HTTPJSONError), Python(PyErr), ExtrapolationLimitExceeded(usize), } impl std::fmt::Display for ProviderError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { ProviderError::ParseError(e) => write!(f, "Parse error: {}", e), ProviderError::IoError(e) => write!(f, "IO error: {}", e), ProviderError::Other(e) => write!(f, "Other error: {}", e), ProviderError::HttpJsonError(e) => write!(f, "HTTP JSON error: {}", e), ProviderError::Python(e) => write!(f, "Python error: {}", e), ProviderError::ExtrapolationLimitExceeded(e) => { write!(f, "Extrapolation limit exceeded: {}", e) } } } } impl std::error::Error for ProviderError {} impl From for ProviderError { fn from(e: HTTPJSONError) -> Self { ProviderError::HttpJsonError(e) } } impl From for ProviderError { fn from(e: std::io::Error) -> Self { ProviderError::IoError(e) } } impl From for ProviderError { fn from(e: reqwest::Error) -> Self { ProviderError::Other(e.to_string()) } } #[cfg(feature = "pyo3")] pyo3::create_exception!( upstream_ontologist, ParseError, pyo3::exceptions::PyException ); #[cfg(feature = "pyo3")] impl From for PyErr { fn from(e: ProviderError) -> PyErr { match e { ProviderError::IoError(e) => e.into(), ProviderError::ParseError(e) => ParseError::new_err((e,)), ProviderError::Other(e) => PyRuntimeError::new_err((e,)), ProviderError::HttpJsonError(e) => PyRuntimeError::new_err((e.to_string(),)), ProviderError::Python(e) => e, ProviderError::ExtrapolationLimitExceeded(e) => { PyRuntimeError::new_err((e.to_string(),)) } } } } #[derive(Debug)] pub struct UpstreamPackage { pub family: String, pub name: String, } impl FromPyObject<'_> for UpstreamPackage { fn extract(obj: &PyAny) -> PyResult { let family = obj.getattr("family")?.extract::()?; let name = obj.getattr("name")?.extract::()?; Ok(UpstreamPackage { family, name }) } } impl ToPyObject for UpstreamPackage { fn to_object(&self, py: Python) -> PyObject { let dict = pyo3::types::PyDict::new(py); dict.set_item("family", self.family.clone()).unwrap(); dict.set_item("name", self.name.clone()).unwrap(); dict.into() } } #[derive(Debug)] pub struct UpstreamVersion(String); impl FromPyObject<'_> for UpstreamVersion { fn extract(obj: &PyAny) -> PyResult { let version = obj.extract::()?; Ok(UpstreamVersion(version)) } } impl ToPyObject for UpstreamVersion { fn to_object(&self, py: Python) -> PyObject { self.0.to_object(py) } } #[derive(Debug)] pub struct GuesserSettings { pub trust_package: bool, } impl Default for GuesserSettings { fn default() -> Self { GuesserSettings { trust_package: false, } } } pub struct UpstreamMetadataGuesser { pub name: std::path::PathBuf, pub guess: Box Result, ProviderError>>, } impl std::fmt::Debug for UpstreamMetadataGuesser { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("UpstreamMetadataGuesser") .field("name", &self.name) .finish() } } const STATIC_GUESSERS: &[( &str, fn(&std::path::Path, &GuesserSettings) -> Result, ProviderError>, )] = &[ ( "debian/watch", crate::providers::debian::guess_from_debian_watch, ), ( "debian/control", crate::providers::debian::guess_from_debian_control, ), ( "debian/changelog", crate::providers::debian::guess_from_debian_changelog, ), ( "debian/rules", crate::providers::debian::guess_from_debian_rules, ), ("PKG-INFO", crate::providers::python::guess_from_pkg_info), ( "package.json", crate::providers::package_json::guess_from_package_json, ), ( "composer.json", crate::providers::composer_json::guess_from_composer_json, ), ( "package.xml", crate::providers::package_xml::guess_from_package_xml, ), ( "package.yaml", crate::providers::package_yaml::guess_from_package_yaml, ), ("dist.ini", crate::providers::perl::guess_from_dist_ini), ( "debian/copyright", crate::providers::debian::guess_from_debian_copyright, ), ("META.json", crate::providers::perl::guess_from_meta_json), ("MYMETA.json", crate::providers::perl::guess_from_meta_json), ("META.yml", crate::providers::perl::guess_from_meta_yml), ("MYMETA.yml", crate::providers::perl::guess_from_meta_yml), ( "configure", crate::providers::autoconf::guess_from_configure, ), ("DESCRIPTION", crate::providers::r::guess_from_r_description), ("Cargo.toml", crate::providers::rust::guess_from_cargo), ("pom.xml", crate::providers::maven::guess_from_pom_xml), (".git/config", crate::providers::git::guess_from_git_config), ( "debian/get-orig-source.sh", crate::vcs_command::guess_from_get_orig_source, ), ( "pyproject.toml", crate::providers::python::guess_from_pyproject_toml, ), ("setup.cfg", crate::providers::python::guess_from_setup_cfg), ("go.mod", crate::providers::go::guess_from_go_mod), ( "Makefile.PL", crate::providers::perl::guess_from_makefile_pl, ), ("wscript", crate::providers::waf::guess_from_wscript), ("AUTHORS", crate::providers::authors::guess_from_authors), ("INSTALL", crate::providers::guess_from_install), ( "pubspec.yaml", crate::providers::pubspec::guess_from_pubspec_yaml, ), ( "pubspec.yml", crate::providers::pubspec::guess_from_pubspec_yaml, ), ("meson.build", crate::providers::meson::guess_from_meson), ( "metadata.json", crate::providers::metadata_json::guess_from_metadata_json, ), (".travis.yml", crate::guess_from_travis_yml), ]; fn find_guessers(path: &std::path::Path) -> Vec { let mut candidates: Vec<( String, Box< dyn FnOnce( &std::path::Path, &GuesserSettings, ) -> Result, ProviderError>, >, )> = Vec::new(); let path = path.canonicalize().unwrap(); for (name, cb) in STATIC_GUESSERS { let subpath = path.join(name); if subpath.exists() { candidates.push(( name.to_string(), Box::new(move |_path, s: &GuesserSettings| cb(&subpath, s)), )); } } for name in ["SECURITY.md", ".github/SECURITY.md", "docs/SECURITY.md"].iter() { if path.join(name).exists() { candidates.push(( name.to_string(), Box::new(move |path, s: &GuesserSettings| { crate::providers::security_md::guess_from_security_md(name, path, s) }), )); } } let mut found_pkg_info = path.join("PKG-INFO").exists(); for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".egg-info") { candidates.push(( format!("{}/PKG-INFO", filename), Box::new(move |_path, s| { crate::providers::python::guess_from_pkg_info( entry.path().join("PKG-INFO").as_path(), s, ) }), )); found_pkg_info = true; } else if filename.ends_with(".dist-info") { candidates.push(( format!("{}/METADATA", filename), Box::new(move |_path, s| { crate::providers::python::guess_from_pkg_info( entry.path().join("PKG-INFO").as_path(), s, ) }), )); found_pkg_info = true; } } if !found_pkg_info && path.join("setup.py").exists() { candidates.push(( "setup.py".to_string(), Box::new(|path, s| { crate::providers::python::guess_from_setup_py(path, s.trust_package) }), )); } for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".gemspec") { candidates.push(( entry.file_name().to_string_lossy().to_string(), Box::new(move |_path, s| { crate::providers::ruby::guess_from_gemspec(entry.path().as_path(), s) }), )); } } // TODO(jelmer): Perhaps scan all directories if no other primary project information file has been found? for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if entry.file_type().unwrap().is_dir() { let description_name = format!("{}/DESCRIPTION", entry.file_name().to_string_lossy()); if path.join(&description_name).exists() { candidates.push(( description_name, Box::new(move |_path, s| { crate::providers::r::guess_from_r_description(entry.path().as_path(), s) }), )); } } } let mut doap_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".doap") || (filename.ends_with(".xml") && filename.starts_with("doap_XML_")) { Some(entry.file_name()) } else { None } }) .collect::>(); if doap_filenames.len() == 1 { let doap_filename = doap_filenames.remove(0); candidates.push(( doap_filename.to_string_lossy().to_string(), Box::new(|path, s| crate::providers::doap::guess_from_doap(path, s.trust_package)), )); } else if doap_filenames.len() > 1 { log::warn!( "Multiple DOAP files found: {:?}, ignoring all.", doap_filenames ); } let mut metainfo_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry .file_name() .to_string_lossy() .ends_with(".metainfo.xml") { Some(entry.file_name()) } else { None } }) .collect::>(); if metainfo_filenames.len() == 1 { let metainfo_filename = metainfo_filenames.remove(0); candidates.push(( metainfo_filename.to_string_lossy().to_string(), Box::new(|path, s| { crate::providers::metainfo::guess_from_metainfo(path, s.trust_package) }), )); } else if metainfo_filenames.len() > 1 { log::warn!( "Multiple metainfo files found: {:?}, ignoring all.", metainfo_filenames ); } let mut cabal_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".cabal") { Some(entry.file_name()) } else { None } }) .collect::>(); if cabal_filenames.len() == 1 { let cabal_filename = cabal_filenames.remove(0); candidates.push(( cabal_filename.to_string_lossy().to_string(), Box::new(|path, s| crate::providers::haskell::guess_from_cabal(path, s.trust_package)), )); } else if cabal_filenames.len() > 1 { log::warn!( "Multiple cabal files found: {:?}, ignoring all.", cabal_filenames ); } let readme_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if !(filename.to_lowercase().starts_with("readme") || filename.to_lowercase().starts_with("hacking") || filename.to_lowercase().starts_with("contributing")) { return None; } if filename.ends_with('~') { return None; } let extension = entry .path() .extension() .map(|s| s.to_string_lossy().to_string()); if extension.as_deref() == Some("html") || extension.as_deref() == Some("pdf") || extension.as_deref() == Some("xml") { return None; } Some(entry.file_name()) }) .collect::>(); for filename in readme_filenames { candidates.push(( filename.to_string_lossy().to_string(), Box::new(|path, s| crate::readme::guess_from_readme(path, s.trust_package)), )); } let mut nuspec_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".nuspec") { Some(entry.file_name()) } else { None } }) .collect::>(); if nuspec_filenames.len() == 1 { let nuspec_filename = nuspec_filenames.remove(0); candidates.push(( nuspec_filename.to_string_lossy().to_string(), Box::new(|path, s| crate::providers::nuspec::guess_from_nuspec(path, s.trust_package)), )); } else if nuspec_filenames.len() > 1 { log::warn!( "Multiple nuspec files found: {:?}, ignoring all.", nuspec_filenames ); } let mut opam_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".opam") { Some(entry.file_name()) } else { None } }) .collect::>(); if opam_filenames.len() == 1 { let opam_filename = opam_filenames.remove(0); candidates.push(( opam_filename.to_string_lossy().to_string(), Box::new(|path, s| crate::providers::ocaml::guess_from_opam(path, s.trust_package)), )); } else if opam_filenames.len() > 1 { log::warn!( "Multiple opam files found: {:?}, ignoring all.", opam_filenames ); } let debian_patches = match std::fs::read_dir(path.join("debian").join("patches")) { Ok(patches) => patches .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".patch") { Some(format!( "debian/patches/{}", entry.file_name().to_string_lossy() )) } else { None } }) .collect::>(), Err(_) => Vec::new(), }; for filename in debian_patches { candidates.push(( filename.clone(), Box::new(crate::providers::debian::guess_from_debian_patch), )); } candidates.push(( "environment".to_string(), Box::new(|_path, _| crate::guess_from_environment()), )); candidates.push((".".to_string(), Box::new(crate::guess_from_path))); candidates .into_iter() .map(|(name, cb)| { assert!( !name.is_empty() && !name.starts_with('/'), "invalid name: {}", name ); let path = path.join(name); UpstreamMetadataGuesser { name: path.clone(), guess: Box::new(move |s| cb(&path, s)), } }) .collect() } pub struct UpstreamMetadataScanner { path: std::path::PathBuf, config: GuesserSettings, pending: Vec, guessers: Vec, } impl UpstreamMetadataScanner { pub fn from_path(path: &std::path::Path, trust_package: Option) -> Self { let trust_package = trust_package.unwrap_or(false); let guessers = find_guessers(path); Self { path: path.to_path_buf(), pending: Vec::new(), config: GuesserSettings { trust_package }, guessers, } } } impl Iterator for UpstreamMetadataScanner { type Item = Result; fn next(&mut self) -> Option> { loop { if !self.pending.is_empty() { return Some(Ok(self.pending.remove(0))); } if self.guessers.is_empty() { return None; } let guesser = self.guessers.remove(0); let abspath = std::env::current_dir().unwrap().join(self.path.as_path()); let guess = (guesser.guess)(&self.config); match guess { Ok(entries) => { self.pending.extend(entries.into_iter().map(|mut e| { log::trace!("{}: {:?}", guesser.name.display(), e); e.origin = e .origin .or(Some(Origin::Other(guesser.name.display().to_string()))); if let Some(Origin::Path(p)) = e.origin.as_ref() { if let Ok(suffix) = p.strip_prefix(abspath.as_path()) { if suffix.to_str().unwrap().is_empty() { e.origin = Some(Origin::Path(PathBuf::from_str(".").unwrap())); } else { e.origin = Some(Origin::Path( PathBuf::from_str(".").unwrap().join(suffix), )); } } } e })); } Err(e) => { return Some(Err(e)); } } } } } pub fn guess_upstream_info( path: &std::path::Path, trust_package: Option, ) -> impl Iterator> { UpstreamMetadataScanner::from_path(path, trust_package) } pub fn extend_upstream_metadata( upstream_metadata: &mut UpstreamMetadata, path: &std::path::Path, minimum_certainty: Option, net_access: Option, consult_external_directory: Option, ) -> Result<(), ProviderError> { let net_access = net_access.unwrap_or(false); let consult_external_directory = consult_external_directory.unwrap_or(false); let minimum_certainty = minimum_certainty.unwrap_or(Certainty::Confident); // TODO(jelmer): Use EXTRAPOLATE_FNS mechanism for this? for field in [ "Homepage", "Bug-Database", "Bug-Submit", "Repository", "Repository-Browse", "Download", ] { let value = match upstream_metadata.get(field) { Some(value) => value, None => continue, }; if let Some(project) = extract_sf_project_name(value.datum.as_str().unwrap()) { let certainty = Some( std::cmp::min(Some(Certainty::Likely), value.certainty) .unwrap_or(Certainty::Likely), ); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("SourceForge".to_string()), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::SourceForgeProject(project), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); break; } } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "SourceForge" && upstream_metadata.contains_key("SourceForge-Project") && net_access { let sf_project = upstream_metadata .get("SourceForge-Project") .unwrap() .datum .as_str() .unwrap() .to_string(); let sf_certainty = archive.unwrap().certainty; SourceForge::new().extend_metadata( upstream_metadata.mut_items(), sf_project.as_str(), sf_certainty, ); } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Hackage" && upstream_metadata.contains_key("Hackage-Package") && net_access { let hackage_package = upstream_metadata .get("Hackage-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let hackage_certainty = archive.unwrap().certainty; crate::providers::haskell::Hackage::new() .extend_metadata( upstream_metadata.mut_items(), hackage_package.as_str(), hackage_certainty, ) .unwrap(); } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "crates.io" && upstream_metadata.contains_key("Cargo-Crate") && net_access { let cargo_crate = upstream_metadata .get("Cargo-Crate") .unwrap() .datum .as_str() .unwrap() .to_string(); let crates_io_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::rust::CratesIo::new() .extend_metadata( upstream_metadata.mut_items(), cargo_crate.as_str(), crates_io_certainty, ) .unwrap(); } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Pecl" && upstream_metadata.contains_key("Pecl-Package") && net_access { let pecl_package = upstream_metadata .get("Pecl-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let pecl_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::php::Pecl::new() .extend_metadata( upstream_metadata.mut_items(), pecl_package.as_str(), pecl_certainty, ) .unwrap(); } if net_access && consult_external_directory { // TODO(jelmer): Don't assume debian/control exists let package = match debian_control::Control::from_file_relaxed(path.join("debian/control")) { Ok((control, _)) => control.source().and_then(|s| s.get("Package")), Err(_) => None, }; if let Some(package) = package { extend_from_lp( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), None, None, ); crate::providers::arch::Aur::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .unwrap(); crate::providers::gobo::Gobo::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .unwrap(); extend_from_repology( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), ); } } crate::extrapolate::extrapolate_fields(upstream_metadata, net_access, None)?; Ok(()) } pub trait ThirdPartyRepository { fn name(&self) -> &'static str; fn supported_fields(&self) -> &'static [&'static str]; fn max_supported_certainty(&self) -> Certainty; fn extend_metadata( &self, metadata: &mut Vec, name: &str, min_certainty: Option, ) -> Result<(), ProviderError> { if min_certainty.is_some() && min_certainty.unwrap() > self.max_supported_certainty() { // Don't bother if we can't meet minimum certainty return Ok(()); } extend_from_external_guesser( metadata, Some(self.max_supported_certainty()), self.supported_fields(), || self.guess_metadata(name).unwrap(), ); Ok(()) } fn guess_metadata(&self, name: &str) -> Result, ProviderError>; } fn extend_from_lp( upstream_metadata: &mut Vec, minimum_certainty: Certainty, package: &str, distribution: Option<&str>, suite: Option<&str>, ) { // The set of fields that Launchpad can possibly provide: let lp_fields = &["Homepage", "Repository", "Name", "Download"][..]; let lp_certainty = Certainty::Possible; if lp_certainty < minimum_certainty { // Don't bother talking to launchpad if we're not // speculating. return; } extend_from_external_guesser(upstream_metadata, Some(lp_certainty), lp_fields, || { crate::providers::launchpad::guess_from_launchpad(package, distribution, suite).unwrap() }) } fn extend_from_repology( upstream_metadata: &mut Vec, minimum_certainty: Certainty, source_package: &str, ) { // The set of fields that repology can possibly provide: let repology_fields = &["Homepage", "License", "Summary", "Download"][..]; let certainty = Certainty::Confident; if certainty < minimum_certainty { // Don't bother talking to repology if we're not speculating. return; } extend_from_external_guesser(upstream_metadata, Some(certainty), repology_fields, || { crate::providers::repology::guess_from_repology(source_package).unwrap() }) } /// Fix existing upstream metadata. pub fn fix_upstream_metadata(upstream_metadata: &mut UpstreamMetadata) { if let Some(repository) = upstream_metadata.get_mut("Repository") { let url = crate::vcs::sanitize_url(repository.datum.as_str().unwrap()); repository.datum = UpstreamDatum::Repository(url.to_string()); } if let Some(summary) = upstream_metadata.get_mut("Summary") { let s = summary.datum.as_str().unwrap(); let s = s.split_once(". ").map_or(s, |(a, _)| a); let s = s.trim_end().trim_end_matches('.'); summary.datum = UpstreamDatum::Summary(s.to_string()); } } /// Summarize the upstream metadata into a dictionary. /// /// # Arguments /// * `metadata_items`: Iterator over metadata items /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. pub fn summarize_upstream_metadata( metadata_items: impl Iterator, path: &std::path::Path, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let check = check.unwrap_or(false); let mut upstream_metadata = UpstreamMetadata::new(); upstream_metadata.update(filter_bad_guesses(metadata_items)); extend_upstream_metadata( &mut upstream_metadata, path, None, net_access, consult_external_directory, )?; if check { check_upstream_metadata(&mut upstream_metadata, None); } fix_upstream_metadata(&mut upstream_metadata); Ok(upstream_metadata) } /// Guess upstream metadata items, in no particular order. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `minimum_certainty`: Minimum certainty of guesses to return pub fn guess_upstream_metadata_items( path: &std::path::Path, trust_package: Option, minimum_certainty: Option, ) -> impl Iterator> { guess_upstream_info(path, trust_package).filter_map(move |e| match e { Err(e) => Some(Err(e)), Ok(UpstreamDatumWithMetadata { datum, certainty, origin, }) => { if minimum_certainty.is_some() && certainty < minimum_certainty { None } else { Some(Ok(UpstreamDatumWithMetadata { datum, certainty, origin, })) } } }) } pub fn get_upstream_info( path: &std::path::Path, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let metadata_items = guess_upstream_info(path, trust_package); summarize_upstream_metadata( metadata_items.filter_map(|x| match x { Ok(x) => Some(x), Err(e) => { log::error!("{}", e); None } }), path, net_access, consult_external_directory, check, ) } /// Guess the upstream metadata dictionary. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. pub fn guess_upstream_metadata( path: &std::path::Path, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let metadata_items = guess_upstream_metadata_items(path, trust_package, None).filter_map(|x| match x { Ok(x) => Some(x), Err(e) => { log::error!("{}", e); None } }); summarize_upstream_metadata( metadata_items, path, net_access, consult_external_directory, check, ) } pub fn verify_screenshots(urls: &[&str]) -> Vec<(String, Option)> { let mut ret = Vec::new(); for url in urls { let mut request = reqwest::blocking::Request::new(reqwest::Method::GET, url.parse().unwrap()); request.headers_mut().insert( reqwest::header::USER_AGENT, reqwest::header::HeaderValue::from_static(USER_AGENT), ); match reqwest::blocking::Client::new().execute(request) { Ok(response) => { let status = response.status(); if status.is_success() { ret.push((url.to_string(), Some(true))); } else if status.is_client_error() { ret.push((url.to_string(), Some(false))); } else { ret.push((url.to_string(), None)); } } Err(e) => { log::debug!("Error fetching {}: {}", url, e); ret.push((url.to_string(), None)); } } } ret } /// Check upstream metadata. /// /// This will make network connections, etc. pub fn check_upstream_metadata(upstream_metadata: &mut UpstreamMetadata, version: Option<&str>) { let repository = upstream_metadata.get_mut("Repository"); if let Some(repository) = repository { match vcs::check_repository_url_canonical(repository.datum.to_url().unwrap(), version) { Ok(canonical_url) => { repository.datum = UpstreamDatum::Repository(canonical_url.to_string()); if repository.certainty == Some(Certainty::Confident) { repository.certainty = Some(Certainty::Certain); } let derived_browse_url = vcs::browse_url_from_repo_url( &vcs::VcsLocation { url: repository.datum.to_url().unwrap(), branch: None, subpath: None, }, Some(true), ); let certainty = repository.certainty; let browse_repo = upstream_metadata.get_mut("Repository-Browse"); if browse_repo.is_some() && derived_browse_url == browse_repo.as_ref().and_then(|u| u.datum.to_url()) { browse_repo.unwrap().certainty = certainty; } } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unverifiable URL: {}", u); } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Repository URL {}: {}", u, e); upstream_metadata.remove("Repository"); } } } let homepage = upstream_metadata.get_mut("Homepage"); if let Some(homepage) = homepage { match check_url_canonical(&homepage.datum.to_url().unwrap()) { Ok(canonical_url) => { homepage.datum = UpstreamDatum::Homepage(canonical_url.to_string()); if homepage.certainty >= Some(Certainty::Likely) { homepage.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unverifiable URL: {}", u); } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Homepage URL {}: {}", u, e); upstream_metadata.remove("Homepage"); } } } if let Some(repository_browse) = upstream_metadata.get_mut("Repository-Browse") { match check_url_canonical(&repository_browse.datum.to_url().unwrap()) { Ok(u) => { repository_browse.datum = UpstreamDatum::RepositoryBrowse(u.to_string()); if repository_browse.certainty >= Some(Certainty::Likely) { repository_browse.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Repository-Browse URL {}: {}", u, e); upstream_metadata.remove("Repository-Browse"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Repository-Browse URL {}", u); } } } if let Some(bug_database) = upstream_metadata.get_mut("Bug-Database") { match check_bug_database_canonical(&bug_database.datum.to_url().unwrap(), Some(true)) { Ok(u) => { bug_database.datum = UpstreamDatum::BugDatabase(u.to_string()); if bug_database.certainty >= Some(Certainty::Likely) { bug_database.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Bug-Database URL {}: {}", u, e); upstream_metadata.remove("Bug-Database"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Bug-Database URL {}", u); } } } let bug_submit = upstream_metadata.get_mut("Bug-Submit"); if let Some(bug_submit) = bug_submit { match check_bug_submit_url_canonical(&bug_submit.datum.to_url().unwrap(), Some(true)) { Ok(u) => { bug_submit.datum = UpstreamDatum::BugSubmit(u.to_string()); if bug_submit.certainty >= Some(Certainty::Likely) { bug_submit.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Bug-Submit URL {}: {}", u, e); upstream_metadata.remove("Bug-Submit"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Bug-Submit URL {}", u); } } } let mut screenshots = upstream_metadata.get_mut("Screenshots"); if screenshots.is_some() && screenshots.as_ref().unwrap().certainty == Some(Certainty::Likely) { let mut newvalue = vec![]; screenshots.as_mut().unwrap().certainty = Some(Certainty::Certain); let urls = match &screenshots.as_ref().unwrap().datum { UpstreamDatum::Screenshots(urls) => urls, _ => unreachable!(), }; for (url, status) in verify_screenshots( urls.iter() .map(|x| x.as_str()) .collect::>() .as_slice(), ) { match status { Some(true) => { newvalue.push(url); } Some(false) => {} None => { screenshots.as_mut().unwrap().certainty = Some(Certainty::Likely); } } } screenshots.as_mut().unwrap().datum = UpstreamDatum::Screenshots(newvalue); } } pub fn filter_bad_guesses( guessed_items: impl Iterator, ) -> impl Iterator { guessed_items.filter(|item| { let bad = item.datum.known_bad_guess(); if bad { log::debug!("Excluding known bad item {:?}", item); } !bad }) } upstream-ontologist-0.1.37/src/providers/000077500000000000000000000000001462717511400204555ustar00rootroot00000000000000upstream-ontologist-0.1.37/src/providers/arch.rs000066400000000000000000000136321462717511400217450ustar00rootroot00000000000000use crate::{vcs, UpstreamDatum, USER_AGENT}; use log::{debug, error}; use std::collections::HashMap; use std::io::BufRead; pub fn parse_pkgbuild_variables(file: &str) -> HashMap> { let reader = std::io::Cursor::new(file); let mut variables = HashMap::new(); let mut keep: Option<(String, String)> = None; let mut existing: Option = None; for line in reader.lines() { let line = line.expect("Failed to read line"); if let Some(existing_line) = existing.take() { let line = [&existing_line[..existing_line.len() - 2], &line].concat(); existing = Some(line); continue; } if line.ends_with("\\\n") { existing = Some(line[..line.len() - 2].to_owned()); continue; } if line.starts_with('\t') || line.starts_with(' ') || line.starts_with('#') { continue; } if let Some((key, mut value)) = keep.take() { value.push_str(&line); if line.trim_end().ends_with(')') { let value_parts = match shlex::split(value.as_str()) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value.as_str()); continue; } }; variables.insert(key, value_parts); } else { keep = Some((key, value)); } continue; } if let Some((key, value)) = line.split_once('=') { if value.starts_with('(') { if value.trim_end().ends_with(')') { let value = &value[1..value.len() - 1]; let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } else { keep = Some((key.to_owned(), value[1..].to_owned())); } } else { let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } } } variables } pub fn guess_from_aur(package: &str) -> Vec { let mut variables = HashMap::new(); for vcs in vcs::VCSES { let url = format!( "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h={}-{}", package, vcs ); let mut headers = reqwest::header::HeaderMap::new(); headers.insert(reqwest::header::USER_AGENT, USER_AGENT.parse().unwrap()); let client = reqwest::blocking::Client::builder() .default_headers(headers) .build() .unwrap(); debug!("Requesting {}", url); let response = client.get(&url).send(); match response { Ok(response) => { if response.status().is_success() { let text = response.text().unwrap(); variables = parse_pkgbuild_variables(&text); break; } else if response.status().as_u16() != 404 { // If the response is not 404, raise an error // response.error_for_status(); error!("Error contacting AUR: {}", response.status()); return Vec::new(); } else { continue; } } Err(e) => { error!("Error contacting AUR: {}", e); return Vec::new(); } } } let mut results = Vec::new(); for (key, value) in variables.iter() { match key.as_str() { "url" => { results.push(UpstreamDatum::Homepage(value[0].to_owned())); } "source" => { if value.is_empty() { continue; } let mut value = value[0].to_owned(); if value.contains("${") { for (k, v) in variables.iter() { value = value.replace(format!("${{{}}}", k).as_str(), v.join(" ").as_str()); value = value.replace(format!("${}", k).as_str(), v.join(" ").as_str()); } } let url = match value.split_once("::") { Some((_unique_name, url)) => url, None => value.as_str(), }; let url = url.replace("#branch=", ",branch="); results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(url.as_str()).to_owned(), )); } "_gitroot" => { results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(value[0].as_str()).to_owned(), )); } _ => { debug!("Ignoring variable: {}", key); } } } results } pub struct Aur; impl Aur { pub fn new() -> Self { Self } } impl crate::ThirdPartyRepository for Aur { fn name(&self) -> &'static str { "AUR" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"] } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { Ok(guess_from_aur(name)) } } upstream-ontologist-0.1.37/src/providers/authors.rs000066400000000000000000000032031462717511400225060ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::fs::File; use std::io::BufRead; use std::path::Path; pub fn guess_from_authors( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let mut authors: Vec = Vec::new(); for line in reader.lines().flatten() { let mut m = line.trim().to_string(); if m.is_empty() { continue; } if m.starts_with("arch-tag: ") { continue; } if m.ends_with(':') { continue; } if m.starts_with("$Id") { continue; } if m.starts_with('*') || m.starts_with('-') { m = m[1..].trim().to_string(); } if m.len() < 3 { continue; } if m.ends_with('.') { continue; } if m.contains(" for ") { let parts: Vec<&str> = m.split(" for ").collect(); m = parts[0].to_string(); } if !m.chars().next().unwrap().is_alphabetic() { continue; } if !m.contains('<') && line.as_bytes().starts_with(b"\t") { continue; } if m.contains('<') || m.matches(' ').count() < 5 { authors.push(Person::from(m.as_str())); } } Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }]) } upstream-ontologist-0.1.37/src/providers/autoconf.rs000066400000000000000000000112241462717511400226410ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use url::Url; fn is_email_address(email: &str) -> bool { if email.contains('@') { return true; } if email.contains(" (at) ") { return true; } false } pub fn guess_from_configure( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { if std::path::Path::new(path).is_dir() { return Ok(Vec::new()); } let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.split(b'\n') { if let Ok(line) = line { let split = line.splitn(2, |&c| c == b'=').collect::>(); let (key, value) = if let [key, value] = split.as_slice() { (key, value) } else { continue; }; let key = String::from_utf8(key.to_vec()).expect("Failed to parse UTF-8"); let key = key.trim(); let value = String::from_utf8(value.to_vec()).expect("Failed to parse UTF-8"); let mut value = value.trim(); if key.contains(' ') { continue; } if value.contains('$') { continue; } if value.starts_with('\'') && value.ends_with('\'') { value = &value[1..value.len() - 1]; if value.is_empty() { continue; } } match key { "PACKAGE_NAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_TARNAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_VERSION" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_BUGREPORT" => { let certainty = if value == "BUG-REPORT-ADDRESS" { None } else if is_email_address(value) { // Downgrade the trustworthiness of this field for most // upstreams if it contains an e-mail address. Most // upstreams seem to just set this to some random address, // and then forget about it. Some(Certainty::Possible) } else if value.contains("mailing list") { // Downgrade the trustworthiness of this field if // it contains a mailing list Some(Certainty::Possible) } else { let parsed_url = Url::parse(value).expect("Failed to parse URL"); if parsed_url.path().trim_end_matches('/') != "" { Some(Certainty::Certain) } else { // It seems unlikely that the bug submit URL lives at // the root. Some(Certainty::Possible) } }; if certainty.is_some() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(value.to_string()), certainty, origin: Some(path.into()), }); } } "PACKAGE_URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => { debug!("unknown key: {}", key); } } } } Ok(results) } upstream-ontologist-0.1.37/src/providers/composer_json.rs000066400000000000000000000067721462717511400237170ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::path::Path; pub fn guess_from_composer_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // https://getcomposer.org/doc/04-schema.md let file = std::fs::File::open(path)?; let package: serde_json::Value = serde_json::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); let package = match package.as_object() { Some(package) => package, None => { return Err(ProviderError::Other( "Failed to parse composer.json".to_string(), )) } }; for (field, value) in package { match field.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "type" => { if value != "project" { error!("unexpected composer.json type: {:?}", value); } } "keywords" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords( value .as_array() .unwrap() .iter() .map(|v| v.as_str().unwrap().to_string()) .collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "require" | "require-dev" | "autoload" | "autoload-dev" | "scripts" | "extra" | "config" | "prefer-stable" | "minimum-stability" => { // Do nothing, skip these fields } _ => { error!("Unknown field {} ({:?}) in composer.json", field, value); } } } Ok(upstream_data) } upstream-ontologist-0.1.37/src/providers/debian.rs000066400000000000000000000665741462717511400222670ustar00rootroot00000000000000use crate::{ bug_database_from_issue_url, repo_url_from_merge_request_url, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use lazy_regex::regex_captures; use log::debug; use std::fs::File; use std::io::BufRead; use std::io::Read; use std::path::Path; use url::Url; pub fn guess_from_debian_patch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let net_access = None; let mut upstream_data: Vec = Vec::new(); for line in reader.lines().flatten() { if line.starts_with("Forwarded: ") { let forwarded = match line.split_once(':') { Some((_, url)) => url.trim(), None => { debug!("Malformed Forwarded line in patch {}", path.display()); continue; } }; let forwarded = match Url::parse(forwarded) { Ok(url) => url, Err(e) => { debug!( "Malformed URL in Forwarded line in patch {}: {}", path.display(), e ); continue; } }; if let Some(bug_db) = bug_database_from_issue_url(&forwarded, net_access) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(repo_url) = repo_url_from_merge_request_url(&forwarded, net_access) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(upstream_data) } pub fn metadata_from_itp_bug_body( body: &str, origin: Option, ) -> std::result::Result, ProviderError> { let mut results: Vec = Vec::new(); // Skip first few lines with bug metadata (severity, owner, etc) let mut line_iter = body.split_terminator('\n'); let mut next_line = line_iter.next(); while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } next_line = line_iter.next(); if line.trim().is_empty() { break; } } while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } if !line.is_empty() { break; } next_line = line_iter.next(); } while let Some(mut line) = next_line { line = line.trim_start_matches('*').trim_start(); if line.is_empty() { break; } match line.split_once(':') { Some((key, value)) => { let key = key.trim(); let value = value.trim(); match key { "Package name" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "Version" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Possible), origin: origin.clone(), }); } "Upstream Author" if !value.is_empty() => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from(value)]), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "License" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "Description" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } _ => { debug!("Unknown pseudo-header {} in ITP bug body", key); } } } _ => { debug!("Ignoring non-semi-field line {}", line); } } next_line = line_iter.next(); } let mut rest: Vec = Vec::new(); for line in line_iter { if line.trim() == "-- System Information:" { break; } rest.push(line.to_string()); } results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(rest.join("\n")), certainty: Some(Certainty::Likely), origin: origin.clone(), }); Ok(results) } #[test] fn test_metadata_from_itp_bug_body() { assert_eq!( vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.0.1".to_string()), certainty: Some(Certainty::Possible), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from("Breezy Team ")]), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://github.com/jelmer/setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("GPL".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary("Compile .po files into .mo files".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description("This extension for setuptools compiles gettext .po files\nfound in the source directory into .mo files and installs them.\n".to_string()), certainty: Some(Certainty::Likely), origin: None, }, ], metadata_from_itp_bug_body( r#"Package: wnpp Severity: wishlist Owner: Jelmer Vernooij Debbugs-Cc: debian-devel@lists.debian.org * Package name : setuptools-gettext Version : 0.0.1 Upstream Author : Breezy Team * URL : https://github.com/jelmer/setuptools-gettext * License : GPL Programming Lang: Python Description : Compile .po files into .mo files This extension for setuptools compiles gettext .po files found in the source directory into .mo files and installs them. "#, None ) .unwrap() ); } pub fn guess_from_debian_changelog( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let cl = debian_changelog::ChangeLog::read_path(path).map_err(|e| { ProviderError::ParseError(format!( "Failed to parse changelog {}: {}", path.display(), e )) })?; let first_entry = cl .entries() .next() .ok_or_else(|| ProviderError::ParseError("Empty changelog".to_string()))?; let package = first_entry.package().ok_or_else(|| { ProviderError::ParseError(format!("Changelog {} has no package name", path.display())) })?; let mut ret = Vec::new(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(package.clone()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); if let Some(version) = first_entry.version() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.upstream_version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } #[cfg(feature = "debcargo")] if package.starts_with("rust-") { let debcargo_toml_path = path.parent().unwrap().join("debcargo.toml"); let debcargo_config = debcargo::config::Config::parse(debcargo_toml_path.as_path()) .map_err(|e| { ProviderError::ParseError(format!( "Failed to parse debcargo config {}: {}", path.display(), e )) })?; let semver_suffix = debcargo_config.semver_suffix; let (mut crate_name, _crate_semver_version) = parse_debcargo_source_name(&package, semver_suffix); if crate_name.contains('-') { crate_name = match crate::providers::rust::cargo_translate_dashes(crate_name.as_str()) .map_err(|e| { ProviderError::Other(format!( "Failed to translate dashes in crate name {}: {}", crate_name, e )) })? { Some(name) => name, None => { return Err(ProviderError::Other(format!( "Failed to translate dashes in crate name {}", crate_name ))) } }; } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("crates.io".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate(crate_name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(itp) = find_itp(first_entry.change_lines().collect::>().as_slice()) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::DebianITP(itp), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.extend(guess_from_itp_bug(itp)?); } Ok(ret) } pub fn find_itp(changes: &[String]) -> Option { for line in changes { if let Some((_, itp)) = regex_captures!(r"\* Initial release. \(?Closes: #(\d+)\)?", line) { return Some(itp.parse().unwrap()); } } None } pub fn guess_from_itp_bug( bugno: i32, ) -> std::result::Result, ProviderError> { let debbugs = debbugs::blocking::Debbugs::default(); let log = debbugs.get_bug_log(bugno).map_err(|e| { ProviderError::ParseError(format!("Failed to get bug log for bug {}: {}", bugno, e)) })?; metadata_from_itp_bug_body( log[0].body.as_str(), Some(Origin::Other(format!("Debian bug #{}", bugno))), ) } /// Parse a debcargo source name and return crate. /// /// # Arguments /// * `source_name` - Source package name /// * `semver_suffix` - Whether semver_suffix is enabled /// /// # Returns /// tuple with crate name and optional semver pub fn parse_debcargo_source_name( source_name: &str, semver_suffix: bool, ) -> (String, Option) { let mut crate_name = source_name.strip_prefix("rust-").unwrap(); match crate_name.rsplitn(2, '-').collect::>().as_slice() { [semver, new_crate_name] if semver_suffix => { crate_name = new_crate_name; (crate_name.to_string(), Some(semver.to_string())) } _ => (crate_name.to_string(), None), } } pub fn guess_from_debian_rules( path: &Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let f = std::fs::File::open(path)?; let mf = makefile_lossless::Makefile::read_relaxed(f) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/rules: {}", e)))?; let mut ret = vec![]; if let Some(variable) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_GIT")) { let certainty = Some(Certainty::Likely); let datum = UpstreamDatum::Repository(variable.raw_value().unwrap()); ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(deb_upstream_url) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_URL")) { let certainty = Some(Certainty::Likely); let datum = UpstreamDatum::Download(deb_upstream_url.raw_value().unwrap()); ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } Ok(ret) } pub fn guess_from_debian_control( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use std::str::FromStr; let control = debian_control::Control::from_str(&std::fs::read_to_string(path)?) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/control: {}", e)))?; let source = control.source().unwrap(); let is_native = debian_is_native(path.parent().unwrap()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse debian/source/format: {}", e)) })?; if let Some(homepage) = source.homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(go_import_path) = source.get("XS-Go-Import-Path") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://{}", go_import_path)), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } if is_native == Some(true) { if let Some(vcs_git) = source.vcs_git() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(vcs_git), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(vcs_browser) = source.vcs_browser() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(vcs_browser), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } let binaries = control.binaries().collect::>(); let certainty = if binaries.len() == 1 && is_native == Some(true) { // Debian native package with only one binary package Certainty::Certain } else if binaries.len() > 1 && is_native == Some(true) { Certainty::Possible } else if binaries.len() == 1 && is_native == Some(false) { // Debian non-native package with only one binary package, so description is likely to be // good but might be Debian-specific Certainty::Confident } else { Certainty::Likely }; for binary in binaries { if let Some(description) = binary.description() { let lines = description.split('\n').collect::>(); let mut summary = lines[0].to_string(); let mut description_lines = &lines[1..]; if !description_lines.is_empty() && description_lines .last() .unwrap() .starts_with("This package contains") { summary = summary .split(" - ") .next() .unwrap_or(summary.as_str()) .to_string(); description_lines = description_lines.split_last().unwrap().1; } if !summary.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(certainty), origin: Some(path.into()), }); } if !description_lines.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description_lines.join("\n")), certainty: Some(certainty), origin: Some(path.into()), }); } } } Ok(ret) } pub fn guess_from_debian_copyright( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; let text = &std::fs::read_to_string(path)?; let mut urls = vec![]; match debian_copyright::Copyright::from_str_relaxed(text) { Ok((c, _)) => { let header = c.header().unwrap(); if let Some(upstream_name) = header.upstream_name() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(upstream_name.to_string()), certainty: Some(if upstream_name.contains(' ') { Certainty::Confident } else { Certainty::Certain }), origin: Some(path.into()), }); } if let Some(upstream_contact) = header.upstream_contact() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(upstream_contact), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(source) = header.source() { if source.contains(' ') { urls.extend( source .split(|c| c == ' ' || c == '\n' || c == ',') .filter(|s| !s.is_empty()) .map(|s| s.to_string()), ); } else { urls.push(source.clone()); } for (m, _, _) in lazy_regex::regex_captures!(r"(http|https)://([^ ,]+)", source.as_str()) { urls.push(m.to_string()); } } if let Some(upstream_bugs) = header.get("X-Upstream-Bugs") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(upstream_bugs), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(source_downloaded_from) = header.get("X-Source-Downloaded-From") { if let Ok(url) = source_downloaded_from.parse::() { urls.push(url.to_string()); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(source_downloaded_from), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } let referenced_licenses = c .iter_licenses() .filter_map(|l| l.name()) .collect::>(); if referenced_licenses.len() == 1 { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(referenced_licenses.into_iter().next().unwrap()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Err(debian_copyright::Error::IoError(e)) => { unreachable!("IO error: {}", e); } Err(debian_copyright::Error::ParseError(e)) => { return Err(ProviderError::ParseError(e.to_string())); } Err(debian_copyright::Error::NotMachineReadable) => { for line in text.lines() { if let Some(name) = line.strip_prefix("Upstream-Name: ") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Possible), origin: Some(Origin::Path(path.into())), }); } if let Some(url) = lazy_regex::regex_find!(r".* was downloaded from ([^\s]+)", line) { urls.push(url.to_string()); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } } for url in urls.into_iter() { if let Ok(url) = url.parse() { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } ret.extend(crate::metadata_from_url( url.as_str(), &Origin::Path(path.into()), )); } Ok(ret) } pub fn guess_from_debian_watch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use debian_changelog::ChangeLog; use debian_watch::{Mode, WatchFile}; let get_package_name = || -> String { let text = std::fs::read_to_string(path.parent().unwrap().join("changelog")).unwrap(); let cl: ChangeLog = text.parse().unwrap(); let first_entry = cl.entries().next().unwrap(); first_entry.package().unwrap() }; let w: WatchFile = std::fs::read_to_string(path)? .parse() .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/watch: {}", e)))?; let origin = Origin::Path(path.into()); for entry in w.entries() { let url = entry.format_url(get_package_name); match entry.mode().unwrap_or_default() { Mode::Git => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::Svn => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::LWP => { if url.scheme() == "http" || url.scheme() == "https" { if let Some(repo) = crate::vcs::guess_repo_from_url(&url, None) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } }; ret.extend(crate::metadata_from_url(url.as_str(), &origin)); } Ok(ret) } #[cfg(test)] mod watch_tests { use super::*; #[test] fn test_empty() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#" # Blah "#, ) .unwrap(); assert!(guess_from_debian_watch(&path, &GuesserSettings::default()) .unwrap() .is_empty()); } #[test] fn test_simple() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#"version=4 https://github.com/jelmer/dulwich/tags/dulwich-(.*).tar.gz "#, ) .unwrap(); assert_eq!( vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository("https://github.com/jelmer/dulwich".to_string()), certainty: Some(Certainty::Confident), origin: Some(path.clone().into()) }], guess_from_debian_watch(&path, &GuesserSettings::default()).unwrap() ); } } pub fn debian_is_native(path: &Path) -> std::io::Result> { let format_file_path = path.join("source/format"); match File::open(format_file_path) { Ok(mut file) => { let mut content = String::new(); file.read_to_string(&mut content)?; return Ok(Some(content.trim() == "3.0 (native)")); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } let changelog_file = path.join("changelog"); match File::open(changelog_file) { Ok(mut file) => { let cl = debian_changelog::ChangeLog::read(&mut file) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; let first_entry = cl.entries().next().unwrap(); let version = first_entry.version().unwrap(); return Ok(Some(version.debian_revision.is_none())); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } Ok(None) } upstream-ontologist-0.1.37/src/providers/doap.rs000066400000000000000000000244071462717511400217550ustar00rootroot00000000000000//! See https://github.com/ewilderj/doap use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::fs::File; use std::path::Path; pub fn guess_from_doap( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let doc = Element::parse(file).expect("Failed to parse XML"); let mut root = &doc; let mut results: Vec = Vec::new(); const DOAP_NAMESPACE: &str = "http://usefulinc.com/ns/doap#"; const RDF_NAMESPACE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; const SCHEMA_NAMESPACE: &str = "https://schema.org/"; if root.name == "RDF" && root.namespace.as_deref() == Some(RDF_NAMESPACE) { for child in root.children.iter() { if let Some(element) = child.as_element() { root = element; break; } } } if root.name != "Project" || root.namespace.as_deref() != Some(DOAP_NAMESPACE) { return Err(ProviderError::ParseError(format!( "Doap file does not have DOAP project as root, but {}", root.name ))); } fn extract_url(el: &Element) -> Option<&str> { el.attributes.get("resource").map(|url| url.as_str()) } fn extract_lang(el: &Element) -> Option<&str> { el.attributes.get("lang").map(|lang| lang.as_str()) } let mut screenshots: Vec = Vec::new(); let mut maintainers: Vec = Vec::new(); for child in &root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; match (child.namespace.as_deref(), child.name.as_str()) { (Some(DOAP_NAMESPACE), "name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortname") | (Some(DOAP_NAMESPACE), "short-name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "bug-database") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "homepage") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "download-page") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortdesc") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "description") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "license") => { // TODO: Handle license } (Some(DOAP_NAMESPACE), "repository") => { for repo in &child.children { let repo = if let Some(element) = repo.as_element() { element } else { continue; }; match repo.name.as_str() { "SVNRepository" | "GitRepository" => { if let Some(repo_location) = repo.get_child("location") { if let Some(repo_url) = extract_url(repo_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(web_location) = repo.get_child("browse") { if let Some(web_url) = extract_url(web_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } _ => (), } } } (Some(DOAP_NAMESPACE), "category") | (Some(DOAP_NAMESPACE), "programming-language") | (Some(DOAP_NAMESPACE), "os") | (Some(DOAP_NAMESPACE), "implements") | (Some(SCHEMA_NAMESPACE), "logo") | (Some(DOAP_NAMESPACE), "platform") => { // TODO: Handle other tags } (Some(SCHEMA_NAMESPACE), "screenshot") | (Some(DOAP_NAMESPACE), "screenshots") => { if let Some(url) = extract_url(child) { screenshots.push(url.to_string()); } } (Some(DOAP_NAMESPACE), "wiki") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Wiki(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "maintainer") => { for person in &child.children { let person = if let Some(element) = person.as_element() { element } else { continue; }; if person.name != "Person" { continue; } let name = if let Some(name_tag) = person.get_child("name") { name_tag.get_text().clone() } else { None }; let email = if let Some(email_tag) = person.get_child("mbox") { email_tag.get_text().as_ref().cloned() } else { None }; let url = if let Some(email_tag) = person.get_child("mbox") { extract_url(email_tag).map(|url| url.to_string()) } else { None }; maintainers.push(Person { name: name.map(|n| n.to_string()), email: email.map(|n| n.to_string()), url, }); } } (Some(DOAP_NAMESPACE), "mailing-list") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::MailingList(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "release") => {} _ => { error!("Unknown tag {} in DOAP file", child.name); } } } if maintainers.len() == 1 { let maintainer = maintainers.remove(0); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { for maintainer in maintainers { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.1.37/src/providers/git.rs000066400000000000000000000027701462717511400216140ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::path::Path; #[cfg(feature = "git-config")] pub fn guess_from_git_config( path: &Path, settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let config_file = gix_config::File::from_path_no_includes(path.to_path_buf(), gix_config::Source::Local) .map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results = Vec::new(); // Check if there's a remote named "upstream" if let Some(remote_upstream) = config_file.string_by_key("remote.upstream.url") { let url = remote_upstream.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } // Check if there's a remote named "origin" if !settings.trust_package { if let Some(remote_origin) = config_file.string_by_key("remote.origin.url") { let url = remote_origin.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(results) } upstream-ontologist-0.1.37/src/providers/go.rs000066400000000000000000000023301462717511400214260ustar00rootroot00000000000000//! See https://golang.org/doc/modules/gomod-ref use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn guess_from_go_mod( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path).expect("Failed to open file"); let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.lines() { if let Ok(line) = line { if line.starts_with("module ") { let modname = match line.trim().split_once(' ') { Some((_, modname)) => modname, None => { debug!("Failed to parse module name from line: {}", line); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(modname.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } Ok(results) } upstream-ontologist-0.1.37/src/providers/gobo.rs000066400000000000000000000111501462717511400217470ustar00rootroot00000000000000use crate::UpstreamDatum; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Contents { name: String, path: String, sha: String, size: u64, url: url::Url, html_url: url::Url, git_url: url::Url, download_url: Option, r#type: String, content: Option, encoding: Option, _links: Links, } #[allow(dead_code)] #[derive(serde::Deserialize)] struct Links { #[serde(rename = "self")] self_: String, git: url::Url, html: url::Url, } pub fn guess_from_gobo(package: &str) -> Result, crate::ProviderError> { let packages_url = "https://api.github.com/repos/gobolinux/Recipes/contents" .parse() .unwrap(); let contents: Vec = serde_json::from_value(crate::load_json_url(&packages_url, None)?).unwrap(); let package = match contents .iter() .find(|p| p.name.to_ascii_lowercase() == package.to_ascii_lowercase()) { Some(p) => p, None => { log::debug!("No gobo package named {}", package); return Ok(Vec::new()); } }; let versions: Vec = serde_json::from_value(crate::load_json_url(&package.url, None)?).unwrap(); let last_version = if let Some(last_version) = versions.last() { &last_version.name } else { log::debug!("No versions for gobo package {}", package.name); return Ok(Vec::new()); }; let base_url: url::Url = format!( "https://raw.githubusercontent.com/gobolinux/Recipes/master/{}/{}/", package.name, last_version ) .parse() .unwrap(); let client = reqwest::blocking::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let mut result = Vec::new(); let recipe_url = base_url.join("Recipe").unwrap(); match client.get(recipe_url.as_ref()).send() { Ok(response) => { let text = response.text().unwrap(); for line in text.lines() { if let Some(url) = line.strip_prefix("url=") { result.push(UpstreamDatum::Homepage(url.to_string())); } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No recipe for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. rate limiting?", recipe_url, e); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } let description_url = base_url.join("Resources/Description").unwrap(); match client.get(description_url.as_ref()).send() { Ok(response) => { for line in response.text().unwrap().lines() { if let Some((_, key, value)) = lazy_regex::regex_captures!("\\[(.*)\\] (.*)", line) { match key { "Name" => result.push(UpstreamDatum::Name(value.to_string())), "Summary" => result.push(UpstreamDatum::Summary(value.to_string())), "License" => result.push(UpstreamDatum::License(value.to_string())), "Description" => result.push(UpstreamDatum::Description(value.to_string())), "Homepage" => result.push(UpstreamDatum::Homepage(value.to_string())), _ => log::warn!("Unknown field {} in gobo Description", key), } } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No description for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. rate limiting?", description_url, e); return Ok(Vec::new()); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } Ok(result) } pub struct Gobo; impl Gobo { pub fn new() -> Self { Self } } impl crate::ThirdPartyRepository for Gobo { fn name(&self) -> &'static str { "gobo" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"][..] } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { guess_from_gobo(name) } } upstream-ontologist-0.1.37/src/providers/haskell.rs000066400000000000000000000206121462717511400224470ustar00rootroot00000000000000use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn parse_cabal_lines( lines: impl Iterator, ) -> Vec<(Option, String, String)> { let mut ret = Vec::new(); let mut section = None; for line in lines { if line.trim_start().starts_with("--") { // Comment continue; } // Empty line if line.trim().is_empty() { section = None; continue; } let (field, value) = match line.split_once(':') { Some((field, value)) => (field.to_lowercase(), value.trim()), None => { if !line.starts_with(' ') { section = Some(line.trim().to_lowercase()); } else { log::debug!("Failed to parse line: {}", line); } continue; } }; if section.is_none() && !field.starts_with(' ') { ret.push((None, field.trim().to_string(), value.to_owned())); } else if field.starts_with(' ') { ret.push(( section.clone(), field.trim().to_lowercase(), value.to_owned(), )); } else { log::debug!("Invalid field {}", field); } } ret } #[cfg(test)] mod parse_tests { use super::*; #[test] fn test_parse_cabal_lines() { let lines = r#"Name: foo Version: 0.0 License: BSD3 Author: John Doe Maintainer: John Doe Cabal-Version: >= 1.10 Homepage: https://example.com Executable program1 Build-Depends: HUnit Main-Is: Main.hs source-repository head type: git location: https://github.com/example/blah "#; let parsed = parse_cabal_lines(lines.lines().map(|s| s.to_owned())); assert_eq!( parsed, vec![ (None, "name".to_owned(), "foo".to_owned()), (None, "version".to_owned(), "0.0".to_owned()), (None, "license".to_owned(), "BSD3".to_owned()), (None, "author".to_owned(), "John Doe".to_owned()), ( None, "maintainer".to_owned(), "John Doe ".to_owned() ), (None, "cabal-version".to_owned(), ">= 1.10".to_owned()), ( None, "homepage".to_owned(), "https://example.com".to_owned() ), ( Some("executable program1".to_owned()), "build-depends".to_owned(), "HUnit".to_owned() ), ( Some("executable program1".to_owned()), "main-is".to_owned(), "Main.hs".to_owned() ), ( Some("source-repository head".to_owned()), "type".to_owned(), "git".to_owned() ), ( Some("source-repository head".to_owned()), "location".to_owned(), "https://github.com/example/blah".to_owned() ) ] ); } } pub fn guess_from_cabal_lines( lines: impl Iterator, ) -> std::result::Result, ProviderError> { let mut repo_url = None; let mut repo_branch = None; let mut repo_subpath = None; let mut results = Vec::new(); for (section, key, value) in parse_cabal_lines(lines) { match (section.as_deref(), key.as_str()) { (None, "homepage") => results.push(( UpstreamDatum::Homepage(value.to_owned()), Certainty::Certain, )), (None, "bug-reports") => results.push(( UpstreamDatum::BugDatabase(value.to_owned()), Certainty::Certain, )), (None, "name") => { results.push((UpstreamDatum::Name(value.to_owned()), Certainty::Certain)) } (None, "maintainer") => results.push(( UpstreamDatum::Maintainer(Person::from(value.as_str())), Certainty::Certain, )), (None, "copyright") => results.push(( UpstreamDatum::Copyright(value.to_owned()), Certainty::Certain, )), (None, "license") => { results.push((UpstreamDatum::License(value.to_owned()), Certainty::Certain)) } (None, "author") => results.push(( UpstreamDatum::Author(vec![Person::from(value.as_str())]), Certainty::Certain, )), (None, "synopsis") => { results.push((UpstreamDatum::Summary(value.to_owned()), Certainty::Certain)) } (None, "cabal-version") => {} (None, "build-depends") => {} (None, "build-type") => {} (Some("source-repository head"), "location") => repo_url = Some(value.to_owned()), (Some("source-repository head"), "branch") => repo_branch = Some(value.to_owned()), (Some("source-repository head"), "subdir") => repo_subpath = Some(value.to_owned()), (s, _) if s.is_some() && s.unwrap().starts_with("executable ") => {} _ => { log::debug!("Unknown field {:?} in section {:?}", key, section); } } } if let Some(repo_url) = repo_url { results.push(( UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url(&crate::vcs::VcsLocation { url: repo_url.parse().unwrap(), branch: repo_branch, subpath: repo_subpath, })), Certainty::Certain, )); } Ok(results .into_iter() .map(|(datum, certainty)| UpstreamDatumWithMetadata { datum, certainty: Some(certainty), origin: None, }) .collect()) } pub fn guess_from_cabal( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } pub fn guess_from_hackage( package: &str, ) -> std::result::Result, ProviderError> { let client = reqwest::blocking::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let url: url::Url = format!( "https://hackage.haskell.org/package/{}/{}.cabal", package, package ) .parse() .unwrap(); match client.get(url).send() { Ok(response) => { let reader = BufReader::new(response); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } Err(e) => match e.status() { Some(reqwest::StatusCode::NOT_FOUND) => { log::warn!("Package {} not found on Hackage", package); Ok(Vec::new()) } _ => { log::warn!("Failed to fetch package {} from Hackage: {}", package, e); Err(ProviderError::Other(format!( "Failed to fetch package {} from Hackage: {}", package, e ))) } }, } } pub struct Hackage; impl Hackage { pub fn new() -> Self { Self } } impl crate::ThirdPartyRepository for Hackage { fn name(&self) -> &'static str { "Hackage" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &[ "Homepage", "Name", "Repository", "Maintainer", "Copyright", "License", "Bug-Database", ][..] } fn guess_metadata(&self, name: &str) -> Result, ProviderError> { Ok(guess_from_hackage(name)? .into_iter() .map(|v| v.datum) .collect()) } } upstream-ontologist-0.1.37/src/providers/launchpad.rs000066400000000000000000000157101462717511400227660ustar00rootroot00000000000000use crate::{load_json_url, UpstreamDatum}; use log::error; #[cfg(feature = "launchpad")] pub fn guess_from_launchpad( package: &str, distribution: Option<&str>, suite: Option<&str>, ) -> Option> { use distro_info::UbuntuDistroInfo; use distro_info::DistroInfo; let distribution = distribution.unwrap_or("ubuntu"); let suite = suite.map_or_else( || { if distribution == "ubuntu" { let ubuntu = UbuntuDistroInfo::new().unwrap(); Some( ubuntu .ubuntu_devel(chrono::Utc::now().date_naive()) .last()? .codename() .clone(), ) } else if distribution == "debian" { Some("sid".to_string()) } else { None } }, |x| Some(x.to_string()), ); let suite = suite?; let sourcepackage_url = format!( "https://api.launchpad.net/devel/{}/{}/+source/{}", distribution, suite, package ); let sourcepackage_data = load_json_url(&url::Url::parse(sourcepackage_url.as_str()).unwrap(), None).unwrap(); if let Some(productseries_url) = sourcepackage_data.get("productseries_link") { let productseries_data = load_json_url( &url::Url::parse(productseries_url.as_str().unwrap()).unwrap(), None, ) .unwrap(); let project_link = productseries_data.get("project_link").cloned(); if let Some(project_link) = project_link { let project_data = load_json_url( &url::Url::parse(project_link.as_str().unwrap()).unwrap(), None, ) .unwrap(); let mut results = Vec::new(); if let Some(homepage_url) = project_data.get("homepage_url") { results.push(UpstreamDatum::Homepage( homepage_url.as_str().unwrap().to_string(), )); } if let Some(display_name) = project_data.get("display_name") { results.push(UpstreamDatum::Name( display_name.as_str().unwrap().to_string(), )); } if let Some(sourceforge_project) = project_data.get("sourceforge_project") { results.push(UpstreamDatum::SourceForgeProject( sourceforge_project.as_str().unwrap().to_string(), )); } if let Some(wiki_url) = project_data.get("wiki_url") { results.push(UpstreamDatum::Wiki(wiki_url.as_str().unwrap().to_string())); } if let Some(summary) = project_data.get("summary") { results.push(UpstreamDatum::Summary( summary.as_str().unwrap().to_string(), )); } if let Some(download_url) = project_data.get("download_url") { results.push(UpstreamDatum::Download( download_url.as_str().unwrap().to_string(), )); } if let Some(vcs) = project_data.get("vcs") { if vcs == "Bazaar" { if let Some(branch_link) = productseries_data.get("branch_link") { let code_import_data = load_json_url( &url::Url::parse( format!("{}/+code-import", branch_link.as_str().unwrap()).as_str(), ) .unwrap(), None, ) .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_string())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { let branch_data = load_json_url( &url::Url::parse( productseries_data.as_object().unwrap()["branch_link"] .as_str() .unwrap(), ) .unwrap(), None, ) .unwrap(); results.push(UpstreamDatum::Repository( branch_data.as_object().unwrap()["bzr_identity"] .as_str() .unwrap() .to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( branch_data.as_object().unwrap()["web_link"] .as_str() .unwrap() .to_owned(), )); } } } else if vcs == "Git" { let repo_link = format!( "https://api.launchpad.net/devel/+git?ws.op=getByPath&path={}", project_data["name"] ); let repo_data = load_json_url(&url::Url::parse(repo_link.as_str()).unwrap(), None).unwrap(); if let Some(code_import_link) = repo_data.get("code_import_link") { let code_import_data = load_json_url( &url::Url::parse(code_import_link.as_str().unwrap()).unwrap(), None, ) .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_owned())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { results.push(UpstreamDatum::Repository( repo_data["git_https_url"].as_str().unwrap().to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( repo_data["web_link"].as_str().unwrap().to_owned(), )); } } } else { error!("unknown vcs: {:?}", vcs); } } return Some(results); } } None } upstream-ontologist-0.1.37/src/providers/maven.rs000066400000000000000000000153541462717511400221410ustar00rootroot00000000000000//! Documentation: https://maven.apache.org/pom.html use crate::{ vcs, Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::path::Path; pub fn guess_from_pom_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let file = std::io::BufReader::new(file); let root = Element::parse(file) .map_err(|e| ProviderError::ParseError(format!("Unable to parse package.xml: {}", e)))?; let mut result = Vec::new(); if root.name == "project" { if let Some(name_tag) = root.get_child("name") { if let Some(name) = name_tag.get_text() { if !name.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } else if let Some(artifact_id_tag) = root.get_child("artifactId") { if let Some(artifact_id) = artifact_id_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(artifact_id.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } if let Some(description_tag) = root.get_child("description") { if let Some(description) = description_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version_tag) = root.get_child("version") { if let Some(version) = version_tag.get_text() { if !version.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(licenses_tag) = root.get_child("licenses") { for license_tag in licenses_tag .children .iter() .filter(|c| c.as_element().map_or(false, |e| e.name == "license")) { if let Some(license_tag) = license_tag.as_element() { if let Some(name_tag) = license_tag.get_child("name") { if let Some(license_name) = name_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license_name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } } for scm_tag in root .children .iter() .filter(|c| c.as_element().map_or(false, |e| e.name == "scm")) { if let Some(scm_tag) = scm_tag.as_element() { if let Some(url_tag) = scm_tag.get_child("url") { if let Some(url) = url_tag.get_text() { if url.starts_with("scm:") && url.matches(':').count() >= 3 { let url_parts: Vec<&str> = url.splitn(3, ':').collect(); let browse_url = url_parts[2]; if vcs::plausible_browse_url(browse_url) { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } else { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(connection_tag) = scm_tag.get_child("connection") { if let Some(connection) = connection_tag.get_text() { let connection_parts: Vec<&str> = connection.splitn(3, ':').collect(); if connection_parts.len() == 3 && connection_parts[0] == "scm" { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(connection_parts[2].to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { warn!("Invalid format for SCM connection: {}", connection); } } } } } for issue_mgmt_tag in root.children.iter().filter(|c| { c.as_element() .map_or(false, |e| e.name == "issueManagement") }) { if let Some(issue_mgmt_tag) = issue_mgmt_tag.as_element() { if let Some(url_tag) = issue_mgmt_tag.get_child("url") { if let Some(url) = url_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } if let Some(url_tag) = root.get_child("url") { if let Some(url) = url_tag.get_text() { if !url.starts_with("scm:") { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } Ok(result) } upstream-ontologist-0.1.37/src/providers/meson.rs000066400000000000000000000036151462717511400221510ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::process::Command; pub fn guess_from_meson( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // TODO(jelmer): consider looking for a meson build directory to call "meson // introspect" on // TODO(jelmer): mesonbuild is python; consider using its internal functions to parse // meson.build? let mut command = Command::new("meson"); command.arg("introspect").arg("--projectinfo").arg(path); let output = command.output().map_err(|_| { ProviderError::Other("meson not installed; skipping meson.build introspection".to_string()) })?; if !output.status.success() { return Err(ProviderError::Other(format!( "meson failed to run; exited with code {}", output.status.code().unwrap() ))); } let project_info: serde_json::Value = serde_json::from_slice(&output.stdout) .map_err(|e| ProviderError::Other(format!("Failed to parse meson project info: {}", e)))?; let mut results = Vec::new(); if let Some(descriptive_name) = project_info.get("descriptive_name") { if let Some(name) = descriptive_name.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = project_info.get("version") { if let Some(version_str) = version.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version_str.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.1.37/src/providers/metadata_json.rs000066400000000000000000000140061462717511400236350ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::io::Read; use std::path::Path; pub fn guess_from_metadata_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = match serde_json::from_str(&contents) { Ok(data) => data, Err(e) => { return Err(ProviderError::ParseError(e.to_string())); } }; let mut upstream_data: Vec = Vec::new(); for (field, value) in data.iter() { match field.as_str() { "description" => { if let Some(description) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "name" => { if let Some(name) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "version" => { if let Some(version) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "url" => { if let Some(url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "license" => { if let Some(license) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "source" => { if let Some(repository) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "summary" => { if let Some(summary) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "issues_url" => { if let Some(issues_url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issues_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "project_page" => { if let Some(project_page) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(project_page.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "author" => { if let Some(author_value) = value.as_str() { let author = Person::from(author_value); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![author]), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } else if let Some(author_values) = value.as_array() { let authors: Vec = match author_values .iter() .map(|v| { Ok::(Person::from( v.as_str().ok_or("Author value is not a string")?, )) }) .collect::, _>>() { Ok(authors) => authors, Err(e) => { warn!("Error parsing author array: {}", e); continue; } }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "operatingsystem_support" | "requirements" | "dependencies" => { // Skip these fields } _ => { warn!("Unknown field {} ({:?}) in metadata.json", field, value); } } } Ok(upstream_data) } upstream-ontologist-0.1.37/src/providers/metainfo.rs000066400000000000000000000060021462717511400226230ustar00rootroot00000000000000//! See https://www.freedesktop.org/software/appstream/docs/chap-Metadata.html use crate::{Certainty, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; pub fn guess_from_metainfo( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path)?; let root = Element::parse(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results: Vec = Vec::new(); for child in root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; if child.name == "id" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "project_license" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "url" { if let Some(urltype) = child.attributes.get("type") { if urltype == "homepage" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if urltype == "bugtracker" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if child.name == "description" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "summary" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "name" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.1.37/src/providers/mod.rs000066400000000000000000000100211462717511400215740ustar00rootroot00000000000000pub mod arch; pub mod authors; pub mod autoconf; pub mod composer_json; pub mod debian; pub mod doap; pub mod git; pub mod go; pub mod gobo; pub mod haskell; pub mod launchpad; pub mod maven; pub mod meson; pub mod metadata_json; pub mod metainfo; pub mod nuspec; #[cfg(feature = "opam")] pub mod ocaml; pub mod package_json; pub mod package_xml; pub mod package_yaml; pub mod perl; pub mod php; pub mod pubspec; pub mod python; pub mod r; pub mod repology; pub mod ruby; #[cfg(feature = "cargo")] pub mod rust; pub mod security_md; pub mod waf; use crate::{Certainty, GuesserSettings, UpstreamDatum, UpstreamDatumWithMetadata}; use std::io::BufRead; pub fn guess_from_install( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let mut ret = Vec::new(); let f = std::fs::File::open(path)?; let f = std::io::BufReader::new(f); let mut urls: Vec = Vec::new(); let mut lines = f.lines(); while let Some(oline) = lines.next() { let oline = oline?; let line = oline.trim(); let mut cmdline = line.trim().trim_start_matches('$').trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") { while cmdline.ends_with('\\') { cmdline.push_str(lines.next().unwrap()?.trim()); cmdline = cmdline.trim().to_string(); } if let Some(url) = if cmdline.starts_with("git clone ") { crate::vcs_command::url_from_git_clone_command(cmdline.as_bytes()) } else if cmdline.starts_with("fossil clone ") { crate::vcs_command::url_from_fossil_clone_command(cmdline.as_bytes()) } else { None } { urls.push(url); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").find_iter(line) { if let Some(url) = crate::vcs_command::url_from_git_clone_command(m.as_str().as_bytes()) { urls.push(url); } } let project_re = "([^/]+)/([^/?.()\"#>\\s]*[^-/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", project_re).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = regex::Regex::new(format!("https://github.com/{}", project_re).as_str()) .unwrap() .captures(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some((url, _)) = lazy_regex::regex_captures!("git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://([^]/]+)/([^]\\s()\"#]+)").find_iter(line) { let url: url::Url = m.as_str().trim_end_matches('.').trim().parse().unwrap(); if crate::vcs::is_gitlab_site(url.host_str().unwrap(), None) { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } } Ok(ret) } upstream-ontologist-0.1.37/src/providers/nuspec.rs000066400000000000000000000122561462717511400223260ustar00rootroot00000000000000use crate::xmlparse_simplify_namespaces; use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::path::Path; // Documentation: https://docs.microsoft.com/en-us/nuget/reference/nuspec pub fn guess_from_nuspec( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { const NAMESPACES: &[&str] = &["http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd"]; // XML parsing and other logic let root = match xmlparse_simplify_namespaces(path, NAMESPACES) { Some(root) => root, None => { return Err(crate::ProviderError::ParseError( "Unable to parse nuspec".to_string(), )); } }; assert_eq!(root.name, "package", "root tag is {}", root.name); let metadata = root.get_child("metadata"); if metadata.is_none() { return Err(ProviderError::ParseError( "Unable to find metadata tag".to_string(), )); } let metadata = metadata.unwrap(); let mut result = Vec::new(); if let Some(version_tag) = metadata.get_child("version") { if let Some(version) = version_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(description_tag) = metadata.get_child("description") { if let Some(description) = description_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(authors_tag) = metadata.get_child("authors") { if let Some(authors) = authors_tag.get_text() { let authors = authors.split(',').map(Person::from).collect(); result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(project_url_tag) = metadata.get_child("projectUrl") { if let Some(project_url) = project_url_tag.get_text() { let repo_url = crate::vcs::guess_repo_from_url(&url::Url::parse(&project_url).unwrap(), None); if let Some(repo_url) = repo_url { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(project_url.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(license_tag) = metadata.get_child("license") { if let Some(license) = license_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(copyright_tag) = metadata.get_child("copyright") { if let Some(copyright) = copyright_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(copyright.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(title_tag) = metadata.get_child("title") { if let Some(title) = title_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(title.into_owned()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } if let Some(summary_tag) = metadata.get_child("summary") { if let Some(summary) = summary_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.into_owned()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } if let Some(repository_tag) = metadata.get_child("repository") { if let Some(repo_url) = repository_tag.attributes.get("url") { let branch = repository_tag.attributes.get("branch"); result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url( &crate::vcs::VcsLocation { url: repo_url.parse().unwrap(), branch: branch.cloned(), subpath: None, }, )), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(result) } upstream-ontologist-0.1.37/src/providers/ocaml.rs000066400000000000000000000212651462717511400221240ustar00rootroot00000000000000//! Documentation: https://opam.ocaml.org/doc/Manual.html#Package-definitions use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::warn; use opam_file_rs::value::{OpamFileItem, OpamFileSection, ValueKind}; use std::fs::File; use std::io::Read; use std::path::Path; pub fn guess_from_opam( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let mut f = File::open(path)?; let mut contents = String::new(); f.read_to_string(&mut contents)?; let opam = opam_file_rs::parse(contents.as_str()) .map_err(|e| ProviderError::ParseError(format!("Failed to parse OPAM file: {:?}", e)))?; let mut results: Vec = Vec::new(); fn find_item<'a>(section: &'a OpamFileSection, name: &str) -> Option<&'a OpamFileItem> { for child in section.section_item.iter() { match child { OpamFileItem::Variable(_, n, _) if n == name => return Some(child), _ => (), } } None } for entry in opam.file_contents { match entry { OpamFileItem::Variable(_, name, value) if name == "maintainer" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for maintainer in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person::from(value.as_str())), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "license" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for license in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "homepage" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for homepage in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Section(_, section) if section.section_name.as_deref() == Some("dev-repo") => { match find_item(§ion, "repository") { Some(OpamFileItem::Variable(_, _, ref value)) => { let value = match value.kind { ValueKind::String(ref s) => s, _ => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(value.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Some(o) => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", o); continue; } None => { warn!("Missing repository for dev-repo in OPAM file"); continue; } } } OpamFileItem::Variable(_, name, value) if name == "bug-reports" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for bug-reports in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "synopsis" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for synopsis in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "description" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for description in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "doc" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for doc in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "version" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for version in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "authors" => { let value = match value.kind { ValueKind::String(s) => vec![Person::from(s.as_str())], ValueKind::List(ref l) => l .iter() .filter_map(|v| match v.kind { ValueKind::String(ref s) => Some(Person::from(s.as_str())), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", &value); None } }) .collect(), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, _) => { warn!("Unexpected variable in OPAM file: {}", name); } OpamFileItem::Section(_, section) => { warn!("Unexpected section in OPAM file: {:?}", section); } } } Ok(results) } upstream-ontologist-0.1.37/src/providers/package_json.rs000066400000000000000000000236671462717511400234650ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; use url::Url; pub fn guess_from_package_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // see https://docs.npmjs.com/cli/v7/configuring-npm/package-json let file = std::fs::File::open(path)?; let package: serde_json::Value = serde_json::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); let package = match package { serde_json::Value::Object(package) => package, _ => { return Err(ProviderError::ParseError( "package.json is not an object".to_string(), )); } }; for (field, value) in package { match field.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "demo" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Demo(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "repository" => { let repo_url = if let Some(repo_url) = value.as_str() { Some(repo_url) } else if let Some(repo) = value.as_object() { if let Some(repo_url) = repo.get("url") { repo_url.as_str() } else { None } } else { None }; if let Some(repo_url) = repo_url { match Url::parse(repo_url) { Ok(url) if url.scheme() == "github" => { // Some people seem to default to github. :( let repo_url = format!("https://github.com/{}", url.path()); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } Err(e) if e == url::ParseError::RelativeUrlWithoutBase => { // Some people seem to default to github. :( let repo_url = format!("https://github.com/{}", repo_url); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } Ok(url) => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Err(e) => { panic!("Failed to parse repository URL: {}", e); } } } } "bugs" => { if let Some(url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(email) = value.get("email").and_then(serde_json::Value::as_str) { let url = format!("mailto:{}", email); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "keywords" => { if let Some(keywords) = value.as_array() { let keywords = keywords .iter() .filter_map(|keyword| keyword.as_str()) .map(String::from) .collect(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "author" => { if let Some(author) = value.as_object() { let name = author .get("name") .and_then(serde_json::Value::as_str) .map(String::from); let url = author .get("url") .and_then(serde_json::Value::as_str) .map(String::from); let email = author .get("email") .and_then(serde_json::Value::as_str) .map(String::from); let person = Person { name, url, email }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![person]), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } else if let Some(author) = value.as_str() { let person = Person::from(author); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![person]), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } else { error!("Unsupported type for author in package.json: {:?}", value); } } "dependencies" | "private" | "devDependencies" | "scripts" | "files" | "main" => { // Do nothing, skip these fields } _ => { error!("Unknown package.json field {} ({:?})", field, value); } } } Ok(upstream_data) } #[cfg(test)] mod package_json_tests { use super::*; #[test] fn test_dummy() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("package.json"); std::fs::write( &path, r#"{ "name": "mozillaeslintsetup", "description": "This package file is for setup of ESLint.", "repository": {}, "license": "MPL-2.0", "dependencies": { "eslint": "4.18.1", "eslint-plugin-html": "4.0.2", "eslint-plugin-mozilla": "file:tools/lint/eslint/eslint-plugin-mozilla", "eslint-plugin-no-unsanitized": "2.0.2", "eslint-plugin-react": "7.1.0", "eslint-plugin-spidermonkey-js": "file:tools/lint/eslint/eslint-plugin-spidermonkey-js" }, "devDependencies": {} } "#, ) .unwrap(); let ret = guess_from_package_json(&path, &GuesserSettings::default()).unwrap(); assert_eq!( ret, vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary( "This package file is for setup of ESLint.".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("MPL-2.0".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("mozillaeslintsetup".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()) } ] ); } } upstream-ontologist-0.1.37/src/providers/package_xml.rs000066400000000000000000000161231462717511400233010ustar00rootroot00000000000000use crate::xmlparse_simplify_namespaces; use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; pub fn guess_from_package_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::{Element, XMLNode}; const NAMESPACES: &[&str] = &[ "http://pear.php.net/dtd/package-2.0", "http://pear.php.net/dtd/package-2.1", ]; let root = xmlparse_simplify_namespaces(path, NAMESPACES) .ok_or_else(|| ProviderError::ParseError("Unable to parse package.xml".to_string()))?; assert_eq!(root.name, "package", "root tag is {:?}", root.name); let mut upstream_data: Vec = Vec::new(); let mut leads: Vec<&Element> = Vec::new(); let mut maintainers: Vec<&Element> = Vec::new(); let mut authors: Vec<&Element> = Vec::new(); for child_element in &root.children { if let XMLNode::Element(ref element) = child_element { match element.name.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "summary" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { if let Some(release_tag) = element.get_child("release") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version( release_tag.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "url" => { if let Some(url_type) = element.attributes.get("type") { match url_type.as_str() { "repository" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "bugtracker" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => {} } } } "lead" => { leads.push(element); } "maintainer" => { maintainers.push(element); } "author" => { authors.push(element); } "stability" | "dependencies" | "providesextension" | "extsrcrelease" | "channel" | "notes" | "contents" | "date" | "time" | "depend" | "exec_depend" | "buildtool_depend" => { // Do nothing, skip these fields } _ => { error!("Unknown package.xml tag {}", element.name); } } } } for lead_element in leads.iter().take(1) { let name_el = lead_element.get_child("name").unwrap().get_text(); let email_el = lead_element .get_child("email") .map(|s| s.get_text().unwrap()); let active_el = lead_element .get_child("active") .map(|s| s.get_text().unwrap()); if let Some(active_el) = active_el { if active_el != "yes" { continue; } } let person = Person { name: name_el.map(|s| s.to_string()), email: email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if maintainers.len() == 1 { let maintainer_element = maintainers[0]; let name_el = maintainer_element.get_text().map(|s| s.into_owned()); let email_el = maintainer_element.attributes.get("email"); let person = Person { name: name_el, email: email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if !authors.is_empty() { let persons = authors .iter() .map(|author_element| { let name_el = author_element.get_text().unwrap().into_owned(); let email_el = author_element.attributes.get("email"); Person { name: Some(name_el), email: email_el.map(|s| s.to_string()), ..Default::default() } }) .collect(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.1.37/src/providers/package_yaml.rs000066400000000000000000000106651462717511400234500ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::path::Path; pub fn guess_from_package_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let reader = std::fs::File::open(path)?; let data: serde_yaml::Value = serde_yaml::from_reader(reader).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(name) = data.get("name") { if let Some(name) = name.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = data.get("version") { if let Some(version) = version.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(authors) = data.get("author") { if let Some(author) = authors.as_str() { let authors = author.split(',').collect::>(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.into_iter().map(Person::from).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(maintainers) = data.get("maintainer") { if let Some(maintainer) = maintainers.as_str() { let maintainers = maintainer.split(',').collect::>(); let mut maintainers = maintainers .into_iter() .map(Person::from) .collect::>(); if let Some(maintainer) = maintainers.pop() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(homepage) = data.get("homepage") { if let Some(homepage) = homepage.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(description) = data.get("description") { if let Some(description) = description.as_str() { if !description.starts_with("Please see the README") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } } if let Some(synopsis) = data.get("synopsis") { if let Some(synopsis) = synopsis.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(synopsis.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(license) = data.get("license") { if let Some(license) = license.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(github) = data.get("github") { if let Some(github) = github.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://github.com/{}", github)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(repository) = data.get("repository") { if let Some(repository) = repository.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } upstream-ontologist-0.1.37/src/providers/pecl.html000066400000000000000000000526021462717511400222730ustar00rootroot00000000000000 PECL :: Package :: smbclient
Login  |  Packages  |  Support  |  Bugs
Top Level :: File System :: smbclient

smbclient

Package Information
Summary A PHP wrapper for libsmbclient
Maintainers Eduardo Bacchi Kienetz (lead) [details]
Remi Collet (contributor) [details]
License BSD 2-clause
Description smbclient is a PHP extension that uses Samba's libsmbclient library to provide
Samba related functions and 'smb' streams to PHP programs.
Homepage https://github.com/eduardok/libsmbclient-php

[ Latest Tarball ] [ Changelog ] [ View Statistics ]
[ Browse Source ] [ Package Bugs ]

Available Releases
Version State Release Date Downloads  
1.1.1 stable 2023-04-17 smbclient-1.1.1.tgz (34.6kB) [ Changelog ]
1.1.0 stable 2023-04-01 smbclient-1.1.0.tgz (34.3kB) [ Changelog ]
1.0.7 stable 2023-04-01 smbclient-1.0.7.tgz (32.5kB) [ Changelog ]
1.0.6 stable 2021-02-28 smbclient-1.0.6.tgz (32.4kB) [ Changelog ]
1.0.5 stable 2021-02-11 smbclient-1.0.5.tgz (32.2kB) [ Changelog ]
1.0.4 stable 2021-01-22 smbclient-1.0.4.tgz (32.1kB) [ Changelog ]
1.0.3 stable 2021-01-21 smbclient-1.0.3.tgz (31.9kB) [ Changelog ]
1.0.2 stable 2021-01-20 smbclient-1.0.2.tgz (31.8kB) [ Changelog ]
1.0.1 stable 2020-12-29 smbclient-1.0.1.tgz (31.6kB) [ Changelog ]
1.0.0 stable 2018-12-24 smbclient-1.0.0.tgz (31.3kB) [ Changelog ]
0.9.0 stable 2017-02-10 smbclient-0.9.0.tgz (31.1kB) [ Changelog ]
0.8.0 stable 2016-03-01 smbclient-0.8.0.tgz (30.2kB) [ Changelog ]
0.8.0RC1 beta 2015-12-08 smbclient-0.8.0RC1.tgz (30.1kB) [ Changelog ]


Dependencies
Release 1.1.1: PEAR Package: PEAR 1.9.5 or newer
PHP Version: PHP 5.3.7 or newer
Release 1.1.0: PHP Version: PHP 5.3.7 or newer
PEAR Package: PEAR 1.9.5 or newer
Release 1.0.7: PHP Version: PHP 5.3.0 or newer
PEAR Package: PEAR 1.9.5 or newer
Dependencies for older releases can be found on the release overview page.
PRIVACY POLICY  |  CREDITS
Copyright © 2001-2023 The PHP Group
All rights reserved.
Last updated: Mon Jun 01 07:05:01 2020 UTC
Bandwidth and hardware provided by: pair Networks
upstream-ontologist-0.1.37/src/providers/perl.rs000066400000000000000000000365651462717511400220040ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use lazy_regex::regex; use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read}; use std::path::{Path, PathBuf}; use std::process::Command; pub fn guess_from_pod( contents: &str, origin: &Origin, ) -> std::result::Result, ProviderError> { let mut by_header: HashMap = HashMap::new(); let mut inheader: Option = None; for line in contents.lines() { if line.starts_with("=head1 ") { inheader = Some(line.trim_start_matches("=head1 ").to_string()); by_header.insert(inheader.clone().unwrap().to_uppercase(), String::new()); } else if let Some(header) = &inheader { if let Some(value) = by_header.get_mut(&header.to_uppercase()) { value.push_str(line) } } } let mut upstream_data: Vec = Vec::new(); if let Some(description) = by_header.get("DESCRIPTION") { let mut description = description.trim_start_matches('\n').to_string(); description = regex!(r"[FXZSCBI]\\<([^>]+)>") .replace_all(&description, "$1") .into_owned(); description = regex!(r"L\\<([^\|]+)\|([^\\>]+)\\>") .replace_all(&description, "$2") .into_owned(); description = regex!(r"L\\<([^\\>]+)\\>") .replace_all(&description, "$1") .into_owned(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(name) = by_header.get("NAME") { let lines: Vec<&str> = name.trim().lines().collect(); if let Some(line) = lines.first() { if let Some((name, summary)) = line.split_once(" - ") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } else if !line.contains(' ') { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(line.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } Ok(upstream_data) } pub fn guess_from_perl_module( path: &Path, ) -> std::result::Result, ProviderError> { match Command::new("perldoc").arg("-u").arg(path).output() { Ok(output) => guess_from_pod( &String::from_utf8_lossy(&output.stdout), &Origin::Path(path.into()), ), Err(e) => Err(ProviderError::Other(format!( "Error running perldoc: {}", e ))), } } pub fn guess_from_perl_dist_name( path: &Path, dist_name: &str, ) -> std::result::Result, ProviderError> { let mod_path = PathBuf::from(format!( "{}/lib/{}.pm", std::path::Path::new(path) .parent() .expect("parent") .display(), dist_name.replace('-', "/") )); if mod_path.exists() { guess_from_perl_module(mod_path.as_path()) } else { Ok(Vec::new()) } } #[cfg(feature = "dist-ini")] pub fn guess_from_dist_ini( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let parser = ini::Ini::load_from_file(path) .map_err(|e| ProviderError::ParseError(format!("Error parsing dist.ini: {}", e)))?; let dist_name = parser .get_from::<&str>(None, "name") .map(|name| UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let version = parser .get_from::<&str>(None, "version") .map(|version| UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let summary = parser .get_from::<&str>(None, "abstract") .map(|summary| UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let bug_database = parser .get_from(Some("MetaResources"), "bugtracker.web") .map(|bugtracker| UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bugtracker.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let repository = parser .get_from(Some("MetaResources"), "repository.url") .map(|repository| UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let license = parser .get_from::<&str>(None, "license") .map(|license| UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let copyright = match ( parser.get_from::<&str>(None, "copyright_year"), parser.get_from::<&str>(None, "copyright_holder"), ) { (Some(year), Some(holder)) => Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(format!("{} {}", year, holder)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => None, }; let mut upstream_data: Vec = Vec::new(); if let Some(dist_name) = dist_name { upstream_data.push(dist_name); } if let Some(version) = version { upstream_data.push(version); } if let Some(summary) = summary { upstream_data.push(summary); } if let Some(bug_database) = bug_database { upstream_data.push(bug_database); } if let Some(repository) = repository { upstream_data.push(repository); } if let Some(license) = license { upstream_data.push(license); } if let Some(copyright) = copyright { upstream_data.push(copyright); } if let Some(dist_name) = parser.get_from::<&str>(None, "name") { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } pub fn guess_from_meta_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = serde_json::from_str(&contents) .map_err(|e| ProviderError::ParseError(format!("Error parsing META.json: {}", e)))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = data.get("version").and_then(serde_json::Value::as_str) { let version = version.strip_prefix('v').unwrap_or(version); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = data.get("abstract").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(resources) = data.get("resources").and_then(serde_json::Value::as_object) { if let Some(bugtracker) = resources .get("bugtracker") .and_then(serde_json::Value::as_object) { if let Some(web) = bugtracker.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); // TODO: Support resources["bugtracker"]["mailto"] } } if let Some(homepage) = resources .get("homepage") .and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repo) = resources .get("repository") .and_then(serde_json::Value::as_object) { if let Some(url) = repo.get("url").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(web) = repo.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } /// Guess upstream metadata from a META.yml file. /// /// See http://module-build.sourceforge.net/META-spec-v1.4.html for the /// specification of the format. pub fn guess_from_meta_yml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_yaml::Value = serde_yaml::from_str(&contents) .map_err(|e| ProviderError::ParseError(format!("Error parsing META.yml: {}", e)))?; let mut upstream_data = Vec::new(); if let Some(name) = data.get("name") { if let Some(name) = name.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(license) = data.get("license") { if let Some(license) = license.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = data.get("version") { if let Some(version) = version.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(resources) = data.get("resources") { if let Some(bugtracker) = resources.get("bugtracker") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bugtracker.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = resources.get("homepage") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = resources.get("repository") { if let Some(url) = repository.get("url") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name") { if let Some(dist_name) = dist_name.as_str() { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } } Ok(upstream_data) } pub fn guess_from_makefile_pl( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut dist_name = None; let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let name_regex = regex!("name '([^'\"]+)';$"); let repository_regex = regex!("repository '([^'\"]+)';$"); for line in reader.lines().flatten() { if let Some(captures) = name_regex.captures(&line) { dist_name = Some(captures.get(1).unwrap().as_str().to_owned()); let name = dist_name.as_ref().unwrap().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = repository_regex.captures(&line) { let repository = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(dist_name) = dist_name { results.extend(guess_from_perl_dist_name(path, &dist_name)?); } Ok(results) } upstream-ontologist-0.1.37/src/providers/php.rs000066400000000000000000000066241462717511400216220ustar00rootroot00000000000000use crate::{ProviderError, UpstreamDatum}; pub fn guess_from_pecl_package(package: &str) -> Result, ProviderError> { let url = format!("https://pecl.php.net/packages/{}", package); let client = reqwest::blocking::Client::builder() .user_agent(crate::USER_AGENT) // PECL is slow .timeout(std::time::Duration::from_secs(15)) .build() .unwrap(); let response = client .get(url) .send() .map_err(|e| ProviderError::Other(e.to_string()))?; match response.status() { reqwest::StatusCode::NOT_FOUND => { return Ok(vec![]); } status if !status.is_success() => { return Err(ProviderError::Other(format!("HTTP error: {}", status))); } _ => {} } let body = response .text() .map_err(|e| ProviderError::Other(e.to_string()))?; guess_from_pecl_page(&body) } struct TextMatches<'a>(&'a str); impl<'a> select::predicate::Predicate for TextMatches<'a> { fn matches(&self, node: &select::node::Node) -> bool { node.text() == self.0 } } fn guess_from_pecl_page(body: &str) -> Result, ProviderError> { use select::document::Document; use select::predicate::{And, Name}; let document = Document::from_read(body.as_bytes()).map_err(|e| ProviderError::Other(e.to_string()))?; let mut ret = Vec::new(); if let Some(node) = document .find(And(Name("a"), TextMatches("Browse Source"))) .next() { ret.push(UpstreamDatum::RepositoryBrowse( node.attr("href").unwrap().to_string(), )); } if let Some(node) = document .find(And(Name("a"), TextMatches("Package Bugs"))) .next() { ret.push(UpstreamDatum::BugDatabase( node.attr("href").unwrap().to_string(), )); } if let Some(node) = document .find(And(Name("th"), TextMatches("Homepage"))) .next() { if let Some(node) = node.parent().and_then(|node| node.find(Name("a")).next()) { ret.push(UpstreamDatum::Homepage( node.attr("href").unwrap().to_string(), )); } } Ok(ret) } #[cfg(test)] mod pecl_tests { use super::*; #[test] fn test_guess_from_pecl_page() { let text = include_str!("pecl.html"); let ret = guess_from_pecl_page(text).unwrap(); assert_eq!( ret, vec![ UpstreamDatum::RepositoryBrowse( "https://github.com/eduardok/libsmbclient-php".to_string() ), UpstreamDatum::BugDatabase( "https://github.com/eduardok/libsmbclient-php/issues".to_string() ), UpstreamDatum::Homepage("https://github.com/eduardok/libsmbclient-php".to_string()) ] ); } } pub struct Pecl; impl Pecl { pub fn new() -> Self { Self } } impl crate::ThirdPartyRepository for Pecl { fn name(&self) -> &'static str { "Pecl" } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository", "Bug-Database"] } fn guess_metadata(&self, name: &str) -> Result, ProviderError> { guess_from_pecl_package(name) } } upstream-ontologist-0.1.37/src/providers/pubspec.rs000066400000000000000000000052051462717511400224660ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; #[derive(serde::Deserialize)] struct Pubspec { name: Option, description: Option, version: Option, homepage: Option, repository: Option, documentation: Option, issue_tracker: Option, } pub fn guess_from_pubspec_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let pubspec: Pubspec = serde_yaml::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = pubspec.name { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = pubspec.description { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = pubspec.version { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = pubspec.homepage { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = pubspec.repository { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = pubspec.documentation { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(issue_tracker) = pubspec.issue_tracker { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issue_tracker), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.1.37/src/providers/python.rs000066400000000000000000001206031462717511400223460ustar00rootroot00000000000000use crate::{ vcs, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::{debug, warn}; use pyo3::prelude::*; use std::collections::HashMap; use std::path::Path; pub fn guess_from_pkg_info( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let contents = std::fs::read(path)?; let dist = python_pkginfo::Metadata::parse(contents.as_slice()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse python package metadata: {}", e)) })?; let mut ret = vec![]; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(dist.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(dist.version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(homepage) = dist.home_page { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = dist.summary { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = dist.description { ret.extend(parse_python_long_description( description.as_str(), dist.description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } ret.extend(parse_python_project_urls( dist.project_urls .iter() .map(|k| k.split_once(", ").unwrap()) .map(|(k, v)| (k.to_string(), v.to_string())), &Origin::Path(path.to_path_buf()), )); if dist.author.is_some() || dist.author_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: dist.author, email: dist.author_email, url: None, }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if dist.maintainer.is_some() || dist.maintainer_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: dist.maintainer, email: dist.maintainer_email, url: None, }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = dist.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(keywords) = dist.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords.split(", ").map(|s| s.to_string()).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(download_url) = dist.download_url { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(ret) } pub fn guess_from_pyproject_toml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let content = std::fs::read_to_string(path)?; let mut ret = Vec::new(); use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct PyProjectToml { #[serde(flatten)] inner: pyproject_toml::PyProjectToml, tool: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct Tool { poetry: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct ToolPoetry { version: Option, description: Option, license: Option, repository: Option, name: String, urls: Option>, keywords: Option>, authors: Option>, homepage: Option, documentation: Option, } let pyproject: PyProjectToml = toml::from_str(content.as_str()).map_err(|e| ProviderError::ParseError(e.to_string()))?; if let Some(inner_project) = pyproject.inner.project { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(inner_project.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(version) = inner_project.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = inner_project.license { match license { pyproject_toml::License::String(license) => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => {} } } fn contact_to_person(contact: &pyproject_toml::Contact) -> Person { Person { name: contact.name.clone(), email: contact.email.clone(), url: None, } } if let Some(authors) = inner_project.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.iter().map(contact_to_person).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(maintainers) = inner_project.maintainers { let maintainers: Vec<_> = maintainers.iter().map(contact_to_person).collect(); let certainty = if maintainers.len() == 1 { Certainty::Certain } else { Certainty::Possible }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainers[0].clone()), certainty: Some(certainty), origin: Some(path.into()), }); } if let Some(keywords) = inner_project.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(urls) = inner_project.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(classifiers) = inner_project.classifiers { ret.extend(parse_python_classifiers( classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.to_path_buf()), )); } } if let Some(tool) = pyproject.tool { if let Some(poetry) = tool.poetry { if let Some(version) = poetry.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = poetry.description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = poetry.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = poetry.repository { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(poetry.name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(urls) = poetry.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(keywords) = poetry.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(authors) = poetry.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( authors.iter().map(|p| Person::from(p.as_str())).collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = poetry.homepage { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = poetry.documentation { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } Ok(ret) } fn parse_python_project_urls( urls: impl Iterator, origin: &Origin, ) -> Vec { let mut ret = Vec::new(); for (url_type, url) in urls { match url_type.as_str() { "GitHub" | "Repository" | "Source Code" | "Source" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Bug Tracker" | "Bug Reports" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Documentation" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Funding" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Funding(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Homepage" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } _u => { debug!("Unknown Python project URL type: {}", url_type); } } } ret } fn parse_python_long_description( long_description: &str, content_type: Option<&str>, origin: &Origin, ) -> std::result::Result, ProviderError> { if long_description.is_empty() { return Ok(vec![]); } let content_type = content_type.unwrap_or("text/plain"); let mut content_type = content_type.split(';').next().unwrap(); if long_description.contains("-*-restructuredtext-*-") { content_type = "text/restructured-text"; } let mut ret = vec![]; match content_type { "text/plain" => { let lines = long_description.split('\n').collect::>(); if lines.len() > 30 { debug!("Long description is too long ({} lines)", lines.len()); return Ok(vec![]); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(long_description.to_string()), certainty: Some(Certainty::Possible), origin: Some(origin.clone()), }); } "text/restructured-text" | "text/x-rst" => { let (description, extra_md) = crate::readme::description_from_readme_rst(long_description) .map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (restructuredText)".to_string(), )), }); } ret.extend(extra_md); } "text/markdown" => { let (description, extra_md) = crate::readme::description_from_readme_md(long_description) .map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (markdown)".to_string(), )), }); } ret.extend(extra_md); } _ => { warn!("Unknown content type: {}", content_type); } } Ok(ret) } pub fn parse_python_url(url: &str) -> Vec { let repo = vcs::guess_repo_from_url(&url::Url::parse(url).unwrap(), None); if let Some(repo) = repo { return vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Likely), origin: None, }]; } vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Likely), origin: None, }] } pub fn guess_from_setup_cfg( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let setup_cfg = ini::Ini::load_from_file(path).map_err(|e| ProviderError::ParseError(e.to_string()))?; let metadata = match setup_cfg.section(Some("metadata")) { Some(metadata) => metadata, None => { debug!("No [metadata] section in setup.cfg"); return Ok(vec![]); } }; let origin = Origin::Path(path.to_path_buf()); let mut ret = vec![]; for (field, value) in metadata.iter() { match field { "name" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "version" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "url" => { ret.extend(parse_python_url(value)); } "description" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "summary" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "long_description" => { if let Some(path) = value.strip_prefix(value) { if path.contains('/') { debug!("Ignoring long_description path: {}", path); continue; } let value = match std::fs::read_to_string(path) { Ok(value) => value, Err(e) => { debug!("Failed to read long_description file: {}", e); continue; } }; ret.extend(parse_python_long_description( &value, metadata.get("long_description_content_type"), &origin, )?); } else { ret.extend(parse_python_long_description( value, metadata.get("long_description_content_type"), &origin, )?); } } "maintainer" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(value.to_string()), email: metadata .get("maintainer_email") .or_else(|| metadata.get("maintainer-email")) .map(|s| s.to_string()), url: None, }), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "author" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(value.to_string()), email: metadata .get("author_email") .or_else(|| metadata.get("author-email")) .map(|s| s.to_string()), url: None, }]), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "project_urls" => { let urls = value.split('\n').filter_map(|s| { if s.is_empty() { return None; } let (key, value) = match s.split_once('=') { Some((key, value)) => (key, value), None => { debug!("Invalid project_urls line: {}", s); return None; } }; Some((key.to_string(), value.to_string())) }); ret.extend(parse_python_project_urls(urls, &origin)); } "license" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "home-page" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "long_description_content_type" | "maintainer_email" | "author_email" | "maintainer-email" | "author-email" => { // Ignore these, they are handled elsewhere } _ => { warn!("Unknown setup.cfg field: {}", field); } } } Ok(ret) } fn guess_from_setup_py_executed( path: &Path, ) -> std::result::Result, ProviderError> { let mut ret = Vec::new(); // Import setuptools, just in case it replaces distutils use pyo3::types::PyDict; let mut long_description = None; Python::with_gil(|py| { let _ = py.import("setuptools"); let run_setup = py.import("distutils.core")?.getattr("run_setup")?; let os = py.import("os")?; let orig = match os.getattr("getcwd")?.call0() { Ok(orig) => Some(orig.extract::()?), Err(e) => { debug!("Failed to get current directory: {}", e); None } }; let parent = path.parent().unwrap(); os.getattr("chdir")?.call1((parent,))?; let result = || -> PyResult<_> { let kwargs = PyDict::new(py); kwargs.set_item("stop_after", "config")?; run_setup.call((path,), Some(kwargs)) }(); if let Some(orig) = orig { os.getattr("chdir")?.call1((orig,))?; } let result = result?; if let Some(name) = result.call_method0("get_name")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(version) = result.call_method0("get_version")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(url) = result.call_method0("get_url")?.extract()? { ret.extend(parse_python_url(url)); } if let Some(download_url) = result.call_method0("get_download_url")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = result.call_method0("get_license")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Likely), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(contact) = result.call_method0("get_contact")?.extract()? { let contact: String = match result .call_method0("get_contact_email")? .extract::>()? { Some(email) => format!("{} <{}>", contact, email), None => contact, }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(contact), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result.call_method0("get_description")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result .call_method0("get_long_description")? .extract::>()? { let content_type = match result.getattr("long_description_content_type") { Ok(content_type) => content_type.extract::>(), Err(e) if e.is_instance_of::(py) => Ok(None), Err(e) => return Err(e), }?; long_description = Some((description, content_type)); } if let Ok(metadata) = result.getattr("metadata") { if let Ok(project_urls) = metadata.getattr("project_urls") { ret.extend(parse_python_project_urls( project_urls .extract::>()? .into_iter(), &Origin::Path(path.to_path_buf()), )); } } Ok::<(), PyErr>(()) }) .map_err(|e| { warn!("Failed to run setup.py: {}", e); ProviderError::Other(e.to_string()) })?; if let Some((long_description, long_description_content_type)) = long_description { ret.extend(parse_python_long_description( long_description.as_str(), long_description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } Ok(ret) } pub fn guess_from_setup_py( path: &Path, trust_package: bool, ) -> std::result::Result, ProviderError> { if trust_package { guess_from_setup_py_executed(path) } else { guess_from_setup_py_parsed(path) } } fn guess_from_setup_py_parsed( path: &Path, ) -> std::result::Result, ProviderError> { let code = match std::fs::read_to_string(path) { Ok(setup_text) => setup_text, Err(e) => { warn!("Failed to read setup.py: {}", e); return Err(ProviderError::IoError(e)); } }; let mut long_description = None; let mut ret = Vec::new(); Python::with_gil(|py| { let ast = py.import("ast").unwrap(); // Based on pypi.py in https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py // // Copyright (c) nexB Inc. and others. All rights reserved. // ScanCode is a trademark of nexB Inc. // SPDX-License-Identifier: Apache-2.0 let tree = ast.call_method1("parse", (code,))?; let mut setup_args: HashMap = HashMap::new(); let ast_expr = ast.getattr("Expr").unwrap(); let ast_call = ast.getattr("Call").unwrap(); let ast_assign = ast.getattr("Assign").unwrap(); let ast_name = ast.getattr("Name").unwrap(); for statement in tree.getattr("body")?.iter()? { let statement = statement?; // We only care about function calls or assignments to functions named // `setup` or `main` if (statement.is_instance(ast_expr)? || statement.is_instance(ast_call)? || statement.is_instance(ast_assign)?) && statement.getattr("value")?.is_instance(ast_call)? && statement .getattr("value")? .getattr("func")? .is_instance(ast_name)? && (statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "setup" || // we also look for main as sometimes this is used instead of // setup() statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "main") { let value = statement.getattr("value")?; // Process the arguments to the setup function for kw in value.getattr("keywords")?.iter()? { let kw = kw?; let arg_name = kw.getattr("arg")?.extract::()?; setup_args.insert(arg_name, kw.getattr("value")?.to_object(py)); } } } // End code from https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py let ast_str = ast.getattr("Str").unwrap(); let ast_constant = ast.getattr("Constant").unwrap(); let get_str_from_expr = |expr: &PyAny| -> Option { if expr.is_instance(ast_str).ok()? { Some(expr.getattr("s").ok()?.extract::().ok()?) } else if expr.is_instance(ast_constant).ok()? { Some(expr.getattr("value").ok()?.extract::().ok()?) } else { None } }; let ast_list = ast.getattr("List").unwrap(); let ast_tuple = ast.getattr("Tuple").unwrap(); let ast_set = ast.getattr("Set").unwrap(); let get_str_list_from_expr = |expr: &PyAny| -> Option> { // We collect the elements of a list if the element // and tag function calls if expr.is_instance(ast_list).ok()? || expr.is_instance(ast_tuple).ok()? || expr.is_instance(ast_set).ok()? { let mut ret = Vec::new(); for elt in expr.getattr("elts").ok()?.iter().ok()? { let elt = elt.ok()?; if let Some(value) = get_str_from_expr(elt) { ret.push(value); } else { return None; } } Some(ret) } else { None } }; let ast = py.import("ast").unwrap(); let ast_dict = ast.getattr("Dict").unwrap(); let get_dict_from_expr = |expr: &PyAny| -> Option> { if expr.is_instance(ast_dict).ok()? { let mut ret = HashMap::new(); let keys = expr.getattr("keys").ok()?; let values = expr.getattr("values").ok()?; for (key, value) in keys.iter().ok()?.zip(values.iter().ok()?) { if let Some(key) = get_str_from_expr(key.ok()?) { if let Some(value) = get_str_from_expr(value.ok()?) { ret.insert(key, value); } else { return None; } } else { return None; } } Some(ret) } else { None } }; // TODO: what if kw.value is an expression like a call to // version=get_version or version__version__ for (key, value) in setup_args.iter() { let value = value.as_ref(py); match key.as_str() { "name" => { if let Some(name) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "version" => { if let Some(version) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "description" => { if let Some(description) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "long_description" => { if let Some(description) = get_str_from_expr(value) { let content_type = setup_args.get("long_description_content_type"); let content_type = if let Some(content_type) = content_type { get_str_from_expr(content_type.as_ref(py)) } else { None }; long_description = Some((description, content_type)); } } "license" => { if let Some(license) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "download_url" => { if let Some(download_url) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "url" => { if let Some(url) = get_str_from_expr(value) { ret.extend(parse_python_url(url.as_str())); } } "project_urls" => { if let Some(project_urls) = get_dict_from_expr(value) { ret.extend(parse_python_project_urls(project_urls.into_iter(), &Origin::Path(path.into()))); } } "maintainer" => { if let Some(maintainer) = get_str_from_expr(value) { let maintainer_email = setup_args.get("maintainer_email"); let maintainer_email = if let Some(maintainer_email) = maintainer_email { get_str_from_expr(maintainer_email.as_ref(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(maintainer), email: maintainer_email, url: None }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "author" => { if let Some(author) = get_str_from_expr(value) { let author_email = setup_args.get("author_email"); let author_email = if let Some(author_email) = author_email { get_str_from_expr(author_email.as_ref(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(author), email: author_email, url: None }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(author) = get_str_list_from_expr(value) { let author_emails = setup_args.get("author_email"); let author_emails = if let Some(author_emails) = author_emails { get_str_list_from_expr(author_emails.as_ref(py)).map_or_else(|| vec![None; author.len()], |v| v.into_iter().map(Some).collect()) } else { vec![None; author.len()] }; let persons = author.into_iter().zip(author_emails.into_iter()).map(|(name, email)| Person { name: Some(name), email, url: None }).collect(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "keywords" => { if let Some(keywords) = get_str_list_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "classifiers" => { if let Some(classifiers) = get_str_list_from_expr(value) { ret.extend(parse_python_classifiers(classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.into()))); } } // Handled above "author_email" | "maintainer_email" => {}, // Irrelevant "rust_extensions" | "data_files" | "packages" | "package_dir" | "entry_points" => {}, // Irrelevant: dependencies t if t.ends_with("_requires") || t.ends_with("_require") => {}, _ => { warn!("Unknown key in setup.py: {}", key); } } } Ok::<(), PyErr>(()) }).map_err(|e: PyErr| { Python::with_gil(|py| { if e.is_instance_of::(py) { warn!("Syntax error while parsing setup.py: {}", e); ProviderError::Other(e.to_string()) } else { warn!("Failed to parse setup.py: {}", e); ProviderError::Other(e.to_string()) } }) })?; if let Some((description, content_type)) = long_description { ret.extend(parse_python_long_description( description.as_str(), content_type.as_deref(), &Origin::Path(path.into()), )?); } Ok(ret) } fn parse_python_classifiers<'a>( classifiers: impl Iterator + 'a, origin: &'a Origin, ) -> impl Iterator + 'a { classifiers.filter_map(|classifier| { let mut parts = classifier.split(" :: "); let category = parts.next()?; let subcategory = parts.next()?; let value = parts.next()?; let certainty = Some(Certainty::Certain); let origin = Some(origin.clone()); match (category, subcategory) { ("Development Status", _) => None, ("Intended Audience", _) => None, ("License", "OSI Approved") => { Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.into()), certainty, origin: origin, }) } ("Natural Language", _) => None, ("Operating System", _) => None, ("Programming Language", _) => None, ("Topic", _) => None, _ => { warn!("Unknown classifier: {}", classifier); None } } }) } upstream-ontologist-0.1.37/src/providers/r.rs000066400000000000000000000267531462717511400213010ustar00rootroot00000000000000//! See https://r-pkgs.org/description.html use crate::{ vcs, Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::debug; use std::fs::File; use std::io::Read; use url::Url; #[cfg(feature = "r-description")] pub fn guess_from_r_description( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use mailparse::MailHeaderMap; let mut file = File::open(path)?; let mut contents = Vec::new(); file.read_to_end(&mut contents)?; let msg = mailparse::parse_mail(&contents).map_err(|e| ProviderError::ParseError(e.to_string()))?; let headers = msg.get_headers(); let mut results = Vec::new(); fn parse_url_entry(entry: &str) -> Option<(&str, Option<&str>)> { let mut parts = entry.splitn(2, " ("); if let Some(url) = parts.next() { let label = parts.next().map(|label| label.trim_end_matches(')').trim()); Some((url.trim(), label)) } else { Some((entry, None)) } } if let Some(package) = headers.get_first_value("Package") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(package), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = headers.get_first_value("Repository") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(bug_reports) = headers.get_first_value("BugReports") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_reports), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = headers.get_first_value("Version") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = headers.get_first_value("License") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(title) = headers.get_first_value("Title") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(title), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(desc) = headers .get_first_header("Description") .map(|h| h.get_value_raw()) { let desc = String::from_utf8_lossy(desc); let lines: Vec<&str> = desc.split_inclusive('\n').collect(); if !lines.is_empty() { let reflowed = format!("{}{}", lines[0], textwrap::dedent(&lines[1..].concat())); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(reflowed), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(maintainer) = headers.get_first_value("Maintainer") { let person = Person::from(maintainer.as_str()); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(url) = headers.get_first_header("URL").map(|h| h.get_value_raw()) { let url = String::from_utf8(url.to_vec()).unwrap(); let entries: Vec<&str> = url .split_terminator(|c| c == ',' || c == '\n') .map(str::trim) .collect(); let mut urls = Vec::new(); for entry in entries { if let Some((url, label)) = parse_url_entry(entry) { urls.push((label, url)); } } if urls.len() == 1 { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(urls[0].1.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for (label, url) in urls { let url = match Url::parse(url) { Ok(url) => url, Err(_) => { debug!("Invalid URL: {}", url); continue; } }; if let Some(hostname) = url.host_str() { if hostname == "bioconductor.org" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Bioconductor".to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if label.map(str::to_lowercase).as_deref() == Some("devel") || label.map(str::to_lowercase).as_deref() == Some("repository") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if label.map(str::to_lowercase).as_deref() == Some("homepage") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(repo_url) = vcs::guess_repo_from_url(&url, None) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } Ok(results) } #[cfg(test)] #[cfg(feature = "r-description")] mod description_tests { use super::*; #[test] fn test_read() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("DESCRIPTION"); std::fs::write( &path, r#"Package: crul Title: HTTP Client Description: A simple HTTP client, with tools for making HTTP requests, and mocking HTTP requests. The package is built on R6, and takes inspiration from Ruby's 'faraday' gem () The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' (). Version: 0.8.4 License: MIT + file LICENSE Authors@R: c( person("Scott", "Chamberlain", role = c("aut", "cre"), email = "myrmecocystus@gmail.com", comment = c(ORCID = "0000-0003-1444-9135")) ) URL: https://github.com/ropensci/crul (devel) https://ropenscilabs.github.io/http-testing-book/ (user manual) https://www.example.com/crul (homepage) BugReports: https://github.com/ropensci/crul/issues Encoding: UTF-8 Language: en-US Imports: curl (>= 3.3), R6 (>= 2.2.0), urltools (>= 1.6.0), httpcode (>= 0.2.0), jsonlite, mime Suggests: testthat, fauxpas (>= 0.1.0), webmockr (>= 0.1.0), knitr VignetteBuilder: knitr RoxygenNote: 6.1.1 X-schema.org-applicationCategory: Web X-schema.org-keywords: http, https, API, web-services, curl, download, libcurl, async, mocking, caching X-schema.org-isPartOf: https://ropensci.org NeedsCompilation: no Packaged: 2019-08-02 19:58:21 UTC; sckott Author: Scott Chamberlain [aut, cre] () Maintainer: Scott Chamberlain Repository: CRAN Date/Publication: 2019-08-02 20:30:02 UTC "#, ) .unwrap(); let ret = guess_from_r_description(&path, &GuesserSettings::default()).unwrap(); assert_eq!( ret, vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("crul".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("CRAN".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( "https://github.com/ropensci/crul/issues".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.8.4".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("MIT + file LICENSE".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary("HTTP Client".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description( r#"A simple HTTP client, with tools for making HTTP requests, and mocking HTTP requests. The package is built on R6, and takes inspiration from Ruby's 'faraday' gem () The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' ()."# .to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some("Scott Chamberlain".to_string()), email: Some("myrmecocystus@gmail.com".to_string()), url: None }), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( "https://github.com/ropensci/crul".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://www.example.com/crul".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()) }, ] ); } } upstream-ontologist-0.1.37/src/providers/repology.rs000066400000000000000000000040351462717511400226650ustar00rootroot00000000000000use crate::UpstreamDatum; use std::collections::HashMap; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Project { pub name: String, pub status: Option, pub www: Vec, pub licenses: Vec, pub summary: Option, pub downloads: Vec, } pub fn guess_from_repology( repology_project: &str, ) -> Result, crate::ProviderError> { let metadata: Vec = serde_json::from_value( if let Some(value) = crate::get_repology_metadata(repology_project, None) { value } else { return Ok(Vec::new()); }, ) .unwrap(); let mut fields = HashMap::new(); let mut add_field = |name, value, add| { *fields .entry(name) .or_insert(HashMap::new()) .entry(value) .or_insert(0) += add; }; for entry in metadata { let score = if entry.status.as_deref() == Some("outdated") { 1 } else { 10 }; for www in entry.www { add_field("Homepage", www, score); } for license in entry.licenses { add_field("License", license, score); } if let Some(summary) = entry.summary { add_field("Summary", summary, score); } for download in entry.downloads { add_field("Download", download, score); } } Ok(fields .into_iter() .map(|(name, scores)| { ( name.to_string(), scores .into_iter() .max_by_key(|(_, score)| *score) .unwrap() .0, ) }) .map(|(f, v)| match f.as_str() { "Homepage" => UpstreamDatum::Homepage(v), "License" => UpstreamDatum::License(v), "Summary" => UpstreamDatum::Summary(v), "Download" => UpstreamDatum::Download(v), _ => unreachable!(), }) .collect()) } upstream-ontologist-0.1.37/src/providers/ruby.rs000066400000000000000000000116711462717511400220120ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn guess_from_gemspec( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); #[derive(Debug)] enum GemValue { String(String), Array(Vec), } impl GemValue { fn as_str(&self) -> Option<&str> { match self { GemValue::String(s) => Some(s), GemValue::Array(_) => None, } } fn as_array(&self) -> Option<&Vec> { match self { GemValue::String(_) => None, GemValue::Array(a) => Some(a), } } } fn parse_value(value: &str) -> Result { let trimmed = value.trim(); if (trimmed.starts_with('"') && trimmed.ends_with('"')) || (trimmed.starts_with('\'') && trimmed.ends_with('\'')) { return Ok(GemValue::String(trimmed[1..trimmed.len() - 1].to_string())); } else if trimmed.starts_with('"') || trimmed.starts_with("'.freeze") { return Ok(GemValue::String(trimmed[1..].to_string())); } else if trimmed.starts_with('[') && trimmed.ends_with(']') { let elements = trimmed[1..trimmed.len() - 1] .split(',') .map(parse_value) .collect::, _>>()?; return Ok(GemValue::Array(elements)); } Err(format!("Could not parse value: {}", value)) } for line in reader.lines().flatten() { if line.starts_with('#') { continue; } if line.trim().is_empty() { continue; } if line == "Gem::Specification.new do |s|\n" || line == "end\n" { continue; } if line.starts_with(" s.") { let (key, rawval) = match line[4..].split_once('=') { Some((key, rawval)) => (key.trim(), rawval), _ => continue, }; let val = match parse_value(rawval.trim()) { Ok(val) => val, Err(_) => { debug!("Could not parse value: {}", rawval); continue; } }; match key { "name" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "version" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "homepage" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "summary" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "description" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "license" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "authors" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( val.as_array() .unwrap() .iter() .map(|p| Person::from(p.as_str().unwrap())) .collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => debug!("unknown field {} ({:?}) in gemspec", key, val), } } else { debug!( "ignoring unparseable line in {}: {:?}", path.display(), line ); } } Ok(results) } upstream-ontologist-0.1.37/src/providers/rust.rs000066400000000000000000000144451462717511400220300ustar00rootroot00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::debug; pub fn guess_from_cargo( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // see https://doc.rust-lang.org/cargo/reference/manifest.html let doc: toml::Table = toml::from_str(&std::fs::read_to_string(path)?) .map_err(|e| ProviderError::ParseError(e.to_string()))?; let package = match doc.get("package") { Some(package) => package.as_table().ok_or_else(|| { ProviderError::ParseError("[package] section in Cargo.toml is not a table".to_string()) })?, None => { log::debug!("No [package] section in Cargo.toml"); return Ok(Vec::new()); } }; let mut results = Vec::new(); for (field, value) in package.into_iter() { match field.as_str() { "name" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { let license = value.as_str().unwrap(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "repository" => { let repository = value.as_str().unwrap(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { if let Some(version) = value.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "authors" => { let authors = value.as_array().unwrap(); let authors = authors .iter() .map(|a| Person::from(a.as_str().unwrap())) .collect(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "edition" | "default-run" => {} n => { debug!("Unknown Cargo.toml field: {}", n); } } } Ok(results) } pub fn cargo_translate_dashes(crate_name: &str) -> Result, crate::HTTPJSONError> { let url = format!("https://crates.io/api/v1/crates?q={}", crate_name) .parse() .unwrap(); let json: serde_json::Value = crate::load_json_url(&url, None)?; // Navigate through the JSON response to find the crate name. if let Some(crates) = json.get("crates").and_then(|c| c.as_array()) { for krate in crates { if let Some(name) = krate.get("id").and_then(|n| n.as_str()) { return Ok(Some(name.to_string())); } } } Ok(None) } fn parse_crates_io(data: serde_json::Value) -> Vec { let crate_data = &data["crate"]; let mut results = Vec::new(); results.push(UpstreamDatum::Name( crate_data["name"].as_str().unwrap().to_string(), )); if let Some(homepage) = crate_data.get("homepage") { results.push(UpstreamDatum::Homepage( homepage.as_str().unwrap().to_string(), )); } if let Some(repository) = crate_data.get("repository") { results.push(UpstreamDatum::Repository( repository.as_str().unwrap().to_string(), )); } if let Some(description) = crate_data.get("description") { results.push(UpstreamDatum::Summary( description.as_str().unwrap().to_string(), )); } if let Some(license) = crate_data.get("license") { results.push(UpstreamDatum::License( license.as_str().unwrap().to_string(), )); } if let Some(version) = crate_data.get("newest_version") { results.push(UpstreamDatum::Version( version.as_str().unwrap().to_string(), )); } results } pub struct CratesIo; impl CratesIo { pub fn new() -> Self { Self } } impl crate::ThirdPartyRepository for CratesIo { fn name(&self) -> &'static str { "crates.io" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Name", "Repository", "Version", "Summary"][..] } fn guess_metadata(&self, name: &str) -> Result, ProviderError> { let data = crate::load_json_url( &format!("https://crates.io/api/v1/crates/{}", name) .parse() .unwrap(), None, )?; Ok(parse_crates_io(data)) } } upstream-ontologist-0.1.37/src/providers/security_md.rs000066400000000000000000000014321462717511400233520ustar00rootroot00000000000000//! https://docs.github.com/en/free-pro-team@latest/github/\ //! managing-security-vulnerabilities/adding-a-security-policy-to-your-repository use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; pub fn guess_from_security_md( name: &str, path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let path = path.strip_prefix("./").unwrap_or(path); let mut results = Vec::new(); // TODO(jelmer): scan SECURITY.md for email addresses/URLs with instructions results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityMD(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); Ok(results) } upstream-ontologist-0.1.37/src/providers/waf.rs000066400000000000000000000027021462717511400216010ustar00rootroot00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn guess_from_wscript( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let appname_regex = regex!("APPNAME = [\'\"](.*)[\'\"]"); let version_regex = regex!("VERSION = [\'\"](.*)[\'\"]"); for line in reader.lines() { if let Ok(line) = line { if let Some(captures) = appname_regex.captures(&line) { let name = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = version_regex.captures(&line) { let version = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } } Ok(results) } upstream-ontologist-0.1.37/src/readme.rs000066400000000000000000000471611462717511400202540ustar00rootroot00000000000000use crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use pyo3::prelude::*; use scraper::{ElementRef, Selector}; use std::io::BufRead; use std::iter::Iterator; pub fn skip_paragraph(para: &str) -> (bool, Vec) { let mut ret = Vec::::new(); let re = regex!(r"(?ms)^See .* for more (details|information)\."); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^See .* for instructions"); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^Please refer .*\."); if re.is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^It is licensed under (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^License: (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^(Home page|homepage_url|Main website|Website|Homepage): (.*)").captures(para) { let mut url = m.get(2).unwrap().as_str().to_string(); if url.starts_with('<') && url.ends_with('>') { url = url[1..url.len() - 1].to_string(); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^More documentation .* at http.*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Documentation (can be found|is hosted|is available) (at|on) ([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(3).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^Documentation for (.*)\s+(can\s+be\s+found|is\s+hosted)\s+(at|on)\s+([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(4).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Documentation[, ].*found.*(at|on).*\.").is_match(para) { return (true, ret); } if regex!(r"(?ms)^See (http.*|gopkg.in.*|github.com.*)").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Available on (.*)").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^This software is freely distributable under the (.*) license.*") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^This .* is hosted at .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^This code has been developed by .*").is_match(para) { return (true, ret); } if para.starts_with("Download and install using:") { return (true, ret); } if regex!(r"(?ms)^Bugs should be reported by .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^The bug tracker can be found at (http[^ ]+[^.])").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^Copyright (\(c\) |)(.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^You install .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^This .* is free software; .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Please report any bugs(.*) to <(.*)>").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Share and Enjoy").is_match(para) { return (true, ret); } let lines = para.lines().collect::>(); if !lines.is_empty() && ["perl Makefile.PL", "make", "./configure"].contains(&lines[0].trim()) { return (true, ret); } if regex!(r"(?ms)^For further information, .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Further information .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^A detailed ChangeLog can be found.*:\s+(http.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Changelog(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } (false, ret) } pub fn description_from_readme_rst( long_description: &str, ) -> PyResult<(Option, Vec)> { Python::with_gil(|py| { let readme_mod = Python::import(py, "upstream_ontologist.readme").unwrap(); let (description, extra_md): (Option, Vec) = readme_mod .call_method1("description_from_readme_rst", (long_description,))? .extract()?; Ok((description, extra_md)) }) } pub fn description_from_readme_md( long_description: &str, ) -> PyResult<(Option, Vec)> { let parser = pulldown_cmark::Parser::new(long_description); let mut html_output = String::new(); pulldown_cmark::html::push_html(&mut html_output, parser); description_from_readme_html(&html_output) } pub fn guess_from_readme( path: &std::path::Path, _trust_package: bool, ) -> Result, ProviderError> { let mut urls: Vec = vec![]; let mut ret = vec![]; let f = std::fs::File::open(path)?; let reader = std::io::BufReader::new(f); let mut line_iter = reader.lines(); loop { let line = if let Some(line) = line_iter.next() { line? } else { break; }; let line = line.trim(); let mut cmdline = line.strip_prefix('$').unwrap_or(line).trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") || cmdline.starts_with("hg clone ") || cmdline.starts_with("bzr co ") || cmdline.starts_with("bzr branch ") { while cmdline.ends_with('\\') { let next_line = line_iter.next().unwrap()?; cmdline = format!("{} {}", cmdline, next_line.trim()); } if let Some(url) = crate::vcs_command::url_from_vcs_command(cmdline.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_git_clone_command( m.get(1).unwrap().as_str().as_bytes(), ) { urls.push(url.parse().unwrap()); } } if let Some(m) = lazy_regex::regex_find!(r"cvs.*-d\s*:pserver:.*", line) { if let Some(url) = crate::vcs_command::url_from_cvs_co_command(m.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("($ )?(svn co .*)").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_svn_co_command(m.get(2).unwrap().as_str().as_bytes()) { urls.push(url.parse().unwrap()); } } const PROJECT_RE: &str = "([^/]+)/([^/?.()\"#>\\s]*[^-,/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://travis-ci.org/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://coveralls.io/r/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://github.com/([^/]+)/([^/]+)/issues").find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", PROJECT_RE).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = lazy_regex::regex_find!(r"git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex_find!("https://([^]/]+)/([^]\\s()\"#]+)", line) { let url = m.trim_end_matches('.'); if crate::vcs::is_gitlab_site(m, None) { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url.parse().unwrap(), None) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } else { log::warn!("Ignoring invalid URL {} in {}", url, path.display()); } } } } let (description, extra_metadata) = match path.extension().and_then(|s| s.to_str()) { Some("md") => { let contents = std::fs::read_to_string(path)?; description_from_readme_md(&contents) } Some("rst") => { let contents = std::fs::read_to_string(path)?; description_from_readme_rst(&contents) } None => { let contents = std::fs::read_to_string(path)?; Ok(description_from_readme_plain(&contents)?) } Some("pod") => { let contents = std::fs::read_to_string(path)?; let metadata = crate::providers::perl::guess_from_pod( &contents, &Origin::Path(path.to_path_buf()), )?; Ok((None, metadata)) } _ => Ok((None, vec![])), } .map_err(ProviderError::Python)?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } ret.extend(extra_metadata.into_iter()); let prefer_public = |url: &url::Url| -> i32 { if url.scheme().contains("ssh") { 1 } else { 0 } }; urls.sort_by_key(prefer_public); if !urls.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(urls.remove(0).to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } Ok(ret) } pub fn parse_first_header_text(text: &str) -> (Option<&str>, Option<&str>, Option<&str>) { if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) ([0-9.]+)$", text) { return (Some(name), None, Some(version)); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+): (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) - (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) -- (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) version ([^ ]+)", text) { return (Some(name), None, Some(version)); } (None, None, None) } #[test] fn test_parse_first_header_text() { assert_eq!( parse_first_header_text("libwand 1.0"), (Some("libwand"), None, Some("1.0")) ); assert_eq!( parse_first_header_text("libwand -- A wand"), (Some("libwand"), Some("A wand"), None) ); assert_eq!( parse_first_header_text("libwand version 1.0"), (Some("libwand"), None, Some("1.0")) ); } pub fn description_from_readme_plain( text: &str, ) -> Result<(Option, Vec), ProviderError> { let mut lines: Vec<&str> = text.split_terminator('\n').collect(); let mut metadata: Vec = Vec::new(); if lines.is_empty() { return Ok((None, Vec::new())); } if !lines[0].trim().is_empty() && lines.len() > 1 && (lines[1].is_empty() || !lines[1].chars().next().unwrap().is_alphanumeric()) { let (name, summary, version) = parse_first_header_text(lines[0]); if let Some(name) = name { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(version) = version { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(summary) = summary { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Likely), }); } if name.is_some() || version.is_some() || summary.is_some() { lines.remove(0); } } while !lines.is_empty() && lines[0].trim().trim_matches('-').is_empty() { lines.remove(0); } let mut paras: Vec> = Vec::new(); let mut current_para: Vec<&str> = Vec::new(); for line in lines { if line.trim().is_empty() { if !current_para.is_empty() { paras.push(current_para.clone()); current_para.clear(); } } else { current_para.push(line); } } if !current_para.is_empty() { paras.push(current_para.clone()); } let mut output: Vec = Vec::new(); for para in paras { if para.is_empty() { continue; } let line = para.join("\n"); let (skip, extra_metadata) = skip_paragraph(&line); metadata.extend(extra_metadata); if skip { continue; } output.push(format!("{}\n", line)); } let description = if output.len() > 30 { None } else { while !output.is_empty() && output.last().unwrap().trim().is_empty() { output.pop(); } Some(output.join("\n")) }; Ok((description, metadata)) } fn ul_is_field_list(el: ElementRef) -> bool { let names = ["Issues", "Home", "Documentation", "License"]; for li in el.select(&Selector::parse("li").unwrap()) { let text = li.text().collect::(); if let Some((_, name)) = lazy_regex::regex_captures!(r"([A-Za-z]+)\s*:.*", text.trim()) { if !names.contains(&name) { return false; } } else { return false; } } true } #[test] fn test_ul_is_field_list() { let el = scraper::Html::parse_fragment( r#""#, ); assert_eq!( ul_is_field_list( el.root_element() .select(&Selector::parse("ul").unwrap()) .next() .unwrap() ), true ); let el = scraper::Html::parse_fragment( r#"
  • Some other thing
"#, ); assert_eq!( ul_is_field_list( el.root_element() .select(&Selector::parse("ul").unwrap()) .next() .unwrap() ), false ); } pub fn description_from_readme_html( long_description: &str, ) -> PyResult<(Option, Vec)> { Python::with_gil(|py| { let readme_mod = Python::import(py, "upstream_ontologist.readme").unwrap(); let (description, extra_md): (Option, Vec) = readme_mod .call_method1("description_from_readme_html", (long_description,))? .extract()?; Ok((description, extra_md)) }) } upstream-ontologist-0.1.37/src/vcs.rs000066400000000000000000001467031462717511400176140ustar00rootroot00000000000000use crate::with_path_segments; use lazy_regex::regex; use log::{debug, warn}; use std::borrow::Cow; use std::collections::HashMap; use url::Url; pub const VCSES: &[&str] = &["git", "bzr", "hg"]; pub const KNOWN_GITLAB_SITES: &[&str] = &["salsa.debian.org", "invent.kde.org", "0xacab.org"]; pub const SECURE_SCHEMES: &[&str] = &["https", "git+ssh", "bzr+ssh", "hg+ssh", "ssh", "svn+ssh"]; const KNOWN_HOSTING_SITES: &[&str] = &[ "code.launchpad.net", "github.com", "launchpad.net", "git.openstack.org", ]; pub fn plausible_url(url: &str) -> bool { url.contains(':') } pub fn drop_vcs_in_scheme(url: &Url) -> Option { let scheme = url.scheme(); match scheme { "git+http" | "git+https" => { Some(derive_with_scheme(url, scheme.trim_start_matches("git+"))) } "hg+http" | "hg+https" => Some(derive_with_scheme(url, scheme.trim_start_matches("hg+"))), "bzr+lp" | "bzr+http" => Some(derive_with_scheme(url, scheme.trim_start_matches("bzr+"))), _ => None, } } pub fn split_vcs_url(location: &str) -> (String, Option, Option) { let mut url = location.to_string(); let mut branch = None; let mut subpath = None; if let Some(idx) = url.find('[') { if let Some(idx2) = url.find(']') { subpath = Some(url[idx + 1..idx2].to_string()); url = url[..idx].to_string(); } } if let Some(idx) = url.find(" -b ") { branch = Some(url[idx + 4..].to_string()); url = url[..idx].to_string(); } (url, branch, subpath) } pub fn unsplit_vcs_url(location: &VcsLocation) -> String { let mut url = location.url.to_string(); if let Some(branch_name) = location.branch.as_deref() { url = format!("{} -b {}", url, branch_name); } if let Some(subpath_str) = location.subpath.as_deref() { url = format!("{} [{}]", url, subpath_str); } url } pub fn plausible_browse_url(url: &str) -> bool { if let Ok(url) = url::Url::parse(url) { if url.scheme() == "https" || url.scheme() == "http" { return true; } } false } pub fn strip_vcs_prefixes(url: &str) -> &str { let prefixes = ["git", "hg"]; for prefix in prefixes.iter() { if url.starts_with(&format!("{}+", prefix)) { return &url[prefix.len() + 1..]; } } url } fn probe_upstream_github_branch_url(url: &url::Url, version: Option<&str>) -> Option { let path = url.path(); let path = path.strip_suffix(".git").unwrap_or(path); let api_url = url::Url::parse( format!( "https://api.github.com/repos/{}/tags", path.trim_start_matches('/') ) .as_str(), ) .unwrap(); match crate::load_json_url(&api_url, None) { Ok(json) => { if let Some(version) = version { let tags = json.as_array()?; let tag_names = tags .iter() .map(|x| x["name"].as_str().unwrap()) .collect::>(); if tag_names.is_empty() { // Uhm, hmm return Some(true); } return Some(version_in_tags(version, tag_names.as_slice())); } Some(true) } Err(crate::HTTPJSONError::Error { status, .. }) if status == 404 => Some(false), Err(crate::HTTPJSONError::Error { status, .. }) if status == 403 => { debug!("github api rate limit exceeded"); None } Err(e) => { warn!("failed to probe github api: {:?}", e); None } } } fn version_in_tags(version: &str, tag_names: &[&str]) -> bool { if tag_names.contains(&version) { return true; } if tag_names.contains(&format!("v{}", version).as_str()) { return true; } if tag_names.contains(&format!("release/{}", version).as_str()) { return true; } if tag_names.contains(&version.replace('.', "_").as_str()) { return true; } for tag_name in tag_names { if tag_name.ends_with(&format!("_{}", version)) { return true; } if tag_name.ends_with(&format!("-{}", version)) { return true; } if tag_name.ends_with(&format!("_{}", version.replace('.', "_"))) { return true; } } false } fn probe_upstream_breezy_branch_url(url: &url::Url, version: Option<&str>) -> Option { let tags: HashMap> = pyo3::Python::with_gil(|py| { let breezy_ui = py.import("breezy.ui")?; let branch_mod = py.import("breezy.branch")?; py.import("breezy.bzr")?; py.import("breezy.git")?; let old_ui = breezy_ui.getattr("ui_factory")?; breezy_ui.setattr("ui_factory", breezy_ui.call_method0("SilentUIFactory")?)?; let branch_cls = branch_mod.getattr("Branch")?; let branch = branch_cls.call_method1("open", (url.as_str(),))?; branch.call_method0("last_revision")?; let tags = branch.getattr("tags")?.call_method0("get_tag_dict")?; breezy_ui.setattr("ui_factory", old_ui)?; tags.extract() }) .map_err(|e| { warn!("failed to probe breezy branch: {:?}", e); e }) .ok()?; let tag_names = tags.keys().map(|x| x.as_str()).collect::>(); if let Some(version) = version { Some(version_in_tags(version, tag_names.as_slice())) } else { Some(true) } } pub fn probe_upstream_branch_url(url: &url::Url, version: Option<&str>) -> Option { if url.scheme() == "git+ssh" || url.scheme() == "ssh" || url.scheme() == "bzr+ssh" { // Let's not probe anything possibly non-public. return None; } if url.host() == Some(url::Host::Domain("github.com")) { probe_upstream_github_branch_url(url, version) } else { probe_upstream_breezy_branch_url(url, version) } } pub fn check_repository_url_canonical( mut url: url::Url, version: Option<&str>, ) -> std::result::Result { if url.host_str() == Some("github.com") { let mut segments = url.path_segments().unwrap().collect::>(); if segments.len() < 2 { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL with less than 2 path elements".to_string(), )); } if segments[0] == "sponsors" { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub sponsors URL".to_string(), )); } segments[1] = segments[1].trim_end_matches(".git"); let api_url = format!( "https://api.github.com/repos/{}/{}", segments[0], segments[1] ); url = match crate::load_json_url(&url::Url::parse(api_url.as_str()).unwrap(), None) { Ok(data) => { if data["archived"].as_bool().unwrap_or(false) { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is archived".to_string(), )); } if let Some(description) = data["description"].as_str() { if description.contains("DEPRECATED") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is deprecated".to_string(), )); } if description.starts_with("Moved to") { let url = url::Url::parse( description .trim_start_matches("Moved to ") .trim_end_matches('.'), ) .unwrap(); return check_repository_url_canonical(url, version); } if description.contains("has moved") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL has moved".to_string(), )); } if description.starts_with("Mirror of ") { let url = url::Url::parse( description .trim_start_matches("Mirror of ") .trim_end_matches('.'), ) .unwrap(); return check_repository_url_canonical(url, version); } } if let Some(homepage) = data["homepage"].as_str() { if is_gitlab_site(homepage, None) { return Err(crate::CanonicalizeError::InvalidUrl( url, format!("homepage is on GitLab: {}", homepage), )); } } // TODO(jelmer): Look at the contents of the repository; if it contains just a // single README file with < 10 lines, assume the worst. // return data['clone_url'] Ok(url::Url::parse(data["clone_url"].as_str().unwrap()).unwrap()) } Err(crate::HTTPJSONError::Error { status, .. }) if status == 404 => { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL does not exist".to_string(), )) } Err(crate::HTTPJSONError::Error { status, .. }) if status == 403 => { return Err(crate::CanonicalizeError::Unverifiable( url, "GitHub URL rate-limited".to_string(), )) } Err(e) => { return Err(crate::CanonicalizeError::Unverifiable( url, format!("GitHub URL failed to load: {:?}", e), )) } }?; } let is_valid = probe_upstream_branch_url(&url, version); if is_valid.is_none() { return Err(crate::CanonicalizeError::Unverifiable( url, "unable to probe".to_string(), )); } if is_valid.unwrap() { return Ok(url); } Err(crate::CanonicalizeError::InvalidUrl( url, "unable to successfully probe URL".to_string(), )) } pub fn is_gitlab_site(hostname: &str, net_access: Option) -> bool { if KNOWN_GITLAB_SITES.contains(&hostname) { return true; } if hostname.starts_with("gitlab.") { return true; } if net_access.unwrap_or(false) { probe_gitlab_host(hostname) } else { false } } pub fn probe_gitlab_host(hostname: &str) -> bool { let url = format!("https://{}/api/v4/version", hostname); match crate::load_json_url(&url::Url::parse(url.as_str()).unwrap(), None) { Ok(_data) => true, Err(crate::HTTPJSONError::Error { status, response, .. }) if status == 401 => { if let Ok(data) = response.json::() { if let Some(message) = data["message"].as_str() { if message == "401 Unauthorized" { true } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response"); false } } Err(e) => { debug!("failed to probe GitLab host: {:?}", e); false } } } pub fn guess_repo_from_url(url: &url::Url, net_access: Option) -> Option { let net_access = net_access.unwrap_or(false); let path_segments = url.path_segments().unwrap().collect::>(); match url.host_str()? { "github.com" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "travis-ci.org" => { if path_segments.len() < 2 { return None; } Some(format!( "https://github.com/{}/{}", path_segments[0], path_segments[1] )) } "coveralls.io" => { if path_segments.len() < 3 { return None; } if path_segments[0] != "r" { return None; } Some(format!( "https://github.com/{}/{}", path_segments[1], path_segments[2] )) } "launchpad.net" => Some( url::Url::parse(format!("https://code.launchpad.net/{}", path_segments[0]).as_str()) .unwrap() .to_string(), ), "git.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "git" { return None; } Some(url.to_string()) } "freedesktop.org" | "www.freedesktop.org" => { if path_segments.len() >= 2 && path_segments[0] == "software" { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else if path_segments.len() >= 3 && path_segments[0..2] == ["wiki", "Software"] { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[2]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "download.gnome.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "sources" { return None; } Some( url::Url::parse( format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.kde.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "stable" && path_segments[0] != "unstable" { return None; } Some( url::Url::parse(format!("https://invent.kde.org/{}", path_segments[1]).as_str()) .unwrap() .to_string(), ) } "ftp.gnome.org" => { if path_segments.len() >= 4 && path_segments[0] == "pub" && path_segments[1] == "GNOME" && path_segments[2] == "sources" { Some( url::Url::parse( format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[3]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "sourceforge.net" => { if path_segments.len() >= 4 && path_segments[0] == "p" && path_segments[3] == "ci" { Some( url::Url::parse( format!( "https://sourceforge.net/p/{}/{}", path_segments[1], path_segments[2] ) .as_str(), ) .unwrap() .to_string(), ) } else { None } } "www.apache.org" => { if path_segments.len() >= 2 && path_segments[0] == "dist" { Some( url::Url::parse( format!("https://svn.apache.org/repos/asf/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "bitbucket.org" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "ftp.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "gnu" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "releases" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } u if is_gitlab_site(u, Some(net_access)) => { if path_segments.is_empty() { return None; } let proj_segments = if path_segments.contains(&"-") { path_segments[0..path_segments.iter().position(|s| s.contains('-')).unwrap()] .to_vec() } else if path_segments.contains(&"tags") { path_segments[0..path_segments.iter().position(|s| s == &"tags").unwrap()].to_vec() } else if path_segments.contains(&"blob") { path_segments[0..path_segments.iter().position(|s| s == &"blob").unwrap()].to_vec() } else { path_segments.to_vec() }; Some(with_path_segments(url, &proj_segments).unwrap().to_string()) } "git.php.net" => { if path_segments[0] == "repository" { Some(url.to_string()) } else if path_segments.is_empty() { let qs = url.query_pairs().collect::>(); qs.get("p") .map(|p| { url::Url::parse(format!("https://git.php.net/repository/?{}", p).as_str()) .unwrap() }) .map(|u| u.to_string()) } else { None } } u if KNOWN_HOSTING_SITES.contains(&u) => Some(url.to_string()), u if u.starts_with("svn.") => { // 'svn' subdomains are often used for hosting SVN repositories Some(url.to_string()) } _ => { if net_access { match check_repository_url_canonical(url.clone(), None) { Ok(url) => Some(url.to_string()), Err(_) => { debug!("Failed to canonicalize URL: {}", url); None } } } else { None } } } } #[test] fn test_guess_repo_url() { assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url(&"https://github.com/jelmer/blah".parse().unwrap(), Some(false))); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url(&"https://github.com/jelmer/blah/blob/README".parse().unwrap(), Some(false)) ); assert_eq!( None, guess_repo_from_url(&"https://github.com/jelmer".parse().unwrap(), Some(false))); assert_eq!( None, guess_repo_from_url(&"https://www.jelmer.uk/".parse().unwrap(), Some(false))); assert_eq!( Some("http://code.launchpad.net/blah".to_string()), guess_repo_from_url(&"http://code.launchpad.net/blah".parse().unwrap(), Some(false)), ); assert_eq!( Some("https://code.launchpad.net/bzr".to_string()), guess_repo_from_url(&"http://launchpad.net/bzr/+download".parse().unwrap(), Some(false)), ); assert_eq!( Some("https://git.savannah.gnu.org/git/auctex.git".to_string()), guess_repo_from_url(&"https://git.savannah.gnu.org/git/auctex.git".parse().unwrap(), Some(false)), ); assert_eq!( None, guess_repo_from_url(&"https://git.savannah.gnu.org/blah/auctex.git".parse().unwrap(), Some(false)), ); assert_eq!( Some("https://bitbucket.org/fenics-project/dolfin".to_string()), guess_repo_from_url( &"https://bitbucket.org/fenics-project/dolfin/downloads/".parse().unwrap(), Some(false) ), ); } pub fn canonical_git_repo_url(repo_url: &Url, net_access: Option) -> Option { if let Some(hostname) = repo_url.host_str() { if (is_gitlab_site(hostname, net_access) || hostname == "github.com") && !repo_url.path().ends_with(".git") { let mut url = repo_url.clone(); url.set_path(&(url.path().to_owned() + ".git")); return Some(url); } } None } pub fn browse_url_from_repo_url( location: &VcsLocation, net_access: Option, ) -> Option { if location.url.host_str() == Some("github.com") { let mut path = location .url .path_segments() .unwrap() .take(3) .collect::>() .join("/"); if path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if location.subpath.is_some() || location.branch.is_some() { path.push_str(&format!( "/tree/{}", location.branch.as_deref().unwrap_or("HEAD") )); } if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") .unwrap() .join(&path) .unwrap(), ) } else if location.url.host_str() == Some("gopkg.in") { let mut els = location .url .path_segments() .unwrap() .take(3) .collect::>(); if els.len() != 2 { return None; } if let Some(version) = els[2].strip_prefix(".v") { els[2] = ""; let mut path = els.join("/"); path.push_str(&format!("/tree/{}", version)); if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") .unwrap() .join(&path) .unwrap(), ) } else { None } } else if location.url.host_str() == Some("code.launchpad.net") || location.url.host_str() == Some("launchpad.net") { let mut path = location.url.path().to_string(); if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/view/head:{}", subpath_str)); return Some( Url::parse(format!("https://bazaar.launchpad.net{}", path).as_str()).unwrap(), ); } else { return Some( Url::parse(format!("https://code.launchpad.net{}", path).as_str()).unwrap(), ); } } else if location.url.host_str() == Some("svn.apache.org") { let path_elements = location .url .path_segments() .map(|segments| segments.into_iter().collect::>()) .unwrap_or_else(Vec::new); if path_elements.len() >= 2 && path_elements[0] == "repos" && path_elements[1] == "asf" { let mut path_elements = path_elements.into_iter().skip(1).collect::>(); path_elements[0] = "viewvc"; if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push(subpath_str); } return Some( Url::parse(format!("https://svn.apache.org{}", path_elements.join("/")).as_str()) .unwrap(), ); } else { None } } else if location.url.host_str() == Some("git.savannah.gnu.org") || location.url.host_str() == Some("git.sv.gnu.org") { let mut path_elements = location.url.path_segments().unwrap().collect::>(); if location.url.scheme() == "https" && path_elements.first() == Some(&"git") { path_elements.remove(0); } path_elements.insert(0, "cgit"); if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push("tree"); path_elements.push(subpath_str); } Some( Url::parse(format!("https://git.savannah.gnu.org{}", path_elements.join("/")).as_str()) .unwrap(), ) } else if location.url.host_str().is_some() && is_gitlab_site(location.url.host_str().unwrap(), net_access) { let mut path = location.url.path().to_string(); if path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/-/blob/HEAD/{}", subpath_str)); } Some( Url::parse(format!("https://{}{}", location.url.host_str().unwrap(), path).as_str()) .unwrap(), ) } else { None } } pub fn find_public_repo_url(repo_url: &str, net_access: Option) -> Option { let parsed = match Url::parse(repo_url) { Ok(parsed) => parsed, Err(_) => { if repo_url.contains(':') { let re = regex!(r"^(?P[^@:/]+@)?(?P[^/:]+):(?P.*)$"); if let Some(captures) = re.captures(repo_url) { let host = captures.name("host").unwrap().as_str(); let path = captures.name("path").unwrap().as_str(); if host == "github.com" || is_gitlab_site(host, net_access) { return Some(format!("https://{}/{}", host, path)); } } } return None; } }; let revised_url: Option; match parsed.host_str() { Some("github.com") => { if ["https", "http", "git"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } revised_url = Some( Url::parse("https://github.com") .unwrap() .join(parsed.path()) .unwrap() .to_string(), ); } Some(hostname) if is_gitlab_site(hostname, net_access) => { if ["https", "http"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } if parsed.scheme() == "ssh" { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path(), )); } else { revised_url = None; } } Some("code.launchpad.net") | Some("bazaar.launchpad.net") | Some("git.launchpad.net") => { if parsed.scheme().starts_with("http") || parsed.scheme() == "lp" { return Some(repo_url.to_string()); } if ["ssh", "bzr+ssh"].contains(&parsed.scheme()) { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path() )); } else { revised_url = None; } } _ => revised_url = None, } revised_url } pub fn fixup_rcp_style_git_repo_url(url: &str) -> Option { pyo3::prepare_freethreaded_python(); breezyshim::location::rcp_location_to_url(url).ok() } pub fn try_open_branch( url: &url::Url, branch_name: Option<&str>, ) -> Option> { use pyo3::prelude::*; match Python::with_gil(|py| { let uim = py.import("breezy.ui")?; let controldirm = py.import("breezy.controldir")?; let controldir_cls = controldirm.getattr("ControlDir")?; let old_ui_factory = uim.getattr("ui_factory")?; uim.setattr("ui_factory", uim.call_method0("SilentUIFactory")?)?; let r = || -> PyResult { let c = controldir_cls.call_method1("open", (url.to_string(),))?; let b = c.call_method1("open_branch", (branch_name,))?; b.call_method0("last_revision")?; Ok(b.to_object(py)) }(); uim.setattr("ui_factory", old_ui_factory)?; match r { Ok(b) => Ok(b), Err(e) => Err(e), } }) { Ok(b) => Python::with_gil(|py| { Some( Box::new(breezyshim::branch::RegularBranch::new(b.to_object(py))) as Box, ) }), Err(_) => None, } } pub fn find_secure_repo_url( mut url: url::Url, branch: Option<&str>, net_access: Option, ) -> Option { if SECURE_SCHEMES.contains(&url.scheme()) { return Some(url); } // Sites we know to be available over https if let Some(hostname) = url.host_str() { if is_gitlab_site(hostname, net_access) || vec![ "github.com", "git.launchpad.net", "bazaar.launchpad.net", "code.launchpad.net", ] .contains(&hostname) { url = derive_with_scheme(&url, "https"); } } if url.scheme() == "lp" { url = derive_with_scheme(&url, "https"); url.set_host(Some("code.launchpad.net")).unwrap(); } if let Some(host) = url.host_str() { if vec!["git.savannah.gnu.org", "git.sv.gnu.org"].contains(&host) { if url.scheme() == "http" { url = derive_with_scheme(&url, "https"); } else { url = derive_with_scheme(&url, "https"); url.set_path(format!("/git{}", url.path()).as_str()); } } } else { return None; } if net_access.unwrap_or(true) { let secure_repo_url = derive_with_scheme(&url, "https"); let insecure_branch = try_open_branch(&url, branch); let secure_branch = try_open_branch(&secure_repo_url, branch); if let Some(secure_branch) = secure_branch { if insecure_branch.is_none() || secure_branch.last_revision() == insecure_branch.unwrap().last_revision() { url = secure_repo_url; } } } if SECURE_SCHEMES.contains(&url.scheme()) { Some(url) } else { // Can't find a secure URI :( None } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct VcsLocation { pub url: url::Url, pub branch: Option, pub subpath: Option, } impl ToString for VcsLocation { fn to_string(&self) -> String { unsplit_vcs_url(self) } } impl From for url::Url { fn from(v: VcsLocation) -> Self { v.url } } impl From for VcsLocation { fn from(url: url::Url) -> Self { VcsLocation { url, branch: None, subpath: None, } } } impl From<&str> for VcsLocation { fn from(url: &str) -> Self { let (url, branch, subpath) = split_vcs_url(url); let url = fixup_git_url(url.as_str()); VcsLocation { url: url.parse().unwrap(), branch, subpath, } } } fn derive_with_scheme(url: &url::Url, scheme: &str) -> url::Url { let mut s = url.to_string(); s.replace_range(..url.scheme().len(), scheme); url::Url::parse(&s).unwrap() } fn fix_path_in_port(url: &str) -> Option { let (_, scheme, host, port, rest) = match lazy_regex::regex_captures!(r"^([^:]+)://([^:]+):([^/]+)(/.*)$", url) { Some(c) => c, None => return None, }; if port.ends_with(']') { return None; } if let Ok(port) = port.parse::() { Some(format!("{}://{}:{}{}", scheme, host, port, rest)) } else { Some(format!("{}://{}/{}{}", scheme, host, port, rest)) } } fn fix_gitlab_scheme(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if is_gitlab_site(host, None) { return Some(derive_with_scheme(&url, "https").to_string()); } } } None } fn fix_github_scheme(url: &str) -> Option { // GitHub no longer supports the git:// scheme if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("github.com") { return Some(derive_with_scheme(&url, "https").to_string()); } } None } fn fix_salsa_cgit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("salsa.debian.org") { if let Some(suffix) = url.path().strip_prefix("/cgit/") { let mut url = url.clone(); url.set_path(suffix); return Some(url.to_string()); } } } None } fn fix_gitlab_tree_in_url(location: &VcsLocation) -> Option { if is_gitlab_site(location.url.host_str()?, None) { let segments = location.url.path_segments().unwrap().collect::>(); if let Some(p) = segments.iter().position(|p| *p == "tree") { let branch = segments[(p + 1)..].join("/"); let path = segments[..p].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); return Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }); } } None } fn fix_double_slash(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with("//") { let path = url .path() .to_string() .strip_prefix("//") .unwrap() .to_string(); url.set_path(path.as_str()); return Some(url.to_string()); } } None } fn fix_extra_colon(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with(':') { let path = url .path() .to_string() .strip_prefix(':') .unwrap() .to_string(); url.set_path(&path); return Some(url.to_string()); } } None } fn drop_git_username(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if !["github.com", "salsa.debian.org", "gitlab.com"].contains(&host) { return None; } } else { return None; } if !["git", "http", "https"].contains(&url.scheme()) { return None; } if url.username() == "git" { url.set_username("").unwrap(); return Some(url.to_string()); } } None } fn fix_branch_argument(location: &VcsLocation) -> Option { if location.url.host_str() == Some("github.com") { // TODO(jelmer): Handle gitlab sites too? let path_elements = location.url.path_segments().unwrap().collect::>(); if path_elements.len() > 2 && path_elements[2] == "tree" { let branch = path_elements[3..].join("/"); let path = path_elements[..2].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }) } else { None } } else { None } } fn fix_git_gnome_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("git.gnome.org") { let mut path_segments = url.path_segments().unwrap().collect::>(); if path_segments.first().map(|p| *p == "browse") == Some(true) { path_segments.remove(0); } let mut url = derive_with_scheme(&url, "https"); url.set_host(Some("gitlab.gnome.org")).unwrap(); url.set_path(format!("GNOME/{}", path_segments.join("/")).as_str()); return Some(url.to_string()); } } None } fn fix_kde_anongit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.kde.org") { let url = derive_with_scheme(&url, "https"); return Some(url.to_string()); } } None } fn fix_freedesktop_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.freedesktop.org") { let suffix = url.path().strip_prefix("/git/"); let mut url = derive_with_scheme(&url, "https"); if let Some(suffix) = suffix { url.set_path(suffix); } url.set_host(Some("gitlab.freedesktop.org")).unwrap(); return Some(url.to_string()); } } None } const LOCATION_FIXERS: &[fn(&VcsLocation) -> Option] = &[fix_gitlab_tree_in_url, fix_branch_argument]; /// Attempt to fix up broken Git URLs. pub fn fixup_git_location(location: &VcsLocation) -> Cow<'_, VcsLocation> { let mut location = Cow::Borrowed(location); for cb in LOCATION_FIXERS { location = cb(&location).map_or(location, Cow::Owned); } location } const URL_FIXERS: &[fn(&str) -> Option] = &[ fix_path_in_port, fix_gitlab_scheme, fix_github_scheme, fix_salsa_cgit_url, fix_double_slash, fix_extra_colon, drop_git_username, fix_freedesktop_org_url, fix_kde_anongit_url, fix_git_gnome_org_url, ]; pub fn fixup_git_url(url: &str) -> String { let mut url = url.to_string(); for cb in URL_FIXERS { url = cb(&url).unwrap_or(url); } url } pub fn convert_cvs_list_to_str(urls: &[&str]) -> Option { if urls[0].starts_with(":extssh:") || urls[0].starts_with(":pserver:") { let url = breezyshim::location::cvs_to_url(urls[0]); Some(format!("{}#{}", url, urls[1])) } else { None } } pub const SANITIZERS: &[fn(&str) -> Option] = &[ |url| drop_vcs_in_scheme(&url.parse().ok()?), |url| Some(fixup_git_location(&VcsLocation::from(url)).url.clone()), fixup_rcp_style_git_repo_url, |url| find_public_repo_url(url.to_string().as_str(), None).and_then(|u| u.parse().ok()), |url| canonical_git_repo_url(&url.parse().ok()?, None), |url| find_secure_repo_url(url.parse().ok()?, None, Some(false)), ]; pub fn sanitize_url(url: &str) -> String { let mut url: Cow<'_, str> = Cow::Borrowed(url); for sanitizer in SANITIZERS { url = sanitizer(url.as_ref()).map_or(url, |f| Cow::Owned(f.to_string())); } url.into_owned() } #[cfg(test)] mod tests { use super::fixup_git_url; fn fixup_git_location(url: &str) -> String { super::fixup_git_location(&super::VcsLocation::from(url)).to_string() } #[test] fn test_plausible_url() { use super::plausible_url; assert!(!plausible_url("the")); assert!(!plausible_url("1")); assert!(plausible_url("git@foo:blah")); assert!(plausible_url("git+ssh://git@foo/blah")); assert!(plausible_url("https://foo/blah")); } #[test] fn test_is_gitlab_site() { use super::is_gitlab_site; assert!(is_gitlab_site("gitlab.com", Some(false))); assert!(is_gitlab_site("gitlab.example.com", Some(false))); assert!(is_gitlab_site("salsa.debian.org", Some(false))); assert!(!is_gitlab_site("github.com", Some(false))); assert!(!is_gitlab_site("foo.example.com", Some(false))); } #[test] pub fn test_canonicalize_github() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( "https://github.com/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://github.com/jelmer/example".parse::().unwrap(), Some(false) ) ); } #[test] pub fn test_canonicalize_github_ssh() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( "https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example" .parse::() .unwrap(), Some(false) ) ); assert_eq!( None, canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap(), Some(false) ) ); } #[test] fn test_find_public_github() { use super::find_public_repo_url; assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("ssh://git@github.com/jelmer/example", Some(false)).unwrap() ); assert_eq!( Some("https://github.com/jelmer/example"), find_public_repo_url("https://github.com/jelmer/example", Some(false)).as_deref() ); assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("git@github.com:jelmer/example", Some(false)) .unwrap() .as_str() ); } #[test] fn test_find_public_salsa() { use super::find_public_repo_url; assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("ssh://salsa.debian.org/jelmer/example", Some(false)) .unwrap() .as_str() ); assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("https://salsa.debian.org/jelmer/example", Some(false)) .unwrap() .as_str() ); } #[test] fn test_fixup_rcp_style() { use super::fixup_rcp_style_git_repo_url; use url::Url; assert_eq!( Some( "ssh://git@github.com/jelmer/example" .parse::() .unwrap() ), fixup_rcp_style_git_repo_url("git@github.com:jelmer/example") ); assert_eq!( Some("ssh://github.com/jelmer/example".parse::().unwrap()), fixup_rcp_style_git_repo_url("github.com:jelmer/example") ); } #[test] fn test_fixup_rcp_leave() { use super::fixup_rcp_style_git_repo_url; assert_eq!( None, fixup_rcp_style_git_repo_url("https://salsa.debian.org/jelmer/example") ); assert_eq!( None, fixup_rcp_style_git_repo_url("ssh://git@salsa.debian.org/jelmer/example") ); } #[test] fn test_guess_repo_url_travis_ci_org() { use super::guess_repo_from_url; assert_eq!( Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://travis-ci.org/jelmer/dulwich".parse().unwrap(), Some(false) ) .as_deref(), ); } #[test] fn test_guess_repo_url_coveralls() { use super::guess_repo_from_url; assert_eq!( Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://coveralls.io/r/jelmer/dulwich".parse().unwrap(), Some(false) ) .as_deref(), ); } #[test] fn test_guess_repo_url_gitlab() { use super::guess_repo_from_url; assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich".parse().unwrap(), Some(false) ) .as_deref(), ); assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich/tags".parse().unwrap(), Some(false) ) .as_deref(), ); } #[test] fn test_fixup_git_location() { use super::{fixup_git_location, VcsLocation}; assert_eq!( VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, fixup_git_location(&VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }) .into_owned() ); } #[test] fn test_browse_url_from_repo() { use super::browse_url_from_repo_url; assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, Some(false) ), ); assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: None, }, Some(false) ) ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/foo" .parse() .unwrap() ), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: Some("foo".to_string()), subpath: None, }, Some(false) ) ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/HEAD/foo" .parse() .unwrap() ), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: Some("foo".to_string()), }, Some(false) ) ); } #[test] fn test_fix_github_scheme() { use super::fix_github_scheme; assert_eq!( Some("https://github.com/jelmer/example"), fix_github_scheme("git://github.com/jelmer/example").as_deref() ); } #[test] fn test_fix_git_gnome_org_url() { use super::fix_git_gnome_org_url; assert_eq!( Some("https://gitlab.gnome.org/GNOME/example".to_string()), fix_git_gnome_org_url("https://git.gnome.org/browse/example") ); } #[test] fn test_fixup() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com:jelmer/dulwich") ); assert_eq!( "https://github.com/jelmer/dulwich -b blah", fixup_git_location("https://github.com:jelmer/dulwich -b blah"), ); assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("git://github.com/jelmer/dulwich"), ); } #[test] fn test_preserves() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com/jelmer/dulwich"), ); } #[test] fn test_salsa_not_https() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("git://salsa.debian.org/jelmer/dulwich"), ); } #[test] fn test_salsa_uses_cgit() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("https://salsa.debian.org/cgit/jelmer/dulwich"), ); } #[test] fn test_salsa_tree_branch() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich -b master", fixup_git_location("https://salsa.debian.org/jelmer/dulwich/tree/master"), ); } #[test] fn test_strip_extra_slash() { assert_eq!( "https://salsa.debian.org/salve/auctex.git", fixup_git_url("https://salsa.debian.org//salve/auctex.git"), ); } #[test] fn test_strip_extra_colon() { assert_eq!( "https://salsa.debian.org/mckinstry/lcov.git", fixup_git_url("https://salsa.debian.org:/mckinstry/lcov.git"), ); } #[test] fn test_strip_username() { assert_eq!( "https://github.com/RPi-Distro/pgzero.git", fixup_git_url("git://git@github.com:RPi-Distro/pgzero.git"), ); assert_eq!( "https://salsa.debian.org/debian-astro-team/pyavm.git", fixup_git_url("https://git@salsa.debian.org:debian-astro-team/pyavm.git"), ); } #[test] fn test_github_tree_url() { assert_eq!( "https://github.com/blah/blah -b master", fixup_git_location("https://github.com/blah/blah/tree/master"), ); } #[test] fn test_freedesktop() { assert_eq!( "https://gitlab.freedesktop.org/xorg/xserver", fixup_git_url("git://anongit.freedesktop.org/xorg/xserver"), ); assert_eq!( "https://gitlab.freedesktop.org/xorg/lib/libSM", fixup_git_url("git://anongit.freedesktop.org/git/xorg/lib/libSM"), ); } #[test] fn test_anongit() { assert_eq!( "https://anongit.kde.org/kdev-php.git", fixup_git_url("git://anongit.kde.org/kdev-php.git"), ); } #[test] fn test_gnome() { assert_eq!( "https://gitlab.gnome.org/GNOME/alacarte", fixup_git_url("https://git.gnome.org/browse/alacarte"), ); } } upstream-ontologist-0.1.37/src/vcs_command.rs000066400000000000000000000141211462717511400212760ustar00rootroot00000000000000use crate::vcs; use crate::GuesserSettings; use log::warn; fn parse_command_bytes(command: &[u8]) -> Option> { if command.ends_with(&[b'\\']) { warn!( "Ignoring command with line break: {}", String::from_utf8_lossy(command) ); return None; } let command_str = match String::from_utf8(command.to_vec()) { Ok(s) => s, Err(_) => { warn!( "Ignoring command with non-UTF-8: {}", String::from_utf8_lossy(command) ); return None; } }; let args: Vec<_> = shlex::split(command_str.as_str())? .into_iter() .filter(|arg| !arg.trim().is_empty()) .collect(); if args.is_empty() { None } else { Some(args) } } pub fn url_from_vcs_command(command: &[u8]) -> Option { if let Some(url) = url_from_git_clone_command(command) { return Some(url); } if let Some(url) = url_from_fossil_clone_command(command) { return Some(url); } if let Some(url) = url_from_cvs_co_command(command) { return Some(url); } if let Some(url) = url_from_svn_co_command(command) { return Some(url); } None } pub fn url_from_git_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "git" || args.remove(0) != "clone" { return None; } let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } // arguments that take a parameter if args[i] == "-b" || args[i] == "--depth" || args[i] == "--branch" { args.remove(i); args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.get(0).cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_git_clone_command() { assert_eq!( url_from_git_clone_command(b"git clone https://github.com/foo/bar foo"), Some("https://github.com/foo/bar".to_string()) ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone https://github.com/jelmer/blah"), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command( b"git clone https://github.com/jelmer/blah target" ), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command( b"git clone -b foo https://github.com/jelmer/blah target" ), ); assert_eq!( None, url_from_git_clone_command( b"git ls-tree")); } pub fn url_from_fossil_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "fossil" || args.remove(0) != "clone" { return None; } let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.get(0).cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_fossil_clone_command() { assert_eq!( Some("https://example.com/repo/blah".to_string()), url_from_fossil_clone_command( b"fossil clone https://example.com/repo/blah blah.fossil" ), ); } pub fn url_from_cvs_co_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; let i = 0; let mut cvsroot = None; let mut module = None; let mut command_seen = false; if args.remove(0) != "cvs" { return None; } while i < args.len() { if args[i] == "-d" { args.remove(i); cvsroot = Some(args.remove(i)); continue; } if args[i].starts_with("-d") { cvsroot = Some(args.remove(i)[2..].to_string()); continue; } if command_seen && !args[i].starts_with('-') { module = Some(args[i].clone()); } else if args[i] == "co" || args[i] == "checkout" { command_seen = true; } args.remove(i); } if let Some(cvsroot) = cvsroot { let url = breezyshim::location::cvs_to_url(&cvsroot); if let Some(module) = module { return Some(url.join(module.as_str()).unwrap().to_string()); } return Some(url.to_string()); } None } pub fn url_from_svn_co_command(command: &[u8]) -> Option { let args = parse_command_bytes(command)?; if args[0] != "svn" || args[1] != "co" { return None; } let url_schemes = vec!["svn+ssh", "http", "https", "svn"]; args.into_iter().find(|arg| { url_schemes .iter() .any(|scheme| arg.starts_with(&format!("{}://", scheme))) }) } pub fn guess_from_get_orig_source( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let text = std::fs::read(path)?; let mut result = Vec::new(); for line in text.split(|b| *b == b'\n') { if let Some(url) = url_from_vcs_command(line) { let certainty = if url.contains('$') { crate::Certainty::Possible } else { crate::Certainty::Likely }; result.push(crate::UpstreamDatumWithMetadata { datum: crate::UpstreamDatum::Repository(url), certainty: Some(certainty), origin: Some(path.into()), }); } } Ok(result) } upstream-ontologist-0.1.37/testdata/000077500000000000000000000000001462717511400174625ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cabal/000077500000000000000000000000001462717511400205245ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cabal/blah.cabal000066400000000000000000000010361462717511400224160ustar00rootroot00000000000000Name: TestPackage Version: 0.0 Cabal-Version: >= 1.2 License: BSD3 Author: Angela Author Synopsis: Small package with two programs Build-Type: Simple Bug-Reports: https://github.com/example/blah/issues Executable program1 Build-Depends: HUnit Main-Is: Main.hs Hs-Source-Dirs: prog1 Executable program2 Main-Is: Main.hs Build-Depends: HUnit Hs-Source-Dirs: prog2 Other-Modules: Utils source-repository head type: git location: https://github.com/example/blah upstream-ontologist-0.1.37/testdata/cabal/debian/000077500000000000000000000000001462717511400217465ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cabal/debian/control000066400000000000000000000004211462717511400233460ustar00rootroot00000000000000Source: haskell-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~) Package: libghc6-haskell-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.1.37/testdata/cabal/expected.yaml000066400000000000000000000006031462717511400232100ustar00rootroot00000000000000Author: - !Person name: Angela Author Bug-Database: https://github.com/example/blah/issues Bug-Submit: https://github.com/example/blah/issues/new Homepage: https://github.com/example/blah Name: TestPackage Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah Summary: Small package with two programs Description: blah blah License: BSD3 upstream-ontologist-0.1.37/testdata/cargo-invalid/000077500000000000000000000000001462717511400222015ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cargo-invalid/Cargo.toml000066400000000000000000000010411462717511400241250ustar00rootroot00000000000000[package] name = "something" version = "0.1.0" homepage = http://www.example.com/something authors = ["Somebody "] edition = "2018" default-run = "test" description = "Something something" license = "Apache-2.0" [dependencies] tokio = { version = "0.2", features = ["full"] } async-std = { version = "", features = ["unstable"] } rand = "" futures = "" mio = { version = "0.6.20" } rusqlite = { version = "0.23.0" } tokio-util = { version="", features= ["codec"] } bytes = "" [lib] name = "something" path = "src/lib.rs" upstream-ontologist-0.1.37/testdata/cargo-invalid/debian/000077500000000000000000000000001462717511400234235ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cargo-invalid/debian/control000066400000000000000000000000221462717511400250200ustar00rootroot00000000000000Source: something upstream-ontologist-0.1.37/testdata/cargo-invalid/expected.yaml000066400000000000000000000000321462717511400246610ustar00rootroot00000000000000{"Name": "cargo-invalid"} upstream-ontologist-0.1.37/testdata/cargo/000077500000000000000000000000001462717511400205555ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cargo/Cargo.toml000066400000000000000000000011241462717511400225030ustar00rootroot00000000000000[package] name = "something" version = "0.1.0" homepage = "http://www.example.com/something" authors = ["Somebody "] edition = "2018" default-run = "test" description = "Something something" license = "Apache-2.0" repository = "https://github.com/jelmer/example" [dependencies] tokio = { version = "0.2", features = ["full"] } async-std = { version = "", features = ["unstable"] } rand = "" futures = "" mio = { version = "0.6.20" } rusqlite = { version = "0.23.0" } tokio-util = { version="", features= ["codec"] } bytes = "" [lib] name = "something" path = "src/lib.rs" upstream-ontologist-0.1.37/testdata/cargo/debian/000077500000000000000000000000001462717511400217775ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/cargo/debian/control000066400000000000000000000000221462717511400233740ustar00rootroot00000000000000Source: something upstream-ontologist-0.1.37/testdata/cargo/expected.yaml000066400000000000000000000006551462717511400232500ustar00rootroot00000000000000Bug-Database: https://github.com/jelmer/example/issues Bug-Submit: https://github.com/jelmer/example/issues/new Homepage: http://www.example.com/something Name: something Repository: https://github.com/jelmer/example.git Repository-Browse: https://github.com/jelmer/example Author: - !Person name: Somebody email: somebody@example.com Cargo-Crate: something License: Apache-2.0 Summary: Something something Version: 0.1.0 upstream-ontologist-0.1.37/testdata/composer.json/000077500000000000000000000000001462717511400222615ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/composer.json/composer.json000066400000000000000000000034241462717511400250060ustar00rootroot00000000000000{ "name": "laravel/laravel", "type": "project", "description": "The Laravel Framework.", "keywords": ["framework", "laravel"], "license": "MIT", "require": { "php": "^8.1", "guzzlehttp/guzzle": "^7.2", "laravel/framework": "^10.0", "laravel/sanctum": "^3.2", "laravel/tinker": "^2.8" }, "require-dev": { "fakerphp/faker": "^1.9.1", "laravel/pint": "^1.0", "laravel/sail": "^1.18", "mockery/mockery": "^1.4.4", "nunomaduro/collision": "^7.0", "phpunit/phpunit": "^10.0", "spatie/laravel-ignition": "^2.0" }, "autoload": { "psr-4": { "App\\": "app/", "Database\\Factories\\": "database/factories/", "Database\\Seeders\\": "database/seeders/" } }, "autoload-dev": { "psr-4": { "Tests\\": "tests/" } }, "scripts": { "post-autoload-dump": [ "Illuminate\\Foundation\\ComposerScripts::postAutoloadDump", "@php artisan package:discover --ansi" ], "post-update-cmd": [ "@php artisan vendor:publish --tag=laravel-assets --ansi --force" ], "post-root-package-install": [ "@php -r \"file_exists('.env') || copy('.env.example', '.env');\"" ], "post-create-project-cmd": [ "@php artisan key:generate --ansi" ] }, "extra": { "laravel": { "dont-discover": [] } }, "config": { "optimize-autoloader": true, "preferred-install": "dist", "sort-packages": true, "allow-plugins": { "pestphp/pest-plugin": true } }, "minimum-stability": "stable", "prefer-stable": true } upstream-ontologist-0.1.37/testdata/composer.json/expected.yaml000066400000000000000000000001451462717511400247460ustar00rootroot00000000000000Name: laravel/laravel Keywords: ['framework', 'laravel'] Summary: The Laravel Framework License: MIT upstream-ontologist-0.1.37/testdata/configure/000077500000000000000000000000001462717511400214435ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/configure/configure000077500000000000000000000006451462717511400233570ustar00rootroot00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . # # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... upstream-ontologist-0.1.37/testdata/configure/expected.yaml000066400000000000000000000002121462717511400241230ustar00rootroot00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.1.37/testdata/copyright-meta/000077500000000000000000000000001462717511400224165ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/copyright-meta/debian/000077500000000000000000000000001462717511400236405ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/copyright-meta/debian/copyright000066400000000000000000000023051462717511400255730ustar00rootroot00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://www.some-homepage/ X-Source: https://www.another-homepage/ X-Source-Downloaded-From: https://github.com/example/example/releases X-Upstream-Bugs: https://github.com/example/example/issues Files-Excluded: doc/manual.pdf Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". upstream-ontologist-0.1.37/testdata/copyright-meta/expected.yaml000066400000000000000000000007021462717511400251020ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Contact": "An upstream contact ", "Download": "https://github.com/example/example/releases", "Homepage": "https://github.com/example/example", "Name": "Blah", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example", "License": "GPL-2+"} upstream-ontologist-0.1.37/testdata/copyright-more-on-line/000077500000000000000000000000001462717511400237715ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/copyright-more-on-line/debian/000077500000000000000000000000001462717511400252135ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/copyright-more-on-line/debian/copyright000066400000000000000000000020271462717511400271470ustar00rootroot00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://github.com/example/blah, modified to do blah. Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". upstream-ontologist-0.1.37/testdata/copyright-more-on-line/expected.yaml000066400000000000000000000005671462717511400264660ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Contact": "An upstream contact ", "Homepage": "https://github.com/example/blah", "Name": "Blah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "License": "GPL-2+"} upstream-ontologist-0.1.37/testdata/doap/000077500000000000000000000000001462717511400204055ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/doap/blah.doap000066400000000000000000000070231462717511400221620ustar00rootroot00000000000000 blah blah 2006-06-11 blah is a swiss army knife for project maintainers and developers. Lorem ipsum. python Joe Maintainer 0.2.4 trunk Pacito 2007-05-20 Added RSS 2.0 feed generation from .doap release entries using Genshi or Cheetah templates. Added support for CHANGE_LOG_EMAIL_ADDRESS environment variable to blah changelog prepare. Added parsing of wiki attribute of a .DOAP project. Implemented "blah doap search" to search Google or Yahoo for your project's home page. Added support for multiple doap files to "blah doap" Added code to check the user's distribution and offer hints on how to install dependencies. 0.2.3 trunk Ketnet 2007-04-17 0.2.2 trunk Airlines 2007-03-25 0.2.1 trunk Ambulance 2007-02-04 0.2.0 trunk Waffle Flop 2006-12-17 upstream-ontologist-0.1.37/testdata/doap/expected.yaml000066400000000000000000000007241462717511400230750ustar00rootroot00000000000000Bug-Database: http://example.com/blah/trac/newticket Contact: Joe Maintainer Homepage: http://example.com/blah/trac/ Name: blah Repository: http://example.com/blah/svn/trunk/ Repository-Browse: http://example.com/blah/trac/browser/ Description: "\n Lorem ipsum.\n " Download: http://example.com/projects/blah/ Maintainer: !Person name: Joe Maintainer Summary: "\nblah is a swiss army knife for project maintainers and developers" Wiki: http://example.com/blah/trac/ upstream-ontologist-0.1.37/testdata/gemspec/000077500000000000000000000000001462717511400211055ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/gemspec/expected.yaml000066400000000000000000000006751462717511400236020ustar00rootroot00000000000000Name: jekyll License: MIT Summary: A simple, blog aware, static site generator Description: Jekyll is a simple, blog aware, static site generator. Author: - !Person name: Tom Preston-Werner Homepage: http://github.com/jekyll/jekyll Repository: https://github.com/jekyll/jekyll.git Repository-Browse: https://github.com/jekyll/jekyll Bug-Database: https://github.com/jekyll/jekyll/issues Bug-Submit: https://github.com/jekyll/jekyll/issues/new upstream-ontologist-0.1.37/testdata/gemspec/jekyll.gemspec000066400000000000000000000051701462717511400237470ustar00rootroot00000000000000# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'jekyll/version' Gem::Specification.new do |s| s.specification_version = 2 if s.respond_to? :specification_version= s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.rubygems_version = '2.2.2' s.required_ruby_version = '>= 1.9.3' s.name = 'jekyll' s.version = Jekyll::VERSION s.license = 'MIT' s.summary = "A simple, blog aware, static site generator." s.description = "Jekyll is a simple, blog aware, static site generator." s.authors = ["Tom Preston-Werner"] s.email = 'tom@mojombo.com' s.homepage = 'http://github.com/jekyll/jekyll' s.files = `git ls-files`.split($/) s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) } s.test_files = s.files.grep(%r{^(test|spec|features)/}) s.require_paths = ["lib"] s.rdoc_options = ["--charset=UTF-8"] s.extra_rdoc_files = %w[README.markdown LICENSE] s.add_runtime_dependency('liquid', "~> 2.5.5") s.add_runtime_dependency('classifier', "~> 1.3") s.add_runtime_dependency('listen', "~> 2.5") s.add_runtime_dependency('kramdown', "~> 1.3") s.add_runtime_dependency('pygments.rb', "~> 0.5.0") s.add_runtime_dependency('mercenary', "~> 0.3.1") s.add_runtime_dependency('safe_yaml', "~> 1.0") s.add_runtime_dependency('colorator', "~> 0.1") s.add_runtime_dependency('redcarpet', "~> 3.1") s.add_runtime_dependency('toml', '~> 0.1.0') s.add_runtime_dependency('jekyll-coffeescript', '~> 1.0') s.add_runtime_dependency('jekyll-sass-converter', '~> 1.0') s.add_development_dependency('rake', "~> 10.1") s.add_development_dependency('rdoc', "~> 3.11") s.add_development_dependency('redgreen', "~> 1.2") s.add_development_dependency('shoulda', "~> 3.5") s.add_development_dependency('rr', "~> 1.1") s.add_development_dependency('cucumber', "1.3.11") s.add_development_dependency('RedCloth', "~> 4.2") s.add_development_dependency('maruku', "0.7.0") s.add_development_dependency('rdiscount', "~> 1.6") s.add_development_dependency('launchy', "~> 2.3") s.add_development_dependency('simplecov', "~> 0.7") s.add_development_dependency('simplecov-gem-adapter', "~> 1.0.1") s.add_development_dependency('coveralls', "~> 0.7.0") s.add_development_dependency('mime-types', "~> 1.5") s.add_development_dependency('activesupport', '~> 3.2.13') s.add_development_dependency('jekyll_test_plugin') s.add_development_dependency('jekyll_test_plugin_malicious') s.add_development_dependency('rouge', '~> 1.3') end upstream-ontologist-0.1.37/testdata/get-orig-source/000077500000000000000000000000001462717511400224755ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/get-orig-source/debian/000077500000000000000000000000001462717511400237175ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/get-orig-source/debian/get-orig-source.sh000066400000000000000000000011471462717511400272710ustar00rootroot00000000000000#!/bin/sh set -eux repack_version="$1" version="${repack_version%+repack*}" tag="v$(echo "$version" | tr '~' '.')" tmpdir=$(mktemp -d -t exampl.get-orig-source.XXXXXX) orig_dir="exampl-${version}+repack.orig" git clone -b "$tag" --depth 1 https://example.com/scm/project.git "$tmpdir/${orig_dir}" rm -rf "$tmpdir"/*.orig/src/tls/ # free, but appears to be an unused code example from gnutls export TAR_OPTIONS='--owner root --group root --mode a+rX --format ustar' tar -cJ --wildcards --exclude '.git*' -C "$tmpdir/" "${orig_dir}" \ > "../exampl_${version}+repack.orig.tar.xz" rm -rf "$tmpdir" # vim:ts=4 sw=4 et upstream-ontologist-0.1.37/testdata/get-orig-source/expected.yaml000066400000000000000000000001211462717511400251540ustar00rootroot00000000000000{"Name": "get-orig-source", "Repository": "https://example.com/scm/project.git"} upstream-ontologist-0.1.37/testdata/go/000077500000000000000000000000001462717511400200675ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/go/debian/000077500000000000000000000000001462717511400213115ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/go/debian/control000066400000000000000000000006321462717511400227150ustar00rootroot00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any XS-Go-Import-Path: github.com/blah/blah Testsuite: autopkgtest-pkg-go Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.1.37/testdata/go/expected.yaml000066400000000000000000000005721462717511400225600ustar00rootroot00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "blah", "Go-Import-Path": "github.com/blah/blah", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "blah blah", "Summary": "blah blah"} upstream-ontologist-0.1.37/testdata/homepage/000077500000000000000000000000001462717511400212475ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/homepage/debian/000077500000000000000000000000001462717511400224715ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/homepage/debian/control000066400000000000000000000005771462717511400241050ustar00rootroot00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Homepage: https://github.com/j-keck/arping Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.1.37/testdata/homepage/expected.yaml000066400000000000000000000005451462717511400237400ustar00rootroot00000000000000{"Bug-Database": "https://github.com/j-keck/arping/issues", "Bug-Submit": "https://github.com/j-keck/arping/issues/new", "Homepage": "https://github.com/j-keck/arping", "Name": "arping", "Repository": "https://github.com/j-keck/arping.git", "Repository-Browse": "https://github.com/j-keck/arping", "Description": "blah blah", "Summary": "blah blah"} upstream-ontologist-0.1.37/testdata/js/000077500000000000000000000000001462717511400200765ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/js/expected.yaml000066400000000000000000000011311462717511400225570ustar00rootroot00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.1.37/testdata/js/package.json000066400000000000000000000017061462717511400223700ustar00rootroot00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.1.37/testdata/js2/000077500000000000000000000000001462717511400201605ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/js2/expected.yaml000066400000000000000000000011311462717511400226410ustar00rootroot00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.1.37/testdata/js2/package.json000066400000000000000000000016421462717511400224510ustar00rootroot00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "http://github.com/jackmoore/autosize.git", "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.1.37/testdata/js3/000077500000000000000000000000001462717511400201615ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/js3/expected.yaml000066400000000000000000000011311462717511400226420ustar00rootroot00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.1.37/testdata/js3/package.json000066400000000000000000000016201462717511400224460ustar00rootroot00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "jackmoore/autosize.git", "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.1.37/testdata/line-interrupted/000077500000000000000000000000001462717511400227545ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/line-interrupted/DESCRIPTION000066400000000000000000000010261462717511400244610ustar00rootroot00000000000000Package: tsne Type: Package Title: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 Date: 2016-06-04 Author: Justin Donaldson Maintainer: Justin Donaldson Description: A "pure R" implementation of the t-SNE algorithm. License: GPL LazyLoad: yes NeedsCompilation: no URL: https://github.com/jdonaldson/rtsne/ BugReports: https://github.com/jdonaldson/rtsne/issues Packaged: 2016-07-15 15:40:42 UTC; jdonaldson Repository: CRAN Date/Publication: 2016-07-15 20:02:16 upstream-ontologist-0.1.37/testdata/line-interrupted/debian/000077500000000000000000000000001462717511400241765ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/line-interrupted/debian/upstream/000077500000000000000000000000001462717511400260365ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/line-interrupted/debian/upstream/metadata000066400000000000000000000000651462717511400275420ustar00rootroot00000000000000Registry: - Name: conda:conda-forge Entry: r-tsneupstream-ontologist-0.1.37/testdata/line-interrupted/expected.yaml000066400000000000000000000011031462717511400254340ustar00rootroot00000000000000Archive: CRAN Bug-Database: https://github.com/jdonaldson/rtsne/issues Bug-Submit: https://github.com/jdonaldson/rtsne/issues/new Contact: Justin Donaldson Homepage: https://github.com/jdonaldson/rtsne/ Name: tsne Repository: https://github.com/jdonaldson/rtsne.git Repository-Browse: https://github.com/jdonaldson/rtsne Description: A "pure R" implementation of the t-SNE algorithm. License: GPL Maintainer: !Person name: Justin Donaldson email: jdonaldson@gmail.com Summary: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 upstream-ontologist-0.1.37/testdata/machine-copyright/000077500000000000000000000000001462717511400230745ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/machine-copyright/debian/000077500000000000000000000000001462717511400243165ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/machine-copyright/debian/copyright000066400000000000000000000003021462717511400262440ustar00rootroot00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: blah Upstream-Contact: Jelmer Vernooij Source: https://salsa.debian.org/jelmer/blah upstream-ontologist-0.1.37/testdata/machine-copyright/expected.yaml000066400000000000000000000012061462717511400255600ustar00rootroot00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Contact: Jelmer Vernooij Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.1.37/testdata/machine-copyright/package.json000066400000000000000000000017061462717511400253660ustar00rootroot00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.1.37/testdata/meta.json/000077500000000000000000000000001462717511400213605ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/meta.json/META.json000066400000000000000000000301451462717511400230040ustar00rootroot00000000000000{ "abstract" : "parse and validate simple name/value option pairs", "author" : [ "Somebody " ], "dynamic_config" : 0, "license" : [ "perl_5" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : 2 }, "name" : "Some-Blah", "prereqs" : { "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0" }, "suggests" : { "JSON::PP" : "2.27300" } }, "develop" : { "requires" : { "Test::More" : "0", "Test::Pod" : "1.41" } }, "runtime" : { "requires" : { "List::Util" : "0", "Params::Util" : "0", "Sub::Install" : "0.921", "strict" : "0", "warnings" : "0" } }, "test" : { "recommends" : { "CPAN::Meta" : "2.120900" }, "requires" : { "ExtUtils::MakeMaker" : "0", "File::Spec" : "0", "Test::More" : "0.96" } } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/blah/Blie/issues" }, "homepage" : "https://github.com/blah/Blie", "repository" : { "type" : "git", "url" : "https://github.com/blah/Blie.git", "web" : "https://github.com/blah/Blie" } }, "version" : "0.110", "x_Dist_Zilla" : { "perl" : { "version" : "5.023008" }, "plugins" : [ { "class" : "Dist::Zilla::Plugin::Git::GatherDir", "config" : { "Dist::Zilla::Plugin::GatherDir" : { "exclude_filename" : [], "exclude_match" : [], "follow_symlinks" : 0, "include_dotfiles" : 0, "prefix" : "", "prune_directory" : [], "root" : "." }, "Dist::Zilla::Plugin::Git::GatherDir" : { "include_untracked" : 0 } }, "name" : "@RJBS/Git::GatherDir", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::CheckPrereqsIndexed", "name" : "@RJBS/CheckPrereqsIndexed", "version" : "0.017" }, { "class" : "Dist::Zilla::Plugin::CheckExtraTests", "name" : "@RJBS/CheckExtraTests", "version" : "0.028" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 0, "check_all_prereqs" : 0, "modules" : [ "Dist::Zilla::PluginBundle::RJBS" ], "phase" : "build", "skip" : [] } }, "name" : "@RJBS/RJBS-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 1, "check_all_prereqs" : 0, "modules" : [], "phase" : "release", "skip" : [] } }, "name" : "@RJBS/CPAN-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PruneCruft", "name" : "@RJBS/@Filter/PruneCruft", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ManifestSkip", "name" : "@RJBS/@Filter/ManifestSkip", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaYAML", "name" : "@RJBS/@Filter/MetaYAML", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::License", "name" : "@RJBS/@Filter/License", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Readme", "name" : "@RJBS/@Filter/Readme", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ExecDir", "name" : "@RJBS/@Filter/ExecDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ShareDir", "name" : "@RJBS/@Filter/ShareDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Manifest", "name" : "@RJBS/@Filter/Manifest", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::TestRelease", "name" : "@RJBS/@Filter/TestRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ConfirmRelease", "name" : "@RJBS/@Filter/ConfirmRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::UploadToCPAN", "name" : "@RJBS/@Filter/UploadToCPAN", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MakeMaker", "config" : { "Dist::Zilla::Role::TestRunner" : { "default_jobs" : 9 } }, "name" : "@RJBS/MakeMaker", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::AutoPrereqs", "name" : "@RJBS/AutoPrereqs", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Git::NextVersion", "config" : { "Dist::Zilla::Plugin::Git::NextVersion" : { "first_version" : "0.001", "version_by_branch" : 0, "version_regexp" : "(?^:^([0-9]+\\.[0-9]+)$)" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/Git::NextVersion", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::PkgVersion", "name" : "@RJBS/PkgVersion", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaConfig", "name" : "@RJBS/MetaConfig", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaJSON", "name" : "@RJBS/MetaJSON", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::NextRelease", "name" : "@RJBS/NextRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ChangesHasContent", "name" : "@RJBS/Test::ChangesHasContent", "version" : "0.008" }, { "class" : "Dist::Zilla::Plugin::PodSyntaxTests", "name" : "@RJBS/PodSyntaxTests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ReportPrereqs", "name" : "@RJBS/Test::ReportPrereqs", "version" : "0.024" }, { "class" : "Dist::Zilla::Plugin::Prereqs", "config" : { "Dist::Zilla::Plugin::Prereqs" : { "phase" : "test", "type" : "requires" } }, "name" : "@RJBS/TestMoreWithSubtests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::PodWeaver", "config" : { "Dist::Zilla::Plugin::PodWeaver" : { "config_plugins" : [ "@RJBS" ], "finder" : [ ":InstallModules", ":ExecFiles" ], "plugins" : [ { "class" : "Pod::Weaver::Plugin::EnsurePod5", "name" : "@CorePrep/EnsurePod5", "version" : "4.012" } ] } }, "name" : "@RJBS/PodWeaver", "version" : "4.006" }, { "class" : "Dist::Zilla::Plugin::GithubMeta", "name" : "@RJBS/GithubMeta", "version" : "0.54" }, { "class" : "Dist::Zilla::Plugin::Git::Check", "config" : { "Dist::Zilla::Plugin::Git::Check" : { "untracked_files" : "die" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Check", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Commit", "config" : { "Dist::Zilla::Plugin::Git::Commit" : { "add_files_in" : [], "commit_msg" : "v%v%n%n%c" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Commit", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Tag", "config" : { "Dist::Zilla::Plugin::Git::Tag" : { "branch" : null, "changelog" : "Changes", "signed" : 0, "tag" : "0.110", "tag_format" : "%v", "tag_message" : "v%v" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Tag", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Push", "config" : { "Dist::Zilla::Plugin::Git::Push" : { "push_to" : [ "origin :", "github :" ], "remotes_must_exist" : 0 }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Push", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Contributors", "config" : { "Dist::Zilla::Plugin::Git::Contributors" : { "include_authors" : 0, "include_releaser" : 1, "order_by" : "name", "paths" : [ "." ] } }, "name" : "@RJBS/Git::Contributors", "version" : "0.020" }, { "class" : "Dist::Zilla::Plugin::FinderCode", "name" : ":NoFiles", "version" : "5.043" } ], "zilla" : { "class" : "Dist::Zilla::Dist::Builder", "config" : { "is_trial" : "0" }, "version" : "5.043" } }, "x_contributors" : [ "Contributor 1 ", "Contributor 2 " ] } upstream-ontologist-0.1.37/testdata/meta.json/expected.yaml000066400000000000000000000005641462717511400240520ustar00rootroot00000000000000{"Bug-Database": "https://github.com/blah/Blie/issues", "Bug-Submit": "https://github.com/blah/Blie/issues/new", "Homepage": "https://github.com/blah/Blie", "Name": "Some-Blah", "Repository": "https://github.com/blah/Blie.git", "Repository-Browse": "https://github.com/blah/Blie", "Summary": "parse and validate simple name/value option pairs", "Version": "0.110"} upstream-ontologist-0.1.37/testdata/meta.yml/000077500000000000000000000000001462717511400212105ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/meta.yml/META.yml000066400000000000000000000122001462717511400224540ustar00rootroot00000000000000--- abstract: 'do things' author: - 'Somebody ' build_requires: ExtUtils::MakeMaker: '0' File::Spec: '0' Test::More: '0.96' configure_requires: ExtUtils::MakeMaker: '0' dynamic_config: 0 generated_by: 'Dist::Zilla version 5.043, CPAN::Meta::Converter version 2.150005' license: perl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: Blah-Blieh requires: List::Util: '0' Params::Util: '0' Sub::Install: '0.921' strict: '0' warnings: '0' resources: bugtracker: https://github.com/example/Blah/issues homepage: https://github.com/example/Blah repository: https://github.com/example/Blah.git version: '0.110' x_Dist_Zilla: perl: version: '5.023008' plugins: - class: Dist::Zilla::Plugin::Git::GatherDir config: Dist::Zilla::Plugin::GatherDir: exclude_filename: [] exclude_match: [] follow_symlinks: 0 include_dotfiles: 0 prefix: '' prune_directory: [] root: . Dist::Zilla::Plugin::Git::GatherDir: include_untracked: 0 name: '@RJBS/Git::GatherDir' version: '2.036' - class: Dist::Zilla::Plugin::PromptIfStale config: Dist::Zilla::Plugin::PromptIfStale: check_all_plugins: 1 check_all_prereqs: 0 modules: [] phase: release skip: [] name: '@RJBS/CPAN-Outdated' version: '0.047' - class: Dist::Zilla::Plugin::MakeMaker config: Dist::Zilla::Role::TestRunner: default_jobs: 9 name: '@RJBS/MakeMaker' version: '5.043' - class: Dist::Zilla::Plugin::AutoPrereqs name: '@RJBS/AutoPrereqs' version: '5.043' - class: Dist::Zilla::Plugin::Prereqs config: Dist::Zilla::Plugin::Prereqs: phase: test type: requires name: '@RJBS/TestMoreWithSubtests' version: '5.043' - class: Dist::Zilla::Plugin::PodWeaver config: Dist::Zilla::Plugin::PodWeaver: config_plugins: - '@RJBS' finder: - ':InstallModules' - ':ExecFiles' plugins: - class: Pod::Weaver::Section::Legal name: '@RJBS/Legal' version: '4.012' - class: Pod::Weaver::Plugin::Transformer name: '@RJBS/List' version: '4.012' name: '@RJBS/PodWeaver' version: '4.006' - class: Dist::Zilla::Plugin::GithubMeta name: '@RJBS/GithubMeta' version: '0.54' - class: Dist::Zilla::Plugin::Git::Check config: Dist::Zilla::Plugin::Git::Check: untracked_files: die Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Check' version: '2.036' - class: Dist::Zilla::Plugin::Git::Commit config: Dist::Zilla::Plugin::Git::Commit: add_files_in: [] commit_msg: v%v%n%n%c Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Commit' version: '2.036' - class: Dist::Zilla::Plugin::Git::Tag config: Dist::Zilla::Plugin::Git::Tag: branch: ~ changelog: Changes signed: 0 tag: '0.110' tag_format: '%v' tag_message: v%v Dist::Zilla::Role::Git::Repo: repo_root: . Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Tag' version: '2.036' - class: Dist::Zilla::Plugin::Git::Push config: Dist::Zilla::Plugin::Git::Push: push_to: - 'origin :' - 'github :' remotes_must_exist: 0 Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Push' version: '2.036' - class: Dist::Zilla::Plugin::Git::Contributors config: Dist::Zilla::Plugin::Git::Contributors: include_authors: 0 include_releaser: 1 order_by: name paths: - . name: '@RJBS/Git::Contributors' version: '0.020' - class: Dist::Zilla::Plugin::FinderCode name: ':ShareFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':MainModule' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':AllFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':NoFiles' version: '5.043' zilla: class: Dist::Zilla::Dist::Builder config: is_trial: '0' version: '5.043' x_contributors: - 'Contributor 1 ' - 'Contributor 2 ' upstream-ontologist-0.1.37/testdata/meta.yml/expected.yaml000066400000000000000000000005271462717511400237010ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/Blah/issues", "Bug-Submit": "https://github.com/example/Blah/issues/new", "Homepage": "https://github.com/example/Blah", "Name": "Blah-Blieh", "Repository": "https://github.com/example/Blah.git", "Repository-Browse": "https://github.com/example/Blah", "License": "perl", "Version": "0.110"} upstream-ontologist-0.1.37/testdata/metadata.json/000077500000000000000000000000001462717511400222125ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/metadata.json/expected.yaml000066400000000000000000000006521462717511400247020ustar00rootroot00000000000000Name: puppet-nginx Version: 4.3.1-rc0 Author: - !Person name: Vox Pupuli Summary: Puppet NGINX management module License: MIT Repository: https://github.com/voxpupuli/puppet-nginx.git Homepage: http://github.com/voxpupuli/puppet-nginx Bug-Database: https://github.com/voxpupuli/puppet-nginx/issues Repository-Browse: https://github.com/voxpupuli/puppet-nginx Bug-Submit: https://github.com/voxpupuli/puppet-nginx/issues/new upstream-ontologist-0.1.37/testdata/metadata.json/metadata.json000066400000000000000000000032311462717511400246640ustar00rootroot00000000000000{ "name": "puppet-nginx", "version": "4.3.1-rc0", "author": "Vox Pupuli", "summary": "Puppet NGINX management module", "license": "MIT", "source": "https://github.com/voxpupuli/puppet-nginx.git", "project_page": "http://github.com/voxpupuli/puppet-nginx", "issues_url": "https://github.com/voxpupuli/puppet-nginx/issues", "dependencies": [ { "name": "puppetlabs/concat", "version_requirement": ">= 4.1.0 < 8.0.0" }, { "name": "puppetlabs/stdlib", "version_requirement": ">= 5.0.0 < 9.0.0" } ], "requirements": [ { "name": "puppet", "version_requirement": ">= 6.1.0 < 8.0.0" } ], "operatingsystem_support": [ { "operatingsystem": "Debian", "operatingsystemrelease": [ "10", "11" ] }, { "operatingsystem": "OpenBSD" }, { "operatingsystem": "RedHat", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "CentOS", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "VirtuozzoLinux", "operatingsystemrelease": [ "6", "7" ] }, { "operatingsystem": "SLES" }, { "operatingsystem": "Solaris" }, { "operatingsystem": "AIX" }, { "operatingsystem": "FreeBSD" }, { "operatingsystem": "DragonFly" }, { "operatingsystem": "NetBSD" }, { "operatingsystem": "Archlinux" }, { "operatingsystem": "Ubuntu", "operatingsystemrelease": [ "18.04", "20.04", "22.04" ] } ] } upstream-ontologist-0.1.37/testdata/native/000077500000000000000000000000001462717511400207505ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/native/configure000077500000000000000000000006451462717511400226640ustar00rootroot00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . # # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... upstream-ontologist-0.1.37/testdata/native/expected.yaml000066400000000000000000000002121462717511400234300ustar00rootroot00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.1.37/testdata/override/000077500000000000000000000000001462717511400213015ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/override/debian/000077500000000000000000000000001462717511400225235ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/override/debian/source/000077500000000000000000000000001462717511400240235ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/override/debian/source/lintian-overrides000066400000000000000000000000571462717511400274060ustar00rootroot00000000000000blah source: upstream-metadata-file-is-missing upstream-ontologist-0.1.37/testdata/override/expected.yaml000066400000000000000000000011311462717511400237620ustar00rootroot00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.1.37/testdata/override/package.json000066400000000000000000000017061462717511400235730ustar00rootroot00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.1.37/testdata/package.json/000077500000000000000000000000001462717511400220255ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/package.json/expected.yaml000066400000000000000000000000441462717511400245100ustar00rootroot00000000000000Name: react-fixtures Version: 0.1.0 upstream-ontologist-0.1.37/testdata/package.json/package.json000066400000000000000000000026441462717511400243210ustar00rootroot00000000000000{ "name": "react-fixtures", "version": "0.1.0", "private": true, "devDependencies": { "react-scripts": "^1.0.11" }, "dependencies": { "@babel/standalone": "^7.0.0", "art": "^0.10.3", "classnames": "^2.2.5", "codemirror": "^5.40.0", "core-js": "^2.4.1", "jest-diff": "^29.4.1", "prop-types": "^15.6.0", "query-string": "^4.2.3", "react": "^15.4.1", "react-dom": "^15.4.1", "semver": "^5.5.0" }, "scripts": { "start": "react-scripts start", "prestart": "cp ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.development.js ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.production.min.js ../../build/oss-stable/react/umd/react.development.js ../../build/oss-stable/react-dom/umd/react-dom.development.js ../../build/oss-stable/react/umd/react.production.min.js ../../build/oss-stable/react-dom/umd/react-dom.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.development.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.development.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.production.min.js public/ && cp -a ../../build/oss-stable/. node_modules", "build": "react-scripts build && cp build/index.html build/200.html", "test": "react-scripts test --env=jsdom", "eject": "react-scripts eject" } } upstream-ontologist-0.1.37/testdata/package.json2/000077500000000000000000000000001462717511400221075ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/package.json2/expected.yaml000066400000000000000000000007141462717511400245760ustar00rootroot00000000000000Name: autosize Version: 4.0.2 Demo: http://www.jacklmoore.com/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Homepage: http://www.jacklmoore.com/autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new upstream-ontologist-0.1.37/testdata/package.json2/package.json000066400000000000000000000006361462717511400244020ustar00rootroot00000000000000{ "name": "autosize", "version": "4.0.2", "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" } } upstream-ontologist-0.1.37/testdata/package.xml/000077500000000000000000000000001462717511400216545ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/package.xml/expected.yaml000066400000000000000000000007131462717511400243420ustar00rootroot00000000000000Author: - !Person name: Author 1 email: author1@example.com - !Person name: Author 2 email: author2@example.com Bug-Database: https://github.com/example/tracker/issues Bug-Submit: https://github.com/example/tracker/issues/new Homepage: https://github.com/example/repo Name: blah Repository: https://github.com/example/repo.git Repository-Browse: https://github.com/example/repo Description: "\n This package does something\n " License: BSD upstream-ontologist-0.1.37/testdata/package.xml/package.xml000066400000000000000000000014241462717511400237720ustar00rootroot00000000000000 blah 1.12.4 This package does something Author 1 Author 2 Maintainer 1 Other maintainer BSD http://website.example.com/ https://github.com/example/repo https://github.com/example/tracker/issues catkin curl boost python-rospkg upstream-ontologist-0.1.37/testdata/package.xml2/000077500000000000000000000000001462717511400217365ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/package.xml2/expected.yaml000066400000000000000000000012101462717511400244150ustar00rootroot00000000000000Name: phalcon Summary: Phalcon is a full stack PHP framework offering low resource consumption and high performance Description: |2- Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. Version: 5.1.4 License: BSD 3-Clause License Maintainer: !Person name: Anton Vasiliev email: anton@phalcon.io Contact: Anton Vasiliev upstream-ontologist-0.1.37/testdata/package.xml2/package.xml000066400000000000000000000050421462717511400240540ustar00rootroot00000000000000 phalcon pecl.php.net Phalcon is a full stack PHP framework offering low resource consumption and high performance. Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. Anton Vasiliev jeckerson anton@phalcon.io yes Nikolaos Dimopoulos niden nikos@phalcon.io yes 2023-01-10 5.1.4 5.1.4 stable stable BSD 3-Clause License Full changelog can be found at: https://github.com/phalcon/cphalcon/blob/master/CHANGELOG-5.0.md ### Fixed - Fixed `Phalcon\Acl\Adapter\Memory::isAllowed` to not use the deprecated `ReflectionType::getClass` [#16255](https://github.com/phalcon/cphalcon/issues/16255) 7.4.1 8.1.99 1.10 phalcon upstream-ontologist-0.1.37/testdata/package.yaml/000077500000000000000000000000001462717511400220165ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/package.yaml/expected.yaml000066400000000000000000000010301462717511400244750ustar00rootroot00000000000000Name: css-text Version: 0.1.2.2 Summary: CSS parser and renderer License: MIT Homepage: https://github.com/yesodweb/css-text Bug-Submit: https://github.com/yesodweb/css-text/issues/new Bug-Database: https://github.com/yesodweb/css-text/issues Repository: https://github.com/yesodweb/css-text.git Repository-Browse: https://github.com/yesodweb/css-text Contact: Greg Weber Maintainer: !Person name: Greg Weber email: greg@gregweber.info Author: - !Person name: Michael Snoyman email: michael@snoyman.com upstream-ontologist-0.1.37/testdata/package.yaml/package.yaml000066400000000000000000000014441462717511400243000ustar00rootroot00000000000000name: css-text version: 0.1.2.2 synopsis: CSS parser and renderer. description: Please see the README and generated docs at category: Web, Yesod author: Michael Snoyman maintainer: Michael Snoyman , Greg Weber license: MIT github: yesodweb/css-text.git stability: Stable extra-source-files: - README.md - ChangeLog.md dependencies: - base >=4 && <5 - text >=0.11 - attoparsec >=0.10.2.0 library: source-dirs: src ghc-options: -Wall when: - condition: ! '!(impl(ghc >=8.0))' dependencies: - semigroups >=0.16.1 tests: runtests: main: runtests.hs source-dirs: test dependencies: - hspec >=1.3 - QuickCheck - css-text upstream-ontologist-0.1.37/testdata/perl-parsingerror/000077500000000000000000000000001462717511400231375ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/perl-parsingerror/dist.ini000066400000000000000000000002611462717511400246020ustar00rootroot00000000000000name = IO-Blah-Blah author = Somebody license = Perl_5 [@Author::ETHER] :version = 0.097 [Prereqs] perl = 5.008 upstream-ontologist-0.1.37/testdata/perl-parsingerror/expected.yaml000066400000000000000000000000361462717511400256230ustar00rootroot00000000000000{"Name": "perl-parsingerror"} upstream-ontologist-0.1.37/testdata/perl/000077500000000000000000000000001462717511400204245ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/perl/dist.ini000066400000000000000000000014361462717511400220740ustar00rootroot00000000000000name = PerlIO-eol author = Shlomi Fish license = Perl_5 copyright_holder = Audrey Tang copyright_year = 2004 [@Filter] -bundle = @Basic -remove = MakeMaker -remove = ExtraTests -remove = License -remove = Readme [AutoPrereqs] [MakeMaker::Awesome] WriteMakefile_arg = 'OBJECT' => 'eol.o' [MetaJSON] [MetaProvides::Package] [MetaResources] bugtracker.web = https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol bugtracker.mailto = bug-perlio-eol@rt.cpan.org repository.url = https://github.com/shlomif/PerlIO-eol.git repository.web = https://github.com/shlomif/PerlIO-eol repository.type = git [PodCoverageTests] [PodSyntaxTests] [PruneCruft] [RewriteVersion] [RunExtraTests] [Test::CPAN::Changes] [Test::Compile] fake_home = 1 [Test::Kwalitee] [Test::TrailingSpace] upstream-ontologist-0.1.37/testdata/perl/expected.yaml000066400000000000000000000005221462717511400231100ustar00rootroot00000000000000{"Bug-Database": "https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol", "Homepage": "https://github.com/shlomif/PerlIO-eol", "Name": "PerlIO-eol", "Repository": "https://github.com/shlomif/PerlIO-eol.git", "Repository-Browse": "https://github.com/shlomif/PerlIO-eol", "Copyright": "2004 Audrey Tang", "License": "Perl_5"} upstream-ontologist-0.1.37/testdata/pkg-info/000077500000000000000000000000001462717511400211745ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/pkg-info/PKG-INFO000066400000000000000000000054521462717511400222770ustar00rootroot00000000000000Metadata-Version: 2.1 Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Home-page: https://forge.softwareheritage.org/diffusion/DLDG/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-git Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-git/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-loader-git ============== The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before. The main entry points are: - :class:`swh.loader.git.loader.GitLoader` for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production. - :class:`swh.loader.git.from_disk.GitLoaderFromDisk` which ingests only local git clone repository. - :class:`swh.loader.git.loader.GitLoaderFromArchive` which ingests a git repository wrapped in an archive. License ------- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See top-level LICENSE file for the full text of the GNU General Public License along with this program. Dependencies ------------ ### Runtime - python3 - python3-dulwich - python3-retrying - python3-swh.core - python3-swh.model - python3-swh.storage - python3-swh.scheduler ### Test - python3-nose Requirements ------------ - implementation language, Python3 - coding guidelines: conform to PEP8 - Git access: via dulwich CLI Run ---------- You can run the loader from a remote origin (*loader*) or from an origin on disk (*from_disk*) directly by calling: ``` swh loader -C run git ``` or "git_disk". ## Configuration sample /tmp/git.yml: ``` storage: cls: remote args: url: http://localhost:5002/ ``` upstream-ontologist-0.1.37/testdata/pkg-info/expected.yaml000066400000000000000000000021371462717511400236640ustar00rootroot00000000000000Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Homepage: https://forge.softwareheritage.org/diffusion/DLDG/ Author: - !Person name: Software Heritage developers email: swh-devel@inria.fr Bug-Database: https://forge.softwareheritage.org/maniphest Funding: https://www.softwareheritage.org/donate Repository: https://forge.softwareheritage.org/source/swh-loader-git Documentation: https://docs.softwareheritage.org/devel/swh-loader-git/ Description: "The Software Heritage Git Loader is a tool and a library to walk a local\nGit repository and inject into the SWH dataset all contained files that\nweren't known before.\n\nThe main entry points are:\n\n* \n:class:swh.loader.git.loader.GitLoader for the main loader which can ingest either\nlocal or remote git repository's contents. This is the main implementation deployed in\nproduction.\n\n* \n:class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone\nrepository.\n\n* \n:class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository\nwrapped in an archive.\n\n" upstream-ontologist-0.1.37/testdata/poetry/000077500000000000000000000000001462717511400210045ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/poetry/expected.yaml000066400000000000000000000011621462717511400234710ustar00rootroot00000000000000Name: gi-docgen Version: '2021.1' Summary: Documentation tool for GObject-based libraries Author: - !Person name: Emmanuele Bassi email: ebassi@gnome.org License: GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0 Homepage: https://gitlab.gnome.org/GNOME/gi-docgen Documentation: https://gnome.pages.gitlab.gnome.org/gi-docgen/ Keywords: - documentation - introspection - gobject - gtk Bug-Database: https://gitlab.gnome.org/GNOME/gi-docgen/issues Repository: https://gitlab.gnome.org/GNOME/gi-docgen.git Repository-Browse: https://gitlab.gnome.org/GNOME/gi-docgen Bug-Submit: https://gitlab.gnome.org/GNOME/gi-docgen/issues/new upstream-ontologist-0.1.37/testdata/poetry/pyproject.toml000066400000000000000000000033021462717511400237160ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021 GNOME Foundation # # SPDX-License-Identifier: Apache-2.0 OR GPL-3.0-or-later [tool.poetry] name = "gi-docgen" packages = [ { include = "gidocgen" }, ] version = "2021.1" description = "Documentation tool for GObject-based libraries" authors = ["Emmanuele Bassi "] license = "GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0" readme = "README.md" homepage = "https://gitlab.gnome.org/GNOME/gi-docgen" documentation = "https://gnome.pages.gitlab.gnome.org/gi-docgen/" keywords = ["documentation","introspection","gobject","gtk"] classifiers = [ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: BSD", "Operating System :: POSIX :: Linux", "Topic :: Desktop Environment :: Gnome", "Topic :: Software Development :: Documentation" ] [tool.poetry.urls] "Bug Tracker" = "https://gitlab.gnome.org/GNOME/gi-docgen/issues" [tool.poetry.dependencies] python = "^3.6" Markdown = "^3" MarkupSafe = "^1" Pygments = "^2" Jinja2 = "^2" toml = "^0" typogrify = "^2" [tool.poetry.dev-dependencies] coverage = "^5" green = "^3" mypy = "0.812" flake8 = "^3" black = {version = "^20.8b1", allow-prereleases = true} [tool.poetry.scripts] gi-docgen = "gidocgen.__main__:main" [tool.coverage.report] show_missing = true exclude_lines = [ "pragma: no cover", "if False" ] [build-system] requires = ["setuptools","wheel"] upstream-ontologist-0.1.37/testdata/pom/000077500000000000000000000000001462717511400202555ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/pom/debian/000077500000000000000000000000001462717511400214775ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/pom/debian/control000066400000000000000000000000151462717511400230760ustar00rootroot00000000000000Source: blah upstream-ontologist-0.1.37/testdata/pom/expected.yaml000066400000000000000000000005771462717511400227530ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Homepage": "http://www.example.com", "License": "GNU Lesser General Public License, Version 2.1", "Name": "libblah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "Summary": "Bla lah lah lah"} upstream-ontologist-0.1.37/testdata/pom/pom.xml000066400000000000000000000062471462717511400216030ustar00rootroot00000000000000 4.0.0 com.example libblah jar ${version} libblah http://www.example.com Bla lah lah lah. GNU Lesser General Public License, Version 2.1 http://www.gnu.org/licenses/lgpl-2.1.txt Joe Example joe@example.com Org1 http://www.example.com/org1 scm:git:https://github.com/example/blah.git scm:git:git@github.com/example/blah.git https://github.com/example/blah ossrh https://oss.sonatype.org/content/repositories/snapshots java org.apache.maven.plugins maven-compiler-plugin 3.1 org.apache.maven.plugins maven-release-plugin 2.4.1 org.apache.maven.plugins maven-source-plugin attach-sources jar org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.sonatype.plugins nexus-staging-maven-plugin 1.6.2 true ossrh https://oss.sonatype.org/ true org.apache.maven.plugins maven-gpg-plugin 1.5 sign-artifacts verify sign upstream-ontologist-0.1.37/testdata/pubspec.yml/000077500000000000000000000000001462717511400217235ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/pubspec.yml/expected.yaml000066400000000000000000000007021462717511400244070ustar00rootroot00000000000000Name: dart Description: You don't need to be worried just because you have to support multiple screens Version: 2.1.53 Homepage: https://github.com/DisplayKit/responsive_styles Repository: https://github.com/DisplayKit/responsive_styles.git Repository-Browse: https://github.com/DisplayKit/responsive_styles Bug-Database: https://github.com/DisplayKit/responsive_styles/issues Bug-Submit: https://github.com/DisplayKit/responsive_styles/issues/new upstream-ontologist-0.1.37/testdata/pubspec.yml/pubspec.yml000066400000000000000000000006021462717511400241050ustar00rootroot00000000000000name: dart description: You don't need to be worried just because you have to support multiple screens version: 2.1.53 homepage: https://github.com/DisplayKit/responsive_styles environment: sdk: ">=2.17.5 <3.0.0" flutter: ">=1.17.0" dependencies: flutter: sdk: flutter mockito: ^5.2.0 dev_dependencies: flutter_test: sdk: flutter flutter_lints: ^2.0.0 flutter: upstream-ontologist-0.1.37/testdata/python/000077500000000000000000000000001462717511400210035ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/python/blah/000077500000000000000000000000001462717511400217115ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/python/blah/__init__.py000066400000000000000000000000001462717511400240100ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/python/debian/000077500000000000000000000000001462717511400222255ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/python/debian/control000066400000000000000000000005241462717511400236310ustar00rootroot00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.1.37/testdata/python/expected.yaml000066400000000000000000000007141462717511400234720ustar00rootroot00000000000000{"Bug-Database": "https://salsa.debian.org/jelmer/lintian-brush/issues", "Bug-Submit": "https://salsa.debian.org/jelmer/lintian-brush/issues/new", "Homepage": "https://salsa.debian.org/jelmer/lintian-brush", "Name": "blah", "Repository": "https://salsa.debian.org/jelmer/lintian-brush.git", "Repository-Browse": "https://salsa.debian.org/jelmer/lintian-brush", "Description": "blah blah", "Summary": "Automatic lintian issue fixer", "Version": "0.16"} upstream-ontologist-0.1.37/testdata/python/setup.py000077500000000000000000000005011462717511400225140ustar00rootroot00000000000000#!/usr/bin/python3 from setuptools import setup setup( name="blah", version="0.16", packages=["blah"], url="https://salsa.debian.org/jelmer/lintian-brush", description="Automatic lintian issue fixer", project_urls={ "Repository": "https://salsa.debian.org/jelmer/lintian-brush", }, ) upstream-ontologist-0.1.37/testdata/r-description/000077500000000000000000000000001462717511400222445ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/r-description/DESCRIPTION000066400000000000000000000056251462717511400237620ustar00rootroot00000000000000Package: readxl Title: Read Excel Files Version: 1.3.1 Authors@R: c(person(given = "Hadley", family = "Wickham", role = "aut", email = "hadley@rstudio.com", comment = c(ORCID = "0000-0003-4757-117X")), person(given = "Jennifer", family = "Bryan", role = c("aut", "cre"), email = "jenny@rstudio.com", comment = c(ORCID = "0000-0002-6983-2759")), person(given = "RStudio", role = c("cph", "fnd"), comment = "Copyright holder of all R code and all C/C++ code without explicit copyright attribution"), person(given = "Marcin", family = "Kalicinski", role = c("ctb", "cph"), comment = "Author of included RapidXML code"), person(given = "Komarov Valery", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Christophe Leitienne", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Bob Colbert", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "David Hoerl", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Evan Miller", role = c("ctb", "cph"), comment = "Author of included libxls code")) Description: Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 URL: https://readxl.tidyverse.org, https://github.com/tidyverse/readxl BugReports: https://github.com/tidyverse/readxl/issues Imports: cellranger, Rcpp (>= 0.12.18), tibble (>= 1.3.1), utils Suggests: covr, knitr, rmarkdown, rprojroot (>= 1.1), testthat LinkingTo: progress, Rcpp VignetteBuilder: knitr Encoding: UTF-8 LazyData: true Note: libxls-SHA cef1393 RoxygenNote: 6.1.1 NeedsCompilation: yes Packaged: 2019-03-13 16:01:23 UTC; jenny Author: Hadley Wickham [aut] (), Jennifer Bryan [aut, cre] (), RStudio [cph, fnd] (Copyright holder of all R code and all C/C++ code without explicit copyright attribution), Marcin Kalicinski [ctb, cph] (Author of included RapidXML code), Komarov Valery [ctb, cph] (Author of included libxls code), Christophe Leitienne [ctb, cph] (Author of included libxls code), Bob Colbert [ctb, cph] (Author of included libxls code), David Hoerl [ctb, cph] (Author of included libxls code), Evan Miller [ctb, cph] (Author of included libxls code) Maintainer: Jennifer Bryan Repository: CRAN Date/Publication: 2019-03-13 16:30:02 UTC upstream-ontologist-0.1.37/testdata/r-description/expected.yaml000066400000000000000000000013601462717511400247310ustar00rootroot00000000000000Archive: CRAN Bug-Database: https://github.com/tidyverse/readxl/issues Bug-Submit: https://github.com/tidyverse/readxl/issues/new Contact: Jennifer Bryan Homepage: https://github.com/tidyverse/readxl Name: readxl Repository: https://github.com/tidyverse/readxl.git Repository-Browse: https://github.com/tidyverse/readxl Description: |- Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 Maintainer: !Person name: Jennifer Bryan email: jenny@rstudio.com Summary: Read Excel Files Version: 1.3.1 upstream-ontologist-0.1.37/testdata/readme-command/000077500000000000000000000000001462717511400223335ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/readme-command/README000066400000000000000000000002611462717511400232120ustar00rootroot00000000000000This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.1.37/testdata/readme-command/expected.yaml000066400000000000000000000007431462717511400250240ustar00rootroot00000000000000Bug-Database: https://github.com/OpenPrinting/cups-filters/issues Bug-Submit: https://github.com/OpenPrinting/cups-filters/issues/new Homepage: https://github.com/blah/blah Name: readme-command Repository: https://github.com/blah/blah.git Repository-Browse: https://github.com/blah/blah Description: > This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.1.37/testdata/readme-other/000077500000000000000000000000001462717511400220365ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/readme-other/README000066400000000000000000000001621462717511400227150ustar00rootroot00000000000000This is a project. One of the dependencies is blah, which you can install from: https://github.com/blah/blah.git upstream-ontologist-0.1.37/testdata/readme-other/expected.yaml000066400000000000000000000006521462717511400245260ustar00rootroot00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "readme-other", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "This is a project. One of the dependencies is blah, which you can install from:\n\nhttps://github.com/blah/blah.git\n"} upstream-ontologist-0.1.37/testdata/security.md/000077500000000000000000000000001462717511400217305ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/security.md/SECURITY.md000066400000000000000000000000461462717511400235210ustar00rootroot00000000000000Please send email to blah@example.com upstream-ontologist-0.1.37/testdata/security.md/debian/000077500000000000000000000000001462717511400231525ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/security.md/debian/control000066400000000000000000000000151462717511400245510ustar00rootroot00000000000000Source: blah upstream-ontologist-0.1.37/testdata/security.md/debian/upstream/000077500000000000000000000000001462717511400250125ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/security.md/debian/upstream/metadata000066400000000000000000000001471462717511400265170ustar00rootroot00000000000000--- Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah upstream-ontologist-0.1.37/testdata/security.md/expected.yaml000066400000000000000000000000671462717511400244200ustar00rootroot00000000000000{"Name": "security.md", "Security-MD": "SECURITY.md"} upstream-ontologist-0.1.37/testdata/setup.py1/000077500000000000000000000000001462717511400213325ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/setup.py1/expected.yaml000066400000000000000000000011211462717511400240120ustar00rootroot00000000000000Bug-Database: https://github.com/jelmer/upstream-ontologist/issues Bug-Submit: https://github.com/jelmer/upstream-ontologist/issues/new Contact: Jelmer Vernooij Homepage: https://github.com/jelmer/upstream-ontologist Name: upstream-ontologist Repository: https://github.com/jelmer/upstream-ontologist.git Repository-Browse: https://github.com/jelmer/upstream-ontologist Summary: tracking of upstream project metadata Version: 0.1.35 Author: - !Person name: Jelmer Vernooij email: jelmer@jelmer.uk Maintainer: !Person name: Jelmer Vernooij email: jelmer@jelmer.uk upstream-ontologist-0.1.37/testdata/setup.py1/setup.cfg000066400000000000000000000024261462717511400231570ustar00rootroot00000000000000[metadata] name = upstream-ontologist version = 0.1.35 author = Jelmer Vernooij author_email = jelmer@jelmer.uk maintainer = Jelmer Vernooij maintainer_email = jelmer@jelmer.uk url = https://github.com/jelmer/upstream-ontologist description = tracking of upstream project metadata long_description = file:README.md long_description_content_type = text/markdown project_urls = Repository=https://github.com/jelmer/upstream-ontologist.git [options] python_requires = >= 3.7 packages = upstream_ontologist upstream_ontologist.debian install_requires = python_debian typing_extensions;python_version<="3.7" ruamel.yaml # Ideally this would be an optional dependency breezy>=3.3.0 tests_require = breezy>=3.3.0 [options.entry_points] console_scripts = guess-upstream-metadata=upstream_ontologist.__main__:main autodoap=upstream_ontologist.doap:main autocodemeta=upstream_ontologist.codemeta:main [options.extras_require] cargo = tomlkit debian_changelog = python-debianbts httplib2>=0.7.8 python_debian debian_watch = debmutate[watch]>=0.59 debian_rules = debmutate pyproject = tomlkit homepage = bs4 readme = docutils lxml bs4 markdown pygments setup.cfg = setuptools [options.package_data] upstream_ontologist = py.typed upstream-ontologist-0.1.37/testdata/setup.py1/setup.py000077500000000000000000000001731462717511400230500ustar00rootroot00000000000000#!/usr/bin/python3 from setuptools import setup setup(data_files=[("share/man/man1", ["man/guess-upstream-metadata.1"])]) upstream-ontologist-0.1.37/testdata/travis.yml/000077500000000000000000000000001462717511400215725ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/travis.yml/.travis.yml000066400000000000000000000010011462717511400236730ustar00rootroot00000000000000language: go go_import_path: github.com/ethereum/go-ethereum sudo: false jobs: allow_failures: - stage: build os: osx go: 1.17.x env: - azure-osx - azure-ios - cocoapods-ios include: # This builder only tests code linters on latest version of Go - stage: lint os: linux dist: bionic go: 1.19.x env: - lint git: submodules: false # avoid cloning ethereum/tests script: - go run build/ci.go lint upstream-ontologist-0.1.37/testdata/travis.yml/expected.yaml000066400000000000000000000001011462717511400242470ustar00rootroot00000000000000Name: travis.yml Go-Import-Path: github.com/ethereum/go-ethereum upstream-ontologist-0.1.37/testdata/watch-git/000077500000000000000000000000001462717511400213515ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch-git/debian/000077500000000000000000000000001462717511400225735ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch-git/debian/watch000066400000000000000000000002411462717511400236210ustar00rootroot00000000000000version=3 opts="mode=git, gitmode=shallow, pgpmode=gittag" \ https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git \ refs/tags/(.*) debian upstream-ontologist-0.1.37/testdata/watch-git/expected.yaml000066400000000000000000000001641462717511400240370ustar00rootroot00000000000000{"Name": "watch-git", "Repository": "https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git"} upstream-ontologist-0.1.37/testdata/watch/000077500000000000000000000000001462717511400205705ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch/debian/000077500000000000000000000000001462717511400220125ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch/debian/watch000066400000000000000000000002261462717511400230430ustar00rootroot00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example/releases .*/Toric-(\d\S*)\.tar\.gz upstream-ontologist-0.1.37/testdata/watch/expected.yaml000066400000000000000000000004721462717511400232600ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Homepage": "https://github.com/example/example", "Name": "example", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example"} upstream-ontologist-0.1.37/testdata/watch2/000077500000000000000000000000001462717511400206525ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch2/debian/000077500000000000000000000000001462717511400220745ustar00rootroot00000000000000upstream-ontologist-0.1.37/testdata/watch2/debian/watch000066400000000000000000000002641462717511400231270ustar00rootroot00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example-cat/tags \ (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate upstream-ontologist-0.1.37/testdata/watch2/expected.yaml000066400000000000000000000005221462717511400233360ustar00rootroot00000000000000{"Bug-Database": "https://github.com/example/example-cat/issues", "Bug-Submit": "https://github.com/example/example-cat/issues/new", "Homepage": "https://github.com/example/example-cat", "Name": "example-cat", "Repository": "https://github.com/example/example-cat.git", "Repository-Browse": "https://github.com/example/example-cat"} upstream-ontologist-0.1.37/tests/000077500000000000000000000000001462717511400170135ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/__init__.py000066400000000000000000000021411462717511400211220ustar00rootroot00000000000000#!/usr/bin/python # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import unittest def test_suite(): names = [ "data", "vcs", ] module_names = [__name__ + ".test_" + name for name in names] module_names.append(__name__ + ".test_readme.test_suite") module_names.append(__name__ + ".testdata.test_suite") loader = unittest.TestLoader() return loader.loadTestsFromNames(module_names) upstream-ontologist-0.1.37/tests/readme_data/000077500000000000000000000000001462717511400212415ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/aiozipkin/000077500000000000000000000000001462717511400232365ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/aiozipkin/README.rst000066400000000000000000000147431462717511400247360ustar00rootroot00000000000000aiozipkin ========= .. image:: https://github.com/aio-libs/aiozipkin/workflows/CI/badge.svg :target: https://github.com/aio-libs/aiozipkin/actions?query=workflow%3ACI .. image:: https://codecov.io/gh/aio-libs/aiozipkin/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiozipkin .. image:: https://api.codeclimate.com/v1/badges/1ff813d5cad2d702cbf1/maintainability :target: https://codeclimate.com/github/aio-libs/aiozipkin/maintainability :alt: Maintainability .. image:: https://img.shields.io/pypi/v/aiozipkin.svg :target: https://pypi.python.org/pypi/aiozipkin .. image:: https://readthedocs.org/projects/aiozipkin/badge/?version=latest :target: http://aiozipkin.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status .. image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter **aiozipkin** is Python 3.6+ module that adds distributed tracing capabilities from asyncio_ applications with zipkin (http://zipkin.io) server instrumentation. zipkin_ is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with **aiozipkin** report timing data to zipkin_. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_animation2.gif :alt: zipkin ui animation Features ======== * Distributed tracing capabilities to **asyncio** applications. * Support zipkin_ ``v2`` protocol. * Easy to use API. * Explicit context handling, no thread local variables. * Can work with jaeger_ and stackdriver_ through zipkin compatible API. zipkin vocabulary ----------------- Before code lets learn important zipkin_ vocabulary, for more detailed information please visit https://zipkin.io/pages/instrumenting .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_glossary.png :alt: zipkin ui glossary * **Span** represents one specific method (RPC) call * **Annotation** string data associated with a particular timestamp in span * **Tag** - key and value associated with given span * **Trace** - collection of spans, related to serving particular request Simple example -------------- .. code:: python import asyncio import aiozipkin as az async def run(): # setup zipkin client zipkin_address = 'http://127.0.0.1:9411/api/v2/spans' endpoint = az.create_endpoint( "simple_service", ipv4="127.0.0.1", port=8080) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) # create and setup new trace with tracer.new_trace(sampled=True) as span: # give a name for the span span.name("Slow SQL") # tag with relevant information span.tag("span_type", "root") # indicate that this is client span span.kind(az.CLIENT) # make timestamp and name it with START SQL query span.annotate("START SQL SELECT * FROM") # imitate long SQL query await asyncio.sleep(0.1) # make other timestamp and name it "END SQL" span.annotate("END SQL") await tracer.close() if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(run()) aiohttp example --------------- *aiozipkin* includes *aiohttp* server instrumentation, for this create `web.Application()` as usual and install aiozipkin plugin: .. code:: python import aiozipkin as az def init_app(): host, port = "127.0.0.1", 8080 app = web.Application() endpoint = az.create_endpoint("AIOHTTP_SERVER", ipv4=host, port=port) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) az.setup(app, tracer) That is it, plugin adds middleware that tries to fetch context from headers, and create/join new trace. Optionally on client side you can add propagation headers in order to force tracing and to see network latency between client and server. .. code:: python import aiozipkin as az endpoint = az.create_endpoint("AIOHTTP_CLIENT") tracer = await az.create(zipkin_address, endpoint) with tracer.new_trace() as span: span.kind(az.CLIENT) headers = span.context.make_headers() host = "http://127.0.0.1:8080/api/v1/posts/{}".format(i) resp = await session.get(host, headers=headers) await resp.text() Documentation ------------- http://aiozipkin.readthedocs.io/ Installation ------------ Installation process is simple, just:: $ pip install aiozipkin Support of other collectors =========================== **aiozipkin** can work with any other zipkin_ compatible service, currently we tested it with jaeger_ and stackdriver_. Jaeger support -------------- jaeger_ supports zipkin_ span format as result it is possible to use *aiozipkin* with jaeger_ server. You just need to specify *jaeger* server address and it should work out of the box. Not need to run local zipkin server. For more informations see tests and jaeger_ documentation. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/jaeger.png :alt: jaeger ui animation Stackdriver support ------------------- Google stackdriver_ supports zipkin_ span format as result it is possible to use *aiozipkin* with this google_ service. In order to make this work you need to setup zipkin service locally, that will send trace to the cloud. See google_ cloud documentation how to setup make zipkin collector: .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/stackdriver.png :alt: jaeger ui animation Requirements ------------ * Python_ 3.6+ * aiohttp_ .. _PEP492: https://www.python.org/dev/peps/pep-0492/ .. _Python: https://www.python.org .. _aiohttp: https://github.com/KeepSafe/aiohttp .. _asyncio: http://docs.python.org/3.5/library/asyncio.html .. _uvloop: https://github.com/MagicStack/uvloop .. _zipkin: http://zipkin.io .. _jaeger: http://jaeger.readthedocs.io/en/latest/ .. _stackdriver: https://cloud.google.com/stackdriver/ .. _google: https://cloud.google.com/trace/docs/zipkin upstream-ontologist-0.1.37/tests/readme_data/aiozipkin/description000066400000000000000000000013631462717511400255070ustar00rootroot00000000000000aiozipkin is Python 3.6+ module that adds distributed tracing capabilities from asyncio applications with zipkin (http://zipkin.io) server instrumentation. zipkin is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with aiozipkin report timing data to zipkin. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. upstream-ontologist-0.1.37/tests/readme_data/argparse/000077500000000000000000000000001462717511400230455ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/argparse/README.rst000066400000000000000000000411141462717511400245350ustar00rootroot00000000000000ConfigArgParse -------------- .. image:: https://img.shields.io/pypi/v/ConfigArgParse.svg?style=flat :alt: PyPI version :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://img.shields.io/pypi/pyversions/ConfigArgParse.svg :alt: Supported Python versions :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :alt: Travis CI build :target: https://travis-ci.org/bw2/ConfigArgParse Overview ~~~~~~~~ Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. Available on PyPI: http://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :target: https://travis-ci.org/bw2/ConfigArgParse Features ~~~~~~~~ - command-line, config file, env var, and default settings can now be defined, documented, and parsed in one go using a single API (if a value is specified in more than one way then: command line > environment variables > config file values > defaults) - config files can have .ini or .yaml style syntax (eg. key=value or key: value) - user can provide a config file via a normal-looking command line arg (eg. -c path/to/config.txt) rather than the argparse-style @config.txt - one or more default config file paths can be specified (eg. ['/etc/bla.conf', '~/.my_config'] ) - all argparse functionality is fully supported, so this module can serve as a drop-in replacement (verified by argparse unittests). - env vars and config file keys & syntax are automatically documented in the -h help message - new method :code:`print_values()` can report keys & values and where they were set (eg. command line, env var, config file, or default). - lite-weight (no 3rd-party library dependencies except (optionally) PyYAML) - extensible (:code:`ConfigFileParser` can be subclassed to define a new config file format) - unittested by running the unittests that came with argparse but on configargparse, and using tox to test with Python 2.7 and Python 3+ Example ~~~~~~~ *config_test.py*: Script that defines 4 options and a positional arg and then parses and prints the values. Also, it prints out the help message as well as the string produced by :code:`format_values()` to show what they look like. .. code:: py import configargparse p = configargparse.ArgParser(default_config_files=['/etc/app/conf.d/*.conf', '~/.my_settings']) p.add('-c', '--my-config', required=True, is_config_file=True, help='config file path') p.add('--genome', required=True, help='path to genome file') # this option can be set in a config file because it starts with '--' p.add('-v', help='verbose', action='store_true') p.add('-d', '--dbsnp', help='known variants .vcf', env_var='DBSNP_PATH') # this option can be set in a config file because it starts with '--' p.add('vcf', nargs='+', help='variant file(s)') options = p.parse_args() print(options) print("----------") print(p.format_help()) print("----------") print(p.format_values()) # useful for logging where different settings came from *config.txt:* Since the script above set the config file as required=True, lets create a config file to give it: .. code:: py # settings for config_test.py genome = HCMV # cytomegalovirus genome dbsnp = /data/dbsnp/variants.vcf *command line:* Now run the script and pass it the config file: .. code:: bash DBSNP_PATH=/data/dbsnp/variants_v2.vcf python config_test.py --my-config config.txt f1.vcf f2.vcf *output:* Here is the result: .. code:: bash Namespace(dbsnp='/data/dbsnp/variants_v2.vcf', genome='HCMV', my_config='config.txt', v=False, vcf=['f1.vcf', 'f2.vcf']) ---------- usage: config_test.py [-h] -c MY_CONFIG --genome GENOME [-v] [-d DBSNP] vcf [vcf ...] Args that start with '--' (eg. --genome) can also be set in a config file (/etc/app/conf.d/*.conf or ~/.my_settings or specified via -c). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at https://goo.gl/R74nmi). If an arg is specified in more than one place, then commandline values override environment variables which override config file values which override defaults. positional arguments: vcf variant file(s) optional arguments: -h, --help show this help message and exit -c MY_CONFIG, --my-config MY_CONFIG config file path --genome GENOME path to genome file -v verbose -d DBSNP, --dbsnp DBSNP known variants .vcf [env var: DBSNP_PATH] ---------- Command Line Args: --my-config config.txt f1.vcf f2.vcf Environment Variables: DBSNP_PATH: /data/dbsnp/variants_v2.vcf Config File (config.txt): genome: HCMV Special Values ~~~~~~~~~~~~~~ Under the hood, configargparse handles environment variables and config file values by converting them to their corresponding command line arg. For example, "key = value" will be processed as if "--key value" was specified on the command line. Also, the following special values (whether in a config file or an environment variable) are handled in a special way to support booleans and lists: - :code:`key = true` is handled as if "--key" was specified on the command line. In your python code this key must be defined as a boolean flag (eg. action="store_true" or similar). - :code:`key = [value1, value2, ...]` is handled as if "--key value1 --key value2" etc. was specified on the command line. In your python code this key must be defined as a list (eg. action="append"). Config File Syntax ~~~~~~~~~~~~~~~~~~ Only command line args that have a long version (eg. one that starts with '--') can be set in a config file. For example, "--color" can be set by putting "color=green" in a config file. The config file syntax depends on the constuctor arg: :code:`config_file_parser_class` which can be set to one of the provided classes: :code:`DefaultConfigFileParser`, :code:`YAMLConfigFileParser`, :code:`ConfigparserConfigFileParser` or to your own subclass of the :code:`ConfigFileParser` abstract class. *DefaultConfigFileParser* - the full range of valid syntax is: .. code:: yaml # this is a comment ; this is also a comment (.ini style) --- # lines that start with --- are ignored (yaml style) ------------------- [section] # .ini-style section names are treated as comments # how to specify a key-value pair (all of these are equivalent): name value # key is case sensitive: "Name" isn't "name" name = value # (.ini style) (white space is ignored, so name = value same as name=value) name: value # (yaml style) --name value # (argparse style) # how to set a flag arg (eg. arg which has action="store_true") --name name name = True # "True" and "true" are the same # how to specify a list arg (eg. arg which has action="append") fruit = [apple, orange, lemon] indexes = [1, 12, 35 , 40] *YAMLConfigFileParser* - allows a subset of YAML syntax (http://goo.gl/VgT2DU) .. code:: yaml # a comment name1: value name2: true # "True" and "true" are the same fruit: [apple, orange, lemon] indexes: [1, 12, 35, 40] *ConfigparserConfigFileParser* - allows a subset of python's configparser module syntax (https://docs.python.org/3.7/library/configparser.html). In particular the following configparser options are set: .. code:: py config = configparser.ArgParser( delimiters=("=",":"), allow_no_value=False, comment_prefixes=("#",";"), inline_comment_prefixes=("#",";"), strict=True, empty_lines_in_values=False, ) Once configparser parses the config file all section names are removed, thus all keys must have unique names regardless of which INI section they are defined under. Also, any keys which have python list syntax are converted to lists by evaluating them as python code using ast.literal_eval (https://docs.python.org/3/library/ast.html#ast.literal_eval). To facilitate this all multi-line values are converted to single-line values. Thus multi-line string values will have all new-lines converted to spaces. Note, since key-value pairs that have python dictionary syntax are saved as single-line strings, even if formatted across multiple lines in the config file, dictionaries can be read in and converted to valid python dictionaries with PyYAML's safe_load. Example given below: .. code:: py # inside your config file (e.g. config.ini) [section1] # INI sections treated as comments system1_settings: { # start of multi-line dictionary 'a':True, 'b':[2, 4, 8, 16], 'c':{'start':0, 'stop':1000}, 'd':'experiment 32 testing simulation with parameter a on' } # end of multi-line dictionary value ....... # in your configargparse setup import configargparse import yaml parser = configargparse.ArgParser( config_file_parser_class=configargparse.ConfigparserConfigFileParser ) parser.add_argument('--system1_settings', type=yaml.safe_load) args = parser.parse_args() # now args.system1 is a valid python dict ArgParser Singletons ~~~~~~~~~~~~~~~~~~~~~~~~~ To make it easier to configure different modules in an application, configargparse provides globally-available ArgumentParser instances via configargparse.get_argument_parser('name') (similar to logging.getLogger('name')). Here is an example of an application with a utils module that also defines and retrieves its own command-line args. *main.py* .. code:: py import configargparse import utils p = configargparse.get_argument_parser() p.add_argument("-x", help="Main module setting") p.add_argument("--m-setting", help="Main module setting") options = p.parse_known_args() # using p.parse_args() here may raise errors. *utils.py* .. code:: py import configargparse p = configargparse.get_argument_parser() p.add_argument("--utils-setting", help="Config-file-settable option for utils") if __name__ == "__main__": options = p.parse_known_args() Help Formatters ~~~~~~~~~~~~~~~ :code:`ArgumentDefaultsRawHelpFormatter` is a new HelpFormatter that both adds default values AND disables line-wrapping. It can be passed to the constructor: :code:`ArgParser(.., formatter_class=ArgumentDefaultsRawHelpFormatter)` Aliases ~~~~~~~ The configargparse.ArgumentParser API inherits its class and method names from argparse and also provides the following shorter names for convenience: - p = configargparse.get_arg_parser() # get global singleton instance - p = configargparse.get_parser() - p = configargparse.ArgParser() # create a new instance - p = configargparse.Parser() - p.add_arg(..) - p.add(..) - options = p.parse(..) HelpFormatters: - RawFormatter = RawDescriptionHelpFormatter - DefaultsFormatter = ArgumentDefaultsHelpFormatter - DefaultsRawFormatter = ArgumentDefaultsRawHelpFormatter Design Notes ~~~~~~~~~~~~ Unit tests: tests/test_configargparse.py contains custom unittests for features specific to this module (such as config file and env-var support), as well as a hook to load and run argparse unittests (see the built-in test.test_argparse module) but on configargparse in place of argparse. This ensures that configargparse will work as a drop in replacement for argparse in all usecases. Previously existing modules (PyPI search keywords: config argparse): - argparse (built-in module Python v2.7+) - Good: - fully featured command line parsing - can read args from files using an easy to understand mechanism - Bad: - syntax for specifying config file path is unusual (eg. @file.txt)and not described in the user help message. - default config file syntax doesn't support comments and is unintuitive (eg. --namevalue) - no support for environment variables - ConfArgParse v1.0.15 (https://pypi.python.org/pypi/ConfArgParse) - Good: - extends argparse with support for config files parsed by ConfigParser - clear documentation in README - Bad: - config file values are processed using ArgumentParser.set_defaults(..) which means "required" and "choices" are not handled as expected. For example, if you specify a required value in a config file, you still have to specify it again on the command line. - doesn't work with Python 3 yet - no unit tests, code not well documented - appsettings v0.5 (https://pypi.python.org/pypi/appsettings) - Good: - supports config file (yaml format) and env_var parsing - supports config-file-only setting for specifying lists and dicts - Bad: - passes in config file and env settings via parse_args namespace param - tests not finished and don't work with Python 3 (import StringIO) - argparse_config v0.5.1 (https://pypi.python.org/pypi/argparse_config) - Good: - similar features to ConfArgParse v1.0.15 - Bad: - doesn't work with Python 3 (error during pip install) - yconf v0.3.2 - (https://pypi.python.org/pypi/yconf) - features and interface not that great - hieropt v0.3 - (https://pypi.python.org/pypi/hieropt) - doesn't appear to be maintained, couldn't find documentation - configurati v0.2.3 - (https://pypi.python.org/pypi/configurati) - Good: - JSON, YAML, or Python configuration files - handles rich data structures such as dictionaries - can group configuration names into sections (like .ini files) - Bad: - doesn't work with Python 3 - 2+ years since last release to PyPI - apparently unmaintained Design choices: 1. all options must be settable via command line. Having options that can only be set using config files or env. vars adds complexity to the API, and is not a useful enough feature since the developer can split up options into sections and call a section "config file keys", with command line args that are just "--" plus the config key. 2. config file and env. var settings should be processed by appending them to the command line (another benefit of #1). This is an easy-to-implement solution and implicitly takes care of checking that all "required" args are provied, etc., plus the behavior should be easy for users to understand. 3. configargparse shouldn't override argparse's convert_arg_line_to_args method so that all argparse unit tests can be run on configargparse. 4. in terms of what to allow for config file keys, the "dest" value of an option can't serve as a valid config key because many options can have the same dest. Instead, since multiple options can't use the same long arg (eg. "--long-arg-x"), let the config key be either "--long-arg-x" or "long-arg-x". This means the developer can allow only a subset of the command-line args to be specified via config file (eg. short args like -x would be excluded). Also, that way config keys are automatically documented whenever the command line args are documented in the help message. 5. don't force users to put config file settings in the right .ini [sections]. This doesn't have a clear benefit since all options are command-line settable, and so have a globally unique key anyway. Enforcing sections just makes things harder for the user and adds complexity to the implementation. 6. if necessary, config-file-only args can be added later by implementing a separate add method and using the namespace arg as in appsettings_v0.5 Relevant sites: - http://stackoverflow.com/questions/6133517/parse-config-file-environment-and-command-line-arguments-to-get-a-single-coll - http://tricksntweaks.blogspot.com/2013_05_01_archive.html - http://www.youtube.com/watch?v=vvCwqHgZJc8#t=35 .. |Travis CI Status for bw2/ConfigArgParse| image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master Versioning ~~~~~~~~~~ This software follows `Semantic Versioning`_ .. _Semantic Versioning: http://semver.org/ upstream-ontologist-0.1.37/tests/readme_data/argparse/description000066400000000000000000000006011462717511400253100ustar00rootroot00000000000000Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. upstream-ontologist-0.1.37/tests/readme_data/bitlbee/000077500000000000000000000000001462717511400226475ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/bitlbee/README.md000066400000000000000000000034761462717511400241400ustar00rootroot00000000000000# BitlBee ![](https://www.bitlbee.org/style/logo.png) [![Build Status](https://travis-ci.org/bitlbee/bitlbee.svg)](https://travis-ci.org/bitlbee/bitlbee) [![Coverity Scan Build Status](https://scan.coverity.com/projects/4028/badge.svg)](https://scan.coverity.com/projects/4028) An IRC to other chat networks gateway Main website: https://www.bitlbee.org/ Bug tracker: https://bugs.bitlbee.org/ Wiki: https://wiki.bitlbee.org/ License: GPLv2 ## Installation BitlBee is available in the package managers of most distros. For debian/ubuntu/etc you may use the nightly APT repository: https://code.bitlbee.org/debian/ You can also use a public server (such as `im.bitlbee.org`) instead of installing it: https://www.bitlbee.org/main.php/servers.html ## Compiling If you wish to compile it yourself, ensure you have the following packages and their headers: * glib 2.32 or newer (not to be confused with glibc) * gnutls * python 2 or 3 (for the user guide) Some optional features have additional dependencies, such as libpurple, libotr, libevent, etc. NSS and OpenSSL are also available but not as well supported as GnuTLS. Once you have the dependencies, building should be a matter of: ./configure make sudo make install ## Development tips * To enable debug symbols: `./configure --debug=1` * To get some additional debug output for some protocols: `BITLBEE_DEBUG=1 ./bitlbee -Dnv` * Use github pull requests against the 'develop' branch to submit patches. * The coding style based on K&R with tabs and 120 columns. See `./doc/uncrustify.cfg` for the parameters used to reformat the code. * Mappings of bzr revisions to git commits (for historical purposes) are available in `./doc/git-bzr-rev-map` * See also `./doc/README` and `./doc/HACKING` ## Help? Join **#BitlBee** on OFTC (**irc.oftc.net**) (OFTC, *not* freenode!) upstream-ontologist-0.1.37/tests/readme_data/bitlbee/description000066400000000000000000000000461462717511400251150ustar00rootroot00000000000000An IRC to other chat networks gateway upstream-ontologist-0.1.37/tests/readme_data/bup/000077500000000000000000000000001462717511400220275ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/bup/README.md000066400000000000000000000617601462717511400233200ustar00rootroot00000000000000bup: It backs things up ======================= bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. Reasons bup is awesome ---------------------- bup has a few advantages over other backup software: - It uses a rolling checksum algorithm (similar to rsync) to split large files into chunks. The most useful result of this is you can backup huge virtual machine (VM) disk images, databases, and XML files incrementally, even though they're typically all in one huge file, and not use tons of disk space for multiple versions. - It uses the packfile format from git (the open source version control system), so you can access the stored data even if you don't like bup's user interface. - Unlike git, it writes packfiles *directly* (instead of having a separate garbage collection / repacking stage) so it's fast even with gratuitously huge amounts of data. bup's improved index formats also allow you to track far more filenames than git (millions) and keep track of far more objects (hundreds or thousands of gigabytes). - Data is "automagically" shared between incremental backups without having to know which backup is based on which other one - even if the backups are made from two different computers that don't even know about each other. You just tell bup to back stuff up, and it saves only the minimum amount of data needed. - You can back up directly to a remote bup server, without needing tons of temporary disk space on the computer being backed up. And if your backup is interrupted halfway through, the next run will pick up where you left off. And it's easy to set up a bup server: just install bup on any machine where you have ssh access. - Bup can use "par2" redundancy to recover corrupted backups even if your disk has undetected bad sectors. - Even when a backup is incremental, you don't have to worry about restoring the full backup, then each of the incrementals in turn; an incremental backup *acts* as if it's a full backup, it just takes less disk space. - You can mount your bup repository as a FUSE filesystem and access the content that way, and even export it over Samba. - It's written in python (with some C parts to make it faster) so it's easy for you to extend and maintain. Reasons you might want to avoid bup ----------------------------------- - It's not remotely as well tested as something like tar, so it's more likely to eat your data. It's also missing some probably-critical features, though fewer than it used to be. - It requires python 3.7 or newer (or 2.7 for a bit longer), a C compiler, and an installed git version >= 1.5.6. It also requires par2 if you want fsck to be able to generate the information needed to recover from some types of corruption. While python 2.7 is still supported, please make plans to upgrade. Python 2 upstream support ended on 2020-01-01, and we plan to drop support soon too. - It currently only works on Linux, FreeBSD, NetBSD, OS X >= 10.4, Solaris, or Windows (with Cygwin, and WSL). Patches to support other platforms are welcome. - Until resolved, a [glibc bug](https://sourceware.org/bugzilla/show_bug.cgi?id=26034) might cause bup to crash on startup for some (unusual) command line argument values, when bup is configured to use Python 3. - Any items in "Things that are stupid" below. Notable changes introduced by a release ======================================= - Changes in 0.32 as compared to 0.31 - Changes in 0.31 as compared to 0.30.1 - Changes in 0.30.1 as compared to 0.30 - Changes in 0.30 as compared to 0.29.3 - Changes in 0.29.3 as compared to 0.29.2 - Changes in 0.29.2 as compared to 0.29.1 - Changes in 0.29.1 as compared to 0.29 - Changes in 0.29 as compared to 0.28.1 - Changes in 0.28.1 as compared to 0.28 - Changes in 0.28 as compared to 0.27.1 - Changes in 0.27.1 as compared to 0.27 Test status =========== | branch | Debian | FreeBSD | macOS | |--------|------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------| | master | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.30.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.29.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | Getting started =============== From source ----------- - Check out the bup source code using git: ```sh git clone https://github.com/bup/bup ``` - This will leave you on the master branch, which is perfect if you would like to help with development, but if you'd just like to use bup, please check out the latest stable release like this: ```sh git checkout 0.32 ``` You can see the latest stable release here: https://github.com/bup/bup/releases. - Install the required python libraries (including the development libraries). On very recent Debian/Ubuntu versions, this may be sufficient (run as root): ```sh apt-get build-dep bup ``` Otherwise try this: ```sh apt-get install python3.7-dev python3-fuse apt-get install python3-pyxattr python3-pytest apt-get install python3-distutils apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get isntall python3-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python3-tornado # optional (bup web) ``` Or, if you can't yet migrate to Python 3 (please try to soon): ```sh apt-get install python2.7-dev python-fuse apt-get install python-pyxattr python-pytest apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get isntall python-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python-tornado # optional (bup web) ``` On CentOS (for CentOS 6, at least), this should be sufficient (run as root): ```sh yum groupinstall "Development Tools" yum install python2 python2-devel libacl-devel pylibacl yum install fuse-python pyxattr yum install perl-Time-HiRes yum install readline-devel # optional (bup ftp) yum install python-tornado # optional (bup web) ``` In addition to the default CentOS repositories, you may need to add RPMForge (for fuse-python) and EPEL (for pyxattr). On Cygwin, install python, make, rsync, and gcc4. If you would like to use the optional bup web server on systems without a tornado package, you may want to try this: ```sh pip install tornado ``` - Build the python module and symlinks: ```sh make ``` - Run the tests: ```sh make long-check ``` or if you're in a bit more of a hurry: ```sh make check ``` If you have the Python xdist module installed, then you can probably run the tests faster by adding the make -j option (see ./HACKING for additional information): ```sh make -j check ``` The tests should pass. If they don't pass for you, stop here and send an email to bup-list@googlegroups.com. Though if there are symbolic links along the current working directory path, the tests may fail. Running something like this before "make test" should sidestep the problem: ```sh cd "$(pwd -P)" ``` - You can install bup via "make install", and override the default destination with DESTDIR and PREFIX. Files are normally installed to "$DESTDIR/$PREFIX" where DESTDIR is empty by default, and PREFIX is set to /usr/local. So if you wanted to install bup to /opt/bup, you might do something like this: ```sh make install DESTDIR=/opt/bup PREFIX='' ``` - The Python executable that bup will use is chosen by ./configure, which will search for a reasonable version unless PYTHON is set in the environment, in which case, bup will use that path. You can see which Python executable was chosen by looking at the configure output, or examining cmd/python-cmd.sh, and you can change the selection by re-running ./configure. From binary packages -------------------- Binary packages of bup are known to be built for the following OSes: - Debian: http://packages.debian.org/search?searchon=names&keywords=bup - Ubuntu: http://packages.ubuntu.com/search?searchon=names&keywords=bup - pkgsrc (NetBSD, Dragonfly, and others) http://pkgsrc.se/sysutils/bup http://cvsweb.netbsd.org/bsdweb.cgi/pkgsrc/sysutils/bup/ - Arch Linux: https://www.archlinux.org/packages/?sort=&q=bup - Fedora: https://apps.fedoraproject.org/packages/bup - macOS (Homebrew): https://formulae.brew.sh/formula/bup Using bup --------- - Get help for any bup command: ```sh bup help bup help init bup help index bup help save bup help restore ... ``` - Initialize the default BUP_DIR (~/.bup -- you can choose another by either specifying `bup -d DIR ...` or setting the `BUP_DIR` environment variable for a command): ```sh bup init ``` - Make a local backup (-v or -vv will increase the verbosity): ```sh bup index /etc bup save -n local-etc /etc ``` - Restore a local backup to ./dest: ```sh bup restore -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another backup (which should be mostly identical to the last one; notice that you don't have to *specify* that this backup is incremental, it just saves space automatically): ```sh bup index /etc bup save -n local-etc /etc ``` - Look how little extra space your second backup used (on top of the first): ```sh du -s ~/.bup ``` - Get a list of your previous backups: ```sh bup ls local-etc ``` - Restore your first backup again: ```sh bup restore -C ./dest-2 local-etc/2013-11-23-11195/etc ``` - Make a backup to a remote server which must already have the 'bup' command somewhere in its PATH (see /etc/profile, etc/environment, ~/.profile, or ~/.bashrc), and be accessible via ssh. Make sure to replace SERVERNAME with the actual hostname of your server: ```sh bup init -r SERVERNAME:path/to/remote-bup-dir bup index /etc bup save -r SERVERNAME:path/to/remote-bup-dir -n local-etc /etc ``` - Make a remote backup to ~/.bup on SERVER: ```sh bup index /etc bup save -r SERVER: -n local-etc /etc ``` - See what saves are available in ~/.bup on SERVER: ```sh bup ls -r SERVER: ``` - Restore the remote backup to ./dest: ```sh bup restore -r SERVER: -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Defend your backups from death rays (OK fine, more likely from the occasional bad disk block). This writes parity information (currently via par2) for all of the existing data so that bup may be able to recover from some amount of repository corruption: ```sh bup fsck -g ``` - Use split/join instead of index/save/restore. Try making a local backup using tar: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Try restoring the tarball: ```sh bup join local-etc | tar -tf - ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another tar backup: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Look at how little extra space your second backup used on top of the first: ```sh du -s ~/.bup ``` - Restore the first tar backup again (the ~1 is git notation for "one older than the most recent"): ```sh bup join local-etc~1 | tar -tf - ``` - Get a list of your previous split-based backups: ```sh GIT_DIR=~/.bup git log local-etc ``` - Save a tar archive to a remote server (without tar -z to facilitate deduplication): ```sh tar -cvf - /etc | bup split -r SERVERNAME: -n local-etc -vv ``` - Restore the archive: ```sh bup join -r SERVERNAME: local-etc | tar -tf - ``` That's all there is to it! Notes on FreeBSD ---------------- - FreeBSD's default 'make' command doesn't like bup's Makefile. In order to compile the code, run tests and install bup, you need to install GNU Make from the port named 'gmake' and use its executable instead in the commands seen above. (i.e. 'gmake test' runs bup's test suite) - Python's development headers are automatically installed with the 'python' port so there's no need to install them separately. - To use the 'bup fuse' command, you need to install the fuse kernel module from the 'fusefs-kmod' port in the 'sysutils' section and the libraries from the port named 'py-fusefs' in the 'devel' section. - The 'par2' command can be found in the port named 'par2cmdline'. - In order to compile the documentation, you need pandoc which can be found in the port named 'hs-pandoc' in the 'textproc' section. Notes on NetBSD/pkgsrc ---------------------- - See pkgsrc/sysutils/bup, which should be the most recent stable release and includes man pages. It also has a reasonable set of dependencies (git, par2, py-fuse-bindings). - The "fuse-python" package referred to is hard to locate, and is a separate tarball for the python language binding distributed by the fuse project on sourceforge. It is available as pkgsrc/filesystems/py-fuse-bindings and on NetBSD 5, "bup fuse" works with it. - "bup fuse" presents every directory/file as inode 0. The directory traversal code ("fts") in NetBSD's libc will interpret this as a cycle and error out, so "ls -R" and "find" will not work. - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. Notes on Cygwin --------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. - In test/ext/test-misc, two tests have been disabled. These tests check to see that repeated saves produce identical trees and that an intervening index doesn't change the SHA1. Apparently Cygwin has some unusual behaviors with respect to access times (that probably warrant further investigation). Possibly related: http://cygwin.com/ml/cygwin/2007-06/msg00436.html Notes on OS X ------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. How it works ============ Basic storage: -------------- bup stores its data in a git-formatted repository. Unfortunately, git itself doesn't actually behave very well for bup's use case (huge numbers of files, files with huge sizes, retaining file permissions/ownership are important), so we mostly don't use git's *code* except for a few helper programs. For example, bup has its own git packfile writer written in python. Basically, 'bup split' reads the data on stdin (or from files specified on the command line), breaks it into chunks using a rolling checksum (similar to rsync), and saves those chunks into a new git packfile. There is at least one git packfile per backup. When deciding whether to write a particular chunk into the new packfile, bup first checks all the other packfiles that exist to see if they already have that chunk. If they do, the chunk is skipped. git packs come in two parts: the pack itself (*.pack) and the index (*.idx). The index is pretty small, and contains a list of all the objects in the pack. Thus, when generating a remote backup, we don't have to have a copy of the packfiles from the remote server: the local end just downloads a copy of the server's *index* files, and compares objects against those when generating the new pack, which it sends directly to the server. The "-n" option to 'bup split' and 'bup save' is the name of the backup you want to create, but it's actually implemented as a git branch. So you can do cute things like checkout a particular branch using git, and receive a bunch of chunk files corresponding to the file you split. If you use '-b' or '-t' or '-c' instead of '-n', bup split will output a list of blobs, a tree containing that list of blobs, or a commit containing that tree, respectively, to stdout. You can use this to construct your own scripts that do something with those values. The bup index: -------------- 'bup index' walks through your filesystem and updates a file (whose name is, by default, ~/.bup/bupindex) to contain the name, attributes, and an optional git SHA1 (blob id) of each file and directory. 'bup save' basically just runs the equivalent of 'bup split' a whole bunch of times, once per file in the index, and assembles a git tree that contains all the resulting objects. Among other things, that makes 'git diff' much more useful (compared to splitting a tarball, which is essentially a big binary blob). However, since bup splits large files into smaller chunks, the resulting tree structure doesn't *exactly* correspond to what git itself would have stored. Also, the tree format used by 'bup save' will probably change in the future to support storing file ownership, more complex file permissions, and so on. If a file has previously been written by 'bup save', then its git blob/tree id is stored in the index. This lets 'bup save' avoid reading that file to produce future incremental backups, which means it can go *very* fast unless a lot of files have changed. Things that are stupid for now but which we'll fix later ======================================================== Help with any of these problems, or others, is very welcome. Join the mailing list (see below) if you'd like to help. - 'bup save' and 'bup restore' have immature metadata support. On the plus side, they actually do have support now, but it's new, and not remotely as well tested as tar/rsync/whatever's. However, you have to start somewhere, and as of 0.25, we think it's ready for more general use. Please let us know if you have any trouble. Also, if any strip or graft-style options are specified to 'bup save', then no metadata will be written for the root directory. That's obviously less than ideal. - bup is overly optimistic about mmap. Right now bup just assumes that it can mmap as large a block as it likes, and that mmap will never fail. Yeah, right... If nothing else, this has failed on 32-bit architectures (and 31-bit is even worse -- looking at you, s390). To fix this, we might just implement a FakeMmap[1] class that uses normal file IO and handles all of the mmap methods[2] that bup actually calls. Then we'd swap in one of those whenever mmap fails. This would also require implementing some of the methods needed to support "[]" array access, probably at a minimum __getitem__, __setitem__, and __setslice__ [3]. [1] http://comments.gmane.org/gmane.comp.sysutils.backup.bup/613 [2] http://docs.python.org/2/library/mmap.html [3] http://docs.python.org/2/reference/datamodel.html#emulating-container-types - 'bup index' is slower than it should be. It's still rather fast: it can iterate through all the filenames on my 600,000 file filesystem in a few seconds. But it still needs to rewrite the entire index file just to add a single filename, which is pretty nasty; it should just leave the new files in a second "extra index" file or something. - bup could use inotify for *really* efficient incremental backups. You could even have your system doing "continuous" backups: whenever a file changes, we immediately send an image of it to the server. We could give the continuous-backup process a really low CPU and I/O priority so you wouldn't even know it was running. - bup only has experimental support for pruning old backups. While you should now be able to drop old saves and branches with `bup rm`, and reclaim the space occupied by data that's no longer needed by other backups with `bup gc`, these commands are experimental, and should be handled with great care. See the man pages for more information. Unless you want to help test the new commands, one possible workaround is to just start a new BUP_DIR occasionally, i.e. bup-2013, bup-2014... - bup has never been tested on anything but Linux, FreeBSD, NetBSD, OS X, and Windows+Cygwin. There's nothing that makes it *inherently* non-portable, though, so that's mostly a matter of someone putting in some effort. (For a "native" Windows port, the most annoying thing is the absence of ssh in a default Windows installation.) - bup needs better documentation. According to an article about bup in Linux Weekly News (https://lwn.net/Articles/380983/), "it's a bit short on examples and a user guide would be nice." Documentation is the sort of thing that will never be great unless someone from outside contributes it (since the developers can never remember which parts are hard to understand). - bup is "relatively speedy" and has "pretty good" compression. ...according to the same LWN article. Clearly neither of those is good enough. We should have awe-inspiring speed and crazy-good compression. Must work on that. Writing more parts in C might help with the speed. - bup has no GUI. Actually, that's not stupid, but you might consider it a limitation. See the ["Related Projects"](https://bup.github.io/) list for some possible options. More Documentation ================== bup has an extensive set of man pages. Try using 'bup help' to get started, or use 'bup help SUBCOMMAND' for any bup subcommand (like split, join, index, save, etc.) to get details on that command. For further technical details, please see ./DESIGN. How you can help ================ bup is a work in progress and there are many ways it can still be improved. If you'd like to contribute patches, ideas, or bug reports, please join the bup mailing list. You can find the mailing list archives here: http://groups.google.com/group/bup-list and you can subscribe by sending a message to: bup-list+subscribe@googlegroups.com Please see ./HACKING for additional information, i.e. how to submit patches (hint - no pull requests), how we handle branches, etc. Have fun, Avery upstream-ontologist-0.1.37/tests/readme_data/bup/description000066400000000000000000000010261462717511400242740ustar00rootroot00000000000000bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. upstream-ontologist-0.1.37/tests/readme_data/cbor2/000077500000000000000000000000001462717511400222505ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/cbor2/README.rst000066400000000000000000000073231462717511400237440ustar00rootroot00000000000000.. image:: https://travis-ci.com/agronholm/cbor2.svg?branch=master :target: https://travis-ci.com/agronholm/cbor2 :alt: Build Status .. image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master :target: https://coveralls.io/github/agronholm/cbor2?branch=master :alt: Code Coverage .. image:: https://readthedocs.org/projects/cbor2/badge/?version=latest :target: https://cbor2.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status About ===== This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) (`RFC 7049`_) serialization format. `Read the docs `_ to learn more. It is implemented in pure python with an optional C backend. On PyPy, cbor2 runs with almost identical performance to the C backend. .. _RFC 7049: https://tools.ietf.org/html/rfc7049 Features -------- * Simple api like ``json`` or ``pickle`` modules. * Support many `CBOR tags`_ with `stdlib objects`_. * Generic tag decoding. * `Shared value`_ references including cyclic references. * Optional C module backend tested on big- and little-endian architectures. * Extensible `tagged value handling`_ using ``tag_hook`` and ``object_hook`` on decode and ``default`` on encode. * Command-line diagnostic tool, converting CBOR file or stream to JSON ``python -m cbor2.tool`` (This is a lossy conversion, for diagnostics only) * Thorough test suite. .. _CBOR tags: https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml .. _stdlib objects: https://cbor2.readthedocs.io/en/latest/usage.html#tag-support .. _Shared value: http://cbor.schmorp.de/value-sharing .. _tagged value handling: https://cbor2.readthedocs.io/en/latest/customizing.html#using-the-cbor-tags-for-custom-types Installation ============ :: pip install cbor2 Requirements ------------ * Python >= 3.6 (or `PyPy3`_ 3.6+) * C-extension: Any C compiler that can build Python extensions. Any modern libc with the exception of Glibc<2.9 .. _PyPy3: https://www.pypy.org/ Building the C-Extension ------------------------ To force building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=1``. To disable building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=0``. If this environment variable is unset, setup.py will default to auto detecting a compatible C library and attempt to compile the extension. Usage ===== `Basic Usage `_ Command-line Usage ================== ``python -m cbor2.tool`` converts CBOR data in raw binary or base64 encoding into a representation that allows printing as JSON. This is a lossy transformation as each datatype is converted into something that can be represented as a JSON value. Usage:: # Pass hexadecimal through xxd. $ echo a16568656c6c6f65776f726c64 | xxd -r -ps | python -m cbor2.tool --pretty { "hello": "world" } # Decode Base64 directly $ echo ggEC | python -m cbor2.tool --decode [1, 2] # Read from a file encoded in Base64 $ python -m cbor2.tool -d tests/examples.cbor.b64 {...} It can be used in a pipeline with json processing tools like `jq`_ to allow syntax coloring, field extraction and more. CBOR data items concatenated into a sequence can be decoded also:: $ echo ggECggMEggUG | python -m cbor2.tool -d --sequence [1, 2] [3, 4] [5, 6] Multiple files can also be sent to a single output file:: $ python -m cbor2.tool -o all_files.json file1.cbor file2.cbor ... fileN.cbor .. _jq: https://stedolan.github.io/jq/ Security ======== This library has not been tested against malicious input. In theory it should be as safe as JSON, since unlike ``pickle`` the decoder does not execute any code. upstream-ontologist-0.1.37/tests/readme_data/cbor2/description000066400000000000000000000004451462717511400245210ustar00rootroot00000000000000This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) (RFC 7049) serialization format. Read the docs to learn more. It is implemented in pure python with an optional C backend. On PyPy, cbor2 runs with almost identical performance to the C backend. upstream-ontologist-0.1.37/tests/readme_data/django-ical/000077500000000000000000000000001462717511400234115ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/django-ical/README.rst000066400000000000000000000032631462717511400251040ustar00rootroot00000000000000django-ical =========== |pypi| |docs| |build| |coverage| |jazzband| django-ical is a simple library/framework for creating `iCal `_ feeds based in Django's `syndication feed framework `_. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the `icalendar `_ library under the hood to generate iCalendar feeds. Documentation ------------- Documentation is hosted on Read the Docs: https://django-ical.readthedocs.io/en/latest/ .. |pypi| image:: https://img.shields.io/pypi/v/django-ical.svg :alt: PyPI :target: https://pypi.org/project/django-ical/ .. |docs| image:: https://readthedocs.org/projects/django-ical/badge/?version=latest :alt: Documentation Status :scale: 100% :target: http://django-ical.readthedocs.io/en/latest/?badge=latest .. |build| image:: https://github.com/jazzband/django-ical/workflows/Test/badge.svg :target: https://github.com/jazzband/django-ical/actions :alt: GitHub Actions .. |coverage| image:: https://codecov.io/gh/jazzband/django-ical/branch/master/graph/badge.svg :target: https://codecov.io/gh/jazzband/django-ical :alt: Coverage .. |jazzband| image:: https://jazzband.co/static/img/badge.svg :target: https://jazzband.co/ :alt: Jazzband upstream-ontologist-0.1.37/tests/readme_data/django-ical/description000066400000000000000000000011071462717511400256560ustar00rootroot00000000000000django-ical is a simple library/framework for creating iCal feeds based in Django's syndication feed framework. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the icalendar library under the hood to generate iCalendar feeds. upstream-ontologist-0.1.37/tests/readme_data/dulwich/000077500000000000000000000000001462717511400227005ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/dulwich/README.rst000066400000000000000000000056041462717511400243740ustar00rootroot00000000000000Dulwich ======= This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. **Main website**: **License**: Apache License, version 2 or GNU General Public License, version 2 or later. The project is named after the part of London that Mr. and Mrs. Git live in in the particular Monty Python sketch. Installation ------------ By default, Dulwich' setup.py will attempt to build and install the optional C extensions. The reason for this is that they significantly improve the performance since some low-level operations that are executed often are much slower in CPython. If you don't want to install the C bindings, specify the --pure argument to setup.py:: $ python setup.py --pure install or if you are installing from pip:: $ pip install dulwich --global-option="--pure" Note that you can also specify --global-option in a `requirements.txt `_ file, e.g. like this:: dulwich --global-option=--pure Getting started --------------- Dulwich comes with both a lower-level API and higher-level plumbing ("porcelain"). For example, to use the lower level API to access the commit message of the last commit:: >>> from dulwich.repo import Repo >>> r = Repo('.') >>> r.head() '57fbe010446356833a6ad1600059d80b1e731e15' >>> c = r[r.head()] >>> c >>> c.message 'Add note about encoding.\n' And to print it using porcelain:: >>> from dulwich import porcelain >>> porcelain.log('.', max_entries=1) -------------------------------------------------- commit: 57fbe010446356833a6ad1600059d80b1e731e15 Author: Jelmer Vernooij Date: Sat Apr 29 2017 23:57:34 +0000 Add note about encoding. Further documentation --------------------- The dulwich documentation can be found in docs/ and built by running ``make doc``. It can also be found `on the web `_. Help ---- There is a *#dulwich* IRC channel on the `Freenode `_, and `dulwich-announce `_ and `dulwich-discuss `_ mailing lists. Contributing ------------ For a full list of contributors, see the git logs or `AUTHORS `_. If you'd like to contribute to Dulwich, see the `CONTRIBUTING `_ file and `list of open issues `_. Supported versions of Python ---------------------------- At the moment, Dulwich supports (and is tested on) CPython 3.5 and later and Pypy. The latest release series to support Python 2.x was the 0.19 series. See the 0.19 branch in the Dulwich git repository. upstream-ontologist-0.1.37/tests/readme_data/dulwich/description000066400000000000000000000002471462717511400251510ustar00rootroot00000000000000This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. upstream-ontologist-0.1.37/tests/readme_data/empty/000077500000000000000000000000001462717511400223775ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/empty/README.md000066400000000000000000000000001462717511400236440ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/erbium/000077500000000000000000000000001462717511400225245ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/erbium/README.md000066400000000000000000000010101462717511400237730ustar00rootroot00000000000000Erbium ====== Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.1.37/tests/readme_data/erbium/description000066400000000000000000000007561462717511400250020ustar00rootroot00000000000000Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.1.37/tests/readme_data/isso/000077500000000000000000000000001462717511400222165ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/isso/README.md000066400000000000000000000006071462717511400235000ustar00rootroot00000000000000Isso – a commenting server similar to Disqus ============================================ Isso – *Ich schrei sonst* – is a lightweight commenting server written in Python and JavaScript. It aims to be a drop-in replacement for [Disqus](http://disqus.com). ![Isso in Action](http://posativ.org/~tmp/isso-sample.png) See [posativ.org/isso](http://posativ.org/isso/) for more details. upstream-ontologist-0.1.37/tests/readme_data/isso/description000066400000000000000000000002231462717511400244610ustar00rootroot00000000000000Isso – Ich schrei sonst – is a lightweight commenting server written in Python and JavaScript. It aims to be a drop-in replacement for Disqus. upstream-ontologist-0.1.37/tests/readme_data/jadx/000077500000000000000000000000001462717511400221675ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/jadx/README.md000066400000000000000000000123351462717511400234520ustar00rootroot00000000000000## JADX [![Build Status](https://travis-ci.org/skylot/jadx.png?branch=master)](https://travis-ci.org/skylot/jadx) [![Code Coverage](https://codecov.io/gh/skylot/jadx/branch/master/graph/badge.svg)](https://codecov.io/gh/skylot/jadx) [![SonarQube Bugs](https://sonarcloud.io/api/project_badges/measure?project=jadx&metric=bugs)](https://sonarcloud.io/dashboard?id=jadx) [![License](http://img.shields.io/:license-apache-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.html) [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) **jadx** - Dex to Java decompiler Command line and GUI tools for produce Java source code from Android Dex and Apk files ![jadx-gui screenshot](https://i.imgur.com/h917IBZ.png) ### Downloads - latest [unstable build: ![Download](https://api.bintray.com/packages/skylot/jadx/unstable/images/download.svg) ](https://bintray.com/skylot/jadx/unstable/_latestVersion#files) - release from [github: ![Latest release](https://img.shields.io/github/release/skylot/jadx.svg)](https://github.com/skylot/jadx/releases/latest) - release from [bintray: ![Download](https://api.bintray.com/packages/skylot/jadx/releases/images/download.svg) ](https://bintray.com/skylot/jadx/releases/_latestVersion#files) After download unpack zip file go to `bin` directory and run: - `jadx` - command line version - `jadx-gui` - graphical version On Windows run `.bat` files with double-click\ **Note:** ensure you have installed Java 8 64-bit version ### Related projects: - [PyJadx](https://github.com/romainthomas/pyjadx) - python binding for jadx by [@romainthomas](https://github.com/romainthomas) ### Building jadx from source JDK 8 or higher must be installed: git clone https://github.com/skylot/jadx.git cd jadx ./gradlew dist (on Windows, use `gradlew.bat` instead of `./gradlew`) Scripts for run jadx will be placed in `build/jadx/bin` and also packed to `build/jadx-.zip` ### macOS You can install using brew: brew install jadx ### Run Run **jadx** on itself: cd build/jadx/ bin/jadx -d out lib/jadx-core-*.jar # or bin/jadx-gui lib/jadx-core-*.jar ### Usage ``` jadx[-gui] [options] (.apk, .dex, .jar, .class, .smali, .zip, .aar, .arsc) options: -d, --output-dir - output directory -ds, --output-dir-src - output directory for sources -dr, --output-dir-res - output directory for resources -j, --threads-count - processing threads count -r, --no-res - do not decode resources -s, --no-src - do not decompile source code --single-class - decompile a single class --output-format - can be 'java' or 'json' (default: java) -e, --export-gradle - save as android gradle project --show-bad-code - show inconsistent code (incorrectly decompiled) --no-imports - disable use of imports, always write entire package name --no-debug-info - disable debug info --no-inline-anonymous - disable anonymous classes inline --no-replace-consts - don't replace constant value with matching constant field --escape-unicode - escape non latin characters in strings (with \u) --respect-bytecode-access-modifiers - don't change original access modifiers --deobf - activate deobfuscation --deobf-min - min length of name, renamed if shorter (default: 3) --deobf-max - max length of name, renamed if longer (default: 64) --deobf-rewrite-cfg - force to save deobfuscation map --deobf-use-sourcename - use source file name as class name alias --rename-flags - what to rename, comma-separated, 'case' for system case sensitivity, 'valid' for java identifiers, 'printable' characters, 'none' or 'all' --fs-case-sensitive - treat filesystem as case sensitive, false by default --cfg - save methods control flow graph to dot file --raw-cfg - save methods control flow graph (use raw instructions) -f, --fallback - make simple dump (using goto instead of 'if', 'for', etc) -v, --verbose - verbose output --version - print jadx version -h, --help - print this help Example: jadx -d out classes.dex jadx --rename-flags "none" classes.dex jadx --rename-flags "valid,printable" classes.dex ``` These options also worked on jadx-gui running from command line and override options from preferences dialog ### Troubleshooting ##### Out of memory error: - Reduce processing threads count (`-j` option) - Increase maximum java heap size: * command line (example for linux): `JAVA_OPTS="-Xmx4G" jadx -j 1 some.apk` * edit 'jadx' script (jadx.bat on Windows) and setup bigger heap size: `DEFAULT_JVM_OPTS="-Xmx2500M"` --------------------------------------- *Licensed under the Apache 2.0 License* *Copyright 2018 by Skylot* upstream-ontologist-0.1.37/tests/readme_data/jadx/description000066400000000000000000000001271462717511400244350ustar00rootroot00000000000000Command line and GUI tools for produce Java source code from Android Dex and Apk files upstream-ontologist-0.1.37/tests/readme_data/jupyter-client/000077500000000000000000000000001462717511400242175ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/jupyter-client/README.md000066400000000000000000000044341462717511400255030ustar00rootroot00000000000000# Jupyter Client [![Build Status](https://github.com/jupyter/jupyter_client/workflows/CI/badge.svg)](https://github.com/jupyter/jupyter_client/actions) [![Code Health](https://landscape.io/github/jupyter/jupyter_client/master/landscape.svg?style=flat)](https://landscape.io/github/jupyter/jupyter_client/master) `jupyter_client` contains the reference implementation of the [Jupyter protocol][]. It also provides client and kernel management APIs for working with kernels. It also provides the `jupyter kernelspec` entrypoint for installing kernelspecs for use with Jupyter frontends. [Jupyter protocol]: https://jupyter-client.readthedocs.io/en/latest/messaging.html # Development Setup The [Jupyter Contributor Guides](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience. ## Coding You'll need Python and `pip` on the search path. Clone the Jupyter Client git repository to your computer, for example in `/my/project/jupyter_client`. Now create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) and download the dependencies of code and test suite by executing: cd /my/projects/jupyter_client/ pip install -e .[test] py.test The last command runs the test suite to verify the setup. During development, you can pass filenames to `py.test`, and it will execute only those tests. ## Documentation The documentation of Jupyter Client is generated from the files in `docs/` using Sphinx. Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://jupyter.readthedocs.io/en/latest/contributing/docs-contributions/index.html). You'll also need the `make` command. For a minimal Sphinx installation to process the Jupyter Client docs, execute: pip install ipykernel sphinx sphinx_rtd_theme The following commands build the documentation in HTML format and check for broken links: cd /my/projects/jupyter_client/docs/ make html linkcheck Point your browser to the following URL to access the generated documentation: _file:///my/projects/jupyter\_client/docs/\_build/html/index.html_ upstream-ontologist-0.1.37/tests/readme_data/jupyter-client/description000066400000000000000000000004121462717511400264620ustar00rootroot00000000000000jupyter_client contains the reference implementation of the Jupyter protocol. It also provides client and kernel management APIs for working with kernels. It also provides the jupyter kernelspec entrypoint for installing kernelspecs for use with Jupyter frontends. upstream-ontologist-0.1.37/tests/readme_data/libtrace/000077500000000000000000000000001462717511400230265ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/libtrace/README000066400000000000000000000032431462717511400237100ustar00rootroot00000000000000libtrace 4.0.7 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install libtrace. This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. Further information about libtrace, see http://research.wand.net.nz/software/libtrace.php Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/LibtraceTeam/libtrace It is licensed under the GNU Lesser General Public License (GPL) version 3. Please see the included files COPYING and COPYING.LESSER for details of this license. A detailed ChangeLog can be found on the libtrace wiki: https://github.com/LibtraceTeam/libtrace/wiki/ChangeLog Documentation, usage instructions and a detailed tutorial can also found on the libtrace wiki. For further information, please contact the WAND group. See http://www.wand.net.nz/ for details. upstream-ontologist-0.1.37/tests/readme_data/libtrace/description000066400000000000000000000010451462717511400252740ustar00rootroot00000000000000This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. upstream-ontologist-0.1.37/tests/readme_data/perl-timedate/000077500000000000000000000000001462717511400237755ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/perl-timedate/README000066400000000000000000000014531462717511400246600ustar00rootroot00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* fatser. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. You install the library by running these commands: perl Makefile.PL make make test make install Please report any bugs/suggestions to Copyright 1995-2009 Graham Barr. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. Share and Enjoy! Graham upstream-ontologist-0.1.37/tests/readme_data/perl-timedate/description000066400000000000000000000007101462717511400262410ustar00rootroot00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* fatser. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. upstream-ontologist-0.1.37/tests/readme_data/perl5-xml-compile-cache/000077500000000000000000000000001462717511400255555ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/perl5-xml-compile-cache/README.md000066400000000000000000000042201462717511400270320ustar00rootroot00000000000000# distribution XML-Compile-Cache * My extended documentation: * Development via GitHub: * Download from CPAN: * Indexed from CPAN: and The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. ## Development → Release Important to know, is that I use an extension on POD to write the manuals. The "raw" unprocessed version is visible on GitHub. It will run without problems, but does not contain manual-pages. Releases to CPAN are different: "raw" documentation gets removed from the code and translated into real POD and clean HTML. This reformatting is implemented with the OODoc distribution (A name I chose before OpenOffice existed, sorry for the confusion) Clone from github for the "raw" version. For instance, when you want to contribute a new feature. On github, you can find the processed version for each release. But the better source is CPAN; to get it installed simply run: ```sh cpan -i XML::Compile::Cache ``` ## Contributing When you want to contribute to this module, you do not need to provide a perfect patch... actually: it is nearly impossible to create a patch which I will merge without modification. Usually, I need to adapt the style of code and documentation to my own strict rules. When you submit an extension, please contribute a set with 1. code 2. code documentation 3. regression tests in t/ **Please note:** When you contribute in any way, you agree to transfer the copyrights to Mark Overmeer (you will get the honors in the code and/or ChangeLog). You also automatically agree that your contribution is released under the same license as this project: licensed as perl itself. ## Copyright and License This project is free software; you can redistribute it and/or modify it under the same terms as Perl itself. See upstream-ontologist-0.1.37/tests/readme_data/perl5-xml-compile-cache/description000066400000000000000000000002701462717511400300220ustar00rootroot00000000000000The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. upstream-ontologist-0.1.37/tests/readme_data/pylint-flask/000077500000000000000000000000001462717511400236565ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/pylint-flask/README.md000066400000000000000000000041431462717511400251370ustar00rootroot00000000000000pylint-flask =============== [![Build Status](https://travis-ci.org/jschaf/pylint-flask.svg?branch=master)](https://travis-ci.org/jschaf/pylint-flask) [![Coverage Status](https://coveralls.io/repos/jschaf/pylint-flask/badge.svg?branch=master)](https://coveralls.io/r/jschaf/pylint-flask?branch=master) [![PyPI](https://img.shields.io/pypi/v/pylint-flask.svg)](https://pypi.python.org/pypi/pylint-flask) [![License](https://img.shields.io/badge/license-GPLv2%20License-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html) ## About `pylint-flask` is [Pylint](http://pylint.org) plugin for improving code analysis when editing code using [Flask](http://flask.pocoo.org/). Inspired by [pylint-django](https://github.com/landscapeio/pylint-django). ### Problems pylint-flask solves: 1. Recognize `flask.ext.*` style imports. Say you have the following code: ```python from flask.ext import wtf from flask.ext.wtf import validators class PostForm(wtf.Form): content = wtf.TextAreaField('Content', validators=[validators.Required()]) ``` Normally, pylint will throw errors like: ``` E: 1,0: No name 'wtf' in module 'flask.ext' E: 2,0: No name 'wtf' in module 'flask.ext' F: 2,0: Unable to import 'flask.ext.wtf' ``` As pylint builds it's own abstract syntax tree, `pylint-flask` will translate the `flask.ext` imports into the actual module name, so pylint can continue checking your code. ## Usage Ensure `pylint-flask` is installed and on your path, and then run pylint using pylint-flask as a plugin. ``` pip install pylint-flask pylint --load-plugins pylint_flask [..your module..] ``` ## Contributing Pull requests are always welcome. Here's an outline of the steps you need to prepare your code. 1. git clone https://github.com/jschaf/pylint-flask.git 2. cd pylint-flask 3. mkvirtualenv pylint-flask 4. pip install -r dev-requirements.txt 5. git checkout -b MY-NEW-FIX 6. Hack away 7. Make sure everything is green by running `tox` 7. git push origin MY-NEW-FIX 8. Create a pull request ## License pylint-flask is available under the GPLv2 license.upstream-ontologist-0.1.37/tests/readme_data/pylint-flask/description000066400000000000000000000001641462717511400261250ustar00rootroot00000000000000pylint-flask is Pylint plugin for improving code analysis when editing code using Flask. Inspired by pylint-django. upstream-ontologist-0.1.37/tests/readme_data/python-icalendar/000077500000000000000000000000001462717511400245025ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/python-icalendar/README.rst000066400000000000000000000022431462717511400261720ustar00rootroot00000000000000========================================================== Internet Calendaring and Scheduling (iCalendar) for Python ========================================================== The `icalendar`_ package is a `RFC 5545`_ compatible parser/generator for iCalendar files. ---- :Homepage: https://icalendar.readthedocs.io :Code: https://github.com/collective/icalendar :Mailing list: https://github.com/collective/icalendar/issues :Dependencies: `python-dateutil`_ and `pytz`_. :Compatible with: Python 2.7 and 3.4+ :License: `BSD`_ ---- .. image:: https://travis-ci.org/collective/icalendar.svg?branch=master :target: https://travis-ci.org/collective/icalendar .. _`icalendar`: https://pypi.org/project/icalendar/ .. _`RFC 5545`: https://www.ietf.org/rfc/rfc5545.txt .. _`python-dateutil`: https://github.com/dateutil/dateutil/ .. _`pytz`: https://pypi.org/project/pytz/ .. _`BSD`: https://github.com/collective/icalendar/issues/2 Related projects ================ * `icalevents `_. It is built on top of icalendar and allows you to query iCal files and get the events happening on specific dates. It manages recurrent events as well. upstream-ontologist-0.1.37/tests/readme_data/python-icalendar/description000066400000000000000000000001251462717511400267460ustar00rootroot00000000000000The icalendar package is a RFC 5545 compatible parser/generator for iCalendar files. upstream-ontologist-0.1.37/tests/readme_data/python-rsa/000077500000000000000000000000001462717511400233455ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/python-rsa/README.md000066400000000000000000000036101462717511400246240ustar00rootroot00000000000000# Pure Python RSA implementation [![PyPI](https://img.shields.io/pypi/v/rsa.svg)](https://pypi.org/project/rsa/) [![Build Status](https://travis-ci.org/sybrenstuvel/python-rsa.svg?branch=master)](https://travis-ci.org/sybrenstuvel/python-rsa) [![Coverage Status](https://coveralls.io/repos/github/sybrenstuvel/python-rsa/badge.svg?branch=master)](https://coveralls.io/github/sybrenstuvel/python-rsa?branch=master) [![Code Climate](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability) [Python-RSA](https://stuvel.eu/rsa) is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. Documentation can be found at the [Python-RSA homepage](https://stuvel.eu/rsa). For all changes, check [the changelog](https://github.com/sybrenstuvel/python-rsa/blob/master/CHANGELOG.md). Download and install using: pip install rsa or download it from the [Python Package Index](https://pypi.org/project/rsa/). The source code is maintained at [GitHub](https://github.com/sybrenstuvel/python-rsa/) and is licensed under the [Apache License, version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ## Security Because of how Python internally stores numbers, it is very hard (if not impossible) to make a pure-Python program secure against timing attacks. This library is no exception, so use it with care. See https://securitypitfalls.wordpress.com/2018/08/03/constant-time-compare-in-python/ for more info. ## Setup of Development Environment ``` python3 -m venv .venv . ./.venv/bin/activate pip install poetry poetry install ``` ## Publishing a New Release ``` . ./.venv/bin/activate poetry publish --build ``` upstream-ontologist-0.1.37/tests/readme_data/python-rsa/description000066400000000000000000000004421462717511400256130ustar00rootroot00000000000000Python-RSA is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. upstream-ontologist-0.1.37/tests/readme_data/ruby-columnize/000077500000000000000000000000001462717511400242255ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/ruby-columnize/README.md000066400000000000000000000051541462717511400255110ustar00rootroot00000000000000[![Build Status](https://travis-ci.org/rocky/columnize.png)](https://travis-ci.org/rocky/columnize) [![Gem Version](https://badge.fury.io/rb/columnize.svg)](http://badge.fury.io/rb/columnize) Columnize - Format an Array as a Column-aligned String ============================================================================ In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. Setup ----- $ irb >> require 'columnize' => true With numeric data ----------------- >> a = (1..10).to_a => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] >> a.columnize => "1 2 3 4 5 6 7 8 9 10" >> puts a.columnize :arrange_array => true, :displaywidth => 10 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil >> puts a.columnize :arrange_array => true, :displaywidth => 20 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil With String data ---------------- >> g = %w(bibrons golden madascar leopard mourning suras tokay) => ["bibrons", "golden", "madascar", "leopard", "mourning", "suras", "tokay"] >> puts g.columnize :displaywidth => 15 bibrons suras golden tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 19, :colsep => ' | ' bibrons | suras golden | tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 18, :colsep => ' | ', :ljust => false bibrons | mourning golden | suras madascar | tokay leopard => nil Using Columnize.columnize ------------------------- >> Columnize.columnize(a) => "1 2 3 4 5 6 7 8 9 10" >> puts Columnize.columnize(a, :displaywidth => 10) 1 5 9 2 6 10 3 7 4 8 => nil >> Columnize.columnize(g) => "bibrons golden madascar leopard mourning suras tokay" >> puts Columnize.columnize(g, :displaywidth => 19, :colsep => ' | ') bibrons | mourning golden | suras madascar | tokay leopard => nil Credits ------- This is adapted from a method of the same name from Python's cmd module. Other stuff ----------- Authors: Rocky Bernstein [![endorse](https://api.coderwall.com/rocky/endorsecount.png)](https://coderwall.com/rocky) and [Martin Davis](https://github.com/waslogic) License: Copyright (c) 2011,2013 Rocky Bernstein Warranty -------- You can redistribute it and/or modify it under either the terms of the GPL version 2 or the conditions listed in COPYING upstream-ontologist-0.1.37/tests/readme_data/ruby-columnize/description000066400000000000000000000003371462717511400264760ustar00rootroot00000000000000In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. upstream-ontologist-0.1.37/tests/readme_data/ruby-sha3/000077500000000000000000000000001462717511400230565ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/ruby-sha3/README.md000066400000000000000000000076751462717511400243540ustar00rootroot00000000000000# sha3 [![Gem Version](https://badge.fury.io/rb/sha3.svg)](https://badge.fury.io/rb/sha3) [![CI](https://secure.travis-ci.org/johanns/sha3.png)](https://secure.travis-ci.org/johanns/sha3) [![Dependencies](https://gemnasium.com/johanns/sha3.png)](https://gemnasium.com/johanns/sha3) [![CodeClimate](https://codeclimate.com/github/johanns/sha3.png)](https://codeclimate.com/github/johanns/sha3) **SHA3 for Ruby** is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. - Home :: [https://github.com/johanns/sha3#readme]() - Issues :: [https://github.com/johanns/sha3/issues]() - Documentation :: [http://rubydoc.info/gems/sha3/frames]() ## Warnings - Version 1.0+ breaks compatibility with previous versions of this gem. - Do NOT use SHA3 to hash passwords; use either ```bcrypt``` or ```scrypt``` instead! ## Module details **SHA3::Digest**: A standard *Digest* _subclass_. The interface, and operation of this class are parallel to digest classes bundled with MRI-based Rubies (e.g.: **Digest::SHA2**, and **OpenSSL::Digest**). See [documentation for Ruby's **Digest** class for additional details](http://www.ruby-doc.org/stdlib-2.2.3/libdoc/digest/rdoc/Digest.html). ## Installation ```shell gem install sha3 ``` ## Usage ```ruby require 'sha3' ``` Valid hash bit-lengths are: *224*, *256*, *384*, *512*. ```ruby :sha224 :sha256 :sha384 :sha512 # SHA3::Digest.new(224) is SHA3::Digest.new(:sha224) ``` Alternatively, you can instantiate using one of four sub-classes: ```ruby SHA3::Digest::SHA224.new() # 224 bits SHA3::Digest::SHA256.new() # 256 bits SHA3::Digest::SHA384.new() # 384 bits SHA3::Digest::SHA512.new() # 512 bits ``` ### Basics ```ruby # Instantiate a new SHA3::Digest class with 256 bit length s = SHA3::Digest.new(:sha256) # OR # s = SHA3::Digest::SHA256.new() # Update hash state, and compute new value s.update "Compute Me" # << is an .update() alias s << "Me too" # Returns digest value in bytes s.digest # => "\xBE\xDF\r\xD9\xA1..." # Returns digest value as hex string s.hexdigest # => "bedf0dd9a15b647..." ### Digest class-methods: ### SHA3::Digest.hexdigest(:sha224, "Hash me, please") # => "200e7bc18cd613..." SHA3::Digest::SHA384.digest("Hash me, please") # => "\xF5\xCEpC\xB0eV..." ``` ### Hashing a file ```ruby # Compute the hash value for given file, and return the result as hex s = SHA3::Digest::SHA224.file("my_fantastical_file.bin").hexdigest # Calling SHA3::Digest.file(...) defaults to SHA256 s = SHA3::Digest.file("tests.sh") # => # ``` ## Development * Native build tools (e.g., GCC, Minigw, etc.) * Gems: rubygems-tasks, rake, rspec, yard ### Testing + RSpec Call ```rake``` to run the included RSpec tests. Only a small subset of test vectors are included in the source repository; however, the complete test vectors suite is available for download. Simply run the ```tests.sh``` shell script (available in the root of source directory) to generate full byte-length RSpec test files. ```sh tests.sh``` ### Rubies Tested with Rubies: - MRI Ruby-Head - MRI 2.1.0 - MRI 2.0.0 - MRI 1.9.3 - MRI 1.9.2 - MRI 1.8.7 - Rubinius 2 On: - Ubuntu 12.04, 12.10, 13.04, 14.04, 15.04 - Windows 7, 8, 8.1, 10 - Mac OS X 10.6 - 10.11 ## Releases - *1.0.1* :: FIPS 202 compliance (breaks compatibility with earlier releases) - *0.2.6* :: Fixed bug #4 - *0.2.5* :: Bug fixes. (See ChangeLog.rdoc) - *0.2.4* :: Bug fixes. (YANKED) - *0.2.3* :: Added documentation file (decoupled form C source); refactored C source. - *0.2.2* :: Added sub-class for each SHA3 supported bit-lengths (example: SHA3::Digest::SHA256). Minor bug fix. - *0.2.0* :: Production worthy, but breaks API compatibility with 0.1.x. Backward-compatibility will be maintained henceforth. - *0.1.x* :: Alpha code, and not suitable for production. ## TO DO - Add SHAKE128/256 support ## Copyright Copyright (c) 2012 - 2015 Johanns Gregorian (https://github.com/johanns) **See LICENSE.txt for details.** upstream-ontologist-0.1.37/tests/readme_data/ruby-sha3/description000066400000000000000000000001411462717511400253200ustar00rootroot00000000000000SHA3 for Ruby is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. upstream-ontologist-0.1.37/tests/readme_data/samba/000077500000000000000000000000001462717511400223245ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/samba/README.md000066400000000000000000000114341462717511400236060ustar00rootroot00000000000000About Samba =========== Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely This software is freely distributable under the GNU public license, a copy of which you should have received with this software (in a file called COPYING). CONTRIBUTIONS ============= Please see https://wiki.samba.org/index.php/Contribute for detailed set-by-step instructions on how to submit a patch for Samba via GitLab. Samba's GitLab mirror is at https://gitlab.com/samba-team/samba OUR CONTRIBUTORS ================ See https://www.samba.org/samba/team/ for details of the Samba Team, as well as details of all those currently active in Samba development. If you like a particular feature then look through the git change-log (on the web at https://gitweb.samba.org/?p=samba.git;a=summary) and see who added it, then send them an email. Remember that free software of this kind lives or dies by the response we get. If no one tells us they like it then we'll probably move onto something else. MORE INFO ========= DOCUMENTATION ------------- There is quite a bit of documentation included with the package, including man pages and the wiki at https://wiki.samba.org If you would like to help with our documentation, please contribute that improved content to the wiki, we are moving as much content there as possible. MAILING LIST ------------ Please do NOT send subscription/unsubscription requests to the lists! There is a mailing list for discussion of Samba. For details go to or send mail to There is also an announcement mailing list where new versions are announced. To subscribe go to or send mail to . All announcements also go to the samba list, so you only need to be on one. For details of other Samba mailing lists and for access to archives, see MAILING LIST ETIQUETTE ---------------------- A few tips when submitting to this or any mailing list. 1. Make your subject short and descriptive. Avoid the words "help" or "Samba" in the subject. The readers of this list already know that a) you need help, and b) you are writing about samba (of course, you may need to distinguish between Samba PDC and other file sharing software). Avoid phrases such as "what is" and "how do i". Some good subject lines might look like "Slow response with Excel files" or "Migrating from Samba PDC to NT PDC". 2. If you include the original message in your reply, trim it so that only the relevant lines, enough to establish context, are included. Chances are (since this is a mailing list) we've already read the original message. 3. Trim irrelevant headers from the original message in your reply. All we need to see is a) From, b) Date, and c) Subject. We don't even really need the Subject, if you haven't changed it. Better yet is to just preface the original message with "On [date] [someone] wrote:". 4. Please don't reply to or argue about spam, spam filters or viruses on any Samba lists. We do have a spam filtering system that is working quite well thank you very much but occasionally unwanted messages slip through. Deal with it. 5. Never say "Me too." It doesn't help anyone solve the problem. Instead, if you ARE having the same problem, give more information. Have you seen something that the other writer hasn't mentioned, which may be helpful? 6. If you ask about a problem, then come up with the solution on your own or through another source, by all means post it. Someone else may have the same problem and is waiting for an answer, but never hears of it. 7. Give as much *relevant* information as possible such as Samba release number, OS, kernel version, etc... 8. RTFM. Google. WEBSITE ------- A Samba website has been setup with lots of useful info. Connect to: https://www.samba.org/ As well as general information and documentation, this also has searchable archives of the mailing list and links to other useful resources such as the wiki. upstream-ontologist-0.1.37/tests/readme_data/samba/description000066400000000000000000000014621462717511400245750ustar00rootroot00000000000000Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely upstream-ontologist-0.1.37/tests/readme_data/saneyaml/000077500000000000000000000000001462717511400230525ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/saneyaml/README.rst000066400000000000000000000026101462717511400245400ustar00rootroot00000000000000======== saneyaml ======== This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. License: apache-2.0 Homepage_url: https://github.com/nexB/saneyaml Usage:: pip install saneyaml >>> from saneyaml import load as l >>> from saneyaml import dump as d >>> a=l('''version: 3.0.0.dev6 ... ... description: | ... AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file ... provides a way to document a software component. ... ''') >>> a OrderedDict([ (u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. ' 'An ABOUT file\nprovides a way to document a software component.\n')]) >>> pprint(a.items()) [(u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file\nprovides a way to document a software component.\n')] >>> print(d(a)) version: 3.0.0.dev6 description: | AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file provides a way to document a software component. upstream-ontologist-0.1.37/tests/readme_data/saneyaml/description000066400000000000000000000007001462717511400253150ustar00rootroot00000000000000This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. upstream-ontologist-0.1.37/tests/readme_data/sfcgal/000077500000000000000000000000001462717511400225005ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/sfcgal/README.md000066400000000000000000000004411462717511400237560ustar00rootroot00000000000000SFCGAL ====== SFCGAL is a C++ wrapper library around [CGAL](http://www.cgal.org) with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. Please refer to the project page for an updated installation procedure. upstream-ontologist-0.1.37/tests/readme_data/sfcgal/description000066400000000000000000000002031462717511400247410ustar00rootroot00000000000000SFCGAL is a C++ wrapper library around CGAL with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. upstream-ontologist-0.1.37/tests/readme_data/statuscake/000077500000000000000000000000001462717511400234105ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/statuscake/README.md000066400000000000000000000006071462717511400246720ustar00rootroot00000000000000# statuscake [![Build Status](https://travis-ci.org/DreamItGetIT/statuscake.svg?branch=master)](https://travis-ci.org/DreamItGetIT/statuscake) `statuscake` is a Go pkg that implements a client for the [statuscake]("https://statuscake.com") API. More documentation and examples at [http://godoc.org/github.com/DreamItGetIT/statuscake](http://godoc.org/github.com/DreamItGetIT/statuscake). upstream-ontologist-0.1.37/tests/readme_data/statuscake/description000066400000000000000000000001101462717511400256460ustar00rootroot00000000000000statuscake is a Go pkg that implements a client for the statuscake API. upstream-ontologist-0.1.37/tests/readme_data/text-worddif/000077500000000000000000000000001462717511400236615ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/text-worddif/README.md000066400000000000000000000022451462717511400251430ustar00rootroot00000000000000Text/WordDiff version 0.09 ========================== This library's module, Text::WordDiff, is a variation on the lovely [Text::Diff](http://search.cpan.org/perldoc?Text::Diff) module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. INSTALLATION To install this module, type the following: perl Build.PL ./Build ./Build test ./Build install Or, if you don't have Module::Build installed, type the following: perl Makefile.PL make make test make install Dependencies ------------ Text::WordDiff requires the following modules: * Algorithm::Diff '1.19', * Term::ANSIColor '0', * HTML::Entities '0', Copyright and License --------------------- Copyright (c) 2005-2011 David E. Wheeler. Some Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. upstream-ontologist-0.1.37/tests/readme_data/text-worddif/description000066400000000000000000000007211462717511400261270ustar00rootroot00000000000000This library's module, Text::WordDiff, is a variation on the lovely Text::Diff module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. upstream-ontologist-0.1.37/tests/readme_data/wandio/000077500000000000000000000000001462717511400225225ustar00rootroot00000000000000upstream-ontologist-0.1.37/tests/readme_data/wandio/README000066400000000000000000000024441462717511400234060ustar00rootroot00000000000000WANDIO 4.2.1 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install WANDIO. This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). Documentation for WANDIO and its included tools can be found at https://github.com/wanduow/wandio/wiki Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/wanduow/wandio It is licensed under the Lesser GNU General Public License (LGPL) version 3. Please see the included files COPYING and COPYING.LESSER for details of this license. upstream-ontologist-0.1.37/tests/readme_data/wandio/description000066400000000000000000000007261462717511400247750ustar00rootroot00000000000000This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). upstream-ontologist-0.1.37/tests/test_data.py000066400000000000000000000057361462717511400213500ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2024 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA from typing import Any from unittest import TestCase from upstream_ontologist import UpstreamMetadata class UpstreamMetadataFromDictTests(TestCase): def test_from_dict(self): d = { 'Name': 'foo', 'Version': '1.2.3', 'Homepage': 'https://example.com', } metadata = UpstreamMetadata.from_dict(d) self.assertEqual(metadata['Name'].value, 'foo') self.assertEqual(metadata['Version'].value, '1.2.3') self.assertEqual(metadata['Homepage'].value, 'https://example.com') def test_from_dict_missing(self): d = { 'Name': 'foo', 'Version': '1.2.3', } metadata = UpstreamMetadata.from_dict(d) self.assertEqual(metadata['Name'].value, 'foo') self.assertEqual(metadata['Version'].value, '1.2.3') self.assertRaises(KeyError, metadata.__getitem__, 'Homepage') def test_from_dict_empty(self): d: dict[str, Any] = {} metadata = UpstreamMetadata.from_dict(d) self.assertRaises(KeyError, metadata.__getitem__, 'Name') self.assertRaises(KeyError, metadata.__getitem__, 'Version') self.assertRaises(KeyError, metadata.__getitem__, 'Homepage') def test_from_dict_invalid(self): d = { 'Name': 'foo', 'Version': '1.2.3', 'Homepage': 42, } with self.assertRaises(TypeError): UpstreamMetadata.from_dict(d) def test_from_dict_yaml(self): from ruamel.yaml import YAML yaml = YAML() d = yaml.load('''Name: foo Version: 1.2.3 # comment Homepage: https://example.com ''') metadata = UpstreamMetadata.from_dict(d) self.assertEqual(metadata['Name'].value, 'foo') self.assertEqual(metadata['Version'].value, '1.2.3') self.assertEqual(metadata['Homepage'].value, 'https://example.com') def test_from_dict_registry(self): d = { 'Name': 'foo', 'Version': '1.2.3', 'Registry': [{'Name': 'conda:conda-forge', 'Entry': 'r-tsne'}] } metadata = UpstreamMetadata.from_dict(d) self.assertEqual(metadata['Registry'].value, [{'Name': 'conda:conda-forge', 'Entry': 'r-tsne'}]) upstream-ontologist-0.1.37/tests/test_readme.py000066400000000000000000000062431462717511400216660ustar00rootroot00000000000000#!/usr/bin/python # Copyright (C) 2019 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """Tests for readme parsing.""" import os import platform from unittest import TestCase, TestSuite from upstream_ontologist.readme import ( description_from_readme_md, description_from_readme_plain, description_from_readme_rst, ) class ReadmeTestCase(TestCase): def __init__(self, path): super().__init__() self.path = path def setUp(self): super().setUp() self.maxDiff = None def runTest(self): readme_md = None readme_rst = None readme_plain = None description = None for entry in os.scandir(self.path): if entry.name.endswith("~"): continue base, ext = os.path.splitext(entry.name) if entry.name == "description": with open(entry.path) as f: description = f.read() elif base == "README": if ext == ".md": with open(entry.path) as f: readme_md = f.read() elif ext == ".rst": with open(entry.path) as f: readme_rst = f.read() elif ext == "": with open(entry.path) as f: readme_plain = f.read() else: raise NotImplementedError(ext) else: raise NotImplementedError(ext) if readme_md is not None: actual_description, unused_md = description_from_readme_md(readme_md) self.assertEqual(actual_description, description) if readme_rst is not None: if platform.python_implementation() == "PyPy": self.skipTest("Skipping README.rst tests on pypy") try: import docutils # noqa: F401 except ModuleNotFoundError: self.skipTest("Skipping README.rst tests, docutils not available") actual_description, unused_rst = description_from_readme_rst(readme_rst) self.assertEqual(actual_description, description) if readme_plain is not None: actual_description, unused_rst = description_from_readme_plain(readme_plain) self.assertEqual(actual_description, description) def test_suite(): suite = TestSuite() for entry in os.scandir(os.path.join(os.path.dirname(__file__), "readme_data")): suite.addTest(ReadmeTestCase(entry.path)) return suite upstream-ontologist-0.1.37/tests/test_vcs.py000066400000000000000000000133361462717511400212250ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA from unittest import TestCase from upstream_ontologist.vcs import ( browse_url_from_repo_url, canonical_git_repo_url, find_public_repo_url, fixup_rcp_style_git_repo_url, guess_repo_from_url, is_gitlab_site, plausible_url, ) class PlausibleUrlTests(TestCase): def test_url(self): self.assertFalse(plausible_url("the")) self.assertFalse(plausible_url("1")) self.assertTrue(plausible_url("git@foo:blah")) self.assertTrue(plausible_url("git+ssh://git@foo/blah")) self.assertTrue(plausible_url("https://foo/blah")) class TestIsGitLabSite(TestCase): def test_not_gitlab(self): self.assertFalse(is_gitlab_site("foo.example.com")) self.assertFalse(is_gitlab_site("github.com")) def test_gitlab(self): self.assertTrue(is_gitlab_site("gitlab.somehost.com")) self.assertTrue(is_gitlab_site("salsa.debian.org")) class CanonicalizeVcsUrlTests(TestCase): def test_github(self): self.assertEqual( "https://github.com/jelmer/example.git", canonical_git_repo_url("https://github.com/jelmer/example"), ) def test_salsa(self): self.assertEqual( "https://salsa.debian.org/jelmer/example.git", canonical_git_repo_url("https://salsa.debian.org/jelmer/example"), ) self.assertEqual( "https://salsa.debian.org/jelmer/example.git", canonical_git_repo_url("https://salsa.debian.org/jelmer/example.git"), ) class FindPublicVcsUrlTests(TestCase): def test_github(self): self.assertEqual( "https://github.com/jelmer/example", find_public_repo_url("ssh://git@github.com/jelmer/example"), ) self.assertEqual( "https://github.com/jelmer/example", find_public_repo_url("https://github.com/jelmer/example"), ) self.assertEqual( "https://github.com/jelmer/example", find_public_repo_url("git@github.com:jelmer/example"), ) def test_salsa(self): self.assertEqual( "https://salsa.debian.org/jelmer/example", find_public_repo_url("ssh://salsa.debian.org/jelmer/example"), ) self.assertEqual( "https://salsa.debian.org/jelmer/example", find_public_repo_url("https://salsa.debian.org/jelmer/example"), ) class FixupRcpStyleUrlTests(TestCase): def test_fixup(self): try: import breezy # noqa: F401 except ModuleNotFoundError: self.skipTest("breezy is not available") self.assertEqual( "ssh://github.com/jelmer/example", fixup_rcp_style_git_repo_url("github.com:jelmer/example"), ) self.assertEqual( "ssh://git@github.com/jelmer/example", fixup_rcp_style_git_repo_url("git@github.com:jelmer/example"), ) def test_leave(self): try: import breezy # noqa: F401 except ModuleNotFoundError: self.skipTest("breezy is not available") self.assertEqual( "https://salsa.debian.org/jelmer/example", fixup_rcp_style_git_repo_url("https://salsa.debian.org/jelmer/example"), ) self.assertEqual( "ssh://git@salsa.debian.org/jelmer/example", fixup_rcp_style_git_repo_url("ssh://git@salsa.debian.org/jelmer/example"), ) class GuessRepoFromUrlTests(TestCase): def test_travis_ci_org(self): self.assertEqual( "https://github.com/jelmer/dulwich", guess_repo_from_url("https://travis-ci.org/jelmer/dulwich"), ) def test_coveralls(self): self.assertEqual( "https://github.com/jelmer/dulwich", guess_repo_from_url("https://coveralls.io/r/jelmer/dulwich"), ) def test_gitlab(self): self.assertEqual( "https://gitlab.com/jelmer/dulwich", guess_repo_from_url("https://gitlab.com/jelmer/dulwich"), ) self.assertEqual( "https://gitlab.com/jelmer/dulwich", guess_repo_from_url("https://gitlab.com/jelmer/dulwich/tags"), ) class BrowseUrlFromRepoUrl(TestCase): def test_github(self): self.assertEqual( "https://github.com/jelmer/dulwich", browse_url_from_repo_url("https://github.com/jelmer/dulwich"), ) self.assertEqual( "https://github.com/jelmer/dulwich", browse_url_from_repo_url("https://github.com/jelmer/dulwich.git"), ) self.assertEqual( "https://github.com/jelmer/dulwich/tree/foo", browse_url_from_repo_url( "https://github.com/jelmer/dulwich.git", branch="foo" ), ) self.assertEqual( "https://github.com/jelmer/dulwich/tree/HEAD/foo", browse_url_from_repo_url( "https://github.com/jelmer/dulwich.git", subpath="foo" ), ) upstream-ontologist-0.1.37/tests/testdata.py000066400000000000000000000034601462717511400212010ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import os import unittest from upstream_ontologist import yaml from upstream_ontologist.guess import get_upstream_info class TestDataTestCase(unittest.TestCase): """Test case that runs a fixer test.""" def __init__(self, name, path): self.name = name self.path = path self.maxDiff = None super().__init__() def id(self): return f"{__name__}.{self.name}" def __str__(self): return f"testdata test: {self.name}" def runTest(self): got = get_upstream_info( self.path, trust_package=True, net_access=False, check=False ) jp = os.path.join(self.path, "expected.yaml") with open(jp) as f: expected = yaml.load(f) self.assertEqual(expected, got) def test_suite(): suite = unittest.TestSuite() for entry in os.scandir(os.path.join(os.path.dirname(__file__), "..", "testdata")): if entry.name.endswith("~"): continue suite.addTest(TestDataTestCase(entry.name, entry.path)) return suite upstream-ontologist-0.1.37/tox.ini000066400000000000000000000004571462717511400171720ustar00rootroot00000000000000[tox] downloadcache = {toxworkdir}/cache/ [testenv] deps = setuptools-rust commands = python setup.py build_ext -i python3 -m unittest tests.test_suite recreate = True whitelist_externals = make extras = cargo, debian_rules, debian_changelog, debian_watch, pyproject, homepage, readme, setup.cfg upstream-ontologist-0.1.37/upstream-ontologist-py/000077500000000000000000000000001462717511400223365ustar00rootroot00000000000000upstream-ontologist-0.1.37/upstream-ontologist-py/Cargo.toml000066400000000000000000000014061462717511400242670ustar00rootroot00000000000000[package] name = "upstream-ontologist-py" version = { workspace = true } authors = ["Jelmer Vernooij "] edition = "2018" license = "Apache-2.0" repository = "https://github.com/jelmer/upstream-ontologist.git" homepage = "https://github.com/jelmer/upstream-ontologist" [lib] crate-type = ["cdylib"] [dependencies] upstream-ontologist = { path = ".." } pyo3 = { workspace = true } pyo3-log = { workspace = true } reqwest = { version = "^0.12", features=["blocking", "json"], default-features = false } url = "2.2" serde_json = "1" log = "0.4" [features] default = [] default-tls = ["upstream-ontologist/default-tls", "reqwest/default-tls"] rustls-tls = ["upstream-ontologist/rustls-tls", "reqwest/rustls-tls"] extension-module = ["pyo3/extension-module"] upstream-ontologist-0.1.37/upstream-ontologist-py/src/000077500000000000000000000000001462717511400231255ustar00rootroot00000000000000upstream-ontologist-0.1.37/upstream-ontologist-py/src/lib.rs000066400000000000000000001105721462717511400242470ustar00rootroot00000000000000use pyo3::create_exception; use pyo3::exceptions::{PyException, PyKeyError, PyRuntimeError, PyStopIteration, PyValueError}; use pyo3::import_exception; use pyo3::prelude::*; use pyo3::types::{PyDict, PyTuple, PyType}; use std::str::FromStr; use upstream_ontologist::{CanonicalizeError, Certainty, Origin, UpstreamPackage}; use url::Url; import_exception!(urllib.error, HTTPError); create_exception!(upstream_ontologist, UnverifiableUrl, PyException); create_exception!(upstream_ontologist, InvalidUrl, PyException); create_exception!(upstream_ontologist, NoSuchForgeProject, PyException); #[pyfunction] fn url_from_git_clone_command(command: &[u8]) -> Option { upstream_ontologist::vcs_command::url_from_git_clone_command(command) } #[pyfunction] fn url_from_fossil_clone_command(command: &[u8]) -> Option { upstream_ontologist::vcs_command::url_from_fossil_clone_command(command) } #[pyfunction] fn url_from_svn_co_command(command: &[u8]) -> Option { upstream_ontologist::vcs_command::url_from_svn_co_command(command) } #[pyfunction] fn url_from_cvs_co_command(command: &[u8]) -> Option { upstream_ontologist::vcs_command::url_from_cvs_co_command(command) } #[pyfunction] fn url_from_vcs_command(command: &[u8]) -> Option { upstream_ontologist::vcs_command::url_from_vcs_command(command) } #[pyfunction] fn drop_vcs_in_scheme(url: &str) -> String { upstream_ontologist::vcs::drop_vcs_in_scheme(&url.parse().unwrap()) .map_or_else(|| url.to_string(), |u| u.to_string()) } #[pyfunction] fn unsplit_vcs_url( repo_url: &str, branch: Option<&str>, subpath: Option<&str>, ) -> PyResult { let location = upstream_ontologist::vcs::VcsLocation { url: repo_url .parse() .map_err(|e: url::ParseError| PyValueError::new_err(e.to_string()))?, branch: branch.map(|b| b.to_string()), subpath: subpath.map(|b| b.to_string()), }; Ok(upstream_ontologist::vcs::unsplit_vcs_url(&location)) } #[pyclass(subclass)] struct Forge(Box); #[pymethods] impl Forge { #[getter] fn name(&self) -> PyResult { Ok(self.0.name().to_string()) } fn bug_database_url_from_bug_submit_url(&self, url: &str) -> PyResult> { let url = url.parse().unwrap(); Ok(self .0 .bug_database_url_from_bug_submit_url(&url) .map(|x| x.to_string())) } fn bug_submit_url_from_bug_database_url(&self, url: &str) -> PyResult> { let url = url.parse().unwrap(); Ok(self .0 .bug_submit_url_from_bug_database_url(&url) .map(|x| x.to_string())) } fn check_bug_database_canonical(&self, url: &str) -> PyResult { let url = url.parse().unwrap(); Ok(self .0 .check_bug_database_canonical(&url) .map_err(|e| match e { CanonicalizeError::InvalidUrl(url, msg) => { InvalidUrl::new_err((url.to_string(), msg)) } CanonicalizeError::Unverifiable(url, msg) => { UnverifiableUrl::new_err((url.to_string(), msg)) } CanonicalizeError::RateLimited(url) => { UnverifiableUrl::new_err((url.to_string(), "rate limited")) } })? .to_string()) } fn check_bug_submit_url_canonical(&self, url: &str) -> PyResult { let url = url.parse().unwrap(); Ok(self .0 .check_bug_submit_url_canonical(&url) .map_err(|e| match e { CanonicalizeError::InvalidUrl(url, msg) => { InvalidUrl::new_err((url.to_string(), msg)) } CanonicalizeError::Unverifiable(url, msg) => { UnverifiableUrl::new_err((url.to_string(), msg)) } CanonicalizeError::RateLimited(url) => { UnverifiableUrl::new_err((url.to_string(), "rate limited")) } })? .to_string()) } fn bug_database_from_issue_url(&self, url: &str) -> PyResult> { let url = url.parse().unwrap(); Ok(self .0 .bug_database_from_issue_url(&url) .map(|x| x.to_string())) } fn bug_database_url_from_repo_url(&self, url: &str) -> PyResult> { let url = url.parse().unwrap(); Ok(self .0 .bug_database_url_from_repo_url(&url) .map(|x| x.to_string())) } fn repo_url_from_merge_request_url(&self, url: &str) -> PyResult> { let url = url.parse().unwrap(); Ok(self .0 .repo_url_from_merge_request_url(&url) .map(|x| x.to_string())) } #[getter] fn repository_browse_can_be_homepage(&self) -> bool { self.0.repository_browse_can_be_homepage() } } #[pyclass(subclass,extends=Forge)] struct GitHub; #[pymethods] impl GitHub { #[new] fn new() -> (Self, Forge) { let forge = upstream_ontologist::GitHub::new(); (Self, Forge(Box::new(forge))) } } #[pyclass(subclass,extends=Forge)] struct GitLab; #[pymethods] impl GitLab { #[new] fn new() -> (Self, Forge) { let forge = upstream_ontologist::GitLab::new(); (Self, Forge(Box::new(forge))) } } #[pyclass(subclass,extends=Forge)] struct Launchpad; #[pymethods] impl Launchpad { #[new] fn new() -> (Self, Forge) { let forge = upstream_ontologist::Launchpad::new(); (Self, Forge(Box::new(forge))) } } #[pyclass(subclass,extends=Forge)] struct SourceForge; #[pymethods] impl SourceForge { #[new] fn new() -> (Self, Forge) { let forge = upstream_ontologist::SourceForge::new(); (Self, Forge(Box::new(forge))) } } #[pyfunction] fn plausible_vcs_url(url: &str) -> PyResult { Ok(upstream_ontologist::vcs::plausible_url(url)) } #[pyfunction] fn plausible_vcs_browse_url(url: &str) -> PyResult { Ok(upstream_ontologist::vcs::plausible_browse_url(url)) } #[pyfunction] fn check_url_canonical(url: &str) -> PyResult { Ok(upstream_ontologist::check_url_canonical( &Url::parse(url).map_err(|e| InvalidUrl::new_err((url.to_string(), e.to_string())))?, ) .map_err(|e| match e { CanonicalizeError::InvalidUrl(u, m) => InvalidUrl::new_err((u.to_string(), m)), CanonicalizeError::Unverifiable(u, m) => UnverifiableUrl::new_err((u.to_string(), m)), CanonicalizeError::RateLimited(u) => { UnverifiableUrl::new_err((u.to_string(), "Rate limited")) } })? .to_string()) } #[pyfunction] fn guess_repo_from_url(url: &str, net_access: Option) -> PyResult> { if let Ok(url) = Url::parse(url) { Ok(upstream_ontologist::vcs::guess_repo_from_url( &url, net_access, )) } else { Ok(None) } } #[pyfunction] fn probe_gitlab_host(hostname: &str) -> bool { upstream_ontologist::vcs::probe_gitlab_host(hostname) } #[pyfunction] fn is_gitlab_site(hostname: &str, net_access: Option) -> bool { upstream_ontologist::vcs::is_gitlab_site(hostname, net_access) } #[pyfunction] fn check_repository_url_canonical(url: &str, version: Option<&str>) -> PyResult { Ok(upstream_ontologist::vcs::check_repository_url_canonical( Url::parse(url).map_err(|e| PyRuntimeError::new_err(format!("Invalid URL: {}", e)))?, version, ) .map_err(|e| match e { CanonicalizeError::InvalidUrl(u, m) => InvalidUrl::new_err((u.to_string(), m)), CanonicalizeError::Unverifiable(u, m) => UnverifiableUrl::new_err((u.to_string(), m)), CanonicalizeError::RateLimited(u) => { UnverifiableUrl::new_err((u.to_string(), "Rate limited")) } })? .to_string()) } #[pyfunction] fn probe_upstream_branch_url(url: &str, version: Option<&str>) -> Option { upstream_ontologist::vcs::probe_upstream_branch_url( &Url::parse(url).expect("URL parsing failed"), version, ) } #[pyfunction] fn guess_from_launchpad( py: Python, package: &str, distribution: Option<&str>, suite: Option<&str>, ) -> PyResult { let ret = upstream_ontologist::providers::launchpad::guess_from_launchpad( package, distribution, suite, ); if ret.is_none() { Ok(Vec::::new().to_object(py)) } else { Ok(ret.to_object(py)) } } #[pyfunction] fn browse_url_from_repo_url( url: &str, branch: Option<&str>, subpath: Option<&str>, net_access: Option, ) -> PyResult> { let location = upstream_ontologist::vcs::VcsLocation { url: Url::parse(url).map_err(|e| PyValueError::new_err(format!("Invalid URL: {}", e)))?, branch: branch.map(|s| s.to_string()), subpath: subpath.map(|s| s.to_string()), }; Ok( upstream_ontologist::vcs::browse_url_from_repo_url(&location, net_access) .map(|u| u.to_string()), ) } #[pyfunction] fn canonical_git_repo_url(url: &str, net_access: Option) -> PyResult { let url = Url::parse(url).map_err(|e| PyRuntimeError::new_err(format!("Invalid URL: {}", e)))?; Ok( upstream_ontologist::vcs::canonical_git_repo_url(&url, net_access) .map_or_else(|| url.to_string(), |u| u.to_string()), ) } #[pyfunction] fn find_public_repo_url(url: &str, net_access: Option) -> PyResult> { Ok(upstream_ontologist::vcs::find_public_repo_url( url, net_access, )) } #[pyfunction] fn find_forge(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; let forge = upstream_ontologist::find_forge(&url, net_access); if let Some(forge) = forge { Some(Forge(forge)) } else { None } } #[pyfunction] fn repo_url_from_merge_request_url(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; upstream_ontologist::repo_url_from_merge_request_url(&url, net_access).map(|x| x.to_string()) } #[pyfunction] fn bug_database_from_issue_url(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; upstream_ontologist::bug_database_from_issue_url(&url, net_access).map(|x| x.to_string()) } #[pyfunction] fn guess_bug_database_url_from_repo_url(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; upstream_ontologist::guess_bug_database_url_from_repo_url(&url, net_access) .map(|x| x.to_string()) } #[pyfunction] fn bug_database_url_from_bug_submit_url(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; upstream_ontologist::bug_database_url_from_bug_submit_url(&url, net_access) .map(|x| x.to_string()) } #[pyfunction] fn bug_submit_url_from_bug_database_url(url: &str, net_access: Option) -> Option { let url = Url::parse(url).ok()?; upstream_ontologist::bug_submit_url_from_bug_database_url(&url, net_access) .map(|x| x.to_string()) } #[pyfunction] fn check_bug_database_canonical(url: &str, net_access: Option) -> PyResult { let url = Url::parse(url).map_err(|e| PyRuntimeError::new_err(format!("Invalid URL: {}", e)))?; upstream_ontologist::check_bug_database_canonical(&url, net_access) .map_err(|e| match e { CanonicalizeError::InvalidUrl(url, msg) => InvalidUrl::new_err((url.to_string(), msg)), CanonicalizeError::Unverifiable(url, msg) => { UnverifiableUrl::new_err((url.to_string(), msg)) } CanonicalizeError::RateLimited(url) => { UnverifiableUrl::new_err((url.to_string(), "rate limited")) } }) .map(|x| x.to_string()) } #[pyfunction] fn check_bug_submit_url_canonical(url: &str, net_access: Option) -> PyResult { let url = Url::parse(url).map_err(|e| PyRuntimeError::new_err(format!("Invalid URL: {}", e)))?; upstream_ontologist::check_bug_submit_url_canonical(&url, net_access) .map_err(|e| match e { CanonicalizeError::InvalidUrl(url, msg) => InvalidUrl::new_err((url.to_string(), msg)), CanonicalizeError::Unverifiable(url, msg) => { UnverifiableUrl::new_err((url.to_string(), msg)) } CanonicalizeError::RateLimited(url) => { UnverifiableUrl::new_err((url.to_string(), "rate limited")) } }) .map(|x| x.to_string()) } #[pyfunction] fn known_bad_guess(py: Python, datum: PyObject) -> PyResult { let datum: upstream_ontologist::UpstreamDatum = datum.extract(py)?; Ok(datum.known_bad_guess()) } #[pyfunction(name = "skip_paragraph")] fn readme_skip_paragraph(py: Python, para: &str) -> PyResult<(bool, PyObject)> { let (skip, para) = upstream_ontologist::readme::skip_paragraph(para); Ok((skip, para.to_object(py))) } #[pyfunction] fn fixup_rcp_style_git_repo_url(url: &str) -> PyResult { Ok(upstream_ontologist::vcs::fixup_rcp_style_git_repo_url(url) .map_or(url.to_string(), |u| u.to_string())) } #[pyfunction] fn valid_debian_package_name(name: &str) -> PyResult { Ok(upstream_ontologist::debian::valid_debian_package_name(name)) } #[pyfunction] fn debian_to_upstream_version(version: &str) -> PyResult { Ok(upstream_ontologist::debian::debian_to_upstream_version(version).to_string()) } #[pyfunction] fn upstream_name_to_debian_source_name(name: &str) -> PyResult { Ok(upstream_ontologist::debian::upstream_name_to_debian_source_name(name)) } #[pyfunction] fn upstream_package_to_debian_binary_name(package: UpstreamPackage) -> PyResult { Ok(upstream_ontologist::debian::upstream_package_to_debian_binary_name(&package)) } #[pyfunction] fn upstream_package_to_debian_source_name(package: UpstreamPackage) -> PyResult { Ok(upstream_ontologist::debian::upstream_package_to_debian_source_name(&package)) } #[pyfunction] pub fn find_secure_repo_url( url: String, branch: Option<&str>, net_access: Option, ) -> Option { upstream_ontologist::vcs::find_secure_repo_url(url.parse().unwrap(), branch, net_access) .map(|u| u.to_string()) } #[pyfunction] fn sanitize_url(url: &str) -> PyResult { Ok(upstream_ontologist::vcs::sanitize_url(url)) } #[pyfunction] fn convert_cvs_list_to_str(urls: Vec<&str>) -> Option { upstream_ontologist::vcs::convert_cvs_list_to_str(urls.as_slice()) } #[pyfunction] fn fixup_broken_git_details( location: &str, branch: Option<&str>, subpath: Option<&str>, ) -> (String, Option, Option) { let url = upstream_ontologist::vcs::fixup_git_url(location); let location = upstream_ontologist::vcs::VcsLocation { url: url.parse().unwrap(), branch: branch.map(|s| s.to_string()), subpath: subpath.map(|s| s.to_string()), }; let ret = upstream_ontologist::vcs::fixup_git_location(&location); ( ret.url.to_string(), ret.branch.as_ref().map(|s| s.to_string()), ret.subpath.as_ref().map(|s| s.to_string()), ) } fn extract_str_value(py: Python, value: PyObject) -> PyResult { let value = value.extract::(py)?; value.extract::(py) } #[derive(Clone)] #[pyclass] struct UpstreamDatum(pub(crate) upstream_ontologist::UpstreamDatumWithMetadata); #[pymethods] impl UpstreamDatum { #[new] fn new( py: Python, field: String, value: PyObject, certainty: Option, origin: Option, ) -> PyResult { Ok(UpstreamDatum( upstream_ontologist::UpstreamDatumWithMetadata { datum: match field.as_str() { "Name" => { upstream_ontologist::UpstreamDatum::Name(extract_str_value(py, value)?) } "Version" => { upstream_ontologist::UpstreamDatum::Version(extract_str_value(py, value)?) } "Summary" => { upstream_ontologist::UpstreamDatum::Summary(extract_str_value(py, value)?) } "Description" => upstream_ontologist::UpstreamDatum::Description( extract_str_value(py, value)?, ), "Homepage" => { upstream_ontologist::UpstreamDatum::Homepage(extract_str_value(py, value)?) } "Repository" => { // Check if the value is a list rather than a string if let Ok(value) = value.extract::>(py) { upstream_ontologist::UpstreamDatum::Repository(value.join(" ")) } else { upstream_ontologist::UpstreamDatum::Repository(extract_str_value( py, value, )?) } } "Repository-Browse" => upstream_ontologist::UpstreamDatum::RepositoryBrowse( extract_str_value(py, value)?, ), "License" => { upstream_ontologist::UpstreamDatum::License(extract_str_value(py, value)?) } "Author" => { upstream_ontologist::UpstreamDatum::Author(value.extract(py).unwrap()) } "Bug-Database" => upstream_ontologist::UpstreamDatum::BugDatabase( extract_str_value(py, value)?, ), "Bug-Submit" => { upstream_ontologist::UpstreamDatum::BugSubmit(extract_str_value(py, value)?) } "Contact" => { upstream_ontologist::UpstreamDatum::Contact(extract_str_value(py, value)?) } "Cargo-Crate" => upstream_ontologist::UpstreamDatum::CargoCrate( extract_str_value(py, value)?, ), "Security-MD" => upstream_ontologist::UpstreamDatum::SecurityMD( extract_str_value(py, value)?, ), "Keywords" => { upstream_ontologist::UpstreamDatum::Keywords(value.extract(py).unwrap()) } "Maintainer" => { upstream_ontologist::UpstreamDatum::Maintainer(value.extract(py).unwrap()) } "Copyright" => { upstream_ontologist::UpstreamDatum::Copyright(value.extract(py).unwrap()) } "Documentation" => upstream_ontologist::UpstreamDatum::Documentation( value.extract(py).unwrap(), ), "Go-Import-Path" => { upstream_ontologist::UpstreamDatum::GoImportPath(value.extract(py).unwrap()) } "Download" => { upstream_ontologist::UpstreamDatum::Download(value.extract(py).unwrap()) } "Wiki" => upstream_ontologist::UpstreamDatum::Wiki(value.extract(py).unwrap()), "MailingList" => { upstream_ontologist::UpstreamDatum::MailingList(value.extract(py).unwrap()) } "SourceForge-Project" => { upstream_ontologist::UpstreamDatum::SourceForgeProject( value.extract(py).unwrap(), ) } "Archive" => { upstream_ontologist::UpstreamDatum::Archive(value.extract(py).unwrap()) } "Demo" => upstream_ontologist::UpstreamDatum::Demo(value.extract(py).unwrap()), "Pecl-Package" => { upstream_ontologist::UpstreamDatum::PeclPackage(value.extract(py).unwrap()) } "Haskell-Package" => upstream_ontologist::UpstreamDatum::HaskellPackage( value.extract(py).unwrap(), ), "Funding" => { upstream_ontologist::UpstreamDatum::Funding(value.extract(py).unwrap()) } "Changelog" => { upstream_ontologist::UpstreamDatum::Changelog(value.extract(py).unwrap()) } "Debian-ITP" => { upstream_ontologist::UpstreamDatum::DebianITP(value.extract(py).unwrap()) } "Screenshots" => { upstream_ontologist::UpstreamDatum::Screenshots(value.extract(py).unwrap()) } "Cite-As" => { upstream_ontologist::UpstreamDatum::CiteAs(value.extract(py).unwrap()) } "Registry" => { upstream_ontologist::UpstreamDatum::Registry(value.extract(py).unwrap()) } "Donation" => { upstream_ontologist::UpstreamDatum::Donation(value.extract(py).unwrap()) } "Webservice" => { upstream_ontologist::UpstreamDatum::Webservice(value.extract(py).unwrap()) } _ => { return Err(PyValueError::new_err(format!("Unknown field: {}", field))); } }, origin, certainty: certainty.map(|s| Certainty::from_str(&s).unwrap()), }, )) } #[getter] fn field(&self) -> PyResult { Ok(self.0.datum.field().to_string()) } #[getter] fn value(&self, py: Python) -> PyResult { let value = self .0 .datum .to_object(py) .extract::<(String, PyObject)>(py) .unwrap() .1; assert!(!value.as_ref(py).is_instance_of::()); Ok(value) } #[getter] fn origin(&self) -> Option { self.0.origin.clone() } #[setter] fn set_origin(&mut self, origin: Option) { self.0.origin = origin; } #[getter] fn certainty(&self) -> Option { self.0.certainty.map(|c| c.to_string()) } #[setter] pub fn set_certainty(&mut self, certainty: Option) { self.0.certainty = certainty.map(|s| Certainty::from_str(&s).unwrap()); } fn __eq__(lhs: &PyCell, rhs: &PyCell) -> PyResult { Ok(lhs.borrow().0 == rhs.borrow().0) } fn __ne__(lhs: &PyCell, rhs: &PyCell) -> PyResult { Ok(lhs.borrow().0 != rhs.borrow().0) } fn __str__(&self) -> PyResult { Ok(format!( "{}: {}", self.0.datum.field(), self.0.datum.to_string() )) } fn __repr__(slf: PyRef) -> PyResult { Ok(format!( "UpstreamDatum({}, {}, {}, certainty={})", slf.0.datum.field(), slf.0.datum.to_string(), slf.0 .origin .as_ref() .map(|s| format!("Some({})", s)) .unwrap_or_else(|| "None".to_string()), slf.0 .certainty .as_ref() .map(|c| format!("Some({})", c.to_string())) .unwrap_or_else(|| "None".to_string()), )) } } #[pyclass] struct UpstreamMetadata(pub(crate) upstream_ontologist::UpstreamMetadata); #[allow(non_snake_case)] #[pymethods] impl UpstreamMetadata { fn __getitem__(&self, field: &str) -> PyResult { self.0 .get(&field) .map(|datum| UpstreamDatum(datum.clone())) .ok_or_else(|| PyKeyError::new_err(format!("No such field: {}", field))) } fn __delitem__(&mut self, field: &str) -> PyResult<()> { self.0.remove(&field); Ok(()) } fn __contains__(&self, field: &str) -> bool { self.0.contains_key(&field) } pub fn items(&self) -> Vec<(String, UpstreamDatum)> { self.0 .iter() .map(|datum| { ( datum.datum.field().to_string(), UpstreamDatum(datum.clone()), ) }) .collect() } pub fn values(&self) -> Vec { self.0 .iter() .map(|datum| UpstreamDatum(datum.clone())) .collect() } pub fn get(&self, py: Python, field: &str, default: Option) -> PyObject { let default = default.unwrap_or_else(|| py.None()); let value = self .0 .get(&field) .map(|datum| UpstreamDatum(datum.clone()).into_py(py)); value.unwrap_or(default) } fn __setitem__(&mut self, field: &str, datum: UpstreamDatum) -> PyResult<()> { assert_eq!(field, datum.0.datum.field()); self.0.insert(datum.0); Ok(()) } #[new] #[pyo3(signature = (**kwargs))] fn new(kwargs: Option<&PyDict>) -> Self { let mut ret = UpstreamMetadata(upstream_ontologist::UpstreamMetadata::new()); if let Some(kwargs) = kwargs { for item in kwargs.items() { let datum = item.extract::().unwrap(); ret.0.insert(datum.0); } } ret } #[classmethod] pub fn from_dict( _cls: &PyType, py: Python, d: &PyDict, default_certainty: Option, ) -> PyResult { let mut data = Vec::new(); let mut di = d.iter(); while let Some(t) = di.next() { let t = t.to_object(py); let mut datum: upstream_ontologist::UpstreamDatumWithMetadata = if let Ok(wm) = t.extract(py) { wm } else { let wm: upstream_ontologist::UpstreamDatum = t.extract(py)?; upstream_ontologist::UpstreamDatumWithMetadata { datum: wm, certainty: default_certainty, origin: None, } }; if datum.certainty.is_none() { datum.certainty = default_certainty; } data.push(datum); } Ok(Self(upstream_ontologist::UpstreamMetadata::from_data(data))) } pub fn __iter__(slf: PyRef) -> PyResult { #[pyclass] struct UpstreamDatumIter { inner: Vec, } #[pymethods] impl UpstreamDatumIter { fn __next__(&mut self) -> Option { self.inner.pop().map(UpstreamDatum) } } Ok(UpstreamDatumIter { inner: slf.0.iter().cloned().collect::>(), } .into_py(slf.py())) } } #[pyfunction] fn guess_upstream_info( py: Python, path: std::path::PathBuf, trust_package: Option, ) -> PyResult> { let mut result = Vec::new(); for datum in upstream_ontologist::guess_upstream_info(&path, trust_package) { let datum = match datum { Ok(datum) => datum, Err(e) => { log::warn!("Warning: {}", e); continue; } }; result.push(datum.to_object(py)); } Ok(result) } #[pyfunction] fn description_from_readme_md( py: Python, contents: &str, ) -> PyResult<(Option, Vec)> { let (description, metadata) = upstream_ontologist::readme::description_from_readme_md(contents)?; let metadata = metadata .into_iter() .map(|datum| datum.to_object(py)) .collect(); Ok((description, metadata)) } #[pyfunction] fn get_upstream_info( py: Python, path: std::path::PathBuf, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> PyResult<&PyDict> { let metadata = upstream_ontologist::get_upstream_info( path.as_path(), trust_package, net_access, consult_external_directory, check, )?; let ret = PyDict::new(py); for datum in metadata.iter() { ret.set_item( datum.datum.field(), datum .datum .to_object(py) .extract::<(String, PyObject)>(py)? .1, )?; } Ok(ret) } #[pyfunction] fn check_upstream_metadata(metadata: &mut UpstreamMetadata) -> PyResult<()> { upstream_ontologist::check_upstream_metadata(&mut metadata.0, None); Ok(()) } #[pyfunction] fn extend_upstream_metadata( metadata: &mut UpstreamMetadata, path: std::path::PathBuf, minimum_certainty: Option, net_access: Option, consult_external_directory: Option, ) -> PyResult<()> { let minimum_certainty = minimum_certainty .map(|s| s.parse()) .transpose() .map_err(|e: String| { PyValueError::new_err(format!("Invalid minimum_certainty: {}", e.to_string())) })?; upstream_ontologist::extend_upstream_metadata( &mut metadata.0, path.as_path(), minimum_certainty, net_access, consult_external_directory, )?; Ok(()) } #[pyfunction] fn guess_upstream_metadata( path: std::path::PathBuf, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> PyResult { Ok(UpstreamMetadata( upstream_ontologist::guess_upstream_metadata( path.as_path(), trust_package, net_access, consult_external_directory, check, )?, )) } #[pyfunction] fn guess_upstream_metadata_items( py: Python, path: std::path::PathBuf, trust_package: Option, minimum_certainty: Option, ) -> PyResult> { let metadata = upstream_ontologist::guess_upstream_metadata_items( path.as_path(), trust_package, minimum_certainty .map(|s| s.parse()) .transpose() .map_err(|e: String| { PyValueError::new_err(format!("Invalid minimum_certainty: {}", e.to_string())) })?, ); Ok(metadata .into_iter() .map(|datum| datum.map(|o| o.to_object(py))) .filter_map(Result::ok) .collect::>()) } #[pyfunction] fn fix_upstream_metadata(metadata: &mut UpstreamMetadata) -> PyResult<()> { upstream_ontologist::fix_upstream_metadata(&mut metadata.0); Ok(()) } #[pyfunction] fn update_from_guesses( py: Python, metadata: &mut UpstreamMetadata, items_iter: PyObject, ) -> PyResult> { let mut items = vec![]; loop { let item = match items_iter.call_method0(py, "__next__") { Ok(item) => item, Err(e) => { if e.is_instance_of::(py) { break; } return Err(e); } }; items.push(item.extract::(py)?); } Ok(upstream_ontologist::update_from_guesses( metadata.0.mut_items(), items.into_iter().map(|datum| datum.0), ) .into_iter() .map(UpstreamDatum) .collect()) } #[pyfunction] fn parse_first_header_text(text: &str) -> (Option<&str>, Option<&str>, Option<&str>) { upstream_ontologist::readme::parse_first_header_text(text) } #[pyfunction] fn description_from_readme_plain(text: &str) -> PyResult<(Option, Vec)> { let (description, data) = upstream_ontologist::readme::description_from_readme_plain(text)?; Ok((description, data.into_iter().map(UpstreamDatum).collect())) } #[pymodule] fn _upstream_ontologist(py: Python, m: &PyModule) -> PyResult<()> { pyo3_log::init(); m.add_wrapped(wrap_pyfunction!(url_from_git_clone_command))?; m.add_wrapped(wrap_pyfunction!(url_from_vcs_command))?; m.add_wrapped(wrap_pyfunction!(url_from_fossil_clone_command))?; m.add_wrapped(wrap_pyfunction!(url_from_svn_co_command))?; m.add_wrapped(wrap_pyfunction!(url_from_cvs_co_command))?; m.add_wrapped(wrap_pyfunction!(drop_vcs_in_scheme))?; m.add_wrapped(wrap_pyfunction!(unsplit_vcs_url))?; m.add_wrapped(wrap_pyfunction!(plausible_vcs_url))?; m.add_wrapped(wrap_pyfunction!(plausible_vcs_browse_url))?; m.add_wrapped(wrap_pyfunction!(check_url_canonical))?; m.add_wrapped(wrap_pyfunction!(guess_repo_from_url))?; m.add_wrapped(wrap_pyfunction!(probe_gitlab_host))?; m.add_wrapped(wrap_pyfunction!(is_gitlab_site))?; m.add_wrapped(wrap_pyfunction!(check_repository_url_canonical))?; m.add_wrapped(wrap_pyfunction!(probe_upstream_branch_url))?; m.add_wrapped(wrap_pyfunction!(guess_from_launchpad))?; m.add_wrapped(wrap_pyfunction!(canonical_git_repo_url))?; m.add_wrapped(wrap_pyfunction!(browse_url_from_repo_url))?; m.add_wrapped(wrap_pyfunction!(find_public_repo_url))?; m.add_wrapped(wrap_pyfunction!(find_forge))?; m.add_wrapped(wrap_pyfunction!(repo_url_from_merge_request_url))?; m.add_wrapped(wrap_pyfunction!(bug_database_from_issue_url))?; m.add_wrapped(wrap_pyfunction!(guess_bug_database_url_from_repo_url))?; m.add_wrapped(wrap_pyfunction!(bug_database_url_from_bug_submit_url))?; m.add_wrapped(wrap_pyfunction!(bug_submit_url_from_bug_database_url))?; m.add_wrapped(wrap_pyfunction!(check_bug_database_canonical))?; m.add_wrapped(wrap_pyfunction!(check_bug_submit_url_canonical))?; m.add_wrapped(wrap_pyfunction!(fixup_rcp_style_git_repo_url))?; m.add_wrapped(wrap_pyfunction!(check_upstream_metadata))?; m.add_wrapped(wrap_pyfunction!(extend_upstream_metadata))?; m.add_wrapped(wrap_pyfunction!(guess_upstream_metadata))?; m.add_wrapped(wrap_pyfunction!(fix_upstream_metadata))?; m.add_wrapped(wrap_pyfunction!(guess_upstream_metadata_items))?; m.add_wrapped(wrap_pyfunction!(update_from_guesses))?; m.add_wrapped(wrap_pyfunction!(description_from_readme_plain))?; let debianm = PyModule::new(py, "debian")?; debianm.add_wrapped(wrap_pyfunction!(upstream_package_to_debian_source_name))?; debianm.add_wrapped(wrap_pyfunction!(upstream_package_to_debian_binary_name))?; debianm.add_wrapped(wrap_pyfunction!(valid_debian_package_name))?; debianm.add_wrapped(wrap_pyfunction!(debian_to_upstream_version))?; debianm.add_wrapped(wrap_pyfunction!(upstream_name_to_debian_source_name))?; m.add("debian", debianm)?; m.add_wrapped(wrap_pyfunction!(find_secure_repo_url))?; m.add_wrapped(wrap_pyfunction!(sanitize_url))?; m.add_wrapped(wrap_pyfunction!(convert_cvs_list_to_str))?; m.add_wrapped(wrap_pyfunction!(fixup_broken_git_details))?; m.add_wrapped(wrap_pyfunction!(guess_upstream_info))?; m.add_wrapped(wrap_pyfunction!(get_upstream_info))?; m.add_wrapped(wrap_pyfunction!(description_from_readme_md))?; m.add_wrapped(wrap_pyfunction!(parse_first_header_text))?; m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add("InvalidUrl", py.get_type::())?; m.add("UnverifiableUrl", py.get_type::())?; m.add("NoSuchForgeProject", py.get_type::())?; m.add_wrapped(wrap_pyfunction!(known_bad_guess))?; let readmem = PyModule::new(py, "readme")?; readmem.add_wrapped(wrap_pyfunction!(readme_skip_paragraph))?; m.add_submodule(readmem)?; m.add( "ParseError", py.get_type::(), )?; m.add( "KNOWN_GITLAB_SITES", upstream_ontologist::vcs::KNOWN_GITLAB_SITES.to_vec(), )?; m.add( "SECURE_SCHEMES", upstream_ontologist::vcs::SECURE_SCHEMES.to_vec(), )?; m.add("__version__", env!("CARGO_PKG_VERSION"))?; Ok(()) } upstream-ontologist-0.1.37/upstream_ontologist/000077500000000000000000000000001462717511400217725ustar00rootroot00000000000000upstream-ontologist-0.1.37/upstream_ontologist/__init__.py000066400000000000000000000105001462717511400240770ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """Functions for working with upstream metadata. This gathers information about upstreams from various places. Each bit of information gathered is wrapped in a UpstreamDatum object, which contains the field name. The fields used here match those in https://wiki.debian.org/UpstreamMetadata Supported fields: - Homepage - Name - Contact - Repository - Repository-Browse - Bug-Database - Bug-Submit - Screenshots - Archive - Security-Contact Extensions for upstream-ontologist. - SourceForge-Project: Name of the SourceForge project - Wiki: URL to a wiki - Summary: A one-line description - Description: Multi-line description - License: Short description of the license - Copyright - Maintainer - Authors Supported, but currently not set. - FAQ - Donation - Documentation - Registration - Webservice """ from dataclasses import dataclass from email.utils import parseaddr from typing import Optional import ruamel.yaml from . import _upstream_ontologist get_upstream_info = _upstream_ontologist.get_upstream_info SUPPORTED_CERTAINTIES = ["certain", "confident", "likely", "possible", None] version_string = "0.1.37" USER_AGENT = "upstream-ontologist/" + version_string # Too aggressive? DEFAULT_URLLIB_TIMEOUT = 3 yaml = ruamel.yaml.YAML(typ="safe") @dataclass @yaml.register_class class Person: yaml_tag = "!Person" name: str email: Optional[str] = None url: Optional[str] = None def __init__(self, name, email=None, url=None): self.name = name self.email = email if url and url.startswith("mailto:"): self.email = url[len("mailto:") :] self.url = None else: self.url = url @classmethod def from_yaml(cls, constructor, node): d = {} for k, v in node.value: d[k.value] = v.value return cls(name=d.get("name"), email=d.get("email"), url=d.get("url")) @classmethod def from_string(cls, text): text = text.replace(" at ", "@") text = text.replace(" -at- ", "@") text = text.replace(" -dot- ", ".") text = text.replace("[AT]", "@") if "(" in text and text.endswith(")"): (p1, p2) = text[:-1].split("(", 1) if p2.startswith("https://") or p2.startswith("http://"): url = p2 if "<" in p1: (name, email) = parseaddr(p1) return cls(name=name, email=email, url=url) return cls(name=p1, url=url) elif "@" in p2: return cls(name=p1, email=p2) return cls(text) elif "<" in text: (name, email) = parseaddr(text) return cls(name=name, email=email) else: return cls(name=text) def __str__(self): if self.email: return f"{self.name} <{self.email}>" return self.name UpstreamDatum = _upstream_ontologist.UpstreamDatum UpstreamMetadata = _upstream_ontologist.UpstreamMetadata class UpstreamPackage: def __init__(self, family, name): self.family = family self.name = name # If we're setting them new, put Name and Contact first def upstream_metadata_sort_key(x): (k, v) = x return { "Name": "00-Name", "Contact": "01-Contact", }.get(k, k) class UrlUnverifiable(Exception): """Unable to check specified URL.""" def __init__(self, url, reason): self.url = url self.reason = reason class InvalidUrl(Exception): """Specified URL is invalid.""" def __init__(self, url, reason): self.url = url self.reason = reason upstream-ontologist-0.1.37/upstream_ontologist/_upstream_ontologist.pyi000066400000000000000000000116751462717511400270070ustar00rootroot00000000000000from typing import Any, Iterator from upstream_ontologist import UpstreamPackage def drop_vcs_in_scheme(url: str) -> str: ... def unsplit_vcs_url(repo_url: str, branch: str | None, subpath: str | None) -> str: ... def probe_gitlab_host(hostname: str) -> bool: ... def is_gitlab_site(hostname: str, net_access: bool | None = None) -> bool: ... def guess_repo_from_url(url: str, net_access: bool | None = None) -> str | None: ... def probe_gitlabb_host(hostname: str) -> bool: ... def find_public_repo_url(url: str, net_access: bool | None = None) -> str | None: ... def browse_url_from_repo_url( url: str, branch: str | None = None, subpath: str | None = None, net_access: bool | None = None, ) -> str | None: ... def plausible_vcs_url(url: str) -> bool: ... def plausible_vcs_browse_url(url: str) -> bool: ... def probe_upstream_branch_url(url: str, version: str | None = None) -> bool | None: ... def canonical_git_repo_url(url: str, net_access: bool | None = None) -> str: ... def check_repository_url_canonical(url: str, version: str | None = None) -> str: ... def known_bad_guess(datum: UpstreamDatum) -> bool: ... def url_from_svn_co_command(command: bytes) -> str | None: ... def url_from_git_clone_command(command: bytes) -> str | None: ... def url_from_fossil_clone_command(command: bytes) -> str | None: ... def url_from_cvs_co_command(command: bytes) -> str | None: ... def url_from_vcs_command(command: bytes) -> str | None: ... def find_forge(url: str, net_access: bool | None = None) -> Forge | None: ... def repo_url_from_merge_request_url( url: str, net_access: bool | None = None ) -> str | None: ... def bug_database_from_issue_url( url: str, net_access: bool | None = None ) -> str | None: ... def guess_bug_database_url_from_repo_url( url: str, net_access: bool | None = None ) -> str | None: ... def bug_database_url_from_bug_submit_url( url: str, net_access: bool | None = None ) -> str | None: ... def bug_submit_url_from_bug_database_url( url: str, net_access: bool | None = None ) -> str | None: ... def check_bug_database_canonical(url: str, net_access: bool | None = None) -> str: ... def check_bug_submit_url_canonical(url: str, net_access: bool | None = None) -> str: ... def check_url_canonical(url: str) -> str: ... def fixup_rcp_style_git_repo_url(url: str) -> str: ... def valid_debian_package_name(name: str) -> bool: ... def debian_to_upstream_version(version: str) -> str: ... def upstream_name_to_debian_source_name(upstream_name: str) -> str: ... def upstream_version_to_debian_upstream_version( version: str, family: str | None = None ) -> str: ... def check_upstream_metadata(data: UpstreamMetadata) -> None: ... def upstream_package_to_debian_source_name(package: UpstreamPackage) -> str: ... def upstream_package_to_debian_binary_name(package: UpstreamPackage) -> str: ... class ParseError(Exception): ... class NoSuchForgeProject(Exception): ... class Forge: @classmethod def extend_metadata( cls, upstream_metadata: UpstreamMetadata, project: str, certainty: str ) -> None: ... repository_browse_can_be_homepage: bool class GitHub(Forge): ... class GitLab(Forge): ... class SourceForge(Forge): ... class Launchpad(Forge): ... SECURE_SCHEMES: list[str] KNOWN_GITLAB_SITES: list[str] def find_secure_repo_url( url: str, branch: str | None = None, net_access: bool | None = None ) -> str | None: ... def sanitize_url(url: str) -> str: ... def convert_cvs_list_to_str(cvs_list: list[str]) -> str: ... def fixup_broken_git_details( url: str, branch: str | None = None, subpath: str | None = None ) -> tuple[str, str | None, str | None]: ... def guess_upstream_info( path: str, trust_package: bool = False ) -> list[UpstreamDatum]: ... def get_upstream_info( path: str, trust_package: bool = False, net_access: bool | None = None, consult_external_directory: bool = True, check: bool = True, ) -> UpstreamMetadata: ... def extend_upstream_metadata( upstream_metadata: UpstreamMetadata, path: str, minimum_certainty: str, net_access: bool | None = None, consult_external_directory: bool = True, ) -> None: ... def guess_upstream_metadata( path: str, trust_package: bool | None = None, net_access: bool | None = None, consult_external_directory: bool | None = None, check: bool | None = None, ) -> UpstreamMetadata: ... def fix_upstream_metadata(upstream_metadata: UpstreamMetadata) -> None: ... def guess_upstream_metadata_items( path: str, trust_package: bool | None = None, minimum_certainty: str | None = None ) -> Iterator[UpstreamDatum]: ... def update_from_guesses( upstream_metadata: UpstreamMetadata, items: list[UpstreamDatum], ) -> list[UpstreamDatum]: ... class UpstreamMetadata: def __init__(self, **kwargs): ... ... @classmethod def from_dict(cls, data, default_certainty: str | None = None): ... class UpstreamDatum: def __init__(self, name, value: Any, certainty: str | None = None): ... # noqa: ANN401 __version__: str upstream-ontologist-0.1.37/upstream_ontologist/debian.py000066400000000000000000000025761462717511400236000ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA from . import _upstream_ontologist upstream_package_to_debian_source_name = ( _upstream_ontologist.debian.upstream_package_to_debian_source_name # type: ignore ) upstream_package_to_debian_binary_name = ( _upstream_ontologist.debian.upstream_package_to_debian_binary_name # type: ignore ) valid_debian_package_name = _upstream_ontologist.debian.valid_debian_package_name # type: ignore debian_to_upstream_version = _upstream_ontologist.debian.debian_to_upstream_version # type: ignore upstream_name_to_debian_source_name = ( _upstream_ontologist.debian.upstream_name_to_debian_source_name # type: ignore ) upstream-ontologist-0.1.37/upstream_ontologist/guess.py000066400000000000000000000062331462717511400234760ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import logging from typing import Iterable, Iterator from . import ( UpstreamDatum, _upstream_ontologist, ) logger = logging.getLogger(__name__) NoSuchForgeProject = _upstream_ontologist.NoSuchForgeProject def guess_upstream_info(path, trust_package): return iter(_upstream_ontologist.guess_upstream_info(path, trust_package)) url_from_cvs_co_command = _upstream_ontologist.url_from_cvs_co_command url_from_svn_co_command = _upstream_ontologist.url_from_svn_co_command url_from_git_clone_command = _upstream_ontologist.url_from_git_clone_command url_from_fossil_clone_command = _upstream_ontologist.url_from_fossil_clone_command url_from_vcs_command = _upstream_ontologist.url_from_vcs_command class NoSuchPackage(Exception): def __init__(self, package): self.package = package GitHub = _upstream_ontologist.GitHub GitLab = _upstream_ontologist.GitLab SourceForge = _upstream_ontologist.SourceForge Launchpad = _upstream_ontologist.Launchpad find_forge = _upstream_ontologist.find_forge repo_url_from_merge_request_url = _upstream_ontologist.repo_url_from_merge_request_url bug_database_from_issue_url = _upstream_ontologist.bug_database_from_issue_url guess_bug_database_url_from_repo_url = ( _upstream_ontologist.guess_bug_database_url_from_repo_url ) bug_database_url_from_bug_submit_url = ( _upstream_ontologist.bug_database_url_from_bug_submit_url ) bug_submit_url_from_bug_database_url = ( _upstream_ontologist.bug_submit_url_from_bug_database_url ) check_bug_database_canonical = _upstream_ontologist.check_bug_database_canonical check_bug_submit_url_canonical = _upstream_ontologist.check_bug_submit_url_canonical check_url_canonical = _upstream_ontologist.check_url_canonical get_upstream_info = _upstream_ontologist.get_upstream_info check_upstream_metadata = _upstream_ontologist.check_upstream_metadata extend_upstream_metadata = _upstream_ontologist.extend_upstream_metadata guess_upstream_metadata = _upstream_ontologist.guess_upstream_metadata known_bad_guess = _upstream_ontologist.known_bad_guess def filter_bad_guesses( guesses: Iterable[UpstreamDatum], ) -> Iterator[UpstreamDatum]: return (guess for guess in guesses if not known_bad_guess(guess)) fix_upstream_metadata = _upstream_ontologist.fix_upstream_metadata guess_upstream_metadata_items = _upstream_ontologist.guess_upstream_metadata_items update_from_guesses = _upstream_ontologist.update_from_guesses upstream-ontologist-0.1.37/upstream_ontologist/py.typed000066400000000000000000000000001462717511400234570ustar00rootroot00000000000000upstream-ontologist-0.1.37/upstream_ontologist/readme.py000066400000000000000000000261021462717511400236020ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """README parsing.""" import logging import platform import re from typing import Iterable, List, Optional, Tuple from urllib.parse import urlparse from . import UpstreamDatum, _upstream_ontologist logger = logging.getLogger(__name__) _skip_paragraph = _upstream_ontologist.readme.skip_paragraph # type: ignore description_from_readme_md = _upstream_ontologist.description_from_readme_md # type: ignore _parse_first_header_text = _upstream_ontologist.parse_first_header_text # type: ignore description_from_readme_plain = _upstream_ontologist.description_from_readme_plain # type: ignore def _skip_paragraph_block(para): # noqa: C901 (skip, extra_metadata) = _skip_paragraph(para.get_text()) if skip: return (True, extra_metadata) for c in para.children: if isinstance(c, str) and not c.strip(): continue if c.name == "a": if len(list(c.children)) != 1: name = None elif isinstance(list(c.children)[0], str): name = list(c.children)[0] elif list(c.children)[0].name == "img": name = list(c.children)[0].get("alt") else: name = None if name in ("CRAN", "CRAN_Status_Badge", "CRAN_Logs_Badge"): extra_metadata.append( UpstreamDatum("Archive", "CRAN", certainty="confident") ) elif name == "Gitter": parsed_url = urlparse(c.get("href")) extra_metadata.append( UpstreamDatum( "Repository", "https://github.com/{}".format( "/".join(parsed_url.path.strip("/").split("/")[:2]) ), certainty="confident", ) ) elif name and name.lower() == "build status": parsed_url = urlparse(c.get("href")) if parsed_url.hostname == "travis-ci.org": extra_metadata.append( UpstreamDatum( "Repository", "https://github.com/{}".format( "/".join(parsed_url.path.strip("/").split("/")[:2]) ), certainty="confident", ) ) elif name and name.lower() == "documentation": extra_metadata.append( UpstreamDatum("Documentation", c.get("href"), certainty="confident") ) elif name and name.lower() == "api docs": extra_metadata.append( UpstreamDatum( "API-Documentation", c.get("href"), certainty="confident" ) ) elif name and name.lower() == "downloads": extra_metadata.append( UpstreamDatum("Download", c.get("href"), certainty="confident") ) elif name and name.lower() == "crates.io": href = c.get("href") if href.startswith("https://crates.io/crates/"): extra_metadata.append( UpstreamDatum( "Cargo-Crate", href.rsplit("/")[-1], certainty="confident" ) ) elif name: m = re.match("(.*) License", name) if m: extra_metadata.append( UpstreamDatum("License", m.group(1), certainty="likely") ) else: logger.debug("Unhandled field %r in README", name) continue break else: return (True, extra_metadata) if para.get_text() == "": return (True, extra_metadata) return (False, []) def render(el): return el.get_text() def _parse_first_header(el): name, summary, version = _parse_first_header_text(el.get_text()) if not name and el.get_text(): name = el.get_text() if name: if "installation" in name.lower(): certainty = "possible" else: certainty = "likely" if name.startswith("About "): name = name[len("About ") :] yield UpstreamDatum("Name", name.strip(), certainty=certainty) if summary: yield UpstreamDatum("Summary", summary, certainty="likely") if version: yield UpstreamDatum("Version", version, certainty="likely") def _is_semi_header(el): if el.name != "p": return False if el.get_text().strip() == "INSTALLATION": return True if el.get_text().count("\n") > 0: return False m = re.match(r"([a-z-A-Z0-9]+) - ([^\.]+)", el.get_text()) if m: return True return False def _ul_is_field_list(el): names = ["Issues", "Home", "Documentation", "License"] for li in el.findAll("li"): m = re.match(r"([A-Za-z]+)\s*:.*", li.get_text().strip()) if not m or m.group(1) not in names: return False return True def _extract_paragraphs(children, metadata): paragraphs = [] for el in children: if isinstance(el, str): continue if el.name == "div": paragraphs.extend(_extract_paragraphs(el.children, metadata)) if paragraphs and "section" in (el.get("class") or []): break if el.name == "p": if _is_semi_header(el): if len(paragraphs) == 0: metadata.extend(_parse_first_header(el)) continue else: break (skip, extra_metadata) = _skip_paragraph_block(el) metadata.extend(extra_metadata) if skip: if len(paragraphs) > 0: break else: continue if el.get_text().strip(): paragraphs.append(render(el) + "\n") elif el.name == "pre": paragraphs.append(render(el)) elif el.name == "ul" and len(paragraphs) > 0: if _ul_is_field_list(el): metadata.extend(_parse_ul_field_list(el)) else: paragraphs.append( "".join(f"* {li.get_text()}\n" for li in el.findAll("li")) ) elif re.match("h[0-9]", el.name): if len(paragraphs) == 0: if el.get_text() not in ("About", "Introduction", "Overview"): metadata.extend(_parse_first_header(el)) continue break return paragraphs def _parse_field(name, body): if name == "Homepage" and body.find("a"): yield UpstreamDatum( "Homepage", body.find("a").get("href"), certainty="confident" ) if name == "Home" and body.find("a"): yield UpstreamDatum( "Homepage", body.find("a").get("href"), certainty="confident" ) if name == "Issues" and body.find("a"): yield UpstreamDatum( "Bug-Database", body.find("a").get("href"), certainty="confident" ) if name == "Documentation" and body.find("a"): yield UpstreamDatum( "Documentation", body.find("a").get("href"), certainty="confident" ) if name == "License": yield UpstreamDatum("License", body.get_text(), certainty="confident") def _parse_ul_field_list(el): for li in el.findAll("li"): cs = list(li.children) if len(cs) == 2 and isinstance(cs[0], str): name = cs[0].strip().rstrip(":") body = cs[1] yield from _parse_field(name, body) def _parse_field_list(tab): for tr in tab.findAll("tr", {"class": "field"}): name_cell = tr.find("th", {"class": "field-name"}) if not name_cell: continue name = name_cell.get_text().rstrip(":") body = tr.find("td", {"class": "field-body"}) if not body: continue yield from _parse_field(name, body) def _description_from_basic_soup(soup) -> Tuple[Optional[str], Iterable[UpstreamDatum]]: # Drop any headers metadata = [] if soup is None: return None, [] # First, skip past the first header. for el in soup.children: if el.name in ("h1", "h2", "h3"): metadata.extend(_parse_first_header(el)) el.decompose() break elif isinstance(el, str): pass else: break table = soup.find("table", {"class": "field-list"}) if table: metadata.extend(_parse_field_list(table)) paragraphs: List[str] = [] paragraphs.extend(_extract_paragraphs(soup.children, metadata)) if len(paragraphs) == 0: logger.debug("Empty description; no paragraphs.") return None, metadata if len(paragraphs) < 6: return "\n".join(paragraphs), metadata logger.debug( "Not returning description, number of paragraphs too high: %d", len(paragraphs) ) return None, metadata def description_from_readme_html( html_text: str, ) -> Tuple[Optional[str], Iterable[UpstreamDatum]]: """Description from HTML.""" try: from bs4 import BeautifulSoup, FeatureNotFound except ModuleNotFoundError: logger.debug("BeautifulSoup not available, not parsing HTML") return None, {} try: soup = BeautifulSoup(html_text, "lxml") except FeatureNotFound: logger.debug("lxml not available, not parsing HTML") return None, {} return _description_from_basic_soup(soup.body) def description_from_readme_rst( rst_text: str, ) -> Tuple[Optional[str], Iterable[UpstreamDatum]]: """Description from README.rst.""" if platform.python_implementation() == "PyPy": logger.debug("docutils does not appear to work on PyPy, skipping README.rst.") return None, {} try: from docutils.core import publish_parts except ModuleNotFoundError: logger.debug("docutils not available, not parsing README.rst") return None, {} from docutils.writers.html4css1 import Writer settings = {"initial_header_level": 2, "report_level": 0} html_text = publish_parts( rst_text, writer=Writer(), settings_overrides=settings ).get("html_body") return description_from_readme_html(html_text) upstream-ontologist-0.1.37/upstream_ontologist/vcs.py000066400000000000000000000035621462717511400231450ustar00rootroot00000000000000#!/usr/bin/python3 # Copyright (C) 2018 Jelmer Vernooij # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA __all__ = [ "plausible_url", "plausible_browse_url", "sanitize_url", "is_gitlab_site", "browse_url_from_repo_url", "probe_gitlab_host", "guess_repo_from_url", "probe_upstream_branch_url", "check_repository_url_canonical", "unsplit_vcs_url", "browse_url_from_repo_url", "find_public_repo_url", "SECURE_SCHEMES", "find_secure_repo_url", "convert_cvs_list_to_str", "fixup_broken_git_details", ] from ._upstream_ontologist import ( # noqa: F401 KNOWN_GITLAB_SITES, SECURE_SCHEMES, browse_url_from_repo_url, canonical_git_repo_url, check_repository_url_canonical, convert_cvs_list_to_str, drop_vcs_in_scheme, find_public_repo_url, find_secure_repo_url, fixup_broken_git_details, fixup_rcp_style_git_repo_url, guess_repo_from_url, is_gitlab_site, probe_gitlab_host, probe_upstream_branch_url, sanitize_url, unsplit_vcs_url, ) from ._upstream_ontologist import ( plausible_vcs_browse_url as plausible_browse_url, ) from ._upstream_ontologist import ( plausible_vcs_url as plausible_url, )