pax_global_header00006660000000000000000000000064150377542040014521gustar00rootroot0000000000000052 comment=bc6e9c1b9c363a1e6101daaa4a2f13492ebf5f4c line_profiler-5.0.0/000077500000000000000000000000001503775420400143545ustar00rootroot00000000000000line_profiler-5.0.0/.codecov.yml000066400000000000000000000007671503775420400166110ustar00rootroot00000000000000# For more configuration details: # https://docs.codecov.io/docs/codecov-yaml # Check if this file is valid by running in bash: # curl -X POST --data-binary @.codecov.yml https://codecov.io/validate codecov: require_ci_to_pass: yes coverage: precision: 2 round: down range: "50...100" parsers: gcov: branch_detection: conditional: yes loop: yes method: no macro: no comment: layout: "reach,diff,flags,files,footer" behavior: default require_changes: no line_profiler-5.0.0/.github/000077500000000000000000000000001503775420400157145ustar00rootroot00000000000000line_profiler-5.0.0/.github/dependabot.yml000066400000000000000000000002671503775420400205510ustar00rootroot00000000000000version: 2 updates: # Maintain dependencies for GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" day: "friday" line_profiler-5.0.0/.github/workflows/000077500000000000000000000000001503775420400177515ustar00rootroot00000000000000line_profiler-5.0.0/.github/workflows/tests.yml000066400000000000000000000544031503775420400216440ustar00rootroot00000000000000# This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions # Based on ~/code/xcookie/xcookie/rc/tests.yml.in # Now based on ~/code/xcookie/xcookie/builders/github_actions.py # See: https://github.com/Erotemic/xcookie name: BinPyCI on: push: pull_request: branches: [ main ] jobs: lint_job: ## # Run quick linting and typing checks. # To disable all linting add "linter=false" to the xcookie config. # To disable type checks add "notypes" to the xcookie tags. ## runs-on: ubuntu-latest steps: - name: Checkout source uses: actions/checkout@v4.1.1 - name: Set up Python 3.13 for linting uses: actions/setup-python@v5.1.1 with: python-version: '3.13' - name: Install dependencies run: |- python -m pip install pip uv -U python -m uv pip install flake8 - name: Lint with flake8 run: |- # stop the build if there are Python syntax errors or undefined names flake8 ./line_profiler --count --select=E9,F63,F7,F82 --show-source --statistics - name: Typecheck with mypy run: |- python -m pip install mypy pip install -r requirements/runtime.txt mypy --install-types --non-interactive ./line_profiler mypy ./line_profiler build_and_test_sdist: ## # Build the binary package from source and test it in the same # environment. 
## name: Build sdist runs-on: ubuntu-latest steps: - name: Checkout source uses: actions/checkout@v4.1.1 - name: Set up Python 3.13 uses: actions/setup-python@v5.1.1 with: python-version: '3.13' - name: Upgrade pip run: |- python -m pip install pip uv -U python -m uv pip install -r requirements/tests.txt python -m uv pip install -r requirements/runtime.txt - name: Build sdist shell: bash run: |- python -m pip install pip uv -U python -m uv pip install setuptools>=0.8 wheel build twine python -m build --sdist --outdir wheelhouse python -m twine check ./wheelhouse/line_profiler*.tar.gz - name: Install sdist run: |- ls -al wheelhouse python -m uv pip install wheelhouse/line_profiler*.tar.gz -v - name: Test minimal loose sdist run: |- pwd ls -al # Run in a sandboxed directory WORKSPACE_DNAME="testsrcdir_minimal_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}" mkdir -p $WORKSPACE_DNAME cd $WORKSPACE_DNAME # Run the tests # Get path to installed package MOD_DPATH=$(python -c "import line_profiler, os; print(os.path.dirname(line_profiler.__file__))") echo "MOD_DPATH = $MOD_DPATH" python -m pytest --verbose --cov=line_profiler $MOD_DPATH ../tests cd .. - name: Test full loose sdist run: |- pwd ls -al true # Run in a sandboxed directory WORKSPACE_DNAME="testsrcdir_full_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}" mkdir -p $WORKSPACE_DNAME cd $WORKSPACE_DNAME # Run the tests # Get path to installed package MOD_DPATH=$(python -c "import line_profiler, os; print(os.path.dirname(line_profiler.__file__))") echo "MOD_DPATH = $MOD_DPATH" python -m pytest --verbose --cov=line_profiler $MOD_DPATH ../tests cd .. - uses: actions/upload-artifact@v4.4.0 name: Upload sdist artifact with: name: sdist_wheels path: ./wheelhouse/line_profiler*.tar.gz build_binpy_wheels: ## # Build the binary wheels. Note: even though cibuildwheel will test # them internally here, we will test them independently later in the # test_binpy_wheels step. ## name: ${{ matrix.os }}, arch=${{ matrix.arch }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: # Normally, xcookie generates explicit lists of platforms to build / test # on, but in this case cibuildwheel does that for us, so we need to just # set the environment variables for cibuildwheel. These are parsed out of # the standard [tool.cibuildwheel] section in pyproject.toml and set # explicitly here. os: - ubuntu-latest - macOS-latest - windows-latest cibw_skip: - '*-win32 *-win32 cp313-musllinux_i686' arch: - auto steps: - name: Checkout source uses: actions/checkout@v4.1.1 - name: Enable MSVC 64bit uses: ilammy/msvc-dev-cmd@v1 if: matrix.os == 'windows-latest' && ${{ contains(matrix.cibw_skip, '*-win32') }} - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 if: runner.os == 'Linux' && matrix.arch != 'auto' with: platforms: all - name: Build binary wheels uses: pypa/cibuildwheel@v2.21.0 with: output-dir: wheelhouse config-file: pyproject.toml env: CIBW_SKIP: ${{ matrix.cibw_skip }} CIBW_ARCHS_LINUX: ${{ matrix.arch }} CIBW_ENVIRONMENT: PYTHONUTF8=1 PYTHONUTF8: '1' - name: Show built files shell: bash run: ls -la wheelhouse - name: Set up Python 3.13 to combine coverage uses: actions/setup-python@v5.1.1 if: runner.os == 'Linux' with: python-version: '3.13' - name: Combine coverage Linux if: runner.os == 'Linux' run: |- echo '############ PWD' pwd cp .wheelhouse/.coverage* . || true ls -al uv pip install coverage[toml] | pip install coverage[toml] echo '############ combine' coverage combine . 
|| true echo '############ XML' coverage xml -o ./coverage.xml || true echo '### The cwd should now have a coverage.xml' ls -altr pwd - uses: codecov/codecov-action@v4.5.0 name: Codecov Upload env: HAVE_CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN != '' }} # Only upload coverage if we have the token if: ${{ env.HAVE_PERSONAL_TOKEN == 'true' }} with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} - uses: codecov/codecov-action@v4.5.0 name: Codecov Upload with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} - uses: actions/upload-artifact@v4.4.0 name: Upload wheels artifact with: name: wheels-${{ matrix.os }}-${{ matrix.arch }} path: ./wheelhouse/line_profiler*.whl test_binpy_wheels: ## # Download the previously build binary wheels from the # build_binpy_wheels step, and test them in an independent # environment. ## name: ${{ matrix.python-version }} on ${{ matrix.os }}, arch=${{ matrix.arch }} with ${{ matrix.install-extras }} if: "! startsWith(github.event.ref, 'refs/heads/release')" runs-on: ${{ matrix.os }} needs: - build_binpy_wheels strategy: fail-fast: false matrix: # Xcookie generates an explicit list of environments that will be used # for testing instead of using the more concise matrix notation. include: - python-version: '3.8' install-extras: tests-strict,runtime-strict os: ubuntu-latest arch: auto - python-version: '3.8' install-extras: tests-strict,runtime-strict os: macOS-latest arch: auto - python-version: '3.8' install-extras: tests-strict,runtime-strict os: windows-latest arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict os: ubuntu-latest arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict os: macOS-latest arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict os: windows-latest arch: auto - python-version: '3.13' install-extras: tests os: macOS-latest arch: auto - python-version: '3.13' install-extras: tests os: windows-latest arch: auto - python-version: '3.8' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.9' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.10' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.11' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.12' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.13' install-extras: tests,optional os: ubuntu-latest arch: auto - python-version: '3.8' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.9' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.10' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.11' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.12' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.13' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.8' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.9' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.10' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.11' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.12' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.13' install-extras: tests,optional os: windows-latest arch: 
auto steps: - name: Checkout source uses: actions/checkout@v4.1.1 - name: Enable MSVC 64bit uses: ilammy/msvc-dev-cmd@v1 if: matrix.os == 'windows-latest' - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 if: runner.os == 'Linux' && matrix.arch != 'auto' with: platforms: all - name: Setup Python uses: actions/setup-python@v5.1.1 with: python-version: ${{ matrix.python-version }} - uses: actions/download-artifact@v4.1.8 name: Download wheels with: pattern: wheels-* merge-multiple: true path: wheelhouse - name: Install wheel ${{ matrix.install-extras }} shell: bash env: INSTALL_EXTRAS: ${{ matrix.install-extras }} run: |- echo "Finding the path to the wheel" ls wheelhouse || echo "wheelhouse does not exist" echo "Installing helpers" python -m pip install pip uv -U python -m uv pip install setuptools>=0.8 setuptools_scm wheel build -U python -m uv pip install tomli pkginfo export WHEEL_FPATH=$(python -c "if 1: import pathlib dist_dpath = pathlib.Path('wheelhouse') candidates = list(dist_dpath.glob('line_profiler*.whl')) candidates += list(dist_dpath.glob('line_profiler*.tar.gz')) fpath = sorted(candidates)[-1] print(str(fpath).replace(chr(92), chr(47))) ") export MOD_VERSION=$(python -c "if 1: from pkginfo import Wheel, SDist fpath = '$WHEEL_FPATH' cls = Wheel if fpath.endswith('.whl') else SDist print(cls(fpath).version) ") echo "WHEEL_FPATH=$WHEEL_FPATH" echo "INSTALL_EXTRAS=$INSTALL_EXTRAS" echo "MOD_VERSION=$MOD_VERSION" python -m uv pip install "line_profiler[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse echo "Install finished." - name: Test wheel ${{ matrix.install-extras }} shell: bash env: CI_PYTHON_VERSION: py${{ matrix.python-version }} run: |- echo "Creating test sandbox directory" export WORKSPACE_DNAME="testdir_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}" echo "WORKSPACE_DNAME=$WORKSPACE_DNAME" mkdir -p $WORKSPACE_DNAME echo "cd-ing into the workspace" cd $WORKSPACE_DNAME pwd ls -altr # Get the path to the installed package and run the tests export MOD_DPATH=$(python -c "import line_profiler, os; print(os.path.dirname(line_profiler.__file__))") export MOD_NAME=line_profiler echo " --- MOD_DPATH = $MOD_DPATH --- running the pytest command inside the workspace --- " python -m pytest --verbose -p pytester -p no:doctest --xdoctest --cov-config ../pyproject.toml --cov-report term --durations=100 --cov="$MOD_NAME" "$MOD_DPATH" ../tests echo "pytest command finished, moving the coverage file to the repo root" ls -al # Move coverage file to a new name mv .coverage "../.coverage.$WORKSPACE_DNAME" echo "changing directory back to th repo root" cd .. ls -al - name: Combine coverage Linux if: runner.os == 'Linux' run: |- echo '############ PWD' pwd cp .wheelhouse/.coverage* . || true ls -al uv pip install coverage[toml] | pip install coverage[toml] echo '############ combine' coverage combine . || true echo '############ XML' coverage xml -o ./coverage.xml || true echo '### The cwd should now have a coverage.xml' ls -altr pwd - uses: codecov/codecov-action@v4.5.0 name: Codecov Upload with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} test_deploy: name: Deploy Test runs-on: ubuntu-latest if: github.event_name == 'push' && ! startsWith(github.event.ref, 'refs/tags') && ! 
startsWith(github.event.ref, 'refs/heads/release') needs: - build_and_test_sdist - build_binpy_wheels steps: - name: Checkout source uses: actions/checkout@v4.1.1 - uses: actions/download-artifact@v4.1.8 name: Download wheels with: pattern: wheels-* merge-multiple: true path: wheelhouse - uses: actions/download-artifact@v4.1.8 name: Download sdist with: name: sdist_wheels path: wheelhouse - name: Show files to upload shell: bash run: ls -la wheelhouse - name: Sign and Publish env: TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.TEST_TWINE_PASSWORD }} CI_SECRET: ${{ secrets.CI_SECRET }} run: |- GPG_EXECUTABLE=gpg $GPG_EXECUTABLE --version openssl version $GPG_EXECUTABLE --list-keys echo "Decrypting Keys" openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import echo "Finish Decrypt Keys" $GPG_EXECUTABLE --list-keys || true $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" $GPG_EXECUTABLE --list-keys VERSION=$(python -c "import setup; print(setup.VERSION)") python -m pip install pip uv -U python -m pip install packaging twine -U python -m pip install urllib3 requests[security] GPG_KEYID=$(cat dev/public_gpg_key) echo "GPG_KEYID = '$GPG_KEYID'" GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") echo "$WHEEL_PATHS_STR" for WHEEL_PATH in "${WHEEL_PATHS[@]}" do echo "------" echo "WHEEL_PATH = $WHEEL_PATH" $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH done ls -la wheelhouse python -m pip install opentimestamps-client ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc ls -la wheelhouse twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - uses: actions/upload-artifact@v4.4.0 name: Upload deploy artifacts with: name: deploy_artifacts path: |- wheelhouse/*.whl wheelhouse/*.zip wheelhouse/*.tar.gz wheelhouse/*.asc wheelhouse/*.ots live_deploy: name: Deploy Live runs-on: ubuntu-latest if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) needs: - build_and_test_sdist - build_binpy_wheels steps: - name: Checkout source uses: actions/checkout@v4.1.1 - uses: actions/download-artifact@v4.1.8 name: Download wheels with: pattern: wheels-* merge-multiple: true path: wheelhouse - uses: actions/download-artifact@v4.1.8 name: Download sdist with: name: sdist_wheels path: wheelhouse - name: Show files to upload shell: bash run: ls -la wheelhouse - name: Sign and Publish env: TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/ TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} CI_SECRET: ${{ secrets.CI_SECRET }} run: |- GPG_EXECUTABLE=gpg $GPG_EXECUTABLE --version openssl version $GPG_EXECUTABLE 
--list-keys echo "Decrypting Keys" openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import echo "Finish Decrypt Keys" $GPG_EXECUTABLE --list-keys || true $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" $GPG_EXECUTABLE --list-keys VERSION=$(python -c "import setup; print(setup.VERSION)") python -m pip install pip uv -U python -m pip install packaging twine -U python -m pip install urllib3 requests[security] GPG_KEYID=$(cat dev/public_gpg_key) echo "GPG_KEYID = '$GPG_KEYID'" GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") echo "$WHEEL_PATHS_STR" for WHEEL_PATH in "${WHEEL_PATHS[@]}" do echo "------" echo "WHEEL_PATH = $WHEEL_PATH" $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH done ls -la wheelhouse python -m pip install opentimestamps-client ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc ls -la wheelhouse twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - uses: actions/upload-artifact@v4.4.0 name: Upload deploy artifacts with: name: deploy_artifacts path: |- wheelhouse/*.whl wheelhouse/*.zip wheelhouse/*.tar.gz wheelhouse/*.asc wheelhouse/*.ots release: name: Create Github Release if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) runs-on: ubuntu-latest permissions: contents: write needs: - live_deploy steps: - name: Checkout source uses: actions/checkout@v4.1.1 - uses: actions/download-artifact@v4.1.8 name: Download artifacts with: name: deploy_artifacts path: wheelhouse - name: Show files to release shell: bash run: ls -la wheelhouse - run: 'echo "Automatic Release Notes. TODO: improve" > ${{ github.workspace }}-CHANGELOG.txt' - name: Tag Release Commit if: (startsWith(github.event.ref, 'refs/heads/release')) run: |- export VERSION=$(python -c "import setup; print(setup.VERSION)") git tag "v$VERSION" git push origin "v$VERSION" - uses: softprops/action-gh-release@v1 name: Create Release id: create_release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: body_path: ${{ github.workspace }}-CHANGELOG.txt tag_name: ${{ github.ref }} name: Release ${{ github.ref }} body: Automatic Release generate_release_notes: true draft: true prerelease: false files: |- wheelhouse/*.whl wheelhouse/*.asc wheelhouse/*.ots wheelhouse/*.zip wheelhouse/*.tar.gz line_profiler-5.0.0/.gitignore000066400000000000000000000004411503775420400163430ustar00rootroot00000000000000# Swap files. 
.*.swp .*.swo *~ *.pyc *.pyo *.pyd *.so *.o *.a *.cpp build/ dist/ _skbuild _line_profiler.c line_profiler.egg-info/ MANIFEST pypi-site-docs.zip index.html line_profiler/_line_profiler.html .coverage tests/coverage.xml tests/htmlcov wheelhouse profile_output* *.egg-info/ line_profiler-5.0.0/.readthedocs.yml000066400000000000000000000006541503775420400174470ustar00rootroot00000000000000# .readthedocs.yml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # # See Also: # https://readthedocs.org/dashboard/line_profiler/advanced/ # Required version: 2 build: os: "ubuntu-22.04" tools: python: "3.11" sphinx: configuration: docs/source/conf.py formats: all python: install: - requirements: requirements/docs.txt - method: pip path: . line_profiler-5.0.0/CHANGELOG.rst000066400000000000000000000223561503775420400164050ustar00rootroot00000000000000Changes ======= 5.0.0 ~~~~~ * FIX: win32 encoding issues * ENH: Add support for ``sys.monitoring`` (Python >= 3.12) * FIX: Fixed issue when calling ``kernprof`` with neither the ``-l`` nor ``-b`` flag; also refactored common methods to ``LineProfiler`` and ``ContextualProfile`` * FIX: Fixed auto-profiling of async function definitions #330 * ENH: Added CLI argument ``-m`` to ``kernprof`` for running a library module as a script; also made it possible for profiling targets to be supplied across multiple ``-p`` flags * FIX: Fixed explicit profiling of class methods; added handling for profiling static, bound, and partial methods, ``functools.partial`` objects, (cached) properties, and async generator functions * FIX: Fixed namespace bug when running ``kernprof -m`` on certain modules (e.g. ``calendar`` on Python 3.12+). * FIX: Fixed ``@contextlib.contextmanager`` bug where the cleanup code (e.g. restoration of ``sys`` attributes) is not run if exceptions occurred inside the context * ENH: Added CLI arguments ``-c`` to ``kernprof`` for (auto-)profiling module/package/inline-script execution instead of that of script files; passing ``'-'`` as the script-file name now also reads from and profiles ``stdin`` * ENH: In Python >=3.11, profiled objects are reported using their qualified name. 
* ENH: Highlight final summary using rich if enabled * ENH: Made it possible to use multiple profiler instances simultaneously * ENH: various improvements related to auto-profiling: * ``kernprof -p`` target entities are now imported and profiled regardless of whether they are directly imported in the run script/module/code (old behavior restored by passing ``--no-preimports``) * ``kernprof -v`` and the new ``-q`` now control the verbosity level instead of being a boolean, allowing diagnostic outputs or output suppression * On-import profiling is now more aggressive so that it doesn't miss entities like class methods and properties * ``LineProfiler`` can now be used as a class decorator * FIX: Fixed line tracing for Cython code; superseded use of the legacy tracing system with ``sys.monitoring`` * FIX: Fixed edge cases where: * ``LineProfiler.get_stats()`` neglected data from duplicate code objects (#348) * ``LineProfiler`` instances may stop receiving tracing events when multiple instances were used (#350) * Line events were not reported for ``raise`` statements and ``finally:`` bodies when using ``sys.monitoring`` (#355) * FIX: Tracing-system-related fixes (#333): * ``LineProfiler`` now caches the existing ``sys`` or ``sys.monitoring`` trace callbacks in ``.enable()`` and restores them in ``.disable()``, instead of always discarding them on the way out * Also added experimental support for calling (instead of suspending) said callbacks during profiling * Now allowing switching back to the "legacy" trace system on Python 3.12+, controlled by an environment variable * ENH: Added capability to parse TOML config files for defaults (#335): * ``kernprof`` and ``python -m line_profiler`` CLI options * ``GlobalProfiler`` configurations, and * profiler output (e.g. ``LineProfiler.print_stats()``) formatting 4.2.0 ~~~~~ * FIX: Fix issue with auto-profile of editable installs #279 * FIX: Lookup OP-codes instead of hard coding them #284 * CHANGE: Drop support for Python 3.6 and Python 3.7 * ENH: Add support for Python 3.13 4.1.3 ~~~~~ * FIX: duration summary now respects the stripzeros argument. * FIX: minor test fixes. * ENH: building osx wheels for x86 and arm64. * ENH: documentation improvements. * ENH: signed wheels are now hosted on github and published as release artifacts. 4.1.2 ~~~~~ * ENH: Add support for Python 3.12 #246 * ENH: Add osx universal2 and arm64 wheels #251 * ENH: Fix issue with integer overflow on 32 bit systems #249 4.1.1 ~~~~~ * FIX: ``get_stats`` is no longer slowed down when profiling many code sections #236 4.1.0 ~~~~~ * FIX: skipzeros now checks for zero hits instead of zero time * FIX: Fixed errors in Python 3.11 with duplicate functions. * FIX: ``show_text`` now increases column sizes or switches to scientific notation to maintain alignment * ENH: ``show_text`` now has new options: sort and summarize * ENH: Added new CLI arguments ``-srm`` to ``line_profiler`` to control sorting, rich printing, and summary printing. * ENH: New global ``profile`` function that can be enabled by ``--profile`` or ``LINE_PROFILE=1``. * ENH: New auto-profile feature in ``kernprof`` that will profile all functions in specified modules. * ENH: Kernprof now outputs instructions on how to view results. * ENH: Added readthedocs integration: https://kernprof.readthedocs.io/en/latest/index.html 4.0.3 ~~~~~ * FIX: Stop requiring bleeding-edge Cython unless necesasry (for Python 3.12). #206 4.0.2 ~~~~~ * FIX: AttributeError on certain methods. 
#191 4.0.1 ~~~~~ * FIX: Profiling classmethods works again. #183 4.0.0 ~~~~~ * ENH: Python 3.11 is now supported. * ENH: Profiling overhead is now drastically smaller, thanks to reimplementing almost all of the tracing callback in C++. You can expect to see reductions of between 0.3 and 1 microseconds per line hit, resulting in a speedup of up to 4x for codebases with many lines of Python that only do a little work per line. * ENH: Added the ``-i <# of seconds>`` option to the ``kernprof`` script. This uses the threading module to output profiling data to the output file every n seconds, and is useful for long-running tasks that shouldn't be stopped in the middle of processing. * CHANGE: Cython's native cythonize function is now used to compile the project, instead of scikit-build's convoluted process. * CHANGE: Due to optimizations done while reimplementing the callback in C++, the profiler's code_map and last_time attributes now are indexed by a hash of the code block's bytecode and its line number. Any code that directly reads (and processes) or edits the code_map and/or last_time attributes will likely break. 3.5.2 ~~~~~ * FIX: filepath test in is_ipython_kernel_cell for Windows #161 * ADD: setup.py now checks LINE_PROFILER_BUILD_METHOD to determine how to build binaries * ADD: LineProfiler.add_function warns if an added function has a __wrapped__ attribute 3.5.1 ~~~~~ * FIX: #19 line profiler now works on async functions again 3.5.0 ~~~~~ * FIX: #109 kernprof fails to write to stdout if stdout was replaced * FIX: Fixes max of an empty sequence error #118 * Make IPython optional * FIX: #100 Exception raise ZeroDivisionError 3.4.0 ~~~~~ * Drop support for Python <= 3.5.x * FIX: #104 issue with new IPython kernels 3.3.1 ~~~~~ * FIX: Fix bug where lines were not displayed in Jupyter>=6.0 via #93 * CHANGE: moving forward, new pypi releases will be signed with the GPG key 2A290272C174D28EA9CA48E9D7224DAF0347B114 for PyUtils-CI . For reference, older versions were signed with either 262A1DF005BE5D2D5210237C85CD61514641325F or 1636DAF294BA22B89DBB354374F166CFA2F39C18. 3.3.0 ~~~~~ * New CI for building wheels. 3.2.6 ~~~~~ * FIX: Update MANIFEST.in to package pyproj.toml and missing pyx file * CHANGE: Removed version experimental augmentation. 3.2.5 ~~~~~ * FIX: Update MANIFEST.in to package nested c source files in the sdist 3.2.4 ~~~~~ * FIX: Update MANIFEST.in to package nested CMakeLists.txt in the sdist 3.2.3 ~~~~~ * FIX: Use ImportError instead of ModuleNotFoundError while 3.5 is being supported * FIX: Add MANIFEST.in to package CMakeLists.txt in the sdist 3.2.2 ~~~~~ * ENH: Added better error message when c-extension is not compiled. * FIX: Kernprof no longer imports line_profiler to avoid side effects. 3.2.0 ~~~~~ * Dropped 2.7 support, manylinux docker images no longer support 2.7 * ENH: Add command line option to specify time unit and skip displaying functions which have not been profiled. * ENH: Unified versions of line_profiler and kernprof: kernprof version is now identical to line_profiler version. 3.1.0 ~~~~~ * ENH: fix Python 3.9 3.0.2 ~~~~~ * BUG: fix ``__version__`` attribute in Python 2 CLI. 
3.0.1 ~~~~~ * BUG: fix calling the package from the command line 3.0.0 ~~~~~ * ENH: Fix Python 3.7 * ENH: Restructure into package 2.1 ~~~ * ENH: Add support for Python 3.5 coroutines * ENH: Documentation updates * ENH: CI for most recent Python versions (3.5, 3.6, 3.6-dev, 3.7-dev, nightly) * ENH: Add timer unit argument for output time granularity spec 2.0 ~~~ * BUG: Added support for IPython 5.0+, removed support for IPython <=0.12 1.1 ~~~ * BUG: Read source files as bytes. 1.0 ~~~ * ENH: `kernprof.py` is now installed as `kernprof`. * ENH: Python 3 support. Thanks to the long-suffering Mikhail Korobov for being patient. * Dropped 2.6 as it was too annoying. * ENH: The `stripzeros` and `add_module` options. Thanks to Erik Tollerud for contributing it. * ENH: Support for IPython cell blocks. Thanks to Michael Forbes for adding this feature. * ENH: Better warnings when building without Cython. Thanks to David Cournapeau for spotting this. 1.0b3 ~~~~~ * ENH: Profile generators. * BUG: Update for compatibility with newer versions of Cython. Thanks to Ondrej Certik for spotting the bug. * BUG: Update IPython compatibility for 0.11+. Thanks to Yaroslav Halchenko and others for providing the updated imports. 1.0b2 ~~~~~ * BUG: fixed line timing overflow on Windows. * DOC: improved the README. 1.0b1 ~~~~~ * Initial release. line_profiler-5.0.0/CMakeLists.txt000066400000000000000000000026021503775420400171140ustar00rootroot00000000000000cmake_minimum_required(VERSION 3.13.0) project(_line_profiler LANGUAGES C) ### # Private helper function to execute `python -c ""` # # Runs a python command and populates an outvar with the result of stdout. # Be careful of indentation if `cmd` is multiline. # function(pycmd outvar cmd) execute_process( COMMAND "${PYTHON_EXECUTABLE}" -c "${cmd}" RESULT_VARIABLE _exitcode OUTPUT_VARIABLE _output) if(NOT ${_exitcode} EQUAL 0) message(ERROR "Failed when running python code: \"\"\" ${cmd}\"\"\"") message(FATAL_ERROR "Python command failed with error code: ${_exitcode}") endif() # Remove supurflous newlines (artifacts of print) string(STRIP "${_output}" _output) set(${outvar} "${_output}" PARENT_SCOPE) endfunction() find_package(PythonInterp REQUIRED) ### # Find scikit-build and include its cmake resource scripts # if (NOT SKBUILD) pycmd(skbuild_location "import os, skbuild; print(os.path.dirname(skbuild.__file__))") set(skbuild_cmake_dir "${skbuild_location}/resources/cmake") message(STATUS "[LINE_PROFILER] skbuild_cmake_dir = ${skbuild_cmake_dir}") # If skbuild is not the driver, then we need to include its utilities in our CMAKE_MODULE_PATH list(APPEND CMAKE_MODULE_PATH ${skbuild_cmake_dir}) endif() find_package(Cython REQUIRED) find_package(PythonExtensions REQUIRED) find_package(PythonLibs REQUIRED) add_subdirectory("line_profiler") line_profiler-5.0.0/LICENSE.txt000066400000000000000000000031201503775420400161730ustar00rootroot00000000000000This software is OSI Certified Open Source Software. OSI Certified is a certification mark of the Open Source Initiative. Copyright (c) 2008, Enthought, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Enthought, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. line_profiler-5.0.0/LICENSE_Python.txt000066400000000000000000000330571503775420400175500ustar00rootroot00000000000000The file timers.c was derived from the timer code in Python 2.5.2's _lsprof.c file and falls under the PSF license given below. A. HISTORY OF THE SOFTWARE ========================== Python was created in the early 1990s by Guido van Rossum at Stichting Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands as a successor of a language called ABC. Guido remains Python's principal author, although it includes many contributions from others. In 1995, Guido continued his work on Python at the Corporation for National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software. In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same year, the PythonLabs team moved to Digital Creations (now Zope Corporation, see http://www.zope.com). In 2001, the Python Software Foundation (PSF, see http://www.python.org/psf/) was formed, a non-profit organization created specifically to own Python-related Intellectual Property. Zope Corporation is a sponsoring member of the PSF. All Python releases are Open Source (see http://www.opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases. Release Derived Year Owner GPL- from compatible? (1) 0.9.0 thru 1.2 1991-1995 CWI yes 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes 1.6 1.5.2 2000 CNRI no 2.0 1.6 2000 BeOpen.com no 1.6.1 1.6 2001 CNRI yes (2) 2.1 2.0+1.6.1 2001 PSF no 2.0.1 2.0+1.6.1 2001 PSF yes 2.1.1 2.1+2.0.1 2001 PSF yes 2.2 2.1.1 2001 PSF yes 2.1.2 2.1.1 2002 PSF yes 2.1.3 2.1.2 2002 PSF yes 2.2.1 2.2 2002 PSF yes 2.2.2 2.2.1 2002 PSF yes 2.2.3 2.2.2 2003 PSF yes 2.3 2.2.2 2002-2003 PSF yes 2.3.1 2.3 2002-2003 PSF yes 2.3.2 2.3.1 2002-2003 PSF yes 2.3.3 2.3.2 2002-2003 PSF yes 2.3.4 2.3.3 2004 PSF yes 2.3.5 2.3.4 2005 PSF yes 2.4 2.3 2004 PSF yes 2.4.1 2.4 2005 PSF yes 2.4.2 2.4.1 2005 PSF yes 2.4.3 2.4.2 2006 PSF yes 2.4.4 2.4.3 2006 PSF yes 2.5 2.4 2006 PSF yes 2.5.1 2.5 2007 PSF yes 2.5.2 2.5.2 2008 PSF yes Footnotes: (1) GPL-compatible doesn't mean that we're distributing Python under the GPL. 
All Python licenses, unlike the GPL, let you distribute a modified version without making your changes open source. The GPL-compatible licenses make it possible to combine Python with other software that is released under the GPL; the others don't. (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, because its license has a choice of law clause. According to CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 is "not incompatible" with the GPL. Thanks to the many outside volunteers who have worked under Guido's direction to make these releases possible. B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON =============================================================== PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python. 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 ------------------------------------------- BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the Individual or Organization ("Licensee") accessing and otherwise using this software in source or binary form and its associated documentation ("the Software"). 2. 
Subject to the terms and conditions of this BeOpen Python License Agreement, BeOpen hereby grants Licensee a non-exclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use the Software alone or in any derivative version, provided, however, that the BeOpen Python License is retained in the Software, alone or in any derivative version prepared by Licensee. 3. BeOpen is making the Software available to Licensee on an "AS IS" basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 5. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 6. This License Agreement shall be governed by and interpreted in all respects by the law of the State of California, excluding conflict of law provisions. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between BeOpen and Licensee. This License Agreement does not grant permission to use BeOpen trademarks or trade names in a trademark sense to endorse or promote products or services of Licensee, or any third party. As an exception, the "BeOpen Python" logos available at http://www.pythonlabs.com/logos.html may be used according to the permissions granted on that web page. 7. By copying, installing or otherwise using the software, Licensee agrees to be bound by the terms and conditions of this License Agreement. CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 --------------------------------------- 1. This LICENSE AGREEMENT is between the Corporation for National Research Initiatives, having an office at 1895 Preston White Drive, Reston, VA 20191 ("CNRI"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 1.6.1 software in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, CNRI hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 1.6.1 alone or in any derivative version, provided, however, that CNRI's License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) 1995-2001 Corporation for National Research Initiatives; All Rights Reserved" are retained in Python 1.6.1 alone or in any derivative version prepared by Licensee. Alternately, in lieu of CNRI's License Agreement, Licensee may substitute the following text (omitting the quotes): "Python 1.6.1 is made available subject to the terms and conditions in CNRI's License Agreement. This Agreement together with Python 1.6.1 may be located on the Internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This Agreement may also be obtained from a proxy server on the Internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 1.6.1. 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions. 7. This License Agreement shall be governed by the federal intellectual property law of the United States, including without limitation the federal copyright law, and, to the extent such U.S. federal law does not apply, by the law of the Commonwealth of Virginia, excluding Virginia's conflict of law provisions. Notwithstanding the foregoing, with regard to derivative works based on Python 1.6.1 that incorporate non-separable material that was previously distributed under the GNU General Public License (GPL), the law of the Commonwealth of Virginia shall govern this License Agreement only as to issues arising under or with respect to Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between CNRI and Licensee. This License Agreement does not grant permission to use CNRI trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party. 8. By clicking on the "ACCEPT" button where indicated, or by copying, installing or otherwise using Python 1.6.1, Licensee agrees to be bound by the terms and conditions of this License Agreement. ACCEPT CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 -------------------------------------------------- Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, The Netherlands. All rights reserved. Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Stichting Mathematisch Centrum or CWI not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
line_profiler-5.0.0/MANIFEST.in000066400000000000000000000006231503775420400161130ustar00rootroot00000000000000graft line_profiler/rc include *.md include *.rst include *.py include *.txt include *.toml include run_tests.sh recursive-include requirements *.txt recursive-include tests *.py recursive-include line_profiler *.txt recursive-include line_profiler *.pyx recursive-include line_profiler *.pxd recursive-include line_profiler *.pyd recursive-include line_profiler *.c recursive-include line_profiler *.h line_profiler-5.0.0/README.rst000066400000000000000000000507541503775420400160560ustar00rootroot00000000000000line_profiler and kernprof -------------------------- |Pypi| |ReadTheDocs| |Downloads| |CircleCI| |GithubActions| |Codecov| This is the official ``line_profiler`` repository. The most recent version of `line-profiler `_ on pypi points to this repo. The original `line_profiler `_ package by `@rkern `_ is unmaintained. This fork is the official continuation of the project. +---------------+--------------------------------------------+ | Github | https://github.com/pyutils/line_profiler | +---------------+--------------------------------------------+ | Pypi | https://pypi.org/project/line_profiler | +---------------+--------------------------------------------+ | ReadTheDocs | https://kernprof.readthedocs.io/en/latest/ | +---------------+--------------------------------------------+ ---- ``line_profiler`` is a module for doing line-by-line profiling of functions. kernprof is a convenient script for running either ``line_profiler`` or the Python standard library's cProfile or profile modules, depending on what is available. They are available under a `BSD license`_. .. _BSD license: https://raw.githubusercontent.com/pyutils/line_profiler/master/LICENSE.txt .. contents:: Quick Start (Modern) ==================== This guide is for versions of line profiler starting at ``4.1.0``. To profile a python script: * Install line_profiler: ``pip install line_profiler``. * In the relevant file(s), import line profiler and decorate function(s) you want to profile with ``@line_profiler.profile``. * Set the environment variable ``LINE_PROFILE=1`` and run your script as normal. When the script ends a summary of profile results, files written to disk, and instructions for inspecting details will be written to stdout. For more details and a short tutorial see `Line Profiler Basic Usage `_. Quick Start (Legacy) ==================== This section is the original quick-start guide, and may eventually be removed from the README. This will work with current and older (pre ``4.1.0``) versions of line profiler. To profile a python script: * Install line_profiler: ``pip install line_profiler``. * Decorate function(s) you want to profile with @profile. The decorator will be made automatically available on run. * Run ``kernprof -lv script_to_profile.py``. Installation ============ Releases of ``line_profiler`` can be installed using pip:: $ pip install line_profiler Installation while ensuring a compatible IPython version can also be installed using pip:: $ pip install line_profiler[ipython] To check out the development sources, you can use Git_:: $ git clone https://github.com/pyutils/line_profiler.git You may also download source tarballs of any snapshot from that URL. Source releases will require a C compiler in order to build `line_profiler`. In addition, git checkouts will also require Cython. Source releases on PyPI should contain the pregenerated C sources, so Cython should not be required in that case. 
``kernprof`` is a single-file pure Python script and does not require a compiler. If you wish to use it to run cProfile and not line-by-line profiling, you may copy it to a directory on your ``PATH`` manually and avoid trying to build any C extensions. As of 2021-06-04 Linux (x86_64 and i686), OSX (10_9_x86_64), and Win32 (win32, and amd64) binaries are available on pypi. The last version of line profiler to support Python 2.7 was 3.1.0 and the last version to support Python 3.5 was 3.3.1. .. _git: http://git-scm.com/ .. _Cython: http://www.cython.org .. _build and install: http://docs.python.org/install/index.html line_profiler ============= The current profiling tools supported in Python only time function calls. This is a good first step for locating hotspots in one's program and is frequently all one needs to do to optimize the program. However, sometimes the cause of the hotspot is actually a single line in the function, and that line may not be obvious from just reading the source code. These cases are particularly frequent in scientific computing. Functions tend to be larger (sometimes because of legitimate algorithmic complexity, sometimes because the programmer is still trying to write FORTRAN code), and a single statement without function calls can trigger lots of computation when using libraries like numpy. cProfile only times explicit function calls, not special methods called because of syntax. Consequently, a relatively slow numpy operation on large arrays like this, :: a[large_index_array] = some_other_large_array is a hotspot that never gets broken out by cProfile because there is no explicit function call in that statement. LineProfiler can be given functions to profile, and it will time the execution of each individual line inside those functions. In a typical workflow, one only cares about line timings of a few functions because wading through the results of timing every single line of code would be overwhelming. However, LineProfiler does need to be explicitly told what functions to profile. The easiest way to get started is to use the ``kernprof`` script. :: $ kernprof -l script_to_profile.py ``kernprof`` will create an instance of LineProfiler and insert it into the ``__builtins__`` namespace with the name ``profile``. It has been written to be used as a decorator, so in your script, you decorate the functions you want to profile with @profile. :: @profile def slow_function(a, b, c): ... The default behavior of ``kernprof`` is to put the results into a binary file script_to_profile.py.lprof . You can tell ``kernprof`` to immediately view the formatted results at the terminal with the [-v/--view] option. 
Otherwise, you can view the results later like so:: $ python -m line_profiler script_to_profile.py.lprof For example, here are the results of profiling a single function from a decorated version of the pystone.py benchmark (the first two lines are output from ``pystone.py``, not ``kernprof``):: Pystone(1.1) time for 50000 passes = 2.48 This machine benchmarks at 20161.3 pystones/second Wrote profile results to pystone.py.lprof Timer unit: 1e-06 s File: pystone.py Function: Proc2 at line 149 Total time: 0.606656 s Line # Hits Time Per Hit % Time Line Contents ============================================================== 149 @profile 150 def Proc2(IntParIO): 151 50000 82003 1.6 13.5 IntLoc = IntParIO + 10 152 50000 63162 1.3 10.4 while 1: 153 50000 69065 1.4 11.4 if Char1Glob == 'A': 154 50000 66354 1.3 10.9 IntLoc = IntLoc - 1 155 50000 67263 1.3 11.1 IntParIO = IntLoc - IntGlob 156 50000 65494 1.3 10.8 EnumLoc = Ident1 157 50000 68001 1.4 11.2 if EnumLoc == Ident1: 158 50000 63739 1.3 10.5 break 159 50000 61575 1.2 10.1 return IntParIO The source code of the function is printed with the timing information for each line. There are six columns of information. * Line #: The line number in the file. * Hits: The number of times that line was executed. * Time: The total amount of time spent executing the line in the timer's units. In the header information before the tables, you will see a line "Timer unit:" giving the conversion factor to seconds. It may be different on different systems. * Per Hit: The average amount of time spent executing the line once in the timer's units. * % Time: The percentage of time spent on that line relative to the total amount of recorded time spent in the function. * Line Contents: The actual source code. Note that this is always read from disk when the formatted results are viewed, *not* when the code was executed. If you have edited the file in the meantime, the lines will not match up, and the formatter may not even be able to locate the function for display. If you are using IPython, there is an implementation of an %lprun magic command which will let you specify functions to profile and a statement to execute. It will also add its LineProfiler instance into the __builtins__, but typically, you would not use it like that. For IPython 0.11+, you can install it by editing the IPython configuration file ``~/.ipython/profile_default/ipython_config.py`` to add the ``'line_profiler'`` item to the extensions list:: c.TerminalIPythonApp.extensions = [ 'line_profiler', ] Or explicitly call:: %load_ext line_profiler To get usage help for %lprun, use the standard IPython help mechanism:: In [1]: %lprun? These two methods are expected to be the most frequent user-level ways of using LineProfiler and will usually be the easiest. However, if you are building other tools with LineProfiler, you will need to use the API. There are two ways to inform LineProfiler of functions to profile: you can pass them as arguments to the constructor or use the ``add_function(f)`` method after instantiation. :: profile = LineProfiler(f, g) profile.add_function(h) LineProfiler has the same ``run()``, ``runctx()``, and ``runcall()`` methods as cProfile.Profile as well as ``enable()`` and ``disable()``. It should be noted, though, that ``enable()`` and ``disable()`` are not entirely safe when nested. Nesting is common when using LineProfiler as a decorator. In order to support nesting, use ``enable_by_count()`` and ``disable_by_count()``. 
These functions will increment and decrement a counter and only actually enable or disable the profiler when the count transitions from or to 0. After profiling, the ``dump_stats(filename)`` method will pickle the results out to the given file. ``print_stats([stream])`` will print the formatted results to sys.stdout or whatever stream you specify. ``get_stats()`` will return LineStats object, which just holds two attributes: a dictionary containing the results and the timer unit. kernprof ======== ``kernprof`` also works with cProfile, its third-party incarnation lsprof, or the pure-Python profile module depending on what is available. It has a few main features: * Encapsulation of profiling concerns. You do not have to modify your script in order to initiate profiling and save the results. Unless if you want to use the advanced __builtins__ features, of course. * Robust script execution. Many scripts require things like __name__, __file__, and sys.path to be set relative to it. A naive approach at encapsulation would just use execfile(), but many scripts which rely on that information will fail. kernprof will set those variables correctly before executing the script. * Easy executable location. If you are profiling an application installed on your PATH, you can just give the name of the executable. If kernprof does not find the given script in the current directory, it will search your PATH for it. * Inserting the profiler into __builtins__. Sometimes, you just want to profile a small part of your code. With the [-b/--builtin] argument, the Profiler will be instantiated and inserted into your __builtins__ with the name "profile". Like LineProfiler, it may be used as a decorator, or enabled/disabled with ``enable_by_count()`` and ``disable_by_count()``, or even as a context manager with the "with profile:" statement. * Pre-profiling setup. With the [-s/--setup] option, you can provide a script which will be executed without profiling before executing the main script. This is typically useful for cases where imports of large libraries like wxPython or VTK are interfering with your results. If you can modify your source code, the __builtins__ approach may be easier. The results of profile script_to_profile.py will be written to script_to_profile.py.prof by default. It will be a typical marshalled file that can be read with pstats.Stats(). They may be interactively viewed with the command:: $ python -m pstats script_to_profile.py.prof Such files may also be viewed with graphical tools. A list of 3rd party tools built on ``cProfile`` or ``line_profiler`` are as follows: * `pyprof2calltree `_: converts profiling data to a format that can be visualized using kcachegrind_ (linux only), wincachegrind_ (windows only, unmaintained), or qcachegrind_. * `Line Profiler GUI `_: Qt GUI for line_profiler. * `SnakeViz `_: A web viewer for Python profiling data. * `SnakeRunner `_: A fork of RunSnakeRun_, ported to Python 3. * `Pycharm plugin `_: A PyCharm plugin for line_profiler. * `Spyder plugin `_: A plugin to run line_profiler from within the Spyder IDE. * `pprof `_: A render web report for ``line_profiler``. .. _qcachegrind: https://sourceforge.net/projects/qcachegrindwin/ .. _kcachegrind: https://kcachegrind.github.io/html/Home.html .. _wincachegrind: https://github.com/ceefour/wincachegrind .. _pyprof2calltree: http://pypi.python.org/pypi/pyprof2calltree/ .. _SnakeViz: https://github.com/jiffyclub/snakeviz/ .. _SnakeRunner: https://github.com/venthur/snakerunner .. 
Such files may also be viewed with graphical tools. Third-party tools built
on ``cProfile`` or ``line_profiler`` include:

* `pyprof2calltree`_: converts profiling data to a format that can be
  visualized using kcachegrind_ (linux only), wincachegrind_ (windows only,
  unmaintained), or qcachegrind_.
* `Line Profiler GUI <qt_profiler_gui_>`_: Qt GUI for line_profiler.
* `SnakeViz`_: A web viewer for Python profiling data.
* `SnakeRunner`_: A fork of RunSnakeRun_, ported to Python 3.
* `PyCharm plugin <pycharm_line_profiler_plugin_>`_: A PyCharm plugin for
  line_profiler.
* `Spyder plugin <spyder_line_profiler_plugin_>`_: A plugin to run
  line_profiler from within the Spyder IDE.
* `pprof <web_profiler_ui_>`_: A web report renderer for ``line_profiler``.

.. _qcachegrind: https://sourceforge.net/projects/qcachegrindwin/
.. _kcachegrind: https://kcachegrind.github.io/html/Home.html
.. _wincachegrind: https://github.com/ceefour/wincachegrind
.. _pyprof2calltree: http://pypi.python.org/pypi/pyprof2calltree/
.. _SnakeViz: https://github.com/jiffyclub/snakeviz/
.. _SnakeRunner: https://github.com/venthur/snakerunner
.. _RunSnakeRun: https://pypi.org/project/RunSnakeRun/
.. _qt_profiler_gui: https://github.com/Nodd/lineprofilergui
.. _pycharm_line_profiler_plugin: https://plugins.jetbrains.com/plugin/16536-line-profiler
.. _spyder_line_profiler_plugin: https://github.com/spyder-ide/spyder-line-profiler
.. _web_profiler_ui: https://github.com/mirecl/pprof

Related Work
============

Check out these other Python profilers:

* `Scalene <https://github.com/plasma-umass/scalene>`_: A CPU+GPU+memory
  sampling-based profiler.
* `PyInstrument <https://github.com/joerick/pyinstrument>`_: A call stack
  profiler.
* `Yappi <https://github.com/sumerc/yappi>`_: A tracing profiler that is
  multithreading, asyncio and gevent aware.
* `profile / cProfile <https://docs.python.org/3/library/profile.html>`_:
  The builtin profile module.
* `timeit <https://docs.python.org/3/library/timeit.html>`_: The builtin
  timeit module for profiling single statements.
* `timerit <https://github.com/Erotemic/timerit>`_: A multi-statement
  alternative to the builtin ``timeit`` module.

Frequently Asked Questions
==========================

* Why the name "kernprof"?

  I didn't manage to come up with a meaningful name, so I named it after
  myself.

* The line-by-line timings don't add up when one profiled function calls
  another. What's up with that?

  Let's say you have function F() calling function G(), and you are using
  LineProfiler on both. The total time reported for G() is less than the time
  reported on the line in F() that calls G(). The reason is that I'm being
  reasonably clever (and possibly too clever) in recording the times.

  Basically, I try to prevent recording the time spent inside LineProfiler
  doing all of the bookkeeping for each line. Each time Python's tracing
  facility issues a line event (which happens just before a line actually
  gets executed), LineProfiler will find two timestamps, one at the beginning
  before it does anything (t_begin) and one as close to the end as possible
  (t_end). Almost all of the overhead of LineProfiler's data structures
  happens in between these two times.

  When a line event comes in, LineProfiler finds the function it belongs to.
  If it's the first line in the function, we record the line number and
  *t_end* associated with the function. The next time we see a line event
  belonging to that function, we take t_begin of the new event and subtract
  the old t_end from it to find the amount of time spent in the old line.
  Then we record the new t_end as the active line for this function. This
  way, we are removing most of LineProfiler's overhead from the results.
  Well, almost. When one profiled function F calls another profiled function
  G, the line in F that calls G basically records the total time spent
  executing the line, which includes the time spent inside the profiler while
  inside G.

  The first time this question was asked, the questioner had the G() function
  call as part of a larger expression, and he wanted to try to estimate how
  much time was being spent in the function as opposed to the rest of the
  expression. My response was that, even if I could remove the effect, it
  might still be misleading. G() might be called elsewhere, not just from the
  relevant line in F(). The workaround would be to modify the code to split
  it up into two lines, as sketched below: one which just assigns the result
  of G() to a temporary variable, and the other with the rest of the
  expression.

  I am open to suggestions on how to make this more robust. Or simple
  admonitions against trying to be clever.
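  A hypothetical sketch of that workaround (``G`` and ``x`` are placeholder
  names, not part of the library)::

      # Before: G()'s time is mixed into the timing of the whole line
      result = 2 * G(x) + 1

      # After: the G() call gets a line (and a timing row) of its own
      g_value = G(x)
      result = 2 * g_value + 1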
* Why do my list comprehensions have so many hits when I use the
  LineProfiler?

  LineProfiler records the line with the list comprehension once for each
  iteration of the list comprehension.

* Why is kernprof distributed with line_profiler? It works with just
  cProfile, right?

  Partly because kernprof.py is essential to using line_profiler effectively,
  but mostly because I'm lazy and don't want to maintain the overhead of two
  projects for modules as small as these. However, kernprof.py is a
  standalone, pure Python script that can be used to do function profiling
  with just the Python standard library. You may grab it and install it by
  itself without ``line_profiler``.

* Do I need a C compiler to build ``line_profiler``? kernprof.py?

  You do need a C compiler for line_profiler. kernprof.py is a pure Python
  script and can be installed separately, though.

* Do I need Cython to build ``line_profiler``?

  Wheels for supported versions of Python are available on PyPI and support
  linux, osx, and windows for x86-64 architectures. Linux additionally ships
  with i686 wheels for manylinux and musllinux. If you have a different CPU
  architecture, or an unsupported Python version, then you will need to build
  from source.

* What version of Python do I need?

  Both ``line_profiler`` and ``kernprof`` have been tested with Python
  3.6-3.11. Older versions of ``line_profiler`` support older versions of
  Python.

To Do
=====

cProfile uses a neat "rotating trees" data structure to minimize the overhead
of looking up and recording entries. LineProfiler uses Python dictionaries
and extension objects, thanks to Cython. This mostly started out as a
prototype that I wanted to play with as quickly as possible, so I passed on
stealing the rotating trees for now. As usual, I got it working, and it seems
to have acceptable performance, so I am much less motivated to use a
different strategy now. Maybe later. Contributions accepted!

Bugs and Such
=============

Bugs and pull requests can be submitted on GitHub_.

.. _GitHub: https://github.com/pyutils/line_profiler

Changes
=======

See `CHANGELOG`_.

.. _CHANGELOG: CHANGELOG.rst

.. |CircleCI| image:: https://circleci.com/gh/pyutils/line_profiler.svg?style=svg
    :target: https://circleci.com/gh/pyutils/line_profiler

.. |Travis| image:: https://img.shields.io/travis/pyutils/line_profiler/master.svg?label=Travis%20CI
    :target: https://travis-ci.org/pyutils/line_profiler?branch=master

.. |Appveyor| image:: https://ci.appveyor.com/api/projects/status/github/pyutils/line_profiler?branch=master&svg=True
    :target: https://ci.appveyor.com/project/pyutils/line_profiler/branch/master

.. |Codecov| image:: https://codecov.io/github/pyutils/line_profiler/badge.svg?branch=master&service=github
    :target: https://codecov.io/github/pyutils/line_profiler?branch=master

.. |Pypi| image:: https://img.shields.io/pypi/v/line_profiler.svg
    :target: https://pypi.python.org/pypi/line_profiler

.. |Downloads| image:: https://img.shields.io/pypi/dm/line_profiler.svg
    :target: https://pypistats.org/packages/line-profiler

.. |GithubActions| image:: https://github.com/pyutils/line_profiler/actions/workflows/tests.yml/badge.svg?branch=main
    :target: https://github.com/pyutils/line_profiler/actions?query=branch%3Amain

.. |ReadTheDocs| image:: https://readthedocs.org/projects/kernprof/badge/?version=latest
    :target: http://kernprof.readthedocs.io/en/latest/

line_profiler-5.0.0/build_wheels.sh000077500000000000000000000012341503775420400173610ustar00rootroot00000000000000#!/usr/bin/env bash __doc__=" Runs cibuildwheel to create linux binary wheels. Requirements: pip install cibuildwheel SeeAlso: pyproject.toml " if ! which docker ; then echo "Missing requirement: docker. Please install docker before running build_wheels.sh" exit 1 fi if !
which cibuildwheel ; then echo "The cibuildwheel module is not installed. Please pip install cibuildwheel before running build_wheels.sh" exit 1 fi #pip wheel -w wheelhouse . # python -m build --wheel -o wheelhouse # line_profiler: +COMMENT_IF(binpy) cibuildwheel --config-file pyproject.toml --platform linux --arch x86_64 # line_profiler: +UNCOMMENT_IF(binpy) line_profiler-5.0.0/clean.sh000077500000000000000000000014541503775420400160010ustar00rootroot00000000000000#!/bin/bash echo "start clean" rm -rf _skbuild rm -rf _line_profiler.c rm -rf *.so rm -rf line_profiler/_line_profiler.c rm -rf line_profiler/*.so rm -rf build rm -rf line_profiler.egg-info rm -rf dist rm -rf mb_work rm -rf wheelhouse rm -rf pip-wheel-metadata rm -rf htmlcov rm -rf tests/htmlcov rm -rf CMakeCache.txt rm -rf CMakeTmp rm -rf CMakeFiles rm -rf tests/htmlcov rm -rf demo_primes* rm -rf docs/demo.py* rm -rf docs/script_to_profile.py* rm -rf tests/complex_example.py.lprof rm -rf tests/complex_example.py.prof rm -rf script_to_profile.py* if [ -f "distutils.errors" ]; then rm distutils.errors || echo "skip rm" fi CLEAN_PYTHON='find . -regex ".*\(__pycache__\|\.py[co]\)" -delete || find . -iname *.pyc -delete || find . -iname *.pyo -delete' bash -c "$CLEAN_PYTHON" echo "finish clean" line_profiler-5.0.0/dev/000077500000000000000000000000001503775420400151325ustar00rootroot00000000000000line_profiler-5.0.0/dev/autoprofile-poc.py000066400000000000000000000077701503775420400206270ustar00rootroot00000000000000import ubelt as ub # try: # import ast # unparse = ast.unparse # except AttributeError: # try: # import astunparse # unparse = astunparse.unparse # except ModuleNotFoundError: # unparse = None # import sys # sys.path.append('../') from line_profiler.autoprofile import autoprofile def create_poc(dry_run=False): root = ub.Path.appdir('line_profiler/test/poc/') repo = (root / 'repo') modpaths = {} modpaths['script'] = (root / 'repo/script.py') modpaths['foo'] = (root / 'repo/foo') modpaths['foo.__init__'] = (root / 'repo/foo/__init__.py') modpaths['foo.bar'] = (root / 'repo/foo/bar.py') modpaths['foo.baz'] = (root / 'repo/foo/baz') modpaths['foo.baz.__init__'] = (root / 'repo/foo/baz/__init__.py') modpaths['foo.baz.spam'] = (root / 'repo/foo/baz/spam.py') modpaths['foo.baz.eggs'] = (root / 'repo/foo/baz/eggs.py') if not dry_run: root.delete().ensuredir() repo.ensuredir() modpaths['script'].touch() modpaths['foo'].ensuredir() modpaths['foo.__init__'].touch() modpaths['foo.bar'].touch() modpaths['foo.bar'].write_text('def asdf():\n 2**(1/65536)') modpaths['foo.baz'].ensuredir() modpaths['foo.baz.__init__'].touch() modpaths['foo.baz.spam'].touch() modpaths['foo.baz.spam'].write_text('def spamfunc():\n ...') modpaths['foo.baz.eggs'].touch() modpaths['foo.baz.eggs'].write_text('def eggfunc():\n ...') """different import variations to handle""" script_text = ub.codeblock( ''' import foo # mod import foo.bar # py from foo import bar # py from foo.bar import asdf # fn import foo.baz as foodotbaz # mod from foo import baz as foobaz # mod from foo import bar, baz as baz2 # py,mod import foo.baz.eggs # py from foo.baz import eggs # py from foo.baz.eggs import eggfunc # fn from foo.baz.spam import spamfunc as yum # fn from numpy import round # fn # @profile def test(): 2**65536 foo.bar.asdf() def main(): 2**65536 test() # foo.bar.asdf() main() test() # asdf() ''') ub.writeto(modpaths['script'], script_text) return root, repo, modpaths def main(): root, repo, modpaths = create_poc(dry_run=False) script_file = str(modpaths['script']) 
"""separate from prof_mod, profile all imports in script""" profile_script_imports = False """modnames to profile""" modnames = [ # 'fool', # doesn't exist # 'foo', 'foo.bar', # 'foo.baz', # 'foo.baz.eggs', # 'foo.baz.spam', # 'numpy.round', ] """modpaths to profile""" modpaths = [ # str(root), # str((repo / 'fool')), # doesn't exist # str(modpaths['foo']), # str(modpaths['foo.__init__']), # str(modpaths['foo.bar']), # str(modpaths['foo.baz']), # str(modpaths['foo.baz.__init__']), # str(modpaths['foo.baz.spam']), # str(modpaths['foo.baz.eggs']), # str(modpaths['script']), # special case to profile all items ] prof_mod = modnames + modpaths # prof_mod = modpaths # prof_mod = modnames """mimick running using kernprof""" import sys import os import builtins __file__ = script_file __name__ = '__main__' script_directory = os.path.realpath(os.path.dirname(script_file)) sys.path.insert(0, script_directory) import line_profiler prof = line_profiler.LineProfiler() builtins.__dict__['profile'] = prof ns = locals() autoprofile.run(script_file, ns, prof_mod=prof_mod) print('\nprofiled') print('=' * 10) prof.print_stats(output_unit=1e-6, stripzeros=True, stream=sys.stdout) if __name__ == '__main__': main() line_profiler-5.0.0/dev/ci_public_gpg_key.pgp.enc000066400000000000000000000037371503775420400220560ustar00rootroot00000000000000U2FsdGVkX1/lD17FCUUkPN/rIRXSULj25r5BZqHuKeC7DLIY8/wF8rVec3qemb4S DgPB1jJY07hKLlzbQ60aCUe0xJku3kS+jKkk3iWx7BdPVQxojierLx39piImi44O 2rV6KNRKzBCjgMWCwmmAD7HdH7rj+bSkoIPXwbMFYzFJszkCoBdbzk/HfxyxD0jj qkrMH331iekJnfcZOa0mttGzIoiZM+DAIQUvE/wbr6yt7AvPRGV9kR2z/cc63CKo k5VGEY87zAROom+s2eUFiD7+G4hML666MccdxBgjBwo9UjTI5aE+CMtQJNjE7Cs2 u0CynwiUlSwN01cyA+rZGysLNJZBwvo/r4AlpOVvdkinZG2U+uwTUXydoTBViN3r KKcaNdcwmZaBimTv0TmSi+F8JMcq+E4xMQMa6xRXFHoA8nDjtT7dnvSpuWJpU8jb q0fhKMZd65GL03nyxcMcJn/FgwUSctr+vouO919MV3ldFubTZkxOa+0K2NRIJhhr WxOBCUiLgZWCYIQ/ildXdPDQaJcLaaShIF2Bd9k+H7RxbC0SMNfHVGlpJag/27DJ +GBhEIvjB3lxVLoaC5jTdVR9IzXMmCmciNk9GnVVkHpArYV7ZIiwOeiqmmIYtP7w 6ey7T9x0nVTJWRpK5gE7DvWsRQ4VyKDzuS4lkD0ejB9ZI5sxIVC2LWO5858grbCX 9ciuXKrMdYkixpZo+cCNTDfiTe+7M05s4edUywxzpi5Ft/7qk3yJ0eVPtHKHurW+ pd5330ltLVPqFhxYcYaOeuQcTvPtwu1V9k6QPuW2sAR1CjoDOMHoBZEFdUky7xRm nWtQ49dZB3FkJLj8LKGxmldvch4UAlu7oyUQVgiLLnICuAUNbjd0OzJpwV/xCTQI QaAKU9CZdzKjZspMk0amclc3lCjZJJvHruBgq6rNe2qOVIeGm65Ngm6ZcuPwMaza Qgc06Tak7nAJ7BBxtravANb0sidPy7Qqfi1mHQYOZWQR49FZYynteKorwDMkuUA2 K4X8PWO7lppbd1rvbhsOg6qKCw4P3O9S/Bms9A4CjfN9dAGCdC9Fhgxfv/I1AeJo WjjlnWE4963sgjmb3DwdWUhJs7i/aRXeip32VTJdqow6VE4OjeZ4kUd42UEPWxkb aCG7jP5G2YD6tx/bMRcU4BUJVx5NLflwo+87WFXp9P4puyVWcUg57KEU40dSxJ50 0jDdtkpqoTViuJT7hdGFSoHV2NxA0NjJnFslKb40wbYUie9Uh6/cR1YF5ccEcjUN 1aA2UR1PJQvvioDHeKPAFHKq9O1G5U4F5LDYONCHaar7bqNCLmCmWa9EmPNwXa72 0RbapH37yKZjhnIatW98q9TlScF0Xs37byhVpOL4bHLNl6JAEYidndHm5IvuLiw2 4nMPNSrKi6CadfiiHtaM4v0iRBcxTztRa8aH4ySimHRwddbr7QySWNvUMwyGvH2l Te0Lvj0YFEB/5DASmeV9PhI1iYjxP7EB6KHdRisSUR8oKM9neoYS+1dzx9rFbc5h q6FloFEF6iWxGENaLNNRnsFg63f5YIug6cGbCYzSR8I/RetzbdtV67HB8Rb2ZUUC TKT1/V36Zi300M3r5CzBePoO2XPfohcxc3VNxZKqFFxlLZ1huOdq1cf2czh0fQt8 GHzyh4/iRsjbU23cVslpVeb/S72dpIu7xZKIh1A6UmY9NJ9Z/QTK8KO+S2LcW/jD +4jrqrvljStzdwTyP+MrKr5Imu0/KX37D50li6F1CZzeiTvdMim0vQOe2oBEmMxA q3k0/UzEH1BBTe97eYCrRYmQcLf4y9R4rd9rV6xCSMW6pJu/NWBLvQEkAPLybHPt VHiLv5NeEIfSnd4Fm/eJDAw4iuUqc8Qi+7WX0GxMjlA2z6w38NaHQwuEhMR1yYkC 6de5F+rUaD2UJctxmtQCPJBIFH4OEOf+2/1ZNRpe0EKIGWm8UMcTdaalkN3IU/6X line_profiler-5.0.0/dev/ci_secret_gpg_subkeys.pgp.enc000066400000000000000000000027601503775420400227550ustar00rootroot00000000000000U2FsdGVkX1+2Gt4R/7fOi+1XrnBnrcfQJ9LH5RswUHfgbMs5CZe/aCVNZ9P3XKNY 
nJoq+Z0IQcpw0thEKFvEiR3wfLv+YX3OaQrV5nh5jhuaTbMT/QIWQSWjgEC6Rrjp EXhiy/ymlwxaOm/X1z0QEyGFek4eQOCmWNGY/+OscsUpWxwIucevwY+Cgi809FmT Quev8XlexUdjk5CWVeW7HnZp8zNNy/rn413fhIIRjwbYEuZdcUArzvSohShOajml YylKJP3mENRIWtaMfm/kkbOz+iSUS80TcX6Hwq9mY7YR24mhSkbUP+PN4I/fN/e/ +/GYN/oejZpzdXBS0/jKOtv1v5HkYYE80T7LFZiQouKsnK8ivKvUMLJF4g6EBMyo M2C1A27Cq5ngPLRCcf0qD62gDLUwKlp7x4WA6XX2KzAuH7LSP1VkyAkpz9yuJYSs nXBB/wm8WEayfG2h3tcmrHkjYGFbFsK07H/ni07/0vZLN7xgOL2iq8YTc7xwCJXf 6IfRiGyrr9GlIBlFXy1wJb9vi7P28k92CkYJm6ohKSwc/DYAwtjtHR3FvPR3/KbY 964Wg88o9bOLAAAi5KxXn9XfdTXMuVYOwOw8o398d7kzIGUGJi+FDKdz6oOizNe3 APxMZboJPqBOLW2iuQmqEaq41RBnUyPijpOEZuLQZjTjWJ0bTWCDU0KWDZOM+AaD BEyFhcaxdDFvVFczT7EO54sq1fcfCV9DYU7kyYw88EDRqPS8aUlMxUusFpEZHT5w hOhb2n9qHjY/Ahi4fa0MIa/XjrVWkHsHv3JXZ6DqNrT1nKPb3xmrSg9FXOZAKdtR h8+A/ZCOvVl6KiL4uTQ/tbYH8KkocqqbdBflbrSVINXs7pJTxyvT5RUD2RnzYmyQ O5tYqD4N8m64KPz2BRJ1tcD98CFwTUvlKYbbNxyXi0p1hNe20rkRouMQ3p3sJdnE EFPwO7mJdJX9jHuezelHJcPV+bzEaqokVgctBVh2DYLoES07MYStQiJcuqNJyupR mlkkYwCx241FvxoWeHuNvy4oFu9XfG9+Gdx7i9s9pvwrDdf07oC5FIB3lDvR815g 1uh7elyd++ShxwrPmVq8c/7qFdZk//20egGOIKAVPzOlfPxggeNLc+cwLVq4Ef7M uCTitOuwbYuUyLlsfAOKd/MVrbS7IljCGtEx0E8tHDMiT+UFLEkDqYTsBRJtK1uh jNvwySI8LDY/+jBLmIFasD7DhkmEEUqkf3YvGOVH0wakU7gtTQWje3l7UBPf0O9c qw6mfz6e5N1nWeo/z3IoKNsmIcTj2ceIJlpM0LGa1b9/qBNfPmbuPzHPBUuDiDS1 1Az+QbcxLrR/vUAw/mdnwUOe2pE3XhZB4t4ynXKPy2m3+PGp0NtkuiEPlnIX510D rPEqsDVWprTrJXTg7kLhZXjIarOIkeLB39qD0jqzZvMttsnPyT3Mm8x+S+laMBE9 W7sEoZ2UfdhEfvCo1uFdUQ== line_profiler-5.0.0/dev/gpg_owner_trust.enc000066400000000000000000000011421503775420400210470ustar00rootroot00000000000000U2FsdGVkX19dF37Fp1whP9e+sczGySWdyqypXrCfymhiGsNEwrcCM9xtr2gOAf2N u3s2zc+4CSnlzpOFh/WebfybRsGBThod45FXdACRrZta5Vp6xePcHLzdIQe4/jtG Ix/hdq8FPKb9ASVQNxo9m95MCBTtsXk4utyv+lqFUXuGtJCE1uao8GCHOtAZzkNd 06mdnYSRvTUwko2cS5ZEsQsQIr5HUYnxBZCzHgSKoDfAsywoXQDFC05+2WzGJxdT kPRW1Nx1F6QucA06TGBiJih2jcV/V4tEcbg+PIA0TtbhwOSAGWQ0x2VXyP1FMB6S urZfx3u/61NnFeS6aJGNhoEJxHnivyJX+b3rTJcfGz44bAyQlX+O2gasiqdGomR5 alh0BtIeCQeNkwdDl67rtQXjisLUVHyLz9owY1qYwE+CLRwrip5jU73YT4B2AUm4 SQGH7VSbHSeV74NxtQCImYkaMvM7AfkZRxzeZ0gN9h+jIqSt8v2gA4XE09lMtu9S b480kTF+efnsZYwZagWPdbxrE9cFCiwJzaMie+60803Buj/7x3+LYZjaF0TJTDSQ 1MFS5TcnptaEhzHc4L1Slg== line_profiler-5.0.0/dev/maintain/000077500000000000000000000000001503775420400167325ustar00rootroot00000000000000line_profiler-5.0.0/dev/maintain/port_utilities.py000066400000000000000000000050311503775420400223620ustar00rootroot00000000000000""" Statically port utilities from ubelt and xdocest need for the autoprofile features. Similar Scripts: ~/code/xdoctest/dev/maintain/port_ubelt_utils.py ~/code/mkinit/dev/maintain/port_ubelt_code.py ~/code/line_profiler/dev/maintain/port_utilities.py """ import ubelt as ub import liberator import re def generate_util_static(): lib = liberator.Liberator(verbose=0) import ubelt import xdoctest if 1: lib.add_dynamic(ubelt.util_import.modpath_to_modname) lib.add_dynamic(ubelt.util_import.modname_to_modpath) lib.expand(['ubelt']) if 1: lib.add_dynamic(xdoctest.static_analysis.package_modpaths) lib.expand(['xdoctest']) # # Hack because ubelt and xdoctest define this del lib.body_defs['xdoctest.utils.util_import._platform_pylib_exts'] # lib.expand(['ubelt', 'xdoctest']) text = lib.current_sourcecode() """ pip install rope pip install parso """ prefix = ub.codeblock( ''' """ This file was autogenerated based on code in :py:mod:`ubelt` and :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. 
""" ''') # Remove doctest references to ubelt new_lines = [] for line in text.split('\n'): if line.strip().startswith('>>> from ubelt'): continue if line.strip().startswith('>>> import ubelt as ub'): line = re.sub('>>> .*', '>>> # xdoctest: +SKIP("ubelt dependency")', line) new_lines.append(line) text = '\n'.join(new_lines) text = prefix + '\n' + text + '\n' return text def main(): text = generate_util_static() print(ub.highlight_code(text, backend='rich')) import parso import line_profiler target_fpath = ub.Path(line_profiler.__file__).parent / 'autoprofile' / 'util_static.py' new_module = parso.parse(text) if target_fpath.exists(): old_module = parso.parse(target_fpath.read_text()) new_names = [child.name.value for child in new_module.children if child.type in {'funcdef', 'classdef'}] old_names = [child.name.value for child in old_module.children if child.type in {'funcdef', 'classdef'}] print(set(old_names) - set(new_names)) print(set(new_names) - set(old_names)) target_fpath.write_text(text) # Fixup formatting if 1: ub.cmd(['black', target_fpath]) if __name__ == '__main__': """ CommandLine: python ~/code/line_profiler/dev/maintain/port_utilities.py """ main() line_profiler-5.0.0/dev/public_gpg_key000066400000000000000000000000511503775420400200340ustar00rootroot000000000000002A290272C174D28EA9CA48E9D7224DAF0347B114 line_profiler-5.0.0/dev/secrets_configuration.sh000066400000000000000000000005671503775420400220750ustar00rootroot00000000000000export VARNAME_CI_SECRET="PYUTILS_CI_SECRET" export VARNAME_TWINE_PASSWORD="PYUTILS_PYPI_MASTER_TOKEN" export VARNAME_TEST_TWINE_PASSWORD="PYUTILS_TEST_PYPI_MASTER_TOKEN" export VARNAME_TWINE_USERNAME="PYUTILS_PYPI_MASTER_TOKEN_USERNAME" export VARNAME_TEST_TWINE_USERNAME="PYUTILS_TEST_PYPI_MASTER_TOKEN_USERNAME" export GPG_IDENTIFIER="=PyUtils-CI " line_profiler-5.0.0/dev/setup_secrets.sh000066400000000000000000000471461503775420400203720ustar00rootroot00000000000000#!/usr/bin/env bash __doc__=' ============================ SETUP CI SECRET INSTRUCTIONS ============================ TODO: These instructions are currently pieced together from old disparate instances, and are not yet fully organized. The original template file should be: ~/code/xcookie/dev/setup_secrets.sh Development script for updating secrets when they rotate The intent of this script is to help setup secrets for whichever of the following CI platforms is used: ../.github/workflows/tests.yml ../.gitlab-ci.yml ../.circleci/config.yml ========================= GITHUB ACTION INSTRUCTIONS ========================= * `PERSONAL_GITHUB_PUSH_TOKEN` - This is only needed if you want to automatically git-tag release branches. To make a API token go to: https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token ========================= GITLAB ACTION INSTRUCTIONS ========================= ```bash cat .setup_secrets.sh | \ sed "s|utils||g" | \ sed "s|xcookie||g" | \ sed "s|travis-ci-Erotemic||g" | \ sed "s|CI_SECRET||g" | \ sed "s|GITLAB_ORG_PUSH_TOKEN||g" | \ sed "s|gitlab.org.com|gitlab.your-instance.com|g" | \ tee /tmp/repl && colordiff .setup_secrets.sh /tmp/repl ``` * Make sure you add Runners to your project https://gitlab.org.com/utils/xcookie/-/settings/ci_cd in Runners-> Shared Runners and Runners-> Available specific runners * Ensure that you are auto-cancel redundant pipelines. Navigate to https://gitlab.kitware.com/utils/xcookie/-/settings/ci_cd and ensure "Auto-cancel redundant pipelines" is checked. 
More details are here https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-redundant-pipelines * TWINE_USERNAME - this is your pypi username. Twine info is only needed if you want to automatically publish to pypi * TWINE_PASSWORD - this is your pypi password * CI_SECRET - We will use this as a secret key to encrypt/decrypt gpg secrets This is only needed if you want to automatically sign published wheels with a gpg key. * GITLAB_ORG_PUSH_TOKEN - This is only needed if you want to automatically git-tag release branches. Create a new personal access token in User->Settings->Tokens, You can name the token GITLAB_ORG_PUSH_TOKEN_VALUE Give it api and write repository permissions SeeAlso: https://gitlab.org.com/profile/personal_access_tokens Take this variable and record its value somewhere safe. I put it in my secrets file as such: export GITLAB_ORG_PUSH_TOKEN_VALUE= I also create another variable with the prefix "git-push-token", which is necessary export GITLAB_ORG_PUSH_TOKEN=git-push-token:$GITLAB_ORG_PUSH_TOKEN_VALUE Then add this as a secret variable here: https://gitlab.org.com/groups/utils/-/settings/ci_cd Note the value of GITLAB_ORG_PUSH_TOKEN will look something like: "{token-name}:{token-password}" For instance it may look like this: "git-push-token:62zutpzqga6tvrhklkdjqm" References: https://stackoverflow.com/questions/51465858/how-do-you-push-to-a-gitlab-repo-using-a-gitlab-ci-job # ADD RELEVANT VARIABLES TO GITLAB SECRET VARIABLES # https://gitlab.kitware.com/computer-vision/kwcoco/-/settings/ci_cd # Note that it is important to make sure that these variables are # only decrypted on protected branches by selecting the protected # and masked option. Also make sure you have master and release # branches protected. # https://gitlab.kitware.com/computer-vision/kwcoco/-/settings/repository#js-protected-branches-settings ============================ Relevant CI Secret Locations ============================ https://github.com/pyutils/line_profiler/settings/secrets/actions https://app.circleci.com/settings/project/github/pyutils/line_profiler/environment-variables?return-to=https%3A%2F%2Fapp.circleci.com%2Fpipelines%2Fgithub%2Fpyutils%2Fline_profiler ' setup_package_environs(){ __doc__=" Set up environment variables specific to this project. The remainder of this script should ideally be general to any repo. These non-secret variables are written to disk and loaded by the script, such that the specific repo only needs to modify that configuration file. " echo "Choose an organization-specific setting or make your own.
This needs to be generalized more" } ### FIXME: Should be configurable for general use setup_package_environs_gitlab_kitware(){ echo ' export VARNAME_CI_SECRET="CI_KITWARE_SECRET" export VARNAME_TWINE_PASSWORD="EROTEMIC_PYPI_MASTER_TOKEN" export VARNAME_TEST_TWINE_PASSWORD="EROTEMIC_TEST_PYPI_MASTER_TOKEN" export VARNAME_PUSH_TOKEN="GITLAB_KITWARE_TOKEN" export VARNAME_TWINE_USERNAME="EROTEMIC_PYPI_MASTER_TOKEN_USERNAME" export VARNAME_TEST_TWINE_USERNAME="EROTEMIC_TEST_PYPI_MASTER_TOKEN_USERNAME" export GPG_IDENTIFIER="=Erotemic-CI " ' | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip(chr(10)))" > dev/secrets_configuration.sh git add dev/secrets_configuration.sh } setup_package_environs_github_erotemic(){ echo ' export VARNAME_CI_SECRET="EROTEMIC_CI_SECRET" export VARNAME_TWINE_PASSWORD="EROTEMIC_PYPI_MASTER_TOKEN" export VARNAME_TEST_TWINE_PASSWORD="EROTEMIC_TEST_PYPI_MASTER_TOKEN" export VARNAME_TWINE_USERNAME="EROTEMIC_PYPI_MASTER_TOKEN_USERNAME" export VARNAME_TEST_TWINE_USERNAME="EROTEMIC_TEST_PYPI_MASTER_TOKEN_USERNAME" export GPG_IDENTIFIER="=Erotemic-CI " ' | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip(chr(10)))" > dev/secrets_configuration.sh git add dev/secrets_configuration.sh } setup_package_environs_github_pyutils(){ echo ' export VARNAME_CI_SECRET="PYUTILS_CI_SECRET" export VARNAME_TWINE_PASSWORD="PYUTILS_PYPI_MASTER_TOKEN" export VARNAME_TEST_TWINE_PASSWORD="PYUTILS_TEST_PYPI_MASTER_TOKEN" export VARNAME_TWINE_USERNAME="PYUTILS_PYPI_MASTER_TOKEN_USERNAME" export VARNAME_TEST_TWINE_USERNAME="PYUTILS_TEST_PYPI_MASTER_TOKEN_USERNAME" export GPG_IDENTIFIER="=PyUtils-CI " ' | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip(chr(10)))" > dev/secrets_configuration.sh git add dev/secrets_configuration.sh #echo ' #export VARNAME_CI_SECRET="PYUTILS_CI_SECRET" #export GPG_IDENTIFIER="=PyUtils-CI " #' | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip(chr(10)))" > dev/secrets_configuration.sh } upload_github_secrets(){ load_secrets unset GITHUB_TOKEN #printf "%s" "$GITHUB_TOKEN" | gh auth login --hostname Github.com --with-token if ! gh auth status ; then gh auth login fi source dev/secrets_configuration.sh gh secret set "TWINE_USERNAME" -b"${!VARNAME_TWINE_USERNAME}" gh secret set "TEST_TWINE_USERNAME" -b"${!VARNAME_TEST_TWINE_USERNAME}" toggle_setx_enter gh secret set "CI_SECRET" -b"${!VARNAME_CI_SECRET}" gh secret set "TWINE_PASSWORD" -b"${!VARNAME_TWINE_PASSWORD}" gh secret set "TEST_TWINE_PASSWORD" -b"${!VARNAME_TEST_TWINE_PASSWORD}" toggle_setx_exit } toggle_setx_enter(){ # Can we do something like a try/finally? # https://stackoverflow.com/questions/15656492/writing-try-catch-finally-in-shell echo "Enter sensitive area" if [[ -n "${-//[^x]/}" ]]; then __context_1_toggle_setx=1 else __context_1_toggle_setx=0 fi if [[ "$__context_1_toggle_setx" == "1" ]]; then echo "Setx was on, disable temporarily" set +x fi } toggle_setx_exit(){ echo "Exit sensitive area" # Can we guarantee this will happen? 
if [[ "$__context_1_toggle_setx" == "1" ]]; then set -x fi } upload_gitlab_group_secrets(){ __doc__=" Use the gitlab API to modify group-level secrets " # In Repo Directory load_secrets REMOTE=origin GROUP_NAME=$(git remote get-url $REMOTE | cut -d ":" -f 2 | cut -d "/" -f 1) HOST=https://$(git remote get-url $REMOTE | cut -d "/" -f 1 | cut -d "@" -f 2 | cut -d ":" -f 1) echo " * GROUP_NAME = $GROUP_NAME * HOST = $HOST " PRIVATE_GITLAB_TOKEN=$(git_token_for "$HOST") if [[ "$PRIVATE_GITLAB_TOKEN" == "ERROR" ]]; then echo "Failed to load authentication key" return 1 fi TMP_DIR=$(mktemp -d -t ci-XXXXXXXXXX) curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups" > "$TMP_DIR/all_group_info" GROUP_ID=$(< "$TMP_DIR/all_group_info" jq ". | map(select(.path==\"$GROUP_NAME\")) | .[0].id") echo "GROUP_ID = $GROUP_ID" curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID" > "$TMP_DIR/group_info" < "$TMP_DIR/group_info" jq # Get group-level secret variables curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID/variables" > "$TMP_DIR/group_vars" < "$TMP_DIR/group_vars" jq '.[] | .key' if [[ "$?" != "0" ]]; then echo "Failed to access group level variables. Probably a permission issue" fi source dev/secrets_configuration.sh SECRET_VARNAME_ARR=(VARNAME_CI_SECRET VARNAME_TWINE_PASSWORD VARNAME_TEST_TWINE_PASSWORD VARNAME_TWINE_USERNAME VARNAME_TEST_TWINE_USERNAME VARNAME_PUSH_TOKEN) for SECRET_VARNAME_PTR in "${SECRET_VARNAME_ARR[@]}"; do SECRET_VARNAME=${!SECRET_VARNAME_PTR} echo "" echo " ---- " LOCAL_VALUE=${!SECRET_VARNAME} REMOTE_VALUE=$(< "$TMP_DIR/group_vars" jq -r ".[] | select(.key==\"$SECRET_VARNAME\") | .value") # Print current local and remote value of a variable echo "SECRET_VARNAME_PTR = $SECRET_VARNAME_PTR" echo "SECRET_VARNAME = $SECRET_VARNAME" echo "(local) $SECRET_VARNAME = $LOCAL_VALUE" echo "(remote) $SECRET_VARNAME = $REMOTE_VALUE" #curl --request GET --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID/variables/SECRET_VARNAME" | jq -r .message if [[ "$REMOTE_VALUE" == "" ]]; then # New variable echo "Remote variable does not exist, posting" toggle_setx_enter curl --request POST --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID/variables" \ --form "key=${SECRET_VARNAME}" \ --form "value=${LOCAL_VALUE}" \ --form "protected=true" \ --form "masked=true" \ --form "environment_scope=*" \ --form "variable_type=env_var" toggle_setx_exit elif [[ "$REMOTE_VALUE" != "$LOCAL_VALUE" ]]; then echo "Remote variable does not agree, putting" # Update variable value toggle_setx_enter curl --request PUT --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID/variables/$SECRET_VARNAME" \ --form "value=${LOCAL_VALUE}" toggle_setx_exit else echo "Remote value agrees with local" fi done rm "$TMP_DIR/group_vars" } upload_gitlab_repo_secrets(){ __doc__=" Use the gitlab API to modify project-level secrets " # In Repo Directory load_secrets REMOTE=origin GROUP_NAME=$(git remote get-url $REMOTE | cut -d ":" -f 2 | cut -d "/" -f 1) PROJECT_NAME=$(git remote get-url $REMOTE | cut -d ":" -f 2 | cut -d "/" -f 2 | cut -d "."
-f 1) HOST=https://$(git remote get-url $REMOTE | cut -d "/" -f 1 | cut -d "@" -f 2 | cut -d ":" -f 1) echo " * GROUP_NAME = $GROUP_NAME * PROJECT_NAME = $PROJECT_NAME * HOST = $HOST " PRIVATE_GITLAB_TOKEN=$(git_token_for "$HOST") if [[ "$PRIVATE_GITLAB_TOKEN" == "ERROR" ]]; then echo "Failed to load authentication key" return 1 fi TMP_DIR=$(mktemp -d -t ci-XXXXXXXXXX) toggle_setx_enter curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups" > "$TMP_DIR/all_group_info" toggle_setx_exit GROUP_ID=$(< "$TMP_DIR/all_group_info" jq ". | map(select(.path==\"$GROUP_NAME\")) | .[0].id") echo "GROUP_ID = $GROUP_ID" toggle_setx_enter curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID" > "$TMP_DIR/group_info" toggle_setx_exit GROUP_ID=$(< "$TMP_DIR/all_group_info" jq ". | map(select(.path==\"$GROUP_NAME\")) | .[0].id") < "$TMP_DIR/group_info" jq PROJECT_ID=$(< "$TMP_DIR/group_info" jq ".projects | map(select(.path==\"$PROJECT_NAME\")) | .[0].id") echo "PROJECT_ID = $PROJECT_ID" # Get project-level secret variables toggle_setx_enter curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/projects/$PROJECT_ID/variables" > "$TMP_DIR/project_vars" toggle_setx_exit < "$TMP_DIR/project_vars" jq '.[] | .key' if [[ "$?" != "0" ]]; then echo "Failed to access project level variables. Probably a permission issue" fi LIVE_MODE=1 source dev/secrets_configuration.sh SECRET_VARNAME_ARR=(VARNAME_CI_SECRET VARNAME_TWINE_PASSWORD VARNAME_TEST_TWINE_PASSWORD VARNAME_TWINE_USERNAME VARNAME_TEST_TWINE_USERNAME VARNAME_PUSH_TOKEN) for SECRET_VARNAME_PTR in "${SECRET_VARNAME_ARR[@]}"; do SECRET_VARNAME=${!SECRET_VARNAME_PTR} echo "" echo " ---- " LOCAL_VALUE=${!SECRET_VARNAME} REMOTE_VALUE=$(< "$TMP_DIR/project_vars" jq -r ".[] | select(.key==\"$SECRET_VARNAME\") | .value") # Print current local and remote value of a variable echo "SECRET_VARNAME_PTR = $SECRET_VARNAME_PTR" echo "SECRET_VARNAME = $SECRET_VARNAME" echo "(local) $SECRET_VARNAME = $LOCAL_VALUE" echo "(remote) $SECRET_VARNAME = $REMOTE_VALUE" #curl --request GET --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/projects/$PROJECT_ID/variables/SECRET_VARNAME" | jq -r .message if [[ "$REMOTE_VALUE" == "" ]]; then # New variable echo "Remote variable does not exist, posting" if [[ "$LIVE_MODE" == "1" ]]; then curl --request POST --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/projects/$PROJECT_ID/variables" \ --form "key=${SECRET_VARNAME}" \ --form "value=${LOCAL_VALUE}" \ --form "protected=true" \ --form "masked=true" \ --form "environment_scope=*" \ --form "variable_type=env_var" else echo "dry run, not posting" fi elif [[ "$REMOTE_VALUE" != "$LOCAL_VALUE" ]]; then echo "Remote variable does not agree, putting" # Update variable value if [[ "$LIVE_MODE" == "1" ]]; then curl --request PUT --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/projects/$PROJECT_ID/variables/$SECRET_VARNAME" \ --form "value=${LOCAL_VALUE}" else echo "dry run, not putting" fi else echo "Remote value agrees with local" fi done rm "$TMP_DIR/project_vars" } export_encrypted_code_signing_keys(){ # You will need to rerun this whenever the signkeys expire and are renewed # Load or generate secrets load_secrets source dev/secrets_configuration.sh CI_SECRET="${!VARNAME_CI_SECRET}" echo "VARNAME_CI_SECRET = $VARNAME_CI_SECRET" echo "CI_SECRET=$CI_SECRET" echo "GPG_IDENTIFIER=$GPG_IDENTIFIER" # ADD RELEVANT VARIABLES TO THE CI SECRET VARIABLES # HOW TO ENCRYPT YOUR SECRET GPG KEY # You need to have a
known public gpg key for this to make any sense MAIN_GPG_KEYID=$(gpg --list-keys --keyid-format LONG "$GPG_IDENTIFIER" | head -n 2 | tail -n 1 | awk '{print $1}') GPG_SIGN_SUBKEY=$(gpg --list-keys --with-subkey-fingerprints "$GPG_IDENTIFIER" | grep "\[S\]" -A 1 | tail -n 1 | awk '{print $1}') # Careful, if you don't have a subkey, requesting it will export more than you want. # Export the main key instead (it's better to have subkeys, but this is a lesser evil) if [[ "$GPG_SIGN_SUBKEY" == "" ]]; then # NOTE: if you get here this probably means your subkeys expired (and # won't even be visible), so we probably should check for that here and # throw an error instead of using this hack, which likely won't work # anyway. GPG_SIGN_SUBKEY=$(gpg --list-keys --with-subkey-fingerprints "$GPG_IDENTIFIER" | grep "\[C\]" -A 1 | tail -n 1 | awk '{print $1}') fi echo "MAIN_GPG_KEYID = $MAIN_GPG_KEYID" echo "GPG_SIGN_SUBKEY = $GPG_SIGN_SUBKEY" # Only export the signing secret subkey # Export plaintext gpg public keys, private sign key, and trust info mkdir -p dev gpg --armor --export-options export-backup --export-secret-subkeys "${GPG_SIGN_SUBKEY}!" > dev/ci_secret_gpg_subkeys.pgp gpg --armor --export "${GPG_SIGN_SUBKEY}" > dev/ci_public_gpg_key.pgp gpg --export-ownertrust > dev/gpg_owner_trust # Encrypt gpg keys and trust with CI secret GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -e -a -in dev/ci_public_gpg_key.pgp > dev/ci_public_gpg_key.pgp.enc GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -e -a -in dev/ci_secret_gpg_subkeys.pgp > dev/ci_secret_gpg_subkeys.pgp.enc GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -e -a -in dev/gpg_owner_trust > dev/gpg_owner_trust.enc echo "$MAIN_GPG_KEYID" > dev/public_gpg_key # Test decrypt GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_public_gpg_key.pgp.enc | gpg --list-packets --verbose GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | gpg --list-packets --verbose GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/gpg_owner_trust.enc cat dev/public_gpg_key unload_secrets # Look at what we did, clean up, and add it to git ls dev/*.enc rm dev/*.pgp rm dev/gpg_owner_trust git status git add dev/*.enc git add dev/gpg_owner_trust git add dev/public_gpg_key } # See the xcookie module gitlab python API #gitlab_set_protected_branches(){ #} _test_gnu(){ # shellcheck disable=SC2155 export GNUPGHOME=$(mktemp -d -t) ls -al "$GNUPGHOME" chmod 700 -R "$GNUPGHOME" source dev/secrets_configuration.sh gpg -k load_secrets CI_SECRET="${!VARNAME_CI_SECRET}" echo "CI_SECRET = $CI_SECRET" cat dev/public_gpg_key GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_public_gpg_key.pgp.enc GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/gpg_owner_trust.enc GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_public_gpg_key.pgp.enc | gpg --import GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/gpg_owner_trust.enc | gpg --import-ownertrust GLKWS=$CI_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc |
gpg --import gpg -k # | gpg --import # | gpg --list-packets --verbose } line_profiler-5.0.0/docs/000077500000000000000000000000001503775420400153045ustar00rootroot00000000000000line_profiler-5.0.0/docs/Makefile000066400000000000000000000011761503775420400167510ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) line_profiler-5.0.0/docs/for_developers/000077500000000000000000000000001503775420400203225ustar00rootroot00000000000000line_profiler-5.0.0/docs/for_developers/release_process.rst000066400000000000000000000025501503775420400242340ustar00rootroot00000000000000Releasing a New Version ======================= The github action to push to PYPI will trigger on the push of a new tag. By convention we tag each release as ``v{__version__}``, where ``{__version__}`` is the current ``__version__`` number in ``line_profiler/__init__.py``. The ``./publish.sh`` script handles the creation of the version tag and pushing it to github, at which point the github action will build wheels for all supported operating systems and machine architectures. The steps are as follows: 1. Run ``./publish.sh``. 2. When given the prompt, ``Do you want to git tag and push version='{}'?``, confirm the new version looks correct and respond with "yes". 3. When asked: ``do you need to build wheels?`` Respond "no". (The CI will take care of this). 4. When asked: ``Are you ready to directly publish version xxx?`` Answer no. Again, the CI will do this. 5. The script will summarize its actions. Double check them, then press enter to create and push the new release tag. These options can be programatically given for a non-interactive interface. See the ``publish.sh`` script for details (and make a PR that adds that information here). Notes on Signed Releases ======================== The "dev" folder contains encrypted GPG keys used to sign the wheels on the CI. The CI is given a secret variable which is the key to decrypt them. line_profiler-5.0.0/docs/make.bat000066400000000000000000000014441503775420400167140ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd line_profiler-5.0.0/docs/source/000077500000000000000000000000001503775420400166045ustar00rootroot00000000000000line_profiler-5.0.0/docs/source/auto/000077500000000000000000000000001503775420400175545ustar00rootroot00000000000000line_profiler-5.0.0/docs/source/auto/kernprof.rst000066400000000000000000000002621503775420400221340ustar00rootroot00000000000000.. .. manually created (not sure how to get automodule to do it) kernprof module =============== .. automodule:: kernprof :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.__main__.rst000066400000000000000000000002441503775420400246560ustar00rootroot00000000000000line\_profiler.\_\_main\_\_ module ================================== .. automodule:: line_profiler.__main__ :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler._line_profiler.rst000066400000000000000000000003341503775420400261260ustar00rootroot00000000000000line\_profiler.\_line\_profiler module ====================================== .. automodule:: line_profiler._line_profiler :private-members: _LineProfilerManager :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.ast_profile_transformer.rst000066400000000000000000000003571503775420400324240ustar00rootroot00000000000000line\_profiler.autoprofile.ast\_profle\_transformer module ========================================================== .. automodule:: line_profiler.autoprofile.ast_profile_transformer :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.ast_tree_profiler.rst000066400000000000000000000003371503775420400312010ustar00rootroot00000000000000line\_profiler.autoprofile.ast\_tree\_profiler module ===================================================== .. automodule:: line_profiler.autoprofile.ast_tree_profiler :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.autoprofile.rst000066400000000000000000000003111503775420400300120ustar00rootroot00000000000000line\_profiler.autoprofile.autoprofile module ============================================= .. automodule:: line_profiler.autoprofile.autoprofile :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.eager_preimports.rst000066400000000000000000000003321503775420400310330ustar00rootroot00000000000000line\_profiler.autoprofile.eager\_preimports module =================================================== .. automodule:: line_profiler.autoprofile.eager_preimports :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.line_profiler_utils.rst000066400000000000000000000003451503775420400315410ustar00rootroot00000000000000line\_profiler.autoprofile.line\_profiler\_utils module ======================================================= .. 
automodule:: line_profiler.autoprofile.line_profiler_utils :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.profmod_extractor.rst000066400000000000000000000003351503775420400312300ustar00rootroot00000000000000line\_profiler.autoprofile.profmod\_extractor module ==================================================== .. automodule:: line_profiler.autoprofile.profmod_extractor :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.rst000066400000000000000000000011521503775420400254660ustar00rootroot00000000000000line\_profiler.autoprofile package ================================== Submodules ---------- .. toctree:: :maxdepth: 4 line_profiler.autoprofile.ast_profile_transformer line_profiler.autoprofile.ast_tree_profiler line_profiler.autoprofile.autoprofile line_profiler.autoprofile.eager_preimports line_profiler.autoprofile.line_profiler_utils line_profiler.autoprofile.profmod_extractor line_profiler.autoprofile.run_module line_profiler.autoprofile.util_static Module contents --------------- .. automodule:: line_profiler.autoprofile :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.run_module.rst000066400000000000000000000003101503775420400276310ustar00rootroot00000000000000line\_profiler.autoprofile.run\_module module ============================================= .. automodule:: line_profiler.autoprofile.run_module :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.autoprofile.util_static.rst000066400000000000000000000003131503775420400300070ustar00rootroot00000000000000line\_profiler.autoprofile.util\_static module ============================================== .. automodule:: line_profiler.autoprofile.util_static :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.cli_utils.rst000066400000000000000000000002411503775420400251220ustar00rootroot00000000000000line\_profiler.cli\_utils module ================================ .. automodule:: line_profiler.cli_utils :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.explicit_profiler.rst000066400000000000000000000002711503775420400266610ustar00rootroot00000000000000line\_profiler.explicit\_profiler module ======================================== .. automodule:: line_profiler.explicit_profiler :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.ipython_extension.rst000066400000000000000000000002711503775420400267240ustar00rootroot00000000000000line\_profiler.ipython\_extension module ======================================== .. automodule:: line_profiler.ipython_extension :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.line_profiler.rst000066400000000000000000000002551503775420400257710ustar00rootroot00000000000000line\_profiler.line\_profiler module ==================================== .. automodule:: line_profiler.line_profiler :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.profiler_mixin.rst000066400000000000000000000002601503775420400261620ustar00rootroot00000000000000line\_profiler.profiler\_mixin module ===================================== .. 
automodule:: line_profiler.profiler_mixin :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.rst000066400000000000000000000011061503775420400231350ustar00rootroot00000000000000line\_profiler package ====================== Subpackages ----------- .. toctree:: :maxdepth: 4 line_profiler.autoprofile Submodules ---------- .. toctree:: :maxdepth: 4 line_profiler.__main__ line_profiler._line_profiler line_profiler.cli_utils line_profiler.explicit_profiler line_profiler.ipython_extension line_profiler.line_profiler line_profiler.profiler_mixin line_profiler.scoping_policy line_profiler.toml_config Module contents --------------- .. automodule:: line_profiler :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.scoping_policy.rst000066400000000000000000000002601503775420400261550ustar00rootroot00000000000000line\_profiler.scoping\_policy module ===================================== .. automodule:: line_profiler.scoping_policy :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.timers.rst000066400000000000000000000002261503775420400244410ustar00rootroot00000000000000line\_profiler.timers module ============================ .. automodule:: line_profiler.timers :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/line_profiler.toml_config.rst000066400000000000000000000002451503775420400254370ustar00rootroot00000000000000line\_profiler.toml\_config module ================================== .. automodule:: line_profiler.toml_config :members: :undoc-members: :show-inheritance: line_profiler-5.0.0/docs/source/auto/modules.rst000066400000000000000000000001141503775420400217520ustar00rootroot00000000000000line_profiler ============= .. toctree:: :maxdepth: 4 line_profiler line_profiler-5.0.0/docs/source/conf.py000066400000000000000000001057361503775420400201150ustar00rootroot00000000000000""" Notes: Based on template code in: ~/code/xcookie/xcookie/builders/docs.py ~/code/xcookie/xcookie/rc/conf_ext.py http://docs.readthedocs.io/en/latest/getting_started.html pip install sphinx sphinx-autobuild sphinx_rtd_theme sphinxcontrib-napoleon cd ~/code/line_profiler mkdir -p docs cd docs sphinx-quickstart # need to edit the conf.py cd ~/code/line_profiler/docs sphinx-apidoc --private --separate --force --output-dir ~/code/line_profiler/docs/source/auto ~/code/line_profiler/line_profiler # Note: the module should be importable before running this # (e.g. install it in developer mode or munge the PYTHONPATH) make html git add source/auto/*.rst Also: To turn on PR checks https://docs.readthedocs.io/en/stable/guides/autobuild-docs-for-pull-requests.html https://readthedocs.org/dashboard/line-profiler/advanced/ ensure your github account is connected to readthedocs https://readthedocs.org/accounts/social/connections/ ### For gitlab To enable the read-the-docs go to https://readthedocs.org/dashboard/ and login The user will need to enable the repo on their readthedocs account: https://readthedocs.org/dashboard/import/manual/? Enter the following information: Set the Repository NAME: line_profiler Set the Repository URL: https://github.com/pyutils/line_profiler Make sure you have a .readthedocs.yml file For gitlab you also need to set up an integration.
Navigate to: https://readthedocs.org/dashboard/line-profiler/integrations/create/ Then add gitlab incoming webhook and copy the URL (make sure you copy the real url and not the text so https is included), specifically: In the "Integration type:" dropdown menu, select "Gitlab incoming webhook" Click "Add integration" Copy the text in the "Webhook URL" box to be used later. Copy the text in the "Secret" box to be used later. Then go to https://github.com/pyutils/line_profiler/hooks Click "Add new webhook". Copy the text previously saved from the "Webhook URL" box in the readthedocs form into the "URL" box in the gitlab form. Copy the text previously saved from the "Secret" box in the readthedocs form into the "Secret token" box in the gitlab form. For trigger permissions select the following checkboxes: push events, tag push events, merge request events Click the "Add webhook" button. See Docs for more details https://docs.readthedocs.io/en/stable/integrations.html Will also need to activate the main branch: https://readthedocs.org/projects/line-profiler/versions/ """ # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/stable/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- Project information ----------------------------------------------------- import sphinx_rtd_theme from os.path import exists from os.path import dirname from os.path import join def parse_version(fpath): """ Statically parse the version number from a python file """ import ast if not exists(fpath): raise ValueError('fpath={!r} does not exist'.format(fpath)) with open(fpath, 'r') as file_: sourcecode = file_.read() pt = ast.parse(sourcecode) class VersionVisitor(ast.NodeVisitor): def visit_Assign(self, node): for target in node.targets: if getattr(target, 'id', None) == '__version__': self.version = node.value.s visitor = VersionVisitor() visitor.visit(pt) return visitor.version project = 'line_profiler' copyright = '2025, Robert Kern' author = 'Robert Kern' modname = 'line_profiler' repo_dpath = dirname(dirname(dirname(__file__))) mod_dpath = join(repo_dpath, 'line_profiler') src_dpath = dirname(mod_dpath) modpath = join(mod_dpath, '__init__.py') release = parse_version(modpath) version = '.'.join(release.split('.')[0:2]) # Hack to ensure the module is importable # sys.path.insert(0, os.path.abspath(src_dpath)) # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ # 'autoapi.extension', 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.intersphinx', 'sphinx.ext.napoleon', 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'myst_parser', # For markdown docs 'sphinx.ext.imgconverter', # For building latexpdf 'sphinx.ext.githubpages', # 'sphinxcontrib.redirects', 'sphinx_reredirects', ] todo_include_todos = True napoleon_google_docstring = True napoleon_use_param = False napoleon_use_ivar = True #autoapi_type = 'python' #autoapi_dirs = [mod_dpath] autodoc_inherit_docstrings = False # Hack for geowatch, todo configure autosummary_mock_imports = [ 'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_24_and_lt_4_xx', 'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_22_and_lt_4_24', 'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_21_and_lt_4_22', 'geowatch.tasks.fusion.datamodules.temporal_sampling.affinity_sampling', 'geowatch.tasks.depth_pcd.model', 'geowatch.tasks.cold.export_change_map', ] autodoc_default_options = { # Document callable classes 'special-members': '__call__'} autodoc_member_order = 'bysource' autoclass_content = 'both' # autodoc_mock_imports = ['torch', 'torchvision', 'visdom'] # autoapi_modules = { # modname: { # 'override': False, # 'output': 'auto' # } # } # autoapi_dirs = [f'../../src/{modname}'] # autoapi_keep_files = True # References: # https://stackoverflow.com/questions/21538983/specifying-targets-for-intersphinx-links-to-numpy-scipy-and-matplotlib intersphinx_mapping = { # 'pytorch': ('http://pytorch.org/docs/master/', None), 'python': ('https://docs.python.org/3', None), 'click': ('https://click.palletsprojects.com/', None), # 'xxhash': ('https://pypi.org/project/xxhash/', None), # 'pygments': ('https://pygments.org/docs/', None), # 'tqdm': ('https://tqdm.github.io/', None), # Requires that the repo have objects.inv 'kwarray': ('https://kwarray.readthedocs.io/en/latest/', None), 'kwimage': ('https://kwimage.readthedocs.io/en/latest/', None), # 'kwplot': ('https://kwplot.readthedocs.io/en/latest/', None), 'ndsampler': ('https://ndsampler.readthedocs.io/en/latest/', None), 'ubelt': ('https://ubelt.readthedocs.io/en/latest/', None), 'xdoctest': ('https://xdoctest.readthedocs.io/en/latest/', None), 'networkx': ('https://networkx.org/documentation/stable/', None), 'scriptconfig': ('https://scriptconfig.readthedocs.io/en/latest/', None), 'rich': ('https://rich.readthedocs.io/en/latest/', None), 'numpy': ('https://numpy.org/doc/stable/', None), 'sympy': ('https://docs.sympy.org/latest/', None), 'scikit-learn': ('https://scikit-learn.org/stable/', None), 'pandas': ('https://pandas.pydata.org/docs/', None), 'matplotlib': ('https://matplotlib.org/stable/', None), 'pytest': ('https://docs.pytest.org/en/latest/', None), 'platformdirs': ('https://platformdirs.readthedocs.io/en/latest/', None), 'timerit': ('https://timerit.readthedocs.io/en/latest/', None), 'progiter': ('https://progiter.readthedocs.io/en/latest/', None), 'dateutil': ('https://dateutil.readthedocs.io/en/latest/', None), # 'pytest._pytest.doctest': ('https://docs.pytest.org/en/latest/_modules/_pytest/doctest.html', None), # 'colorama': ('https://pypi.org/project/colorama/', None), # 'cv2' : ('http://docs.opencv.org/2.4/', None), # 'h5py' : ('http://docs.h5py.org/en/latest/', None) } __dev_note__ = """ python -m sphinx.ext.intersphinx https://docs.python.org/3/objects.inv python -m sphinx.ext.intersphinx https://kwcoco.readthedocs.io/en/latest/objects.inv python -m sphinx.ext.intersphinx https://networkx.org/documentation/stable/objects.inv python -m 
sphinx.ext.intersphinx https://kwarray.readthedocs.io/en/latest/objects.inv python -m sphinx.ext.intersphinx https://kwimage.readthedocs.io/en/latest/objects.inv python -m sphinx.ext.intersphinx https://ubelt.readthedocs.io/en/latest/objects.inv python -m sphinx.ext.intersphinx https://networkx.org/documentation/stable/objects.inv sphobjinv suggest -t 90 -u https://readthedocs.org/projects/pytest/reference/objects.inv "signal.convolve2d" python -m sphinx.ext.intersphinx https://pygments-doc.readthedocs.io/en/latest/objects.inv """ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { 'collapse_navigation': False, 'display_version': True, 'navigation_depth': -1, # 'logo_only': True, } # html_logo = '.static/line_profiler.svg' # html_favicon = '.static/line_profiler.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. 
htmlhelp_basename = project + 'doc'


# -- Options for LaTeX output ------------------------------------------------

# References:
# https://tex.stackexchange.com/questions/546246/centos-8-the-font-freeserif-cannot-be-found
"""
# https://www.sphinx-doc.org/en/master/usage/builders/index.html#sphinx.builders.latex.LaTeXBuilder
# https://tex.stackexchange.com/a/570691/83399
sudo apt install fonts-freefont-otf texlive-luatex texlive-latex-extra texlive-fonts-recommended texlive-latex-recommended tex-gyre latexmk

make latexpdf LATEXMKOPTS="-shell-escape --synctex=-1 -src-specials -interaction=nonstopmode"
make latexpdf LATEXMKOPTS="-lualatex -interaction=nonstopmode"
make LATEXMKOPTS="-lualatex -interaction=nonstopmode"
"""
# latex_engine = 'lualatex'
# latex_engine = 'xelatex'

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'line_profiler.tex', 'line_profiler Documentation',
     'Robert Kern', 'manual'),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'line_profiler', 'line_profiler Documentation',
     [author], 1)
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'line_profiler', 'line_profiler Documentation',
     author, 'line_profiler', 'One line description of project.',
     'Miscellaneous'),
]


# -- Extension configuration -------------------------------------------------
from sphinx.domains.python import PythonDomain  # NOQA
# from sphinx.application import Sphinx  # NOQA
from typing import Any, List  # NOQA


# HACK TO PREVENT EXCESSIVE TIME.
# TODO: FIXME FOR REAL
MAX_TIME_MINUTES = None
if MAX_TIME_MINUTES:
    import ubelt  # NOQA
    TIMER = ubelt.Timer()
    TIMER.tic()


class PatchedPythonDomain(PythonDomain):
    """
    References:
        https://github.com/sphinx-doc/sphinx/issues/3866
    """
    def resolve_xref(self, env, fromdocname, builder, typ, target, node,
                     contnode):
        """
        Helps to resolve cross-references
        """
        # Expand shorthand module aliases to their full names. Note that
        # the prefix must be sliced off (``target[3:]``, not ``target[3]``,
        # which would keep only a single character).
        if target.startswith('ub.'):
            target = 'ubelt.' + target[3:]
        if target.startswith('xdoc.'):
            target = 'xdoctest.' + target[5:]
        return_value = super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode)
        return return_value


class GoogleStyleDocstringProcessor:
    """
    A small extension that runs after napoleon and reformats
    erotemic-flavored google-style docstrings for sphinx.
    """

    def __init__(self, autobuild=1):
        self.debug = 0
        self.registry = {}
        if autobuild:
            self._register_builtins()

    def register_section(self, tag, alias=None):
        """
        Decorator that adds a custom processing function for a non-standard
        google style tag. The decorated function should accept a list of
        docstring lines, where the first one will be the google-style tag
        that likely needs to be replaced, and then return the appropriate
        sphinx format (TODO what is the name? Is it just RST?).
""" alias = [] if alias is None else alias alias = [alias] if not isinstance(alias, (list, tuple, set)) else alias alias.append(tag) alias = tuple(alias) # TODO: better tag patterns def _wrap(func): self.registry[tag] = { 'tag': tag, 'alias': alias, 'func': func, } return func return _wrap def _register_builtins(self): """ Adds definitions I like of CommandLine, TextArt, and Ignore """ @self.register_section(tag='CommandLine') def commandline(lines): new_lines = [] new_lines.append('.. rubric:: CommandLine') new_lines.append('') new_lines.append('.. code-block:: bash') new_lines.append('') new_lines.extend(lines[1:]) return new_lines @self.register_section(tag='SpecialExample', alias=['Benchmark', 'Sympy', 'Doctest']) def benchmark(lines): import textwrap new_lines = [] tag = lines[0].replace(':', '').strip() # new_lines.append(lines[0]) # TODO: it would be nice to change the tagline. # new_lines.append('') new_lines.append('.. rubric:: {}'.format(tag)) new_lines.append('') new_text = textwrap.dedent('\n'.join(lines[1:])) redone = new_text.split('\n') new_lines.extend(redone) # import ubelt as ub # print('new_lines = {}'.format(ub.urepr(new_lines, nl=1))) # new_lines.append('') return new_lines @self.register_section(tag='TextArt', alias=['Ascii']) def text_art(lines): new_lines = [] new_lines.append('.. rubric:: TextArt') new_lines.append('') new_lines.append('.. code-block:: bash') new_lines.append('') new_lines.extend(lines[1:]) return new_lines # @self.register_section(tag='TODO', alias=['.. todo::']) # def todo_section(lines): # """ # Fixup todo sections # """ # import xdev # xdev.embed() # import ubelt as ub # print('lines = {}'.format(ub.urepr(lines, nl=1))) # return new_lines @self.register_section(tag='Ignore') def ignore(lines): return [] def process(self, lines): """ Example: >>> import ubelt as ub >>> self = GoogleStyleDocstringProcessor() >>> lines = ['Hello world', >>> '', >>> 'CommandLine:', >>> ' hi', >>> '', >>> 'CommandLine:', >>> '', >>> ' bye', >>> '', >>> 'TextArt:', >>> '', >>> ' 1', >>> ' 2', >>> '', >>> ' 345', >>> '', >>> 'Foobar:', >>> '', >>> 'TextArt:'] >>> new_lines = self.process(lines[:]) >>> print(chr(10).join(new_lines)) """ orig_lines = lines[:] new_lines = [] curr_mode = '__doc__' accum = [] def accept(): """ called when we finish reading a section """ if curr_mode == '__doc__': # Keep the lines as-is new_lines.extend(accum) else: # Process this section with the given function regitem = self.registry[curr_mode] func = regitem['func'] fixed = func(accum) new_lines.extend(fixed) # Reset the accumulator for the next section accum[:] = [] for line in orig_lines: found = None for regitem in self.registry.values(): if line.startswith(regitem['alias']): found = regitem['tag'] break if not found and line and not line.startswith(' '): # if the line startswith anything but a space, we are no longer # in the previous nested scope. NOTE: This assumption may not # be general, but it works for my code. found = '__doc__' if found: # New section is found, accept the previous one and start # accumulating the new one. 
accept() curr_mode = found accum.append(line) # Finalize the last section accept() lines[:] = new_lines # make sure there is a blank line at the end if lines and lines[-1]: lines.append('') return lines def process_docstring_callback(self, app, what_: str, name: str, obj: Any, options: Any, lines: List[str]) -> None: """ Callback to be registered to autodoc-process-docstring Custom process to transform docstring lines Remove "Ignore" blocks Args: app (sphinx.application.Sphinx): the Sphinx application object what (str): the type of the object which the docstring belongs to (one of "module", "class", "exception", "function", "method", "attribute") name (str): the fully qualified name of the object obj: the object itself options: the options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are true if the flag option of same name was given to the auto directive lines (List[str]): the lines of the docstring, see above References: https://www.sphinx-doc.org/en/1.5.1/_modules/sphinx/ext/autodoc.html https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html """ if self.debug: print(f'ProcessDocstring: name={name}, what_={what_}, num_lines={len(lines)}') # print('BEFORE:') # import ubelt as ub # print('lines = {}'.format(ub.urepr(lines, nl=1))) self.process(lines) # docstr = '\n'.join(lines) # if 'Convert the Mask' in docstr: # import xdev # xdev.embed() # if 'keys in this dictionary ' in docstr: # import xdev # xdev.embed() render_doc_images = 0 if MAX_TIME_MINUTES and TIMER.toc() > (60 * MAX_TIME_MINUTES): render_doc_images = False # FIXME too slow on RTD if render_doc_images: # DEVELOPING if any('REQUIRES(--show)' in line for line in lines): # import xdev # xdev.embed() create_doctest_figure(app, obj, name, lines) FIX_EXAMPLE_FORMATTING = 1 if FIX_EXAMPLE_FORMATTING: for idx, line in enumerate(lines): if line == "Example:": lines[idx] = "**Example:**" lines.insert(idx + 1, "") REFORMAT_SECTIONS = 0 if REFORMAT_SECTIONS: REFORMAT_RETURNS = 0 REFORMAT_PARAMS = 0 docstr = SphinxDocstring(lines) if REFORMAT_PARAMS: for found in docstr.find_tagged_lines('Parameters'): print(found['text']) edit_slice = found['edit_slice'] # TODO: figure out how to do this. # # file = 'foo.rst' # import rstparse # rst = rstparse.Parser() # import io # rst.read(io.StringIO(found['text'])) # rst.parse() # for line in rst.lines: # print(line) # # found['text'] # import docutils # settings = docutils.frontend.OptionParser( # components=(docutils.parsers.rst.Parser,) # ).get_default_values() # document = docutils.utils.new_document('', settings) # from docutils.parsers import rst # rst.Parser().parse(found['text'], document) if REFORMAT_RETURNS: for found in docstr.find_tagged_lines('returns'): # FIXME: account for new slice with -2 offset edit_slice = found['edit_slice'] text = found['text'] new_lines = [] for para in text.split('\n\n'): indent = para[:len(para) - len(para.lstrip())] new_paragraph = indent + paragraph(para) new_lines.append(new_paragraph) new_lines.append('') new_lines = new_lines[:-1] lines[edit_slice] = new_lines # print('AFTER:') # print('lines = {}'.format(ub.urepr(lines, nl=1))) # if name == 'kwimage.Affine.translate': # import sys # sys.exit(1) class SphinxDocstring: """ Helper to parse and modify sphinx docstrings """ def __init__(docstr, lines): docstr.lines = lines # FORMAT THE RETURNS SECTION A BIT NICER import re tag_pat = re.compile(r'^:(\w*):') directive_pat = re.compile(r'^.. 
(\w*)::\s*(\w*)') # Split by sphinx types, mark the line offset where they start / stop sphinx_parts = [] for idx, line in enumerate(lines): tag_match = tag_pat.search(line) directive_match = directive_pat.search(line) if tag_match: tag = tag_match.groups()[0] sphinx_parts.append({ 'tag': tag, 'start_offset': idx, 'type': 'tag', }) elif directive_match: tag = directive_match.groups()[0] sphinx_parts.append({ 'tag': tag, 'start_offset': idx, 'type': 'directive', }) prev_offset = len(lines) for part in sphinx_parts[::-1]: part['end_offset'] = prev_offset prev_offset = part['start_offset'] docstr.sphinx_parts = sphinx_parts if 0: for line in lines: print(line) def find_tagged_lines(docstr, tag): for part in docstr.sphinx_parts[::-1]: if part['tag'] == tag: edit_slice = slice(part['start_offset'], part['end_offset']) return_section = docstr.lines[edit_slice] text = '\n'.join(return_section) found = { 'edit_slice': edit_slice, 'text': text, } yield found def paragraph(text): r""" Wraps multi-line strings and restructures the text to remove all newlines, heading, trailing, and double spaces. Useful for writing log messages Args: text (str): typically a multiline string Returns: str: the reduced text block """ import re out = re.sub(r'\s\s*', ' ', text).strip() return out def create_doctest_figure(app, obj, name, lines): """ The idea is that each doctest that produces a figure should generate that and then that figure should be part of the docs. """ import xdoctest import sys import types if isinstance(obj, types.ModuleType): module = obj else: module = sys.modules[obj.__module__] # TODO: read settings from pyproject.toml? if '--show' not in sys.argv: sys.argv.append('--show') if '--nointeract' not in sys.argv: sys.argv.append('--nointeract') modpath = module.__file__ # print(doctest.format_src()) import pathlib # HACK: write to the srcdir doc_outdir = pathlib.Path(app.outdir) doc_srcdir = pathlib.Path(app.srcdir) doc_static_outdir = doc_outdir / '_static' doc_static_srcdir = doc_srcdir / '_static' src_fig_dpath = (doc_static_srcdir / 'images') src_fig_dpath.mkdir(exist_ok=True, parents=True) out_fig_dpath = (doc_static_outdir / 'images') out_fig_dpath.mkdir(exist_ok=True, parents=True) # fig_dpath = (doc_outdir / 'autofigs' / name).mkdir(exist_ok=True) fig_num = 1 import kwplot kwplot.autompl(force='agg') plt = kwplot.autoplt() docstr = '\n'.join(lines) # TODO: The freeform parser does not work correctly here. # We need to parse out the sphinx (epdoc)? individual examples # so we can get different figures. But we can hack it for now. import re split_parts = re.split('({}\\s*\n)'.format(re.escape('.. rubric:: Example')), docstr) # split_parts = docstr.split('.. 
rubric:: Example')
    # import xdev
    # xdev.embed()

    def doctest_line_offsets(doctest):
        # Where the doctest starts and ends relative to the file
        start_line_offset = doctest.lineno - 1
        last_part = doctest._parts[-1]
        last_line_offset = (
            start_line_offset + last_part.line_offset + last_part.n_lines - 1)
        offsets = {
            'start': start_line_offset,
            'end': last_line_offset,
            'stop': last_line_offset + 1,
        }
        return offsets

    # from xdoctest import utils
    # part_lines = utils.add_line_numbers(docstr.split('\n'), n_digits=3, start=0)
    # print('\n'.join(part_lines))

    to_insert_fpaths = []
    curr_line_offset = 0
    for part in split_parts:
        num_lines = part.count('\n')

        doctests = list(xdoctest.core.parse_docstr_examples(
            part, modpath=modpath, callname=name,
            # style='google'
        ))
        # print(doctests)
        # doctests = list(xdoctest.core.parse_docstr_examples(
        #     docstr, modpath=modpath, callname=name))

        for doctest in doctests:
            if '--show' in part:
                ...
                # print('-- SHOW TEST---')
                # kwplot.close_figures()
                try:
                    import pytest  # NOQA
                except ImportError:
                    pass
                try:
                    from xdoctest.exceptions import Skipped
                except ImportError:  # nocover
                    # Define a dummy Skipped exception if xdoctest
                    # does not provide one
                    class Skipped(Exception):
                        pass
                try:
                    doctest.mode = 'native'
                    doctest.run(verbose=0, on_error='raise')
                    ...
                except Skipped:
                    print(f'Skip doctest={doctest}')
                except Exception as ex:
                    print(f'ex={ex}')
                    print(f'Error in doctest={doctest}')

                offsets = doctest_line_offsets(doctest)
                doctest_line_end = curr_line_offset + offsets['stop']
                insert_line_index = doctest_line_end

                figures = kwplot.all_figures()
                for fig in figures:
                    fig_num += 1
                    # path_name = path_sanitize(name)
                    path_name = (name).replace('.', '_')
                    fig_fpath = (
                        src_fig_dpath / f'fig_{path_name}_{fig_num:03d}.jpeg')
                    fig.savefig(fig_fpath)
                    print(f'Wrote figure: {fig_fpath}')
                    to_insert_fpaths.append({
                        'insert_line_index': insert_line_index,
                        'fpath': fig_fpath,
                    })

                for fig in figures:
                    plt.close(fig)
                # kwplot.close_figures(figures)

        curr_line_offset += (num_lines)

    # if len(doctests) > 1:
    #     doctests
    #     import xdev
    #     xdev.embed()

    INSERT_AT = 'end'
    INSERT_AT = 'inline'

    end_index = len(lines)
    # Reverse order for inserts
    import shutil
    for info in to_insert_fpaths[::-1]:
        src_abs_fpath = info['fpath']

        rel_to_static_fpath = src_abs_fpath.relative_to(doc_static_srcdir)
        # dst_abs_fpath = doc_static_outdir / rel_to_static_fpath
        # dst_abs_fpath.parent.mkdir(parents=True, exist_ok=True)

        rel_to_root_fpath = src_abs_fpath.relative_to(doc_srcdir)

        dst_abs_fpath1 = doc_outdir / rel_to_root_fpath
        dst_abs_fpath1.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(src_abs_fpath, dst_abs_fpath1)

        dst_abs_fpath2 = doc_outdir / rel_to_static_fpath
        dst_abs_fpath2.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(src_abs_fpath, dst_abs_fpath2)

        dst_abs_fpath3 = doc_srcdir / rel_to_static_fpath
        dst_abs_fpath3.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(src_abs_fpath, dst_abs_fpath3)

        if INSERT_AT == 'inline':
            # Try to insert after test
            insert_index = info['insert_line_index']
        elif INSERT_AT == 'end':
            insert_index = end_index
        else:
            raise KeyError(INSERT_AT)
        lines.insert(insert_index,
                     '.. image:: {}'.format('..' / rel_to_root_fpath))
        # lines.insert(insert_index, '.. image:: {}'.format(rel_to_root_fpath))
        # lines.insert(insert_index, '.. image:: {}'.format(rel_to_static_fpath))
        lines.insert(insert_index, '')


def postprocess_hyperlinks(app, doctree, docname):
    """
    Extension to fixup hyperlinks.
    This should be connected to the Sphinx application's
    "doctree-resolved" event (see ``setup`` below).
""" # Your hyperlink postprocessing logic here from docutils import nodes import pathlib for node in doctree.traverse(nodes.reference): if 'refuri' in node.attributes: refuri = node.attributes['refuri'] if '.rst' in refuri: if 'source' in node.document: fpath = pathlib.Path(node.document['source']) parent_dpath = fpath.parent if (parent_dpath / refuri).exists(): node.attributes['refuri'] = refuri.replace('.rst', '.html') else: raise AssertionError def fix_rst_todo_section(lines): new_lines = [] for line in lines: ... ... def setup(app): import sphinx app : sphinx.application.Sphinx = app app.add_domain(PatchedPythonDomain, override=True) app.connect("doctree-resolved", postprocess_hyperlinks) docstring_processor = GoogleStyleDocstringProcessor() # https://stackoverflow.com/questions/26534184/can-sphinx-ignore-certain-tags-in-python-docstrings app.connect('autodoc-process-docstring', docstring_processor.process_docstring_callback) def copy(src, dst): import shutil print(f'Copy {src} -> {dst}') assert src.exists() if not dst.parent.exists(): dst.parent.mkdir() shutil.copy(src, dst) ### Hack for kwcoco: TODO: figure out a way for the user to configure this. HACK_FOR_KWCOCO = 0 if HACK_FOR_KWCOCO: import pathlib doc_outdir = pathlib.Path(app.outdir) / 'auto' doc_srcdir = pathlib.Path(app.srcdir) / 'auto' mod_dpath = doc_srcdir / '../../../kwcoco' src_fpath = (mod_dpath / 'coco_schema.json') copy(src_fpath, doc_outdir / src_fpath.name) copy(src_fpath, doc_srcdir / src_fpath.name) src_fpath = (mod_dpath / 'coco_schema_informal.rst') copy(src_fpath, doc_outdir / src_fpath.name) copy(src_fpath, doc_srcdir / src_fpath.name) return app line_profiler-5.0.0/docs/source/index.rst000066400000000000000000000006241503775420400204470ustar00rootroot00000000000000.. The __init__ files contains the top-level documentation overview .. automodule:: line_profiler.__init__ :show-inheritance: .. toctree:: :maxdepth: 8 :caption: Package Layout auto/line_profiler auto/line_profiler.autoprofile auto/line_profiler.explicit_profiler auto/kernprof manual/examples/index Indices and tables ================== * :ref:`genindex` * :ref:`modindex` line_profiler-5.0.0/docs/source/manual/000077500000000000000000000000001503775420400200615ustar00rootroot00000000000000line_profiler-5.0.0/docs/source/manual/examples/000077500000000000000000000000001503775420400216775ustar00rootroot00000000000000line_profiler-5.0.0/docs/source/manual/examples/example_kernprof.rst000066400000000000000000000274521503775420400260040ustar00rootroot00000000000000``kernprof`` invocations ======================== The module (and installed script) :py:mod:`kernprof` can be used to run and profile Python code in various forms. For the following, we assume that we have: * the below file ``fib.py`` in the current directory, and * :py:mod:`line_profiler` and :py:mod:`kernprof` installed. .. 
code:: python import functools import sys from argparse import ArgumentParser from typing import Callable, Optional, Sequence @functools.lru_cache() def fib(n: int) -> int: return _run_fib(fib, n) def fib_no_cache(n: int) -> int: return _run_fib(fib_no_cache, n) def _run_fib(fib: Callable[[int], int], n: int) -> int: if n < 0: raise ValueError(f'{n = !r}: expected non-negative integer') if n < 2: return 1 prev_prev = fib(n - 2) prev = fib(n - 1) return prev_prev + prev def main(args: Optional[Sequence[str]] = None) -> None: parser = ArgumentParser() parser.add_argument('n', nargs='+', type=int) parser.add_argument('--verbose', action='store_true') parser.add_argument('--no-cache', action='store_true') arguments = parser.parse_args(args) pattern = 'fib({!r}) = {!r}' if arguments.verbose else '{1!r}' func = fib_no_cache if arguments.no_cache else fib for n in arguments.n: result = func(n) print(pattern.format(n, result)) if __name__ == '__main__': main() Script execution ---------------- In the most basic form, one passes the path to the executed script and its arguments to ``kernprof``: .. code:: bash kernprof --prof-mod fib.py --line-by-line --view \ fib.py --verbose 10 20 30 .. raw:: html
Output (click to expand) .. code:: fib(10) = 89 fib(20) = 10946 fib(30) = 1346269 Wrote profile results to fib.py.lprof Timer unit: 1e-06 s Total time: 5.6e-05 s File: fib.py Function: fib at line 7 Line # Hits Time Per Hit % Time Line Contents ============================================================== 7 @functools.lru_cache() 8 def fib(n: int) -> int: 9 31 56.0 1.8 100.0 return _run_fib(fib, n) Total time: 0 s File: fib.py Function: fib_no_cache at line 12 Line # Hits Time Per Hit % Time Line Contents ============================================================== 12 def fib_no_cache(n: int) -> int: 13 return _run_fib(fib_no_cache, n) Total time: 3.8e-05 s File: fib.py Function: _run_fib at line 16 Line # Hits Time Per Hit % Time Line Contents ============================================================== 16 def _run_fib(fib: Callable[[int], int], n: int) -> int: 17 31 3.0 0.1 7.9 if n < 0: 18 raise ValueError(f'{n = !r}: expected non-negative integer') 19 31 2.0 0.1 5.3 if n < 2: 20 2 0.0 0.0 0.0 return 1 21 29 18.0 0.6 47.4 prev_prev = fib(n - 2) 22 29 12.0 0.4 31.6 prev = fib(n - 1) 23 29 3.0 0.1 7.9 return prev_prev + prev Total time: 0.000486 s File: fib.py Function: main at line 26 Line # Hits Time Per Hit % Time Line Contents ============================================================== 26 def main(args: Optional[Sequence[str]] = None) -> None: 27 1 184.0 184.0 37.9 parser = ArgumentParser() 28 1 17.0 17.0 3.5 parser.add_argument('n', nargs='+', type=int) 29 1 16.0 16.0 3.3 parser.add_argument('--verbose', action='store_true') 30 1 14.0 14.0 2.9 parser.add_argument('--no-cache', action='store_true') 31 1 144.0 144.0 29.6 arguments = parser.parse_args(args) 32 33 1 0.0 0.0 0.0 pattern = 'fib({!r}) = {!r}' if arguments.verbose else '{1!r}' 34 1 0.0 0.0 0.0 func = fib_no_cache if arguments.no_cache else fib 35 36 4 0.0 0.0 0.0 for n in arguments.n: 37 3 91.0 30.3 18.7 result = func(n) 38 3 20.0 6.7 4.1 print(pattern.format(n, result)) .. raw:: html

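The results written to ``fib.py.lprof`` can just as well be inspected after
the run has finished. A minimal two-step sketch (assuming both commands are
issued from the directory containing ``fib.py``, so that the source lines can
be resolved):

.. code:: bash

    kernprof --prof-mod fib.py --line-by-line fib.py --verbose 10 20 30
    python -m line_profiler fib.py.lprof
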
.. _kernprof-script-note: .. note:: Instead of passing the ``--view`` flag to ``kernprof`` to view the profiling results immediately, sometimes it can be more convenient to just generate the profiling results and view them later by running the :py:mod:`line_profiler` module (``python -m line_profiler``). Module execution ---------------- It is also possible to use ``kernprof -m`` to run installed modules and packages: .. code:: bash PYTHONPATH="${PYTHONPATH}:${PWD}" \ kernprof --prof-mod fib --line-by-line --view -m \ fib --verbose 10 20 30 .. raw:: html

Output (click to expand) .. code:: fib(10) = 89 fib(20) = 10946 fib(30) = 1346269 Wrote profile results to fib.lprof ... .. raw:: html

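As in the script case, the saved results file (here ``fib.lprof``, named
after the module) can be loaded again later:

.. code:: bash

    python -m line_profiler fib.lprof
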
.. _kernprof-m-note: .. note:: As with ``python -m``, the ``-m`` option terminates further parsing of arguments by ``kernprof`` and passes them all to the argument thereafter (the run module). If there isn't one, an error is raised: .. code:: bash kernprof -m .. raw:: html

Output (click to expand) .. code:: pycon Traceback (most recent call last): ... ValueError: argument expected for the -m option .. raw:: html
Literal-code execution ---------------------- Like how ``kernprof -m`` parallels ``python -m``, ``kernprof -c`` can be used to run and profile literal snippets supplied on the command line like ``python -c``: .. code:: bash PYTHONPATH="${PYTHONPATH}:${PWD}" \ kernprof --prof-mod fib._run_fib --line-by-line --view -c " import sys from fib import _run_fib, fib_no_cache as fib for n in sys.argv[1:]: print(f'fib({n})', '=', fib(int(n))) " 10 20 .. raw:: html
Output (click to expand) .. code:: fib(10) = 89 fib(20) = 10946 Wrote profile results to <...>/kernprof-command-imuhz89_.lprof Timer unit: 1e-06 s Total time: 0.007666 s File: <...>/fib.py Function: _run_fib at line 16 Line # Hits Time Per Hit % Time Line Contents ============================================================== 16 def _run_fib(fib: Callable[[int], int], n: int) -> int: 17 22068 1656.0 0.1 20.6 if n < 0: 18 raise ValueError(f'{n = !r}: expected non-negative integer') 19 22068 1663.0 0.1 20.7 if n < 2: 20 11035 814.0 0.1 10.1 return 1 21 11033 1668.0 0.2 20.7 prev_prev = fib(n - 2) 22 11033 1477.0 0.1 18.4 prev = fib(n - 1) 23 11033 770.0 0.1 9.6 return prev_prev + prev .. raw:: html

.. note:: * As with ``python -c``, the ``-c`` option terminates further parsing of arguments by ``kernprof`` and passes them all to the argument thereafter (the executed code). If there isn't one, an error is raised as :ref:`above ` with ``kernprof -m``. * .. _kernprof-c-note: Since the temporary file containing the executed code will not exist beyond the ``kernprof`` process, profiling results pertaining to targets (function definitions) local to said code :ref:`will not be accessible later ` by ``python -m line_profiler`` and has to be ``--view``-ed immediately: .. code:: bash PYTHONPATH="${PYTHONPATH}:${PWD}" \ kernprof --line-by-line --view -c " from fib import fib def my_func(n=50): result = fib(n) print(n, '->', result) my_func()" .. raw:: html

Output (click to expand) .. code:: 50 -> 20365011074 Wrote profile results to <...>/kernprof-command-ni6nis6t.lprof Timer unit: 1e-06 s Total time: 3.8e-05 s File: <...>/kernprof-command.py Function: my_func at line 3 Line # Hits Time Per Hit % Time Line Contents ============================================================== 3 def my_func(n=50): 4 1 26.0 26.0 68.4 result = fib(n) 5 1 12.0 12.0 31.6 print(n, '->', result) .. raw:: html

Attempting to load these results afterwards recovers only partial
information, because the temporary source file no longer exists:

.. code:: bash

    python -m line_profiler kernprof-command-ni6nis6t.lprof

.. raw:: html

Output (click to expand) .. code:: Timer unit: 1e-06 s Total time: 3.6e-05 s Could not find file <...>/kernprof-command.py Are you sure you are running this program from the same directory that you ran the profiler from? Continuing without the function's contents. Line # Hits Time Per Hit % Time Line Contents ============================================================== 3 4 1 26.0 26.0 72.2 5 1 10.0 10.0 27.8 .. raw:: html
Executing code read from ``stdin`` ---------------------------------- It is also possible to read, run, and profile code from ``stdin``, by passing ``-`` to ``kernprof`` in place of a filename: .. code:: bash { # This example doesn't make much sense on its own, but just # imagine if this is a command generating code dynamically echo 'import sys' echo 'from fib import _run_fib, fib_no_cache as fib' echo 'for n in sys.argv[1:]:' echo ' print(f"fib({n})", "=", fib(int(n)))' } | PYTHONPATH="${PYTHONPATH}:${PWD}" \ kernprof --prof-mod fib._run_fib --line-by-line --view - 10 20 .. raw:: html
Output (click to expand) .. code:: fib(10) = 89 fib(20) = 10946 Wrote profile results to <...>/kernprof-stdin-kntk2lo1.lprof ... .. raw:: html

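Any shell construct that writes to ``stdin`` works equally well here; for
instance, the pipeline above can be rewritten with a here-document (an
equivalent sketch of the same invocation):

.. code:: bash

    PYTHONPATH="${PYTHONPATH}:${PWD}" \
        kernprof --prof-mod fib._run_fib --line-by-line --view - 10 20 <<'EOF'
    import sys
    from fib import _run_fib, fib_no_cache as fib
    for n in sys.argv[1:]:
        print(f"fib({n})", "=", fib(int(n)))
    EOF
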
.. note:: Since the temporary file containing the executed code will not exist beyond the ``kernprof`` process, profiling results pertaining to targets (function definitions) local to said code will not be accessible later and has to be ``--view``-ed immediately (see :ref:`above note ` on ``kernprof -c``). line_profiler-5.0.0/docs/source/manual/examples/example_toml_config.rst000066400000000000000000000142561503775420400264540ustar00rootroot00000000000000Using the line-profiler TOML configuration ------------------------------------------ This tutorial walks the user through setting up a toy Python project and then interacting with it via the new line-profiler TOML configuration. First, we need to setup a small project, for which we will use ``uv``. We will also use the ``tomlkit`` package to edit the config file programatically. If you don't have these installed, first run: .. code:: bash pip install uv tomlkit Next, we are going to setup a small package for this demonstration. .. code:: bash TEMP_DIR=$(mktemp -d --suffix=demo_pkg) mkdir -p $TEMP_DIR cd $TEMP_DIR uv init --lib --name demo_pkg # helper to prevent indentation errors codeblock(){ echo "$1" | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip('\n'))" } codeblock " import time from demo_pkg.utils import leq from demo_pkg import utils def fib(n): if leq(n, 1): return n part1 = fib(n - 1) part2 = fib(n - 2) result = utils.add(part1, part2) return result def sleep_loop(n): for _ in range(n): time.sleep(0.01) " > src/demo_pkg/core.py codeblock " def leq(a, b): return a <= b def add(a, b): return a + b " > src/demo_pkg/utils.py codeblock " from demo_pkg import core import uuid def main(): run_uuid = uuid.uuid4() print('The UUID of this run is', run_uuid) print('compute fib 10') result = core.fib(10) print('result', result) print('sleeping 5') core.sleep_loop(5) print('done') if __name__ == '__main__': main() " > src/demo_pkg/__main__.py # Run `uv pip install -e .` to install the project locally: uv pip install -e . Test that the main entrypoint works. .. code:: bash python -m demo_pkg Running kernprof with a main script that uses your package behaves as in 4.x in that no defaults are modified. .. code:: bash kernprof -m demo_pkg However, you can modify pyproject.toml to specify new defaults. After doing this, running kernprof will use defaults specified in your pyproject.toml (You may also pass ``--config`` to tell kernprof to use a different file to load the default config). .. code:: bash # Edit the `pyproject.toml` file to modify default behavior update_pyproject_toml(){ python -c "if 1: import pathlib import tomllib import tomlkit import sys config_path = pathlib.Path('pyproject.toml') config = tomllib.loads(config_path.read_text()) # Add in new values from textwrap import dedent new_text = dedent(sys.argv[1]) new_parts = tomllib.loads(new_text) config.update(new_parts) new_text = tomlkit.dumps(config) config_path.write_text(new_text) " "$1" } update_pyproject_toml " # New Config [tool.line_profiler.kernprof] line-by-line = true rich = true verbose = true skip-zero = true prof-mod = ['demo_pkg'] " # Now, running kernprof uses the new defaults kernprof -m demo_pkg You will now see how long each function took, and what the line-by line breakdown is .. 
code::

    # line-by-line breakdown omitted here
    0.05 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/__main__.py:4 - main
    0.00 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/core.py:5 - fib
    0.05 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/core.py:13 - sleep_loop
    0.00 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/utils.py:1 - leq
    0.00 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/utils.py:4 - add

Note that by specifying ``prof-mod``, every function within the package is
automatically profiled without any need for the ``@profile`` decorator.

It is worth noting that there is no requirement that the module you are
profiling is part of your package. You can specify any module name as part of
``prof-mod``. For example, let's profile the stdlib ``uuid`` module.

.. code:: bash

    update_pyproject_toml "
    # New Config
    [tool.line_profiler.kernprof]
    line-by-line = true
    rich = true
    verbose = 0
    skip-zero = true
    prof-mod = ['uuid']
    "

    # Now, running kernprof uses the new defaults
    kernprof -m demo_pkg
    python -m line_profiler -rmtz demo_pkg.lprof

This results in only showing calls in the ``uuid`` module:

.. code::

    # line-by-line breakdown omitted here
    0.00 seconds - .pyenv/versions/3.13.2/lib/python3.13/uuid.py:142 - UUID.__init__
    0.00 seconds - .pyenv/versions/3.13.2/lib/python3.13/uuid.py:283 - UUID.__str__
    0.00 seconds - .pyenv/versions/3.13.2/lib/python3.13/uuid.py:277 - UUID.__repr__
    0.00 seconds - .pyenv/versions/3.13.2/lib/python3.13/uuid.py:710 - uuid4

You can list exact functions to profile as long as they are addressable by
dotted names. The following configuration profiles only the ``fib`` function
in our package:

.. code:: bash

    update_pyproject_toml "
    # New Config
    [tool.line_profiler.kernprof]
    line-by-line = true
    rich = true
    verbose = 0
    skip-zero = true
    prof-mod = ['demo_pkg.core.fib']
    "

    # Now, running kernprof uses the new defaults
    kernprof -m demo_pkg
    python -m line_profiler -rmtz demo_pkg.lprof

The output is:

.. code::

    Line #      Hits         Time  Per Hit   % Time  Line Contents
    ==============================================================
         5                                           def fib(n):
         6       177        145.1      0.8     42.5      if leq(n, 1):
         7        89         29.7      0.3      8.7          return n
         8        88         29.1      0.3      8.5      part1 = fib(n - 1)
         9        88         27.7      0.3      8.1      part2 = fib(n - 2)
        10        88         78.0      0.9     22.8      result = utils.add(part1, part2)
        11        88         32.2      0.4      9.4      return result

    0.00 seconds - /tmp/tmp.vKpODQr6wndemo_pkg/src/demo_pkg/core.py:5 - fib
line_profiler-5.0.0/docs/source/manual/examples/example_units.rst000066400000000000000000000053121503775420400253070ustar00rootroot00000000000000Timing Units
------------

This example demonstrates how you can change the units in which the time is
reported.

Write the following demo script to disk:

.. code:: bash

    echo "if 1:
        from line_profiler import profile

        @profile
        def is_prime(n):
            max_val = n ** 0.5
            stop = int(max_val + 1)
            for i in range(2, stop):
                if n % i == 0:
                    return False
            return True

        def find_primes(size):
            primes = []
            for n in range(size):
                flag = is_prime(n)
                if flag:
                    primes.append(n)
            return primes

        def main():
            print('start calculating')
            primes = find_primes(10)
            primes = find_primes(1000)
            primes = find_primes(100000)
            print(f'done calculating. Found {len(primes)} primes.')

        if __name__ == '__main__':
            main()
    " > script.py

Run the script with line profiling on. To change the unit in which time is
reported, use the ``--unit`` command line argument. The following example
shows 4 variants:

..
code:: bash LINE_PROFILE=1 python script.py # Use different values for the unit report python -m line_profiler -rtmz --unit 1 profile_output.lprof python -m line_profiler -rtmz --unit 1e-3 profile_output.lprof python -m line_profiler -rtmz --unit 1e-6 profile_output.lprof python -m line_profiler -rtmz --unit 1e-9 profile_output.lprof You will notice the relevant difference in the output lines: .. code:: ============== unit 1 variant ============== Timer unit: 1 s ... 6 101010 0.0 0.0 3.6 max_val = n ** 0.5 7 101010 0.1 0.0 4.0 stop = int(max_val + 1) ... ================= unit 1e-3 variant ================= Timer unit: 0.001 s ... 6 101010 46.6 0.0 3.6 max_val = n ** 0.5 7 101010 51.5 0.0 4.0 stop = int(max_val + 1) ... ================= unit 1e-6 variant ================= Timer unit: 1e-06 s ... 6 101010 46558.2 0.5 3.6 max_val = n ** 0.5 7 101010 51491.7 0.5 4.0 stop = int(max_val + 1) ... ================= unit 1e-9 variant ================= Timer unit: 1e-09 s ... 6 101010 46558246.0 460.9 3.6 max_val = n ** 0.5 7 101010 51491716.0 509.8 4.0 stop = int(max_val + 1) ... line_profiler-5.0.0/docs/source/manual/examples/index.rst000066400000000000000000000006711503775420400235440ustar00rootroot00000000000000Examples -------- Examples of line profiler usage: + `Basic Usage <../../index.html#line-profiler-basic-usage>`_ + `Kernprof Usage `_ + `Auto Profiling <../../auto/line_profiler.autoprofile.html#auto-profiling>`_ + `Explicit Profiler <../../auto/line_profiler.explicit_profiler.html#module-line_profiler.explicit_profiler>`_ + `Timing Units `_ + `TOML Config Usage `_ line_profiler-5.0.0/docs/source/modules.rst000066400000000000000000000001141503775420400210020ustar00rootroot00000000000000line_profiler ============= .. toctree:: :maxdepth: 4 line_profiler line_profiler-5.0.0/kernprof.py000077500000000000000000001364021503775420400165650ustar00rootroot00000000000000#!/usr/bin/env python """ Script to conveniently run profilers on code in a variety of circumstances. To profile a script, decorate the functions of interest with :py:deco:`profile `: .. code:: bash echo "if 1: @profile def main(): 1 + 1 main() " > script_to_profile.py NOTE: New in 4.1.0: Instead of relying on injecting :py:deco:`profile` into the builtins you can now ``import line_profiler`` and use :py:deco:`line_profiler.profile ` to decorate your functions. This allows the script to remain functional even if it is not actively profiled. See :py:mod:`!line_profiler` (:ref:`link `) for details. Then run the script using :program:`kernprof`: .. code:: bash kernprof -b script_to_profile.py By default this runs with the default :py:mod:`cProfile` profiler and does not require compiled modules. Instructions to view the results will be given in the output. Alternatively, adding :option:`!-v` to the command line will write results to stdout. To enable line-by-line profiling, :py:mod:`line_profiler` must be available and compiled, and the :option:`!-l` argument should be added to the :program:`kernprof` invocation: .. code:: bash kernprof -lb script_to_profile.py NOTE: New in 4.3.0: More code execution options are added: * :command:`kernprof -m some.module ` parallels :command:`python -m` and runs the provided module as :py:mod:`__main__`. * :command:`kernprof -c "some code" ` parallels :command:`python -c` and executes the provided literal code. * :command:`kernprof - ` parallels :command:`python -` and executes literal code passed via the :file:`stdin`. See also :doc:`kernprof invocations `. 
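As a quick illustrative sketch of the three forms (``some.module`` and the
inline snippet below are placeholders, not part of this repository):

.. code:: bash

    kernprof -lv script_to_profile.py
    kernprof -lv -m some.module
    kernprof -lv -c "import this"
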
For more details and options, refer to the CLI help.
To view the :program:`kernprof` help text run:

.. code:: bash

    kernprof --help

which displays:

.. code::

    usage: kernprof [-h] [-V] [--config CONFIG] [--no-config]
                    [--line-by-line [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [--builtin [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [-s SETUP] [-p {path/to/script | object.dotted.path}[,...]]
                    [--preimports [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [--prof-imports [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [-o OUTFILE] [-v] [-q]
                    [--rich [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [-u UNIT]
                    [--skip-zero [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [--summarize [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]]
                    [-i [OUTPUT_INTERVAL]]
                    {path/to/script | -m path.to.module | -c "literal code"} ...

    Run and profile a python script or module.

    positional arguments:
      {path/to/script | -m path.to.module | -c "literal code"}
                            The python script file, module, or literal code
                            to run
      args                  Optional script arguments

    options:
      -h, --help            show this help message and exit
      -V, --version         show program's version number and exit
      --config CONFIG       Path to the TOML file, from the
                            `tool.line_profiler.kernprof` table of which to
                            load defaults for the options.
                            (Default: 'pyproject.toml')
      --no-config           Disable the loading of configuration files other
                            than the default one

    profiling options:
      --line-by-line [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]
                            Use the line-by-line profiler instead of cProfile.
                            Implies `--builtin`.
                            (Default: False; short form: -l)
      --builtin [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]
                            Put `profile` in the builtins. Use
                            `profile.enable()`/`.disable()` to toggle
                            profiling, `@profile` to decorate functions, or
                            `with profile:` to profile a section of code.
                            (Default: False; short form: -b)
      -s, --setup SETUP     Path to the Python source file containing setup
                            code to execute before the code to profile.
                            (Default: N/A)
      -p, --prof-mod PROF_MOD
                            List of modules, functions and/or classes to
                            profile specified by their name or path. These
                            profiling targets can be supplied both as
                            comma-separated items, or separately with multiple
                            copies of this flag. Packages are automatically
                            recursed into unless they are specified with
                            `.__init__`. Adding the current script/module
                            profiles the entirety of it. Only works with line
                            profiling (`-l`/`--line-by-line`).
                            (Default: N/A; pass an empty string to clear the
                            defaults (or any `-p` target specified earlier))
      --preimports [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]
                            Eagerly import all profiling targets specified via
                            `-p` and profile them, instead of only profiling
                            those that are directly imported in the profiled
                            code. Only works with line profiling
                            (`-l`/`--line-by-line`). (Default: True)
      --prof-imports [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0]
                            If the script/module profiled is in `--prof-mod`,
                            autoprofile all its imports. Only works with line
                            profiling (`-l`/`--line-by-line`).
                            (Default: False)

    output options:
      -o, --outfile OUTFILE
                            Save stats to OUTFILE.
                            (Default: '.lprof' in line-profiling mode
                            (`-l`/`--line-by-line`); '.prof' otherwise)
      -v, --verbose, --view
                            Increase verbosity level (default: 0).
At level 1, view the profiling results in addition to saving them; at level 2, show other diagnostic info. -q, --quiet Decrease verbosity level (default: 0). At level -1, disable helpful messages (e.g. "Wrote profile results to <...>"); at level -2, silence the stdout; at level -3, silence the stderr. --rich [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0] Use rich formatting if viewing output. (Default: False; short form: -r) -u, --unit UNIT Output unit (in seconds) in which the timing info is displayed. (Default: 1e-06 s) --skip-zero [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0] Hide functions which have not been called. (Default: False; short form: -z) --summarize [Y[es] | N[o] | T[rue] | F[alse] | on | off | 1 | 0] Print a summary of total function time. (Default: False) -i, --output-interval [OUTPUT_INTERVAL] Enables outputting of cumulative profiling results to OUTFILE every OUTPUT_INTERVAL seconds. Uses the threading module. Minimum value (and the value implied if the bare option is given) is 1 s. (Default: 0 s (disabled)) NOTE: New in 5.0.0: For more intuitive profiling behavior, profiling targets in :option:`!--prof-mod` (except the profiled script/code) are now: * Eagerly pre-imported to be profiled (see :py:mod:`line_profiler.autoprofile.eager_preimports`), regardless of whether those imports directly occur in the profiled script/module/code. * Descended/Recursed into if they are packages; pass ``.__init__`` instead of ```` to curtail descent and limit profiling to classes and functions in the local namespace of the :file:`__init__.py`. To restore the old behavior, pass the :option:`!--no-preimports` flag. """ # noqa: E501 import atexit import builtins import functools import os import sys import threading import asyncio # NOQA import concurrent.futures # NOQA import contextlib import shutil import tempfile import time import warnings from argparse import ArgumentParser from io import StringIO from operator import methodcaller from runpy import run_module from pathlib import Path from pprint import pformat from shlex import quote from textwrap import indent, dedent from types import MethodType, SimpleNamespace # NOTE: This version needs to be manually maintained in # line_profiler/line_profiler.py and line_profiler/__init__.py as well __version__ = '5.0.0' # Guard the import of cProfile such that 3.x people # without lsprof can still use this script. try: from cProfile import Profile except ImportError: from profile import Profile # type: ignore[assignment,no-redef] import line_profiler from line_profiler.cli_utils import ( add_argument, get_cli_config, get_python_executable as _python_command, # Compatibility positive_float, short_string_path) from line_profiler.profiler_mixin import ByCountProfilerMixin from line_profiler._logger import Logger from line_profiler import _diagnostics as diagnostics DIAGNOSITICS_VERBOSITY = 2 def execfile(filename, globals=None, locals=None): """ Python 3.x doesn't have :py:func:`execfile` builtin """ with open(filename, 'rb') as f: exec(compile(f.read(), filename, 'exec'), globals, locals) # ===================================== class ContextualProfile(ByCountProfilerMixin, Profile): """ A subclass of :py:class:`Profile` that adds a context manager for Python 2.5 with: statements and a decorator. 
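    A minimal sketch of both usage patterns (illustrative only; ``work`` is a
    stand-in for whatever callable is being profiled):

    >>> prof = ContextualProfile()
    >>> with prof:  # profile a block of statements
    ...     _ = sum(range(100))
    >>> @prof  # or wrap a function so that every call is profiled
    ... def work():
    ...     return sum(range(100))
    >>> _ = work()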
""" def __init__(self, *args, **kwds): super(ByCountProfilerMixin, self).__init__(*args, **kwds) self.enable_count = 0 def __call__(self, func): return self.wrap_callable(func) def enable_by_count(self, subcalls=True, builtins=True): """ Enable the profiler if it hasn't been enabled before. """ if self.enable_count == 0: self.enable(subcalls=subcalls, builtins=builtins) self.enable_count += 1 def disable_by_count(self): """ Disable the profiler if the number of disable requests matches the number of enable requests. """ if self.enable_count > 0: self.enable_count -= 1 if self.enable_count == 0: self.disable() # FIXME: `profile.Profile` is fundamentally incompatible with the # by-count paradigm we use, as it can't be `.enable()`-ed nor # `.disable()`-ed class RepeatedTimer: """ Background timer for outputting file every ``n`` seconds. Adapted from [SO474528]_. References: .. [SO474528] https://stackoverflow.com/questions/474528/execute-function-every-x-seconds/40965385#40965385 """ # noqa: E501 def __init__(self, interval, dump_func, outfile): self._timer = None self.interval = interval self.dump_func = dump_func self.outfile = outfile self.is_running = False self.next_call = time.time() self.start() def _run(self): self.is_running = False self.start() self.dump_func(self.outfile) def start(self): if not self.is_running: self.next_call += self.interval self._timer = threading.Timer(self.next_call - time.time(), self._run) self._timer.start() self.is_running = True def stop(self): self._timer.cancel() self.is_running = False def find_module_script(module_name, *, exit_on_error=True): """Find the path to the executable script for a module or package.""" from line_profiler.autoprofile.util_static import modname_to_modpath for suffix in '.__main__', '': fname = modname_to_modpath(module_name + suffix) if fname: return fname msg = f'Could not find module `{module_name}`' if exit_on_error: print(msg, file=sys.stderr) raise SystemExit(1) else: raise ModuleNotFoundError(msg) def find_script(script_name, *, exit_on_error=True): """ Find the script. If the input is not a file, then :envvar:`PATH` will be searched. """ if os.path.isfile(script_name): return script_name path = os.getenv('PATH', os.defpath).split(os.pathsep) for dir in path: if dir == '': continue fn = os.path.join(dir, script_name) if os.path.isfile(fn): return fn msg = f'Could not find script {script_name!r}' if exit_on_error: print(msg, file=sys.stderr) raise SystemExit(1) else: raise FileNotFoundError(msg) def _normalize_profiling_targets(targets): """ Normalize the parsed :option:`!--prof-mod` by: * Normalizing file paths with :py:func:`find_script()`, and subsequently to absolute paths. * Splitting non-file paths at commas into (presumably) file paths and/or dotted paths. * Allowing paths specified earlier to be invalidated by an empty string. * Removing duplicates. """ def find(path): try: path = find_script(path, exit_on_error=False) except FileNotFoundError: return None return os.path.abspath(path) results = {} for chunk in targets: if not chunk: results.clear() continue filename = find(chunk) if filename is not None: results.setdefault(filename) continue for subchunk in chunk.split(','): filename = find(subchunk) results.setdefault(subchunk if filename is None else filename) return list(results) class _restore: """ Restore a collection like :py:data:`sys.path` after running code which potentially modifies it. 
""" def __init__(self, obj, getter, setter): self.obj = obj self.setter = setter self.getter = getter self.old = None def __enter__(self): assert self.old is None self.old = self.getter(self.obj) def __exit__(self, *_, **__): self.setter(self.obj, self.old) self.old = None def __call__(self, func): @functools.wraps(func) def wrapper(*args, **kwargs): with self: return func(*args, **kwargs) return wrapper @classmethod def sequence(cls, seq): """ Example ------- >>> l = [1, 2, 3] >>> >>> with _restore.sequence(l): ... print(l) ... l.append(4) ... print(l) ... l[:] = 5, 6 ... print(l) ... [1, 2, 3] [1, 2, 3, 4] [5, 6] >>> l [1, 2, 3] """ def set_list(orig, copy): orig[:] = copy return cls(seq, methodcaller('copy'), set_list) @classmethod def mapping(cls, mpg): """ Example ------- >>> d = {1: 2} >>> >>> with _restore.mapping(d): ... print(d) ... d[2] = 3 ... print(d) ... d.clear() ... d.update({1: 4, 3: 5}) ... print(d) ... {1: 2} {1: 2, 2: 3} {1: 4, 3: 5} >>> d {1: 2} """ def set_mapping(orig, copy): orig.clear() orig.update(copy) return cls(mpg, methodcaller('copy'), set_mapping) @classmethod def instance_dict(cls, obj): """ Example ------- >>> class Obj: ... def __init__(self, x, y): ... self.x, self.y = x, y ... ... def __repr__(self): ... return 'Obj({0.x!r}, {0.y!r})'.format(self) ... >>> >>> obj = Obj(1, 2) >>> >>> with _restore.instance_dict(obj): ... print(obj) ... obj.x, obj.y, obj.z = 4, 5, 6 ... print(obj, obj.z) ... Obj(1, 2) Obj(4, 5) 6 >>> obj Obj(1, 2) >>> hasattr(obj, 'z') False """ return cls.mapping(vars(obj)) def pre_parse_single_arg_directive(args, flag, sep='--'): """ Pre-parse high-priority single-argument directives like :option:`!-m module` to emulate the behavior of :command:`python [...]`. Examples -------- >>> import functools >>> pre_parse = functools.partial(pre_parse_single_arg_directive, ... flag='-m') Normal parsing: >>> pre_parse(['foo', 'bar', 'baz']) (['foo', 'bar', 'baz'], None, []) >>> pre_parse(['foo', 'bar', '-m', 'baz']) (['foo', 'bar'], 'baz', []) >>> pre_parse(['foo', 'bar', '-m', 'baz', 'foobar']) (['foo', 'bar'], 'baz', ['foobar']) Erroneous case: >>> pre_parse(['foo', 'bar', '-m']) Traceback (most recent call last): ... ValueError: argument expected for the -m option Prevent erroneous consumption of the flag by passing it `'--'`: >>> pre_parse(['foo', '--', 'bar', '-m', 'baz']) (['foo', '--'], None, ['bar', '-m', 'baz']) >>> pre_parse(['foo', '-m', 'spam', ... 'eggs', '--', 'bar', '-m', 'baz']) (['foo'], 'spam', ['eggs', '--', 'bar', '-m', 'baz']) """ args = list(args) pre = [] post = [] try: i_sep = args.index(sep) except ValueError: # No such element pass else: pre[:] = args[:i_sep] post[:] = args[i_sep + 1:] pre_pre, arg, pre_post = pre_parse_single_arg_directive(pre, flag) if arg is None: assert not pre_post return pre_pre + [sep], arg, post else: return pre_pre, arg, [*pre_post, sep, *post] try: i_flag = args.index(flag) except ValueError: # No such element return args, None, [] if i_flag == len(args) - 1: # Last element raise ValueError(f'argument expected for the {flag} option') args, thing, post_args = args[:i_flag], args[i_flag + 1], args[i_flag + 2:] return args, thing, post_args def no_op(*_, **__) -> None: pass def _add_core_parser_arguments(parser): """ Add the core kernprof args to a :py:class:`~argparse.ArgumentParser`. 
""" default = get_cli_config('kernprof') add_argument(parser, '-V', '--version', action='version', version=__version__) add_argument(parser, '--config', help='Path to the TOML file, from the ' '`tool.line_profiler.kernprof` table of which to load ' 'defaults for the options. ' f'(Default: {short_string_path(default.path)!r})') add_argument(parser, '--no-config', action='store_const', dest='config', const=False, help='Disable the loading of configuration files other ' 'than the default one') prof_opts = parser.add_argument_group('profiling options') add_argument(prof_opts, '-l', '--line-by-line', action='store_true', help='Use the line-by-line profiler instead of cProfile. ' 'Implies `--builtin`. ' f'(Default: {default.conf_dict["line_by_line"]})') add_argument(prof_opts, '-b', '--builtin', action='store_true', help="Put `profile` in the builtins. " "Use `profile.enable()`/`.disable()` to " "toggle profiling, " "`@profile` to decorate functions, " "or `with profile:` to profile a section of code. " f"(Default: {default.conf_dict['builtin']})") if default.conf_dict['setup']: def_setupfile = repr(default.conf_dict['setup']) else: def_setupfile = 'N/A' add_argument(prof_opts, '-s', '--setup', help='Path to the Python source file containing setup ' 'code to execute before the code to profile. ' f'(Default: {def_setupfile})') if default.conf_dict['prof_mod']: def_prof_mod = repr(default.conf_dict['prof_mod']) else: def_prof_mod = 'N/A' add_argument(prof_opts, '-p', '--prof-mod', action='append', help="List of modules, functions and/or classes to profile " "specified by their name or path. These profiling targets " "can be supplied both as comma-separated items, or " "separately with multiple copies of this flag. Packages " "are automatically recursed into unless they are specified " "with `.__init__`. Adding the current script/module " "profiles the entirety of it. Only works with line " "profiling (`-l`/`--line-by-line`). " f"(Default: {def_prof_mod}; " "pass an empty string to clear the defaults (or any `-p` " "target specified earlier))") add_argument(prof_opts, '--preimports', action='store_true', help="Eagerly import all profiling targets specified via " "`-p` and profile them, instead of only profiling those " "that are directly imported in the profiled code. " "Only works with line profiling (`-l`/`--line-by-line`). " f"(Default: {default.conf_dict['preimports']})") add_argument(prof_opts, '--prof-imports', action='store_true', help="If the script/module profiled is in `--prof-mod`, " "autoprofile all its imports. " "Only works with line profiling (`-l`/`--line-by-line`). " f"(Default: {default.conf_dict['prof_imports']})") out_opts = parser.add_argument_group('output options') if default.conf_dict['outfile']: def_outfile = repr(default.conf_dict['outfile']) else: def_outfile = ( "'.lprof' in line-profiling mode " "(`-l`/`--line-by-line`); " "'.prof' otherwise") add_argument(out_opts, '-o', '--outfile', help=f'Save stats to OUTFILE. (Default: {def_outfile})') add_argument(out_opts, '-v', '--verbose', '--view', action='count', default=default.conf_dict['verbose'], help="Increase verbosity level " f"(default: {default.conf_dict['verbose']}). " "At level 1, view the profiling results in addition to " "saving them; " "at level 2, show other diagnostic info.") add_argument(out_opts, '-q', '--quiet', action='count', default=0, help='Decrease verbosity level ' f"(default: {default.conf_dict['verbose']}). " 'At level -1, disable ' 'helpful messages (e.g. 
"Wrote profile results to <...>"); ' 'at level -2, silence the stdout; ' 'at level -3, silence the stderr.') add_argument(out_opts, '-r', '--rich', action='store_true', help='Use rich formatting if viewing output. ' f'(Default: {default.conf_dict["rich"]})') add_argument(out_opts, '-u', '--unit', type=positive_float, help='Output unit (in seconds) in which ' 'the timing info is displayed. ' f'(Default: {default.conf_dict["unit"]} s)') add_argument(out_opts, '-z', '--skip-zero', action='store_true', help="Hide functions which have not been called. " f"(Default: {default.conf_dict['skip_zero']})") add_argument(out_opts, '--summarize', action='store_true', help='Print a summary of total function time. ' f'(Default: {default.conf_dict["summarize"]})') if default.conf_dict['output_interval']: def_out_int = f'{default.conf_dict["output_interval"]} s' else: def_out_int = '0 s (disabled)' add_argument(out_opts, '-i', '--output-interval', type=int, const=1, nargs='?', help="Enables outputting of cumulative profiling results " "to OUTFILE every OUTPUT_INTERVAL seconds. " "Uses the threading module. " "Minimum value (and the value implied if the bare option " f"is given) is 1 s. (Default: {def_out_int})") def _build_parsers(args=None): parser_kwargs = { 'description': 'Run and profile a python script.', } if args is None: args = sys.argv[1:] # Special cases: `kernprof [...] -m ` or # `kernprof [...] -c