pax_global_header00006660000000000000000000000064146405340150014514gustar00rootroot0000000000000052 comment=b1283b14419969e36329c1ae957509690126b057 ipykernel-6.29.5/000077500000000000000000000000001464053401500136215ustar00rootroot00000000000000ipykernel-6.29.5/.git-blame-ignore-revs000066400000000000000000000001531464053401500177200ustar00rootroot00000000000000# Black formatting: https://github.com/ipython/ipykernel/pull/892 c5bca730f82bbdfb005ab93969ff5a1d028c2341 ipykernel-6.29.5/.github/000077500000000000000000000000001464053401500151615ustar00rootroot00000000000000ipykernel-6.29.5/.github/dependabot.yml000066400000000000000000000005071464053401500200130ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" groups: actions: patterns: - "*" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" groups: actions: patterns: - "*" ipykernel-6.29.5/.github/workflows/000077500000000000000000000000001464053401500172165ustar00rootroot00000000000000ipykernel-6.29.5/.github/workflows/ci.yml000066400000000000000000000135771464053401500203510ustar00rootroot00000000000000name: ipykernel tests on: push: branches: ["main"] pull_request: schedule: - cron: "0 0 * * *" concurrency: group: ci-${{ github.ref }} cancel-in-progress: true defaults: run: shell: bash -eux {0} jobs: build: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] python-version: ["3.8", "3.12"] include: - os: windows-latest python-version: "3.9" - os: ubuntu-latest python-version: "pypy-3.9" - os: macos-latest python-version: "3.10" - os: ubuntu-latest python-version: "3.11" steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run the tests timeout-minutes: 15 if: ${{ !startsWith( matrix.python-version, 'pypy' ) && !startsWith(matrix.os, 'windows') }} run: | hatch run 
cov:test --cov-fail-under 50 || hatch run test:test --lf - name: Run the tests on pypy timeout-minutes: 15 if: ${{ startsWith( matrix.python-version, 'pypy' ) }} run: | hatch run test:nowarn || hatch run test:nowarn --lf - name: Run the tests on Windows timeout-minutes: 15 if: ${{ startsWith(matrix.os, 'windows') }} run: | hatch run cov:nowarn || hatch run test:nowarn --lf - name: Check Launcher run: | pip install . cd $HOME python -m ipykernel_launcher --help - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 coverage: runs-on: ubuntu-latest needs: - build steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1 with: fail_under: 80 test_lint: name: Test Lint runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Linters run: | hatch run typing:test hatch run lint:build pipx run interrogate -vv . pipx run doc8 --max-line-length=200 check_release: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2 with: token: ${{ secrets.GITHUB_TOKEN }} test_docs: runs-on: windows-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Build API docs run: | hatch run docs:api # If this fails run `hatch run docs:api` locally # and commit. 
git status --porcelain git status -s | grep "A" && exit 1 git status -s | grep "M" && exit 1 echo "API docs done" - run: hatch run docs:build test_without_debugpy: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest] python-version: ["3.9"] steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Install the Python dependencies without debugpy run: | pip install .[test] pip uninstall --yes debugpy - name: List installed packages run: | pip freeze - name: Run the tests timeout-minutes: 15 run: pytest -W default -vv || pytest --vv -W default --lf test_miniumum_versions: name: Test Minimum Versions timeout-minutes: 20 runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: dependency_type: minimum - name: List installed packages run: | hatch -v run test:list - name: Run the unit tests run: | hatch -v run test:nowarn || hatch run test:nowarn --lf test_prereleases: name: Test Prereleases runs-on: ubuntu-latest timeout-minutes: 20 steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: dependency_type: pre - name: Run the tests run: | hatch run test:nowarn || hatch run test:nowarn --lf make_sdist: name: Make SDist runs-on: ubuntu-latest timeout-minutes: 20 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1 test_sdist: runs-on: ubuntu-latest needs: [make_sdist] name: Install from SDist and Test timeout-minutes: 20 steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1 link_check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: 
jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1 tests_check: # This job does nothing and is only used for the branch protection if: always() needs: - coverage - test_docs - test_without_debugpy - test_miniumum_versions - test_lint - test_prereleases - check_release - link_check - test_sdist runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} ipykernel-6.29.5/.github/workflows/downstream.yml000066400000000000000000000117511464053401500221310ustar00rootroot00000000000000name: Test downstream projects on: push: branches: ["main"] pull_request: concurrency: group: downstream-${{ github.ref }} cancel-in-progress: true jobs: nbclient: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: nbclient env_values: IPYKERNEL_CELL_NAME=\ ipywidgets: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: ipywidgets test_command: pytest -vv -raXxs -k \"not deprecation_fa_icons and not tooltip_deprecation and not on_submit_deprecation\" -W default --durations 10 --color=yes jupyter_client: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: jupyter_client ipyparallel: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: 
jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: ipyparallel package_spec: '-e ".[test]"' jupyter_kernel_test: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test run: | git clone https://github.com/jupyter/jupyter_kernel_test.git cd jupyter_kernel_test pip install -e ".[test]" python test_ipykernel.py qtconsole: runs-on: ubuntu-latest timeout-minutes: 20 steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.9" architecture: "x64" - name: Install System Packages run: | sudo apt-get update sudo apt-get install -y --no-install-recommends '^libxcb.*-dev' libx11-xcb-dev libglu1-mesa-dev libxrender-dev libxi-dev libxkbcommon-dev libxkbcommon-x11-dev - name: Install qtconsole dependencies shell: bash -l {0} run: | cd ${GITHUB_WORKSPACE}/.. git clone https://github.com/jupyter/qtconsole.git cd qtconsole ${pythonLocation}/bin/python -m pip install -e ".[test]" ${pythonLocation}/bin/python -m pip install pyqt5 - name: Install Ipykernel changes shell: bash -l {0} run: ${pythonLocation}/bin/python -m pip install -e . - name: Test qtconsole shell: bash -l {0} run: | cd ${GITHUB_WORKSPACE}/../qtconsole xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -x -vv -s --full-trace --color=yes qtconsole spyder_kernels: runs-on: ubuntu-latest timeout-minutes: 20 steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.9" architecture: "x64" - name: Install System Packages run: | sudo apt-get update sudo apt-get install -y --no-install-recommends libegl1-mesa - name: Install spyder-kernels dependencies shell: bash -l {0} run: | cd ${GITHUB_WORKSPACE}/.. 
git clone https://github.com/spyder-ide/spyder-kernels.git cd spyder-kernels ${pythonLocation}/bin/python -m pip install -e ".[test]" - name: Install IPykernel changes shell: bash -l {0} run: ${pythonLocation}/bin/python -m pip install -e . - name: Test spyder-kernels shell: bash -l {0} run: | cd ${GITHUB_WORKSPACE}/../spyder-kernels xvfb-run --auto-servernum ${pythonLocation}/bin/python -m pytest -x -vv -s --full-trace --color=yes spyder_kernels downstream_check: # This job does nothing and is only used for the branch protection if: always() needs: - nbclient - ipywidgets - jupyter_client - ipyparallel - jupyter_kernel_test - spyder_kernels - qtconsole runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} ipykernel-6.29.5/.github/workflows/enforce-label.yml000066400000000000000000000006121464053401500224360ustar00rootroot00000000000000name: Enforce PR label concurrency: group: label-${{ github.ref }} cancel-in-progress: true on: pull_request: types: [labeled, unlabeled, opened, edited, synchronize] jobs: enforce-label: runs-on: ubuntu-latest permissions: pull-requests: write steps: - name: enforce-triage-label uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1 ipykernel-6.29.5/.github/workflows/prep-release.yml000066400000000000000000000032311464053401500223240ustar00rootroot00000000000000name: "Step 1: Prep Release" on: workflow_dispatch: inputs: version_spec: description: "New Version Specifier" default: "next" required: false branch: description: "The branch to target" required: false post_version_spec: description: "Post Version Specifier" required: false silent: description: "Set a placeholder in the changelog and don't publish the release." 
required: false type: boolean since: description: "Use PRs with activity since this date or git reference" required: false since_last_stable: description: "Use PRs with activity since the last stable git tag" required: false type: boolean jobs: prep_release: runs-on: ubuntu-latest permissions: contents: write steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Prep Release id: prep-release uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 with: token: ${{ secrets.GITHUB_TOKEN }} version_spec: ${{ github.event.inputs.version_spec }} silent: ${{ github.event.inputs.silent }} post_version_spec: ${{ github.event.inputs.post_version_spec }} target: ${{ github.event.inputs.target }} branch: ${{ github.event.inputs.branch }} since: ${{ github.event.inputs.since }} since_last_stable: ${{ github.event.inputs.since_last_stable }} - name: "** Next Step **" run: | echo "Optional): Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" ipykernel-6.29.5/.github/workflows/publish-changelog.yml000066400000000000000000000016401464053401500233350ustar00rootroot00000000000000name: "Publish Changelog" on: release: types: [published] workflow_dispatch: inputs: branch: description: "The branch to target" required: false jobs: publish_changelog: runs-on: ubuntu-latest environment: release steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: actions/create-github-app-token@v1 id: app-token with: app-id: ${{ vars.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Publish changelog id: publish-changelog uses: jupyter-server/jupyter_releaser/.github/actions/publish-changelog@v2 with: token: ${{ steps.app-token.outputs.token }} branch: ${{ github.event.inputs.branch }} - name: "** Next Step **" run: | echo "Merge the changelog update PR: ${{ steps.publish-changelog.outputs.pr_url }}" 
ipykernel-6.29.5/.github/workflows/publish-release.yml000066400000000000000000000034061464053401500230300ustar00rootroot00000000000000name: "Step 2: Publish Release" on: workflow_dispatch: inputs: branch: description: "The target branch" required: false release_url: description: "The URL of the draft GitHub release" required: false steps_to_skip: description: "Comma separated list of steps to skip" required: false jobs: publish_release: runs-on: ubuntu-latest environment: release permissions: id-token: write steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: actions/create-github-app-token@v1 id: app-token with: app-id: ${{ vars.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Populate Release id: populate-release uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 with: token: ${{ steps.app-token.outputs.token }} branch: ${{ github.event.inputs.branch }} release_url: ${{ github.event.inputs.release_url }} steps_to_skip: ${{ github.event.inputs.steps_to_skip }} - name: Finalize Release id: finalize-release uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 with: token: ${{ steps.app-token.outputs.token }} release_url: ${{ steps.populate-release.outputs.release_url }} - name: "** Next Step **" if: ${{ success() }} run: | echo "Verify the final release" echo ${{ steps.finalize-release.outputs.release_url }} - name: "** Failure Message **" if: ${{ failure() }} run: | echo "Failed to Publish the Draft Release Url:" echo ${{ steps.populate-release.outputs.release_url }} ipykernel-6.29.5/.gitignore000066400000000000000000000006231464053401500156120ustar00rootroot00000000000000MANIFEST build cover dist _build docs/man/*.gz docs/source/api/generated docs/source/config/options docs/source/interactive/magics-generated.txt docs/gh-pages IPython/html/notebook/static/mathjax IPython/html/static/style/*.map *.py[co] __pycache__ *.egg-info *~ *.bak .ipynb_checkpoints .tox .DS_Store \#*# .#* 
.coverage .cache data_kernelspec .pytest_cache # copied changelog file docs/changelog.md ipykernel-6.29.5/.mailmap000066400000000000000000000250661464053401500152530ustar00rootroot00000000000000A. J. Holyoake ajholyoake Aaron Culich Aaron Culich Aron Ahmadia ahmadia Benjamin Ragan-Kelley Benjamin Ragan-Kelley Min RK Benjamin Ragan-Kelley MinRK Barry Wark Barry Wark Ben Edwards Ben Edwards Bradley M. Froehle Bradley M. Froehle Bradley M. Froehle Bradley Froehle Brandon Parsons Brandon Parsons Brian E. Granger Brian Granger Brian E. Granger Brian Granger <> Brian E. Granger bgranger <> Brian E. Granger bgranger Christoph Gohlke cgohlke Cyrille Rossant rossant Damián Avila damianavila Damián Avila damianavila Damon Allen damontallen Darren Dale darren.dale <> Darren Dale Darren Dale <> Dav Clark Dav Clark <> Dav Clark Dav Clark David Hirschfeld dhirschfeld David P. Sanders David P. Sanders David Warde-Farley David Warde-Farley <> Doug Blank Doug Blank Eugene Van den Bulke Eugene Van den Bulke Evan Patterson Evan Patterson Evan Patterson Evan Patterson Evan Patterson epatters Evan Patterson epatters Ernie French Ernie French Ernie French ernie french Ernie French ernop Fernando Perez Fernando Perez Fernando Perez Fernando Perez fperez <> Fernando Perez fptest <> Fernando Perez fptest1 <> Fernando Perez Fernando Perez Fernando Perez Fernando Perez <> Fernando Perez Fernando Perez Frank Murphy Frank Murphy Gabriel Becker gmbecker Gael Varoquaux gael.varoquaux <> Gael Varoquaux gvaroquaux Gael Varoquaux Gael Varoquaux <> Ingolf Becker watercrossing Jake Vanderplas Jake Vanderplas Jakob Gager jakobgager Jakob Gager jakobgager Jakob Gager jakobgager Jason Grout Jason Grout Jason Gors jason gors Jason Gors jgors Jens Hedegaard Nielsen Jens Hedegaard Nielsen Jens Hedegaard Nielsen Jens H Nielsen Jens Hedegaard Nielsen Jens H. 
Nielsen Jez Ng Jez Ng Jonathan Frederic Jonathan Frederic Jonathan Frederic Jonathan Frederic Jonathan Frederic Jonathan Frederic Jonathan Frederic jon Jonathan Frederic U-Jon-PC\Jon Jonathan March Jonathan March Jonathan March jdmarch Jörgen Stenarson Jörgen Stenarson Jörgen Stenarson Jorgen Stenarson Jörgen Stenarson Jorgen Stenarson <> Jörgen Stenarson jstenar Jörgen Stenarson jstenar <> Jörgen Stenarson Jörgen Stenarson Juergen Hasch juhasch Juergen Hasch juhasch Julia Evans Julia Evans Kester Tong KesterTong Kyle Kelley Kyle Kelley Kyle Kelley rgbkrk Laurent Dufréchou Laurent Dufréchou Laurent Dufréchou laurent dufrechou <> Laurent Dufréchou laurent.dufrechou <> Laurent Dufréchou Laurent Dufrechou <> Laurent Dufréchou laurent.dufrechou@gmail.com <> Laurent Dufréchou ldufrechou Lorena Pantano Lorena Luis Pedro Coelho Luis Pedro Coelho Marc Molla marcmolla Martín Gaitán Martín Gaitán Matthias Bussonnier Matthias BUSSONNIER Matthias Bussonnier Bussonnier Matthias Matthias Bussonnier Matthias BUSSONNIER Matthias Bussonnier Matthias Bussonnier Michael Droettboom Michael Droettboom Nicholas Bollweg Nicholas Bollweg (Nick) Nicolas Rougier Nikolay Koldunov Nikolay Koldunov Omar Andrés Zapata Mesa Omar Andres Zapata Mesa Omar Andrés Zapata Mesa Omar Andres Zapata Mesa Pankaj Pandey Pankaj Pandey Pascal Schetelat pascal-schetelat Paul Ivanov Paul Ivanov Pauli Virtanen Pauli Virtanen <> Pauli Virtanen Pauli Virtanen Pierre Gerold Pierre Gerold Pietro Berkes Pietro Berkes Piti Ongmongkolkul piti118 Prabhu Ramachandran Prabhu Ramachandran <> Puneeth Chaganti Puneeth Chaganti Robert Kern rkern <> Robert Kern Robert Kern Robert Kern Robert Kern Robert Kern Robert Kern <> Robert Marchman Robert Marchman Satrajit Ghosh Satrajit Ghosh Satrajit Ghosh Satrajit Ghosh Scott Sanderson Scott Sanderson smithj1 smithj1 smithj1 smithj1 Steven Johnson stevenJohnson Steven Silvester blink1073 S. 
Weber s8weber Stefan van der Walt Stefan van der Walt Silvia Vinyes Silvia Silvia Vinyes silviav12 Sylvain Corlay Sylvain Corlay sylvain.corlay Ted Drain TD22057 Théophile Studer Théophile Studer Thomas Kluyver Thomas Thomas Spura Thomas Spura Timo Paulssen timo vds vds2212 vds vds Ville M. Vainio Ville M. Vainio ville Ville M. Vainio ville Ville M. Vainio vivainio <> Ville M. Vainio Ville M. Vainio Ville M. Vainio Ville M. Vainio Walter Doerwald walter.doerwald <> Walter Doerwald Walter Doerwald <> W. Trevor King W. Trevor King Yoval P. y-p ipykernel-6.29.5/.pre-commit-config.yaml000066400000000000000000000044761464053401500201150ustar00rootroot00000000000000ci: autoupdate_schedule: monthly autoupdate_commit_msg: "chore: update pre-commit hooks" repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-case-conflict - id: check-ast - id: check-docstring-first - id: check-executables-have-shebangs - id: check-added-large-files - id: check-case-conflict - id: check-merge-conflict - id: check-json - id: check-toml - id: check-yaml - id: debug-statements exclude: ipykernel/kernelapp.py - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.27.4 hooks: - id: check-github-workflows - repo: https://github.com/executablebooks/mdformat rev: 0.7.17 hooks: - id: mdformat additional_dependencies: [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] - repo: https://github.com/pre-commit/mirrors-prettier rev: "v4.0.0-alpha.8" hooks: - id: prettier types_or: [yaml, html, json] - repo: https://github.com/pre-commit/mirrors-mypy rev: "v1.8.0" hooks: - id: mypy files: ipykernel stages: [manual] args: ["--install-types", "--non-interactive"] additional_dependencies: [ "traitlets>=5.13", "ipython>=8.16.1", "jupyter_client>=8.5", "appnope", ] - repo: https://github.com/adamchainz/blacken-docs rev: "1.16.0" hooks: - id: blacken-docs additional_dependencies: [black==23.7.0] - repo: 
https://github.com/codespell-project/codespell rev: "v2.2.6" hooks: - id: codespell args: ["-L", "sur,nd"] - repo: https://github.com/pre-commit/pygrep-hooks rev: "v1.10.0" hooks: - id: rst-backticks - id: rst-directive-colons - id: rst-inline-touching-normal - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.2.0 hooks: - id: ruff types_or: [python, jupyter] args: ["--fix", "--show-fixes"] - id: ruff-format types_or: [python, jupyter] - repo: https://github.com/scientific-python/cookie rev: "2024.01.24" hooks: - id: sp-repo-review additional_dependencies: ["repo-review[cli]"] ipykernel-6.29.5/.readthedocs.yaml000066400000000000000000000003561464053401500170540ustar00rootroot00000000000000version: 2 build: os: ubuntu-22.04 tools: python: "3.11" sphinx: configuration: docs/conf.py python: install: # install itself with pip install . - method: pip path: . extra_requirements: - docs ipykernel-6.29.5/CHANGELOG.md000066400000000000000000003557011464053401500154450ustar00rootroot00000000000000# Changes in IPython kernel ## 6.29.5 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.29.4...1e62d48298e353a9879fae99bc752f9bb48797ef)) ### Bugs fixed - Fix use of "%matplotlib osx" [#1237](https://github.com/ipython/ipykernel/pull/1237) ([@ianthomas23](https://github.com/ianthomas23)) ### Maintenance and upkeep improvements - \[6.x\] Update Release Scripts [#1251](https://github.com/ipython/ipykernel/pull/1251) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2024-03-27&to=2024-06-29&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2024-03-27..2024-06-29&type=Issues) | [@ianthomas23](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aianthomas23+updated%3A2024-03-27..2024-06-29&type=Issues) ## 6.29.4 ([Full 
Changelog](https://github.com/ipython/ipykernel/compare/v6.29.3...1cea5332ffc37f32e8232fd2b8b8ddd91b2bbdcf)) ### Bugs fixed - Fix side effect import for pickleutil [#1216](https://github.com/ipython/ipykernel/pull/1216) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Do not import debugger/debugpy unless needed [#1223](https://github.com/ipython/ipykernel/pull/1223) ([@krassowski](https://github.com/krassowski)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2024-02-26&to=2024-03-27&type=c)) [@agronholm](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aagronholm+updated%3A2024-02-26..2024-03-27&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2024-02-26..2024-03-27&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adavidbrochart+updated%3A2024-02-26..2024-03-27&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akrassowski+updated%3A2024-02-26..2024-03-27&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2024-02-26..2024-03-27&type=Issues) ## 6.29.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.29.2...de2221ce155668c343084fde37b77fb6b1671dc9)) ### Enhancements made - Eventloop scheduling improvements for stop_on_error_timeout and schedule_next [#1212](https://github.com/ipython/ipykernel/pull/1212) ([@jdranczewski](https://github.com/jdranczewski)) ### Bugs fixed - Disable frozen modules by default, add a toggle [#1213](https://github.com/ipython/ipykernel/pull/1213) ([@krassowski](https://github.com/krassowski)) ### Maintenance and upkeep improvements - Fix typings and update project urls [#1214](https://github.com/ipython/ipykernel/pull/1214) ([@blink1073](https://github.com/blink1073)) - 
Unpin pytest-asyncio and update ruff config [#1209](https://github.com/ipython/ipykernel/pull/1209) ([@blink1073](https://github.com/blink1073)) ### Documentation improvements - Correct spelling mistake [#1208](https://github.com/ipython/ipykernel/pull/1208) ([@joouha](https://github.com/joouha)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2024-02-07&to=2024-02-26&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2024-02-07..2024-02-26&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2024-02-07..2024-02-26&type=Issues) | [@jdranczewski](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajdranczewski+updated%3A2024-02-07..2024-02-26&type=Issues) | [@joouha](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajoouha+updated%3A2024-02-07..2024-02-26&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akrassowski+updated%3A2024-02-07..2024-02-26&type=Issues) ## 6.29.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.29.1...d45fe71990d26c0bd5b7b3b2a4ccd3d1f6609899)) ### Bugs fixed - Fix: ipykernel_launcher, delete absolute sys.path\[0\] [#1206](https://github.com/ipython/ipykernel/pull/1206) ([@stdll00](https://github.com/stdll00)) ### Maintenance and upkeep improvements - Re-enable skipped debugger test [#1207](https://github.com/ipython/ipykernel/pull/1207) ([@ianthomas23](https://github.com/ianthomas23)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2024-02-06&to=2024-02-07&type=c)) [@ianthomas23](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aianthomas23+updated%3A2024-02-06..2024-02-07&type=Issues) | 
[@stdll00](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Astdll00+updated%3A2024-02-06..2024-02-07&type=Issues) ## 6.29.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.29.0...09c9b2ad9c15202c5d1896ba24ec978b726c073b)) ### Bugs fixed - fix: on exception, return a 0, so that the "sum" still computes [#1204](https://github.com/ipython/ipykernel/pull/1204) ([@petervandenabeele](https://github.com/petervandenabeele)) - Fix handling of "silent" in execute request [#1200](https://github.com/ipython/ipykernel/pull/1200) ([@Haadem](https://github.com/Haadem)) ### Maintenance and upkeep improvements - chore: update pre-commit hooks [#1205](https://github.com/ipython/ipykernel/pull/1205) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Do git ignore of /node_modules/.cache [#1203](https://github.com/ipython/ipykernel/pull/1203) ([@petervandenabeele](https://github.com/petervandenabeele)) - Bump the actions group with 1 update [#1201](https://github.com/ipython/ipykernel/pull/1201) ([@dependabot](https://github.com/dependabot)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2024-01-16&to=2024-02-06&type=c)) [@dependabot](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adependabot+updated%3A2024-01-16..2024-02-06&type=Issues) | [@Haadem](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AHaadem+updated%3A2024-01-16..2024-02-06&type=Issues) | [@petervandenabeele](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apetervandenabeele+updated%3A2024-01-16..2024-02-06&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2024-01-16..2024-02-06&type=Issues) ## 6.29.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.28.0...84955484ec1636ee4c7611471d20df2016b5cb57)) ### Enhancements made - Always set debugger to true in 
kernelspec [#1191](https://github.com/ipython/ipykernel/pull/1191) ([@ianthomas23](https://github.com/ianthomas23)) ### Bugs fixed - Revert "Enable `ProactorEventLoop` on windows for `ipykernel`" [#1194](https://github.com/ipython/ipykernel/pull/1194) ([@blink1073](https://github.com/blink1073)) - Make outputs go to correct cell when generated in threads/asyncio [#1186](https://github.com/ipython/ipykernel/pull/1186) ([@krassowski](https://github.com/krassowski)) ### Maintenance and upkeep improvements - Pin pytest-asyncio to 0.23.2 [#1189](https://github.com/ipython/ipykernel/pull/1189) ([@ianthomas23](https://github.com/ianthomas23)) - chore: update pre-commit hooks [#1187](https://github.com/ipython/ipykernel/pull/1187) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-12-26&to=2024-01-16&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-12-26..2024-01-16&type=Issues) | [@ianthomas23](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aianthomas23+updated%3A2023-12-26..2024-01-16&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akrassowski+updated%3A2023-12-26..2024-01-16&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-12-26..2024-01-16&type=Issues) ## 6.28.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.27.1...de45c7a49e197f0889f867f33f24cce322768a0e)) ### Enhancements made - Enable `ProactorEventLoop` on windows for `ipykernel` [#1184](https://github.com/ipython/ipykernel/pull/1184) ([@NewUserHa](https://github.com/NewUserHa)) - Adds a flag in debug_info for the copyToGlobals support [#1099](https://github.com/ipython/ipykernel/pull/1099) ([@brichet](https://github.com/brichet)) ### Maintenance and 
upkeep improvements - Support python 3.12 [#1185](https://github.com/ipython/ipykernel/pull/1185) ([@blink1073](https://github.com/blink1073)) - Bump actions/setup-python from 4 to 5 [#1181](https://github.com/ipython/ipykernel/pull/1181) ([@dependabot](https://github.com/dependabot)) - chore: update pre-commit hooks [#1179](https://github.com/ipython/ipykernel/pull/1179) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Refactor execute_request to reduce redundancy and improve consistency [#1177](https://github.com/ipython/ipykernel/pull/1177) ([@jjvraw](https://github.com/jjvraw)) ### Documentation improvements - Update pytest commands in README [#1178](https://github.com/ipython/ipykernel/pull/1178) ([@ianthomas23](https://github.com/ianthomas23)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-11-27&to=2023-12-26&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-11-27..2023-12-26&type=Issues) | [@brichet](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Abrichet+updated%3A2023-11-27..2023-12-26&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adependabot+updated%3A2023-11-27..2023-12-26&type=Issues) | [@ianthomas23](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aianthomas23+updated%3A2023-11-27..2023-12-26&type=Issues) | [@jjvraw](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajjvraw+updated%3A2023-11-27..2023-12-26&type=Issues) | [@NewUserHa](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ANewUserHa+updated%3A2023-11-27..2023-12-26&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-11-27..2023-12-26&type=Issues) ## 6.27.1 ([Full 
Changelog](https://github.com/ipython/ipykernel/compare/v6.27.0...f9c517e868462d05d6854204c2ad0a244db1cd19)) ### Bugs fixed - Fix edit magic payload type [#1171](https://github.com/ipython/ipykernel/pull/1171) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-11-21&to=2023-11-27&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-11-21..2023-11-27&type=Issues) ## 6.27.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.26.0...465d34483103d23f471a4795fe5fabb9cf7ac3f5)) ### Enhancements made - Extend argument handling of do_execute with cell metadata [#1169](https://github.com/ipython/ipykernel/pull/1169) ([@jjvraw](https://github.com/jjvraw)) ### Maintenance and upkeep improvements - Update ruff and typings [#1167](https://github.com/ipython/ipykernel/pull/1167) ([@blink1073](https://github.com/blink1073)) - Clean up ruff config [#1165](https://github.com/ipython/ipykernel/pull/1165) ([@blink1073](https://github.com/blink1073)) - chore: update pre-commit hooks [#1164](https://github.com/ipython/ipykernel/pull/1164) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Clean up typing config [#1163](https://github.com/ipython/ipykernel/pull/1163) ([@blink1073](https://github.com/blink1073)) - Update typing for traitlets 5.13 [#1162](https://github.com/ipython/ipykernel/pull/1162) ([@blink1073](https://github.com/blink1073)) - Adopt ruff format [#1161](https://github.com/ipython/ipykernel/pull/1161) ([@blink1073](https://github.com/blink1073)) - Update typing for jupyter_client 8.5 [#1160](https://github.com/ipython/ipykernel/pull/1160) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-10-24&to=2023-11-21&type=c)) 
[@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-10-24..2023-11-21&type=Issues) | [@jjvraw](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajjvraw+updated%3A2023-10-24..2023-11-21&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-10-24..2023-11-21&type=Issues) ## 6.26.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.25.2...966e0a41fc61e7850378ae672e28202eb29b10b0)) ### Maintenance and upkeep improvements - Update lint deps and add more typing [#1156](https://github.com/ipython/ipykernel/pull/1156) ([@blink1073](https://github.com/blink1073)) - Update typing for traitlets 5.11 [#1154](https://github.com/ipython/ipykernel/pull/1154) ([@blink1073](https://github.com/blink1073)) - chore: update pre-commit hooks [#1153](https://github.com/ipython/ipykernel/pull/1153) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Update IPython Typing Usage [#1152](https://github.com/ipython/ipykernel/pull/1152) ([@blink1073](https://github.com/blink1073)) - Update typing [#1150](https://github.com/ipython/ipykernel/pull/1150) ([@blink1073](https://github.com/blink1073)) - Use sp-repo-review [#1146](https://github.com/ipython/ipykernel/pull/1146) ([@blink1073](https://github.com/blink1073)) - Bump actions/checkout from 3 to 4 [#1144](https://github.com/ipython/ipykernel/pull/1144) ([@dependabot](https://github.com/dependabot)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-09-04&to=2023-10-24&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-09-04..2023-10-24&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adependabot+updated%3A2023-09-04..2023-10-24&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-09-04..2023-10-24&type=Issues) ## 6.25.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.25.1...9d3f7aecc4fe68f14ebcc4dad4b65b19676e820e)) ### Bugs fixed - Make iostream shutdown more robust [#1143](https://github.com/ipython/ipykernel/pull/1143) ([@blink1073](https://github.com/blink1073)) - Don't call QApplication.setQuitOnLastWindowClosed(False). [#1142](https://github.com/ipython/ipykernel/pull/1142) ([@anntzer](https://github.com/anntzer)) - Avoid starting IOPub background thread after it's been stopped [#1137](https://github.com/ipython/ipykernel/pull/1137) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-08-07&to=2023-09-04&type=c)) [@anntzer](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aanntzer+updated%3A2023-08-07..2023-09-04&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-08-07..2023-09-04&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2023-08-07..2023-09-04&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2023-08-07..2023-09-04&type=Issues) ## 6.25.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.25.0...18e54f31725d6645dd71a8749c9e1eb28281f804)) ### Bugs fixed - Modifying debugger to return the same breakpoints in 'debugInfo' response as 'setBreakpoints' [#1140](https://github.com/ipython/ipykernel/pull/1140) ([@vaishnavi17](https://github.com/vaishnavi17)) ### Maintenance and upkeep improvements ### Contributors to this release ([GitHub contributors page for this 
release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-07-25&to=2023-08-07&type=c)) [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-07-25..2023-08-07&type=Issues) | [@vaishnavi17](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Avaishnavi17+updated%3A2023-07-25..2023-08-07&type=Issues) ## 6.25.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.24.0...09c3c359addf60e26078207990ad2ca932cf2613)) ### Enhancements made - feat: let display hook handle clear_output [#1135](https://github.com/ipython/ipykernel/pull/1135) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Bugs fixed - Merge connection info into existing connection file if it already exists [#1133](https://github.com/ipython/ipykernel/pull/1133) ([@jasongrout](https://github.com/jasongrout)) ### Maintenance and upkeep improvements - Clean up lint [#1134](https://github.com/ipython/ipykernel/pull/1134) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-07-03&to=2023-07-25&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-07-03..2023-07-25&type=Issues) | [@fecet](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afecet+updated%3A2023-07-03..2023-07-25&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajasongrout+updated%3A2023-07-03..2023-07-25&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2023-07-03..2023-07-25&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-07-03..2023-07-25&type=Issues) ## 6.24.0 ([Full 
Changelog](https://github.com/ipython/ipykernel/compare/v6.23.3...0c1db099a32c4cb28bfb4b3508bb808d8b4092e7)) ### New features added - Let get_parent decide the channel to get parent header [#1128](https://github.com/ipython/ipykernel/pull/1128) ([@dby-tmwctw](https://github.com/dby-tmwctw)) ### Bugs fixed - Bugfix: binary stdout/stderr handling [#1129](https://github.com/ipython/ipykernel/pull/1129) ([@arieleiz](https://github.com/arieleiz)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-06-23&to=2023-07-03&type=c)) [@arieleiz](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aarieleiz+updated%3A2023-06-23..2023-07-03&type=Issues) | [@dby-tmwctw](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adby-tmwctw+updated%3A2023-06-23..2023-07-03&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2023-06-23..2023-07-03&type=Issues) ## 6.23.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.23.2...ea3e6479aca70f87282ec0b60412f2cfba59eb35)) ### Bugs fixed - Check existence of connection_file before writing [#1127](https://github.com/ipython/ipykernel/pull/1127) ([@fecet](https://github.com/fecet)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-06-12&to=2023-06-23&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-06-12..2023-06-23&type=Issues) | [@fecet](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afecet+updated%3A2023-06-12..2023-06-23&type=Issues) ## 6.23.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.23.1...112ca66da0ee8156b983094b2c8e2926ed63cfcb)) ### Bugs fixed - Avoid ResourceWarning on implicitly closed event pipe sockets 
[#1125](https://github.com/ipython/ipykernel/pull/1125) ([@minrk](https://github.com/minrk)) - fix: protect stdout/stderr restoration in `InProcessKernel._redirected_io` [#1122](https://github.com/ipython/ipykernel/pull/1122) ([@charles-cooper](https://github.com/charles-cooper)) ### Maintenance and upkeep improvements ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-05-15&to=2023-06-12&type=c)) [@charles-cooper](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Acharles-cooper+updated%3A2023-05-15..2023-06-12&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2023-05-15..2023-06-12&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-05-15..2023-06-12&type=Issues) ## 6.23.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.23.0...d63c33afb9872f2781997b2428d7e9e0c1d23d41)) ### Bugs fixed - Avoid echoing onto a captured FD [#1111](https://github.com/ipython/ipykernel/pull/1111) ([@minrk](https://github.com/minrk)) ### Maintenance and upkeep improvements - update readthedocs env to 3.11 [#1117](https://github.com/ipython/ipykernel/pull/1117) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-05-08&to=2023-05-15&type=c)) [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2023-05-08..2023-05-15&type=Issues) ## 6.23.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.22.0...3dd6dc9712ff6eb0a53cf79969dcefa0ba1b086e)) ### Enhancements made - Support control\<>iopub messages to e.g. 
unblock comm_msg from command execution [#1114](https://github.com/ipython/ipykernel/pull/1114) ([@tkrabel-db](https://github.com/tkrabel-db)) - Add outstream hook similar to display publisher [#1110](https://github.com/ipython/ipykernel/pull/1110) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Maintenance and upkeep improvements - Use local coverage [#1109](https://github.com/ipython/ipykernel/pull/1109) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-03-20&to=2023-05-08&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-03-20..2023-05-08&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2023-03-20..2023-05-08&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-03-20..2023-05-08&type=Issues) | [@tkrabel-db](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Atkrabel-db+updated%3A2023-03-20..2023-05-08&type=Issues) ## 6.22.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.21.3...e2972d763b5357d4e1cb9b5355593583ca6d5657)) ### Bugs fixed - Deprecate Comm class + Fix incompatibility with ipywidgets [#1097](https://github.com/ipython/ipykernel/pull/1097) ([@martinRenou](https://github.com/martinRenou)) ### Maintenance and upkeep improvements ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-03-06&to=2023-03-20&type=c)) [@martinRenou](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AmartinRenou+updated%3A2023-03-06..2023-03-20&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-03-06..2023-03-20&type=Issues) ## 6.21.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.21.2...e46f75b93c388886f4b6ba32182e29c3cc486984)) ### Bugs fixed - Fix interrupt reply [#1101](https://github.com/ipython/ipykernel/pull/1101) ([@garlandz-db](https://github.com/garlandz-db)) ### Maintenance and upkeep improvements - Update docs link [#1103](https://github.com/ipython/ipykernel/pull/1103) ([@blink1073](https://github.com/blink1073)) - Add license [#1098](https://github.com/ipython/ipykernel/pull/1098) ([@dcsaba89](https://github.com/dcsaba89)) ### Documentation improvements - Update changelog for markdown typo [#1096](https://github.com/ipython/ipykernel/pull/1096) ([@mlucool](https://github.com/mlucool)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-02-13&to=2023-03-06&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-02-13..2023-03-06&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2023-02-13..2023-03-06&type=Issues) | [@dcsaba89](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adcsaba89+updated%3A2023-02-13..2023-03-06&type=Issues) | [@garlandz-db](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Agarlandz-db+updated%3A2023-02-13..2023-03-06&type=Issues) | [@mlucool](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amlucool+updated%3A2023-02-13..2023-03-06&type=Issues) ## 6.21.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.21.1...1a486e06155a4d8e58e716fd40468cb5738ed6bb)) ### Bugs fixed - Un-expose `__file__` and expose `__session__` instead. 
[#1095](https://github.com/ipython/ipykernel/pull/1095) ([@Carreau](https://github.com/Carreau)) ### Maintenance and upkeep improvements - Remove test_enter_eventloop [#1084](https://github.com/ipython/ipykernel/pull/1084) ([@davidbrochart](https://github.com/davidbrochart)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-02-02&to=2023-02-13&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-02-02..2023-02-13&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2023-02-02..2023-02-13&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adavidbrochart+updated%3A2023-02-02..2023-02-13&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2023-02-02..2023-02-13&type=Issues) ## 6.21.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.21.0...ac7776dfd68861ae005e1f142ec87cd6703847ea)) ### Maintenance and upkeep improvements - Restore nest-asyncio for tk loop [#1086](https://github.com/ipython/ipykernel/pull/1086) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-01-30&to=2023-02-02&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-01-30..2023-02-02&type=Issues) ## 6.21.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.20.2...dde698850d865dec89bba2305d1f3dc3134f8413)) ### Enhancements made - Expose session start file in `__file__`. 
[#1078](https://github.com/ipython/ipykernel/pull/1078) ([@Carreau](https://github.com/Carreau)) - Add copy_to_globals debug request handling [#1055](https://github.com/ipython/ipykernel/pull/1055) ([@brichet](https://github.com/brichet)) ### Maintenance and upkeep improvements - Adopt more lint rules [#1082](https://github.com/ipython/ipykernel/pull/1082) ([@blink1073](https://github.com/blink1073)) - Maintenance updates [#1081](https://github.com/ipython/ipykernel/pull/1081) ([@blink1073](https://github.com/blink1073)) - Test spyder kernels [#1080](https://github.com/ipython/ipykernel/pull/1080) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-01-16&to=2023-01-30&type=c)) [@agronholm](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aagronholm+updated%3A2023-01-16..2023-01-30&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2023-01-16..2023-01-30&type=Issues) | [@brichet](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Abrichet+updated%3A2023-01-16..2023-01-30&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2023-01-16..2023-01-30&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2023-01-16..2023-01-30&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2023-01-16..2023-01-30&type=Issues) ## 6.20.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.20.1...203ee2bce0b506257bd561d082e983330d1ebd14)) ### Bugs fixed - Fix Exception in OutStream.close() [#1076](https://github.com/ipython/ipykernel/pull/1076) ([@ilyasher](https://github.com/ilyasher)) ### Contributors to this release ([GitHub contributors page for this 
release](https://github.com/ipython/ipykernel/graphs/contributors?from=2023-01-09&to=2023-01-16&type=c)) [@ilyasher](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ailyasher+updated%3A2023-01-09..2023-01-16&type=Issues) ## 6.20.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.20.0...5f07abc22a1c75672f7bee129505f19c954a7c36)) ### Bugs fixed - Don't raise error when trying to create another Qt app for Qt eventloop [#1071](https://github.com/ipython/ipykernel/pull/1071) ([@ccordoba12](https://github.com/ccordoba12)) ### Maintenance and upkeep improvements - Update CI [#1073](https://github.com/ipython/ipykernel/pull/1073) ([@blink1073](https://github.com/blink1073)) - Fix types and sync lint deps [#1070](https://github.com/ipython/ipykernel/pull/1070) ([@blink1073](https://github.com/blink1073)) ### Documentation improvements - Add api docs [#1067](https://github.com/ipython/ipykernel/pull/1067) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-26&to=2023-01-09&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-12-26..2023-01-09&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2022-12-26..2023-01-09&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-12-26..2023-01-09&type=Issues) ## 6.20.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.19.4...fbea757e117c1d3b0da29a40b4abcf3133a310f4)) ### Enhancements made - ENH: add `%gui` support for Qt6 [#1054](https://github.com/ipython/ipykernel/pull/1054) ([@shaperilio](https://github.com/shaperilio)) ### Maintenance and upkeep improvements - Add more ci checks [#1063](https://github.com/ipython/ipykernel/pull/1063) 
([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-20&to=2022-12-26&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-12-20..2022-12-26&type=Issues) | [@shaperilio](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ashaperilio+updated%3A2022-12-20..2022-12-26&type=Issues) ## 6.19.4 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.19.3...07da48e686b5906525c2a6b8cfc11cd7c3d96a5f)) ### Bugs fixed - Don't pass `None` kernels to logging configurable in `Comm` [#1061](https://github.com/ipython/ipykernel/pull/1061) ([@bollwyvl](https://github.com/bollwyvl)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-19&to=2022-12-20&type=c)) [@bollwyvl](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Abollwyvl+updated%3A2022-12-19..2022-12-20&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-12-19..2022-12-20&type=Issues) ## 6.19.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.19.2...0925d09075280beb23c009ca0d361f73e5402e27)) ### Bugs fixed - format dates as ISO8601 [#1057](https://github.com/ipython/ipykernel/pull/1057) ([@GRcharles](https://github.com/GRcharles)) - Fix comms and add qtconsole downstream test [#1056](https://github.com/ipython/ipykernel/pull/1056) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Fix lint [#1058](https://github.com/ipython/ipykernel/pull/1058) ([@blink1073](https://github.com/blink1073)) - Fix comms and add qtconsole downstream test [#1056](https://github.com/ipython/ipykernel/pull/1056) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release 
([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-08&to=2022-12-19&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-12-08..2022-12-19&type=Issues) | [@GRcharles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AGRcharles+updated%3A2022-12-08..2022-12-19&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-12-08..2022-12-19&type=Issues) ## 6.19.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.19.1...3c125ad5aa27de2ff412d7690de051115f175104)) ### Bugs fixed - Fix error in `%edit` magic [#1053](https://github.com/ipython/ipykernel/pull/1053) ([@ccordoba12](https://github.com/ccordoba12)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-08&to=2022-12-08&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-12-08..2022-12-08&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2022-12-08..2022-12-08&type=Issues) ## 6.19.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.19.0...5e1b155207c506f01df5808b1ba41f868a10f097)) ### Bugs fixed - fix: too many arguments dropped when passing to base comm constructor [#1051](https://github.com/ipython/ipykernel/pull/1051) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-12-07&to=2022-12-08&type=c)) [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-12-07..2022-12-08&type=Issues) ## 6.19.0 ([Full 
Changelog](https://github.com/ipython/ipykernel/compare/v6.18.3...2c80e6c31e4912b2deaf5276b27568ba5088ad97)) ### Bugs fixed - Fix: there can be only one comm_manager [#1049](https://github.com/ipython/ipykernel/pull/1049) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Maintenance and upkeep improvements - Adopt ruff and address lint [#1046](https://github.com/ipython/ipykernel/pull/1046) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-11-29&to=2022-12-07&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-11-29..2022-12-07&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-11-29..2022-12-07&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-11-29..2022-12-07&type=Issues) ## 6.18.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.18.2...c0f5b7e3a5287c288eff477ae70848decf25332d)) ### Bugs fixed - Fix Comm interface for downstream users [#1042](https://github.com/ipython/ipykernel/pull/1042) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-11-29&to=2022-11-29&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-11-29..2022-11-29&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-11-29..2022-11-29&type=Issues) ## 6.18.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.18.1...a38167b1c689130df231fa77d712827bc75a8ba6)) ### Bugs fixed - Configurables needs to be configurable 
[#1037](https://github.com/ipython/ipykernel/pull/1037) ([@Carreau](https://github.com/Carreau)) ### Maintenance and upkeep improvements ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-11-28&to=2022-11-29&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-11-28..2022-11-29&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2022-11-28..2022-11-29&type=Issues) | [@fperez](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afperez+updated%3A2022-11-28..2022-11-29&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-11-28..2022-11-29&type=Issues) ## 6.18.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.18.0...252c406a82fb9bab4071bfbc287b7a24a51752d8)) ### Bugs fixed - fix: use comm package in backwards compatible way [#1028](https://github.com/ipython/ipykernel/pull/1028) ([@maartenbreddels](https://github.com/maartenbreddels)) ### Maintenance and upkeep improvements - Add more testing and deprecate the Gtk event loops [#1036](https://github.com/ipython/ipykernel/pull/1036) ([@blink1073](https://github.com/blink1073)) - More coverage improvements [#1035](https://github.com/ipython/ipykernel/pull/1035) ([@blink1073](https://github.com/blink1073)) - Add more tests [#1034](https://github.com/ipython/ipykernel/pull/1034) ([@blink1073](https://github.com/blink1073)) - Add more kernel tests [#1032](https://github.com/ipython/ipykernel/pull/1032) ([@blink1073](https://github.com/blink1073)) - Add more coverage and add Readme badges [#1031](https://github.com/ipython/ipykernel/pull/1031) ([@blink1073](https://github.com/blink1073)) - Clean up testing and coverage [#1030](https://github.com/ipython/ipykernel/pull/1030) 
([@blink1073](https://github.com/blink1073)) - Use base setup dependency type [#1029](https://github.com/ipython/ipykernel/pull/1029) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-11-21&to=2022-11-28&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-11-21..2022-11-28&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amaartenbreddels+updated%3A2022-11-21..2022-11-28&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AmartinRenou+updated%3A2022-11-21..2022-11-28&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-11-21..2022-11-28&type=Issues) ## 6.18.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.17.1...ce0b6c296bc19223d426892657878f28af0ec206)) ### Enhancements made - Add terminal color support [#1025](https://github.com/ipython/ipykernel/pull/1025) ([@blink1073](https://github.com/blink1073)) - Extract the Comm Python package [#973](https://github.com/ipython/ipykernel/pull/973) ([@martinRenou](https://github.com/martinRenou)) ### Maintenance and upkeep improvements - Add windows coverage and clean up workflows [#1023](https://github.com/ipython/ipykernel/pull/1023) ([@blink1073](https://github.com/blink1073)) - Increase coverage [#1021](https://github.com/ipython/ipykernel/pull/1021) ([@blink1073](https://github.com/blink1073)) - Allow releasing from repo [#1020](https://github.com/ipython/ipykernel/pull/1020) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-11-09&to=2022-11-21&type=c)) 
[@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-11-09..2022-11-21&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AmartinRenou+updated%3A2022-11-09..2022-11-21&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-11-09..2022-11-21&type=Issues) ## 6.17.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.17.0...a06867786eaf0c5d9454d2df61f354c7012a625e)) ### Maintenance and upkeep improvements - Ignore the new Jupyter_core deprecation warning in CI [#1019](https://github.com/ipython/ipykernel/pull/1019) ([@jasongrout](https://github.com/jasongrout)) - Bump actions/checkout from 2 to 3 [#1018](https://github.com/ipython/ipykernel/pull/1018) ([@dependabot](https://github.com/dependabot)) - Add dependabot [#1017](https://github.com/ipython/ipykernel/pull/1017) ([@blink1073](https://github.com/blink1073)) - Add pyupgrade to pre-commit [#1014](https://github.com/ipython/ipykernel/pull/1014) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-10-31&to=2022-11-09&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-10-31..2022-11-09&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adependabot+updated%3A2022-10-31..2022-11-09&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajasongrout+updated%3A2022-10-31..2022-11-09&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-10-31..2022-11-09&type=Issues) ## 6.17.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.16.2...db00586a25a4f047a90386f4947e60ff1dbee2b6)) ### 
Enhancements made - Enable webagg in %matplotlib [#1012](https://github.com/ipython/ipykernel/pull/1012) ([@zhizheng1](https://github.com/zhizheng1)) ### Maintenance and upkeep improvements - Update supported pythons to 3.8-3.11 [#1013](https://github.com/ipython/ipykernel/pull/1013) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-10-25&to=2022-10-31&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-10-25..2022-10-31&type=Issues) | [@zhizheng1](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Azhizheng1+updated%3A2022-10-25..2022-10-31&type=Issues) ## 6.16.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.16.1...99706182995e0fd5431965d4c9d96a8ce7afae12)) ### Maintenance and upkeep improvements - Fix failing test and update matrix [#1010](https://github.com/ipython/ipykernel/pull/1010) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-10-20&to=2022-10-25&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-10-20..2022-10-25&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-10-20..2022-10-25&type=Issues) ## 6.16.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.16.0...632a1ba3892bed707e1ee19fe1344e92475e19c9)) ### Bugs fixed - PR: Destroy tk app to avoid memory leak [#1008](https://github.com/ipython/ipykernel/pull/1008) ([@impact27](https://github.com/impact27)) ### Maintenance and upkeep improvements - Maintenance cleanup [#1006](https://github.com/ipython/ipykernel/pull/1006) ([@blink1073](https://github.com/blink1073)) - Ignore warnings in 
prereleases test [#1002](https://github.com/ipython/ipykernel/pull/1002) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-09-26&to=2022-10-20&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-09-26..2022-10-20&type=Issues) | [@impact27](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aimpact27+updated%3A2022-09-26..2022-10-20&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-09-26..2022-10-20&type=Issues) ## 6.16.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.15.3...92292ad9d844e594e9c97f7f391149023e58de9e)) ### Maintenance and upkeep improvements - Use hatch for version [#998](https://github.com/ipython/ipykernel/pull/998) ([@blink1073](https://github.com/blink1073)) - Add client 8 support [#996](https://github.com/ipython/ipykernel/pull/996) ([@blink1073](https://github.com/blink1073)) - Remove unused manifest file [#994](https://github.com/ipython/ipykernel/pull/994) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-09-13&to=2022-09-26&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-09-13..2022-09-26&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-09-13..2022-09-26&type=Issues) ## 6.15.3 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.15.2...861b1242a7601f1608707ed8bbfb6e801914cb4a)) ### Bugs fixed - PR: Close memory leak [#990](https://github.com/ipython/ipykernel/pull/990) ([@impact27](https://github.com/impact27)) - Handle all possible exceptions 
when trying to import the debugger [#987](https://github.com/ipython/ipykernel/pull/987) ([@JohanMabille](https://github.com/JohanMabille)) ### Maintenance and upkeep improvements - \[pre-commit.ci\] pre-commit autoupdate [#989](https://github.com/ipython/ipykernel/pull/989) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#985](https://github.com/ipython/ipykernel/pull/985) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Add python logo in svg format [#984](https://github.com/ipython/ipykernel/pull/984) ([@steff456](https://github.com/steff456)) - \[pre-commit.ci\] pre-commit autoupdate [#982](https://github.com/ipython/ipykernel/pull/982) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-08-29&to=2022-09-13&type=c)) [@impact27](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aimpact27+updated%3A2022-08-29..2022-09-13&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2022-08-29..2022-09-13&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-08-29..2022-09-13&type=Issues) | [@steff456](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Asteff456+updated%3A2022-08-29..2022-09-13&type=Issues) ## 6.15.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.15.1...724753a185b0954f0e662c226b86dc8146c62bcb)) ### Bugs fixed - `_abort_queues` is no longer async [#942](https://github.com/ipython/ipykernel/pull/942) ([@rhelmot](https://github.com/rhelmot)) ### Maintenance and upkeep improvements - \[pre-commit.ci\] pre-commit autoupdate [#978](https://github.com/ipython/ipykernel/pull/978) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate 
[#977](https://github.com/ipython/ipykernel/pull/977) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#976](https://github.com/ipython/ipykernel/pull/976) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#974](https://github.com/ipython/ipykernel/pull/974) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#971](https://github.com/ipython/ipykernel/pull/971) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#968](https://github.com/ipython/ipykernel/pull/968) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#966](https://github.com/ipython/ipykernel/pull/966) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-07-08&to=2022-08-29&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-07-08..2022-08-29&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-07-08..2022-08-29&type=Issues) | [@rayosborn](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Arayosborn+updated%3A2022-07-08..2022-08-29&type=Issues) | [@rhelmot](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Arhelmot+updated%3A2022-07-08..2022-08-29&type=Issues) ## 6.15.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.15.0...d9a8578ab2864b4ee636b12252e04a9b70047d0b)) ### Bugs fixed - Fix inclusion of launcher file and check in CI [#964](https://github.com/ipython/ipykernel/pull/964) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - \[pre-commit.ci\] pre-commit autoupdate 
[#962](https://github.com/ipython/ipykernel/pull/962) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#961](https://github.com/ipython/ipykernel/pull/961) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#960](https://github.com/ipython/ipykernel/pull/960) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-06-15&to=2022-07-08&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-06-15..2022-07-08&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-06-15..2022-07-08&type=Issues) ## 6.15.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.14.0...5c1adcae929d8b4d28bf2b7849fe0e220c729b26)) ### Bugs fixed - Fix compatibility with tornado 6.2 beta [#956](https://github.com/ipython/ipykernel/pull/956) ([@minrk](https://github.com/minrk)) ### Maintenance and upkeep improvements - Back to top-level tornado IOLoop [#958](https://github.com/ipython/ipykernel/pull/958) ([@minrk](https://github.com/minrk)) - Explicitly require pyzmq >= 17 [#957](https://github.com/ipython/ipykernel/pull/957) ([@minrk](https://github.com/minrk)) - \[pre-commit.ci\] pre-commit autoupdate [#954](https://github.com/ipython/ipykernel/pull/954) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-06-13&to=2022-06-15&type=c)) [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2022-06-13..2022-06-15&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-06-13..2022-06-15&type=Issues) ## 6.14.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.13.1...269569787419a47da562ed69fbe6363619f3b7e5)) ### Enhancements made - Add cpu_count to the usage_reply [#952](https://github.com/ipython/ipykernel/pull/952) ([@echarles](https://github.com/echarles)) ### Bugs fixed - use pss memory info type if available for the resource usage reply [#948](https://github.com/ipython/ipykernel/pull/948) ([@echarles](https://github.com/echarles)) - Ensure psutil for the process is accurate [#937](https://github.com/ipython/ipykernel/pull/937) ([@echarles](https://github.com/echarles)) ### Maintenance and upkeep improvements - Fix sphinx 5.0 support [#951](https://github.com/ipython/ipykernel/pull/951) ([@blink1073](https://github.com/blink1073)) - \[pre-commit.ci\] pre-commit autoupdate [#950](https://github.com/ipython/ipykernel/pull/950) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-06-06&to=2022-06-13&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-06-06..2022-06-13&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-06-06..2022-06-13&type=Issues) | [@nishikantparmariam](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Anishikantparmariam+updated%3A2022-06-06..2022-06-13&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-06-06..2022-06-13&type=Issues) ## 6.13.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.13.0...82179ef8ae4e9bdcd99a4a4c3807e8f773f1e92c)) ### Bugs fixed - Fix richInspectVariables 
[#943](https://github.com/ipython/ipykernel/pull/943) ([@davidbrochart](https://github.com/davidbrochart)) - Force debugger metadata in built wheel [#941](https://github.com/ipython/ipykernel/pull/941) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - \[pre-commit.ci\] pre-commit autoupdate [#945](https://github.com/ipython/ipykernel/pull/945) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Clean up typings [#939](https://github.com/ipython/ipykernel/pull/939) ([@blink1073](https://github.com/blink1073)) - \[pre-commit.ci\] pre-commit autoupdate [#938](https://github.com/ipython/ipykernel/pull/938) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Clean up types [#933](https://github.com/ipython/ipykernel/pull/933) ([@blink1073](https://github.com/blink1073)) - \[pre-commit.ci\] pre-commit autoupdate [#932](https://github.com/ipython/ipykernel/pull/932) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Switch to hatch backend [#931](https://github.com/ipython/ipykernel/pull/931) ([@blink1073](https://github.com/blink1073)) - \[pre-commit.ci\] pre-commit autoupdate [#928](https://github.com/ipython/ipykernel/pull/928) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#926](https://github.com/ipython/ipykernel/pull/926) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Allow enforce PR label workflow to add labels [#921](https://github.com/ipython/ipykernel/pull/921) ([@blink1073](https://github.com/blink1073)) - \[pre-commit.ci\] pre-commit autoupdate [#920](https://github.com/ipython/ipykernel/pull/920) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#919](https://github.com/ipython/ipykernel/pull/919) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - \[pre-commit.ci\] pre-commit autoupdate [#917](https://github.com/ipython/ipykernel/pull/917) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### 
Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-04-11&to=2022-06-06&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-04-11..2022-06-06&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adavidbrochart+updated%3A2022-04-11..2022-06-06&type=Issues) | [@fabioz](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afabioz+updated%3A2022-04-11..2022-06-06&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afcollonval+updated%3A2022-04-11..2022-06-06&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-04-11..2022-06-06&type=Issues) ## 6.13.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.12.1...05c6e655e497a944fd738d9b744fad90bc78b70a)) ### Enhancements made - Add the PID to the resource usage reply [#908](https://github.com/ipython/ipykernel/pull/908) ([@echarles](https://github.com/echarles)) ### Bugs fixed - Fix qtconsole spawn [#915](https://github.com/ipython/ipykernel/pull/915) ([@andia89](https://github.com/andia89)) ### Maintenance and upkeep improvements - Add basic mypy support [#913](https://github.com/ipython/ipykernel/pull/913) ([@blink1073](https://github.com/blink1073)) - Clean up pre-commit [#911](https://github.com/ipython/ipykernel/pull/911) ([@blink1073](https://github.com/blink1073)) - Update setup.py [#909](https://github.com/ipython/ipykernel/pull/909) ([@tlinhart](https://github.com/tlinhart)) - \[pre-commit.ci\] pre-commit autoupdate [#906](https://github.com/ipython/ipykernel/pull/906) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this 
release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-04-04&to=2022-04-11&type=c)) [@andia89](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aandia89+updated%3A2022-04-04..2022-04-11&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-04-04..2022-04-11&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-04-04..2022-04-11&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ameeseeksdev+updated%3A2022-04-04..2022-04-11&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apre-commit-ci+updated%3A2022-04-04..2022-04-11&type=Issues) | [@tlinhart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Atlinhart+updated%3A2022-04-04..2022-04-11&type=Issues) ## 6.12.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.12.0...3a04ea3fa50d01bcc09f10e3de8bb5570c2cd619)) ### Maintenance and upkeep improvements - Clean up test deps and test setup [#904](https://github.com/ipython/ipykernel/pull/904) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-04-04&to=2022-04-04&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-04-04..2022-04-04&type=Issues) ## 6.12.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.11.0...70073edbdae17be396093be96bf880da069e7e52)) ### Enhancements made - use packaging instead of pkg_resources to parse versions [#900](https://github.com/ipython/ipykernel/pull/900) ([@minrk](https://github.com/minrk)) ### Bugs fixed - Make cell_id optional [#902](https://github.com/ipython/ipykernel/pull/902) ([@blink1073](https://github.com/blink1073)) - Do not try to send on 
iostream if closed [#899](https://github.com/ipython/ipykernel/pull/899) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-03-31&to=2022-04-04&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-03-31..2022-04-04&type=Issues) | [@bollwyvl](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Abollwyvl+updated%3A2022-03-31..2022-04-04&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2022-03-31..2022-04-04&type=Issues) ## 6.11.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.10.0...d8520c1c68e0e1c401ecc36e962cf369366c3707)) ### Enhancements made - Include method signatures in experimental completion results [#895](https://github.com/ipython/ipykernel/pull/895) ([@MrBago](https://github.com/MrBago)) - Try to pass cell id to executing kernel. 
[#886](https://github.com/ipython/ipykernel/pull/886) ([@Carreau](https://github.com/Carreau)) ### Maintenance and upkeep improvements - Handle warnings in tests [#896](https://github.com/ipython/ipykernel/pull/896) ([@blink1073](https://github.com/blink1073)) - Run flake and remove deprecated import [#894](https://github.com/ipython/ipykernel/pull/894) ([@blink1073](https://github.com/blink1073)) - Add ignore-revs file [#893](https://github.com/ipython/ipykernel/pull/893) ([@blink1073](https://github.com/blink1073)) - Autoformat with black and isort [#892](https://github.com/ipython/ipykernel/pull/892) ([@blink1073](https://github.com/blink1073)) - Add pytest opts and pre-commit [#889](https://github.com/ipython/ipykernel/pull/889) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-03-28&to=2022-03-31&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-03-28..2022-03-31&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2022-03-28..2022-03-31&type=Issues) | [@MrBago](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AMrBago+updated%3A2022-03-28..2022-03-31&type=Issues) | [@SylvainCorlay](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ASylvainCorlay+updated%3A2022-03-28..2022-03-31&type=Issues) ## 6.10.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.9.2...3059fd97b7ccbd72e778f123bfb0ad92e7d9e9c8)) ### Enhancements made - Improve performance of stderr and stdout stream buffer [#888](https://github.com/ipython/ipykernel/pull/888) ([@MrBago](https://github.com/MrBago)) ### Bugs fixed - Check if the current thread is the io thread [#884](https://github.com/ipython/ipykernel/pull/884) ([@jamadeo](https://github.com/jamadeo)) ### Maintenance and upkeep improvements 
- More CI cleanup [#887](https://github.com/ipython/ipykernel/pull/887) ([@blink1073](https://github.com/blink1073)) - CI cleanup [#885](https://github.com/ipython/ipykernel/pull/885) ([@blink1073](https://github.com/blink1073)) ### Documentation improvements - Add precision about subprocess stdout/stderr capturing [#883](https://github.com/ipython/ipykernel/pull/883) ([@lesteve](https://github.com/lesteve)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-03-14&to=2022-03-28&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-03-14..2022-03-28&type=Issues) | [@jamadeo](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajamadeo+updated%3A2022-03-14..2022-03-28&type=Issues) | [@lesteve](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Alesteve+updated%3A2022-03-14..2022-03-28&type=Issues) | [@MrBago](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AMrBago+updated%3A2022-03-14..2022-03-28&type=Issues) | [@SylvainCorlay](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ASylvainCorlay+updated%3A2022-03-14..2022-03-28&type=Issues) ## 6.9.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.9.1...d6744f9e423dacc6b317b1d31805304e89cbec5d)) ### Bugs fixed - Catch error when shutting down kernel from the control channel [#877](https://github.com/ipython/ipykernel/pull/877) ([@ccordoba12](https://github.com/ccordoba12)) - Only kill children in process group at shutdown [#874](https://github.com/ipython/ipykernel/pull/874) ([@minrk](https://github.com/minrk)) - BUG: Kill subprocesses on shutdown. 
[#869](https://github.com/ipython/ipykernel/pull/869) ([@Carreau](https://github.com/Carreau)) ### Maintenance and upkeep improvements - Clean up CI [#871](https://github.com/ipython/ipykernel/pull/871) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-02-15&to=2022-03-14&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-02-15..2022-03-14&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2022-02-15..2022-03-14&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2022-02-15..2022-03-14&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-02-15..2022-03-14&type=Issues) | [@fabioz](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afabioz+updated%3A2022-02-15..2022-03-14&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2022-02-15..2022-03-14&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Avidartf+updated%3A2022-02-15..2022-03-14&type=Issues) ## 6.9.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.9.0...c27e5b95c3d104d9fb6cae3375aec0e98974dcff)) ### Bugs fixed - Add hostname to the usage reply [#865](https://github.com/ipython/ipykernel/pull/865) ([@echarles](https://github.com/echarles)) - Enable standard library debugging via config [#863](https://github.com/ipython/ipykernel/pull/863) ([@echarles](https://github.com/echarles)) - process_one only accepts coroutines for dispatch [#861](https://github.com/ipython/ipykernel/pull/861) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this 
release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-02-07&to=2022-02-15&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-02-07..2022-02-15&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-02-07..2022-02-15&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2022-02-07..2022-02-15&type=Issues) ## 6.9.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.8.0...7a229c6c83d44d315f637ef63159a43c64ec73d6)) ### Bugs fixed - Fixed event forwarding [#855](https://github.com/ipython/ipykernel/pull/855) ([@JohanMabille](https://github.com/JohanMabille)) - use message queue for abort_queues [#853](https://github.com/ipython/ipykernel/pull/853) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-02-01&to=2022-02-07&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-02-01..2022-02-07&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2022-02-01..2022-02-07&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2022-02-01..2022-02-07&type=Issues) ## 6.8.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.7.0...4e775b70e7e1be7e96fe7c3c747f21f3d93f0181)) ### Enhancements made - Add support for the debug modules request [#816](https://github.com/ipython/ipykernel/pull/816) ([@echarles](https://github.com/echarles)) ### Bugs fixed - Handle all threads stopped correctly [#849](https://github.com/ipython/ipykernel/pull/849) ([@JohanMabille](https://github.com/JohanMabille)) - Fix the debug modules model 
[#848](https://github.com/ipython/ipykernel/pull/848) ([@echarles](https://github.com/echarles)) - Handled AllThreadsContinued and workaround for wrong threadId in cont… [#844](https://github.com/ipython/ipykernel/pull/844) ([@JohanMabille](https://github.com/JohanMabille)) ### Maintenance and upkeep improvements - Cancel duplicate runs [#850](https://github.com/ipython/ipykernel/pull/850) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-01-13&to=2022-02-01&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2022-01-13..2022-02-01&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-01-13..2022-02-01&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2022-01-13..2022-02-01&type=Issues) ## 6.7.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.6.1...0be80cbc81927f4fb20343840bf5834b48884717)) ### Enhancements made - Add usage_request and usage_reply based on psutil [#805](https://github.com/ipython/ipykernel/pull/805) ([@echarles](https://github.com/echarles)) ### Bugs fixed - Removed DebugStdLib from arguments of attach [#839](https://github.com/ipython/ipykernel/pull/839) ([@JohanMabille](https://github.com/JohanMabille)) - Normalize debugger temp file paths on Windows [#838](https://github.com/ipython/ipykernel/pull/838) ([@kycutler](https://github.com/kycutler)) - Breakpoint in cell with leading empty lines may be ignored [#829](https://github.com/ipython/ipykernel/pull/829) ([@fcollonval](https://github.com/fcollonval)) ### Maintenance and upkeep improvements - Skip on PyPy, seem to fail. 
[#837](https://github.com/ipython/ipykernel/pull/837) ([@Carreau](https://github.com/Carreau)) - Remove pipx to fix conflicts [#835](https://github.com/ipython/ipykernel/pull/835) ([@Carreau](https://github.com/Carreau)) - Remove impossible skipif. [#834](https://github.com/ipython/ipykernel/pull/834) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2022-01-03&to=2022-01-13&type=c)) [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2022-01-03..2022-01-13&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2022-01-03..2022-01-13&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afcollonval+updated%3A2022-01-03..2022-01-13&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2022-01-03..2022-01-13&type=Issues) | [@kycutler](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akycutler+updated%3A2022-01-03..2022-01-13&type=Issues) ## 6.6.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.6.0...bdce14b32ca8cc8f4b1635ea47200f0828ec1e05)) ### Bugs fixed - PR: do_one_iteration is a coroutine [#830](https://github.com/ipython/ipykernel/pull/830) ([@impact27](https://github.com/impact27)) ### Maintenance and upkeep improvements - Clean python 2 artifacts. 
Fix #826 [#827](https://github.com/ipython/ipykernel/pull/827) ([@penguinolog](https://github.com/penguinolog)) ### Documentation improvements - Fix title position in changelog [#828](https://github.com/ipython/ipykernel/pull/828) ([@fcollonval](https://github.com/fcollonval)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-12-01&to=2022-01-03&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2021-12-01..2022-01-03&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2021-12-01..2022-01-03&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afcollonval+updated%3A2021-12-01..2022-01-03&type=Issues) | [@impact27](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aimpact27+updated%3A2021-12-01..2022-01-03&type=Issues) | [@ivanov](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aivanov+updated%3A2021-12-01..2022-01-03&type=Issues) | [@penguinolog](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apenguinolog+updated%3A2021-12-01..2022-01-03&type=Issues) ## 6.6.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.5.1...9566304175d844c23a1f2b1d70c10df475ed2868)) ### Enhancements made - Set `debugOptions` for breakpoints in python standard library source [#812](https://github.com/ipython/ipykernel/pull/812) ([@echarles](https://github.com/echarles)) - Send `omit_sections` to IPython to choose which sections of documentation you do not want [#809](https://github.com/ipython/ipykernel/pull/809) ([@fasiha](https://github.com/fasiha)) ### Bugs fixed - Added missing `exceptionPaths` field to `debugInfo` reply [#814](https://github.com/ipython/ipykernel/pull/814) ([@JohanMabille](https://github.com/JohanMabille)) ### Maintenance and upkeep improvements - Test 
`jupyter_kernel_test` as downstream [#813](https://github.com/ipython/ipykernel/pull/813) ([@blink1073](https://github.com/blink1073)) - Remove `nose` dependency [#808](https://github.com/ipython/ipykernel/pull/808) ([@Kojoley](https://github.com/Kojoley)) - Add explicit encoding to open calls in debugger [#807](https://github.com/ipython/ipykernel/pull/807) ([@dlukes](https://github.com/dlukes)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-11-18&to=2021-12-01&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2021-11-18..2021-12-01&type=Issues) | [@dlukes](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adlukes+updated%3A2021-11-18..2021-12-01&type=Issues) | [@echarles](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aecharles+updated%3A2021-11-18..2021-12-01&type=Issues) | [@fasiha](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afasiha+updated%3A2021-11-18..2021-12-01&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2021-11-18..2021-12-01&type=Issues) | [@Kojoley](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AKojoley+updated%3A2021-11-18..2021-12-01&type=Issues) ## 6.5.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.5.0...1ef2017781435d54348fbb170b8c5d096e3e1351)) ### Bugs fixed - Fix the temp file name created by the debugger [#801](https://github.com/ipython/ipykernel/pull/801) ([@eastonsuo](https://github.com/eastonsuo)) ### Maintenance and upkeep improvements - Enforce labels on PRs [#803](https://github.com/ipython/ipykernel/pull/803) ([@blink1073](https://github.com/blink1073)) - Unpin `IPython`, and remove some dependencies on it. 
[#796](https://github.com/ipython/ipykernel/pull/796) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-11-01&to=2021-11-18&type=c)) [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2021-11-01..2021-11-18&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2021-11-01..2021-11-18&type=Issues) | [@eastonsuo](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aeastonsuo+updated%3A2021-11-01..2021-11-18&type=Issues) ## 6.5.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.4.2...e8d4f66e0f65e284aab444c53e9812dbbc814cb2)) ### Bugs fixed - Fix rich variables inspection [#793](https://github.com/ipython/ipykernel/pull/793) ([@fcollonval](https://github.com/fcollonval)) - Do not call `setQuitOnLastWindowClosed()` on a `QCoreApplication` [#791](https://github.com/ipython/ipykernel/pull/791) ([@stukowski](https://github.com/stukowski)) ### Maintenance and upkeep improvements - Drop `ipython_genutils` requirement [#792](https://github.com/ipython/ipykernel/pull/792) ([@penguinolog](https://github.com/penguinolog)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-10-20&to=2021-11-01&type=c)) [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2021-10-20..2021-11-01&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afcollonval+updated%3A2021-10-20..2021-11-01&type=Issues) | [@penguinolog](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apenguinolog+updated%3A2021-10-20..2021-11-01&type=Issues) | 
[@stukowski](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Astukowski+updated%3A2021-10-20..2021-11-01&type=Issues) ## 6.4.2 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.4.1...231fd3c65f8a15e9e015546c0a6846e22df9ba2a)) ### Enhancements made - Enabled rich rendering of variables in the debugger [#787](https://github.com/ipython/ipykernel/pull/787) ([@JohanMabille](https://github.com/JohanMabille)) ### Bugs fixed - Remove setting of the eventloop function in the InProcessKernel [#781](https://github.com/ipython/ipykernel/pull/781) ([@rayosborn](https://github.com/rayosborn)) ### Maintenance and upkeep improvements - Add python version classifiers [#783](https://github.com/ipython/ipykernel/pull/783) ([@emuccino](https://github.com/emuccino)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-09-10&to=2021-10-19&type=c)) [@emuccino](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aemuccino+updated%3A2021-09-10..2021-10-19&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2021-09-10..2021-10-19&type=Issues) | [@rayosborn](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Arayosborn+updated%3A2021-09-10..2021-10-19&type=Issues) ## 6.4.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.4.0...4da7623c1ae733f32c0792d70e7af283a7b19d22)) ### Merged PRs - debugpy is now a build requirement [#773](https://github.com/ipython/ipykernel/pull/773) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-09-09&to=2021-09-10&type=c)) [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2021-09-09..2021-09-10&type=Issues) ## 6.4.0 ([Full 
Changelog](https://github.com/ipython/ipykernel/compare/v6.3.1...1ba6b48a97877ff7a564af32c531618efb7d2a57)) ### Enhancements made - Make `json_clean` a no-op for `jupyter-client` >= 7 [#708](https://github.com/ipython/ipykernel/pull/708) ([@martinRenou](https://github.com/martinRenou)) ### Bugs fixed - Do not assume kernels have loops [#766](https://github.com/ipython/ipykernel/pull/766) ([@Carreau](https://github.com/Carreau)) - Fix undefined variable [#765](https://github.com/ipython/ipykernel/pull/765) ([@martinRenou](https://github.com/martinRenou)) ### Maintenance and upkeep improvements - Make `ipykernel` work without `debugpy` [#767](https://github.com/ipython/ipykernel/pull/767) ([@frenzymadness](https://github.com/frenzymadness)) - Stop using deprecated `recv_multipart` when using in-process socket. [#762](https://github.com/ipython/ipykernel/pull/762) ([@Carreau](https://github.com/Carreau)) - Update some warnings with instructions and version number. [#761](https://github.com/ipython/ipykernel/pull/761) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-08-31&to=2021-09-09&type=c)) [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2021-08-31..2021-09-09&type=Issues) | [@frenzymadness](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Afrenzymadness+updated%3A2021-08-31..2021-09-09&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AmartinRenou+updated%3A2021-08-31..2021-09-09&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2021-08-31..2021-09-09&type=Issues) ## 6.3 ## 6.3.1 ([Full Changelog](https://github.com/ipython/ipykernel/compare/v6.3.0...0b4a8eaa080fc11e240ada9c44c95841463da58c)) ### Merged PRs - Add dependency on IPython genutils. 
[#756](https://github.com/ipython/ipykernel/pull/756) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-08-30&to=2021-08-31&type=c)) [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2021-08-30..2021-08-31&type=Issues) ## 6.3.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/6.2.0...07af2633ca88eda583e13649279a5b98473618a2)) ### Enhancements made - Add deep variable inspection [#753](https://github.com/ipython/ipykernel/pull/753) ([@JohanMabille](https://github.com/JohanMabille)) - Add `IPKernelApp.capture_fd_output` config to disable FD-level capture [#752](https://github.com/ipython/ipykernel/pull/752) ([@minrk](https://github.com/minrk)) ### Maintenance and upkeep improvements - Remove more `nose` test references [#750](https://github.com/ipython/ipykernel/pull/750) ([@blink1073](https://github.com/blink1073)) - Remove `nose` `skipIf` in favor of `pytest` [#748](https://github.com/ipython/ipykernel/pull/748) ([@Carreau](https://github.com/Carreau)) - Remove more `nose` [#747](https://github.com/ipython/ipykernel/pull/747) ([@Carreau](https://github.com/Carreau)) - Set up release helper plumbing [#745](https://github.com/ipython/ipykernel/pull/745) ([@afshin](https://github.com/afshin)) - Test downstream projects [#635](https://github.com/ipython/ipykernel/pull/635) ([@davidbrochart](https://github.com/davidbrochart)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-08-16&to=2021-08-30&type=c)) [@afshin](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aafshin+updated%3A2021-08-16..2021-08-30&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2021-08-16..2021-08-30&type=Issues) | 
[@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2021-08-16..2021-08-30&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2021-08-16..2021-08-30&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adavidbrochart+updated%3A2021-08-16..2021-08-30&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2021-08-16..2021-08-30&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akevin-bates+updated%3A2021-08-16..2021-08-30&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2021-08-16..2021-08-30&type=Issues) | [@SylvainCorlay](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ASylvainCorlay+updated%3A2021-08-16..2021-08-30&type=Issues) ## 6.2 ## 6.2.0 ### Enhancements made - Add Support for Message Based Interrupt [#741](https://github.com/ipython/ipykernel/pull/741) ([@afshin](https://github.com/afshin)) ### Maintenance and upkeep improvements - Remove some more dependency on nose/iptest [#743](https://github.com/ipython/ipykernel/pull/743) ([@Carreau](https://github.com/Carreau)) - Remove block param from get_msg() [#736](https://github.com/ipython/ipykernel/pull/736) ([@davidbrochart](https://github.com/davidbrochart)) ## 6.1 ## 6.1.0 ### Enhancements made - Implemented `richInspectVariable` request handler [#734](https://github.com/ipython/ipykernel/pull/734) ([@JohanMabille](https://github.com/JohanMabille)) ### Maintenance and upkeep improvements - Bump `importlib-metadata` limit for `python<3.8` [#738](https://github.com/ipython/ipykernel/pull/738) ([@ltalirz](https://github.com/ltalirz)) ### Bug Fixes - Fix exception raised by `OutStream.write` [#726](https://github.com/ipython/ipykernel/pull/726) 
([@SimonKrughoff](https://github.com/SimonKrughoff)) ## 6.0 ## 6.0.3 - `KernelApp`: rename ports variable to avoid override [#731](https://github.com/ipython/ipykernel/pull/731) ([@amorenoz](https://github.com/amorenoz)) ## 6.0.2 ### Bugs fixed - Add watchfd keyword to InProcessKernel OutStream initialization [#727](https://github.com/ipython/ipykernel/pull/727) ([@rayosborn](https://github.com/rayosborn)) - Fix typo in eventloops.py [#711](https://github.com/ipython/ipykernel/pull/711) ([@selasley](https://github.com/selasley)) - \[bugfix\] fix in setup.py (comma before appnope) [#709](https://github.com/ipython/ipykernel/pull/709) ([@jstriebel](https://github.com/jstriebel)) ### Maintenance and upkeep improvements - Add upper bound to dependency versions. [#714](https://github.com/ipython/ipykernel/pull/714) ([@martinRenou](https://github.com/martinRenou)) - Replace non-existing function. [#723](https://github.com/ipython/ipykernel/pull/723) ([@Carreau](https://github.com/Carreau)) - Remove unused variables [#722](https://github.com/ipython/ipykernel/pull/722) ([@Carreau](https://github.com/Carreau)) - Do not use bare except [#721](https://github.com/ipython/ipykernel/pull/721) ([@Carreau](https://github.com/Carreau)) - misc whitespace and line too long [#720](https://github.com/ipython/ipykernel/pull/720) ([@Carreau](https://github.com/Carreau)) - Formatting: remove semicolon [#719](https://github.com/ipython/ipykernel/pull/719) ([@Carreau](https://github.com/Carreau)) - Clean most flake8 unused import warnings. 
[#718](https://github.com/ipython/ipykernel/pull/718) ([@Carreau](https://github.com/Carreau)) - Minimal flake8 config [#717](https://github.com/ipython/ipykernel/pull/717) ([@Carreau](https://github.com/Carreau)) - Remove CachingCompiler's filename_mapper [#710](https://github.com/ipython/ipykernel/pull/710) ([@martinRenou](https://github.com/martinRenou)) ## 6.0.1 - Fix Tk and asyncio event loops [#704](https://github.com/ipython/ipykernel/pull/704) ([@ccordoba12](https://github.com/ccordoba12)) - Stringify variables that are not json serializable in inspectVariable [#702](https://github.com/ipython/ipykernel/pull/702) ([@JohanMabille](https://github.com/JohanMabille)) ## 6.0.0 ([Full Changelog](https://github.com/ipython/ipykernel/compare/aba2179420a3fa81ee6b8a13f928bf9e5ce50716...6d04ad2bdccd0dc0daf20f8d53555174b5fefc7b)) IPykernel 6.0 is the first major release in about two years, that brings a number of improvements, code cleanup, and new features to IPython. You should be able to view all closed issues and merged Pull Requests for this milestone [on GitHub](https://github.com/ipython/ipykernel/issues?q=milestone%3A6.0+is%3Aclosed+), as for any major releases, we advise greater care when updating than for a minor release and welcome any feedback (~50 Pull-requests). IPykernel 6 should contain all changes of the 5.x series, in addition to the following non-exhaustive changes. - Support for the debugger protocol, when using `JupyterLab`, `RetroLab` or any frontend supporting the debugger protocol you should have access to the debugger functionalities. - The control channel on IPykernel 6.0 is run in a separate thread, this may change the order in which messages are processed, though this change was necessary to accommodate the debugger. - We now have a new dependency: `matplotlib-inline`, this helps to separate the circular dependency between IPython/IPykernel and matplotlib. 
- On POSIX systems, all outputs to stdout/stderr should now be captured, including subprocesses and output of compiled libraries (blas, lapack....). In the notebook server, some outputs that would previously go to the notebook logs will now appear both in the notebook logs and in the notebook outputs. In terminal frontends like Jupyter Console, Emacs or others, this may end up as duplicated outputs. - coroutines are now native (async-def), instead of using tornado's `@gen.coroutine` - OutStreams can now be configured to report `isatty() == True`, while this should make some output nicer (for example colored), it is likely to break others. Use with care. ### New features added - Implementation of the debugger [#597](https://github.com/ipython/ipykernel/pull/597) ([@JohanMabille](https://github.com/JohanMabille)) ### Enhancements made - Make the `isatty` method of `OutStream` return `true` [#683](https://github.com/ipython/ipykernel/pull/683) ([@peendebak](https://github.com/peendebak)) - Allow setting cell name [#652](https://github.com/ipython/ipykernel/pull/652) ([@davidbrochart](https://github.com/davidbrochart)) - Try to capture all file descriptor output and err [#630](https://github.com/ipython/ipykernel/pull/630) ([@Carreau](https://github.com/Carreau)) - Implemented `inspectVariables` request [#624](https://github.com/ipython/ipykernel/pull/624) ([@JohanMabille](https://github.com/JohanMabille)) - Specify `ipykernel` in kernelspec [#616](https://github.com/ipython/ipykernel/pull/616) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Use `matplotlib-inline` [#591](https://github.com/ipython/ipykernel/pull/591) ([@martinRenou](https://github.com/martinRenou)) - Run control channel in separate thread [#585](https://github.com/ipython/ipykernel/pull/585) ([@SylvainCorlay](https://github.com/SylvainCorlay)) ### Bugs fixed - Remove references to deprecated `ipyparallel` [#695](https://github.com/ipython/ipykernel/pull/695) ([@minrk](https://github.com/minrk)) - Return len 
of item written to `OutStream` [#685](https://github.com/ipython/ipykernel/pull/685) ([@Carreau](https://github.com/Carreau)) - Call metadata methods on abort replies [#684](https://github.com/ipython/ipykernel/pull/684) ([@minrk](https://github.com/minrk)) - Fix keyboard interrupt issue in `dispatch_shell` [#673](https://github.com/ipython/ipykernel/pull/673) ([@marcoamonteiro](https://github.com/marcoamonteiro)) - Update `Trio` mode for compatibility with `Trio >= 0.18.0` [#627](https://github.com/ipython/ipykernel/pull/627) ([@mehaase](https://github.com/mehaase)) - Follow up `DeprecationWarning` Fix [#617](https://github.com/ipython/ipykernel/pull/617) ([@afshin](https://github.com/afshin)) - Flush control stream upon shutdown [#611](https://github.com/ipython/ipykernel/pull/611) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Fix Handling of `shell.should_run_async` [#605](https://github.com/ipython/ipykernel/pull/605) ([@afshin](https://github.com/afshin)) - Decrease lag time for eventloop [#573](https://github.com/ipython/ipykernel/pull/573) ([@impact27](https://github.com/impact27)) - Fix "Socket operation on nonsocket" in downstream `nbclient` test. 
[#641](https://github.com/ipython/ipykernel/pull/641) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Stop control thread before closing sockets on it [#659](https://github.com/ipython/ipykernel/pull/659) ([@minrk](https://github.com/minrk)) - Fix debugging with native coroutines [#651](https://github.com/ipython/ipykernel/pull/651) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Fixup master build [#649](https://github.com/ipython/ipykernel/pull/649) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Fix parent header retrieval [#639](https://github.com/ipython/ipykernel/pull/639) ([@davidbrochart](https://github.com/davidbrochart)) - Add missing self [#636](https://github.com/ipython/ipykernel/pull/636) ([@Carreau](https://github.com/Carreau)) - Backwards compat with older versions of zmq [#665](https://github.com/ipython/ipykernel/pull/665) ([@mlucool](https://github.com/mlucool)) ### Maintenance and upkeep improvements - Remove pin on Jedi because that was already fixed in IPython [#692](https://github.com/ipython/ipykernel/pull/692) ([@ccordoba12](https://github.com/ccordoba12)) - Remove deprecated source parameter since 4.0.1 (2015) [#690](https://github.com/ipython/ipykernel/pull/690) ([@Carreau](https://github.com/Carreau)) - Remove deprecated `SocketABC` since 4.5.0 [#689](https://github.com/ipython/ipykernel/pull/689) ([@Carreau](https://github.com/Carreau)) - Remove deprecated profile options of `connect.py` [#688](https://github.com/ipython/ipykernel/pull/688) ([@Carreau](https://github.com/Carreau)) - Remove `ipykernel.codeutil` deprecated since IPykernel 4.3.1 (Feb 2016) [#687](https://github.com/ipython/ipykernel/pull/687) ([@Carreau](https://github.com/Carreau)) - Keep preferring `SelectorEventLoop` on Windows [#669](https://github.com/ipython/ipykernel/pull/669) ([@minrk](https://github.com/minrk)) - Add `Kernel.get_parent` to match `set_parent` [#661](https://github.com/ipython/ipykernel/pull/661) 
([@minrk](https://github.com/minrk)) - Flush control queue prior to handling shell messages [#658](https://github.com/ipython/ipykernel/pull/658) ([@minrk](https://github.com/minrk)) - Add `Kernel.get_parent_header` [#657](https://github.com/ipython/ipykernel/pull/657) ([@minrk](https://github.com/minrk)) - Build docs only on Ubuntu: add jobs to check docstring formatting. [#644](https://github.com/ipython/ipykernel/pull/644) ([@Carreau](https://github.com/Carreau)) - Make deprecated `shell_streams` writable [#638](https://github.com/ipython/ipykernel/pull/638) ([@minrk](https://github.com/minrk)) - Use channel `get_msg` helper method [#634](https://github.com/ipython/ipykernel/pull/634) ([@davidbrochart](https://github.com/davidbrochart)) - Use native coroutines instead of tornado coroutines [#632](https://github.com/ipython/ipykernel/pull/632) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Make less use of `ipython_genutils` [#631](https://github.com/ipython/ipykernel/pull/631) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Run GitHub Actions on all branches [#625](https://github.com/ipython/ipykernel/pull/625) ([@afshin](https://github.com/afshin)) - Move Python-specific bits to ipkernel [#610](https://github.com/ipython/ipykernel/pull/610) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Update Python Requirement to 3.7 [#608](https://github.com/ipython/ipykernel/pull/608) ([@afshin](https://github.com/afshin)) - Replace import item from `ipython_genutils` to traitlets. [#601](https://github.com/ipython/ipykernel/pull/601) ([@Carreau](https://github.com/Carreau)) - Some removal of `ipython_genutils.py3compat`. [#600](https://github.com/ipython/ipykernel/pull/600) ([@Carreau](https://github.com/Carreau)) - Fixup `get_parent_header` call [#662](https://github.com/ipython/ipykernel/pull/662) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Update of `ZMQInteractiveshell`. 
[#643](https://github.com/ipython/ipykernel/pull/643) ([@Carreau](https://github.com/Carreau)) - Removed filtering of stack frames for testing [#633](https://github.com/ipython/ipykernel/pull/633) ([@JohanMabille](https://github.com/JohanMabille)) - Added 'type' field to variables returned by `inspectVariables` request [#628](https://github.com/ipython/ipykernel/pull/628) ([@JohanMabille](https://github.com/JohanMabille)) - Changed default timeout to 0.0 seconds for `stop_on_error_timeout` [#618](https://github.com/ipython/ipykernel/pull/618) ([@MSeal](https://github.com/MSeal)) - Attempt longer timeout [#615](https://github.com/ipython/ipykernel/pull/615) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Clean up release process and add tests [#596](https://github.com/ipython/ipykernel/pull/596) ([@afshin](https://github.com/afshin)) - Kernelspec: ensure path is writable before writing `kernel.json`. [#593](https://github.com/ipython/ipykernel/pull/593) ([@jellelicht](https://github.com/jellelicht)) - Add `configure_inline_support` and call it in the shell [#590](https://github.com/ipython/ipykernel/pull/590) ([@martinRenou](https://github.com/martinRenou)) ### Documentation improvements - Misc Updates to changelog for 6.0 [#686](https://github.com/ipython/ipykernel/pull/686) ([@Carreau](https://github.com/Carreau)) - Add 5.5.x Changelog entries [#672](https://github.com/ipython/ipykernel/pull/672) ([@blink1073](https://github.com/blink1073)) - Build docs only on ubuntu: add jobs to check docstring formatting. [#644](https://github.com/ipython/ipykernel/pull/644) ([@Carreau](https://github.com/Carreau)) - DOC: Autoreformat all docstrings. 
[#642](https://github.com/ipython/ipykernel/pull/642) ([@Carreau](https://github.com/Carreau)) - Bump Python to 3.8 in `readthedocs.yml` [#612](https://github.com/ipython/ipykernel/pull/612) ([@minrk](https://github.com/minrk)) - Fix typo [#663](https://github.com/ipython/ipykernel/pull/663) ([@SylvainCorlay](https://github.com/SylvainCorlay)) - Add release note to 5.5.0 about `stop_on_error_timeout` [#613](https://github.com/ipython/ipykernel/pull/613) ([@glentakahashi](https://github.com/glentakahashi)) - Move changelog to standard location [#604](https://github.com/ipython/ipykernel/pull/604) ([@afshin](https://github.com/afshin)) - Add changelog for 5.5 [#594](https://github.com/ipython/ipykernel/pull/594) ([@blink1073](https://github.com/blink1073)) - Change to markdown for changelog [#595](https://github.com/ipython/ipykernel/pull/595) ([@afshin](https://github.com/afshin)) ### Deprecations in 6.0 - `Kernel`s now support only a single shell stream, multiple streams will now be ignored. The attribute `Kernel.shell_streams` (plural) is deprecated in ipykernel 6.0. Use `Kernel.shell_stream` (singular) - `Kernel._parent_header` is deprecated, even though it was private. Use `.get_parent()` now. ### Removal in 6.0 - `ipykernel.codeutils` was deprecated since 4.x series (2016) and has been removed, please import similar functionalities from `ipyparallel` - remove `find_connection_file` and `profile` argument of `connect_qtconsole` and `get_connection_info`, deprecated since IPykernel 4.2.2 (2016). 
### Contributors to this release ([GitHub contributors page for this release](https://github.com/ipython/ipykernel/graphs/contributors?from=2021-01-11&to=2021-06-29&type=c)) [@afshin](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aafshin+updated%3A2021-01-11..2021-06-29&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ablink1073+updated%3A2021-01-11..2021-06-29&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ACarreau+updated%3A2021-01-11..2021-06-29&type=Issues) | [@ccordoba12](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Accordoba12+updated%3A2021-01-11..2021-06-29&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adavidbrochart+updated%3A2021-01-11..2021-06-29&type=Issues) | [@dsblank](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Adsblank+updated%3A2021-01-11..2021-06-29&type=Issues) | [@glentakahashi](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aglentakahashi+updated%3A2021-01-11..2021-06-29&type=Issues) | [@impact27](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aimpact27+updated%3A2021-01-11..2021-06-29&type=Issues) | [@ivanov](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aivanov+updated%3A2021-01-11..2021-06-29&type=Issues) | [@jellelicht](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajellelicht+updated%3A2021-01-11..2021-06-29&type=Issues) | [@jkablan](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Ajkablan+updated%3A2021-01-11..2021-06-29&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AJohanMabille+updated%3A2021-01-11..2021-06-29&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Akevin-bates+updated%3A2021-01-11..2021-06-29&type=Issues) | 
[@marcoamonteiro](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amarcoamonteiro+updated%3A2021-01-11..2021-06-29&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AmartinRenou+updated%3A2021-01-11..2021-06-29&type=Issues) | [@mehaase](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amehaase+updated%3A2021-01-11..2021-06-29&type=Issues) | [@minrk](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Aminrk+updated%3A2021-01-11..2021-06-29&type=Issues) | [@mlucool](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Amlucool+updated%3A2021-01-11..2021-06-29&type=Issues) | [@MSeal](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3AMSeal+updated%3A2021-01-11..2021-06-29&type=Issues) | [@peendebak](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Apeendebak+updated%3A2021-01-11..2021-06-29&type=Issues) | [@SylvainCorlay](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3ASylvainCorlay+updated%3A2021-01-11..2021-06-29&type=Issues) | [@tacaswell](https://github.com/search?q=repo%3Aipython%2Fipykernel+involves%3Atacaswell+updated%3A2021-01-11..2021-06-29&type=Issues) ## 5.5 ### 5.5.5 - Keep preferring SelectorEventLoop on Windows. [#669](https://github.com/ipython/ipykernel/pull/669) ### 5.5.4 - Import `configure_inline_support` from `matplotlib_inline` if available [#654](https://github.com/ipython/ipykernel/pull/654) ### 5.5.3 - Revert Backport of #605: Fix Handling of `shell.should_run_async` [#622](https://github.com/ipython/ipykernel/pull/622) ### 5.5.2 **Note:** This release was deleted from PyPI since it had breaking changes. - Changed default timeout to 0.0 seconds for stop_on_error_timeout. [#618](https://github.com/ipython/ipykernel/pull/618) ### 5.5.1 **Note:** This release was deleted from PyPI since it had breaking changes. - Fix Handling of `shell.should_run_async`. 
[#605](https://github.com/ipython/ipykernel/pull/605) ### 5.5.0 - kernelspec: ensure path is writable before writing `kernel.json`. [#593](https://github.com/ipython/ipykernel/pull/593) - Add `configure_inline_support` and call it in the shell. [#590](https://github.com/ipython/ipykernel/pull/590) - Fix `stop_on_error_timeout` to now properly abort `execute_request`'s that fall within the timeout after an error. [#572](https://github.com/ipython/ipykernel/pull/572) ## 5.4 ### 5.4.3 - Rework `wait_for_ready` logic. [#578](https://github.com/ipython/ipykernel/pull/578) ### 5.4.2 - Revert "Fix stop_on_error_timeout blocking other messages in queue". [#570](https://github.com/ipython/ipykernel/pull/570) ### 5.4.1 - Invalid syntax in `ipykernel/log.py`. [#567](https://github.com/ipython/ipykernel/pull/567) ### 5.4.0 5.4.0 is generally focused on code quality improvements and tornado asyncio compatibility. - Add github actions, bail on asyncio patch for tornado 6.1. [#564](https://github.com/ipython/ipykernel/pull/564) - Start testing on Python 3.9. [#551](https://github.com/ipython/ipykernel/pull/551) - Fix stack levels for ipykernel's deprecation warnings and stop using some deprecated APIs. [#547](https://github.com/ipython/ipykernel/pull/547) - Add env parameter to kernel installation [#541](https://github.com/ipython/ipykernel/pull/541) - Fix stop_on_error_timeout blocking other messages in queue. [#539](https://github.com/ipython/ipykernel/pull/539) - Remove most of the python 2 compat code. [#537](https://github.com/ipython/ipykernel/pull/537) - Remove u-prefix from strings. [#538](https://github.com/ipython/ipykernel/pull/538) ## 5.3 ### 5.3.4 - Only run Qt eventloop in the shell stream. [#531](https://github.com/ipython/ipykernel/pull/531) ### 5.3.3 - Fix QSocketNotifier in the Qt event loop not being disabled for the control channel. 
[#525](https://github.com/ipython/ipykernel/pull/525) ### 5.3.2 - Restore timer based event loop as a Windows-compatible fallback. [#523](https://github.com/ipython/ipykernel/pull/523) ### 5.3.1 - Fix #520: run post_execute and post_run_cell on async cells [#521](https://github.com/ipython/ipykernel/pull/521) - Fix exception causes in zmqshell.py [#516](https://github.com/ipython/ipykernel/pull/516) - Make pdb on Windows interruptible [#490](https://github.com/ipython/ipykernel/pull/490) ### 5.3.0 5.3.0 Adds support for Trio event loops and has some bug fixes. - Fix ipython display imports [#509](https://github.com/ipython/ipykernel/pull/509) - Skip test_unc_paths if OS is not Windows [#507](https://github.com/ipython/ipykernel/pull/507) - Allow interrupting input() on Windows, as part of effort to make pdb interruptible [#498](https://github.com/ipython/ipykernel/pull/498) - Add Trio Loop [#479](https://github.com/ipython/ipykernel/pull/479) - Flush from process even without newline [#478](https://github.com/ipython/ipykernel/pull/478) ## 5.2 ### 5.2.1 - Handle system commands that use UNC paths on Windows [#500](https://github.com/ipython/ipykernel/pull/500) - Add offset argument to seek in io test [#496](https://github.com/ipython/ipykernel/pull/496) ### 5.2.0 5.2.0 Includes several bugfixes and internal logic improvements. 
- Produce better traceback when kernel is interrupted [#491](https://github.com/ipython/ipykernel/pull/491) - Add `InProcessKernelClient.control_channel` for compatibility with jupyter-client v6.0.0 [#489](https://github.com/ipython/ipykernel/pull/489) - Drop support for Python 3.4 [#483](https://github.com/ipython/ipykernel/pull/483) - Work around issue related to Tornado with python3.8 on Windows ([#480](https://github.com/ipython/ipykernel/pull/480), [#481](https://github.com/ipython/ipykernel/pull/481)) - Prevent entering event loop if it is None [#464](https://github.com/ipython/ipykernel/pull/464) - Use `shell.input_transformer_manager` when available [#411](https://github.com/ipython/ipykernel/pull/411) ## 5.1 ### 5.1.4 5.1.4 Includes a few bugfixes, especially for compatibility with Python 3.8 on Windows. - Fix pickle issues when using inline matplotlib backend [#476](https://github.com/ipython/ipykernel/pull/476) - Fix an error during kernel shutdown [#463](https://github.com/ipython/ipykernel/pull/463) - Fix compatibility issues with Python 3.8 ([#456](https://github.com/ipython/ipykernel/pull/456), [#461](https://github.com/ipython/ipykernel/pull/461)) - Remove some dead code ([#474](https://github.com/ipython/ipykernel/pull/474), [#467](https://github.com/ipython/ipykernel/pull/467)) ### 5.1.3 5.1.3 Includes several bugfixes and internal logic improvements. 
- Fix comm shutdown behavior by adding a `deleting` option to `close` which can be set to prevent registering new comm channels during shutdown ([#433](https://github.com/ipython/ipykernel/pull/433), [#435](https://github.com/ipython/ipykernel/pull/435)) - Fix `Heartbeat._bind_socket` to return on the first bind ([#431](https://github.com/ipython/ipykernel/pull/431)) - Moved `InProcessKernelClient.flush` to `DummySocket` ([#437](https://github.com/ipython/ipykernel/pull/437)) - Don't redirect stdout if nose machinery is not present ([#427](https://github.com/ipython/ipykernel/pull/427)) - Rename `_asyncio.py` to `_asyncio_utils.py` to avoid name conflicts on Python 3.6+ ([#426](https://github.com/ipython/ipykernel/pull/426)) - Only generate kernelspec when installing or building wheel ([#425](https://github.com/ipython/ipykernel/pull/425)) - Fix priority ordering of control-channel messages in some cases [#443](https://github.com/ipython/ipykernel/pull/443) ### 5.1.2 5.1.2 fixes some socket-binding race conditions that caused testing failures in nbconvert. - Fix socket-binding race conditions ([#412](https://github.com/ipython/ipykernel/pull/412), [#419](https://github.com/ipython/ipykernel/pull/419)) - Add a no-op `flush` method to `DummySocket` and comply with stream API ([#405](https://github.com/ipython/ipykernel/pull/405)) - Update kernel version to indicate kernel v5.3 support ([#394](https://github.com/ipython/ipykernel/pull/394)) - Add testing for upcoming Python 3.8 and PEP 570 positional parameters ([#396](https://github.com/ipython/ipykernel/pull/396), [#408](https://github.com/ipython/ipykernel/pull/408)) ### 5.1.1 5.1.1 fixes a bug that caused cells to get stuck in a busy state. - Flush after sending replies [#390](https://github.com/ipython/ipykernel/pull/390) ### 5.1.0 5.1.0 fixes some important regressions in 5.0, especially on Windows. 
[5.1.0 on GitHub](https://github.com/ipython/ipykernel/milestones/5.1) - Fix message-ordering bug that could result in out-of-order executions, especially on Windows [#356](https://github.com/ipython/ipykernel/pull/356) - Fix classifiers to indicate dropped Python 2 support [#354](https://github.com/ipython/ipykernel/pull/354) - Remove some dead code [#355](https://github.com/ipython/ipykernel/pull/355) - Support rich-media responses in `inspect_requests` (tooltips) [#361](https://github.com/ipython/ipykernel/pull/361) ## 5.0 ### 5.0.0 [5.0.0 on GitHub](https://github.com/ipython/ipykernel/milestones/5.0) - Drop support for Python 2. `ipykernel` 5.0 requires Python >= 3.4 - Add support for IPython's asynchronous code execution [#323](https://github.com/ipython/ipykernel/pull/323) - Update release process in `CONTRIBUTING.md` [#339](https://github.com/ipython/ipykernel/pull/339) ## 4.10 [4.10 on GitHub](https://github.com/ipython/ipykernel/milestones/4.10) - Fix compatibility with IPython 7.0 [#348](https://github.com/ipython/ipykernel/pull/348) - Fix compatibility in cases where sys.stdout can be None [#344](https://github.com/ipython/ipykernel/pull/344) ## 4.9 ### 4.9.0 [4.9.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.9) - Python 3.3 is no longer supported [#336](https://github.com/ipython/ipykernel/pull/336) - Flush stdout/stderr in KernelApp before replacing [#314](https://github.com/ipython/ipykernel/pull/314) - Allow preserving stdout and stderr in KernelApp [#315](https://github.com/ipython/ipykernel/pull/315) - Override writable method on OutStream [#316](https://github.com/ipython/ipykernel/pull/316) - Add metadata to help display matplotlib figures legibly [#336](https://github.com/ipython/ipykernel/pull/336) ## 4.8 ### 4.8.2 [4.8.2 on GitHub](https://github.com/ipython/ipykernel/milestones/4.8.2) - Fix compatibility issue with qt eventloop and pyzmq 17 [#307](https://github.com/ipython/ipykernel/pull/307). 
### 4.8.1 [4.8.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.8.1) - set zmq.ROUTER_HANDOVER socket option when available to workaround libzmq reconnect bug [#300](https://github.com/ipython/ipykernel/pull/300). - Fix sdists including absolute paths for kernelspec files, which prevented installation from sdist on Windows [#306](https://github.com/ipython/ipykernel/pull/306). ### 4.8.0 [4.8.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.8) - Cleanly shutdown integrated event loops when shutting down the kernel. [#290](https://github.com/ipython/ipykernel/pull/290) - `%gui qt` now uses Qt 5 by default rather than Qt 4, following a similar change in terminal IPython. [#293](https://github.com/ipython/ipykernel/pull/293) - Fix event loop integration for `asyncio` when run with Tornado 5, which uses asyncio where available. [#296](https://github.com/ipython/ipykernel/pull/296) ## 4.7 ### 4.7.0 [4.7.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.7) - Add event loop integration for `asyncio`. - Use the new IPython completer API. - Add support for displaying GIF images (mimetype `image/gif`). - Allow the kernel to be interrupted without killing the Qt console. - Fix `is_complete` response with cell magics. - Clean up encoding of bytes objects. - Clean up help links to use `https` and improve display titles. - Clean up ioloop handling in preparation for tornado 5. ## 4.6 ### 4.6.1 [4.6.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.6.1) - Fix eventloop-integration bug preventing Qt windows/widgets from displaying with ipykernel 4.6.0 and IPython ≥ 5.2. - Avoid deprecation warnings about naive datetimes when working with jupyter_client ≥ 5.0. 
### 4.6.0 [4.6.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.6) - Add to API `DisplayPublisher.publish` two new fully backward-compatible keyword-args: > - `update: bool` > - `transient: dict` - Support new `transient` key in `display_data` messages spec for `publish`. For a display data message, `transient` contains data that shouldn't be persisted to files or documents. Add a `display_id` to this `transient` dict by `display(obj, display_id=\...)` - Add `ipykernel_launcher` module which removes the current working directory from `sys.path` before launching the kernel. This helps to reduce the cases where the kernel won't start because there's a `random.py` (or similar) module in the current working directory. - Add busy/idle messages on IOPub during processing of aborted requests - Add active event loop setting to GUI, which enables the correct response to IPython's `is_event_loop_running_xxx` - Include IPython kernelspec in wheels to reduce reliance on "native kernel spec" in jupyter_client - Modify `OutStream` to inherit from `TextIOBase` instead of object to improve API support and error reporting - Fix IPython kernel death messages at start, such as "Kernel Restarting..." and "Kernel appears to have died", when parent-poller handles PID 1 - Various bugfixes ## 4.5 ### 4.5.2 [4.5.2 on GitHub](https://github.com/ipython/ipykernel/milestones/4.5.2) - Fix bug when instantiating Comms outside of the IPython kernel (introduced in 4.5.1). 
### 4.5.1 [4.5.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.5.1) - Add missing `stream` parameter to overridden `getpass` - Remove locks from iopub thread, which could cause deadlocks during debugging - Fix regression where KeyboardInterrupt was treated as an aborted request, rather than an error - Allow instantiating Comms outside of the IPython kernel ### 4.5.0 [4.5 on GitHub](https://github.com/ipython/ipykernel/milestones/4.5) - Use figure.dpi instead of savefig.dpi to set DPI for inline figures - Support ipympl matplotlib backend (requires IPython update as well to fully work) - Various bugfixes, including fixes for output coming from threads, and `input` when called with non-string prompts, which stdlib allows. ## 4.4 ### 4.4.1 [4.4.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.4.1) - Fix circular import of matplotlib on Python 2 caused by the inline backend changes in 4.4.0. ### 4.4.0 [4.4.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.4) - Use [MPLBACKEND](http://matplotlib.org/devel/coding_guide.html?highlight=mplbackend#developing-a-new-backend) environment variable to tell matplotlib >= 1.5 use use the inline backend by default. This is only done if MPLBACKEND is not already set and no backend has been explicitly loaded, so setting `MPLBACKEND=Qt4Agg` or calling `%matplotlib notebook` or `matplotlib.use('Agg')` will take precedence. - Fixes for logging problems caused by 4.3, where logging could go to the terminal instead of the notebook. - Add `--sys-prefix` and `--profile` arguments to `ipython kernel install`. - Allow Comm (Widget) messages to be sent from background threads. - Select inline matplotlib backend by default if `%matplotlib` magic or `matplotlib.use()` are not called explicitly (for matplotlib >= 1.5). - Fix some longstanding minor deviations from the message protocol (missing status: ok in a few replies, connect_reply format). 
- Remove calls to NoOpContext from IPython, deprecated in 5.0. ## 4.3 ### 4.3.2 - Use a nonempty dummy session key for inprocess kernels to avoid security warnings. ### 4.3.1 - Fix Windows Python 3.5 incompatibility caused by faulthandler patch in 4.3 ### 4.3.0 [4.3.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.3) - Publish all IO in a thread, via `IOPubThread`. This solves the problem of requiring `sys.stdout.flush` to be called in the notebook to produce output promptly during long-running cells. - Remove references to outdated IPython guiref in kernel banner. - Patch faulthandler to use `sys.__stderr__` instead of forwarded `sys.stderr`, which has no fileno when forwarded. - Deprecate some vestiges of the Big Split: - `ipykernel.find_connection_file` is deprecated. Use `jupyter_client.find_connection_file` instead. \- Various pieces of code specific to IPython parallel are deprecated in ipykernel and moved to ipyparallel. ## 4.2 ### 4.2.2 [4.2.2 on GitHub](https://github.com/ipython/ipykernel/milestones/4.2.2) - Don't show interactive debugging info when kernel crashes - Fix handling of numerical types in json_clean - Testing fixes for output capturing ### 4.2.1 [4.2.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.2.1) - Fix default display name back to "Python X" instead of "pythonX" ### 4.2.0 [4.2 on GitHub](https://github.com/ipython/ipykernel/milestones/4.2) - Support sending a full message in initial opening of comms (metadata, buffers were not previously allowed) - When using `ipython kernel install --name` to install the IPython kernelspec, default display-name to the same value as `--name`. ## 4.1 ### 4.1.1 [4.1.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.1.1) - Fix missing `ipykernel.__version__` on Python 2. - Fix missing `target_name` when opening comms from the frontend. 
### 4.1.0 [4.1 on GitHub](https://github.com/ipython/ipykernel/milestones/4.1) - add `ipython kernel install` entrypoint for installing the IPython kernelspec - provisional implementation of `comm_info` request/reply for msgspec v5.1 ## 4.0 [4.0 on GitHub](https://github.com/ipython/ipykernel/milestones/4.0) 4.0 is the first release of ipykernel as a standalone package. ipykernel-6.29.5/CONTRIBUTING.md000066400000000000000000000054521464053401500160600ustar00rootroot00000000000000# Contributing Welcome! For contributing tips, follow the [Jupyter Contributing Guide](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html). Please make sure to follow the [Jupyter Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md). ## Installing ipykernel for development ipykernel is a pure Python package, so setting up for development is the same as most other Python projects: ```bash # clone the repo git clone https://github.com/ipython/ipykernel cd ipykernel # do a 'development' or 'editable' install with pip: pip install -e . ``` ## Code Styling `ipykernel` has adopted automatic code formatting so you shouldn't need to worry too much about your code style. As long as your code is valid, the pre-commit hook should take care of how it should look. To install `pre-commit`, run the following:: ``` pip install pre-commit pre-commit install ``` You can invoke the pre-commit hook by hand at any time with:: ``` pre-commit run ``` which should run any autoformatting on your code and tell you about any errors it couldn't fix automatically. You may also install [black integration](https://github.com/psf/black#editor-integration) into your text editor to format code automatically. If you have already committed files before setting up the pre-commit hook with `pre-commit install`, you can fix everything up using `pre-commit run --all-files`. You need to make the fixing commit yourself after that. 
Some of the hooks only run on CI by default, but you can invoke them by running with the `--hook-stage manual` argument. ## Releasing ipykernel Releasing ipykernel is _almost_ standard for a Python package: - set version for release - make and publish tag - publish release to PyPI - set version back to development The one extra step for ipykernel is that we need to make separate wheels for Python 2 and 3 because the bundled kernelspec has different contents for Python 2 and 3. This affects only the 4.x branch of ipykernel as the 5+ version is only compatible Python 3. The full release process is available below: ```bash # make sure version is set in ipykernel/_version.py VERSION="4.9.0" # commit the version and make a release tag git add ipykernel/_version.py git commit -m "release $VERSION" git tag -am "release $VERSION" $VERSION # push the changes to the repo git push git push --tags # publish the release to PyPI # note the extra `python2 setup.py bdist_wheel` for creating # the wheel for Python 2 pip install --upgrade twine git clean -xfd python3 setup.py sdist bdist_wheel python2 setup.py bdist_wheel # the extra step for the 4.x branch. twine upload dist/* # set the version back to '.dev' in ipykernel/_version.py # e.g. 4.10.0.dev if we just released 4.9.0 git add ipykernel/_version.py git commit -m "back to dev" git push ``` ipykernel-6.29.5/LICENSE000066400000000000000000000027751464053401500146410ustar00rootroot00000000000000BSD 3-Clause License Copyright (c) 2015, IPython Development Team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ipykernel-6.29.5/README.md000066400000000000000000000042061464053401500151020ustar00rootroot00000000000000# IPython Kernel for Jupyter [![Build Status](https://github.com/ipython/ipykernel/actions/workflows/ci.yml/badge.svg?query=branch%3Amain++)](https://github.com/ipython/ipykernel/actions/workflows/ci.yml/badge.svg?query=branch%3Amain++) [![Documentation Status](https://readthedocs.org/projects/ipykernel/badge/?version=latest)](http://ipykernel.readthedocs.io/en/latest/?badge=latest) This package provides the IPython kernel for Jupyter. ## Installation from source 1. `git clone` 1. `cd ipykernel` 1. `pip install -e ".[test]"` After that, all normal `ipython` commands will use this newly-installed version of the kernel. ## Running tests Follow the instructions from `Installation from source`. 
and then from the root directory ```bash pytest ``` ## Running tests with coverage Follow the instructions from `Installation from source`. and then from the root directory ```bash pytest -vv -s --cov ipykernel --cov-branch --cov-report term-missing:skip-covered --durations 10 ``` ## About the IPython Development Team The IPython Development Team is the set of all contributors to the IPython project. This includes all of the IPython subprojects. The core team that coordinates development on GitHub can be found here: https://github.com/ipython/. ## Our Copyright Policy IPython uses a shared copyright model. Each contributor maintains copyright over their contributions to IPython. But, it is important to note that these contributions are typically only changes to the repositories. Thus, the IPython source code, in its entirety is not the copyright of any single person or institution. Instead, it is the collective copyright of the entire IPython Development Team. If individual contributors want to maintain a record of what changes/contributions they have specific copyright on, they should indicate their copyright in the commit message of the change, when they commit the change to one of the IPython repositories. With this in mind, the following banner should be used in any source code file to indicate the copyright and license terms: ``` # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. ``` ipykernel-6.29.5/RELEASE.md000066400000000000000000000010361464053401500152230ustar00rootroot00000000000000# Release Guide ## Using `jupyter_releaser` The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html). 
## Manual Release - Update `CHANGELOG` - Run the following: ```bash export VERSION= pip install pipx pipx run hatch version $VERSION git commit -a -m "Release $VERSION" git tag $VERSION; true; git push --all git push --tags rm -rf dist build pipx run build . pipx run twine check dist/* pipx run twine upload dist/* ``` ipykernel-6.29.5/docs/000077500000000000000000000000001464053401500145515ustar00rootroot00000000000000ipykernel-6.29.5/docs/Makefile000066400000000000000000000164151464053401500162200ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/IPythonKernel.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/IPythonKernel.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/IPythonKernel" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/IPythonKernel" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." 
xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." ipykernel-6.29.5/docs/api/000077500000000000000000000000001464053401500153225ustar00rootroot00000000000000ipykernel-6.29.5/docs/api/ipykernel.comm.rst000066400000000000000000000005671464053401500210200ustar00rootroot00000000000000ipykernel.comm package ====================== Submodules ---------- .. automodule:: ipykernel.comm.comm :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.comm.manager :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: ipykernel.comm :members: :undoc-members: :show-inheritance: ipykernel-6.29.5/docs/api/ipykernel.inprocess.rst000066400000000000000000000016121464053401500220620ustar00rootroot00000000000000ipykernel.inprocess package =========================== Submodules ---------- .. automodule:: ipykernel.inprocess.blocking :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.channels :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.client :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.constants :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.ipkernel :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.manager :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.inprocess.socket :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: ipykernel.inprocess :members: :undoc-members: :show-inheritance: ipykernel-6.29.5/docs/api/ipykernel.rst000066400000000000000000000035741464053401500200670ustar00rootroot00000000000000ipykernel package ================= Subpackages ----------- .. 
toctree:: :maxdepth: 4 ipykernel.comm ipykernel.inprocess Submodules ---------- .. automodule:: ipykernel.compiler :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.connect :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.control :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.debugger :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.displayhook :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.embed :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.eventloops :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.heartbeat :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.iostream :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.ipkernel :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.jsonutil :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.kernelapp :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.kernelbase :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.kernelspec :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.log :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.parentpoller :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.trio_runner :members: :undoc-members: :show-inheritance: .. automodule:: ipykernel.zmqshell :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: ipykernel :members: :undoc-members: :show-inheritance: ipykernel-6.29.5/docs/api/modules.rst000066400000000000000000000001001464053401500175130ustar00rootroot00000000000000ipykernel ========= .. 
toctree:: :maxdepth: 4 ipykernel ipykernel-6.29.5/docs/conf.py000066400000000000000000000236711464053401500160610ustar00rootroot00000000000000# IPython Kernel documentation build configuration file, created by # sphinx-quickstart on Mon Oct 5 11:32:44 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import shutil from pathlib import Path from typing import Any, Dict, List # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "myst_parser", "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinxcontrib_github_alt", "sphinx_autodoc_typehints", ] try: import enchant # noqa: F401 extensions += ["sphinxcontrib.spelling"] except ImportError: pass github_project_url = "https://github.com/ipython/ipykernel" # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. 
project = "IPython Kernel" copyright = "2015, IPython Development Team" author = "IPython Development Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # version_ns: Dict[str, Any] = {} here = Path(__file__).parent.resolve() version_py = Path(here) / os.pardir / "ipykernel" / "_version.py" with open(version_py) as f: exec(compile(f.read(), version_py, "exec"), version_ns) # The short X.Y version. version = "%i.%i" % version_ns["version_info"][:2] # The full version, including alpha/beta/rc tags. release = version_ns["__version__"] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = "literal" # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "pydata_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = {"navigation_with_keys": False} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path: List[str] = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = "ipykerneldoc" # -- Options for LaTeX output --------------------------------------------- latex_elements: Dict[str, object] = {} # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "ipykernel.tex", "IPython Kernel Documentation", "IPython Development Team", "manual", ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "ipykernel", "IPython Kernel Documentation", [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "ipykernel", "IPython Kernel Documentation", author, "ipykernel", "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), "ipython": ("https://ipython.readthedocs.io/en/latest", None), "jupyter": ("https://jupyter.readthedocs.io/en/latest", None), } def setup(app): shutil.copy(Path(here) / ".." / "CHANGELOG.md", "changelog.md") ipykernel-6.29.5/docs/index.rst000066400000000000000000000006561464053401500164210ustar00rootroot00000000000000.. _index: IPython Kernel Docs =================== This contains minimal version-sensitive documentation for the IPython kernel package. Most IPython kernel documentation is in the `IPython documentation `_. Contents: .. toctree:: :maxdepth: 1 changelog API docs Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ipykernel-6.29.5/docs/make.bat000066400000000000000000000161321464053401500161610ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. 
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 2> nul if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 
goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\IPythonKernel.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\IPythonKernel.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
goto end ) :end ipykernel-6.29.5/examples/000077500000000000000000000000001464053401500154375ustar00rootroot00000000000000ipykernel-6.29.5/examples/embedding/000077500000000000000000000000001464053401500173555ustar00rootroot00000000000000ipykernel-6.29.5/examples/embedding/inprocess_qtconsole.py000066400000000000000000000047161464053401500240330ustar00rootroot00000000000000"""An in-process qt console app.""" import os import sys import tornado from IPython.lib import guisupport from qtconsole.inprocess import QtInProcessKernelManager from qtconsole.rich_ipython_widget import RichIPythonWidget def print_process_id(): """Print the process id.""" print("Process ID is:", os.getpid()) def init_asyncio_patch(): """set default asyncio policy to be compatible with tornado Tornado 6 (at least) is not compatible with the default asyncio implementation on Windows Pick the older SelectorEventLoopPolicy on Windows if the known-incompatible default policy is in use. do this as early as possible to make it a low priority and overridable ref: https://github.com/tornadoweb/tornado/issues/2608 FIXME: if/when tornado supports the defaults in asyncio, remove and bump tornado requirement for py38 """ if ( sys.platform.startswith("win") and sys.version_info >= (3, 8) and tornado.version_info < (6, 1) ): import asyncio try: from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass # not affected else: if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: # WindowsProactorEventLoopPolicy is not compatible with tornado 6 # fallback to the pre-3.8 default of Selector asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) def main(): """The main entry point.""" # Print the ID of the main process print_process_id() init_asyncio_patch() app = guisupport.get_app_qt4() # Create an in-process kernel # >>> print_process_id() # will print the same process ID as the main process kernel_manager = 
QtInProcessKernelManager() kernel_manager.start_kernel() kernel = kernel_manager.kernel kernel.gui = "qt4" kernel.shell.push({"foo": 43, "print_process_id": print_process_id}) kernel_client = kernel_manager.client() kernel_client.start_channels() def stop(): kernel_client.stop_channels() kernel_manager.shutdown_kernel() app.exit() control = RichIPythonWidget() control.kernel_manager = kernel_manager control.kernel_client = kernel_client control.exit_requested.connect(stop) control.show() guisupport.start_event_loop_qt4(app) if __name__ == "__main__": main() ipykernel-6.29.5/examples/embedding/inprocess_terminal.py000066400000000000000000000041541464053401500236330ustar00rootroot00000000000000"""An in-process terminal example.""" import os import sys import tornado from jupyter_console.ptshell import ZMQTerminalInteractiveShell from ipykernel.inprocess.manager import InProcessKernelManager def print_process_id(): """Print the process id.""" print("Process ID is:", os.getpid()) def init_asyncio_patch(): """set default asyncio policy to be compatible with tornado Tornado 6 (at least) is not compatible with the default asyncio implementation on Windows Pick the older SelectorEventLoopPolicy on Windows if the known-incompatible default policy is in use. 
do this as early as possible to make it a low priority and overridable ref: https://github.com/tornadoweb/tornado/issues/2608 FIXME: if/when tornado supports the defaults in asyncio, remove and bump tornado requirement for py38 """ if ( sys.platform.startswith("win") and sys.version_info >= (3, 8) and tornado.version_info < (6, 1) ): import asyncio try: from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass # not affected else: if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: # WindowsProactorEventLoopPolicy is not compatible with tornado 6 # fallback to the pre-3.8 default of Selector asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) def main(): """The main function.""" print_process_id() # Create an in-process kernel # >>> print_process_id() # will print the same process ID as the main process init_asyncio_patch() kernel_manager = InProcessKernelManager() kernel_manager.start_kernel() kernel = kernel_manager.kernel kernel.gui = "qt4" kernel.shell.push({"foo": 43, "print_process_id": print_process_id}) client = kernel_manager.client() client.start_channels() shell = ZMQTerminalInteractiveShell(manager=kernel_manager, client=client) shell.mainloop() if __name__ == "__main__": main() ipykernel-6.29.5/examples/embedding/internal_ipkernel.py000066400000000000000000000042321464053401500234350ustar00rootroot00000000000000"""An internal ipykernel example.""" # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- import sys from IPython.lib.kernel import connect_qtconsole from ipykernel.kernelapp import IPKernelApp # ----------------------------------------------------------------------------- # Functions and classes # ----------------------------------------------------------------------------- def mpl_kernel(gui): """Launch and return an IPython kernel with 
matplotlib support for the desired gui""" kernel = IPKernelApp.instance() kernel.initialize( [ "python", "--matplotlib=%s" % gui, #'--log-level=10' ] ) return kernel class InternalIPKernel: """An internal ipykernel class.""" def init_ipkernel(self, backend): """Start IPython kernel with GUI event loop and mpl support.""" self.ipkernel = mpl_kernel(backend) # To create and track active qt consoles self.consoles = [] # This application will also act on the shell user namespace self.namespace = self.ipkernel.shell.user_ns # Example: a variable that will be seen by the user in the shell, and # that the GUI modifies (the 'Counter++' button increments it): self.namespace["app_counter"] = 0 # self.namespace['ipkernel'] = self.ipkernel # dbg def print_namespace(self, evt=None): """Print the namespace.""" print("\n***Variables in User namespace***") for k, v in self.namespace.items(): if not k.startswith("_"): print(f"{k} -> {v!r}") sys.stdout.flush() def new_qt_console(self, evt=None): """start a new qtconsole connected to our kernel""" return connect_qtconsole(self.ipkernel.abs_connection_file, profile=self.ipkernel.profile) def count(self, evt=None): """Get the app counter value.""" self.namespace["app_counter"] += 1 def cleanup_consoles(self, evt=None): """Clean up the consoles.""" for c in self.consoles: c.kill() ipykernel-6.29.5/examples/embedding/ipkernel_qtapp.py000077500000000000000000000055371464053401500227620ustar00rootroot00000000000000#!/usr/bin/env python """Example integrating an IPython kernel into a GUI App. This trivial GUI application internally starts an IPython kernel, to which Qt consoles can be connected either by the user at the command line or started from the GUI itself, via a button. The GUI can also manipulate one variable in the kernel's namespace, and print the namespace to the console. Play with it by running the script and then opening one or more consoles, and pushing the 'Counter++' and 'Namespace' buttons. 
Upon exit, it should automatically close all consoles opened from the GUI. Consoles attached separately from a terminal will not be terminated, though they will notice that their kernel died. """ # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- from internal_ipkernel import InternalIPKernel from PyQt4 import Qt # ----------------------------------------------------------------------------- # Functions and classes # ----------------------------------------------------------------------------- class SimpleWindow(Qt.QWidget, InternalIPKernel): """A custom Qt widget for IPykernel.""" def __init__(self, app): """Initialize the widget.""" Qt.QWidget.__init__(self) self.app = app self.add_widgets() self.init_ipkernel("qt") def add_widgets(self): """Add the widget.""" self.setGeometry(300, 300, 400, 70) self.setWindowTitle("IPython in your app") # Add simple buttons: console = Qt.QPushButton("Qt Console", self) console.setGeometry(10, 10, 100, 35) self.connect(console, Qt.SIGNAL("clicked()"), self.new_qt_console) namespace = Qt.QPushButton("Namespace", self) namespace.setGeometry(120, 10, 100, 35) self.connect(namespace, Qt.SIGNAL("clicked()"), self.print_namespace) count = Qt.QPushButton("Count++", self) count.setGeometry(230, 10, 80, 35) self.connect(count, Qt.SIGNAL("clicked()"), self.count) # Quit and cleanup quit = Qt.QPushButton("Quit", self) quit.setGeometry(320, 10, 60, 35) self.connect(quit, Qt.SIGNAL("clicked()"), Qt.qApp, Qt.SLOT("quit()")) self.app.connect(self.app, Qt.SIGNAL("lastWindowClosed()"), self.app, Qt.SLOT("quit()")) self.app.aboutToQuit.connect(self.cleanup_consoles) # ----------------------------------------------------------------------------- # Main script # ----------------------------------------------------------------------------- if __name__ == "__main__": app = Qt.QApplication([]) # Create our window win = 
SimpleWindow(app) win.show() # Very important, IPython-specific step: this gets GUI event loop # integration going, and it replaces calling app.exec_() win.ipkernel.start() ipykernel-6.29.5/examples/embedding/ipkernel_wxapp.py000077500000000000000000000104371464053401500227670ustar00rootroot00000000000000#!/usr/bin/env python """Example integrating an IPython kernel into a GUI App. This trivial GUI application internally starts an IPython kernel, to which Qt consoles can be connected either by the user at the command line or started from the GUI itself, via a button. The GUI can also manipulate one variable in the kernel's namespace, and print the namespace to the console. Play with it by running the script and then opening one or more consoles, and pushing the 'Counter++' and 'Namespace' buttons. Upon exit, it should automatically close all consoles opened from the GUI. Consoles attached separately from a terminal will not be terminated, though they will notice that their kernel died. Ref: Modified from wxPython source code wxPython/samples/simple/simple.py """ # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- import sys import wx from internal_ipkernel import InternalIPKernel # ----------------------------------------------------------------------------- # Functions and classes # ----------------------------------------------------------------------------- class MyFrame(wx.Frame, InternalIPKernel): """ This is MyFrame. It just shows a few controls on a wxPanel, and has a simple menu. 
""" def __init__(self, parent, title): """Initialize the frame.""" wx.Frame.__init__(self, parent, -1, title, pos=(150, 150), size=(350, 285)) # Create the menubar menuBar = wx.MenuBar() # and a menu menu = wx.Menu() # add an item to the menu, using \tKeyName automatically # creates an accelerator, the third param is some help text # that will show up in the statusbar menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample") # bind the menu event to an event handler self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT) # and put the menu on the menubar menuBar.Append(menu, "&File") self.SetMenuBar(menuBar) self.CreateStatusBar() # Now create the Panel to put the other controls on. panel = wx.Panel(self) # and a few controls text = wx.StaticText(panel, -1, "Hello World!") text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD)) text.SetSize(text.GetBestSize()) qtconsole_btn = wx.Button(panel, -1, "Qt Console") ns_btn = wx.Button(panel, -1, "Namespace") count_btn = wx.Button(panel, -1, "Count++") close_btn = wx.Button(panel, -1, "Quit") # bind the button events to handlers self.Bind(wx.EVT_BUTTON, self.new_qt_console, qtconsole_btn) self.Bind(wx.EVT_BUTTON, self.print_namespace, ns_btn) self.Bind(wx.EVT_BUTTON, self.count, count_btn) self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, close_btn) # Use a sizer to layout the controls, stacked vertically and with # a 10 pixel border around each sizer = wx.BoxSizer(wx.VERTICAL) for ctrl in [text, qtconsole_btn, ns_btn, count_btn, close_btn]: sizer.Add(ctrl, 0, wx.ALL, 10) panel.SetSizer(sizer) panel.Layout() # Start the IPython kernel with gui support self.init_ipkernel("wx") def OnTimeToClose(self, evt): """Event handler for the button click.""" print("See ya later!") sys.stdout.flush() self.cleanup_consoles(evt) self.Close() # Not sure why, but our IPython kernel seems to prevent normal WX # shutdown, so an explicit exit() call is needed. 
sys.exit() class MyApp(wx.App): """A custom wx app.""" def OnInit(self): """Initialize app.""" frame = MyFrame(None, "Simple wxPython App") self.SetTopWindow(frame) frame.Show(True) self.ipkernel = frame.ipkernel return True # ----------------------------------------------------------------------------- # Main script # ----------------------------------------------------------------------------- if __name__ == "__main__": app = MyApp(redirect=False, clearSigInt=False) # Very important, IPython-specific step: this gets GUI event loop # integration going, and it replaces calling app.MainLoop() app.ipkernel.start() ipykernel-6.29.5/hatch_build.py000066400000000000000000000021241464053401500164400ustar00rootroot00000000000000"""A custom hatch build hook for ipykernel.""" import shutil import sys from pathlib import Path from hatchling.builders.hooks.plugin.interface import BuildHookInterface class CustomHook(BuildHookInterface): """The IPykernel build hook.""" def initialize(self, version, build_data): """Initialize the hook.""" here = Path(__file__).parent.resolve() sys.path.insert(0, str(here)) from ipykernel.kernelspec import make_ipkernel_cmd, write_kernel_spec overrides = {} # When building a standard wheel, the executable specified in the kernelspec is simply 'python'. if version == "standard": overrides["metadata"] = dict(debugger=True) argv = make_ipkernel_cmd(executable="python") # When installing an editable wheel, the full `sys.executable` can be used. 
else: argv = make_ipkernel_cmd() overrides["argv"] = argv dest = Path(here) / "data_kernelspec" if Path(dest).exists(): shutil.rmtree(dest) write_kernel_spec(dest, overrides=overrides) ipykernel-6.29.5/ipykernel/000077500000000000000000000000001464053401500156235ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/__init__.py000066400000000000000000000002411464053401500177310ustar00rootroot00000000000000from ._version import ( __version__, kernel_protocol_version, kernel_protocol_version_info, version_info, ) from .connect import * # noqa: F403 ipykernel-6.29.5/ipykernel/__main__.py000066400000000000000000000002161464053401500177140ustar00rootroot00000000000000"""The cli entry point for ipykernel.""" if __name__ == "__main__": from ipykernel import kernelapp as app app.launch_new_instance() ipykernel-6.29.5/ipykernel/_eventloop_macos.py000066400000000000000000000107061464053401500215350ustar00rootroot00000000000000"""Eventloop hook for OS X Calls NSApp / CoreFoundation APIs via ctypes. 
""" # cribbed heavily from IPython.terminal.pt_inputhooks.osx # obj-c boilerplate from appnope, used under BSD 2-clause import ctypes import ctypes.util from threading import Event objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("objc")) # type:ignore[arg-type] void_p = ctypes.c_void_p objc.objc_getClass.restype = void_p objc.sel_registerName.restype = void_p objc.objc_msgSend.restype = void_p msg = objc.objc_msgSend def _utf8(s): """ensure utf8 bytes""" if not isinstance(s, bytes): s = s.encode("utf8") return s def n(name): """create a selector name (for ObjC methods)""" return objc.sel_registerName(_utf8(name)) def C(classname): """get an ObjC Class by name""" return objc.objc_getClass(_utf8(classname)) # end obj-c boilerplate from appnope # CoreFoundation C-API calls we will use: CoreFoundation = ctypes.cdll.LoadLibrary( ctypes.util.find_library("CoreFoundation") # type:ignore[arg-type] ) CFAbsoluteTimeGetCurrent = CoreFoundation.CFAbsoluteTimeGetCurrent CFAbsoluteTimeGetCurrent.restype = ctypes.c_double CFRunLoopGetCurrent = CoreFoundation.CFRunLoopGetCurrent CFRunLoopGetCurrent.restype = void_p CFRunLoopGetMain = CoreFoundation.CFRunLoopGetMain CFRunLoopGetMain.restype = void_p CFRunLoopStop = CoreFoundation.CFRunLoopStop CFRunLoopStop.restype = None CFRunLoopStop.argtypes = [void_p] CFRunLoopTimerCreate = CoreFoundation.CFRunLoopTimerCreate CFRunLoopTimerCreate.restype = void_p CFRunLoopTimerCreate.argtypes = [ void_p, # allocator (NULL) ctypes.c_double, # fireDate ctypes.c_double, # interval ctypes.c_int, # flags (0) ctypes.c_int, # order (0) void_p, # callout void_p, # context ] CFRunLoopAddTimer = CoreFoundation.CFRunLoopAddTimer CFRunLoopAddTimer.restype = None CFRunLoopAddTimer.argtypes = [void_p, void_p, void_p] kCFRunLoopCommonModes = void_p.in_dll(CoreFoundation, "kCFRunLoopCommonModes") def _NSApp(): """Return the global NSApplication instance (NSApp)""" objc.objc_msgSend.argtypes = [void_p, void_p] return msg(C("NSApplication"), 
n("sharedApplication")) def _wake(NSApp): """Wake the Application""" objc.objc_msgSend.argtypes = [ void_p, void_p, void_p, void_p, void_p, void_p, void_p, void_p, void_p, void_p, void_p, ] event = msg( C("NSEvent"), n( "otherEventWithType:location:modifierFlags:" "timestamp:windowNumber:context:subtype:data1:data2:" ), 15, # Type 0, # location 0, # flags 0, # timestamp 0, # window None, # context 0, # subtype 0, # data1 0, # data2 ) objc.objc_msgSend.argtypes = [void_p, void_p, void_p, void_p] msg(NSApp, n("postEvent:atStart:"), void_p(event), True) _triggered = Event() def stop(timer=None, loop=None): """Callback to fire when there's input to be read""" _triggered.set() NSApp = _NSApp() # if NSApp is not running, stop CFRunLoop directly, # otherwise stop and wake NSApp objc.objc_msgSend.argtypes = [void_p, void_p] if msg(NSApp, n("isRunning")): objc.objc_msgSend.argtypes = [void_p, void_p, void_p] msg(NSApp, n("stop:"), NSApp) _wake(NSApp) else: CFRunLoopStop(CFRunLoopGetCurrent()) _c_callback_func_type = ctypes.CFUNCTYPE(None, void_p, void_p) _c_stop_callback = _c_callback_func_type(stop) def _stop_after(delay): """Register callback to stop eventloop after a delay""" timer = CFRunLoopTimerCreate( None, # allocator CFAbsoluteTimeGetCurrent() + delay, # fireDate 0, # interval 0, # flags 0, # order _c_stop_callback, None, ) CFRunLoopAddTimer( CFRunLoopGetMain(), timer, kCFRunLoopCommonModes, ) def mainloop(duration=1): """run the Cocoa eventloop for the specified duration (seconds)""" _triggered.clear() NSApp = _NSApp() _stop_after(duration) objc.objc_msgSend.argtypes = [void_p, void_p] msg(NSApp, n("run")) if not _triggered.is_set(): # app closed without firing callback, # probably due to last window being closed. 
# Run the loop manually in this case, # since there may be events still to process (ipython/ipython#9734) CoreFoundation.CFRunLoopRun() ipykernel-6.29.5/ipykernel/_version.py000066400000000000000000000011701464053401500200200ustar00rootroot00000000000000""" store the current version info of the server. """ import re from typing import List # Version string must appear intact for hatch versioning __version__ = "6.29.5" # Build up version_info tuple for backwards compatibility pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" match = re.match(pattern, __version__) assert match is not None parts: List[object] = [int(match[part]) for part in ["major", "minor", "patch"]] if match["rest"]: parts.append(match["rest"]) version_info = tuple(parts) kernel_protocol_version_info = (5, 3) kernel_protocol_version = "{}.{}".format(*kernel_protocol_version_info) ipykernel-6.29.5/ipykernel/comm/000077500000000000000000000000001464053401500165565ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/comm/__init__.py000066400000000000000000000001331464053401500206640ustar00rootroot00000000000000__all__ = ["Comm", "CommManager"] from .comm import Comm from .manager import CommManager ipykernel-6.29.5/ipykernel/comm/comm.py000066400000000000000000000062221464053401500200650ustar00rootroot00000000000000"""Base class for a Comm""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import uuid from typing import Optional from warnings import warn import comm.base_comm import traitlets.config from traitlets import Bool, Bytes, Instance, Unicode, default from ipykernel.jsonutil import json_clean from ipykernel.kernelbase import Kernel # this is the class that will be created if we do comm.create_comm class BaseComm(comm.base_comm.BaseComm): # type:ignore[misc] """The base class for comms.""" kernel: Optional["Kernel"] = None def publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys): """Helper for sending a comm message on IOPub""" if not Kernel.initialized(): return data = {} if data is None else data metadata = {} if metadata is None else metadata content = json_clean(dict(data=data, comm_id=self.comm_id, **keys)) if self.kernel is None: self.kernel = Kernel.instance() assert self.kernel.session is not None self.kernel.session.send( self.kernel.iopub_socket, msg_type, content, metadata=json_clean(metadata), parent=self.kernel.get_parent(), ident=self.topic, buffers=buffers, ) # but for backwards compatibility, we need to inherit from LoggingConfigurable class Comm(BaseComm, traitlets.config.LoggingConfigurable): """Class for communicating between a Frontend and a Kernel""" kernel = Instance("ipykernel.kernelbase.Kernel", allow_none=True) # type:ignore[assignment] comm_id = Unicode() primary = Bool(True, help="Am I the primary or secondary Comm?") target_name = Unicode("comm") target_module = Unicode( None, allow_none=True, help="""requirejs module from which to load comm target.""", ) topic = Bytes() @default("kernel") def _default_kernel(self): if Kernel.initialized(): return Kernel.instance() return None @default("comm_id") def _default_comm_id(self): return uuid.uuid4().hex def __init__( self, target_name="", data=None, metadata=None, buffers=None, show_warning=True, **kwargs ): """Initialize a comm.""" if show_warning: warn( "The `ipykernel.comm.Comm` class has been deprecated. Please use the `comm` module instead." 
"For creating comms, use the function `from comm import create_comm`.", DeprecationWarning, stacklevel=2, ) # Handle differing arguments between base classes. had_kernel = "kernel" in kwargs kernel = kwargs.pop("kernel", None) if target_name: kwargs["target_name"] = target_name BaseComm.__init__(self, data=data, metadata=metadata, buffers=buffers, **kwargs) # type:ignore[call-arg] # only re-add kernel if explicitly provided if had_kernel: kwargs["kernel"] = kernel traitlets.config.LoggingConfigurable.__init__(self, **kwargs) __all__ = ["Comm"] ipykernel-6.29.5/ipykernel/comm/manager.py000066400000000000000000000040461464053401500205460ustar00rootroot00000000000000"""Base class to manage comms""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import logging import comm.base_comm import traitlets import traitlets.config from .comm import Comm logger = logging.getLogger("ipykernel.comm") class CommManager(comm.base_comm.CommManager, traitlets.config.LoggingConfigurable): # type:ignore[misc] """A comm manager.""" kernel = traitlets.Instance("ipykernel.kernelbase.Kernel") comms = traitlets.Dict() targets = traitlets.Dict() def __init__(self, **kwargs): """Initialize the manager.""" # CommManager doesn't take arguments, so we explicitly forward arguments comm.base_comm.CommManager.__init__(self) traitlets.config.LoggingConfigurable.__init__(self, **kwargs) def comm_open(self, stream, ident, msg): """Handler for comm_open messages""" # This is for backward compatibility, the comm_open creates a a new ipykernel.comm.Comm # but we should let the base class create the comm with comm.create_comm in a major release content = msg["content"] comm_id = content["comm_id"] target_name = content["target_name"] f = self.targets.get(target_name, None) comm = Comm( comm_id=comm_id, primary=False, target_name=target_name, show_warning=False, ) self.register_comm(comm) if f is None: logger.error("No such comm target registered: %s", 
target_name) else: try: f(comm, msg) return except Exception: logger.error("Exception opening comm with target: %s", target_name, exc_info=True) # noqa: G201 # Failure. try: comm.close() except Exception: logger.error( # noqa: G201 """Could not close comm during `comm_open` failure clean-up. The comm may not have been opened yet.""", exc_info=True, ) ipykernel-6.29.5/ipykernel/compiler.py000066400000000000000000000054721464053401500200170ustar00rootroot00000000000000"""Compiler helpers for the debugger.""" import os import sys import tempfile from IPython.core.compilerop import CachingCompiler def murmur2_x86(data, seed): """Get the murmur2 hash.""" m = 0x5BD1E995 data = [chr(d) for d in str.encode(data, "utf8")] length = len(data) h = seed ^ length rounded_end = length & 0xFFFFFFFC for i in range(0, rounded_end, 4): k = ( (ord(data[i]) & 0xFF) | ((ord(data[i + 1]) & 0xFF) << 8) | ((ord(data[i + 2]) & 0xFF) << 16) | (ord(data[i + 3]) << 24) ) k = (k * m) & 0xFFFFFFFF k ^= k >> 24 k = (k * m) & 0xFFFFFFFF h = (h * m) & 0xFFFFFFFF h ^= k val = length & 0x03 k = 0 if val == 3: k = (ord(data[rounded_end + 2]) & 0xFF) << 16 if val in [2, 3]: k |= (ord(data[rounded_end + 1]) & 0xFF) << 8 if val in [1, 2, 3]: k |= ord(data[rounded_end]) & 0xFF h ^= k h = (h * m) & 0xFFFFFFFF h ^= h >> 13 h = (h * m) & 0xFFFFFFFF h ^= h >> 15 return h convert_to_long_pathname = lambda filename: filename # noqa: E731 if sys.platform == "win32": try: import ctypes from ctypes.wintypes import DWORD, LPCWSTR, LPWSTR, MAX_PATH _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW _GetLongPathName.argtypes = [LPCWSTR, LPWSTR, DWORD] _GetLongPathName.restype = DWORD def _convert_to_long_pathname(filename): buf = ctypes.create_unicode_buffer(MAX_PATH) rv = _GetLongPathName(filename, buf, MAX_PATH) if rv != 0 and rv <= MAX_PATH: filename = buf.value return filename # test that it works so if there are any issues we fail just once here _convert_to_long_pathname(__file__) except Exception: pass 
else: convert_to_long_pathname = _convert_to_long_pathname def get_tmp_directory(): """Get a temp directory.""" tmp_dir = convert_to_long_pathname(tempfile.gettempdir()) pid = os.getpid() return tmp_dir + os.sep + "ipykernel_" + str(pid) def get_tmp_hash_seed(): """Get a temp hash seed.""" return 0xC70F6907 def get_file_name(code): """Get a file name.""" cell_name = os.environ.get("IPYKERNEL_CELL_NAME") if cell_name is None: name = murmur2_x86(code, get_tmp_hash_seed()) cell_name = get_tmp_directory() + os.sep + str(name) + ".py" return cell_name class XCachingCompiler(CachingCompiler): """A custom caching compiler.""" def __init__(self, *args, **kwargs): """Initialize the compiler.""" super().__init__(*args, **kwargs) self.log = None def get_code_name(self, raw_code, code, number): """Get the code name.""" return get_file_name(raw_code) ipykernel-6.29.5/ipykernel/connect.py000066400000000000000000000100101464053401500176160ustar00rootroot00000000000000"""Connection file-related utilities for the kernel """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import annotations import json import sys from subprocess import PIPE, Popen from typing import TYPE_CHECKING, Any import jupyter_client from jupyter_client import write_connection_file if TYPE_CHECKING: from ipykernel.kernelapp import IPKernelApp def get_connection_file(app: IPKernelApp | None = None) -> str: """Return the path to the connection file of an app Parameters ---------- app : IPKernelApp instance [optional] If unspecified, the currently running app will be used """ from traitlets.utils import filefind if app is None: from ipykernel.kernelapp import IPKernelApp if not IPKernelApp.initialized(): msg = "app not specified, and not in a running Kernel" raise RuntimeError(msg) app = IPKernelApp.instance() return filefind(app.connection_file, [".", app.connection_dir]) def _find_connection_file(connection_file): """Return the absolute path for a connection file - If nothing specified, return current Kernel's connection file - Otherwise, call jupyter_client.find_connection_file """ if connection_file is None: # get connection file from current kernel return get_connection_file() return jupyter_client.find_connection_file(connection_file) def get_connection_info( connection_file: str | None = None, unpack: bool = False ) -> str | dict[str, Any]: """Return the connection information for the current Kernel. Parameters ---------- connection_file : str [optional] The connection file to be used. Can be given by absolute path, or IPython will search in the security directory. If run from IPython, If unspecified, the connection file for the currently running IPython Kernel will be used, which is only allowed from inside a kernel. unpack : bool [default: False] if True, return the unpacked dict, otherwise just the string contents of the file. Returns ------- The connection dictionary of the current kernel, as string or dict, depending on `unpack`. 
""" cf = _find_connection_file(connection_file) with open(cf) as f: info_str = f.read() if unpack: info = json.loads(info_str) # ensure key is bytes: info["key"] = info.get("key", "").encode() return info # type:ignore[no-any-return] return info_str def connect_qtconsole( connection_file: str | None = None, argv: list[str] | None = None ) -> Popen[Any]: """Connect a qtconsole to the current kernel. This is useful for connecting a second qtconsole to a kernel, or to a local notebook. Parameters ---------- connection_file : str [optional] The connection file to be used. Can be given by absolute path, or IPython will search in the security directory. If run from IPython, If unspecified, the connection file for the currently running IPython Kernel will be used, which is only allowed from inside a kernel. argv : list [optional] Any extra args to be passed to the console. Returns ------- :class:`subprocess.Popen` instance running the qtconsole frontend """ argv = [] if argv is None else argv cf = _find_connection_file(connection_file) cmd = ";".join(["from qtconsole import qtconsoleapp", "qtconsoleapp.main()"]) kwargs: dict[str, Any] = {} # Launch the Qt console in a separate session & process group, so # interrupting the kernel doesn't kill it. 
kwargs["start_new_session"] = True return Popen( [sys.executable, "-c", cmd, "--existing", cf, *argv], stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != "win32"), **kwargs, ) __all__ = [ "write_connection_file", "get_connection_file", "get_connection_info", "connect_qtconsole", ] ipykernel-6.29.5/ipykernel/control.py000066400000000000000000000014561464053401500176630ustar00rootroot00000000000000"""A thread for a control channel.""" from threading import Thread from tornado.ioloop import IOLoop CONTROL_THREAD_NAME = "Control" class ControlThread(Thread): """A thread for a control channel.""" def __init__(self, **kwargs): """Initialize the thread.""" Thread.__init__(self, name=CONTROL_THREAD_NAME, **kwargs) self.io_loop = IOLoop(make_current=False) self.pydev_do_not_trace = True self.is_pydev_daemon_thread = True def run(self): """Run the thread.""" self.name = CONTROL_THREAD_NAME try: self.io_loop.start() finally: self.io_loop.close() def stop(self): """Stop the thread. This method is threadsafe. """ self.io_loop.add_callback(self.io_loop.stop) ipykernel-6.29.5/ipykernel/datapub.py000066400000000000000000000043641464053401500176240ustar00rootroot00000000000000# Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. """Publishing native (typically pickled) objects. """ import warnings from traitlets import Any, CBytes, Dict, Instance from traitlets.config import Configurable from ipykernel.jsonutil import json_clean try: # available since ipyparallel 5.0.0 from ipyparallel.serialize import serialize_object except ImportError: # Deprecated since ipykernel 4.3.0 from ipykernel.serialize import serialize_object from jupyter_client.session import Session, extract_header warnings.warn( "ipykernel.datapub is deprecated. 
It has moved to ipyparallel.datapub", DeprecationWarning, stacklevel=2, ) class ZMQDataPublisher(Configurable): """A zmq data publisher.""" topic = topic = CBytes(b"datapub") session = Instance(Session, allow_none=True) pub_socket = Any(allow_none=True) parent_header = Dict({}) def set_parent(self, parent): """Set the parent for outbound messages.""" self.parent_header = extract_header(parent) def publish_data(self, data): """publish a data_message on the IOPub channel Parameters ---------- data : dict The data to be published. Think of it as a namespace. """ session = self.session assert session is not None buffers = serialize_object( data, buffer_threshold=session.buffer_threshold, item_threshold=session.item_threshold, ) content = json_clean(dict(keys=list(data.keys()))) session.send( self.pub_socket, "data_message", content=content, parent=self.parent_header, buffers=buffers, ident=self.topic, ) def publish_data(data): """publish a data_message on the IOPub channel Parameters ---------- data : dict The data to be published. Think of it as a namespace. """ warnings.warn( "ipykernel.datapub is deprecated. 
It has moved to ipyparallel.datapub", DeprecationWarning, stacklevel=2, ) from ipykernel.zmqshell import ZMQInteractiveShell ZMQInteractiveShell.instance().data_pub.publish_data(data) ipykernel-6.29.5/ipykernel/debugger.py000066400000000000000000000647421464053401500177760ustar00rootroot00000000000000"""Debugger implementation for the IPython kernel.""" import os import re import sys import typing as t from pathlib import Path import zmq from IPython.core.getipython import get_ipython from IPython.core.inputtransformer2 import leading_empty_lines from tornado.locks import Event from tornado.queues import Queue from zmq.utils import jsonapi try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default from .compiler import get_file_name, get_tmp_directory, get_tmp_hash_seed try: # This import is required to have the next ones working... from debugpy.server import api # noqa: F401 from _pydevd_bundle import pydevd_frame_utils # isort: skip from _pydevd_bundle.pydevd_suspended_frames import ( # isort: skip SuspendedFramesManager, _FramesTracker, ) _is_debugpy_available = True except ImportError: _is_debugpy_available = False except Exception as e: # We cannot import the module where the DebuggerInitializationError # is defined if e.__class__.__name__ == "DebuggerInitializationError": _is_debugpy_available = False else: raise e # Required for backwards compatibility ROUTING_ID = getattr(zmq, "ROUTING_ID", None) or zmq.IDENTITY class _FakeCode: """Fake code class.""" def __init__(self, co_filename, co_name): """Init.""" self.co_filename = co_filename self.co_name = co_name class _FakeFrame: """Fake frame class.""" def __init__(self, f_code, f_globals, f_locals): """Init.""" self.f_code = f_code self.f_globals = f_globals self.f_locals = f_locals self.f_back = None class _DummyPyDB: """Fake PyDb class.""" def __init__(self): """Init.""" from _pydevd_bundle.pydevd_api import PyDevdAPI 
self.variable_presentation = PyDevdAPI.VariablePresentation() class VariableExplorer: """A variable explorer.""" def __init__(self): """Initialize the explorer.""" self.suspended_frame_manager = SuspendedFramesManager() self.py_db = _DummyPyDB() self.tracker = _FramesTracker(self.suspended_frame_manager, self.py_db) self.frame = None def track(self): """Start tracking.""" var = get_ipython().user_ns self.frame = _FakeFrame(_FakeCode("", get_file_name("sys._getframe()")), var, var) self.tracker.track("thread1", pydevd_frame_utils.create_frames_list_from_frame(self.frame)) def untrack_all(self): """Stop tracking.""" self.tracker.untrack_all() def get_children_variables(self, variable_ref=None): """Get the child variables for a variable reference.""" var_ref = variable_ref if not var_ref: var_ref = id(self.frame) variables = self.suspended_frame_manager.get_variable(var_ref) return [x.get_var_data() for x in variables.get_children_variables()] class DebugpyMessageQueue: """A debugpy message queue.""" HEADER = "Content-Length: " HEADER_LENGTH = 16 SEPARATOR = "\r\n\r\n" SEPARATOR_LENGTH = 4 def __init__(self, event_callback, log): """Init the queue.""" self.tcp_buffer = "" self._reset_tcp_pos() self.event_callback = event_callback self.message_queue: Queue[t.Any] = Queue() self.log = log def _reset_tcp_pos(self): self.header_pos = -1 self.separator_pos = -1 self.message_size = 0 self.message_pos = -1 def _put_message(self, raw_msg): self.log.debug("QUEUE - _put_message:") msg = t.cast(t.Dict[str, t.Any], jsonapi.loads(raw_msg)) if msg["type"] == "event": self.log.debug("QUEUE - received event:") self.log.debug(msg) self.event_callback(msg) else: self.log.debug("QUEUE - put message:") self.log.debug(msg) self.message_queue.put_nowait(msg) def put_tcp_frame(self, frame): """Put a tcp frame in the queue.""" self.tcp_buffer += frame self.log.debug("QUEUE - received frame") while True: # Finds header if self.header_pos == -1: self.header_pos = 
self.tcp_buffer.find(DebugpyMessageQueue.HEADER) if self.header_pos == -1: return self.log.debug("QUEUE - found header at pos %i", self.header_pos) # Finds separator if self.separator_pos == -1: hint = self.header_pos + DebugpyMessageQueue.HEADER_LENGTH self.separator_pos = self.tcp_buffer.find(DebugpyMessageQueue.SEPARATOR, hint) if self.separator_pos == -1: return self.log.debug("QUEUE - found separator at pos %i", self.separator_pos) if self.message_pos == -1: size_pos = self.header_pos + DebugpyMessageQueue.HEADER_LENGTH self.message_pos = self.separator_pos + DebugpyMessageQueue.SEPARATOR_LENGTH self.message_size = int(self.tcp_buffer[size_pos : self.separator_pos]) self.log.debug("QUEUE - found message at pos %i", self.message_pos) self.log.debug("QUEUE - message size is %i", self.message_size) if len(self.tcp_buffer) - self.message_pos < self.message_size: return self._put_message( self.tcp_buffer[self.message_pos : self.message_pos + self.message_size] ) if len(self.tcp_buffer) - self.message_pos == self.message_size: self.log.debug("QUEUE - resetting tcp_buffer") self.tcp_buffer = "" self._reset_tcp_pos() return self.tcp_buffer = self.tcp_buffer[self.message_pos + self.message_size :] self.log.debug("QUEUE - slicing tcp_buffer: %s", self.tcp_buffer) self._reset_tcp_pos() async def get_message(self): """Get a message from the queue.""" return await self.message_queue.get() class DebugpyClient: """A client for debugpy.""" def __init__(self, log, debugpy_stream, event_callback): """Initialize the client.""" self.log = log self.debugpy_stream = debugpy_stream self.event_callback = event_callback self.message_queue = DebugpyMessageQueue(self._forward_event, self.log) self.debugpy_host = "127.0.0.1" self.debugpy_port = -1 self.routing_id = None self.wait_for_attach = True self.init_event = Event() self.init_event_seq = -1 def _get_endpoint(self): host, port = self.get_host_port() return "tcp://" + host + ":" + str(port) def _forward_event(self, msg): if 
msg["event"] == "initialized": self.init_event.set() self.init_event_seq = msg["seq"] self.event_callback(msg) def _send_request(self, msg): if self.routing_id is None: self.routing_id = self.debugpy_stream.socket.getsockopt(ROUTING_ID) content = jsonapi.dumps( msg, default=json_default, ensure_ascii=False, allow_nan=False, ) content_length = str(len(content)) buf = (DebugpyMessageQueue.HEADER + content_length + DebugpyMessageQueue.SEPARATOR).encode( "ascii" ) buf += content self.log.debug("DEBUGPYCLIENT:") self.log.debug(self.routing_id) self.log.debug(buf) self.debugpy_stream.send_multipart((self.routing_id, buf)) async def _wait_for_response(self): # Since events are never pushed to the message_queue # we can safely assume the next message in queue # will be an answer to the previous request return await self.message_queue.get_message() async def _handle_init_sequence(self): # 1] Waits for initialized event await self.init_event.wait() # 2] Sends configurationDone request configurationDone = { "type": "request", "seq": int(self.init_event_seq) + 1, "command": "configurationDone", } self._send_request(configurationDone) # 3] Waits for configurationDone response await self._wait_for_response() # 4] Waits for attachResponse and returns it return await self._wait_for_response() def get_host_port(self): """Get the host debugpy port.""" if self.debugpy_port == -1: socket = self.debugpy_stream.socket socket.bind_to_random_port("tcp://" + self.debugpy_host) self.endpoint = socket.getsockopt(zmq.LAST_ENDPOINT).decode("utf-8") socket.unbind(self.endpoint) index = self.endpoint.rfind(":") self.debugpy_port = self.endpoint[index + 1 :] return self.debugpy_host, self.debugpy_port def connect_tcp_socket(self): """Connect to the tcp socket.""" self.debugpy_stream.socket.connect(self._get_endpoint()) self.routing_id = self.debugpy_stream.socket.getsockopt(ROUTING_ID) def disconnect_tcp_socket(self): """Disconnect from the tcp socket.""" 
self.debugpy_stream.socket.disconnect(self._get_endpoint()) self.routing_id = None self.init_event = Event() self.init_event_seq = -1 self.wait_for_attach = True def receive_dap_frame(self, frame): """Receive a dap frame.""" self.message_queue.put_tcp_frame(frame) async def send_dap_request(self, msg): """Send a dap request.""" self._send_request(msg) if self.wait_for_attach and msg["command"] == "attach": rep = await self._handle_init_sequence() self.wait_for_attach = False return rep rep = await self._wait_for_response() self.log.debug("DEBUGPYCLIENT - returning:") self.log.debug(rep) return rep class Debugger: """The debugger class.""" # Requests that requires that the debugger has started started_debug_msg_types = [ "dumpCell", "setBreakpoints", "source", "stackTrace", "variables", "attach", "configurationDone", ] # Requests that can be handled even if the debugger is not running static_debug_msg_types = [ "debugInfo", "inspectVariables", "richInspectVariables", "modules", "copyToGlobals", ] def __init__( self, log, debugpy_stream, event_callback, shell_socket, session, just_my_code=True ): """Initialize the debugger.""" self.log = log self.debugpy_client = DebugpyClient(log, debugpy_stream, self._handle_event) self.shell_socket = shell_socket self.session = session self.is_started = False self.event_callback = event_callback self.just_my_code = just_my_code self.stopped_queue: Queue[t.Any] = Queue() self.started_debug_handlers = {} for msg_type in Debugger.started_debug_msg_types: self.started_debug_handlers[msg_type] = getattr(self, msg_type) self.static_debug_handlers = {} for msg_type in Debugger.static_debug_msg_types: self.static_debug_handlers[msg_type] = getattr(self, msg_type) self.breakpoint_list = {} self.stopped_threads = set() self.debugpy_initialized = False self._removed_cleanup = {} self.debugpy_host = "127.0.0.1" self.debugpy_port = 0 self.endpoint = None self.variable_explorer = VariableExplorer() def _handle_event(self, msg): if msg["event"] 
== "stopped": if msg["body"]["allThreadsStopped"]: self.stopped_queue.put_nowait(msg) # Do not forward the event now, will be done in the handle_stopped_event return self.stopped_threads.add(msg["body"]["threadId"]) self.event_callback(msg) elif msg["event"] == "continued": if msg["body"]["allThreadsContinued"]: self.stopped_threads = set() else: self.stopped_threads.remove(msg["body"]["threadId"]) self.event_callback(msg) else: self.event_callback(msg) async def _forward_message(self, msg): return await self.debugpy_client.send_dap_request(msg) def _build_variables_response(self, request, variables): var_list = [var for var in variables if self.accept_variable(var["name"])] return { "seq": request["seq"], "type": "response", "request_seq": request["seq"], "success": True, "command": request["command"], "body": {"variables": var_list}, } def _accept_stopped_thread(self, thread_name): # TODO: identify Thread-2, Thread-3 and Thread-4. These are NOT # Control, IOPub or Heartbeat threads forbid_list = ["IPythonHistorySavingThread", "Thread-2", "Thread-3", "Thread-4"] return thread_name not in forbid_list async def handle_stopped_event(self): """Handle a stopped event.""" # Wait for a stopped event message in the stopped queue # This message is used for triggering the 'threads' request event = await self.stopped_queue.get() req = {"seq": event["seq"] + 1, "type": "request", "command": "threads"} rep = await self._forward_message(req) for thread in rep["body"]["threads"]: if self._accept_stopped_thread(thread["name"]): self.stopped_threads.add(thread["id"]) self.event_callback(event) @property def tcp_client(self): return self.debugpy_client def start(self): """Start the debugger.""" if not self.debugpy_initialized: tmp_dir = get_tmp_directory() if not Path(tmp_dir).exists(): Path(tmp_dir).mkdir(parents=True) host, port = self.debugpy_client.get_host_port() code = "import debugpy;" code += 'debugpy.listen(("' + host + '",' + port + "))" content = {"code": code, "silent": 
True} self.session.send( self.shell_socket, "execute_request", content, None, (self.shell_socket.getsockopt(ROUTING_ID)), ) ident, msg = self.session.recv(self.shell_socket, mode=0) self.debugpy_initialized = msg["content"]["status"] == "ok" # Don't remove leading empty lines when debugging so the breakpoints are correctly positioned cleanup_transforms = get_ipython().input_transformer_manager.cleanup_transforms if leading_empty_lines in cleanup_transforms: index = cleanup_transforms.index(leading_empty_lines) self._removed_cleanup[index] = cleanup_transforms.pop(index) self.debugpy_client.connect_tcp_socket() return self.debugpy_initialized def stop(self): """Stop the debugger.""" self.debugpy_client.disconnect_tcp_socket() # Restore remove cleanup transformers cleanup_transforms = get_ipython().input_transformer_manager.cleanup_transforms for index in sorted(self._removed_cleanup): func = self._removed_cleanup.pop(index) cleanup_transforms.insert(index, func) async def dumpCell(self, message): """Handle a dump cell message.""" code = message["arguments"]["code"] file_name = get_file_name(code) with open(file_name, "w", encoding="utf-8") as f: f.write(code) return { "type": "response", "request_seq": message["seq"], "success": True, "command": message["command"], "body": {"sourcePath": file_name}, } async def setBreakpoints(self, message): """Handle a set breakpoints message.""" source = message["arguments"]["source"]["path"] self.breakpoint_list[source] = message["arguments"]["breakpoints"] message_response = await self._forward_message(message) # debugpy can set breakpoints on different lines than the ones requested, # so we want to record the breakpoints that were actually added if message_response.get("success"): self.breakpoint_list[source] = [ {"line": breakpoint["line"]} for breakpoint in message_response["body"]["breakpoints"] ] return message_response async def source(self, message): """Handle a source message.""" reply = {"type": "response", 
"request_seq": message["seq"], "command": message["command"]} source_path = message["arguments"]["source"]["path"] if Path(source_path).is_file(): with open(source_path, encoding="utf-8") as f: reply["success"] = True reply["body"] = {"content": f.read()} else: reply["success"] = False reply["message"] = "source unavailable" reply["body"] = {} return reply async def stackTrace(self, message): """Handle a stack trace message.""" reply = await self._forward_message(message) # The stackFrames array can have the following content: # { frames from the notebook} # ... # { 'id': xxx, 'name': '', ... } <= this is the first frame of the code from the notebook # { frames from ipykernel } # ... # {'id': yyy, 'name': '', ... } <= this is the first frame of ipykernel code # or only the frames from the notebook. # We want to remove all the frames from ipykernel when they are present. try: sf_list = reply["body"]["stackFrames"] module_idx = len(sf_list) - next( i for i, v in enumerate(reversed(sf_list), 1) if v["name"] == "" and i != 1 ) reply["body"]["stackFrames"] = reply["body"]["stackFrames"][: module_idx + 1] except StopIteration: pass return reply def accept_variable(self, variable_name): """Accept a variable by name.""" forbid_list = [ "__name__", "__doc__", "__package__", "__loader__", "__spec__", "__annotations__", "__builtins__", "__builtin__", "__display__", "get_ipython", "debugpy", "exit", "quit", "In", "Out", "_oh", "_dh", "_", "__", "___", ] cond = variable_name not in forbid_list cond = cond and not bool(re.search(r"^_\d", variable_name)) cond = cond and variable_name[0:2] != "_i" return cond # noqa: RET504 async def variables(self, message): """Handle a variables message.""" reply = {} if not self.stopped_threads: variables = self.variable_explorer.get_children_variables( message["arguments"]["variablesReference"] ) return self._build_variables_response(message, variables) reply = await self._forward_message(message) # TODO : check start and count arguments work 
as expected in debugpy reply["body"]["variables"] = [ var for var in reply["body"]["variables"] if self.accept_variable(var["name"]) ] return reply async def attach(self, message): """Handle an attach message.""" host, port = self.debugpy_client.get_host_port() message["arguments"]["connect"] = {"host": host, "port": port} message["arguments"]["logToFile"] = True # Experimental option to break in non-user code. # The ipykernel source is in the call stack, so the user # has to manipulate the step-over and step-into in a wize way. # Set debugOptions for breakpoints in python standard library source. if not self.just_my_code: message["arguments"]["debugOptions"] = ["DebugStdLib"] return await self._forward_message(message) async def configurationDone(self, message): """Handle a configuration done message.""" return { "seq": message["seq"], "type": "response", "request_seq": message["seq"], "success": True, "command": message["command"], } async def debugInfo(self, message): """Handle a debug info message.""" breakpoint_list = [] for key, value in self.breakpoint_list.items(): breakpoint_list.append({"source": key, "breakpoints": value}) return { "type": "response", "request_seq": message["seq"], "success": True, "command": message["command"], "body": { "isStarted": self.is_started, "hashMethod": "Murmur2", "hashSeed": get_tmp_hash_seed(), "tmpFilePrefix": get_tmp_directory() + os.sep, "tmpFileSuffix": ".py", "breakpoints": breakpoint_list, "stoppedThreads": list(self.stopped_threads), "richRendering": True, "exceptionPaths": ["Python Exceptions"], "copyToGlobals": True, }, } async def inspectVariables(self, message): """Handle an inspect variables message.""" self.variable_explorer.untrack_all() # looks like the implementation of untrack_all in ptvsd # destroys objects we nee din track. 
We have no choice but # reinstantiate the object self.variable_explorer = VariableExplorer() self.variable_explorer.track() variables = self.variable_explorer.get_children_variables() return self._build_variables_response(message, variables) async def richInspectVariables(self, message): """Handle a rich inspect variables message.""" reply = { "type": "response", "sequence_seq": message["seq"], "success": False, "command": message["command"], } var_name = message["arguments"]["variableName"] valid_name = str.isidentifier(var_name) if not valid_name: reply["body"] = {"data": {}, "metadata": {}} if var_name == "special variables" or var_name == "function variables": reply["success"] = True return reply repr_data = {} repr_metadata = {} if not self.stopped_threads: # The code did not hit a breakpoint, we use the interpreter # to get the rich representation of the variable result = get_ipython().user_expressions({var_name: var_name})[var_name] if result.get("status", "error") == "ok": repr_data = result.get("data", {}) repr_metadata = result.get("metadata", {}) else: # The code has stopped on a breakpoint, we use the setExpression # request to get the rich representation of the variable code = f"get_ipython().display_formatter.format({var_name})" frame_id = message["arguments"]["frameId"] seq = message["seq"] reply = await self._forward_message( { "type": "request", "command": "evaluate", "seq": seq + 1, "arguments": {"expression": code, "frameId": frame_id, "context": "clipboard"}, } ) if reply["success"]: repr_data, repr_metadata = eval(reply["body"]["result"], {}, {}) body = { "data": repr_data, "metadata": {k: v for k, v in repr_metadata.items() if k in repr_data}, } reply["body"] = body reply["success"] = True return reply async def copyToGlobals(self, message): dst_var_name = message["arguments"]["dstVariableName"] src_var_name = message["arguments"]["srcVariableName"] src_frame_id = message["arguments"]["srcFrameId"] expression = f"globals()['{dst_var_name}']" 
seq = message["seq"] return await self._forward_message( { "type": "request", "command": "setExpression", "seq": seq + 1, "arguments": { "expression": expression, "value": src_var_name, "frameId": src_frame_id, }, } ) async def modules(self, message): """Handle a modules message.""" modules = list(sys.modules.values()) startModule = message.get("startModule", 0) moduleCount = message.get("moduleCount", len(modules)) mods = [] for i in range(startModule, moduleCount): module = modules[i] filename = getattr(getattr(module, "__spec__", None), "origin", None) if filename and filename.endswith(".py"): mods.append({"id": i, "name": module.__name__, "path": filename}) return {"body": {"modules": mods, "totalModules": len(modules)}} async def process_request(self, message): """Process a request.""" reply = {} if message["command"] == "initialize": if self.is_started: self.log.info("The debugger has already started") else: self.is_started = self.start() if self.is_started: self.log.info("The debugger has started") else: reply = { "command": "initialize", "request_seq": message["seq"], "seq": 3, "success": False, "type": "response", } handler = self.static_debug_handlers.get(message["command"], None) if handler is not None: reply = await handler(message) elif self.is_started: handler = self.started_debug_handlers.get(message["command"], None) if handler is not None: reply = await handler(message) else: reply = await self._forward_message(message) if message["command"] == "disconnect": self.stop() self.breakpoint_list = {} self.stopped_threads = set() self.is_started = False self.log.info("The debugger has stopped") return reply ipykernel-6.29.5/ipykernel/displayhook.py000066400000000000000000000061141464053401500205250ustar00rootroot00000000000000"""Replacements for sys.displayhook that publish over ZMQ.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import annotations import builtins import sys import typing as t from IPython.core.displayhook import DisplayHook from jupyter_client.session import Session, extract_header from traitlets import Any, Dict, Instance from ipykernel.jsonutil import encode_images, json_clean class ZMQDisplayHook: """A simple displayhook that publishes the object's repr over a ZeroMQ socket.""" topic = b"execute_result" def __init__(self, session, pub_socket): """Initialize the hook.""" self.session = session self.pub_socket = pub_socket self.parent_header = {} def get_execution_count(self): """This method is replaced in kernelapp""" return 0 def __call__(self, obj): """Handle a hook call.""" if obj is None: return builtins._ = obj # type:ignore[attr-defined] sys.stdout.flush() sys.stderr.flush() contents = { "execution_count": self.get_execution_count(), "data": {"text/plain": repr(obj)}, "metadata": {}, } self.session.send( self.pub_socket, "execute_result", contents, parent=self.parent_header, ident=self.topic ) def set_parent(self, parent): """Set the parent header.""" self.parent_header = extract_header(parent) class ZMQShellDisplayHook(DisplayHook): """A displayhook subclass that publishes data using ZeroMQ. This is intended to work with an InteractiveShell instance. 
It sends a dict of different representations of the object.""" topic = None session = Instance(Session, allow_none=True) pub_socket = Any(allow_none=True) parent_header = Dict({}) msg: dict[str, t.Any] | None def set_parent(self, parent): """Set the parent for outbound messages.""" self.parent_header = extract_header(parent) def start_displayhook(self): """Start the display hook.""" if self.session: self.msg = self.session.msg( "execute_result", { "data": {}, "metadata": {}, }, parent=self.parent_header, ) def write_output_prompt(self): """Write the output prompt.""" if self.msg: self.msg["content"]["execution_count"] = self.prompt_count def write_format_data(self, format_dict, md_dict=None): """Write format data to the message.""" if self.msg: self.msg["content"]["data"] = json_clean(encode_images(format_dict)) self.msg["content"]["metadata"] = md_dict def finish_displayhook(self): """Finish up all displayhook activities.""" sys.stdout.flush() sys.stderr.flush() if self.msg and self.msg["content"]["data"] and self.session: self.session.send(self.pub_socket, self.msg, ident=self.topic) self.msg = None ipykernel-6.29.5/ipykernel/embed.py000066400000000000000000000040441464053401500172530ustar00rootroot00000000000000"""Simple function for embedding an IPython kernel """ # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- import sys from IPython.utils.frame import extract_module_locals from .kernelapp import IPKernelApp # ----------------------------------------------------------------------------- # Code # ----------------------------------------------------------------------------- def embed_kernel(module=None, local_ns=None, **kwargs): """Embed and start an IPython kernel in a given scope. 
Parameters ---------- module : ModuleType, optional The module to load into IPython globals (default: caller) local_ns : dict, optional The namespace to load into IPython user namespace (default: caller) kwargs : dict, optional Further keyword args are relayed to the IPKernelApp constructor, allowing configuration of the Kernel. Will only have an effect on the first embed_kernel call for a given process. """ # get the app if it exists, or set it up if it doesn't if IPKernelApp.initialized(): app = IPKernelApp.instance() else: app = IPKernelApp.instance(**kwargs) app.initialize([]) # Undo unnecessary sys module mangling from init_sys_modules. # This would not be necessary if we could prevent it # in the first place by using a different InteractiveShell # subclass, as in the regular embed case. main = app.kernel.shell._orig_sys_modules_main_mod if main is not None: sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main # load the calling scope if not given (caller_module, caller_locals) = extract_module_locals(1) if module is None: module = caller_module if local_ns is None: local_ns = caller_locals app.kernel.user_module = module app.kernel.user_ns = local_ns app.shell.set_completer_frame() # type:ignore[union-attr] app.start() ipykernel-6.29.5/ipykernel/eventloops.py000066400000000000000000000457161464053401500204100ustar00rootroot00000000000000"""Event loop integration for the ZeroMQ-based kernels.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import os import platform import sys from functools import partial import zmq from packaging.version import Version as V from traitlets.config.application import Application def _use_appnope(): """Should we use appnope for dealing with OS X app nap? Checks if we are on OS X 10.9 or greater. 
""" return sys.platform == "darwin" and V(platform.mac_ver()[0]) >= V("10.9") # mapping of keys to loop functions loop_map = { "inline": None, "nbagg": None, "webagg": None, "notebook": None, "ipympl": None, "widget": None, None: None, } def register_integration(*toolkitnames): """Decorator to register an event loop to integrate with the IPython kernel The decorator takes names to register the event loop as for the %gui magic. You can provide alternative names for the same toolkit. The decorated function should take a single argument, the IPython kernel instance, arrange for the event loop to call ``kernel.do_one_iteration()`` at least every ``kernel._poll_interval`` seconds, and start the event loop. :mod:`ipykernel.eventloops` provides and registers such functions for a few common event loops. """ def decorator(func): """Integration registration decorator.""" for name in toolkitnames: loop_map[name] = func func.exit_hook = lambda kernel: None # noqa: ARG005 def exit_decorator(exit_func): """@func.exit is now a decorator to register a function to be called on exit """ func.exit_hook = exit_func return exit_func func.exit = exit_decorator return func return decorator def _notify_stream_qt(kernel): import operator from functools import lru_cache from IPython.external.qt_for_kernel import QtCore try: from IPython.external.qt_for_kernel import enum_helper except ImportError: @lru_cache(None) def enum_helper(name): return operator.attrgetter(name.rpartition(".")[0])(sys.modules[QtCore.__package__]) def exit_loop(): """fall back to main loop""" kernel._qt_notifier.setEnabled(False) kernel.app.qt_event_loop.quit() def process_stream_events(): """fall back to main loop when there's a socket event""" # call flush to ensure that the stream doesn't lose events # due to our consuming of the edge-triggered FD # flush returns the number of events consumed. 
# if there were any, wake it up if kernel.shell_stream.flush(limit=1): exit_loop() if not hasattr(kernel, "_qt_notifier"): fd = kernel.shell_stream.getsockopt(zmq.FD) kernel._qt_notifier = QtCore.QSocketNotifier( fd, enum_helper("QtCore.QSocketNotifier.Type").Read, kernel.app.qt_event_loop ) kernel._qt_notifier.activated.connect(process_stream_events) else: kernel._qt_notifier.setEnabled(True) # allow for scheduling exits from the loop in case a timeout needs to # be set from the kernel level def _schedule_exit(delay): """schedule fall back to main loop in [delay] seconds""" # The signatures of QtCore.QTimer.singleShot are inconsistent between PySide and PyQt # if setting the TimerType, so we create a timer explicitly and store it # to avoid a memory leak. # PreciseTimer is needed so we exit after _at least_ the specified delay, not within 5% of it if not hasattr(kernel, "_qt_timer"): kernel._qt_timer = QtCore.QTimer(kernel.app) kernel._qt_timer.setSingleShot(True) kernel._qt_timer.setTimerType(enum_helper("QtCore.Qt.TimerType").PreciseTimer) kernel._qt_timer.timeout.connect(exit_loop) kernel._qt_timer.start(int(1000 * delay)) loop_qt._schedule_exit = _schedule_exit # there may already be unprocessed events waiting. # these events will not wake zmq's edge-triggered FD # since edge-triggered notification only occurs on new i/o activity. # process all the waiting events immediately # so we start in a clean state ensuring that any new i/o events will notify. # schedule first call on the eventloop as soon as it's running, # so we don't block here processing events QtCore.QTimer.singleShot(0, process_stream_events) @register_integration("qt", "qt5", "qt6") def loop_qt(kernel): """Event loop for all supported versions of Qt.""" _notify_stream_qt(kernel) # install hook to stop event loop. # Start the event loop. kernel.app._in_event_loop = True # `exec` blocks until there's ZMQ activity. 
el = kernel.app.qt_event_loop # for brevity el.exec() if hasattr(el, "exec") else el.exec_() kernel.app._in_event_loop = False # NOTE: To be removed in version 7 loop_qt5 = loop_qt # exit and watch are the same for qt 4 and 5 @loop_qt.exit def loop_qt_exit(kernel): kernel.app.exit() def _loop_wx(app): """Inner-loop for running the Wx eventloop Pulled from guisupport.start_event_loop in IPython < 5.2, since IPython 5.2 only checks `get_ipython().active_eventloop` is defined, rather than if the eventloop is actually running. """ app._in_event_loop = True app.MainLoop() app._in_event_loop = False @register_integration("wx") def loop_wx(kernel): """Start a kernel with wx event loop support.""" import wx # Wx uses milliseconds poll_interval = int(1000 * kernel._poll_interval) def wake(): """wake from wx""" if kernel.shell_stream.flush(limit=1): kernel.app.ExitMainLoop() return # We have to put the wx.Timer in a wx.Frame for it to fire properly. # We make the Frame hidden when we create it in the main app below. class TimerFrame(wx.Frame): # type:ignore[misc] def __init__(self, func): wx.Frame.__init__(self, None, -1) self.timer = wx.Timer(self) # Units for the timer are in milliseconds self.timer.Start(poll_interval) self.Bind(wx.EVT_TIMER, self.on_timer) self.func = func def on_timer(self, event): self.func() # We need a custom wx.App to create our Frame subclass that has the # wx.Timer to defer back to the tornado event loop. class IPWxApp(wx.App): # type:ignore[misc] def OnInit(self): self.frame = TimerFrame(wake) self.frame.Show(False) return True # The redirect=False here makes sure that wx doesn't replace # sys.stdout/stderr with its own classes. if not (getattr(kernel, "app", None) and isinstance(kernel.app, wx.App)): kernel.app = IPWxApp(redirect=False) # The import of wx on Linux sets the handler for signal.SIGINT # to 0. This is a bug in wx or gtk. We fix by just setting it # back to the Python default. 
import signal if not callable(signal.getsignal(signal.SIGINT)): signal.signal(signal.SIGINT, signal.default_int_handler) _loop_wx(kernel.app) @loop_wx.exit def loop_wx_exit(kernel): """Exit the wx loop.""" import wx wx.Exit() @register_integration("tk") def loop_tk(kernel): """Start a kernel with the Tk event loop.""" from tkinter import READABLE, Tk app = Tk() # Capability detection: # per https://docs.python.org/3/library/tkinter.html#file-handlers # file handlers are not available on Windows if hasattr(app, "createfilehandler"): # A basic wrapper for structural similarity with the Windows version class BasicAppWrapper: def __init__(self, app): self.app = app self.app.withdraw() def exit_loop(): """fall back to main loop""" app.tk.deletefilehandler(kernel.shell_stream.getsockopt(zmq.FD)) app.quit() app.destroy() del kernel.app_wrapper def process_stream_events(*a, **kw): """fall back to main loop when there's a socket event""" if kernel.shell_stream.flush(limit=1): exit_loop() # allow for scheduling exits from the loop in case a timeout needs to # be set from the kernel level def _schedule_exit(delay): """schedule fall back to main loop in [delay] seconds""" app.after(int(1000 * delay), exit_loop) loop_tk._schedule_exit = _schedule_exit # For Tkinter, we create a Tk object and call its withdraw method. 
kernel.app_wrapper = BasicAppWrapper(app) app.tk.createfilehandler( kernel.shell_stream.getsockopt(zmq.FD), READABLE, process_stream_events ) # schedule initial call after start app.after(0, process_stream_events) app.mainloop() else: import asyncio import nest_asyncio nest_asyncio.apply() doi = kernel.do_one_iteration # Tk uses milliseconds poll_interval = int(1000 * kernel._poll_interval) class TimedAppWrapper: def __init__(self, app, func): self.app = app self.app.withdraw() self.func = func def on_timer(self): loop = asyncio.get_event_loop() try: loop.run_until_complete(self.func()) except Exception: kernel.log.exception("Error in message handler") self.app.after(poll_interval, self.on_timer) def start(self): self.on_timer() # Call it once to get things going. self.app.mainloop() kernel.app_wrapper = TimedAppWrapper(app, doi) kernel.app_wrapper.start() @loop_tk.exit def loop_tk_exit(kernel): """Exit the tk loop.""" try: kernel.app_wrapper.app.destroy() del kernel.app_wrapper except (RuntimeError, AttributeError): pass @register_integration("gtk") def loop_gtk(kernel): """Start the kernel, coordinating with the GTK event loop""" from .gui.gtkembed import GTKEmbed gtk_kernel = GTKEmbed(kernel) gtk_kernel.start() kernel._gtk = gtk_kernel @loop_gtk.exit def loop_gtk_exit(kernel): """Exit the gtk loop.""" kernel._gtk.stop() @register_integration("gtk3") def loop_gtk3(kernel): """Start the kernel, coordinating with the GTK event loop""" from .gui.gtk3embed import GTKEmbed gtk_kernel = GTKEmbed(kernel) gtk_kernel.start() kernel._gtk = gtk_kernel @loop_gtk3.exit def loop_gtk3_exit(kernel): """Exit the gtk3 loop.""" kernel._gtk.stop() @register_integration("osx") def loop_cocoa(kernel): """Start the kernel, coordinating with the Cocoa CFRunLoop event loop via the matplotlib MacOSX backend. 
""" from ._eventloop_macos import mainloop, stop real_excepthook = sys.excepthook def handle_int(etype, value, tb): """don't let KeyboardInterrupts look like crashes""" # wake the eventloop when we get a signal stop() if etype is KeyboardInterrupt: print("KeyboardInterrupt caught in CFRunLoop", file=sys.__stdout__) else: real_excepthook(etype, value, tb) while not kernel.shell.exit_now: try: # double nested try/except, to properly catch KeyboardInterrupt # due to pyzmq Issue #130 try: # don't let interrupts during mainloop invoke crash_handler: sys.excepthook = handle_int mainloop(kernel._poll_interval) if kernel.shell_stream.flush(limit=1): # events to process, return control to kernel return except BaseException: raise except KeyboardInterrupt: # Ctrl-C shouldn't crash the kernel print("KeyboardInterrupt caught in kernel", file=sys.__stdout__) finally: # ensure excepthook is restored sys.excepthook = real_excepthook @loop_cocoa.exit def loop_cocoa_exit(kernel): """Exit the cocoa loop.""" from ._eventloop_macos import stop stop() @register_integration("asyncio") def loop_asyncio(kernel): """Start a kernel with asyncio event loop support.""" import asyncio loop = asyncio.get_event_loop() # loop is already running (e.g. 
tornado 5), nothing left to do if loop.is_running(): return if loop.is_closed(): # main loop is closed, create a new one loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop._should_close = False # type:ignore[attr-defined] # pause eventloop when there's an event on a zmq socket def process_stream_events(stream): """fall back to main loop when there's a socket event""" if stream.flush(limit=1): loop.stop() notifier = partial(process_stream_events, kernel.shell_stream) loop.add_reader(kernel.shell_stream.getsockopt(zmq.FD), notifier) loop.call_soon(notifier) while True: error = None try: loop.run_forever() except KeyboardInterrupt: continue except Exception as e: error = e if loop._should_close: # type:ignore[attr-defined] loop.close() if error is not None: raise error break @loop_asyncio.exit def loop_asyncio_exit(kernel): """Exit hook for asyncio""" import asyncio loop = asyncio.get_event_loop() async def close_loop(): if hasattr(loop, "shutdown_asyncgens"): yield loop.shutdown_asyncgens() loop._should_close = True # type:ignore[attr-defined] loop.stop() if loop.is_running(): close_loop() elif not loop.is_closed(): loop.run_until_complete(close_loop) # type:ignore[arg-type] loop.close() def set_qt_api_env_from_gui(gui): """ Sets the QT_API environment variable by trying to import PyQtx or PySidex. The user can generically request `qt` or a specific Qt version, e.g. `qt6`. For a generic Qt request, we let the mechanism in IPython choose the best available version by leaving the `QT_API` environment variable blank. For specific versions, we check to see whether the PyQt or PySide implementations are present and set `QT_API` accordingly to indicate to IPython which version we want. If neither implementation is present, we leave the environment variable set so IPython will generate a helpful error message. Notes ----- - If the environment variable is already set, it will be used unchanged, regardless of what the user requested. 
""" qt_api = os.environ.get("QT_API", None) from IPython.external.qt_loaders import ( QT_API_PYQT5, QT_API_PYQT6, QT_API_PYSIDE2, QT_API_PYSIDE6, loaded_api, ) loaded = loaded_api() qt_env2gui = { QT_API_PYSIDE2: "qt5", QT_API_PYQT5: "qt5", QT_API_PYSIDE6: "qt6", QT_API_PYQT6: "qt6", } if loaded is not None and gui != "qt" and qt_env2gui[loaded] != gui: print(f"Cannot switch Qt versions for this session; you must use {qt_env2gui[loaded]}.") return if qt_api is not None and gui != "qt": if qt_env2gui[qt_api] != gui: print( f'Request for "{gui}" will be ignored because `QT_API` ' f'environment variable is set to "{qt_api}"' ) return else: if gui == "qt5": try: import PyQt5 # noqa: F401 os.environ["QT_API"] = "pyqt5" except ImportError: try: import PySide2 # noqa: F401 os.environ["QT_API"] = "pyside2" except ImportError: os.environ["QT_API"] = "pyqt5" elif gui == "qt6": try: import PyQt6 # noqa: F401 os.environ["QT_API"] = "pyqt6" except ImportError: try: import PySide6 # noqa: F401 os.environ["QT_API"] = "pyside6" except ImportError: os.environ["QT_API"] = "pyqt6" elif gui == "qt": # Don't set QT_API; let IPython logic choose the version. if "QT_API" in os.environ: del os.environ["QT_API"] else: print(f'Unrecognized Qt version: {gui}. Should be "qt5", "qt6", or "qt".') return # Do the actual import now that the environment variable is set to make sure it works. try: pass except Exception as e: # Clear the environment variable for the next attempt. if "QT_API" in os.environ: del os.environ["QT_API"] print(f"QT_API couldn't be set due to error {e}") return def make_qt_app_for_kernel(gui, kernel): """Sets the `QT_API` environment variable if it isn't already set.""" if hasattr(kernel, "app"): # Kernel is already running a Qt event loop, so there's no need to # create another app for it. 
return set_qt_api_env_from_gui(gui) # This import is guaranteed to work now: from IPython.external.qt_for_kernel import QtCore from IPython.lib.guisupport import get_app_qt4 kernel.app = get_app_qt4([" "]) kernel.app.qt_event_loop = QtCore.QEventLoop(kernel.app) def enable_gui(gui, kernel=None): """Enable integration with a given GUI""" if gui not in loop_map: e = f"Invalid GUI request {gui!r}, valid ones are:{loop_map.keys()}" raise ValueError(e) if kernel is None: if Application.initialized(): kernel = getattr(Application.instance(), "kernel", None) if kernel is None: msg = ( "You didn't specify a kernel," " and no IPython Application with a kernel appears to be running." ) raise RuntimeError(msg) if gui is None: # User wants to turn off integration; clear any evidence if Qt was the last one. if hasattr(kernel, "app"): delattr(kernel, "app") if hasattr(kernel, "_qt_notifier"): delattr(kernel, "_qt_notifier") if hasattr(kernel, "_qt_timer"): delattr(kernel, "_qt_timer") else: if gui.startswith("qt"): # Prepare the kernel here so any exceptions are displayed in the client. make_qt_app_for_kernel(gui, kernel) loop = loop_map[gui] if ( loop and kernel.eventloop is not None and kernel.eventloop is not loop # type:ignore[unreachable] ): msg = "Cannot activate multiple GUI eventloops" # type:ignore[unreachable] raise RuntimeError(msg) kernel.eventloop = loop # We set `eventloop`; the function the user chose is executed in `Kernel.enter_eventloop`, thus # any exceptions raised during the event loop will not be shown in the client. ipykernel-6.29.5/ipykernel/gui/000077500000000000000000000000001464053401500164075ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/gui/__init__.py000066400000000000000000000011021464053401500205120ustar00rootroot00000000000000"""GUI support for the IPython ZeroMQ kernel. This package contains the various toolkit-dependent utilities we use to enable coordination between the IPython kernel and the event loops of the various GUI toolkits. 
""" # ----------------------------------------------------------------------------- # Copyright (C) 2010-2011 The IPython Development Team. # # Distributed under the terms of the BSD License. # # The full license is in the file LICENSE, distributed as part of this # software. # ----------------------------------------------------------------------------- ipykernel-6.29.5/ipykernel/gui/gtk3embed.py000066400000000000000000000065511464053401500206350ustar00rootroot00000000000000"""GUI support for the IPython ZeroMQ kernel - GTK toolkit support. """ # ----------------------------------------------------------------------------- # Copyright (C) 2010-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file LICENSE, distributed as part of this software. # ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- # stdlib import sys import warnings # Third-party import gi gi.require_version("Gdk", "3.0") gi.require_version("Gtk", "3.0") from gi.repository import GObject, Gtk # noqa: E402 warnings.warn( "The Gtk3 event loop for ipykernel is deprecated", category=DeprecationWarning, stacklevel=2 ) # ----------------------------------------------------------------------------- # Classes and functions # ----------------------------------------------------------------------------- class GTKEmbed: """A class to embed a kernel into the GTK main event loop.""" def __init__(self, kernel): """Initialize the embed.""" self.kernel = kernel # These two will later store the real gtk functions when we hijack them self.gtk_main = None self.gtk_main_quit = None def start(self): """Starts the GTK main event loop and sets our kernel startup routine.""" # Register our function to initiate the kernel and start gtk 
GObject.idle_add(self._wire_kernel) Gtk.main() def _wire_kernel(self): """Initializes the kernel inside GTK. This is meant to run only once at startup, so it does its job and returns False to ensure it doesn't get run again by GTK. """ self.gtk_main, self.gtk_main_quit = self._hijack_gtk() GObject.timeout_add(int(1000 * self.kernel._poll_interval), self.iterate_kernel) return False def iterate_kernel(self): """Run one iteration of the kernel and return True. GTK timer functions must return True to be called again, so we make the call to :meth:`do_one_iteration` and then return True for GTK. """ self.kernel.do_one_iteration() return True def stop(self): """Stop the embed.""" # FIXME: this one isn't getting called because we have no reliable # kernel shutdown. We need to fix that: once the kernel has a # shutdown mechanism, it can call this. if self.gtk_main_quit: self.gtk_main_quit() sys.exit() def _hijack_gtk(self): """Hijack a few key functions in GTK for IPython integration. Modifies pyGTK's main and main_quit with a dummy so user code does not block IPython. This allows us to use %run to run arbitrary pygtk scripts from a long-lived IPython session, and when they attempt to start or stop Returns ------- The original functions that have been hijacked: - Gtk.main - Gtk.main_quit """ def dummy(*args, **kw): """No-op.""" # save and trap main and main_quit from gtk orig_main, Gtk.main = Gtk.main, dummy orig_main_quit, Gtk.main_quit = Gtk.main_quit, dummy return orig_main, orig_main_quit ipykernel-6.29.5/ipykernel/gui/gtkembed.py000066400000000000000000000064011464053401500205440ustar00rootroot00000000000000"""GUI support for the IPython ZeroMQ kernel - GTK toolkit support. """ # ----------------------------------------------------------------------------- # Copyright (C) 2010-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file LICENSE, distributed as part of this software. 
# ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- # stdlib import sys import warnings # Third-party import gobject import gtk warnings.warn( "The Gtk3 event loop for ipykernel is deprecated", category=DeprecationWarning, stacklevel=2 ) # ----------------------------------------------------------------------------- # Classes and functions # ----------------------------------------------------------------------------- class GTKEmbed: """A class to embed a kernel into the GTK main event loop.""" def __init__(self, kernel): """Initialize the embed.""" self.kernel = kernel # These two will later store the real gtk functions when we hijack them self.gtk_main = None self.gtk_main_quit = None def start(self): """Starts the GTK main event loop and sets our kernel startup routine.""" # Register our function to initiate the kernel and start gtk gobject.idle_add(self._wire_kernel) gtk.main() def _wire_kernel(self): """Initializes the kernel inside GTK. This is meant to run only once at startup, so it does its job and returns False to ensure it doesn't get run again by GTK. """ self.gtk_main, self.gtk_main_quit = self._hijack_gtk() gobject.timeout_add(int(1000 * self.kernel._poll_interval), self.iterate_kernel) return False def iterate_kernel(self): """Run one iteration of the kernel and return True. GTK timer functions must return True to be called again, so we make the call to :meth:`do_one_iteration` and then return True for GTK. """ self.kernel.do_one_iteration() return True def stop(self): """Stop the embed.""" # FIXME: this one isn't getting called because we have no reliable # kernel shutdown. We need to fix that: once the kernel has a # shutdown mechanism, it can call this. 
if self.gtk_main_quit: self.gtk_main_quit() sys.exit() def _hijack_gtk(self): """Hijack a few key functions in GTK for IPython integration. Modifies pyGTK's main and main_quit with a dummy so user code does not block IPython. This allows us to use %run to run arbitrary pygtk scripts from a long-lived IPython session, and when they attempt to start or stop Returns ------- The original functions that have been hijacked: - gtk.main - gtk.main_quit """ def dummy(*args, **kw): """No-op.""" # save and trap main and main_quit from gtk orig_main, gtk.main = gtk.main, dummy orig_main_quit, gtk.main_quit = gtk.main_quit, dummy return orig_main, orig_main_quit ipykernel-6.29.5/ipykernel/heartbeat.py000066400000000000000000000106241464053401500201370ustar00rootroot00000000000000"""The client and server for a basic ping-pong style heartbeat. """ # ----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file LICENSE, distributed as part of this software. 
# ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- import errno import socket from pathlib import Path from threading import Thread import zmq from jupyter_client.localinterfaces import localhost # ----------------------------------------------------------------------------- # Code # ----------------------------------------------------------------------------- class Heartbeat(Thread): """A simple ping-pong style heartbeat that runs in a thread.""" def __init__(self, context, addr=None): """Initialize the heartbeat thread.""" if addr is None: addr = ("tcp", localhost(), 0) Thread.__init__(self, name="Heartbeat") self.context = context self.transport, self.ip, self.port = addr self.original_port = self.port if self.original_port == 0: self.pick_port() self.addr = (self.ip, self.port) self.daemon = True self.pydev_do_not_trace = True self.is_pydev_daemon_thread = True self.name = "Heartbeat" def pick_port(self): """Pick a port for the heartbeat.""" if self.transport == "tcp": s = socket.socket() # '*' means all interfaces to 0MQ, which is '' to socket.socket s.bind(("" if self.ip == "*" else self.ip, 0)) self.port = s.getsockname()[1] s.close() elif self.transport == "ipc": self.port = 1 while Path(f"{self.ip}-{self.port}").exists(): self.port = self.port + 1 else: raise ValueError("Unrecognized zmq transport: %s" % self.transport) return self.port def _try_bind_socket(self): c = ":" if self.transport == "tcp" else "-" return self.socket.bind(f"{self.transport}://{self.ip}" + c + str(self.port)) def _bind_socket(self): try: win_in_use = errno.WSAEADDRINUSE # type:ignore[attr-defined] except AttributeError: win_in_use = None # Try up to 100 times to bind a port when in conflict to avoid # infinite attempts in bad setups max_attempts = 1 if self.original_port else 100 for 
attempt in range(max_attempts): try: self._try_bind_socket() except zmq.ZMQError as ze: if attempt == max_attempts - 1: raise # Raise if we have any error not related to socket binding if ze.errno != errno.EADDRINUSE and ze.errno != win_in_use: raise # Raise if we have any error not related to socket binding if self.original_port == 0: self.pick_port() else: raise else: return def run(self): """Run the heartbeat thread.""" self.name = "Heartbeat" self.socket = self.context.socket(zmq.ROUTER) self.socket.linger = 1000 try: self._bind_socket() except Exception: self.socket.close() raise while True: try: zmq.device(zmq.QUEUE, self.socket, self.socket) except zmq.ZMQError as e: if e.errno == errno.EINTR: # signal interrupt, resume heartbeat continue if e.errno == zmq.ETERM: # context terminated, close socket and exit try: self.socket.close() except zmq.ZMQError: # suppress further errors during cleanup # this shouldn't happen, though pass break if e.errno == zmq.ENOTSOCK: # socket closed elsewhere, exit break raise else: break ipykernel-6.29.5/ipykernel/inprocess/000077500000000000000000000000001464053401500176305ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/inprocess/__init__.py000066400000000000000000000003051464053401500217370ustar00rootroot00000000000000from .blocking import BlockingInProcessKernelClient from .channels import InProcessChannel, InProcessHBChannel from .client import InProcessKernelClient from .manager import InProcessKernelManager ipykernel-6.29.5/ipykernel/inprocess/blocking.py000066400000000000000000000071511464053401500217760ustar00rootroot00000000000000""" Implements a fully blocking kernel client. Useful for test suites and blocking terminal interfaces. """ import sys # ----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file LICENSE, distributed as part of this software. 
# ----------------------------------------------------------------------------- from queue import Empty, Queue # IPython imports from traitlets import Type # Local imports from .channels import InProcessChannel from .client import InProcessKernelClient class BlockingInProcessChannel(InProcessChannel): """A blocking in-process channel.""" def __init__(self, *args, **kwds): """Initialize the channel.""" super().__init__(*args, **kwds) self._in_queue: Queue[object] = Queue() def call_handlers(self, msg): """Call the handlers for a message.""" self._in_queue.put(msg) def get_msg(self, block=True, timeout=None): """Gets a message if there is one that is ready.""" if timeout is None: # Queue.get(timeout=None) has stupid uninteruptible # behavior, so wait for a week instead timeout = 604800 return self._in_queue.get(block, timeout) def get_msgs(self): """Get all messages that are currently ready.""" msgs = [] while True: try: msgs.append(self.get_msg(block=False)) except Empty: break return msgs def msg_ready(self): """Is there a message that has been received?""" return not self._in_queue.empty() class BlockingInProcessStdInChannel(BlockingInProcessChannel): """A blocking in-process stdin channel.""" def call_handlers(self, msg): """Overridden for the in-process channel. This methods simply calls raw_input directly. """ msg_type = msg["header"]["msg_type"] if msg_type == "input_request": _raw_input = self.client.kernel._sys_raw_input prompt = msg["content"]["prompt"] print(prompt, end="", file=sys.__stdout__) sys.__stdout__.flush() self.client.input(_raw_input()) class BlockingInProcessKernelClient(InProcessKernelClient): """A blocking in-process kernel client.""" # The classes to use for the various channels. 
shell_channel_class = Type(BlockingInProcessChannel) # type:ignore[arg-type] iopub_channel_class = Type(BlockingInProcessChannel) # type:ignore[arg-type] stdin_channel_class = Type(BlockingInProcessStdInChannel) # type:ignore[arg-type] def wait_for_ready(self): """Wait for kernel info reply on shell channel.""" while True: self.kernel_info() try: msg = self.shell_channel.get_msg(block=True, timeout=1) except Empty: pass else: if msg["msg_type"] == "kernel_info_reply": # Checking that IOPub is connected. If it is not connected, start over. try: self.iopub_channel.get_msg(block=True, timeout=0.2) except Empty: pass else: self._handle_kernel_info_reply(msg) break # Flush IOPub channel while True: try: msg = self.iopub_channel.get_msg(block=True, timeout=0.2) print(msg["msg_type"]) except Empty: break ipykernel-6.29.5/ipykernel/inprocess/channels.py000066400000000000000000000057121464053401500220020ustar00rootroot00000000000000"""A kernel client for in-process kernels.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from typing import List from jupyter_client.channelsabc import HBChannelABC # ----------------------------------------------------------------------------- # Channel classes # ----------------------------------------------------------------------------- class InProcessChannel: """Base class for in-process channels.""" proxy_methods: List[object] = [] def __init__(self, client=None): """Initialize the channel.""" super().__init__() self.client = client self._is_alive = False def is_alive(self): """Test if the channel is alive.""" return self._is_alive def start(self): """Start the channel.""" self._is_alive = True def stop(self): """Stop the channel.""" self._is_alive = False def call_handlers(self, msg): """This method is called in the main thread when a message arrives. Subclasses should override this method to handle incoming messages. """ msg = "call_handlers must be defined in a subclass." 
raise NotImplementedError(msg) def flush(self, timeout=1.0): """Flush the channel.""" def call_handlers_later(self, *args, **kwds): """Call the message handlers later. The default implementation just calls the handlers immediately, but this method exists so that GUI toolkits can defer calling the handlers until after the event loop has run, as expected by GUI frontends. """ self.call_handlers(*args, **kwds) def process_events(self): """Process any pending GUI events. This method will be never be called from a frontend without an event loop (e.g., a terminal frontend). """ raise NotImplementedError class InProcessHBChannel: """A dummy heartbeat channel interface for in-process kernels. Normally we use the heartbeat to check that the kernel process is alive. When the kernel is in-process, that doesn't make sense, but clients still expect this interface. """ time_to_dead = 3.0 def __init__(self, client=None): """Initialize the channel.""" super().__init__() self.client = client self._is_alive = False self._pause = True def is_alive(self): """Test if the channel is alive.""" return self._is_alive def start(self): """Start the channel.""" self._is_alive = True def stop(self): """Stop the channel.""" self._is_alive = False def pause(self): """Pause the channel.""" self._pause = True def unpause(self): """Unpause the channel.""" self._pause = False def is_beating(self): """Test if the channel is beating.""" return not self._pause HBChannelABC.register(InProcessHBChannel) ipykernel-6.29.5/ipykernel/inprocess/client.py000066400000000000000000000202431464053401500214610ustar00rootroot00000000000000"""A client for in-process kernels.""" # ----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file LICENSE, distributed as part of this software. 
# ----------------------------------------------------------------------------- # ----------------------------------------------------------------------------- # Imports # ----------------------------------------------------------------------------- import asyncio from jupyter_client.client import KernelClient from jupyter_client.clientabc import KernelClientABC from jupyter_core.utils import run_sync # IPython imports from traitlets import Instance, Type, default # Local imports from .channels import InProcessChannel, InProcessHBChannel # ----------------------------------------------------------------------------- # Main kernel Client class # ----------------------------------------------------------------------------- class InProcessKernelClient(KernelClient): """A client for an in-process kernel. This class implements the interface of `jupyter_client.clientabc.KernelClientABC` and allows (asynchronous) frontends to be used seamlessly with an in-process kernel. See `jupyter_client.client.KernelClient` for docstrings. """ # The classes to use for the various channels. 
shell_channel_class = Type(InProcessChannel) # type:ignore[arg-type] iopub_channel_class = Type(InProcessChannel) # type:ignore[arg-type] stdin_channel_class = Type(InProcessChannel) # type:ignore[arg-type] control_channel_class = Type(InProcessChannel) # type:ignore[arg-type] hb_channel_class = Type(InProcessHBChannel) # type:ignore[arg-type] kernel = Instance("ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True) # -------------------------------------------------------------------------- # Channel management methods # -------------------------------------------------------------------------- @default("blocking_class") def _default_blocking_class(self): from .blocking import BlockingInProcessKernelClient return BlockingInProcessKernelClient def get_connection_info(self): """Get the connection info for the client.""" d = super().get_connection_info() d["kernel"] = self.kernel # type:ignore[assignment] return d def start_channels(self, *args, **kwargs): """Start the channels on the client.""" super().start_channels() if self.kernel: self.kernel.frontends.append(self) @property def shell_channel(self): if self._shell_channel is None: self._shell_channel = self.shell_channel_class(self) # type:ignore[abstract,call-arg] return self._shell_channel @property def iopub_channel(self): if self._iopub_channel is None: self._iopub_channel = self.iopub_channel_class(self) # type:ignore[abstract,call-arg] return self._iopub_channel @property def stdin_channel(self): if self._stdin_channel is None: self._stdin_channel = self.stdin_channel_class(self) # type:ignore[abstract,call-arg] return self._stdin_channel @property def control_channel(self): if self._control_channel is None: self._control_channel = self.control_channel_class(self) # type:ignore[abstract,call-arg] return self._control_channel @property def hb_channel(self): if self._hb_channel is None: self._hb_channel = self.hb_channel_class(self) # type:ignore[abstract,call-arg] return self._hb_channel # Methods 
for sending specific messages # ------------------------------------- def execute( self, code, silent=False, store_history=True, user_expressions=None, allow_stdin=None ): """Execute code on the client.""" if allow_stdin is None: allow_stdin = self.allow_stdin content = dict( code=code, silent=silent, store_history=store_history, user_expressions=user_expressions or {}, allow_stdin=allow_stdin, ) msg = self.session.msg("execute_request", content) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def complete(self, code, cursor_pos=None): """Get code completion.""" if cursor_pos is None: cursor_pos = len(code) content = dict(code=code, cursor_pos=cursor_pos) msg = self.session.msg("complete_request", content) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def inspect(self, code, cursor_pos=None, detail_level=0): """Get code inspection.""" if cursor_pos is None: cursor_pos = len(code) content = dict( code=code, cursor_pos=cursor_pos, detail_level=detail_level, ) msg = self.session.msg("inspect_request", content) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def history(self, raw=True, output=False, hist_access_type="range", **kwds): """Get code history.""" content = dict(raw=raw, output=output, hist_access_type=hist_access_type, **kwds) msg = self.session.msg("history_request", content) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def shutdown(self, restart=False): """Handle shutdown.""" # FIXME: What to do here? 
msg = "Cannot shutdown in-process kernel" raise NotImplementedError(msg) def kernel_info(self): """Request kernel info.""" msg = self.session.msg("kernel_info_request") self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def comm_info(self, target_name=None): """Request a dictionary of valid comms and their targets.""" content = {} if target_name is None else dict(target_name=target_name) msg = self.session.msg("comm_info_request", content) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def input(self, string): """Handle kernel input.""" if self.kernel is None: msg = "Cannot send input reply. No kernel exists." raise RuntimeError(msg) self.kernel.raw_input_str = string def is_complete(self, code): """Handle an is_complete request.""" msg = self.session.msg("is_complete_request", {"code": code}) self._dispatch_to_kernel(msg) return msg["header"]["msg_id"] def _dispatch_to_kernel(self, msg): """Send a message to the kernel and handle a reply.""" kernel = self.kernel if kernel is None: msg = "Cannot send request. No kernel exists." 
raise RuntimeError(msg) stream = kernel.shell_stream self.session.send(stream, msg) msg_parts = stream.recv_multipart() if run_sync is not None: dispatch_shell = run_sync(kernel.dispatch_shell) dispatch_shell(msg_parts) else: loop = asyncio.get_event_loop() # type:ignore[unreachable] loop.run_until_complete(kernel.dispatch_shell(msg_parts)) idents, reply_msg = self.session.recv(stream, copy=False) self.shell_channel.call_handlers_later(reply_msg) def get_shell_msg(self, block=True, timeout=None): """Get a shell message.""" return self.shell_channel.get_msg(block, timeout) def get_iopub_msg(self, block=True, timeout=None): """Get an iopub message.""" return self.iopub_channel.get_msg(block, timeout) def get_stdin_msg(self, block=True, timeout=None): """Get a stdin message.""" return self.stdin_channel.get_msg(block, timeout) def get_control_msg(self, block=True, timeout=None): """Get a control message.""" return self.control_channel.get_msg(block, timeout) # ----------------------------------------------------------------------------- # ABC Registration # ----------------------------------------------------------------------------- KernelClientABC.register(InProcessKernelClient) ipykernel-6.29.5/ipykernel/inprocess/constants.py000066400000000000000000000004571464053401500222240ustar00rootroot00000000000000"""Shared constants. """ # Because inprocess communication is not networked, we can use a common Session # key everywhere. This is not just the empty bytestring to avoid tripping # certain security checks in the rest of Jupyter that assumes that empty keys # are insecure. INPROCESS_KEY = b"inprocess" ipykernel-6.29.5/ipykernel/inprocess/ipkernel.py000066400000000000000000000162471464053401500220250ustar00rootroot00000000000000"""An in-process kernel""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import logging import sys from contextlib import contextmanager from IPython.core.interactiveshell import InteractiveShellABC from traitlets import Any, Enum, Instance, List, Type, default from ipykernel.ipkernel import IPythonKernel from ipykernel.jsonutil import json_clean from ipykernel.zmqshell import ZMQInteractiveShell from ..iostream import BackgroundSocket, IOPubThread, OutStream from .constants import INPROCESS_KEY from .socket import DummySocket # ----------------------------------------------------------------------------- # Main kernel class # ----------------------------------------------------------------------------- class InProcessKernel(IPythonKernel): """An in-process kernel.""" # ------------------------------------------------------------------------- # InProcessKernel interface # ------------------------------------------------------------------------- # The frontends connected to this kernel. frontends = List(Instance("ipykernel.inprocess.client.InProcessKernelClient", allow_none=True)) # The GUI environment that the kernel is running under. This need not be # specified for the normal operation for the kernel, but is required for # IPython's GUI support (including pylab). The default is 'inline' because # it is safe under all GUI toolkits. 
gui = Enum(("tk", "gtk", "wx", "qt", "qt4", "inline"), default_value="inline") raw_input_str = Any() stdout = Any() stderr = Any() # ------------------------------------------------------------------------- # Kernel interface # ------------------------------------------------------------------------- shell_class = Type(allow_none=True) # type:ignore[assignment] _underlying_iopub_socket = Instance(DummySocket, ()) iopub_thread: IOPubThread = Instance(IOPubThread) # type:ignore[assignment] shell_stream = Instance(DummySocket, ()) # type:ignore[arg-type] @default("iopub_thread") def _default_iopub_thread(self): thread = IOPubThread(self._underlying_iopub_socket) thread.start() return thread iopub_socket: BackgroundSocket = Instance(BackgroundSocket) # type:ignore[assignment] @default("iopub_socket") def _default_iopub_socket(self): return self.iopub_thread.background_socket stdin_socket = Instance(DummySocket, ()) # type:ignore[assignment] def __init__(self, **traits): """Initialize the kernel.""" super().__init__(**traits) self._underlying_iopub_socket.observe(self._io_dispatch, names=["message_sent"]) if self.shell: self.shell.kernel = self async def execute_request(self, stream, ident, parent): """Override for temporary IO redirection.""" with self._redirected_io(): await super().execute_request(stream, ident, parent) def start(self): """Override registration of dispatchers for streams.""" if self.shell: self.shell.exit_now = False def _abort_queues(self): """The in-process kernel doesn't abort requests.""" async def _flush_control_queue(self): """No need to flush control queues for in-process""" def _input_request(self, prompt, ident, parent, password=False): # Flush output before making the request. self.raw_input_str = None sys.stderr.flush() sys.stdout.flush() # Send the input request. 
content = json_clean(dict(prompt=prompt, password=password)) assert self.session is not None msg = self.session.msg("input_request", content, parent) for frontend in self.frontends: assert frontend is not None if frontend.session.session == parent["header"]["session"]: frontend.stdin_channel.call_handlers(msg) break else: logging.error("No frontend found for raw_input request") return "" # Await a response. while self.raw_input_str is None: frontend.stdin_channel.process_events() return self.raw_input_str # type:ignore[unreachable] # ------------------------------------------------------------------------- # Protected interface # ------------------------------------------------------------------------- @contextmanager def _redirected_io(self): """Temporarily redirect IO to the kernel.""" sys_stdout, sys_stderr = sys.stdout, sys.stderr try: sys.stdout, sys.stderr = self.stdout, self.stderr yield finally: sys.stdout, sys.stderr = sys_stdout, sys_stderr # ------ Trait change handlers -------------------------------------------- def _io_dispatch(self, change): """Called when a message is sent to the IO socket.""" assert self.iopub_socket.io_thread is not None assert self.session is not None ident, msg = self.session.recv(self.iopub_socket.io_thread.socket, copy=False) for frontend in self.frontends: assert frontend is not None frontend.iopub_channel.call_handlers(msg) # ------ Trait initializers ----------------------------------------------- @default("log") def _default_log(self): return logging.getLogger(__name__) @default("session") def _default_session(self): from jupyter_client.session import Session return Session(parent=self, key=INPROCESS_KEY) @default("shell_class") def _default_shell_class(self): return InProcessInteractiveShell @default("stdout") def _default_stdout(self): return OutStream(self.session, self.iopub_thread, "stdout", watchfd=False) @default("stderr") def _default_stderr(self): return OutStream(self.session, self.iopub_thread, "stderr", 
watchfd=False) # ----------------------------------------------------------------------------- # Interactive shell subclass # ----------------------------------------------------------------------------- class InProcessInteractiveShell(ZMQInteractiveShell): """An in-process interactive shell.""" kernel: InProcessKernel = Instance( "ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True ) # type:ignore[assignment] # ------------------------------------------------------------------------- # InteractiveShell interface # ------------------------------------------------------------------------- def enable_gui(self, gui=None): """Enable GUI integration for the kernel.""" if not gui: gui = self.kernel.gui self.active_eventloop = gui def enable_matplotlib(self, gui=None): """Enable matplotlib integration for the kernel.""" if not gui: gui = self.kernel.gui return super().enable_matplotlib(gui) def enable_pylab(self, gui=None, import_all=True, welcome_message=False): """Activate pylab support at runtime.""" if not gui: gui = self.kernel.gui return super().enable_pylab(gui, import_all, welcome_message) InteractiveShellABC.register(InProcessInteractiveShell) ipykernel-6.29.5/ipykernel/inprocess/manager.py000066400000000000000000000057561464053401500216310ustar00rootroot00000000000000"""A kernel manager for in-process kernels.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from jupyter_client.manager import KernelManager from jupyter_client.managerabc import KernelManagerABC from jupyter_client.session import Session from traitlets import DottedObjectName, Instance, default from .constants import INPROCESS_KEY class InProcessKernelManager(KernelManager): """A manager for an in-process kernel. This class implements the interface of `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows (asynchronous) frontends to be used seamlessly with an in-process kernel. 
See `jupyter_client.kernelmanager.KernelManager` for docstrings. """ # The kernel process with which the KernelManager is communicating. kernel = Instance("ipykernel.inprocess.ipkernel.InProcessKernel", allow_none=True) # the client class for KM.client() shortcut client_class = DottedObjectName("ipykernel.inprocess.BlockingInProcessKernelClient") @default("blocking_class") def _default_blocking_class(self): from .blocking import BlockingInProcessKernelClient return BlockingInProcessKernelClient @default("session") def _default_session(self): # don't sign in-process messages return Session(key=INPROCESS_KEY, parent=self) # -------------------------------------------------------------------------- # Kernel management methods # -------------------------------------------------------------------------- def start_kernel(self, **kwds): """Start the kernel.""" from ipykernel.inprocess.ipkernel import InProcessKernel self.kernel = InProcessKernel(parent=self, session=self.session) def shutdown_kernel(self): """Shutdown the kernel.""" if self.kernel: self.kernel.iopub_thread.stop() self._kill_kernel() def restart_kernel(self, now=False, **kwds): """Restart the kernel.""" self.shutdown_kernel() self.start_kernel(**kwds) @property def has_kernel(self): return self.kernel is not None def _kill_kernel(self): self.kernel = None def interrupt_kernel(self): """Interrupt the kernel.""" msg = "Cannot interrupt in-process kernel." raise NotImplementedError(msg) def signal_kernel(self, signum): """Send a signal to the kernel.""" msg = "Cannot signal in-process kernel." 
raise NotImplementedError(msg) def is_alive(self): """Test if the kernel is alive.""" return self.kernel is not None def client(self, **kwargs): """Get a client for the kernel.""" kwargs["kernel"] = self.kernel return super().client(**kwargs) # ----------------------------------------------------------------------------- # ABC Registration # ----------------------------------------------------------------------------- KernelManagerABC.register(InProcessKernelManager) ipykernel-6.29.5/ipykernel/inprocess/socket.py000066400000000000000000000025461464053401500215010ustar00rootroot00000000000000""" Defines a dummy socket implementing (part of) the zmq.Socket interface. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from queue import Queue import zmq from traitlets import HasTraits, Instance, Int # ----------------------------------------------------------------------------- # Dummy socket class # ----------------------------------------------------------------------------- class DummySocket(HasTraits): """A dummy socket implementing (part of) the zmq.Socket interface.""" queue = Instance(Queue, ()) message_sent = Int(0) # Should be an Event context = Instance(zmq.Context) def _context_default(self): return zmq.Context() # ------------------------------------------------------------------------- # Socket interface # ------------------------------------------------------------------------- def recv_multipart(self, flags=0, copy=True, track=False): """Recv a multipart message.""" return self.queue.get_nowait() def send_multipart(self, msg_parts, flags=0, copy=True, track=False): """Send a multipart message.""" msg_parts = list(map(zmq.Message, msg_parts)) self.queue.put_nowait(msg_parts) self.message_sent += 1 def flush(self, timeout=1.0): """no-op to comply with stream API""" ipykernel-6.29.5/ipykernel/iostream.py000066400000000000000000000647011464053401500200300ustar00rootroot00000000000000"""Wrappers for 
forwarding stdout/stderr over zmq""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import asyncio import atexit import contextvars import io import os import sys import threading import traceback import warnings from binascii import b2a_hex from collections import defaultdict, deque from io import StringIO, TextIOBase from threading import local from typing import Any, Callable, Deque, Dict, Optional import zmq from jupyter_client.session import extract_header from tornado.ioloop import IOLoop from zmq.eventloop.zmqstream import ZMQStream # ----------------------------------------------------------------------------- # Globals # ----------------------------------------------------------------------------- MASTER = 0 CHILD = 1 PIPE_BUFFER_SIZE = 1000 # ----------------------------------------------------------------------------- # IO classes # ----------------------------------------------------------------------------- class IOPubThread: """An object for sending IOPub messages in a background thread Prevents a blocking main thread from delaying output from threads. IOPubThread(pub_socket).background_socket is a Socket-API-providing object whose IO is always run in a thread. """ def __init__(self, socket, pipe=False): """Create IOPub thread Parameters ---------- socket : zmq.PUB Socket the socket on which messages will be sent. pipe : bool Whether this process should listen for IOPub messages piped from subprocesses. 
""" self.socket = socket self._stopped = False self.background_socket = BackgroundSocket(self) self._master_pid = os.getpid() self._pipe_flag = pipe self.io_loop = IOLoop(make_current=False) if pipe: self._setup_pipe_in() self._local = threading.local() self._events: Deque[Callable[..., Any]] = deque() self._event_pipes: Dict[threading.Thread, Any] = {} self._event_pipe_gc_lock: threading.Lock = threading.Lock() self._event_pipe_gc_seconds: float = 10 self._event_pipe_gc_task: Optional[asyncio.Task[Any]] = None self._setup_event_pipe() self.thread = threading.Thread(target=self._thread_main, name="IOPub") self.thread.daemon = True self.thread.pydev_do_not_trace = True # type:ignore[attr-defined] self.thread.is_pydev_daemon_thread = True # type:ignore[attr-defined] self.thread.name = "IOPub" def _thread_main(self): """The inner loop that's actually run in a thread""" def _start_event_gc(): self._event_pipe_gc_task = asyncio.ensure_future(self._run_event_pipe_gc()) self.io_loop.run_sync(_start_event_gc) if not self._stopped: # avoid race if stop called before start thread gets here # probably only comes up in tests self.io_loop.start() if self._event_pipe_gc_task is not None: # cancel gc task to avoid pending task warnings async def _cancel(): self._event_pipe_gc_task.cancel() # type:ignore[union-attr] if not self._stopped: self.io_loop.run_sync(_cancel) else: self._event_pipe_gc_task.cancel() self.io_loop.close(all_fds=True) def _setup_event_pipe(self): """Create the PULL socket listening for events that should fire in this thread.""" ctx = self.socket.context pipe_in = ctx.socket(zmq.PULL) pipe_in.linger = 0 _uuid = b2a_hex(os.urandom(16)).decode("ascii") iface = self._event_interface = "inproc://%s" % _uuid pipe_in.bind(iface) self._event_puller = ZMQStream(pipe_in, self.io_loop) self._event_puller.on_recv(self._handle_event) async def _run_event_pipe_gc(self): """Task to run event pipe gc continuously""" while True: await 
asyncio.sleep(self._event_pipe_gc_seconds) try: await self._event_pipe_gc() except Exception as e: print(f"Exception in IOPubThread._event_pipe_gc: {e}", file=sys.__stderr__) async def _event_pipe_gc(self): """run a single garbage collection on event pipes""" if not self._event_pipes: # don't acquire the lock if there's nothing to do return with self._event_pipe_gc_lock: for thread, socket in list(self._event_pipes.items()): if not thread.is_alive(): socket.close() del self._event_pipes[thread] @property def _event_pipe(self): """thread-local event pipe for signaling events that should be processed in the thread""" try: event_pipe = self._local.event_pipe except AttributeError: # new thread, new event pipe ctx = self.socket.context event_pipe = ctx.socket(zmq.PUSH) event_pipe.linger = 0 event_pipe.connect(self._event_interface) self._local.event_pipe = event_pipe # associate event pipes to their threads # so they can be closed explicitly # implicit close on __del__ throws a ResourceWarning with self._event_pipe_gc_lock: self._event_pipes[threading.current_thread()] = event_pipe return event_pipe def _handle_event(self, msg): """Handle an event on the event pipe Content of the message is ignored. Whenever *an* event arrives on the event stream, *all* waiting events are processed in order. 
""" # freeze event count so new writes don't extend the queue # while we are processing n_events = len(self._events) for _ in range(n_events): event_f = self._events.popleft() event_f() def _setup_pipe_in(self): """setup listening pipe for IOPub from forked subprocesses""" ctx = self.socket.context # use UUID to authenticate pipe messages self._pipe_uuid = os.urandom(16) pipe_in = ctx.socket(zmq.PULL) pipe_in.linger = 0 try: self._pipe_port = pipe_in.bind_to_random_port("tcp://127.0.0.1") except zmq.ZMQError as e: warnings.warn( "Couldn't bind IOPub Pipe to 127.0.0.1: %s" % e + "\nsubprocess output will be unavailable.", stacklevel=2, ) self._pipe_flag = False pipe_in.close() return self._pipe_in = ZMQStream(pipe_in, self.io_loop) self._pipe_in.on_recv(self._handle_pipe_msg) def _handle_pipe_msg(self, msg): """handle a pipe message from a subprocess""" if not self._pipe_flag or not self._is_master_process(): return if msg[0] != self._pipe_uuid: print("Bad pipe message: %s", msg, file=sys.__stderr__) return self.send_multipart(msg[1:]) def _setup_pipe_out(self): # must be new context after fork ctx = zmq.Context() pipe_out = ctx.socket(zmq.PUSH) pipe_out.linger = 3000 # 3s timeout for pipe_out sends before discarding the message pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port) return ctx, pipe_out def _is_master_process(self): return os.getpid() == self._master_pid def _check_mp_mode(self): """check for forks, and switch to zmq pipeline if necessary""" if not self._pipe_flag or self._is_master_process(): return MASTER return CHILD def start(self): """Start the IOPub thread""" self.thread.name = "IOPub" self.thread.start() # make sure we don't prevent process exit # I'm not sure why setting daemon=True above isn't enough, but it doesn't appear to be. 
atexit.register(self.stop) def stop(self): """Stop the IOPub thread""" self._stopped = True if not self.thread.is_alive(): return self.io_loop.add_callback(self.io_loop.stop) self.thread.join(timeout=30) if self.thread.is_alive(): # avoid infinite hang if stop fails msg = "IOPub thread did not terminate in 30 seconds" raise TimeoutError(msg) # close *all* event pipes, created in any thread # event pipes can only be used from other threads while self.thread.is_alive() # so after thread.join, this should be safe for _thread, event_pipe in self._event_pipes.items(): event_pipe.close() def close(self): """Close the IOPub thread.""" if self.closed: return self.socket.close() self.socket = None @property def closed(self): return self.socket is None def schedule(self, f): """Schedule a function to be called in our IO thread. If the thread is not running, call immediately. """ if self.thread.is_alive(): self._events.append(f) # wake event thread (message content is ignored) self._event_pipe.send(b"") else: f() def send_multipart(self, *args, **kwargs): """send_multipart schedules actual zmq send in my thread. If my thread isn't running (e.g. forked process), send immediately. 
""" self.schedule(lambda: self._really_send(*args, **kwargs)) def _really_send(self, msg, *args, **kwargs): """The callback that actually sends messages""" if self.closed: return mp_mode = self._check_mp_mode() if mp_mode != CHILD: # we are master, do a regular send self.socket.send_multipart(msg, *args, **kwargs) else: # we are a child, pipe to master # new context/socket for every pipe-out # since forks don't teardown politely, use ctx.term to ensure send has completed ctx, pipe_out = self._setup_pipe_out() pipe_out.send_multipart([self._pipe_uuid, *msg], *args, **kwargs) pipe_out.close() ctx.term() class BackgroundSocket: """Wrapper around IOPub thread that provides zmq send[_multipart]""" io_thread = None def __init__(self, io_thread): """Initialize the socket.""" self.io_thread = io_thread def __getattr__(self, attr): """Wrap socket attr access for backward-compatibility""" if attr.startswith("__") and attr.endswith("__"): # don't wrap magic methods super().__getattr__(attr) # type:ignore[misc] assert self.io_thread is not None if hasattr(self.io_thread.socket, attr): warnings.warn( f"Accessing zmq Socket attribute {attr} on BackgroundSocket" f" is deprecated since ipykernel 4.3.0" f" use .io_thread.socket.{attr}", DeprecationWarning, stacklevel=2, ) return getattr(self.io_thread.socket, attr) return super().__getattr__(attr) # type:ignore[misc] def __setattr__(self, attr, value): """Set an attribute on the socket.""" if attr == "io_thread" or (attr.startswith("__") and attr.endswith("__")): super().__setattr__(attr, value) else: warnings.warn( f"Setting zmq Socket attribute {attr} on BackgroundSocket" f" is deprecated since ipykernel 4.3.0" f" use .io_thread.socket.{attr}", DeprecationWarning, stacklevel=2, ) assert self.io_thread is not None setattr(self.io_thread.socket, attr, value) def send(self, msg, *args, **kwargs): """Send a message to the socket.""" return self.send_multipart([msg], *args, **kwargs) def send_multipart(self, *args, **kwargs): 
"""Schedule send in IO thread""" assert self.io_thread is not None return self.io_thread.send_multipart(*args, **kwargs) class OutStream(TextIOBase): """A file like object that publishes the stream to a 0MQ PUB socket. Output is handed off to an IO Thread """ # timeout for flush to avoid infinite hang # in case of misbehavior flush_timeout = 10 # The time interval between automatic flushes, in seconds. flush_interval = 0.2 topic = None encoding = "UTF-8" _exc: Optional[Any] = None def fileno(self): """ Things like subprocess will peak and write to the fileno() of stderr/stdout. """ if getattr(self, "_original_stdstream_copy", None) is not None: return self._original_stdstream_copy msg = "fileno" raise io.UnsupportedOperation(msg) def _watch_pipe_fd(self): """ We've redirected standards streams 0 and 1 into a pipe. We need to watch in a thread and redirect them to the right places. 1) the ZMQ channels to show in notebook interfaces, 2) the original stdout/err, to capture errors in terminals. We cannot schedule this on the ioloop thread, as this might be blocking. """ try: bts = os.read(self._fid, PIPE_BUFFER_SIZE) while bts and self._should_watch: self.write(bts.decode(errors="replace")) os.write(self._original_stdstream_copy, bts) bts = os.read(self._fid, PIPE_BUFFER_SIZE) except Exception: self._exc = sys.exc_info() def __init__( self, session, pub_thread, name, pipe=None, echo=None, *, watchfd=True, isatty=False, ): """ Parameters ---------- session : object the session object pub_thread : threading.Thread the publication thread name : str {'stderr', 'stdout'} the name of the standard stream to replace pipe : object the pipe object echo : bool whether to echo output watchfd : bool (default, True) Watch the file descriptor corresponding to the replaced stream. This is useful if you know some underlying code will write directly the file descriptor by its number. 
It will spawn a watching thread, that will swap the give file descriptor for a pipe, read from the pipe, and insert this into the current Stream. isatty : bool (default, False) Indication of whether this stream has terminal capabilities (e.g. can handle colors) """ if pipe is not None: warnings.warn( "pipe argument to OutStream is deprecated and ignored since ipykernel 4.2.3.", DeprecationWarning, stacklevel=2, ) # This is necessary for compatibility with Python built-in streams self.session = session if not isinstance(pub_thread, IOPubThread): # Backward-compat: given socket, not thread. Wrap in a thread. warnings.warn( "Since IPykernel 4.3, OutStream should be created with " "IOPubThread, not %r" % pub_thread, DeprecationWarning, stacklevel=2, ) pub_thread = IOPubThread(pub_thread) pub_thread.start() self.pub_thread = pub_thread self.name = name self.topic = b"stream." + name.encode() self._parent_header: contextvars.ContextVar[Dict[str, Any]] = contextvars.ContextVar( "parent_header" ) self._parent_header.set({}) self._thread_to_parent = {} self._thread_to_parent_header = {} self._parent_header_global = {} self._master_pid = os.getpid() self._flush_pending = False self._subprocess_flush_pending = False self._io_loop = pub_thread.io_loop self._buffer_lock = threading.RLock() self._buffers = defaultdict(StringIO) self.echo = None self._isatty = bool(isatty) self._should_watch = False self._local = local() if ( watchfd and ( (sys.platform.startswith("linux") or sys.platform.startswith("darwin")) # Pytest set its own capture. Don't redirect from within pytest. and ("PYTEST_CURRENT_TEST" not in os.environ) ) # allow forcing watchfd (mainly for tests) or watchfd == "force" ): self._should_watch = True self._setup_stream_redirects(name) if echo: if hasattr(echo, "read") and hasattr(echo, "write"): # make sure we aren't trying to echo on the FD we're watching! 
# that would cause an infinite loop, always echoing on itself if self._should_watch: try: echo_fd = echo.fileno() except Exception: echo_fd = None if echo_fd is not None and echo_fd == self._original_stdstream_fd: # echo on the _copy_ we made during # this is the actual terminal FD now echo = io.TextIOWrapper( io.FileIO( self._original_stdstream_copy, "w", ) ) self.echo = echo else: msg = "echo argument must be a file-like object" raise ValueError(msg) @property def parent_header(self): try: # asyncio-specific return self._parent_header.get() except LookupError: try: # thread-specific identity = threading.current_thread().ident # retrieve the outermost (oldest ancestor, # discounting the kernel thread) thread identity while identity in self._thread_to_parent: identity = self._thread_to_parent[identity] # use the header of the oldest ancestor return self._thread_to_parent_header[identity] except KeyError: # global (fallback) return self._parent_header_global @parent_header.setter def parent_header(self, value): self._parent_header_global = value return self._parent_header.set(value) def isatty(self): """Return a bool indicating whether this is an 'interactive' stream. 
Returns: Boolean """ return self._isatty def _setup_stream_redirects(self, name): pr, pw = os.pipe() fno = self._original_stdstream_fd = getattr(sys, name).fileno() self._original_stdstream_copy = os.dup(fno) os.dup2(pw, fno) self._fid = pr self._exc = None self.watch_fd_thread = threading.Thread(target=self._watch_pipe_fd) self.watch_fd_thread.daemon = True self.watch_fd_thread.start() def _is_master_process(self): return os.getpid() == self._master_pid def set_parent(self, parent): """Set the parent header.""" self.parent_header = extract_header(parent) def close(self): """Close the stream.""" if self._should_watch: self._should_watch = False # thread won't wake unless there's something to read # writing something after _should_watch will not be echoed os.write(self._original_stdstream_fd, b"\0") self.watch_fd_thread.join() # restore original FDs os.dup2(self._original_stdstream_copy, self._original_stdstream_fd) os.close(self._original_stdstream_copy) if self._exc: etype, value, tb = self._exc traceback.print_exception(etype, value, tb) self.pub_thread = None @property def closed(self): return self.pub_thread is None def _schedule_flush(self): """schedule a flush in the IO thread call this on write, to indicate that flush should be called soon. """ if self._flush_pending: return self._flush_pending = True # add_timeout has to be handed to the io thread via event pipe def _schedule_in_thread(): self._io_loop.call_later(self.flush_interval, self._flush) self.pub_thread.schedule(_schedule_in_thread) def flush(self): """trigger actual zmq send send will happen in the background thread """ if ( self.pub_thread and self.pub_thread.thread is not None and self.pub_thread.thread.is_alive() and self.pub_thread.thread.ident != threading.current_thread().ident ): # request flush on the background thread self.pub_thread.schedule(self._flush) # wait for flush to actually get through, if we can. 
evt = threading.Event() self.pub_thread.schedule(evt.set) # and give a timeout to avoid if not evt.wait(self.flush_timeout): # write directly to __stderr__ instead of warning because # if this is happening sys.stderr may be the problem. print("IOStream.flush timed out", file=sys.__stderr__) else: self._flush() def _flush(self): """This is where the actual send happens. _flush should generally be called in the IO thread, unless the thread has been destroyed (e.g. forked subprocess). """ self._flush_pending = False self._subprocess_flush_pending = False if self.echo is not None: try: self.echo.flush() except OSError as e: if self.echo is not sys.__stderr__: print(f"Flush failed: {e}", file=sys.__stderr__) for parent, data in self._flush_buffers(): if data: # FIXME: this disables Session's fork-safe check, # since pub_thread is itself fork-safe. # There should be a better way to do this. self.session.pid = os.getpid() content = {"name": self.name, "text": data} msg = self.session.msg("stream", content, parent=parent) # Each transform either returns a new # message or None. If None is returned, # the message has been 'used' and we return. for hook in self._hooks: msg = hook(msg) if msg is None: return self.session.send( self.pub_thread, msg, ident=self.topic, ) def write(self, string: str) -> Optional[int]: # type:ignore[override] """Write to current stream after encoding if necessary Returns ------- len : int number of items from input parameter written to stream. 
""" parent = self.parent_header if not isinstance(string, str): msg = f"write() argument must be str, not {type(string)}" # type:ignore[unreachable] raise TypeError(msg) if self.echo is not None: try: self.echo.write(string) except OSError as e: if self.echo is not sys.__stderr__: print(f"Write failed: {e}", file=sys.__stderr__) if self.pub_thread is None: msg = "I/O operation on closed file" raise ValueError(msg) is_child = not self._is_master_process() # only touch the buffer in the IO thread to avoid races with self._buffer_lock: self._buffers[frozenset(parent.items())].write(string) if is_child: # mp.Pool cannot be trusted to flush promptly (or ever), # and this helps. if self._subprocess_flush_pending: return None self._subprocess_flush_pending = True # We can not rely on self._io_loop.call_later from a subprocess self.pub_thread.schedule(self._flush) else: self._schedule_flush() return len(string) def writelines(self, sequence): """Write lines to the stream.""" if self.pub_thread is None: msg = "I/O operation on closed file" raise ValueError(msg) for string in sequence: self.write(string) def writable(self): """Test whether the stream is writable.""" return True def _flush_buffers(self): """clear the current buffer and return the current buffer data.""" buffers = self._rotate_buffers() for frozen_parent, buffer in buffers.items(): data = buffer.getvalue() buffer.close() yield dict(frozen_parent), data def _rotate_buffers(self): """Returns the current buffer and replaces it with an empty buffer.""" with self._buffer_lock: old_buffers = self._buffers self._buffers = defaultdict(StringIO) return old_buffers @property def _hooks(self): if not hasattr(self._local, "hooks"): # create new list for a new thread self._local.hooks = [] return self._local.hooks def register_hook(self, hook): """ Registers a hook with the thread-local storage. Parameters ---------- hook : Any callable object Returns ------- Either a publishable message, or `None`. 
The hook callable must return a message from the __call__ method if they still require the `session.send` method to be called after transformation. Returning `None` will halt that execution path, and session.send will not be called. """ self._hooks.append(hook) def unregister_hook(self, hook): """ Un-registers a hook with the thread-local storage. Parameters ---------- hook : Any callable object which has previously been registered as a hook. Returns ------- bool - `True` if the hook was removed, `False` if it wasn't found. """ try: self._hooks.remove(hook) return True except ValueError: return False ipykernel-6.29.5/ipykernel/ipkernel.py000066400000000000000000000715431464053401500200200ustar00rootroot00000000000000"""The IPython kernel implementation""" import asyncio import builtins import gc import getpass import os import signal import sys import threading import typing as t from contextlib import contextmanager from functools import partial import comm from IPython.core import release from IPython.utils.tokenutil import line_at_cursor, token_at_cursor from jupyter_client.session import extract_header from traitlets import Any, Bool, HasTraits, Instance, List, Type, observe, observe_compat from zmq.eventloop.zmqstream import ZMQStream from .comm.comm import BaseComm from .comm.manager import CommManager from .compiler import XCachingCompiler from .eventloops import _use_appnope from .iostream import OutStream from .kernelbase import Kernel as KernelBase from .kernelbase import _accepts_parameters from .zmqshell import ZMQInteractiveShell try: from IPython.core.interactiveshell import _asyncio_runner # type:ignore[attr-defined] except ImportError: _asyncio_runner = None # type:ignore[assignment] try: from IPython.core.completer import provisionalcompleter as _provisionalcompleter from IPython.core.completer import rectify_completions as _rectify_completions _use_experimental_60_completion = True except ImportError: _use_experimental_60_completion = False 
_EXPERIMENTAL_KEY_NAME = "_jupyter_types_experimental" def _create_comm(*args, **kwargs): """Create a new Comm.""" return BaseComm(*args, **kwargs) # there can only be one comm manager in a ipykernel process _comm_lock = threading.Lock() _comm_manager: t.Optional[CommManager] = None def _get_comm_manager(*args, **kwargs): """Create a new CommManager.""" global _comm_manager # noqa: PLW0603 if _comm_manager is None: with _comm_lock: if _comm_manager is None: _comm_manager = CommManager(*args, **kwargs) return _comm_manager comm.create_comm = _create_comm comm.get_comm_manager = _get_comm_manager class IPythonKernel(KernelBase): """The IPython Kernel class.""" shell = Instance("IPython.core.interactiveshell.InteractiveShellABC", allow_none=True) shell_class = Type(ZMQInteractiveShell) use_experimental_completions = Bool( True, help="Set this flag to False to deactivate the use of experimental IPython completion APIs.", ).tag(config=True) debugpy_stream = Instance(ZMQStream, allow_none=True) user_module = Any() @observe("user_module") @observe_compat def _user_module_changed(self, change): if self.shell is not None: self.shell.user_module = change["new"] user_ns = Instance(dict, args=None, allow_none=True) @observe("user_ns") @observe_compat def _user_ns_changed(self, change): if self.shell is not None: self.shell.user_ns = change["new"] self.shell.init_user_ns() # A reference to the Python builtin 'raw_input' function. 
# (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3) _sys_raw_input = Any() _sys_eval_input = Any() def __init__(self, **kwargs): """Initialize the kernel.""" super().__init__(**kwargs) from .debugger import Debugger, _is_debugpy_available # Initialize the Debugger if _is_debugpy_available: self.debugger = Debugger( self.log, self.debugpy_stream, self._publish_debug_event, self.debug_shell_socket, self.session, self.debug_just_my_code, ) # Initialize the InteractiveShell subclass self.shell = self.shell_class.instance( parent=self, profile_dir=self.profile_dir, user_module=self.user_module, user_ns=self.user_ns, kernel=self, compiler_class=XCachingCompiler, ) self.shell.displayhook.session = self.session # type:ignore[attr-defined] jupyter_session_name = os.environ.get("JPY_SESSION_NAME") if jupyter_session_name: self.shell.user_ns["__session__"] = jupyter_session_name self.shell.displayhook.pub_socket = self.iopub_socket # type:ignore[attr-defined] self.shell.displayhook.topic = self._topic("execute_result") # type:ignore[attr-defined] self.shell.display_pub.session = self.session # type:ignore[attr-defined] self.shell.display_pub.pub_socket = self.iopub_socket # type:ignore[attr-defined] self.comm_manager = comm.get_comm_manager() assert isinstance(self.comm_manager, HasTraits) self.shell.configurables.append(self.comm_manager) # type:ignore[arg-type] comm_msg_types = ["comm_open", "comm_msg", "comm_close"] for msg_type in comm_msg_types: self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type) if _use_appnope() and self._darwin_app_nap: # Disable app-nap as the kernel is not a gui but can have guis import appnope # type:ignore[import-untyped] appnope.nope() self._new_threads_parent_header = {} self._initialize_thread_hooks() if hasattr(gc, "callbacks"): # while `gc.callbacks` exists since Python 3.3, pypy does not # implement it even as of 3.9. 
gc.callbacks.append(self._clean_thread_parent_frames) help_links = List( [ { "text": "Python Reference", "url": "https://docs.python.org/%i.%i" % sys.version_info[:2], }, { "text": "IPython Reference", "url": "https://ipython.org/documentation.html", }, { "text": "NumPy Reference", "url": "https://docs.scipy.org/doc/numpy/reference/", }, { "text": "SciPy Reference", "url": "https://docs.scipy.org/doc/scipy/reference/", }, { "text": "Matplotlib Reference", "url": "https://matplotlib.org/contents.html", }, { "text": "SymPy Reference", "url": "http://docs.sympy.org/latest/index.html", }, { "text": "pandas Reference", "url": "https://pandas.pydata.org/pandas-docs/stable/", }, ] ).tag(config=True) # Kernel info fields implementation = "ipython" implementation_version = release.version language_info = { "name": "python", "version": sys.version.split()[0], "mimetype": "text/x-python", "codemirror_mode": {"name": "ipython", "version": sys.version_info[0]}, "pygments_lexer": "ipython%d" % 3, "nbconvert_exporter": "python", "file_extension": ".py", } def dispatch_debugpy(self, msg): from .debugger import _is_debugpy_available if _is_debugpy_available: # The first frame is the socket id, we can drop it frame = msg[1].bytes.decode("utf-8") self.log.debug("Debugpy received: %s", frame) self.debugger.tcp_client.receive_dap_frame(frame) @property def banner(self): if self.shell: return self.shell.banner return None async def poll_stopped_queue(self): """Poll the stopped queue.""" while True: await self.debugger.handle_stopped_event() def start(self): """Start the kernel.""" if self.shell: self.shell.exit_now = False if self.debugpy_stream is None: self.log.warning("debugpy_stream undefined, debugging will not be enabled") else: self.debugpy_stream.on_recv(self.dispatch_debugpy, copy=False) super().start() if self.debugpy_stream: asyncio.run_coroutine_threadsafe( self.poll_stopped_queue(), self.control_thread.io_loop.asyncio_loop ) def set_parent(self, ident, parent, 
channel="shell"): """Overridden from parent to tell the display hook and output streams about the parent message. """ super().set_parent(ident, parent, channel) if channel == "shell" and self.shell: self.shell.set_parent(parent) def init_metadata(self, parent): """Initialize metadata. Run at the beginning of each execution request. """ md = super().init_metadata(parent) # FIXME: remove deprecated ipyparallel-specific code # This is required for ipyparallel < 5.0 md.update( { "dependencies_met": True, "engine": self.ident, } ) return md def finish_metadata(self, parent, metadata, reply_content): """Finish populating metadata. Run after completing an execution request. """ # FIXME: remove deprecated ipyparallel-specific code # This is required by ipyparallel < 5.0 metadata["status"] = reply_content["status"] if reply_content["status"] == "error" and reply_content["ename"] == "UnmetDependency": metadata["dependencies_met"] = False return metadata def _forward_input(self, allow_stdin=False): """Forward raw_input and getpass to the current frontend. via input_request """ self._allow_stdin = allow_stdin self._sys_raw_input = builtins.input builtins.input = self.raw_input self._save_getpass = getpass.getpass getpass.getpass = self.getpass def _restore_input(self): """Restore raw_input, getpass""" builtins.input = self._sys_raw_input getpass.getpass = self._save_getpass @property def execution_count(self): if self.shell: return self.shell.execution_count return None @execution_count.setter def execution_count(self, value): # Ignore the incrementing done by KernelBase, in favour of our shell's # execution counter. pass @contextmanager def _cancel_on_sigint(self, future): """ContextManager for capturing SIGINT and cancelling a future SIGINT raises in the event loop when running async code, but we want it to halt a coroutine. Ideally, it would raise KeyboardInterrupt, but this turns it into a CancelledError. At least it gets a decent traceback to the user. 
""" sigint_future: asyncio.Future[int] = asyncio.Future() # whichever future finishes first, # cancel the other one def cancel_unless_done(f, _ignored): if f.cancelled() or f.done(): return f.cancel() # when sigint finishes, # abort the coroutine with CancelledError sigint_future.add_done_callback(partial(cancel_unless_done, future)) # when the main future finishes, # stop watching for SIGINT events future.add_done_callback(partial(cancel_unless_done, sigint_future)) def handle_sigint(*args): def set_sigint_result(): if sigint_future.cancelled() or sigint_future.done(): return sigint_future.set_result(1) # use add_callback for thread safety self.io_loop.add_callback(set_sigint_result) # set the custom sigint handler during this context save_sigint = signal.signal(signal.SIGINT, handle_sigint) try: yield finally: # restore the previous sigint handler signal.signal(signal.SIGINT, save_sigint) async def execute_request(self, stream, ident, parent): """Override for cell output - cell reconciliation.""" parent_header = extract_header(parent) self._associate_new_top_level_threads_with(parent_header) await super().execute_request(stream, ident, parent) async def do_execute( self, code, silent, store_history=True, user_expressions=None, allow_stdin=False, *, cell_meta=None, cell_id=None, ): """Handle code execution.""" shell = self.shell # we'll need this a lot here assert shell is not None self._forward_input(allow_stdin) reply_content: t.Dict[str, t.Any] = {} if hasattr(shell, "run_cell_async") and hasattr(shell, "should_run_async"): run_cell = shell.run_cell_async should_run_async = shell.should_run_async accepts_params = _accepts_parameters(run_cell, ["cell_id"]) else: should_run_async = lambda cell: False # noqa: ARG005, E731 # older IPython, # use blocking run_cell and wrap it in coroutine async def run_cell(*args, **kwargs): return shell.run_cell(*args, **kwargs) accepts_params = _accepts_parameters(shell.run_cell, ["cell_id"]) try: # default case: runner is asyncio 
and asyncio is already running # TODO: this should check every case for "are we inside the runner", # not just asyncio preprocessing_exc_tuple = None try: transformed_cell = shell.transform_cell(code) except Exception: transformed_cell = code preprocessing_exc_tuple = sys.exc_info() if ( _asyncio_runner # type:ignore[truthy-bool] and shell.loop_runner is _asyncio_runner and asyncio.get_event_loop().is_running() and should_run_async( code, transformed_cell=transformed_cell, preprocessing_exc_tuple=preprocessing_exc_tuple, ) ): if accepts_params["cell_id"]: coro = run_cell( code, store_history=store_history, silent=silent, transformed_cell=transformed_cell, preprocessing_exc_tuple=preprocessing_exc_tuple, cell_id=cell_id, ) else: coro = run_cell( code, store_history=store_history, silent=silent, transformed_cell=transformed_cell, preprocessing_exc_tuple=preprocessing_exc_tuple, ) coro_future = asyncio.ensure_future(coro) with self._cancel_on_sigint(coro_future): res = None try: res = await coro_future finally: shell.events.trigger("post_execute") if not silent: shell.events.trigger("post_run_cell", res) else: # runner isn't already running, # make synchronous call, # letting shell dispatch to loop runners if accepts_params["cell_id"]: res = shell.run_cell( code, store_history=store_history, silent=silent, cell_id=cell_id, ) else: res = shell.run_cell(code, store_history=store_history, silent=silent) finally: self._restore_input() err = res.error_before_exec if res.error_before_exec is not None else res.error_in_exec if res.success: reply_content["status"] = "ok" else: reply_content["status"] = "error" reply_content.update( { "traceback": shell._last_traceback or [], "ename": str(type(err).__name__), "evalue": str(err), } ) # FIXME: deprecated piece for ipyparallel (remove in 5.0): e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="execute") reply_content["engine_info"] = e_info # Return the execution counter so clients can display prompts 
reply_content["execution_count"] = shell.execution_count - 1 if "traceback" in reply_content: self.log.info( "Exception in execute request:\n%s", "\n".join(reply_content["traceback"]), ) # At this point, we can tell whether the main code execution succeeded # or not. If it did, we proceed to evaluate user_expressions if reply_content["status"] == "ok": reply_content["user_expressions"] = shell.user_expressions(user_expressions or {}) else: # If there was an error, don't even try to compute expressions reply_content["user_expressions"] = {} # Payloads should be retrieved regardless of outcome, so we can both # recover partial output (that could have been generated early in a # block, before an error) and always clear the payload system. reply_content["payload"] = shell.payload_manager.read_payload() # Be aggressive about clearing the payload because we don't want # it to sit in memory until the next execute_request comes in. shell.payload_manager.clear_payload() return reply_content def do_complete(self, code, cursor_pos): """Handle code completion.""" if _use_experimental_60_completion and self.use_experimental_completions: return self._experimental_do_complete(code, cursor_pos) # FIXME: IPython completers currently assume single line, # but completion messages give multi-line context # For now, extract line from cell, based on cursor_pos: if cursor_pos is None: cursor_pos = len(code) line, offset = line_at_cursor(code, cursor_pos) line_cursor = cursor_pos - offset assert self.shell is not None txt, matches = self.shell.complete("", line, line_cursor) return { "matches": matches, "cursor_end": cursor_pos, "cursor_start": cursor_pos - len(txt), "metadata": {}, "status": "ok", } async def do_debug_request(self, msg): """Handle a debug request.""" from .debugger import _is_debugpy_available if _is_debugpy_available: return await self.debugger.process_request(msg) return None def _experimental_do_complete(self, code, cursor_pos): """ Experimental completions from 
IPython, using Jedi. """ if cursor_pos is None: cursor_pos = len(code) with _provisionalcompleter(): assert self.shell is not None raw_completions = self.shell.Completer.completions(code, cursor_pos) completions = list(_rectify_completions(code, raw_completions)) comps = [] for comp in completions: comps.append( dict( start=comp.start, end=comp.end, text=comp.text, type=comp.type, signature=comp.signature, ) ) if completions: s = completions[0].start e = completions[0].end matches = [c.text for c in completions] else: s = cursor_pos e = cursor_pos matches = [] return { "matches": matches, "cursor_end": e, "cursor_start": s, "metadata": {_EXPERIMENTAL_KEY_NAME: comps}, "status": "ok", } def do_inspect(self, code, cursor_pos, detail_level=0, omit_sections=()): """Handle code inspection.""" name = token_at_cursor(code, cursor_pos) reply_content: t.Dict[str, t.Any] = {"status": "ok"} reply_content["data"] = {} reply_content["metadata"] = {} assert self.shell is not None try: if release.version_info >= (8,): # `omit_sections` keyword will be available in IPython 8, see # https://github.com/ipython/ipython/pull/13343 bundle = self.shell.object_inspect_mime( name, detail_level=detail_level, omit_sections=omit_sections, ) else: bundle = self.shell.object_inspect_mime(name, detail_level=detail_level) reply_content["data"].update(bundle) if not self.shell.enable_html_pager: reply_content["data"].pop("text/html") reply_content["found"] = True except KeyError: reply_content["found"] = False return reply_content def do_history( self, hist_access_type, output, raw, session=0, start=0, stop=None, n=None, pattern=None, unique=False, ): """Handle code history.""" assert self.shell is not None if hist_access_type == "tail": hist = self.shell.history_manager.get_tail( n, raw=raw, output=output, include_latest=True ) elif hist_access_type == "range": hist = self.shell.history_manager.get_range( session, start, stop, raw=raw, output=output ) elif hist_access_type == "search": hist = 
self.shell.history_manager.search( pattern, raw=raw, output=output, n=n, unique=unique ) else: hist = [] return { "status": "ok", "history": list(hist), } def do_shutdown(self, restart): """Handle kernel shutdown.""" if self.shell: self.shell.exit_now = True return dict(status="ok", restart=restart) def do_is_complete(self, code): """Handle an is_complete request.""" transformer_manager = getattr(self.shell, "input_transformer_manager", None) if transformer_manager is None: # input_splitter attribute is deprecated assert self.shell is not None transformer_manager = self.shell.input_splitter status, indent_spaces = transformer_manager.check_complete(code) r = {"status": status} if status == "incomplete": r["indent"] = " " * indent_spaces return r def do_apply(self, content, bufs, msg_id, reply_metadata): """Handle an apply request.""" try: from ipyparallel.serialize import serialize_object, unpack_apply_message except ImportError: from .serialize import serialize_object, unpack_apply_message shell = self.shell assert shell is not None try: working = shell.user_ns prefix = "_" + str(msg_id).replace("-", "") + "_" f, args, kwargs = unpack_apply_message(bufs, working, copy=False) fname = getattr(f, "__name__", "f") fname = prefix + "f" argname = prefix + "args" kwargname = prefix + "kwargs" resultname = prefix + "result" ns = {fname: f, argname: args, kwargname: kwargs, resultname: None} # print ns working.update(ns) code = f"{resultname} = {fname}(*{argname},**{kwargname})" try: exec(code, shell.user_global_ns, shell.user_ns) result = working.get(resultname) finally: for key in ns: working.pop(key) assert self.session is not None result_buf = serialize_object( result, buffer_threshold=self.session.buffer_threshold, item_threshold=self.session.item_threshold, ) except BaseException as e: # invoke IPython traceback formatting shell.showtraceback() reply_content = { "traceback": shell._last_traceback or [], "ename": str(type(e).__name__), "evalue": str(e), } # FIXME: 
deprecated piece for ipyparallel (remove in 5.0): e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="apply") reply_content["engine_info"] = e_info self.send_response( self.iopub_socket, "error", reply_content, ident=self._topic("error"), ) self.log.info("Exception in apply request:\n%s", "\n".join(reply_content["traceback"])) result_buf = [] reply_content["status"] = "error" else: reply_content = {"status": "ok"} return reply_content, result_buf def do_clear(self): """Clear the kernel.""" if self.shell: self.shell.reset(False) return dict(status="ok") def _associate_new_top_level_threads_with(self, parent_header): """Store the parent header to associate it with new top-level threads""" self._new_threads_parent_header = parent_header def _initialize_thread_hooks(self): """Store thread hierarchy and thread-parent_header associations.""" stdout = self._stdout stderr = self._stderr kernel_thread_ident = threading.get_ident() kernel = self _threading_Thread_run = threading.Thread.run _threading_Thread__init__ = threading.Thread.__init__ def run_closure(self: threading.Thread): """Wrap the `threading.Thread.start` to intercept thread identity. This is needed because there is no "start" hook yet, but there might be one in the future: https://bugs.python.org/issue14073 This is a no-op if the `self._stdout` and `self._stderr` are not sub-classes of `OutStream`. 
""" try: parent = self._ipykernel_parent_thread_ident # type:ignore[attr-defined] except AttributeError: return for stream in [stdout, stderr]: if isinstance(stream, OutStream): if parent == kernel_thread_ident: stream._thread_to_parent_header[ self.ident ] = kernel._new_threads_parent_header else: stream._thread_to_parent[self.ident] = parent _threading_Thread_run(self) def init_closure(self: threading.Thread, *args, **kwargs): _threading_Thread__init__(self, *args, **kwargs) self._ipykernel_parent_thread_ident = threading.get_ident() # type:ignore[attr-defined] threading.Thread.__init__ = init_closure # type:ignore[method-assign] threading.Thread.run = run_closure # type:ignore[method-assign] def _clean_thread_parent_frames( self, phase: t.Literal["start", "stop"], info: t.Dict[str, t.Any] ): """Clean parent frames of threads which are no longer running. This is meant to be invoked by garbage collector callback hook. The implementation enumerates the threads because there is no "exit" hook yet, but there might be one in the future: https://bugs.python.org/issue14073 This is a no-op if the `self._stdout` and `self._stderr` are not sub-classes of `OutStream`. """ # Only run before the garbage collector starts if phase != "start": return active_threads = {thread.ident for thread in threading.enumerate()} for stream in [self._stdout, self._stderr]: if isinstance(stream, OutStream): thread_to_parent_header = stream._thread_to_parent_header for identity in list(thread_to_parent_header.keys()): if identity not in active_threads: try: del thread_to_parent_header[identity] except KeyError: pass thread_to_parent = stream._thread_to_parent for identity in list(thread_to_parent.keys()): if identity not in active_threads: try: del thread_to_parent[identity] except KeyError: pass # This exists only for backwards compatibility - use IPythonKernel instead class Kernel(IPythonKernel): """DEPRECATED. 
An alias for the IPython kernel class.""" def __init__(self, *args, **kwargs): # pragma: no cover """DEPRECATED.""" import warnings warnings.warn( "Kernel is a deprecated alias of ipykernel.ipkernel.IPythonKernel", DeprecationWarning, stacklevel=2, ) super().__init__(*args, **kwargs) ipykernel-6.29.5/ipykernel/jsonutil.py000066400000000000000000000116531464053401500200520ustar00rootroot00000000000000"""Utilities to manipulate JSON objects.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import math import numbers import re import types from binascii import b2a_base64 from datetime import date, datetime from jupyter_client._version import version_info as jupyter_client_version next_attr_name = "__next__" # ----------------------------------------------------------------------------- # Globals and constants # ----------------------------------------------------------------------------- # timestamp formats ISO8601 = "%Y-%m-%dT%H:%M:%S.%f" ISO8601_PAT = re.compile( r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(\.\d{1,6})?Z?([\+\-]\d{2}:?\d{2})?$" ) # holy crap, strptime is not threadsafe. # Calling it once at import seems to help. datetime.strptime("1", "%d") # ----------------------------------------------------------------------------- # Classes and functions # ----------------------------------------------------------------------------- # constants for identifying png/jpeg data PNG = b"\x89PNG\r\n\x1a\n" # front of PNG base64-encoded PNG64 = b"iVBORw0KG" JPEG = b"\xff\xd8" # front of JPEG base64-encoded JPEG64 = b"/9" # constants for identifying gif data GIF_64 = b"R0lGODdh" GIF89_64 = b"R0lGODlh" # front of PDF base64-encoded PDF64 = b"JVBER" JUPYTER_CLIENT_MAJOR_VERSION = jupyter_client_version[0] def encode_images(format_dict): """b64-encodes images in a displaypub format dict Perhaps this should be handled in json_clean itself? 
Parameters ---------- format_dict : dict A dictionary of display data keyed by mime-type Returns ------- format_dict : dict A copy of the same dictionary, but binary image data ('image/png', 'image/jpeg' or 'application/pdf') is base64-encoded. """ # no need for handling of ambiguous bytestrings on Python 3, # where bytes objects always represent binary data and thus # base64-encoded. return format_dict def json_clean(obj): # pragma: no cover """Deprecated, this is a no-op for jupyter-client>=7. Clean an object to ensure it's safe to encode in JSON. Atomic, immutable objects are returned unmodified. Sets and tuples are converted to lists, lists are copied and dicts are also copied. Note: dicts whose keys could cause collisions upon encoding (such as a dict with both the number 1 and the string '1' as keys) will cause a ValueError to be raised. Parameters ---------- obj : any python object Returns ------- out : object A version of the input which will not cause an encoding error when encoded as JSON. Note that this function does not *encode* its inputs, it simply sanitizes it so that there will be no encoding errors later. """ if int(JUPYTER_CLIENT_MAJOR_VERSION) >= 7: return obj # types that are 'atomic' and ok in json as-is. atomic_ok = (str, type(None)) # containers that we need to convert into lists container_to_list = (tuple, set, types.GeneratorType) # Since bools are a subtype of Integrals, which are a subtype of Reals, # we have to check them in that order. if isinstance(obj, bool): return obj if isinstance(obj, numbers.Integral): # cast int to int, in case subclasses override __str__ (e.g. 
boost enum, #4598) return int(obj) if isinstance(obj, numbers.Real): # cast out-of-range floats to their reprs if math.isnan(obj) or math.isinf(obj): return repr(obj) return float(obj) if isinstance(obj, atomic_ok): return obj if isinstance(obj, bytes): # unanmbiguous binary data is base64-encoded # (this probably should have happened upstream) return b2a_base64(obj).decode("ascii") if isinstance(obj, container_to_list) or ( hasattr(obj, "__iter__") and hasattr(obj, next_attr_name) ): obj = list(obj) if isinstance(obj, list): return [json_clean(x) for x in obj] if isinstance(obj, dict): # First, validate that the dict won't lose data in conversion due to # key collisions after stringification. This can happen with keys like # True and 'true' or 1 and '1', which collide in JSON. nkeys = len(obj) nkeys_collapsed = len(set(map(str, obj))) if nkeys != nkeys_collapsed: msg = ( "dict cannot be safely converted to JSON: " "key collision would lead to dropped values" ) raise ValueError(msg) # If all OK, proceed by making the new dict that will be json-safe out = {} for k, v in obj.items(): out[str(k)] = json_clean(v) return out if isinstance(obj, (datetime, date)): return obj.strftime(ISO8601) # we don't understand it, it's probably an unserializable object raise ValueError("Can't clean for JSON: %r" % obj) ipykernel-6.29.5/ipykernel/kernelapp.py000066400000000000000000000706171464053401500201710ustar00rootroot00000000000000"""An Application for launching a kernel""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import annotations import atexit import errno import logging import os import signal import sys import traceback import typing as t from functools import partial from io import FileIO, TextIOWrapper from logging import StreamHandler from pathlib import Path import zmq from IPython.core.application import ( # type:ignore[attr-defined] BaseIPythonApplication, base_aliases, base_flags, catch_config_error, ) from IPython.core.profiledir import ProfileDir from IPython.core.shellapp import InteractiveShellApp, shell_aliases, shell_flags from jupyter_client.connect import ConnectionFileMixin from jupyter_client.session import Session, session_aliases, session_flags from jupyter_core.paths import jupyter_runtime_dir from tornado import ioloop from traitlets.traitlets import ( Any, Bool, Dict, DottedObjectName, Instance, Integer, Type, Unicode, default, ) from traitlets.utils import filefind from traitlets.utils.importstring import import_item from zmq.eventloop.zmqstream import ZMQStream from .connect import get_connection_info, write_connection_file # local imports from .control import ControlThread from .heartbeat import Heartbeat from .iostream import IOPubThread from .ipkernel import IPythonKernel from .parentpoller import ParentPollerUnix, ParentPollerWindows from .zmqshell import ZMQInteractiveShell # ----------------------------------------------------------------------------- # Flags and Aliases # ----------------------------------------------------------------------------- kernel_aliases = dict(base_aliases) kernel_aliases.update( { "ip": "IPKernelApp.ip", "hb": "IPKernelApp.hb_port", "shell": "IPKernelApp.shell_port", "iopub": "IPKernelApp.iopub_port", "stdin": "IPKernelApp.stdin_port", "control": "IPKernelApp.control_port", "f": "IPKernelApp.connection_file", "transport": "IPKernelApp.transport", } ) kernel_flags = dict(base_flags) kernel_flags.update( { "no-stdout": ({"IPKernelApp": {"no_stdout": True}}, "redirect stdout to the null device"), 
"no-stderr": ({"IPKernelApp": {"no_stderr": True}}, "redirect stderr to the null device"), "pylab": ( {"IPKernelApp": {"pylab": "auto"}}, """Pre-load matplotlib and numpy for interactive use with the default matplotlib backend.""", ), "trio-loop": ( {"InteractiveShell": {"trio_loop": False}}, "Enable Trio as main event loop.", ), } ) # inherit flags&aliases for any IPython shell apps kernel_aliases.update(shell_aliases) kernel_flags.update(shell_flags) # inherit flags&aliases for Sessions kernel_aliases.update(session_aliases) kernel_flags.update(session_flags) _ctrl_c_message = """\ NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work. To exit, you will have to explicitly quit this process, by either sending "quit" from a client, or using Ctrl-\\ in UNIX-like environments. To read more about this, see https://github.com/ipython/ipython/issues/2049 """ # ----------------------------------------------------------------------------- # Application class for starting an IPython Kernel # ----------------------------------------------------------------------------- class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, ConnectionFileMixin): """The IPYKernel application class.""" name = "ipython-kernel" aliases = Dict(kernel_aliases) # type:ignore[assignment] flags = Dict(kernel_flags) # type:ignore[assignment] classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session] # the kernel class, as an importstring kernel_class = Type( "ipykernel.ipkernel.IPythonKernel", klass="ipykernel.kernelbase.Kernel", help="""The Kernel subclass to be used. This should allow easy reuse of the IPKernelApp entry point to configure and launch kernels other than IPython's own. 
""", ).tag(config=True) kernel = Any() poller = Any() # don't restrict this even though current pollers are all Threads heartbeat = Instance(Heartbeat, allow_none=True) context: zmq.Context[t.Any] | None = Any() # type:ignore[assignment] shell_socket = Any() control_socket = Any() debugpy_socket = Any() debug_shell_socket = Any() stdin_socket = Any() iopub_socket = Any() iopub_thread = Any() control_thread = Any() _ports = Dict() subcommands = { "install": ( "ipykernel.kernelspec.InstallIPythonKernelSpecApp", "Install the IPython kernel", ), } # connection info: connection_dir = Unicode() @default("connection_dir") def _default_connection_dir(self): return jupyter_runtime_dir() @property def abs_connection_file(self): if Path(self.connection_file).name == self.connection_file and self.connection_dir: return str(Path(str(self.connection_dir)) / self.connection_file) return self.connection_file # streams, etc. no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True) no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True) trio_loop = Bool(False, help="Set main event loop.").tag(config=True) quiet = Bool(True, help="Only send stdout/stderr to output stream").tag(config=True) outstream_class = DottedObjectName( "ipykernel.iostream.OutStream", help="The importstring for the OutStream factory", allow_none=True, ).tag(config=True) displayhook_class = DottedObjectName( "ipykernel.displayhook.ZMQDisplayHook", help="The importstring for the DisplayHook factory" ).tag(config=True) capture_fd_output = Bool( True, help="""Attempt to capture and forward low-level output, e.g. produced by Extension libraries. """, ).tag(config=True) # polling parent_handle = Integer( int(os.environ.get("JPY_PARENT_PID") or 0), help="""kill this process if its parent dies. On Windows, the argument specifies the HANDLE of the parent process, otherwise it is simply boolean. 
""", ).tag(config=True) interrupt = Integer( int(os.environ.get("JPY_INTERRUPT_EVENT") or 0), help="""ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. """, ).tag(config=True) def init_crash_handler(self): """Initialize the crash handler.""" sys.excepthook = self.excepthook def excepthook(self, etype, evalue, tb): """Handle an exception.""" # write uncaught traceback to 'real' stderr, not zmq-forwarder traceback.print_exception(etype, evalue, tb, file=sys.__stderr__) def init_poller(self): """Initialize the poller.""" if sys.platform == "win32": if self.interrupt or self.parent_handle: self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) elif self.parent_handle and self.parent_handle != 1: # PID 1 (init) is special and will never go away, # only be reassigned. # Parent polling doesn't work if ppid == 1 to start with. self.poller = ParentPollerUnix() def _try_bind_socket(self, s, port): iface = f"{self.transport}://{self.ip}" if self.transport == "tcp": if port <= 0: port = s.bind_to_random_port(iface) else: s.bind("tcp://%s:%i" % (self.ip, port)) elif self.transport == "ipc": if port <= 0: port = 1 path = "%s-%i" % (self.ip, port) while Path(path).exists(): port = port + 1 path = "%s-%i" % (self.ip, port) else: path = "%s-%i" % (self.ip, port) s.bind("ipc://%s" % path) return port def _bind_socket(self, s, port): try: win_in_use = errno.WSAEADDRINUSE # type:ignore[attr-defined] except AttributeError: win_in_use = None # Try up to 100 times to bind a port when in conflict to avoid # infinite attempts in bad setups max_attempts = 1 if port else 100 for attempt in range(max_attempts): try: return self._try_bind_socket(s, port) except zmq.ZMQError as ze: # Raise if we have any error not related to socket binding if ze.errno != errno.EADDRINUSE and ze.errno != win_in_use: raise if attempt == max_attempts - 1: raise return None def write_connection_file(self): """write connection info to JSON file""" cf = self.abs_connection_file 
connection_info = dict( ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port, control_port=self.control_port, ) if Path(cf).exists(): # If the file exists, merge our info into it. For example, if the # original file had port number 0, we update with the actual port # used. existing_connection_info = get_connection_info(cf, unpack=True) assert isinstance(existing_connection_info, dict) connection_info = dict(existing_connection_info, **connection_info) if connection_info == existing_connection_info: self.log.debug("Connection file %s with current information already exists", cf) return self.log.debug("Writing connection file: %s", cf) write_connection_file(cf, **connection_info) def cleanup_connection_file(self): """Clean up our connection file.""" cf = self.abs_connection_file self.log.debug("Cleaning up connection file: %s", cf) try: Path(cf).unlink() except OSError: pass self.cleanup_ipc_files() def init_connection_file(self): """Initialize our connection file.""" if not self.connection_file: self.connection_file = "kernel-%s.json" % os.getpid() try: self.connection_file = filefind(self.connection_file, [".", self.connection_dir]) except OSError: self.log.debug("Connection file not found: %s", self.connection_file) # This means I own it, and I'll create it in this directory: Path(self.abs_connection_file).parent.mkdir(mode=0o700, exist_ok=True, parents=True) # Also, I will clean it up: atexit.register(self.cleanup_connection_file) return try: self.load_connection_file() except Exception: self.log.error( # noqa: G201 "Failed to load connection file: %r", self.connection_file, exc_info=True ) self.exit(1) def init_sockets(self): """Create a context, a session, and the kernel sockets.""" self.log.info("Starting the kernel at pid: %i", os.getpid()) assert self.context is None, "init_sockets cannot be called twice!" 
self.context = context = zmq.Context() atexit.register(self.close) self.shell_socket = context.socket(zmq.ROUTER) self.shell_socket.linger = 1000 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) self.stdin_socket = context.socket(zmq.ROUTER) self.stdin_socket.linger = 1000 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) if hasattr(zmq, "ROUTER_HANDOVER"): # set router-handover to workaround zeromq reconnect problems # in certain rare circumstances # see ipython/ipykernel#270 and zeromq/libzmq#2892 self.shell_socket.router_handover = self.stdin_socket.router_handover = 1 self.init_control(context) self.init_iopub(context) def init_control(self, context): """Initialize the control channel.""" self.control_socket = context.socket(zmq.ROUTER) self.control_socket.linger = 1000 self.control_port = self._bind_socket(self.control_socket, self.control_port) self.log.debug("control ROUTER Channel on port: %i" % self.control_port) self.debugpy_socket = context.socket(zmq.STREAM) self.debugpy_socket.linger = 1000 self.debug_shell_socket = context.socket(zmq.DEALER) self.debug_shell_socket.linger = 1000 if self.shell_socket.getsockopt(zmq.LAST_ENDPOINT): self.debug_shell_socket.connect(self.shell_socket.getsockopt(zmq.LAST_ENDPOINT)) if hasattr(zmq, "ROUTER_HANDOVER"): # set router-handover to workaround zeromq reconnect problems # in certain rare circumstances # see ipython/ipykernel#270 and zeromq/libzmq#2892 self.control_socket.router_handover = 1 self.control_thread = ControlThread(daemon=True) def init_iopub(self, context): """Initialize the iopub channel.""" self.iopub_socket = context.socket(zmq.PUB) self.iopub_socket.linger = 1000 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) 
self.configure_tornado_logger() self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True) self.iopub_thread.start() # backward-compat: wrap iopub socket API in background thread self.iopub_socket = self.iopub_thread.background_socket def init_heartbeat(self): """start the heart beating""" # heartbeat doesn't share context, because it mustn't be blocked # by the GIL, which is accessed by libzmq when freeing zero-copy messages hb_ctx = zmq.Context() self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) self.hb_port = self.heartbeat.port self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) self.heartbeat.start() def close(self): """Close zmq sockets in an orderly fashion""" # un-capture IO before we start closing channels self.reset_io() self.log.info("Cleaning up sockets") if self.heartbeat: self.log.debug("Closing heartbeat channel") self.heartbeat.context.term() if self.iopub_thread: self.log.debug("Closing iopub channel") self.iopub_thread.stop() self.iopub_thread.close() if self.control_thread and self.control_thread.is_alive(): self.log.debug("Closing control thread") self.control_thread.stop() self.control_thread.join() if self.debugpy_socket and not self.debugpy_socket.closed: self.debugpy_socket.close() if self.debug_shell_socket and not self.debug_shell_socket.closed: self.debug_shell_socket.close() for channel in ("shell", "control", "stdin"): self.log.debug("Closing %s channel", channel) socket = getattr(self, channel + "_socket", None) if socket and not socket.closed: socket.close() self.log.debug("Terminating zmq context") if self.context: self.context.term() self.log.debug("Terminated zmq context") def log_connection_info(self): """display connection info, and store ports""" basename = Path(self.connection_file).name if ( basename == self.connection_file or str(Path(self.connection_file).parent) == self.connection_dir ): # use shortname tail = basename else: tail = self.connection_file lines = [ "To connect 
another client to this kernel, use:", " --existing %s" % tail, ] # log connection info # info-level, so often not shown. # frontends should use the %connect_info magic # to see the connection info for line in lines: self.log.info(line) # also raw print to the terminal if no parent_handle (`ipython kernel`) # unless log-level is CRITICAL (--quiet) if not self.parent_handle and int(self.log_level) < logging.CRITICAL: # type:ignore[call-overload] print(_ctrl_c_message, file=sys.__stdout__) for line in lines: print(line, file=sys.__stdout__) self._ports = dict( shell=self.shell_port, iopub=self.iopub_port, stdin=self.stdin_port, hb=self.hb_port, control=self.control_port, ) def init_blackhole(self): """redirects stdout/stderr to devnull if necessary""" if self.no_stdout or self.no_stderr: blackhole = open(os.devnull, "w") # noqa: SIM115 if self.no_stdout: sys.stdout = sys.__stdout__ = blackhole # type:ignore[misc] if self.no_stderr: sys.stderr = sys.__stderr__ = blackhole # type:ignore[misc] def init_io(self): """Redirect input streams and set a display hook.""" if self.outstream_class: outstream_factory = import_item(str(self.outstream_class)) if sys.stdout is not None: sys.stdout.flush() e_stdout = None if self.quiet else sys.__stdout__ e_stderr = None if self.quiet else sys.__stderr__ if not self.capture_fd_output: outstream_factory = partial(outstream_factory, watchfd=False) sys.stdout = outstream_factory(self.session, self.iopub_thread, "stdout", echo=e_stdout) if sys.stderr is not None: sys.stderr.flush() sys.stderr = outstream_factory(self.session, self.iopub_thread, "stderr", echo=e_stderr) if hasattr(sys.stderr, "_original_stdstream_copy"): for handler in self.log.handlers: if isinstance(handler, StreamHandler) and (handler.stream.buffer.fileno() == 2): self.log.debug("Seeing logger to stderr, rerouting to raw filedescriptor.") handler.stream = TextIOWrapper( FileIO( sys.stderr._original_stdstream_copy, "w", ) ) if self.displayhook_class: displayhook_factory = 
import_item(str(self.displayhook_class)) self.displayhook = displayhook_factory(self.session, self.iopub_socket) sys.displayhook = self.displayhook self.patch_io() def reset_io(self): """restore original io restores state after init_io """ sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ sys.displayhook = sys.__displayhook__ def patch_io(self): """Patch important libraries that can't handle sys.stdout forwarding""" try: import faulthandler except ImportError: pass else: # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible # updates to the upstream API and update accordingly (up-to-date as of Python 3.5): # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable # change default file to __stderr__ from forwarded stderr faulthandler_enable = faulthandler.enable def enable(file=sys.__stderr__, all_threads=True, **kwargs): return faulthandler_enable(file=file, all_threads=all_threads, **kwargs) faulthandler.enable = enable if hasattr(faulthandler, "register"): faulthandler_register = faulthandler.register def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs): return faulthandler_register( signum, file=file, all_threads=all_threads, chain=chain, **kwargs ) faulthandler.register = register def init_signal(self): """Initialize the signal handler.""" signal.signal(signal.SIGINT, signal.SIG_IGN) def init_kernel(self): """Create the Kernel object itself""" shell_stream = ZMQStream(self.shell_socket) control_stream = ZMQStream(self.control_socket, self.control_thread.io_loop) debugpy_stream = ZMQStream(self.debugpy_socket, self.control_thread.io_loop) self.control_thread.start() kernel_factory = self.kernel_class.instance # type:ignore[attr-defined] kernel = kernel_factory( parent=self, session=self.session, control_stream=control_stream, debugpy_stream=debugpy_stream, debug_shell_socket=self.debug_shell_socket, shell_stream=shell_stream, control_thread=self.control_thread, 
iopub_thread=self.iopub_thread, iopub_socket=self.iopub_socket, stdin_socket=self.stdin_socket, log=self.log, profile_dir=self.profile_dir, user_ns=self.user_ns, ) kernel.record_ports({name + "_port": port for name, port in self._ports.items()}) self.kernel = kernel # Allow the displayhook to get the execution count self.displayhook.get_execution_count = lambda: kernel.execution_count def init_gui_pylab(self): """Enable GUI event loop integration, taking pylab into account.""" # Register inline backend as default # this is higher priority than matplotlibrc, # but lower priority than anything else (mpl.use() for instance). # This only affects matplotlib >= 1.5 if not os.environ.get("MPLBACKEND"): os.environ["MPLBACKEND"] = "module://matplotlib_inline.backend_inline" # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` # to ensure that any exception is printed straight to stderr. # Normally _showtraceback associates the reply with an execution, # which means frontends will never draw it, as this exception # is not associated with any execute request. shell = self.shell assert shell is not None _showtraceback = shell._showtraceback try: # replace error-sending traceback with stderr def print_tb(etype, evalue, stb): print("GUI event loop or pylab initialization failed", file=sys.stderr) assert shell is not None print(shell.InteractiveTB.stb2text(stb), file=sys.stderr) shell._showtraceback = print_tb InteractiveShellApp.init_gui_pylab(self) finally: shell._showtraceback = _showtraceback def init_shell(self): """Initialize the shell channel.""" self.shell = getattr(self.kernel, "shell", None) if self.shell: self.shell.configurables.append(self) def configure_tornado_logger(self): """Configure the tornado logging.Logger. Must set up the tornado logger or else tornado will call basicConfig for the root logger which makes the root logger go to the real sys.stderr instead of the capture streams. This function mimics the setup of logging.basicConfig. 
""" logger = logging.getLogger("tornado") handler = logging.StreamHandler() formatter = logging.Formatter(logging.BASIC_FORMAT) handler.setFormatter(formatter) logger.addHandler(handler) def _init_asyncio_patch(self): """set default asyncio policy to be compatible with tornado Tornado 6 (at least) is not compatible with the default asyncio implementation on Windows Pick the older SelectorEventLoopPolicy on Windows if the known-incompatible default policy is in use. Support for Proactor via a background thread is available in tornado 6.1, but it is still preferable to run the Selector in the main thread instead of the background. do this as early as possible to make it a low priority and overridable ref: https://github.com/tornadoweb/tornado/issues/2608 FIXME: if/when tornado supports the defaults in asyncio without threads, remove and bump tornado requirement for py38. Most likely, this will mean a new Python version where asyncio.ProactorEventLoop supports add_reader and friends. """ if sys.platform.startswith("win") and sys.version_info >= (3, 8): import asyncio try: from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass # not affected else: if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: # WindowsProactorEventLoopPolicy is not compatible with tornado 6 # fallback to the pre-3.8 default of Selector asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) def init_pdb(self): """Replace pdb with IPython's version that is interruptible. With the non-interruptible version, stopping pdb() locks up the kernel in a non-recoverable state. 
""" import pdb from IPython.core import debugger if hasattr(debugger, "InterruptiblePdb"): # Only available in newer IPython releases: debugger.Pdb = debugger.InterruptiblePdb # type:ignore[misc] pdb.Pdb = debugger.Pdb # type:ignore[assignment,misc] pdb.set_trace = debugger.set_trace @catch_config_error def initialize(self, argv=None): """Initialize the application.""" self._init_asyncio_patch() super().initialize(argv) if self.subapp is not None: return self.init_pdb() self.init_blackhole() self.init_connection_file() self.init_poller() self.init_sockets() self.init_heartbeat() # writing/displaying connection info must be *after* init_sockets/heartbeat self.write_connection_file() # Log connection info after writing connection file, so that the connection # file is definitely available at the time someone reads the log. self.log_connection_info() self.init_io() try: self.init_signal() except Exception: # Catch exception when initializing signal fails, eg when running the # kernel on a separate thread if int(self.log_level) < logging.CRITICAL: # type:ignore[call-overload] self.log.error("Unable to initialize signal:", exc_info=True) # noqa: G201 self.init_kernel() # shell init steps self.init_path() self.init_shell() if self.shell: self.init_gui_pylab() self.init_extensions() self.init_code() # flush stdout/stderr, so that anything written to these streams during # initialization do not get associated with the first execution request sys.stdout.flush() sys.stderr.flush() def start(self): """Start the application.""" if self.subapp is not None: return self.subapp.start() if self.poller is not None: self.poller.start() self.kernel.start() self.io_loop = ioloop.IOLoop.current() if self.trio_loop: from ipykernel.trio_runner import TrioRunner tr = TrioRunner() tr.initialize(self.kernel, self.io_loop) try: tr.run() except KeyboardInterrupt: pass else: try: self.io_loop.start() except KeyboardInterrupt: pass launch_new_instance = IPKernelApp.launch_instance def main(): # 
pragma: no cover """Run an IPKernel as an application""" app = IPKernelApp.instance() app.initialize() app.start() if __name__ == "__main__": main() ipykernel-6.29.5/ipykernel/kernelbase.py000066400000000000000000001416331464053401500203200ustar00rootroot00000000000000"""Base class for a kernel that talks to frontends over 0MQ.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import asyncio import concurrent.futures import inspect import itertools import logging import os import socket import sys import threading import time import typing as t import uuid import warnings from datetime import datetime from functools import partial from signal import SIGINT, SIGTERM, Signals, default_int_handler, signal from .control import CONTROL_THREAD_NAME if sys.platform != "win32": from signal import SIGKILL else: SIGKILL = "windown-SIGKILL-sentinel" try: # jupyter_client >= 5, use tz-aware now from jupyter_client.session import utcnow as now except ImportError: # jupyter_client < 5, use local now() now = datetime.now import psutil import zmq from IPython.core.error import StdinNotImplementedError from jupyter_client.session import Session from tornado import ioloop from tornado.queues import Queue, QueueEmpty from traitlets.config.configurable import SingletonConfigurable from traitlets.traitlets import ( Any, Bool, Dict, Float, Instance, Integer, List, Set, Unicode, default, observe, ) from zmq.eventloop.zmqstream import ZMQStream from ipykernel.jsonutil import json_clean from ._version import kernel_protocol_version from .iostream import OutStream def _accepts_parameters(meth, param_names): parameters = inspect.signature(meth).parameters accepts = {param: False for param in param_names} for param in param_names: param_spec = parameters.get(param) accepts[param] = ( param_spec and param_spec.kind in [param_spec.KEYWORD_ONLY, param_spec.POSITIONAL_OR_KEYWORD] ) or any(p.kind == 
p.VAR_KEYWORD for p in parameters.values()) return accepts class Kernel(SingletonConfigurable): """The base kernel class.""" # --------------------------------------------------------------------------- # Kernel interface # --------------------------------------------------------------------------- # attribute to override with a GUI eventloop = Any(None) processes: dict[str, psutil.Process] = {} @observe("eventloop") def _update_eventloop(self, change): """schedule call to eventloop from IOLoop""" loop = ioloop.IOLoop.current() if change.new is not None: loop.add_callback(self.enter_eventloop) session = Instance(Session, allow_none=True) profile_dir = Instance("IPython.core.profiledir.ProfileDir", allow_none=True) shell_stream = Instance(ZMQStream, allow_none=True) shell_streams: List[t.Any] = List( help="""Deprecated shell_streams alias. Use shell_stream .. versionchanged:: 6.0 shell_streams is deprecated. Use shell_stream. """ ) implementation: str implementation_version: str banner: str @default("shell_streams") def _shell_streams_default(self): # pragma: no cover warnings.warn( "Kernel.shell_streams is deprecated in ipykernel 6.0. Use Kernel.shell_stream", DeprecationWarning, stacklevel=2, ) if self.shell_stream is not None: return [self.shell_stream] return [] @observe("shell_streams") def _shell_streams_changed(self, change): # pragma: no cover warnings.warn( "Kernel.shell_streams is deprecated in ipykernel 6.0. Use Kernel.shell_stream", DeprecationWarning, stacklevel=2, ) if len(change.new) > 1: warnings.warn( "Kernel only supports one shell stream. 
Additional streams will be ignored.", RuntimeWarning, stacklevel=2, ) if change.new: self.shell_stream = change.new[0] control_stream = Instance(ZMQStream, allow_none=True) debug_shell_socket = Any() control_thread = Any() iopub_socket = Any() iopub_thread = Any() stdin_socket = Any() log: logging.Logger = Instance(logging.Logger, allow_none=True) # type:ignore[assignment] # identities: int_id = Integer(-1) ident = Unicode() @default("ident") def _default_ident(self): return str(uuid.uuid4()) # This should be overridden by wrapper kernels that implement any real # language. language_info: dict[str, object] = {} # any links that should go in the help menu help_links: List[dict[str, str]] = List() # Experimental option to break in non-user code. # The ipykernel source is in the call stack, so the user # has to manipulate the step-over and step-into in a wize way. debug_just_my_code = Bool( True, help="""Set to False if you want to debug python standard and dependent libraries. """, ).tag(config=True) # track associations with current request # Private interface _darwin_app_nap = Bool( True, help="""Whether to use appnope for compatibility with OS X App Nap. Only affects OS X >= 10.9. """, ).tag(config=True) # track associations with current request _allow_stdin = Bool(False) _parents: Dict[str, t.Any] = Dict({"shell": {}, "control": {}}) _parent_ident = Dict({"shell": b"", "control": b""}) @property def _parent_header(self): warnings.warn( "Kernel._parent_header is deprecated in ipykernel 6. Use .get_parent()", DeprecationWarning, stacklevel=2, ) return self.get_parent() # Time to sleep after flushing the stdout/err buffers in each execute # cycle. While this introduces a hard limit on the minimal latency of the # execute cycle, it helps prevent output synchronization problems for # clients. # Units are in seconds. The minimum zmq latency on local host is probably # ~150 microseconds, set this to 500us for now. 
We may need to increase it # a little if it's not enough after more interactive testing. _execute_sleep = Float(0.0005).tag(config=True) # Frequency of the kernel's event loop. # Units are in seconds, kernel subclasses for GUI toolkits may need to # adapt to milliseconds. _poll_interval = Float(0.01).tag(config=True) stop_on_error_timeout = Float( 0.0, config=True, help="""time (in seconds) to wait for messages to arrive when aborting queued requests after an error. Requests that arrive within this window after an error will be cancelled. Increase in the event of unusually slow network causing significant delays, which can manifest as e.g. "Run all" in a notebook aborting some, but not all, messages after an error. """, ) # If the shutdown was requested over the network, we leave here the # necessary reply message so it can be sent by our registered atexit # handler. This ensures that the reply is only sent to clients truly at # the end of our shutdown process (which happens after the underlying # IPython shell's own shutdown). _shutdown_message = None # This is a dict of port number that the kernel is listening on. It is set # by record_ports and used by connect_request. _recorded_ports = Dict() # set of aborted msg_ids aborted = Set() # Track execution count here. For IPython, we override this to use the # execution count we store in the shell. 
execution_count = 0 msg_types = [ "execute_request", "complete_request", "inspect_request", "history_request", "comm_info_request", "kernel_info_request", "connect_request", "shutdown_request", "is_complete_request", "interrupt_request", # deprecated: "apply_request", ] # add deprecated ipyparallel control messages control_msg_types = [ *msg_types, "clear_request", "abort_request", "debug_request", "usage_request", ] def __init__(self, **kwargs): """Initialize the kernel.""" super().__init__(**kwargs) # Kernel application may swap stdout and stderr to OutStream, # which is the case in `IPKernelApp.init_io`, hence `sys.stdout` # can already by different from TextIO at initialization time. self._stdout: OutStream | t.TextIO = sys.stdout self._stderr: OutStream | t.TextIO = sys.stderr # Build dict of handlers for message types self.shell_handlers = {} for msg_type in self.msg_types: self.shell_handlers[msg_type] = getattr(self, msg_type) self.control_handlers = {} for msg_type in self.control_msg_types: self.control_handlers[msg_type] = getattr(self, msg_type) self.control_queue: Queue[t.Any] = Queue() # Storing the accepted parameters for do_execute, used in execute_request self._do_exec_accepted_params = _accepts_parameters( self.do_execute, ["cell_meta", "cell_id"] ) def dispatch_control(self, msg): self.control_queue.put_nowait(msg) async def poll_control_queue(self): while True: msg = await self.control_queue.get() # handle tracers from _flush_control_queue if isinstance(msg, (concurrent.futures.Future, asyncio.Future)): msg.set_result(None) continue await self.process_control(msg) async def _flush_control_queue(self): """Flush the control queue, wait for processing of any pending messages""" tracer_future: concurrent.futures.Future[object] | asyncio.Future[object] if self.control_thread: control_loop = self.control_thread.io_loop # concurrent.futures.Futures are threadsafe # and can be used to await across threads tracer_future = concurrent.futures.Future() 
awaitable_future = asyncio.wrap_future(tracer_future) else: control_loop = self.io_loop tracer_future = awaitable_future = asyncio.Future() def _flush(): # control_stream.flush puts messages on the queue if self.control_stream: self.control_stream.flush() # put Future on the queue after all of those, # so we can wait for all queued messages to be processed self.control_queue.put(tracer_future) control_loop.add_callback(_flush) return awaitable_future async def process_control(self, msg): """dispatch control requests""" if not self.session: return idents, msg = self.session.feed_identities(msg, copy=False) try: msg = self.session.deserialize(msg, content=True, copy=False) except Exception: self.log.error("Invalid Control Message", exc_info=True) # noqa: G201 return self.log.debug("Control received: %s", msg) # Set the parent message for side effects. self.set_parent(idents, msg, channel="control") self._publish_status("busy", "control") header = msg["header"] msg_type = header["msg_type"] handler = self.control_handlers.get(msg_type, None) if handler is None: self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type) else: try: result = handler(self.control_stream, idents, msg) if inspect.isawaitable(result): await result except Exception: self.log.error("Exception in control handler:", exc_info=True) # noqa: G201 sys.stdout.flush() sys.stderr.flush() self._publish_status("idle", "control") # flush to ensure reply is sent if self.control_stream: self.control_stream.flush(zmq.POLLOUT) def should_handle(self, stream, msg, idents): """Check whether a shell-channel message should be handled Allows subclasses to prevent handling of certain messages (e.g. aborted requests). """ msg_id = msg["header"]["msg_id"] if msg_id in self.aborted: # is it safe to assume a msg_id will not be resubmitted? 
self.aborted.remove(msg_id) self._send_abort_reply(stream, msg, idents) return False return True async def dispatch_shell(self, msg): """dispatch shell requests""" if not self.session: return # flush control queue before handling shell requests await self._flush_control_queue() idents, msg = self.session.feed_identities(msg, copy=False) try: msg = self.session.deserialize(msg, content=True, copy=False) except Exception: self.log.error("Invalid Message", exc_info=True) # noqa: G201 return # Set the parent message for side effects. self.set_parent(idents, msg, channel="shell") self._publish_status("busy", "shell") msg_type = msg["header"]["msg_type"] # Only abort execute requests if self._aborting and msg_type == "execute_request": self._send_abort_reply(self.shell_stream, msg, idents) self._publish_status("idle", "shell") # flush to ensure reply is sent before # handling the next request if self.shell_stream: self.shell_stream.flush(zmq.POLLOUT) return # Print some info about this message and leave a '--->' marker, so it's # easier to trace visually the message chain when debugging. Each # handler prints its message at the end. self.log.debug("\n*** MESSAGE TYPE:%s***", msg_type) self.log.debug(" Content: %s\n --->\n ", msg["content"]) if not self.should_handle(self.shell_stream, msg, idents): return handler = self.shell_handlers.get(msg_type, None) if handler is None: self.log.warning("Unknown message type: %r", msg_type) else: self.log.debug("%s: %s", msg_type, msg) try: self.pre_handler_hook() except Exception: self.log.debug("Unable to signal in pre_handler_hook:", exc_info=True) try: result = handler(self.shell_stream, idents, msg) if inspect.isawaitable(result): await result except Exception: self.log.error("Exception in message handler:", exc_info=True) # noqa: G201 except KeyboardInterrupt: # Ctrl-c shouldn't crash the kernel here. 
self.log.error("KeyboardInterrupt caught in kernel.") finally: try: self.post_handler_hook() except Exception: self.log.debug("Unable to signal in post_handler_hook:", exc_info=True) sys.stdout.flush() sys.stderr.flush() self._publish_status("idle", "shell") # flush to ensure reply is sent before # handling the next request if self.shell_stream: self.shell_stream.flush(zmq.POLLOUT) def pre_handler_hook(self): """Hook to execute before calling message handler""" # ensure default_int_handler during handler call self.saved_sigint_handler = signal(SIGINT, default_int_handler) def post_handler_hook(self): """Hook to execute after calling message handler""" signal(SIGINT, self.saved_sigint_handler) def enter_eventloop(self): """enter eventloop""" self.log.info("Entering eventloop %s", self.eventloop) # record handle, so we can check when this changes eventloop = self.eventloop if eventloop is None: self.log.info("Exiting as there is no eventloop") return async def advance_eventloop(): # check if eventloop changed: if self.eventloop is not eventloop: self.log.info("exiting eventloop %s", eventloop) return if self.msg_queue.qsize(): self.log.debug("Delaying eventloop due to waiting messages") # still messages to process, make the eventloop wait schedule_next() return self.log.debug("Advancing eventloop %s", eventloop) try: eventloop(self) except KeyboardInterrupt: # Ctrl-C shouldn't crash the kernel self.log.error("KeyboardInterrupt caught in kernel") if self.eventloop is eventloop: # schedule advance again schedule_next() def schedule_next(): """Schedule the next advance of the eventloop""" # call_later allows the io_loop to process other events if needed. # Going through schedule_dispatch ensures all other dispatches on msg_queue # are processed before we enter the eventloop, even if the previous dispatch was # already consumed from the queue by process_one and the queue is # technically empty. 
self.log.debug("Scheduling eventloop advance") self.io_loop.call_later(0.001, partial(self.schedule_dispatch, advance_eventloop)) # begin polling the eventloop schedule_next() async def do_one_iteration(self): """Process a single shell message Any pending control messages will be flushed as well .. versionchanged:: 5 This is now a coroutine """ # flush messages off of shell stream into the message queue if self.shell_stream: self.shell_stream.flush() # process at most one shell message per iteration await self.process_one(wait=False) async def process_one(self, wait=True): """Process one request Returns None if no message was handled. """ if wait: t, dispatch, args = await self.msg_queue.get() else: try: t, dispatch, args = self.msg_queue.get_nowait() except (asyncio.QueueEmpty, QueueEmpty): return await dispatch(*args) async def dispatch_queue(self): """Coroutine to preserve order of message handling Ensures that only one message is processing at a time, even when the handler is async """ while True: try: await self.process_one() except Exception: self.log.exception("Error in message handler") _message_counter = Any( help="""Monotonic counter of messages """, ) @default("_message_counter") def _message_counter_default(self): return itertools.count() def schedule_dispatch(self, dispatch, *args): """schedule a message for dispatch""" idx = next(self._message_counter) self.msg_queue.put_nowait( ( idx, dispatch, args, ) ) # ensure the eventloop wakes up self.io_loop.add_callback(lambda: None) def start(self): """register dispatchers for streams""" self.io_loop = ioloop.IOLoop.current() self.msg_queue: Queue[t.Any] = Queue() self.io_loop.add_callback(self.dispatch_queue) if self.control_stream: self.control_stream.on_recv(self.dispatch_control, copy=False) control_loop = self.control_thread.io_loop if self.control_thread else self.io_loop asyncio.run_coroutine_threadsafe(self.poll_control_queue(), control_loop.asyncio_loop) if self.shell_stream: 
self.shell_stream.on_recv( partial( self.schedule_dispatch, self.dispatch_shell, ), copy=False, ) # publish idle status self._publish_status("starting", "shell") def record_ports(self, ports): """Record the ports that this kernel is using. The creator of the Kernel instance must call this methods if they want the :meth:`connect_request` method to return the port numbers. """ self._recorded_ports = ports # --------------------------------------------------------------------------- # Kernel request handlers # --------------------------------------------------------------------------- def _publish_execute_input(self, code, parent, execution_count): """Publish the code request on the iopub stream.""" if not self.session: return self.session.send( self.iopub_socket, "execute_input", {"code": code, "execution_count": execution_count}, parent=parent, ident=self._topic("execute_input"), ) def _publish_status(self, status, channel, parent=None): """send status (busy/idle) on IOPub""" if not self.session: return self.session.send( self.iopub_socket, "status", {"execution_state": status}, parent=parent or self.get_parent(channel), ident=self._topic("status"), ) def _publish_debug_event(self, event): if not self.session: return self.session.send( self.iopub_socket, "debug_event", event, parent=self.get_parent(), ident=self._topic("debug_event"), ) def set_parent(self, ident, parent, channel="shell"): """Set the current parent request Side effects (IOPub messages) and replies are associated with the request that caused them via the parent_header. The parent identity is used to route input_request messages on the stdin channel. """ self._parent_ident[channel] = ident self._parents[channel] = parent def get_parent(self, channel=None): """Get the parent request associated with a channel. .. versionadded:: 6 Parameters ---------- channel : str the name of the channel ('shell' or 'control') Returns ------- message : dict the parent message for the most recent request on the channel. 
""" if channel is None: # If a channel is not specified, get information from current thread if threading.current_thread().name == CONTROL_THREAD_NAME: channel = "control" else: channel = "shell" return self._parents.get(channel, {}) def send_response( self, stream, msg_or_type, content=None, ident=None, buffers=None, track=False, header=None, metadata=None, channel=None, ): """Send a response to the message we're currently processing. This accepts all the parameters of :meth:`jupyter_client.session.Session.send` except ``parent``. This relies on :meth:`set_parent` having been called for the current message. """ if not self.session: return None return self.session.send( stream, msg_or_type, content, self.get_parent(channel), ident, buffers, track, header, metadata, ) def init_metadata(self, parent): """Initialize metadata. Run at the beginning of execution requests. """ # FIXME: `started` is part of ipyparallel # Remove for ipykernel 5.0 return { "started": now(), } def finish_metadata(self, parent, metadata, reply_content): """Finish populating metadata. Run after completing an execution request. 
""" return metadata async def execute_request(self, stream, ident, parent): """handle an execute_request""" if not self.session: return try: content = parent["content"] code = content["code"] silent = content.get("silent", False) store_history = content.get("store_history", not silent) user_expressions = content.get("user_expressions", {}) allow_stdin = content.get("allow_stdin", False) cell_meta = parent.get("metadata", {}) cell_id = cell_meta.get("cellId") except Exception: self.log.error("Got bad msg: ") self.log.error("%s", parent) return stop_on_error = content.get("stop_on_error", True) metadata = self.init_metadata(parent) # Re-broadcast our input for the benefit of listening clients, and # start computing output if not silent: self.execution_count += 1 self._publish_execute_input(code, parent, self.execution_count) # Arguments based on the do_execute signature do_execute_args = { "code": code, "silent": silent, "store_history": store_history, "user_expressions": user_expressions, "allow_stdin": allow_stdin, } if self._do_exec_accepted_params["cell_meta"]: do_execute_args["cell_meta"] = cell_meta if self._do_exec_accepted_params["cell_id"]: do_execute_args["cell_id"] = cell_id # Call do_execute with the appropriate arguments reply_content = self.do_execute(**do_execute_args) if inspect.isawaitable(reply_content): reply_content = await reply_content # Flush output before sending the reply. sys.stdout.flush() sys.stderr.flush() # FIXME: on rare occasions, the flush doesn't seem to make it to the # clients... This seems to mitigate the problem, but we definitely need # to better understand what's going on. if self._execute_sleep: time.sleep(self._execute_sleep) # Send the reply. 
reply_content = json_clean(reply_content) metadata = self.finish_metadata(parent, metadata, reply_content) reply_msg: dict[str, t.Any] = self.session.send( # type:ignore[assignment] stream, "execute_reply", reply_content, parent, metadata=metadata, ident=ident, ) self.log.debug("%s", reply_msg) if not silent and reply_msg["content"]["status"] == "error" and stop_on_error: self._abort_queues() def do_execute( self, code, silent, store_history=True, user_expressions=None, allow_stdin=False, *, cell_meta=None, cell_id=None, ): """Execute user code. Must be overridden by subclasses.""" raise NotImplementedError async def complete_request(self, stream, ident, parent): """Handle a completion request.""" if not self.session: return content = parent["content"] code = content["code"] cursor_pos = content["cursor_pos"] matches = self.do_complete(code, cursor_pos) if inspect.isawaitable(matches): matches = await matches matches = json_clean(matches) self.session.send(stream, "complete_reply", matches, parent, ident) def do_complete(self, code, cursor_pos): """Override in subclasses to find completions.""" return { "matches": [], "cursor_end": cursor_pos, "cursor_start": cursor_pos, "metadata": {}, "status": "ok", } async def inspect_request(self, stream, ident, parent): """Handle an inspect request.""" if not self.session: return content = parent["content"] reply_content = self.do_inspect( content["code"], content["cursor_pos"], content.get("detail_level", 0), set(content.get("omit_sections", [])), ) if inspect.isawaitable(reply_content): reply_content = await reply_content # Before we send this object over, we scrub it for JSON usage reply_content = json_clean(reply_content) msg = self.session.send(stream, "inspect_reply", reply_content, parent, ident) self.log.debug("%s", msg) def do_inspect(self, code, cursor_pos, detail_level=0, omit_sections=()): """Override in subclasses to allow introspection.""" return {"status": "ok", "data": {}, "metadata": {}, "found": False} async 
def history_request(self, stream, ident, parent): """Handle a history request.""" if not self.session: return content = parent["content"] reply_content = self.do_history(**content) if inspect.isawaitable(reply_content): reply_content = await reply_content reply_content = json_clean(reply_content) msg = self.session.send(stream, "history_reply", reply_content, parent, ident) self.log.debug("%s", msg) def do_history( self, hist_access_type, output, raw, session=None, start=None, stop=None, n=None, pattern=None, unique=False, ): """Override in subclasses to access history.""" return {"status": "ok", "history": []} async def connect_request(self, stream, ident, parent): """Handle a connect request.""" if not self.session: return content = self._recorded_ports.copy() if self._recorded_ports else {} content["status"] = "ok" msg = self.session.send(stream, "connect_reply", content, parent, ident) self.log.debug("%s", msg) @property def kernel_info(self): return { "protocol_version": kernel_protocol_version, "implementation": self.implementation, "implementation_version": self.implementation_version, "language_info": self.language_info, "banner": self.banner, "help_links": self.help_links, } async def kernel_info_request(self, stream, ident, parent): """Handle a kernel info request.""" if not self.session: return content = {"status": "ok"} content.update(self.kernel_info) msg = self.session.send(stream, "kernel_info_reply", content, parent, ident) self.log.debug("%s", msg) async def comm_info_request(self, stream, ident, parent): """Handle a comm info request.""" if not self.session: return content = parent["content"] target_name = content.get("target_name", None) # Should this be moved to ipkernel? 
if hasattr(self, "comm_manager"): comms = { k: dict(target_name=v.target_name) for (k, v) in self.comm_manager.comms.items() if v.target_name == target_name or target_name is None } else: comms = {} reply_content = dict(comms=comms, status="ok") msg = self.session.send(stream, "comm_info_reply", reply_content, parent, ident) self.log.debug("%s", msg) def _send_interrupt_children(self): if os.name == "nt": self.log.error("Interrupt message not supported on Windows") else: pid = os.getpid() pgid = os.getpgid(pid) # Prefer process-group over process # but only if the kernel is the leader of the process group if pgid and pgid == pid and hasattr(os, "killpg"): try: os.killpg(pgid, SIGINT) except OSError: os.kill(pid, SIGINT) raise else: os.kill(pid, SIGINT) async def interrupt_request(self, stream, ident, parent): """Handle an interrupt request.""" if not self.session: return content: dict[str, t.Any] = {"status": "ok"} try: self._send_interrupt_children() except OSError as err: import traceback content = { "status": "error", "traceback": traceback.format_stack(), "ename": str(type(err).__name__), "evalue": str(err), } self.session.send(stream, "interrupt_reply", content, parent, ident=ident) return async def shutdown_request(self, stream, ident, parent): """Handle a shutdown request.""" if not self.session: return content = self.do_shutdown(parent["content"]["restart"]) if inspect.isawaitable(content): content = await content self.session.send(stream, "shutdown_reply", content, parent, ident=ident) # same content, but different msg_id for broadcasting on IOPub self._shutdown_message = self.session.msg("shutdown_reply", content, parent) await self._at_shutdown() self.log.debug("Stopping control ioloop") if self.control_stream: control_io_loop = self.control_stream.io_loop control_io_loop.add_callback(control_io_loop.stop) self.log.debug("Stopping shell ioloop") if self.shell_stream: shell_io_loop = self.shell_stream.io_loop shell_io_loop.add_callback(shell_io_loop.stop) 
def do_shutdown(self, restart): """Override in subclasses to do things when the frontend shuts down the kernel. """ return {"status": "ok", "restart": restart} async def is_complete_request(self, stream, ident, parent): """Handle an is_complete request.""" if not self.session: return content = parent["content"] code = content["code"] reply_content = self.do_is_complete(code) if inspect.isawaitable(reply_content): reply_content = await reply_content reply_content = json_clean(reply_content) reply_msg = self.session.send(stream, "is_complete_reply", reply_content, parent, ident) self.log.debug("%s", reply_msg) def do_is_complete(self, code): """Override in subclasses to find completions.""" return {"status": "unknown"} async def debug_request(self, stream, ident, parent): """Handle a debug request.""" if not self.session: return content = parent["content"] reply_content = self.do_debug_request(content) if inspect.isawaitable(reply_content): reply_content = await reply_content reply_content = json_clean(reply_content) reply_msg = self.session.send(stream, "debug_reply", reply_content, parent, ident) self.log.debug("%s", reply_msg) def get_process_metric_value(self, process, name, attribute=None): """Get the process metric value.""" try: metric_value = getattr(process, name)() if attribute is not None: # ... a named tuple return getattr(metric_value, attribute) # ... 
or a number return metric_value # Avoid littering logs with stack traces # complaining about dead processes except BaseException: return 0 async def usage_request(self, stream, ident, parent): """Handle a usage request.""" if not self.session: return reply_content = {"hostname": socket.gethostname(), "pid": os.getpid()} current_process = psutil.Process() all_processes = [current_process, *current_process.children(recursive=True)] # Ensure 1) self.processes is updated to only current subprocesses # and 2) we reuse processes when possible (needed for accurate CPU) self.processes = { process.pid: self.processes.get(process.pid, process) # type:ignore[misc,call-overload] for process in all_processes } reply_content["kernel_cpu"] = sum( [ self.get_process_metric_value(process, "cpu_percent", None) for process in self.processes.values() ] ) mem_info_type = "pss" if hasattr(current_process.memory_full_info(), "pss") else "rss" reply_content["kernel_memory"] = sum( [ self.get_process_metric_value(process, "memory_full_info", mem_info_type) for process in self.processes.values() ] ) cpu_percent = psutil.cpu_percent() # https://psutil.readthedocs.io/en/latest/index.html?highlight=cpu#psutil.cpu_percent # The first time cpu_percent is called it will return a meaningless 0.0 value which you are supposed to ignore. 
if cpu_percent is not None and cpu_percent != 0.0: # type:ignore[redundant-expr] reply_content["host_cpu_percent"] = cpu_percent reply_content["cpu_count"] = psutil.cpu_count(logical=True) reply_content["host_virtual_memory"] = dict(psutil.virtual_memory()._asdict()) reply_msg = self.session.send(stream, "usage_reply", reply_content, parent, ident) self.log.debug("%s", reply_msg) async def do_debug_request(self, msg): raise NotImplementedError # --------------------------------------------------------------------------- # Engine methods (DEPRECATED) # --------------------------------------------------------------------------- async def apply_request(self, stream, ident, parent): # pragma: no cover """Handle an apply request.""" self.log.warning("apply_request is deprecated in kernel_base, moving to ipyparallel.") try: content = parent["content"] bufs = parent["buffers"] msg_id = parent["header"]["msg_id"] except Exception: self.log.error("Got bad msg: %s", parent, exc_info=True) # noqa: G201 return md = self.init_metadata(parent) reply_content, result_buf = self.do_apply(content, bufs, msg_id, md) # flush i/o sys.stdout.flush() sys.stderr.flush() md = self.finish_metadata(parent, md, reply_content) if not self.session: return self.session.send( stream, "apply_reply", reply_content, parent=parent, ident=ident, buffers=result_buf, metadata=md, ) def do_apply(self, content, bufs, msg_id, reply_metadata): """DEPRECATED""" raise NotImplementedError # --------------------------------------------------------------------------- # Control messages (DEPRECATED) # --------------------------------------------------------------------------- async def abort_request(self, stream, ident, parent): # pragma: no cover """abort a specific msg by id""" self.log.warning( "abort_request is deprecated in kernel_base. 
It is only part of IPython parallel" ) msg_ids = parent["content"].get("msg_ids", None) if isinstance(msg_ids, str): msg_ids = [msg_ids] if not msg_ids: self._abort_queues() for mid in msg_ids: self.aborted.add(str(mid)) content = dict(status="ok") if not self.session: return reply_msg = self.session.send( stream, "abort_reply", content=content, parent=parent, ident=ident ) self.log.debug("%s", reply_msg) async def clear_request(self, stream, idents, parent): # pragma: no cover """Clear our namespace.""" self.log.warning( "clear_request is deprecated in kernel_base. It is only part of IPython parallel" ) content = self.do_clear() if self.session: self.session.send(stream, "clear_reply", ident=idents, parent=parent, content=content) def do_clear(self): """DEPRECATED since 4.0.3""" raise NotImplementedError # --------------------------------------------------------------------------- # Protected interface # --------------------------------------------------------------------------- def _topic(self, topic): """prefixed topic for IOPub messages""" base = "kernel.%s" % self.ident return (f"{base}.{topic}").encode() _aborting = Bool(False) def _abort_queues(self): # while this flag is true, # execute requests will be aborted self._aborting = True self.log.info("Aborting queue") # flush streams, so all currently waiting messages # are added to the queue if self.shell_stream: self.shell_stream.flush() # Callback to signal that we are done aborting # dispatch functions _must_ be async async def stop_aborting(): self.log.info("Finishing abort") self._aborting = False # put the stop-aborting event on the message queue # so that all messages already waiting in the queue are aborted # before we reset the flag schedule_stop_aborting = partial(self.schedule_dispatch, stop_aborting) if self.stop_on_error_timeout: # if we have a delay, give messages this long to arrive on the queue # before we stop aborting requests self.io_loop.call_later(self.stop_on_error_timeout, 
schedule_stop_aborting) # If we have an eventloop, it may interfere with the call_later above. # If the loop has a _schedule_exit method, we call that so the loop exits # after stop_on_error_timeout, returning to the main io_loop and letting # the call_later fire. if self.eventloop is not None and hasattr(self.eventloop, "_schedule_exit"): self.eventloop._schedule_exit(self.stop_on_error_timeout + 0.01) else: schedule_stop_aborting() def _send_abort_reply(self, stream, msg, idents): """Send a reply to an aborted request""" if not self.session: return self.log.info("Aborting %s: %s", msg["header"]["msg_id"], msg["header"]["msg_type"]) reply_type = msg["header"]["msg_type"].rsplit("_", 1)[0] + "_reply" status = {"status": "aborted"} md = self.init_metadata(msg) md = self.finish_metadata(msg, md, status) md.update(status) self.session.send( stream, reply_type, metadata=md, content=status, parent=msg, ident=idents, ) def _no_raw_input(self): """Raise StdinNotImplementedError if active frontend doesn't support stdin.""" msg = "raw_input was called, but this frontend does not support stdin." raise StdinNotImplementedError(msg) def getpass(self, prompt="", stream=None): """Forward getpass to frontends Raises ------ StdinNotImplementedError if active frontend doesn't support stdin. """ if not self._allow_stdin: msg = "getpass was called, but this frontend does not support input requests." raise StdinNotImplementedError(msg) if stream is not None: import warnings warnings.warn( "The `stream` parameter of `getpass.getpass` will have no effect when using ipykernel", UserWarning, stacklevel=2, ) return self._input_request( prompt, self._parent_ident["shell"], self.get_parent("shell"), password=True, ) def raw_input(self, prompt=""): """Forward raw_input to frontends Raises ------ StdinNotImplementedError if active frontend doesn't support stdin. """ if not self._allow_stdin: msg = "raw_input was called, but this frontend does not support input requests." 
raise StdinNotImplementedError(msg) return self._input_request( str(prompt), self._parent_ident["shell"], self.get_parent("shell"), password=False, ) def _input_request(self, prompt, ident, parent, password=False): # Flush output before making the request. sys.stderr.flush() sys.stdout.flush() # flush the stdin socket, to purge stale replies while True: try: self.stdin_socket.recv_multipart(zmq.NOBLOCK) except zmq.ZMQError as e: if e.errno == zmq.EAGAIN: break raise # Send the input request. assert self.session is not None content = json_clean(dict(prompt=prompt, password=password)) self.session.send(self.stdin_socket, "input_request", content, parent, ident=ident) # Await a response. while True: try: # Use polling with select() so KeyboardInterrupts can get # through; doing a blocking recv() means stdin reads are # uninterruptible on Windows. We need a timeout because # zmq.select() is also uninterruptible, but at least this # way reads get noticed immediately and KeyboardInterrupts # get noticed fairly quickly by human response time standards. rlist, _, xlist = zmq.select([self.stdin_socket], [], [self.stdin_socket], 0.01) if rlist or xlist: ident, reply = self.session.recv(self.stdin_socket) if (ident, reply) != (None, None): break except KeyboardInterrupt: # re-raise KeyboardInterrupt, to truncate traceback msg = "Interrupted by user" raise KeyboardInterrupt(msg) from None except Exception: self.log.warning("Invalid Message:", exc_info=True) try: value = reply["content"]["value"] # type:ignore[index] except Exception: self.log.error("Bad input_reply: %s", parent) value = "" if value == "\x04": # EOF raise EOFError return value def _signal_children(self, signum): """ Send a signal to all our children Like `killpg`, but does not include the current process (or possible parents). 
""" sig_rep = f"{Signals(signum)!r}" for p in self._process_children(): self.log.debug("Sending %s to subprocess %s", sig_rep, p) try: if signum == SIGTERM: p.terminate() elif signum == SIGKILL: p.kill() else: p.send_signal(signum) except psutil.NoSuchProcess: pass def _process_children(self): """Retrieve child processes in the kernel's process group Avoids: - including parents and self with killpg - including all children that may have forked-off a new group """ kernel_process = psutil.Process() all_children = kernel_process.children(recursive=True) if os.name == "nt": return all_children kernel_pgid = os.getpgrp() process_group_children = [] for child in all_children: try: child_pgid = os.getpgid(child.pid) except OSError: pass else: if child_pgid == kernel_pgid: process_group_children.append(child) return process_group_children async def _progressively_terminate_all_children(self): sleeps = (0.01, 0.03, 0.1, 0.3, 1, 3, 10) if not self._process_children(): self.log.debug("Kernel has no children.") return for signum in (SIGTERM, SIGKILL): for delay in sleeps: children = self._process_children() if not children: self.log.debug("No more children, continuing shutdown routine.") return # signals only children, not current process self._signal_children(signum) self.log.debug( "Will sleep %s sec before checking for children and retrying. 
%s", delay, children, ) await asyncio.sleep(delay) async def _at_shutdown(self): """Actions taken at shutdown by the kernel, called by python's atexit.""" try: await self._progressively_terminate_all_children() except Exception as e: self.log.exception("Exception during subprocesses termination %s", e) finally: if self._shutdown_message is not None and self.session: self.session.send( self.iopub_socket, self._shutdown_message, ident=self._topic("shutdown"), ) self.log.debug("%s", self._shutdown_message) if self.control_stream: self.control_stream.flush(zmq.POLLOUT) ipykernel-6.29.5/ipykernel/kernelspec.py000066400000000000000000000231201464053401500203260ustar00rootroot00000000000000"""The IPython kernel spec for Jupyter""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import errno import json import os import platform import shutil import stat import sys import tempfile from pathlib import Path from typing import Any from jupyter_client.kernelspec import KernelSpecManager from traitlets import Unicode from traitlets.config import Application pjoin = os.path.join KERNEL_NAME = "python%i" % sys.version_info[0] # path to kernelspec resources RESOURCES = pjoin(Path(__file__).parent, "resources") def make_ipkernel_cmd( mod: str = "ipykernel_launcher", executable: str | None = None, extra_arguments: list[str] | None = None, python_arguments: list[str] | None = None, ) -> list[str]: """Build Popen command list for launching an IPython kernel. Parameters ---------- mod : str, optional (default 'ipykernel') A string of an IPython module whose __main__ starts an IPython kernel executable : str, optional (default sys.executable) The Python executable to use for the kernel process. extra_arguments : list, optional A list of extra arguments to pass when executing the launch code. 
Returns ------- A Popen command list """ if executable is None: executable = sys.executable extra_arguments = extra_arguments or [] python_arguments = python_arguments or [] return [executable, *python_arguments, "-m", mod, "-f", "{connection_file}", *extra_arguments] def get_kernel_dict( extra_arguments: list[str] | None = None, python_arguments: list[str] | None = None ) -> dict[str, Any]: """Construct dict for kernel.json""" return { "argv": make_ipkernel_cmd( extra_arguments=extra_arguments, python_arguments=python_arguments ), "display_name": "Python %i (ipykernel)" % sys.version_info[0], "language": "python", "metadata": {"debugger": True}, } def write_kernel_spec( path: Path | str | None = None, overrides: dict[str, Any] | None = None, extra_arguments: list[str] | None = None, python_arguments: list[str] | None = None, ) -> str: """Write a kernel spec directory to `path` If `path` is not specified, a temporary directory is created. If `overrides` is given, the kernelspec JSON is updated before writing. The path to the kernelspec is always returned. 
""" if path is None: path = Path(tempfile.mkdtemp(suffix="_kernels")) / KERNEL_NAME # stage resources shutil.copytree(RESOURCES, path) # ensure path is writable mask = Path(path).stat().st_mode if not mask & stat.S_IWUSR: Path(path).chmod(mask | stat.S_IWUSR) # write kernel.json kernel_dict = get_kernel_dict(extra_arguments, python_arguments) if overrides: kernel_dict.update(overrides) with open(pjoin(path, "kernel.json"), "w") as f: json.dump(kernel_dict, f, indent=1) return str(path) def install( kernel_spec_manager: KernelSpecManager | None = None, user: bool = False, kernel_name: str = KERNEL_NAME, display_name: str | None = None, prefix: str | None = None, profile: str | None = None, env: dict[str, str] | None = None, frozen_modules: bool = False, ) -> str: """Install the IPython kernelspec for Jupyter Parameters ---------- kernel_spec_manager : KernelSpecManager [optional] A KernelSpecManager to use for installation. If none provided, a default instance will be created. user : bool [default: False] Whether to do a user-only install, or system-wide. kernel_name : str, optional Specify a name for the kernelspec. This is needed for having multiple IPython kernels for different environments. display_name : str, optional Specify the display name for the kernelspec profile : str, optional Specify a custom profile to be loaded by the kernel. prefix : str, optional Specify an install prefix for the kernelspec. This is needed to install into a non-default location, such as a conda/virtual-env. env : dict, optional A dictionary of extra environment variables for the kernel. These will be added to the current environment variables before the kernel is started frozen_modules : bool, optional Whether to use frozen modules for potentially faster kernel startup. Using frozen modules prevents debugging inside of some built-in Python modules, such as io, abc, posixpath, ntpath, or stat. The frozen modules are used in CPython for faster interpreter startup. 
Ignored for cPython <3.11 and for other Python implementations. Returns ------- The path where the kernelspec was installed. """ if kernel_spec_manager is None: kernel_spec_manager = KernelSpecManager() if env is None: env = {} if (kernel_name != KERNEL_NAME) and (display_name is None): # kernel_name is specified and display_name is not # default display_name to kernel_name display_name = kernel_name overrides: dict[str, Any] = {} if display_name: overrides["display_name"] = display_name if profile: extra_arguments = ["--profile", profile] if not display_name: # add the profile to the default display name overrides["display_name"] = "Python %i [profile=%s]" % (sys.version_info[0], profile) else: extra_arguments = None python_arguments = None # addresses the debugger warning from debugpy about frozen modules if sys.version_info >= (3, 11) and platform.python_implementation() == "CPython": if not frozen_modules: # disable frozen modules python_arguments = ["-Xfrozen_modules=off"] elif "PYDEVD_DISABLE_FILE_VALIDATION" not in env: # user opted-in to have frozen modules, and we warned them about # consequences for the - disable the debugger warning env["PYDEVD_DISABLE_FILE_VALIDATION"] = "1" if env: overrides["env"] = env path = write_kernel_spec( overrides=overrides, extra_arguments=extra_arguments, python_arguments=python_arguments ) dest = kernel_spec_manager.install_kernel_spec( path, kernel_name=kernel_name, user=user, prefix=prefix ) # cleanup afterward shutil.rmtree(path) return dest # Entrypoint class InstallIPythonKernelSpecApp(Application): """Dummy app wrapping argparse""" name = Unicode("ipython-kernel-install") def initialize(self, argv: list[str] | None = None) -> None: """Initialize the app.""" if argv is None: argv = sys.argv[1:] self.argv = argv def start(self) -> None: """Start the app.""" import argparse parser = argparse.ArgumentParser( prog=self.name, description="Install the IPython kernel spec." 
) parser.add_argument( "--user", action="store_true", help="Install for the current user instead of system-wide", ) parser.add_argument( "--name", type=str, default=KERNEL_NAME, help="Specify a name for the kernelspec." " This is needed to have multiple IPython kernels at the same time.", ) parser.add_argument( "--display-name", type=str, help="Specify the display name for the kernelspec." " This is helpful when you have multiple IPython kernels.", ) parser.add_argument( "--profile", type=str, help="Specify an IPython profile to load. " "This can be used to create custom versions of the kernel.", ) parser.add_argument( "--prefix", type=str, help="Specify an install prefix for the kernelspec." " This is needed to install into a non-default location, such as a conda/virtual-env.", ) parser.add_argument( "--sys-prefix", action="store_const", const=sys.prefix, dest="prefix", help="Install to Python's sys.prefix." " Shorthand for --prefix='%s'. For use in conda/virtual-envs." % sys.prefix, ) parser.add_argument( "--env", action="append", nargs=2, metavar=("ENV", "VALUE"), help="Set environment variables for the kernel.", ) parser.add_argument( "--frozen_modules", action="store_true", help="Enable frozen modules for potentially faster startup." 
" This has a downside of preventing the debugger from navigating to certain built-in modules.", ) opts = parser.parse_args(self.argv) if opts.env: opts.env = dict(opts.env) try: dest = install( user=opts.user, kernel_name=opts.name, profile=opts.profile, prefix=opts.prefix, display_name=opts.display_name, env=opts.env, ) except OSError as e: if e.errno == errno.EACCES: print(e, file=sys.stderr) if opts.user: print("Perhaps you want `sudo` or `--user`?", file=sys.stderr) self.exit(1) raise print(f"Installed kernelspec {opts.name} in {dest}") if __name__ == "__main__": InstallIPythonKernelSpecApp.launch_instance() ipykernel-6.29.5/ipykernel/log.py000066400000000000000000000015131464053401500167560ustar00rootroot00000000000000"""A PUB log handler.""" import warnings from zmq.log.handlers import PUBHandler warnings.warn( "ipykernel.log is deprecated. It has moved to ipyparallel.engine.log", DeprecationWarning, stacklevel=2, ) class EnginePUBHandler(PUBHandler): """A simple PUBHandler subclass that sets root_topic""" engine = None def __init__(self, engine, *args, **kwargs): """Initialize the handler.""" PUBHandler.__init__(self, *args, **kwargs) self.engine = engine @property # type:ignore[misc] def root_topic(self): """this is a property, in case the handler is created before the engine gets registered with an id""" if isinstance(getattr(self.engine, "id", None), int): return "engine.%i" % self.engine.id # type:ignore[union-attr] return "engine" ipykernel-6.29.5/ipykernel/parentpoller.py000066400000000000000000000102651464053401500207100ustar00rootroot00000000000000"""A parent poller for unix.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
try: import ctypes except ImportError: ctypes = None # type:ignore[assignment] import os import platform import signal import time import warnings from _thread import interrupt_main # Py 3 from threading import Thread from traitlets.log import get_logger class ParentPollerUnix(Thread): """A Unix-specific daemon thread that terminates the program immediately when the parent process no longer exists. """ def __init__(self): """Initialize the poller.""" super().__init__() self.daemon = True def run(self): """Run the poller.""" # We cannot use os.waitpid because it works only for child processes. from errno import EINTR while True: try: if os.getppid() == 1: get_logger().warning("Parent appears to have exited, shutting down.") os._exit(1) time.sleep(1.0) except OSError as e: if e.errno == EINTR: continue raise class ParentPollerWindows(Thread): """A Windows-specific daemon thread that listens for a special event that signals an interrupt and, optionally, terminates the program immediately when the parent process no longer exists. """ def __init__(self, interrupt_handle=None, parent_handle=None): """Create the poller. At least one of the optional parameters must be provided. Parameters ---------- interrupt_handle : HANDLE (int), optional If provided, the program will generate a Ctrl+C event when this handle is signaled. parent_handle : HANDLE (int), optional If provided, the program will terminate immediately when this handle is signaled. """ assert interrupt_handle or parent_handle super().__init__() if ctypes is None: msg = "ParentPollerWindows requires ctypes" # type:ignore[unreachable] raise ImportError(msg) self.daemon = True self.interrupt_handle = interrupt_handle self.parent_handle = parent_handle def run(self): """Run the poll loop. This method never returns.""" try: from _winapi import INFINITE, WAIT_OBJECT_0 # type:ignore[attr-defined] except ImportError: from _subprocess import INFINITE, WAIT_OBJECT_0 # Build the list of handle to listen on. 
handles = [] if self.interrupt_handle: handles.append(self.interrupt_handle) if self.parent_handle: handles.append(self.parent_handle) arch = platform.architecture()[0] c_int = ctypes.c_int64 if arch.startswith("64") else ctypes.c_int # Listen forever. while True: result = ctypes.windll.kernel32.WaitForMultipleObjects( # type:ignore[attr-defined] len(handles), # nCount (c_int * len(handles))(*handles), # lpHandles False, # bWaitAll INFINITE, ) # dwMilliseconds if WAIT_OBJECT_0 <= result < len(handles): handle = handles[result - WAIT_OBJECT_0] if handle == self.interrupt_handle: # check if signal handler is callable # to avoid 'int not callable' error (Python issue #23395) if callable(signal.getsignal(signal.SIGINT)): interrupt_main() elif handle == self.parent_handle: get_logger().warning("Parent appears to have exited, shutting down.") os._exit(1) elif result < 0: # wait failed, just give up and stop polling. warnings.warn( """Parent poll failed. If the frontend dies, the kernel may be left running. Please let us know about your system (bitness, Python, etc.) at ipython-dev@scipy.org""", stacklevel=2, ) return ipykernel-6.29.5/ipykernel/pickleutil.py000066400000000000000000000317561464053401500203560ustar00rootroot00000000000000"""Pickle related utilities. Perhaps this should be called 'can'.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import copy import pickle import sys import typing import warnings from types import FunctionType # This registers a hook when it's imported try: from ipyparallel.serialize import codeutil # noqa: F401 except ImportError: pass from traitlets.log import get_logger from traitlets.utils.importstring import import_item warnings.warn( "ipykernel.pickleutil is deprecated. 
It has moved to ipyparallel.", DeprecationWarning, stacklevel=2, ) buffer = memoryview class_type = type PICKLE_PROTOCOL = pickle.DEFAULT_PROTOCOL def _get_cell_type(a=None): """the type of a closure cell doesn't seem to be importable, so just create one """ def inner(): return a return type(inner.__closure__[0]) # type:ignore[index] cell_type = _get_cell_type() # ------------------------------------------------------------------------------- # Functions # ------------------------------------------------------------------------------- def interactive(f): """decorator for making functions appear as interactively defined. This results in the function being linked to the user_ns as globals() instead of the module globals(). """ # build new FunctionType, so it can have the right globals # interactive functions never have closures, that's kind of the point if isinstance(f, FunctionType): mainmod = __import__("__main__") f = FunctionType( f.__code__, mainmod.__dict__, f.__name__, f.__defaults__, ) # associate with __main__ for uncanning f.__module__ = "__main__" return f def use_dill(): """use dill to expand serialization support adds support for object methods and closures to serialization. """ # import dill causes most of the magic import dill # dill doesn't work with cPickle, # tell the two relevant modules to use plain pickle global pickle # noqa: PLW0603 pickle = dill try: from ipykernel import serialize except ImportError: pass else: serialize.pickle = dill # type:ignore[attr-defined] # disable special function handling, let dill take care of it can_map.pop(FunctionType, None) def use_cloudpickle(): """use cloudpickle to expand serialization support adds support for object methods and closures to serialization. 
""" import cloudpickle global pickle # noqa: PLW0603 pickle = cloudpickle try: from ipykernel import serialize except ImportError: pass else: serialize.pickle = cloudpickle # type:ignore[attr-defined] # disable special function handling, let cloudpickle take care of it can_map.pop(FunctionType, None) # ------------------------------------------------------------------------------- # Classes # ------------------------------------------------------------------------------- class CannedObject: """A canned object.""" def __init__(self, obj, keys=None, hook=None): """can an object for safe pickling Parameters ---------- obj The object to be canned keys : list (optional) list of attribute names that will be explicitly canned / uncanned hook : callable (optional) An optional extra callable, which can do additional processing of the uncanned object. Notes ----- large data may be offloaded into the buffers list, used for zero-copy transfers. """ self.keys = keys or [] self.obj = copy.copy(obj) self.hook = can(hook) for key in keys: setattr(self.obj, key, can(getattr(obj, key))) self.buffers = [] def get_object(self, g=None): """Get an object.""" if g is None: g = {} obj = self.obj for key in self.keys: setattr(obj, key, uncan(getattr(obj, key), g)) if self.hook: self.hook = uncan(self.hook, g) self.hook(obj, g) return self.obj class Reference(CannedObject): """object for wrapping a remote reference by name.""" def __init__(self, name): """Initialize the reference.""" if not isinstance(name, str): raise TypeError("illegal name: %r" % name) self.name = name self.buffers = [] def __repr__(self): """Get the string repr of the reference.""" return "" % self.name def get_object(self, g=None): """Get an object in the reference.""" if g is None: g = {} return eval(self.name, g) class CannedCell(CannedObject): """Can a closure cell""" def __init__(self, cell): """Initialize the canned cell.""" self.cell_contents = can(cell.cell_contents) def get_object(self, g=None): """Get an 
object in the cell.""" cell_contents = uncan(self.cell_contents, g) def inner(): """Inner function.""" return cell_contents return inner.__closure__[0] # type:ignore[index] class CannedFunction(CannedObject): """Can a function.""" def __init__(self, f): """Initialize the can""" self._check_type(f) self.code = f.__code__ self.defaults: typing.Optional[typing.List[typing.Any]] if f.__defaults__: self.defaults = [can(fd) for fd in f.__defaults__] else: self.defaults = None self.closure: typing.Any closure = f.__closure__ if closure: self.closure = tuple(can(cell) for cell in closure) else: self.closure = None self.module = f.__module__ or "__main__" self.__name__ = f.__name__ self.buffers = [] def _check_type(self, obj): assert isinstance(obj, FunctionType), "Not a function type" def get_object(self, g=None): """Get an object out of the can.""" # try to load function back into its module: if not self.module.startswith("__"): __import__(self.module) g = sys.modules[self.module].__dict__ if g is None: g = {} defaults = tuple(uncan(cfd, g) for cfd in self.defaults) if self.defaults else None closure = tuple(uncan(cell, g) for cell in self.closure) if self.closure else None return FunctionType(self.code, g, self.__name__, defaults, closure) class CannedClass(CannedObject): """A canned class object.""" def __init__(self, cls): """Initialize the can.""" self._check_type(cls) self.name = cls.__name__ self.old_style = not isinstance(cls, type) self._canned_dict = {} for k, v in cls.__dict__.items(): if k not in ("__weakref__", "__dict__"): self._canned_dict[k] = can(v) mro = [] if self.old_style else cls.mro() self.parents = [can(c) for c in mro[1:]] self.buffers = [] def _check_type(self, obj): assert isinstance(obj, class_type), "Not a class type" def get_object(self, g=None): """Get an object from the can.""" parents = tuple(uncan(p, g) for p in self.parents) return type(self.name, parents, uncan_dict(self._canned_dict, g=g)) class CannedArray(CannedObject): """A canned 
numpy array.""" def __init__(self, obj): """Initialize the can.""" from numpy import ascontiguousarray self.shape = obj.shape self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str self.pickled = False if sum(obj.shape) == 0: self.pickled = True elif obj.dtype == "O": # can't handle object dtype with buffer approach self.pickled = True elif obj.dtype.fields and any(dt == "O" for dt, sz in obj.dtype.fields.values()): self.pickled = True if self.pickled: # just pickle it self.buffers = [pickle.dumps(obj, PICKLE_PROTOCOL)] else: # ensure contiguous obj = ascontiguousarray(obj, dtype=None) self.buffers = [buffer(obj)] def get_object(self, g=None): """Get the object.""" from numpy import frombuffer data = self.buffers[0] if self.pickled: # we just pickled it return pickle.loads(data) return frombuffer(data, dtype=self.dtype).reshape(self.shape) class CannedBytes(CannedObject): """A canned bytes object.""" @staticmethod def wrap(buf: typing.Union[memoryview, bytes, typing.SupportsBytes]) -> bytes: """Cast a buffer or memoryview object to bytes""" if isinstance(buf, memoryview): return buf.tobytes() if not isinstance(buf, bytes): return bytes(buf) return buf def __init__(self, obj): """Initialize the can.""" self.buffers = [obj] def get_object(self, g=None): """Get the canned object.""" data = self.buffers[0] return self.wrap(data) class CannedBuffer(CannedBytes): """A canned buffer.""" wrap = buffer # type:ignore[assignment] class CannedMemoryView(CannedBytes): """A canned memory view.""" wrap = memoryview # type:ignore[assignment] # ------------------------------------------------------------------------------- # Functions # ------------------------------------------------------------------------------- def _import_mapping(mapping, original=None): """import any string-keys in a type mapping""" log = get_logger() log.debug("Importing canning map") for key, _ in list(mapping.items()): if isinstance(key, str): try: cls = import_item(key) except Exception: if 
original and key not in original: # only message on user-added classes log.error("canning class not importable: %r", key, exc_info=True) # noqa: G201 mapping.pop(key) else: mapping[cls] = mapping.pop(key) def istype(obj, check): """like isinstance(obj, check), but strict This won't catch subclasses. """ if isinstance(check, tuple): return any(type(obj) is cls for cls in check) return type(obj) is check def can(obj): """prepare an object for pickling""" import_needed = False for cls, canner in can_map.items(): if isinstance(cls, str): import_needed = True break if istype(obj, cls): return canner(obj) if import_needed: # perform can_map imports, then try again # this will usually only happen once _import_mapping(can_map, _original_can_map) return can(obj) return obj def can_class(obj): """Can a class object.""" if isinstance(obj, class_type) and obj.__module__ == "__main__": return CannedClass(obj) return obj def can_dict(obj): """can the *values* of a dict""" if istype(obj, dict): newobj = {} for k, v in obj.items(): newobj[k] = can(v) return newobj return obj sequence_types = (list, tuple, set) def can_sequence(obj): """can the elements of a sequence""" if istype(obj, sequence_types): t = type(obj) return t([can(i) for i in obj]) return obj def uncan(obj, g=None): """invert canning""" import_needed = False for cls, uncanner in uncan_map.items(): if isinstance(cls, str): import_needed = True break if isinstance(obj, cls): return uncanner(obj, g) if import_needed: # perform uncan_map imports, then try again # this will usually only happen once _import_mapping(uncan_map, _original_uncan_map) return uncan(obj, g) return obj def uncan_dict(obj, g=None): """Uncan a dict object.""" if istype(obj, dict): newobj = {} for k, v in obj.items(): newobj[k] = uncan(v, g) return newobj return obj def uncan_sequence(obj, g=None): """Uncan a sequence.""" if istype(obj, sequence_types): t = type(obj) return t([uncan(i, g) for i in obj]) return obj # 
------------------------------------------------------------------------------- # API dictionaries # ------------------------------------------------------------------------------- # These dicts can be extended for custom serialization of new objects can_map = { "numpy.ndarray": CannedArray, FunctionType: CannedFunction, bytes: CannedBytes, memoryview: CannedMemoryView, cell_type: CannedCell, class_type: can_class, } if buffer is not memoryview: can_map[buffer] = CannedBuffer uncan_map: typing.Dict[type, typing.Any] = { CannedObject: lambda obj, g: obj.get_object(g), dict: uncan_dict, } # for use in _import_mapping: _original_can_map = can_map.copy() _original_uncan_map = uncan_map.copy() ipykernel-6.29.5/ipykernel/py.typed000066400000000000000000000000001464053401500173100ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/pylab/000077500000000000000000000000001464053401500167325ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/pylab/__init__.py000066400000000000000000000000001464053401500210310ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/pylab/backend_inline.py000066400000000000000000000007241464053401500222340ustar00rootroot00000000000000"""A matplotlib backend for publishing figures via display_data""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import warnings from matplotlib_inline.backend_inline import * # type:ignore[import-untyped] # noqa: F403 # analysis: ignore warnings.warn( "`ipykernel.pylab.backend_inline` is deprecated, directly " "use `matplotlib_inline.backend_inline`", DeprecationWarning, stacklevel=2, ) ipykernel-6.29.5/ipykernel/pylab/config.py000066400000000000000000000006011464053401500205460ustar00rootroot00000000000000"""Configurable for configuring the IPython inline backend This module does not import anything from matplotlib. 
""" import warnings from matplotlib_inline.config import * # type:ignore[import-untyped] # noqa: F403 # analysis: ignore warnings.warn( "`ipykernel.pylab.config` is deprecated, directly use `matplotlib_inline.config`", DeprecationWarning, stacklevel=2, ) ipykernel-6.29.5/ipykernel/resources/000077500000000000000000000000001464053401500176355ustar00rootroot00000000000000ipykernel-6.29.5/ipykernel/resources/logo-32x32.png000066400000000000000000000020741464053401500220650ustar00rootroot00000000000000PNG  IHDR szzbKGD pHYs}}tIME "4kIDATXW]h\E=nmXBZ "?OUSQ"VI[EP-PP ZkXJcQ1anߦW7s9372ʈ{ d "#&ғ*;=TvuP1"AH^2:rdxKE)>.c)=L%BwH AYsp͟I{Pij,:#BǙ4dSvFW2(Yt2YGT$xҟobcXr|'cGЕ:@)<`QOw\Ḩ3Z1@:`)wZ*b 3ߍz:%2 îsmOFO5Ny]kG]{4y ^lSpn<@?.n@[4I}by1o Tb>IENDB`ipykernel-6.29.5/ipykernel/resources/logo-64x64.png000066400000000000000000000042041464053401500220740ustar00rootroot00000000000000PNG  IHDR@@iqbKGD pHYsEtIME ")IDATx[}lUg=" m:vU6u6D.u: n3deiAVV(t.3#Qh7=?s9v;ƾm޷=:o15t\6>$HR$9dIN'9Hb@ (Q6ɣ)S/U )B8s-\5{mX]ֽaUi7$G^ y C}=4?D?Y<- Yώx @\ J^P"[Hp 0 ֦A񫥩_L/'ڈOFF>v`MoT$No kuN=v`A]]{wiB #DŇcIR uQH.p4ZX cI!*5y7<`ڷB_@$ ͋J?֌EpF]`xtf;C/ ØhXKL-@w< P[`V{P#83a`5p'ڠ )L&eFӴh߳PO@ :O5[[Xw`T߱G$i˾%T٨ !:'%Wr  }2 %'BM[:[TdĒ|!!ge0ϵU@Vn\kA Nu`#pֿ$;@M,p z{;FR23 6Q| !5OhH!J|/AB|E[K33iߞp%H@/G3՗+kxFH-\(YQ ^ +#8<FWP5VH- !("YK;~3 pFB+lrpb\\5vc?WH.=(`;H_cFaG%Q<[)3z$=E(oǡ0xTv%uDW'TjA1;Duh@Ïo3R)@,#I-˞`6Fj5>Pp3uY5H yE \`)o޼gۖbM6(EBESD!#t>'Ksrh޼C1;ϑEqta Dvhx׏o-[W\g-jא=   D*pxۗ9Z8g#9:SPeD 81>PoN#HnZ뵦;,J+^G(d-"sj:@J I}Uq6 qF}fk1c2yޭ%nFY;"5D*v7kna&ÁilDP#3pmWG$:6|&@aÀ4L4oJ-"]WIME4Z A RpolhR?Kt`>6Zbx!(E)1u ?tۓZ|ߎ[ Tαs̷ÜZ@{2ΞIiXdcC@v˲ro\hXf|O޹o(+h (C /&9`N}DIU1< 4u5D*q Dii:Q4-',#Yx[yX9NY/_2vrPeI}qG{ . 
'HwEzRJzԧu1;|>,Jxґw h12l9wF!; 2+s~67N4܋Q}?K58 { -yf9;buz^ɖn)AI12U>p /c|kP0^,h` 2Q" "AyR`jL15&ynIENDB`ipykernel-6.29.5/ipykernel/resources/logo-svg.svg000066400000000000000000000226051464053401500221200ustar00rootroot00000000000000 image/svg+xml ipykernel-6.29.5/ipykernel/serialize.py000066400000000000000000000142641464053401500201730ustar00rootroot00000000000000"""serialization utilities for apply messages""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import pickle import warnings from itertools import chain try: # available since ipyparallel 5.0.0 from ipyparallel.serialize.canning import ( CannedObject, can, can_sequence, istype, sequence_types, uncan, uncan_sequence, ) from ipyparallel.serialize.serialize import PICKLE_PROTOCOL except ImportError: # Deprecated since ipykernel 4.3.0 from ipykernel.pickleutil import ( PICKLE_PROTOCOL, CannedObject, can, can_sequence, istype, sequence_types, uncan, uncan_sequence, ) from jupyter_client.session import MAX_BYTES, MAX_ITEMS warnings.warn( "ipykernel.serialize is deprecated. 
It has moved to ipyparallel.serialize", DeprecationWarning, stacklevel=2, ) # ----------------------------------------------------------------------------- # Serialization Functions # ----------------------------------------------------------------------------- def _extract_buffers(obj, threshold=MAX_BYTES): """extract buffers larger than a certain threshold""" buffers = [] if isinstance(obj, CannedObject) and obj.buffers: for i, buf in enumerate(obj.buffers): if len(buf) > threshold: # buffer larger than threshold, prevent pickling obj.buffers[i] = None buffers.append(buf) # buffer too small for separate send, coerce to bytes # because pickling buffer objects just results in broken pointers elif isinstance(buf, memoryview): obj.buffers[i] = buf.tobytes() return buffers def _restore_buffers(obj, buffers): """restore buffers extracted by""" if isinstance(obj, CannedObject) and obj.buffers: for i, buf in enumerate(obj.buffers): if buf is None: obj.buffers[i] = buffers.pop(0) def serialize_object(obj, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): """Serialize an object into a list of sendable buffers. Parameters ---------- obj : object The object to be serialized buffer_threshold : int The threshold (in bytes) for pulling out data buffers to avoid pickling them. item_threshold : int The maximum number of items over which canning will iterate. Containers (lists, dicts) larger than this will be pickled without introspection. Returns ------- [bufs] : list of buffers representing the serialized object. 
""" buffers = [] if istype(obj, sequence_types) and len(obj) < item_threshold: cobj = can_sequence(obj) for c in cobj: buffers.extend(_extract_buffers(c, buffer_threshold)) elif istype(obj, dict) and len(obj) < item_threshold: cobj = {} for k in sorted(obj): c = can(obj[k]) buffers.extend(_extract_buffers(c, buffer_threshold)) cobj[k] = c else: cobj = can(obj) buffers.extend(_extract_buffers(cobj, buffer_threshold)) buffers.insert(0, pickle.dumps(cobj, PICKLE_PROTOCOL)) return buffers def deserialize_object(buffers, g=None): """reconstruct an object serialized by serialize_object from data buffers. Parameters ---------- buffers : list of buffers/bytes g : globals to be used when uncanning Returns ------- (newobj, bufs) : unpacked object, and the list of remaining unused buffers. """ bufs = list(buffers) pobj = bufs.pop(0) canned = pickle.loads(pobj) if istype(canned, sequence_types) and len(canned) < MAX_ITEMS: for c in canned: _restore_buffers(c, bufs) newobj = uncan_sequence(canned, g) elif istype(canned, dict) and len(canned) < MAX_ITEMS: newobj = {} for k in sorted(canned): c = canned[k] _restore_buffers(c, bufs) newobj[k] = uncan(c, g) else: _restore_buffers(canned, bufs) newobj = uncan(canned, g) return newobj, bufs def pack_apply_message(f, args, kwargs, buffer_threshold=MAX_BYTES, item_threshold=MAX_ITEMS): """pack up a function, args, and kwargs to be sent over the wire Each element of args/kwargs will be canned for special treatment, but inspection will not go any deeper than that. 
Any object whose data is larger than `threshold` will not have their data copied (only numpy arrays and bytes/buffers support zero-copy) Message will be a list of bytes/buffers of the format: [ cf, pinfo, , ] With length at least two + len(args) + len(kwargs) """ arg_bufs = list( chain.from_iterable(serialize_object(arg, buffer_threshold, item_threshold) for arg in args) ) kw_keys = sorted(kwargs.keys()) kwarg_bufs = list( chain.from_iterable( serialize_object(kwargs[key], buffer_threshold, item_threshold) for key in kw_keys ) ) info = dict(nargs=len(args), narg_bufs=len(arg_bufs), kw_keys=kw_keys) msg = [pickle.dumps(can(f), PICKLE_PROTOCOL)] msg.append(pickle.dumps(info, PICKLE_PROTOCOL)) msg.extend(arg_bufs) msg.extend(kwarg_bufs) return msg def unpack_apply_message(bufs, g=None, copy=True): """unpack f,args,kwargs from buffers packed by pack_apply_message() Returns: original f,args,kwargs""" bufs = list(bufs) # allow us to pop assert len(bufs) >= 2, "not enough buffers!" pf = bufs.pop(0) f = uncan(pickle.loads(pf), g) pinfo = bufs.pop(0) info = pickle.loads(pinfo) arg_bufs, kwarg_bufs = bufs[: info["narg_bufs"]], bufs[info["narg_bufs"] :] args_list = [] for _ in range(info["nargs"]): arg, arg_bufs = deserialize_object(arg_bufs, g) args_list.append(arg) args = tuple(args_list) assert not arg_bufs, "Shouldn't be any arg bufs left over" kwargs = {} for key in info["kw_keys"]: kwarg, kwarg_bufs = deserialize_object(kwarg_bufs, g) kwargs[key] = kwarg assert not kwarg_bufs, "Shouldn't be any kwarg bufs left over" return f, args, kwargs ipykernel-6.29.5/ipykernel/trio_runner.py000066400000000000000000000046541464053401500205540ustar00rootroot00000000000000"""A trio loop runner.""" import builtins import logging import signal import threading import traceback import warnings import trio class TrioRunner: """A trio loop runner.""" def __init__(self): """Initialize the runner.""" self._cell_cancel_scope = None self._trio_token = None def initialize(self, kernel, 
io_loop): """Initialize the runner.""" kernel.shell.set_trio_runner(self) kernel.shell.run_line_magic("autoawait", "trio") kernel.shell.magics_manager.magics["line"]["autoawait"] = lambda _: warnings.warn( "Autoawait isn't allowed in Trio background loop mode.", stacklevel=2 ) self._interrupted = False bg_thread = threading.Thread(target=io_loop.start, daemon=True, name="TornadoBackground") bg_thread.start() def interrupt(self, signum, frame): """Interuppt the runner.""" if self._cell_cancel_scope: self._cell_cancel_scope.cancel() else: msg = "Kernel interrupted but no cell is running" raise Exception(msg) def run(self): """Run the loop.""" old_sig = signal.signal(signal.SIGINT, self.interrupt) def log_nursery_exc(exc): exc = "\n".join(traceback.format_exception(type(exc), exc, exc.__traceback__)) logging.error("An exception occurred in a global nursery task.\n%s", exc) async def trio_main(): """Run the main loop.""" self._trio_token = trio.lowlevel.current_trio_token() async with trio.open_nursery() as nursery: # TODO This hack prevents the nursery from cancelling all child # tasks when an uncaught exception occurs, but it's ugly. nursery._add_exc = log_nursery_exc builtins.GLOBAL_NURSERY = nursery # type:ignore[attr-defined] await trio.sleep_forever() trio.run(trio_main) signal.signal(signal.SIGINT, old_sig) def __call__(self, async_fn): """Handle a function call.""" async def loc(coro): """A thread runner context.""" self._cell_cancel_scope = trio.CancelScope() with self._cell_cancel_scope: return await coro self._cell_cancel_scope = None # type:ignore[unreachable] return None return trio.from_thread.run(loc, async_fn, trio_token=self._trio_token) ipykernel-6.29.5/ipykernel/zmqshell.py000066400000000000000000000576271464053401500200550ustar00rootroot00000000000000"""A ZMQ-based subclass of InteractiveShell. 
This code is meant to ease the refactoring of the base InteractiveShell into something with a cleaner architecture for 2-process use, without actually breaking InteractiveShell itself. So we're doing something a bit ugly, where we subclass and override what we want to fix. Once this is working well, we can go back to the base class and refactor the code for a cleaner inheritance implementation that doesn't rely on so much monkeypatching. But this lets us maintain a fully working IPython as we develop the new machinery. This should thus be thought of as scaffolding. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import os import sys import warnings from pathlib import Path from threading import local from IPython.core import page, payloadpage from IPython.core.autocall import ZMQExitAutocall from IPython.core.displaypub import DisplayPublisher from IPython.core.error import UsageError from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC from IPython.core.magic import Magics, line_magic, magics_class from IPython.core.magics import CodeMagics, MacroToEdit # type:ignore[attr-defined] from IPython.core.usage import default_banner from IPython.display import Javascript, display from IPython.utils import openpy from IPython.utils.process import arg_split, system # type:ignore[attr-defined] from jupyter_client.session import Session, extract_header from jupyter_core.paths import jupyter_runtime_dir from traitlets import Any, CBool, CBytes, Dict, Instance, Type, default, observe from ipykernel import connect_qtconsole, get_connection_file, get_connection_info from ipykernel.displayhook import ZMQShellDisplayHook from ipykernel.jsonutil import encode_images, json_clean # ----------------------------------------------------------------------------- # Functions and classes # ----------------------------------------------------------------------------- class 
ZMQDisplayPublisher(DisplayPublisher): """A display publisher that publishes data using a ZeroMQ PUB socket.""" session = Instance(Session, allow_none=True) pub_socket = Any(allow_none=True) parent_header = Dict({}) topic = CBytes(b"display_data") # thread_local: # An attribute used to ensure the correct output message # is processed. See ipykernel Issue 113 for a discussion. _thread_local = Any() def set_parent(self, parent): """Set the parent for outbound messages.""" self.parent_header = extract_header(parent) def _flush_streams(self): """flush IO Streams prior to display""" sys.stdout.flush() sys.stderr.flush() @default("_thread_local") def _default_thread_local(self): """Initialize our thread local storage""" return local() @property def _hooks(self): if not hasattr(self._thread_local, "hooks"): # create new list for a new thread self._thread_local.hooks = [] return self._thread_local.hooks def publish( self, data, metadata=None, transient=None, update=False, ): """Publish a display-data message Parameters ---------- data : dict A mime-bundle dict, keyed by mime-type. metadata : dict, optional Metadata associated with the data. transient : dict, optional, keyword-only Transient data that may only be relevant during a live display, such as display_id. Transient data should not be persisted to documents. update : bool, optional, keyword-only If True, send an update_display_data message instead of display_data. """ self._flush_streams() if metadata is None: metadata = {} if transient is None: transient = {} self._validate_data(data, metadata) content = {} content["data"] = encode_images(data) content["metadata"] = metadata content["transient"] = transient msg_type = "update_display_data" if update else "display_data" # Use 2-stage process to send a message, # in order to put it through the transform # hooks before potentially sending. 
assert self.session is not None msg = self.session.msg(msg_type, json_clean(content), parent=self.parent_header) # Each transform either returns a new # message or None. If None is returned, # the message has been 'used' and we return. for hook in self._hooks: msg = hook(msg) if msg is None: return # type:ignore[unreachable] self.session.send( self.pub_socket, msg, ident=self.topic, ) def clear_output(self, wait=False): """Clear output associated with the current execution (cell). Parameters ---------- wait : bool (default: False) If True, the output will not be cleared immediately, instead waiting for the next display before clearing. This reduces bounce during repeated clear & display loops. """ content = dict(wait=wait) self._flush_streams() assert self.session is not None msg = self.session.msg("clear_output", json_clean(content), parent=self.parent_header) # see publish() for details on how this works for hook in self._hooks: msg = hook(msg) if msg is None: return # type:ignore[unreachable] self.session.send( self.pub_socket, msg, ident=self.topic, ) def register_hook(self, hook): """ Registers a hook with the thread-local storage. Parameters ---------- hook : Any callable object Returns ------- Either a publishable message, or `None`. The DisplayHook objects must return a message from the __call__ method if they still require the `session.send` method to be called after transformation. Returning `None` will halt that execution path, and session.send will not be called. """ self._hooks.append(hook) def unregister_hook(self, hook): """ Un-registers a hook with the thread-local storage. Parameters ---------- hook : Any callable object which has previously been registered as a hook. Returns ------- bool - `True` if the hook was removed, `False` if it wasn't found. 
""" try: self._hooks.remove(hook) return True except ValueError: return False @magics_class class KernelMagics(Magics): """Kernel magics.""" # ------------------------------------------------------------------------ # Magic overrides # ------------------------------------------------------------------------ # Once the base class stops inheriting from magic, this code needs to be # moved into a separate machinery as well. For now, at least isolate here # the magics which this class needs to implement differently from the base # class, or that are unique to it. @line_magic def edit(self, parameter_s="", last_call=None): """Bring up an editor and execute the resulting code. Usage: %edit [options] [args] %edit runs an external text editor. You will need to set the command for this editor via the ``TerminalInteractiveShell.editor`` option in your configuration file before it will work. This command allows you to conveniently edit multi-line code right in your IPython session. If called without arguments, %edit opens up an empty editor with a temporary file and will execute the contents of this file when you close it (don't forget to save it!). Options: -n Open the editor at a specified line number. By default, the IPython editor hook uses the unix syntax 'editor +N filename', but you can configure this by providing your own modified hook if your favorite editor supports line-number specifications with a different syntax. -p Call the editor with the same data as the previous time it was used, regardless of how long ago (in your current session) it was. -r Use 'raw' input. This option only applies to input taken from the user's history. By default, the 'processed' history is used, so that magics are loaded in their transformed version to valid Python. If this option is given, the raw input as typed as the command line is used instead. When you exit the editor, it will be executed by IPython's own processor. 
Arguments: If arguments are given, the following possibilities exist: - The arguments are numbers or pairs of colon-separated numbers (like 1 4:8 9). These are interpreted as lines of previous input to be loaded into the editor. The syntax is the same of the %macro command. - If the argument doesn't start with a number, it is evaluated as a variable and its contents loaded into the editor. You can thus edit any string which contains python code (including the result of previous edits). - If the argument is the name of an object (other than a string), IPython will try to locate the file where it was defined and open the editor at the point where it is defined. You can use ``%edit function`` to load an editor exactly at the point where 'function' is defined, edit it and have the file be executed automatically. If the object is a macro (see %macro for details), this opens up your specified editor with a temporary file containing the macro's data. Upon exit, the macro is reloaded with the contents of the file. Note: opening at an exact line is only supported under Unix, and some editors (like kedit and gedit up to Gnome 2.8) do not understand the '+NUMBER' parameter necessary for this feature. Good editors like (X)Emacs, vi, jed, pico and joe all do. - If the argument is not found as a variable, IPython will look for a file with that name (adding .py if necessary) and load it into the editor. It will execute its contents with execfile() when you exit, loading any code in the file into your interactive namespace. Unlike in the terminal, this is designed to use a GUI editor, and we do not know when it has closed. So the file you edit will not be automatically executed or printed. Note that %edit is also available through the alias %ed. 
""" last_call = last_call or ["", ""] opts, args = self.parse_options(parameter_s, "prn:") try: filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call) except MacroToEdit: # TODO: Implement macro editing over 2 processes. print("Macro editing not yet implemented in 2-process model.") return # Make sure we send to the client an absolute path, in case the working # directory of client and kernel don't match filename = str(Path(filename).resolve()) payload = {"source": "edit_magic", "filename": filename, "line_number": lineno} assert self.shell is not None self.shell.payload_manager.write_payload(payload) # A few magics that are adapted to the specifics of using pexpect and a # remote terminal @line_magic def clear(self, arg_s): """Clear the terminal.""" assert self.shell is not None if os.name == "posix": self.shell.system("clear") else: self.shell.system("cls") if os.name == "nt": # This is the usual name in windows cls = line_magic("cls")(clear) # Terminal pagers won't work over pexpect, but we do have our own pager @line_magic def less(self, arg_s): """Show a file through the pager. Files ending in .py are syntax-highlighted.""" if not arg_s: msg = "Missing filename." raise UsageError(msg) if arg_s.endswith(".py"): assert self.shell is not None cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False)) else: with open(arg_s) as fid: cont = fid.read() page.page(cont) more = line_magic("more")(less) # Man calls a pager, so we also need to redefine it if os.name == "posix": @line_magic def man(self, arg_s): """Find the man page for the given command and display in pager.""" assert self.shell is not None page.page(self.shell.getoutput("man %s | col -b" % arg_s, split=False)) @line_magic def connect_info(self, arg_s): """Print information for connecting other clients to this kernel It will print the contents of this session's connection file, as well as shortcuts for local clients. 
In the simplest case, when called from the most recently launched kernel, secondary clients can be connected, simply with: $> jupyter --existing """ try: connection_file = get_connection_file() info = get_connection_info(unpack=False) except Exception as e: warnings.warn("Could not get connection info: %r" % e, stacklevel=2) return # if it's in the default dir, truncate to basename if jupyter_runtime_dir() == str(Path(connection_file).parent): connection_file = Path(connection_file).name assert isinstance(info, str) print(info + "\n") print( f"Paste the above JSON into a file, and connect with:\n" f" $> jupyter --existing \n" f"or, if you are local, you can connect with just:\n" f" $> jupyter --existing {connection_file}\n" f"or even just:\n" f" $> jupyter --existing\n" f"if this is the most recent Jupyter kernel you have started." ) @line_magic def qtconsole(self, arg_s): """Open a qtconsole connected to this kernel. Useful for connecting a qtconsole to running notebooks, for better debugging. """ # %qtconsole should imply bind_kernel for engines: # FIXME: move to ipyparallel Kernel subclass if "ipyparallel" in sys.modules: from ipyparallel import bind_kernel bind_kernel() try: connect_qtconsole(argv=arg_split(arg_s, os.name == "posix")) except Exception as e: warnings.warn("Could not start qtconsole: %r" % e, stacklevel=2) return @line_magic def autosave(self, arg_s): """Set the autosave interval in the notebook (in seconds). The default value is 120, or two minutes. ``%autosave 0`` will disable autosave. This magic only has an effect when called from the notebook interface. It has no effect when called in a startup file. 
""" try: interval = int(arg_s) except ValueError as e: raise UsageError("%%autosave requires an integer, got %r" % arg_s) from e # javascript wants milliseconds milliseconds = 1000 * interval display( Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds), include=["application/javascript"], ) if interval: print("Autosaving every %i seconds" % interval) else: print("Autosave disabled") class ZMQInteractiveShell(InteractiveShell): """A subclass of InteractiveShell for ZMQ.""" displayhook_class = Type(ZMQShellDisplayHook) display_pub_class = Type(ZMQDisplayPublisher) data_pub_class = Any() # type:ignore[assignment] kernel = Any() parent_header = Any() @default("banner1") def _default_banner1(self): return default_banner # Override the traitlet in the parent class, because there's no point using # readline for the kernel. Can be removed when the readline code is moved # to the terminal frontend. readline_use = CBool(False) # autoindent has no meaning in a zmqshell, and attempting to enable it # will print a warning in the absence of readline. 
autoindent = CBool(False) exiter = Instance(ZMQExitAutocall) @default("exiter") def _default_exiter(self): return ZMQExitAutocall(self) @observe("exit_now") def _update_exit_now(self, change): """stop eventloop when exit_now fires""" if change["new"]: if hasattr(self.kernel, "io_loop"): loop = self.kernel.io_loop loop.call_later(0.1, loop.stop) if self.kernel.eventloop: exit_hook = getattr(self.kernel.eventloop, "exit_hook", None) if exit_hook: exit_hook(self.kernel) keepkernel_on_exit = None # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no # interactive input being read; we provide event loop support in ipkernel def enable_gui(self, gui): """Enable a given guil.""" from .eventloops import enable_gui as real_enable_gui try: real_enable_gui(gui) self.active_eventloop = gui except ValueError as e: raise UsageError("%s" % e) from e def init_environment(self): """Configure the user's environment.""" env = os.environ # These two ensure 'ls' produces nice coloring on BSD-derived systems env["TERM"] = "xterm-color" env["CLICOLOR"] = "1" # These two add terminal color in tools that support it. env["FORCE_COLOR"] = "1" env["CLICOLOR_FORCE"] = "1" # Since normal pagers don't work at all (over pexpect we don't have # single-key control of the subprocess), try to disable paging in # subprocesses as much as possible. 
env["PAGER"] = "cat" env["GIT_PAGER"] = "cat" def init_hooks(self): """Initialize hooks.""" super().init_hooks() self.set_hook("show_in_pager", page.as_hook(payloadpage.page), 99) def init_data_pub(self): """Delay datapub init until request, for deprecation warnings""" @property def data_pub(self): if not hasattr(self, "_data_pub"): warnings.warn( "InteractiveShell.data_pub is deprecated outside IPython parallel.", DeprecationWarning, stacklevel=2, ) self._data_pub = self.data_pub_class(parent=self) # type:ignore[has-type] self._data_pub.session = self.display_pub.session # type:ignore[attr-defined] self._data_pub.pub_socket = self.display_pub.pub_socket # type:ignore[attr-defined] return self._data_pub @data_pub.setter def data_pub(self, pub): self._data_pub = pub def ask_exit(self): """Engage the exit actions.""" self.exit_now = not self.keepkernel_on_exit payload = dict( source="ask_exit", keepkernel=self.keepkernel_on_exit, ) self.payload_manager.write_payload(payload) # type:ignore[union-attr] def run_cell(self, *args, **kwargs): """Run a cell.""" self._last_traceback = None return super().run_cell(*args, **kwargs) def _showtraceback(self, etype, evalue, stb): # try to preserve ordering of tracebacks and print statements sys.stdout.flush() sys.stderr.flush() exc_content = { "traceback": stb, "ename": str(etype.__name__), "evalue": str(evalue), } dh = self.displayhook # Send exception info over pub socket for other clients than the caller # to pick up topic = None if dh.topic: # type:ignore[attr-defined] topic = dh.topic.replace(b"execute_result", b"error") # type:ignore[attr-defined] dh.session.send( # type:ignore[attr-defined] dh.pub_socket, # type:ignore[attr-defined] "error", json_clean(exc_content), dh.parent_header, # type:ignore[attr-defined] ident=topic, ) # FIXME - Once we rely on Python 3, the traceback is stored on the # exception object, so we shouldn't need to store it here. 
self._last_traceback = stb def set_next_input(self, text, replace=False): """Send the specified text to the frontend to be presented at the next input cell.""" payload = dict( source="set_next_input", text=text, replace=replace, ) self.payload_manager.write_payload(payload) # type:ignore[union-attr] def set_parent(self, parent): """Set the parent header for associating output with its triggering input""" self.parent_header = parent self.displayhook.set_parent(parent) # type:ignore[attr-defined] self.display_pub.set_parent(parent) # type:ignore[attr-defined] if hasattr(self, "_data_pub"): self.data_pub.set_parent(parent) try: sys.stdout.set_parent(parent) # type:ignore[attr-defined] except AttributeError: pass try: sys.stderr.set_parent(parent) # type:ignore[attr-defined] except AttributeError: pass def get_parent(self): """Get the parent header.""" return self.parent_header def init_magics(self): """Initialize magics.""" super().init_magics() self.register_magics(KernelMagics) self.magics_manager.register_alias("ed", "edit") def init_virtualenv(self): """Initialize virtual environment.""" # Overridden not to do virtualenv detection, because it's probably # not appropriate in a kernel. To use a kernel in a virtualenv, install # it inside the virtualenv. # https://ipython.readthedocs.io/en/latest/install/kernel_install.html def system_piped(self, cmd): """Call the given cmd in a subprocess, piping stdout/err Parameters ---------- cmd : str Command to execute (can not end in '&', as background processes are not supported. Should not be a command that expects input other than simple text. """ if cmd.rstrip().endswith("&"): # this is *far* from a rigorous test # We do not support backgrounding processes because we either use # pexpect or pipes to read from. Users can always just call # os.system() or use ip.system=ip.system_raw # if they really want a background process. msg = "Background processes not supported." 
raise OSError(msg) # we explicitly do NOT return the subprocess status code, because # a non-None value would trigger :func:`sys.displayhook` calls. # Instead, we store the exit_code in user_ns. # Also, protect system call from UNC paths on Windows here too # as is done in InteractiveShell.system_raw if sys.platform == "win32": cmd = self.var_expand(cmd, depth=1) from IPython.utils._process_win32 import AvoidUNCPath with AvoidUNCPath() as path: if path is not None: cmd = f"pushd {path} &&{cmd}" self.user_ns["_exit_code"] = system(cmd) else: self.user_ns["_exit_code"] = system(self.var_expand(cmd, depth=1)) # Ensure new system_piped implementation is used system = system_piped InteractiveShellABC.register(ZMQInteractiveShell) ipykernel-6.29.5/ipykernel_launcher.py000066400000000000000000000010001464053401500200450ustar00rootroot00000000000000"""Entry point for launching an IPython kernel. This is separate from the ipykernel package so we can avoid doing imports until after removing the cwd from sys.path. """ import sys from pathlib import Path if __name__ == "__main__": # Remove the CWD from sys.path while we load stuff. 
# This is added back by InteractiveShellApp.init_path() if sys.path[0] == "" or Path(sys.path[0]) == Path.cwd(): del sys.path[0] from ipykernel import kernelapp as app app.launch_new_instance() ipykernel-6.29.5/pyproject.toml000066400000000000000000000201061464053401500165340ustar00rootroot00000000000000[build-system] requires = ["hatchling>=1.4", "jupyter_client>=6"] build-backend = "hatchling.build" [project] name = "ipykernel" dynamic = ["version"] authors = [{name = "IPython Development Team", email = "ipython-dev@scipy.org"}] license = {file = "LICENSE"} readme = "README.md" description = "IPython Kernel for Jupyter" keywords = ["Interactive", "Interpreter", "Shell", "Web"] classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3", ] requires-python = ">=3.8" dependencies = [ "debugpy>=1.6.5", "ipython>=7.23.1", "comm>=0.1.1", "traitlets>=5.4.0", "jupyter_client>=6.1.12", "jupyter_core>=4.12,!=5.0.*", # For tk event loop support only. 
"nest_asyncio", "tornado>=6.1", "matplotlib-inline>=0.1", 'appnope;platform_system=="Darwin"', "pyzmq>=24", "psutil", "packaging", ] [project.urls] Homepage = "https://ipython.org" Documentation = "https://ipykernel.readthedocs.io" Funding = "https://numfocus.org/donate" Source = "https://github.com/ipython/ipykernel" Tracker = "https://github.com/ipython/ipykernel/issues" [project.optional-dependencies] docs = [ "sphinx", "myst_parser", "pydata_sphinx_theme", "sphinxcontrib_github_alt", "sphinxcontrib-spelling", "sphinx-autodoc-typehints", "trio" ] test = [ "pytest>=7.0", "pytest-cov", "flaky", "ipyparallel", "pre-commit", "pytest-asyncio>=0.23.5", "pytest-timeout" ] cov = [ "coverage[toml]", "pytest-cov", "matplotlib", "curio", "trio", ] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] [tool.hatch.version] path = "ipykernel/_version.py" # Used to call hatch_build.py [tool.hatch.build.hooks.custom] [tool.hatch.build.targets.wheel.shared-data] "data_kernelspec" = "share/jupyter/kernels/python3" [tool.hatch.build.force-include] "./ipykernel_launcher.py" = "ipykernel_launcher.py" [tool.hatch.envs.docs] features = ["docs"] [tool.hatch.envs.docs.scripts] build = "make -C docs html SPHINXOPTS='-W'" api = "sphinx-apidoc -o docs/api -f -E ipykernel tests ipykernel/datapub.py ipykernel/pickleutil.py ipykernel/serialize.py ipykernel/gui ipykernel/pylab" [tool.hatch.envs.test] features = ["test"] [tool.hatch.envs.test.scripts] list = "python -m pip freeze" test = "python -m pytest -vv {args}" nowarn = "test -W default {args}" [tool.hatch.envs.cov] features = ["test", "cov"] [tool.hatch.envs.cov.scripts] test = "python -m pytest -vv --cov ipykernel --cov-branch --cov-report term-missing:skip-covered {args}" nowarn = "test -W default {args}" [[tool.hatch.envs.cov.matrix]] qt = ["qt5", "qt6"] [tool.hatch.envs.cov.overrides] matrix.qt.features = [ { value = "pyqt5", if = ["qt5"] }, { value = "pyside6", if = ["qt6"] }, ] [tool.hatch.envs.typing] dependencies = ["pre-commit"] detached = 
true [tool.hatch.envs.typing.scripts] test = "pre-commit run --all-files --hook-stage manual mypy" [tool.hatch.envs.lint] dependencies = ["pre-commit"] detached = true [tool.hatch.envs.lint.scripts] build = [ "pre-commit run --all-files ruff", "pre-commit run --all-files ruff-format" ] [tool.mypy] files = "ipykernel" strict = true disable_error_code = ["no-untyped-def", "no-untyped-call", "import-not-found"] enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] follow_imports = "normal" pretty = true warn_unreachable = true [tool.pytest.ini_options] minversion = "6.0" xfail_strict = true log_cli_level = "info" addopts = [ "-raXs", "--durations=10", "--color=yes", "--doctest-modules", "--showlocals", "--strict-markers", "--strict-config", "--ignore=ipykernel/pylab/backend_inline.py", "--ignore=ipykernel/pylab/config.py", "--ignore=ipykernel/gui/gtk3embed.py", "--ignore=ipykernel/gui/gtkembed.py", "--ignore=ipykernel/datapub.py", "--ignore=ipykernel/log.py", "--ignore=ipykernel/pickleutil.py", "--ignore=ipykernel/serialize.py", "--ignore=ipykernel/_eventloop_macos.py" ] testpaths = [ "tests", "tests/inprocess" ] asyncio_mode = "auto" timeout = 300 # Restore this setting to debug failures #timeout_method = "thread" filterwarnings= [ # Fail on warnings "error", # Ignore our own warnings "ignore:The `stream` parameter of `getpass.getpass` will have no effect:UserWarning", "ignore:has moved to ipyparallel:DeprecationWarning", # IPython warnings "ignore: `Completer.complete` is pending deprecation since IPython 6.0 and will be replaced by `Completer.completions`:PendingDeprecationWarning", # Ignore jupyter_client warnings "ignore:unclosed = DEFAULT_SOFT: soft = DEFAULT_SOFT if hard < soft: hard = soft resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) # Enforce selector event loop on Windows. 
if os.name == "nt": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # type:ignore class KernelMixin: log = logging.getLogger() def _initialize(self): self.context = context = zmq.Context() self.iopub_socket = context.socket(zmq.PUB) self.stdin_socket = context.socket(zmq.ROUTER) self.session = Session() self.test_sockets = [self.iopub_socket] self.test_streams = [] for name in ["shell", "control"]: socket = context.socket(zmq.ROUTER) stream = ZMQStream(socket) stream.on_send(self._on_send) self.test_sockets.append(socket) self.test_streams.append(stream) setattr(self, f"{name}_stream", stream) async def do_debug_request(self, msg): return {} def destroy(self): for stream in self.test_streams: stream.close() for socket in self.test_sockets: socket.close() self.context.destroy() @no_type_check async def test_shell_message(self, *args, **kwargs): msg_list = self._prep_msg(*args, **kwargs) await self.dispatch_shell(msg_list) self.shell_stream.flush() return await self._wait_for_msg() @no_type_check async def test_control_message(self, *args, **kwargs): msg_list = self._prep_msg(*args, **kwargs) await self.process_control(msg_list) self.control_stream.flush() return await self._wait_for_msg() def _on_send(self, msg, *args, **kwargs): self._reply = msg def _prep_msg(self, *args, **kwargs): self._reply = None raw_msg = self.session.msg(*args, **kwargs) msg = self.session.serialize(raw_msg) return [zmq.Message(m) for m in msg] async def _wait_for_msg(self): while not self._reply: await asyncio.sleep(0.1) _, msg = self.session.feed_identities(self._reply) return self.session.deserialize(msg) def _send_interrupt_children(self): # override to prevent deadlock pass class MockKernel(KernelMixin, Kernel): # type:ignore implementation = "test" implementation_version = "1.0" language = "no-op" language_version = "0.1" language_info = { "name": "test", "mimetype": "text/plain", "file_extension": ".txt", } banner = "test kernel" def __init__(self, *args, 
**kwargs): self._initialize() self.shell = MagicMock() super().__init__(*args, **kwargs) def do_execute( self, code, silent, store_history=True, user_expressions=None, allow_stdin=False ): if not silent: stream_content = {"name": "stdout", "text": code} self.send_response(self.iopub_socket, "stream", stream_content) return { "status": "ok", # The base class increments the execution count "execution_count": self.execution_count, "payload": [], "user_expressions": {}, } class MockIPyKernel(KernelMixin, IPythonKernel): # type:ignore def __init__(self, *args, **kwargs): self._initialize() super().__init__(*args, **kwargs) @pytest.fixture() def kernel(): kernel = MockKernel() kernel.io_loop = IOLoop.current() yield kernel kernel.destroy() @pytest.fixture() def ipkernel(): kernel = MockIPyKernel() kernel.io_loop = IOLoop.current() yield kernel kernel.destroy() ZMQInteractiveShell.clear_instance() ipykernel-6.29.5/tests/inprocess/000077500000000000000000000000001464053401500167705ustar00rootroot00000000000000ipykernel-6.29.5/tests/inprocess/__init__.py000066400000000000000000000000001464053401500210670ustar00rootroot00000000000000ipykernel-6.29.5/tests/inprocess/test_kernel.py000066400000000000000000000065061464053401500216700ustar00rootroot00000000000000# Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import sys from contextlib import contextmanager from io import StringIO import pytest from IPython.utils.io import capture_output # type:ignore[attr-defined] from jupyter_client.session import Session from ipykernel.inprocess.blocking import BlockingInProcessKernelClient from ipykernel.inprocess.ipkernel import InProcessKernel from ipykernel.inprocess.manager import InProcessKernelManager from ..utils import assemble_output orig_msg = Session.msg def _inject_cell_id(_self, *args, **kwargs): """ This patch jupyter_client.session:Session.msg to add a cell_id to the return message metadata """ assert isinstance(_self, Session) res = orig_msg(_self, *args, **kwargs) assert "cellId" not in res["metadata"] res["metadata"]["cellId"] = "test_cell_id" return res @contextmanager def patch_cell_id(): try: Session.msg = _inject_cell_id # type:ignore yield finally: Session.msg = orig_msg # type:ignore @pytest.fixture() def kc(): km = InProcessKernelManager() km.start_kernel() kc = km.client() kc.start_channels() kc.wait_for_ready() return kc def test_with_cell_id(kc): with patch_cell_id(): kc.execute("1+1") def test_pylab(kc): """Does %pylab work in the in-process kernel?""" _ = pytest.importorskip("matplotlib", reason="This test requires matplotlib") kc.execute("%pylab") out, err = assemble_output(kc.get_iopub_msg) assert "matplotlib" in out def test_raw_input(kc): """Does the in-process kernel handle raw_input correctly?""" io = StringIO("foobar\n") sys_stdin = sys.stdin sys.stdin = io try: kc.execute("x = input()") finally: sys.stdin = sys_stdin assert kc.kernel.shell.user_ns.get("x") == "foobar" @pytest.mark.skipif("__pypy__" in sys.builtin_module_names, reason="fails on pypy") def test_stdout(kc): """Does the in-process kernel correctly capture IO?""" kernel = InProcessKernel() with capture_output() as io: kernel.shell.run_cell('print("foo")') assert io.stdout == "foo\n" kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session) kernel.frontends.append(kc) 
kc.execute('print("bar")') out, err = assemble_output(kc.get_iopub_msg) assert out == "bar\n" @pytest.mark.skip(reason="Currently don't capture during test as pytest does its own capturing") def test_capfd(kc): """Does correctly capture fd""" kernel = InProcessKernel() with capture_output() as io: kernel.shell.run_cell('print("foo")') assert io.stdout == "foo\n" kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session) kernel.frontends.append(kc) kc.execute("import os") kc.execute('os.system("echo capfd")') out, err = assemble_output(kc.iopub_channel) assert out == "capfd\n" def test_getpass_stream(kc): """Tests that kernel getpass accept the stream parameter""" kernel = InProcessKernel() kernel._allow_stdin = True kernel._input_request = lambda *args, **kwargs: None # type:ignore kernel.getpass(stream="non empty") async def test_do_execute(kc): kernel = InProcessKernel() await kernel.do_execute("a=1", True) assert kernel.shell.user_ns["a"] == 1 ipykernel-6.29.5/tests/inprocess/test_kernelmanager.py000066400000000000000000000064141464053401500232210ustar00rootroot00000000000000# Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import unittest import pytest from flaky import flaky from ipykernel.inprocess.manager import InProcessKernelManager # ----------------------------------------------------------------------------- # Test case # ----------------------------------------------------------------------------- class InProcessKernelManagerTestCase(unittest.TestCase): def setUp(self): self.km = InProcessKernelManager() def tearDown(self): if self.km.has_kernel: self.km.shutdown_kernel() @flaky def test_interface(self): """Does the in-process kernel manager implement the basic KM interface?""" km = self.km assert not km.has_kernel km.start_kernel() assert km.has_kernel assert km.kernel is not None kc = km.client() assert not kc.channels_running kc.start_channels() assert kc.channels_running old_kernel = km.kernel km.restart_kernel() assert km.kernel is not None assert km.kernel != old_kernel km.shutdown_kernel() assert not km.has_kernel with pytest.raises(NotImplementedError): km.interrupt_kernel() with pytest.raises(NotImplementedError): km.signal_kernel(9) kc.stop_channels() assert not kc.channels_running def test_execute(self): """Does executing code in an in-process kernel work?""" km = self.km km.start_kernel() kc = km.client() kc.start_channels() kc.wait_for_ready() kc.execute("foo = 1") assert km.kernel.shell.user_ns["foo"] == 1 def test_complete(self): """Does requesting completion from an in-process kernel work?""" km = self.km km.start_kernel() kc = km.client() kc.start_channels() kc.wait_for_ready() km.kernel.shell.push({"my_bar": 0, "my_baz": 1}) kc.complete("my_ba", 5) msg = kc.get_shell_msg() assert msg["header"]["msg_type"] == "complete_reply" assert sorted(msg["content"]["matches"]) == ["my_bar", "my_baz"] def test_inspect(self): """Does requesting object information from an in-process kernel work?""" km = self.km km.start_kernel() kc = km.client() kc.start_channels() kc.wait_for_ready() km.kernel.shell.user_ns["foo"] = 1 kc.inspect("foo") msg = kc.get_shell_msg() assert 
msg["header"]["msg_type"] == "inspect_reply" content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "int" in text def test_history(self): """Does requesting history from an in-process kernel work?""" km = self.km km.start_kernel() kc = km.client() kc.start_channels() kc.wait_for_ready() kc.execute("1") kc.history(hist_access_type="tail", n=1) msg = kc.shell_channel.get_msgs()[-1] assert msg["header"]["msg_type"] == "history_reply" history = msg["content"]["history"] assert len(history) == 1 assert history[0][2] == "1" if __name__ == "__main__": unittest.main() ipykernel-6.29.5/tests/test_async.py000066400000000000000000000035601464053401500175150ustar00rootroot00000000000000"""Test async/await integration""" import pytest from .test_message_spec import validate_message from .utils import TIMEOUT, execute, flush_channels, start_new_kernel KC = KM = None def setup_function(): """start the global kernel (if it isn't running) and return its client""" global KM, KC KM, KC = start_new_kernel() flush_channels(KC) def teardown_function(): assert KC is not None assert KM is not None KC.stop_channels() KM.shutdown_kernel(now=True) def test_async_await(): flush_channels(KC) msg_id, content = execute("import asyncio; await asyncio.sleep(0.1)", KC) assert content["status"] == "ok", content @pytest.mark.parametrize("asynclib", ["asyncio", "trio", "curio"]) def test_async_interrupt(asynclib, request): assert KC is not None assert KM is not None try: __import__(asynclib) except ImportError: pytest.skip("Requires %s" % asynclib) request.addfinalizer(lambda: execute("%autoawait asyncio", KC)) flush_channels(KC) msg_id, content = execute("%autoawait " + asynclib, KC) assert content["status"] == "ok", content flush_channels(KC) msg_id = KC.execute(f"print('begin'); import {asynclib}; await {asynclib}.sleep(5)") busy = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(busy, "status", msg_id) assert busy["content"]["execution_state"] == "busy" echo 
= KC.get_iopub_msg(timeout=TIMEOUT) validate_message(echo, "execute_input") stream = KC.get_iopub_msg(timeout=TIMEOUT) # wait for the stream output to be sure kernel is in the async block validate_message(stream, "stream") assert stream["content"]["text"] == "begin\n" KM.interrupt_kernel() reply = KC.get_shell_msg()["content"] assert reply["status"] == "error", reply assert reply["ename"] in {"CancelledError", "KeyboardInterrupt"} flush_channels(KC) ipykernel-6.29.5/tests/test_comm.py000066400000000000000000000054641464053401500173400ustar00rootroot00000000000000import unittest.mock import pytest from ipykernel.comm import Comm, CommManager from ipykernel.ipkernel import IPythonKernel from ipykernel.kernelbase import Kernel def test_comm(kernel: Kernel) -> None: manager = CommManager(kernel=kernel) kernel.comm_manager = manager # type:ignore with pytest.deprecated_call(): c = Comm(kernel=kernel, target_name="bar") msgs = [] assert kernel is c.kernel # type:ignore def on_close(msg): msgs.append(msg) def on_message(msg): msgs.append(msg) c.publish_msg("foo") c.open({}) c.on_msg(on_message) c.on_close(on_close) c.handle_msg({}) c.handle_close({}) c.close() assert len(msgs) == 2 assert c.target_name == "bar" def test_comm_manager(kernel: Kernel) -> None: manager = CommManager(kernel=kernel) msgs = [] def foo(comm, msg): msgs.append(msg) comm.close() def fizz(comm, msg): raise RuntimeError("hi") def on_close(msg): msgs.append(msg) def on_msg(msg): msgs.append(msg) manager.register_target("foo", foo) manager.register_target("fizz", fizz) kernel.comm_manager = manager # type:ignore with unittest.mock.patch.object(Comm, "publish_msg") as publish_msg: with pytest.deprecated_call(): comm = Comm() comm.on_msg(on_msg) comm.on_close(on_close) manager.register_comm(comm) assert publish_msg.call_count == 1 # make sure that when we don't pass a kernel, the 'default' kernel is taken Kernel._instance = kernel # type:ignore assert comm.kernel is kernel # type:ignore 
Kernel.clear_instance() assert manager.get_comm(comm.comm_id) == comm assert manager.get_comm("foo") is None msg = dict(content=dict(comm_id=comm.comm_id, target_name="foo")) manager.comm_open(None, None, msg) assert len(msgs) == 1 msg["content"]["target_name"] = "bar" manager.comm_open(None, None, msg) assert len(msgs) == 1 msg = dict(content=dict(comm_id=comm.comm_id, target_name="fizz")) manager.comm_open(None, None, msg) assert len(msgs) == 1 manager.register_comm(comm) assert manager.get_comm(comm.comm_id) == comm msg = dict(content=dict(comm_id=comm.comm_id)) manager.comm_msg(None, None, msg) assert len(msgs) == 2 msg["content"]["comm_id"] = "foo" manager.comm_msg(None, None, msg) assert len(msgs) == 2 manager.register_comm(comm) assert manager.get_comm(comm.comm_id) == comm msg = dict(content=dict(comm_id=comm.comm_id)) manager.comm_close(None, None, msg) assert len(msgs) == 3 assert comm._closed def test_comm_in_manager(ipkernel: IPythonKernel) -> None: with pytest.deprecated_call(): comm = Comm() assert comm.comm_id in ipkernel.comm_manager.comms ipykernel-6.29.5/tests/test_connect.py000066400000000000000000000102451464053401500200270ustar00rootroot00000000000000"""Tests for kernel connection utilities""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import errno import json import os from tempfile import TemporaryDirectory from typing import no_type_check from unittest.mock import patch import pytest import zmq from traitlets.config.loader import Config from ipykernel import connect from ipykernel.kernelapp import IPKernelApp from .utils import TemporaryWorkingDirectory sample_info: dict = { "ip": "1.2.3.4", "transport": "ipc", "shell_port": 1, "hb_port": 2, "iopub_port": 3, "stdin_port": 4, "control_port": 5, "key": b"abc123", "signature_scheme": "hmac-md5", } class DummyKernelApp(IPKernelApp): def _default_shell_port(self): return 0 def initialize(self, argv=None): self.init_profile_dir() self.init_connection_file() def test_get_connection_file(): cfg = Config() with TemporaryWorkingDirectory() as d: cfg.ProfileDir.location = d cf = "kernel.json" app = DummyKernelApp(config=cfg, connection_file=cf) app.initialize() profile_cf = os.path.join(app.connection_dir, cf) assert profile_cf == app.abs_connection_file with open(profile_cf, "w") as f: f.write("{}") assert os.path.exists(profile_cf) assert connect.get_connection_file(app) == profile_cf app.connection_file = cf assert connect.get_connection_file(app) == profile_cf def test_get_connection_info(): with TemporaryDirectory() as d: cf = os.path.join(d, "kernel.json") connect.write_connection_file(cf, **sample_info) json_info = connect.get_connection_info(cf) info = connect.get_connection_info(cf, unpack=True) assert isinstance(json_info, str) sub_info = {k: v for k, v in info.items() if k in sample_info} assert sub_info == sample_info info2 = json.loads(json_info) info2["key"] = info2["key"].encode("utf-8") sub_info2 = {k: v for k, v in info.items() if k in sample_info} assert sub_info2 == sample_info def test_port_bind_failure_raises(request): cfg = Config() with TemporaryWorkingDirectory() as d: cfg.ProfileDir.location = d cf = "kernel.json" app = DummyKernelApp(config=cfg, connection_file=cf) request.addfinalizer(app.close) app.initialize() with 
patch.object(app, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types") with pytest.raises(zmq.ZMQError): app.init_sockets() assert mock_try_bind.call_count == 1 @no_type_check def test_port_bind_failure_recovery(request): try: errno.WSAEADDRINUSE except AttributeError: # Fake windows address in-use code p = patch.object(errno, "WSAEADDRINUSE", 12345, create=True) p.start() request.addfinalizer(p.stop) cfg = Config() with TemporaryWorkingDirectory() as d: cfg.ProfileDir.location = d cf = "kernel.json" app = DummyKernelApp(config=cfg, connection_file=cf) request.addfinalizer(app.close) app.initialize() with patch.object(app, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = [ zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"), zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"), ] + [0] * 100 # Shouldn't raise anything as retries will kick in app.init_sockets() def test_port_bind_failure_gives_up_retries(request): cfg = Config() with TemporaryWorkingDirectory() as d: cfg.ProfileDir.location = d cf = "kernel.json" app = DummyKernelApp(config=cfg, connection_file=cf) request.addfinalizer(app.close) app.initialize() with patch.object(app, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind") with pytest.raises(zmq.ZMQError): app.init_sockets() assert mock_try_bind.call_count == 100 ipykernel-6.29.5/tests/test_debugger.py000066400000000000000000000272601464053401500201670ustar00rootroot00000000000000import sys import pytest from .utils import TIMEOUT, get_reply, new_kernel seq = 0 # Tests support debugpy not being installed, in which case the tests don't do anything useful # functionally as the debug message replies are usually empty dictionaries, but they confirm that # ipykernel doesn't block, or segfault, or raise an exception. 
try: import debugpy except ImportError: debugpy = None def wait_for_debug_request(kernel, command, arguments=None, full_reply=False): """Carry out a debug request and return the reply content. It does not check if the request was successful. """ global seq seq += 1 msg = kernel.session.msg( "debug_request", { "type": "request", "seq": seq, "command": command, "arguments": arguments or {}, }, ) kernel.control_channel.send(msg) reply = get_reply(kernel, msg["header"]["msg_id"], channel="control") return reply if full_reply else reply["content"] @pytest.fixture() def kernel(): with new_kernel() as kc: yield kc @pytest.fixture() def kernel_with_debug(kernel): # Initialize wait_for_debug_request( kernel, "initialize", { "clientID": "test-client", "clientName": "testClient", "adapterID": "", "pathFormat": "path", "linesStartAt1": True, "columnsStartAt1": True, "supportsVariableType": True, "supportsVariablePaging": True, "supportsRunInTerminalRequest": True, "locale": "en", }, ) # Attach wait_for_debug_request(kernel, "attach") try: yield kernel finally: # Detach wait_for_debug_request(kernel, "disconnect", {"restart": False, "terminateDebuggee": True}) def test_debug_initialize(kernel): reply = wait_for_debug_request( kernel, "initialize", { "clientID": "test-client", "clientName": "testClient", "adapterID": "", "pathFormat": "path", "linesStartAt1": True, "columnsStartAt1": True, "supportsVariableType": True, "supportsVariablePaging": True, "supportsRunInTerminalRequest": True, "locale": "en", }, ) if debugpy: assert reply["success"] else: assert reply == {} def test_attach_debug(kernel_with_debug): reply = wait_for_debug_request( kernel_with_debug, "evaluate", {"expression": "'a' + 'b'", "context": "repl"} ) if debugpy: assert reply["success"] assert reply["body"]["result"] == "" else: assert reply == {} def test_set_breakpoints(kernel_with_debug): code = """def f(a, b): c = a + b return c f(2, 3)""" r = wait_for_debug_request(kernel_with_debug, "dumpCell", {"code": 
code}) if debugpy: source = r["body"]["sourcePath"] else: assert r == {} source = "non-existent path" reply = wait_for_debug_request( kernel_with_debug, "setBreakpoints", { "breakpoints": [{"line": 2}], "source": {"path": source}, "sourceModified": False, }, ) if debugpy: assert reply["success"] assert len(reply["body"]["breakpoints"]) == 1 assert reply["body"]["breakpoints"][0]["verified"] assert reply["body"]["breakpoints"][0]["source"]["path"] == source else: assert reply == {} r = wait_for_debug_request(kernel_with_debug, "debugInfo") def func(b): return b["source"] if debugpy: assert source in map(func, r["body"]["breakpoints"]) else: assert r == {} r = wait_for_debug_request(kernel_with_debug, "configurationDone") if debugpy: assert r["success"] else: assert r == {} def test_stop_on_breakpoint(kernel_with_debug): code = """def f(a, b): c = a + b return c f(2, 3)""" r = wait_for_debug_request(kernel_with_debug, "dumpCell", {"code": code}) if debugpy: source = r["body"]["sourcePath"] else: assert r == {} source = "some path" wait_for_debug_request(kernel_with_debug, "debugInfo") wait_for_debug_request( kernel_with_debug, "setBreakpoints", { "breakpoints": [{"line": 2}], "source": {"path": source}, "sourceModified": False, }, ) wait_for_debug_request(kernel_with_debug, "configurationDone", full_reply=True) kernel_with_debug.execute(code) if not debugpy: # Cannot stop on breakpoint if debugpy not installed return # Wait for stop on breakpoint msg: dict = {"msg_type": "", "content": {}} while msg.get("msg_type") != "debug_event" or msg["content"].get("event") != "stopped": msg = kernel_with_debug.get_iopub_msg(timeout=TIMEOUT) assert msg["content"]["body"]["reason"] == "breakpoint" def test_breakpoint_in_cell_with_leading_empty_lines(kernel_with_debug): code = """ def f(a, b): c = a + b return c f(2, 3)""" r = wait_for_debug_request(kernel_with_debug, "dumpCell", {"code": code}) if debugpy: source = r["body"]["sourcePath"] else: assert r == {} source = "some path" 
wait_for_debug_request(kernel_with_debug, "debugInfo") wait_for_debug_request( kernel_with_debug, "setBreakpoints", { "breakpoints": [{"line": 6}], "source": {"path": source}, "sourceModified": False, }, ) wait_for_debug_request(kernel_with_debug, "configurationDone", full_reply=True) kernel_with_debug.execute(code) if not debugpy: # Cannot stop on breakpoint if debugpy not installed return # Wait for stop on breakpoint msg: dict = {"msg_type": "", "content": {}} while msg.get("msg_type") != "debug_event" or msg["content"].get("event") != "stopped": msg = kernel_with_debug.get_iopub_msg(timeout=TIMEOUT) assert msg["content"]["body"]["reason"] == "breakpoint" def test_rich_inspect_not_at_breakpoint(kernel_with_debug): var_name = "text" value = "Hello the world" code = f"""{var_name}='{value}' print({var_name}) """ msg_id = kernel_with_debug.execute(code) get_reply(kernel_with_debug, msg_id) r = wait_for_debug_request(kernel_with_debug, "inspectVariables") def func(v): return v["name"] if debugpy: assert var_name in list(map(func, r["body"]["variables"])) else: assert r == {} reply = wait_for_debug_request( kernel_with_debug, "richInspectVariables", {"variableName": var_name}, ) if debugpy: assert reply["body"]["data"] == {"text/plain": f"'{value}'"} else: assert reply == {} def test_rich_inspect_at_breakpoint(kernel_with_debug): code = """def f(a, b): c = a + b return c f(2, 3)""" r = wait_for_debug_request(kernel_with_debug, "dumpCell", {"code": code}) if debugpy: source = r["body"]["sourcePath"] else: assert r == {} source = "some path" wait_for_debug_request( kernel_with_debug, "setBreakpoints", { "breakpoints": [{"line": 2}], "source": {"path": source}, "sourceModified": False, }, ) r = wait_for_debug_request(kernel_with_debug, "debugInfo") r = wait_for_debug_request(kernel_with_debug, "configurationDone") kernel_with_debug.execute(code) if not debugpy: # Cannot stop on breakpoint if debugpy not installed return # Wait for stop on breakpoint msg: dict = 
{"msg_type": "", "content": {}} while msg.get("msg_type") != "debug_event" or msg["content"].get("event") != "stopped": msg = kernel_with_debug.get_iopub_msg(timeout=TIMEOUT) stacks = wait_for_debug_request(kernel_with_debug, "stackTrace", {"threadId": 1})["body"][ "stackFrames" ] scopes = wait_for_debug_request(kernel_with_debug, "scopes", {"frameId": stacks[0]["id"]})[ "body" ]["scopes"] locals_ = wait_for_debug_request( kernel_with_debug, "variables", { "variablesReference": next(filter(lambda s: s["name"] == "Locals", scopes))[ "variablesReference" ] }, )["body"]["variables"] reply = wait_for_debug_request( kernel_with_debug, "richInspectVariables", {"variableName": locals_[0]["name"], "frameId": stacks[0]["id"]}, ) assert reply["body"]["data"] == {"text/plain": locals_[0]["value"]} def test_convert_to_long_pathname(): if sys.platform == "win32": from ipykernel.compiler import _convert_to_long_pathname _convert_to_long_pathname(__file__) def test_copy_to_globals(kernel_with_debug): local_var_name = "var" global_var_name = "var_copy" code = f"""from IPython.core.display import HTML def my_test(): {local_var_name} = HTML('

test content

') pass a = 2 my_test()""" # Init debugger and set breakpoint r = wait_for_debug_request(kernel_with_debug, "dumpCell", {"code": code}) if debugpy: source = r["body"]["sourcePath"] else: assert r == {} source = "some path" wait_for_debug_request( kernel_with_debug, "setBreakpoints", { "breakpoints": [{"line": 4}], "source": {"path": source}, "sourceModified": False, }, ) wait_for_debug_request(kernel_with_debug, "debugInfo") wait_for_debug_request(kernel_with_debug, "configurationDone") # Execute code kernel_with_debug.execute(code) if not debugpy: # Cannot stop on breakpoint if debugpy not installed return # Wait for stop on breakpoint msg: dict = {"msg_type": "", "content": {}} while msg.get("msg_type") != "debug_event" or msg["content"].get("event") != "stopped": msg = kernel_with_debug.get_iopub_msg(timeout=TIMEOUT) stacks = wait_for_debug_request(kernel_with_debug, "stackTrace", {"threadId": 1})["body"][ "stackFrames" ] # Get local frame id frame_id = stacks[0]["id"] # Copy the variable wait_for_debug_request( kernel_with_debug, "copyToGlobals", { "srcVariableName": local_var_name, "dstVariableName": global_var_name, "srcFrameId": frame_id, }, ) # Get the scopes scopes = wait_for_debug_request(kernel_with_debug, "scopes", {"frameId": frame_id})["body"][ "scopes" ] # Get the local variable locals_ = wait_for_debug_request( kernel_with_debug, "variables", { "variablesReference": next(filter(lambda s: s["name"] == "Locals", scopes))[ "variablesReference" ] }, )["body"]["variables"] local_var = None for variable in locals_: if local_var_name in variable["evaluateName"]: local_var = variable assert local_var is not None # Get the global variable (copy of the local variable) globals_ = wait_for_debug_request( kernel_with_debug, "variables", { "variablesReference": next(filter(lambda s: s["name"] == "Globals", scopes))[ "variablesReference" ] }, )["body"]["variables"] global_var = None for variable in globals_: if global_var_name in variable["evaluateName"]: 
global_var = variable assert global_var is not None # Compare local and global variable assert global_var["value"] == local_var["value"] and global_var["type"] == local_var["type"] # noqa: PT018 ipykernel-6.29.5/tests/test_embed_kernel.py000066400000000000000000000135141464053401500210140ustar00rootroot00000000000000"""test embed_kernel""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json import os import sys import threading import time from contextlib import contextmanager from subprocess import PIPE, Popen import pytest from flaky import flaky from jupyter_client.blocking.client import BlockingKernelClient from jupyter_core import paths from ipykernel.embed import IPKernelApp, embed_kernel # type:ignore[attr-defined] SETUP_TIMEOUT = 60 TIMEOUT = 15 if os.name == "nt": pytest.skip("skipping tests on windows", allow_module_level=True) @contextmanager def setup_kernel(cmd): """start an embedded kernel in a subprocess, and wait for it to be ready Returns ------- kernel_manager: connected KernelManager instance """ def connection_file_ready(connection_file): """Check if connection_file is a readable json file.""" if not os.path.exists(connection_file): return False try: with open(connection_file) as f: json.load(f) return True except ValueError: return False kernel = Popen([sys.executable, "-c", cmd], stdout=PIPE, stderr=PIPE, encoding="utf-8") try: connection_file = os.path.join( paths.jupyter_runtime_dir(), "kernel-%i.json" % kernel.pid, ) # wait for connection file to exist, timeout after 5s tic = time.time() while ( not connection_file_ready(connection_file) and kernel.poll() is None and time.time() < tic + SETUP_TIMEOUT ): time.sleep(0.1) # Wait 100ms for the writing to finish time.sleep(0.1) if kernel.poll() is not None: o, e = kernel.communicate() raise OSError("Kernel failed to start:\n%s" % e) if not os.path.exists(connection_file): if kernel.poll() is None: kernel.terminate() raise 
OSError("Connection file %r never arrived" % connection_file) client = BlockingKernelClient(connection_file=connection_file) client.load_connection_file() client.start_channels() client.wait_for_ready() try: yield client finally: client.stop_channels() finally: kernel.terminate() kernel.wait() # Make sure all the fds get closed. for attr in ["stdout", "stderr", "stdin"]: fid = getattr(kernel, attr) if fid: fid.close() @flaky(max_runs=3) def test_embed_kernel_basic(): """IPython.embed_kernel() is basically functional""" cmd = "\n".join( [ "from IPython import embed_kernel", "def go():", " a=5", ' b="hi there"', " embed_kernel()", "go()", "", ] ) with setup_kernel(cmd) as client: # oinfo a (int) client.inspect("a") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] client.execute("c=a*2") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["status"] == "ok" # oinfo c (should be 10) client.inspect("c") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "10" in text @flaky(max_runs=3) def test_embed_kernel_namespace(): """IPython.embed_kernel() inherits calling namespace""" cmd = "\n".join( [ "from IPython import embed_kernel", "def go():", " a=5", ' b="hi there"', " embed_kernel()", "go()", "", ] ) with setup_kernel(cmd) as client: # oinfo a (int) client.inspect("a") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "5" in text # oinfo b (str) client.inspect("b") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "hi there" in text # oinfo c (undefined) client.inspect("c") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert not content["found"] @flaky(max_runs=3) def test_embed_kernel_reentrant(): 
"""IPython.embed_kernel() can be called multiple times""" cmd = "\n".join( [ "from IPython import embed_kernel", "count = 0", "def go():", " global count", " embed_kernel()", " count = count + 1", "", "while True: go()", "", ] ) with setup_kernel(cmd) as client: for i in range(5): client.inspect("count") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert str(i) in text # exit from embed_kernel client.execute("get_ipython().exit_now = True") msg = client.get_shell_msg(timeout=TIMEOUT) time.sleep(0.2) def test_embed_kernel_func(): from types import ModuleType module = ModuleType("test") def trigger_stop(): time.sleep(1) app = IPKernelApp.instance() app.io_loop.add_callback(app.io_loop.stop) IPKernelApp.clear_instance() thread = threading.Thread(target=trigger_stop) thread.start() embed_kernel(module, outstream_class=None) ipykernel-6.29.5/tests/test_eventloop.py000066400000000000000000000076151464053401500204200ustar00rootroot00000000000000"""Test eventloop integration""" import asyncio import os import sys import threading import time import pytest import tornado from ipykernel.eventloops import ( enable_gui, loop_asyncio, loop_cocoa, loop_tk, ) from .utils import execute, flush_channels, start_new_kernel KC = KM = None qt_guis_avail = [] gui_to_module = {"qt6": "PySide6", "qt5": "PyQt5"} def _get_qt_vers(): """If any version of Qt is available, this will populate `guis_avail` with 'qt' and 'qtx'. Due to the import mechanism, we can't import multiple versions of Qt in one session.""" for gui in ["qt6", "qt5"]: print(f"Trying {gui}") try: __import__(gui_to_module[gui]) qt_guis_avail.append(gui) if "QT_API" in os.environ: del os.environ["QT_API"] except ImportError: pass # that version of Qt isn't available. 
_get_qt_vers() def setup(): """start the global kernel (if it isn't running) and return its client""" global KM, KC KM, KC = start_new_kernel() flush_channels(KC) def teardown(): assert KM is not None assert KC is not None KC.stop_channels() KM.shutdown_kernel(now=True) async_code = """ from tests._asyncio_utils import async_func async_func() """ @pytest.mark.skipif(tornado.version_info < (5,), reason="only relevant on tornado 5") def test_asyncio_interrupt(): assert KM is not None assert KC is not None flush_channels(KC) msg_id, content = execute("%gui asyncio", KC) assert content["status"] == "ok", content flush_channels(KC) msg_id, content = execute(async_code, KC) assert content["status"] == "ok", content KM.interrupt_kernel() flush_channels(KC) msg_id, content = execute(async_code, KC) assert content["status"] == "ok" windows_skip = pytest.mark.skipif(os.name == "nt", reason="causing failures on windows") @windows_skip @pytest.mark.skipif(sys.platform == "darwin", reason="hangs on macos") def test_tk_loop(kernel): def do_thing(): time.sleep(1) try: kernel.app_wrapper.app.quit() # guard for tk failing to start (if there is no display) except AttributeError: pass t = threading.Thread(target=do_thing) t.start() # guard for tk failing to start (if there is no display) try: loop_tk(kernel) except Exception: pass t.join() @windows_skip def test_asyncio_loop(kernel): def do_thing(): loop.call_soon(loop.stop) loop = asyncio.get_event_loop() loop.call_soon(do_thing) loop_asyncio(kernel) @windows_skip def test_enable_gui(kernel): enable_gui("tk", kernel) @pytest.mark.skipif(sys.platform != "darwin", reason="MacOS-only") def test_cocoa_loop(kernel): loop_cocoa(kernel) @pytest.mark.skipif( len(qt_guis_avail) == 0, reason="No viable version of PyQt or PySide installed." ) def test_qt_enable_gui(kernel, capsys): gui = qt_guis_avail[0] enable_gui(gui, kernel) # We store the `QApplication` instance in the kernel. 
assert hasattr(kernel, "app") # And the `QEventLoop` is added to `app`:` assert hasattr(kernel.app, "qt_event_loop") # Don't create another app even if `gui` is the same. app = kernel.app enable_gui(gui, kernel) assert app == kernel.app # Event loop integration can be turned off. enable_gui(None, kernel) assert not hasattr(kernel, "app") # But now we're stuck with this version of Qt for good; can't switch. for not_gui in ["qt6", "qt5"]: if not_gui not in qt_guis_avail: break enable_gui(not_gui, kernel) captured = capsys.readouterr() assert captured.out == f"Cannot switch Qt versions for this session; you must use {gui}.\n" # Check 'qt' gui, which means "the best available" enable_gui(None, kernel) enable_gui("qt", kernel) assert gui_to_module[gui] in str(kernel.app) ipykernel-6.29.5/tests/test_heartbeat.py000066400000000000000000000036211464053401500203350ustar00rootroot00000000000000"""Tests for heartbeat thread""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import errno from typing import no_type_check from unittest.mock import patch import pytest import zmq from ipykernel.heartbeat import Heartbeat def test_port_bind_failure_raises(): heart = Heartbeat(None) with patch.object(heart, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types") with pytest.raises(zmq.ZMQError): heart._bind_socket() assert mock_try_bind.call_count == 1 def test_port_bind_success(): heart = Heartbeat(None) with patch.object(heart, "_try_bind_socket") as mock_try_bind: heart._bind_socket() assert mock_try_bind.call_count == 1 @no_type_check def test_port_bind_failure_recovery(): try: errno.WSAEADDRINUSE except AttributeError: # Fake windows address in-use code errno.WSAEADDRINUSE = 12345 try: heart = Heartbeat(None) with patch.object(heart, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = [ zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"), zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows"), ] + [0] * 100 # Shouldn't raise anything as retries will kick in heart._bind_socket() finally: # Cleanup fake assignment if errno.WSAEADDRINUSE == 12345: del errno.WSAEADDRINUSE def test_port_bind_failure_gives_up_retries(): heart = Heartbeat(None) with patch.object(heart, "_try_bind_socket") as mock_try_bind: mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind") with pytest.raises(zmq.ZMQError): heart._bind_socket() assert mock_try_bind.call_count == 100 ipykernel-6.29.5/tests/test_io.py000066400000000000000000000164531464053401500170140ustar00rootroot00000000000000"""Test IO capturing functionality""" import io import os import subprocess import sys import threading import time import warnings from concurrent.futures import Future, ThreadPoolExecutor from unittest import mock import pytest import zmq from jupyter_client.session import Session from ipykernel.iostream import MASTER, BackgroundSocket, IOPubThread, OutStream 
@pytest.fixture() def ctx(): ctx = zmq.Context() yield ctx ctx.destroy() @pytest.fixture() def iopub_thread(ctx): with ctx.socket(zmq.PUB) as pub: thread = IOPubThread(pub) thread.start() yield thread thread.stop() thread.close() def test_io_api(iopub_thread): """Test that wrapped stdout has the same API as a normal TextIO object""" session = Session() stream = OutStream(session, iopub_thread, "stdout") assert stream.errors is None assert not stream.isatty() with pytest.raises(io.UnsupportedOperation): stream.detach() with pytest.raises(io.UnsupportedOperation): next(stream) with pytest.raises(io.UnsupportedOperation): stream.read() with pytest.raises(io.UnsupportedOperation): stream.readline() with pytest.raises(io.UnsupportedOperation): stream.seek(0) with pytest.raises(io.UnsupportedOperation): stream.tell() with pytest.raises(TypeError): stream.write(b"") # type:ignore def test_io_isatty(iopub_thread): session = Session() stream = OutStream(session, iopub_thread, "stdout", isatty=True) assert stream.isatty() def test_io_thread(iopub_thread): thread = iopub_thread thread._setup_pipe_in() msg = [thread._pipe_uuid, b"a"] thread._handle_pipe_msg(msg) ctx1, pipe = thread._setup_pipe_out() pipe.close() thread._pipe_in.close() thread._check_mp_mode = lambda: MASTER thread._really_send([b"hi"]) ctx1.destroy() thread.close() thread.close() thread._really_send(None) def test_background_socket(iopub_thread): sock = BackgroundSocket(iopub_thread) assert sock.__class__ == BackgroundSocket with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) sock.linger = 101 assert iopub_thread.socket.linger == 101 assert sock.io_thread == iopub_thread sock.send(b"hi") def test_outstream(iopub_thread): session = Session() pub = iopub_thread.socket with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) stream = OutStream(session, pub, "stdout") stream.close() stream = OutStream(session, iopub_thread, "stdout", pipe=object()) 
stream.close() stream = OutStream(session, iopub_thread, "stdout", watchfd=False) stream.close() stream = OutStream(session, iopub_thread, "stdout", isatty=True, echo=io.StringIO()) with stream: with pytest.raises(io.UnsupportedOperation): stream.fileno() stream._watch_pipe_fd() stream.flush() stream.write("hi") stream.writelines(["ab", "cd"]) assert stream.writable() async def test_event_pipe_gc(iopub_thread): session = Session(key=b"abc") stream = OutStream( session, iopub_thread, "stdout", isatty=True, watchfd=False, ) assert iopub_thread._event_pipes == {} with stream, mock.patch.object(sys, "stdout", stream), ThreadPoolExecutor(1) as pool: pool.submit(print, "x").result() pool_thread = pool.submit(threading.current_thread).result() assert list(iopub_thread._event_pipes) == [pool_thread] # run gc once in the iopub thread f: Future = Future() async def run_gc(): try: await iopub_thread._event_pipe_gc() except Exception as e: f.set_exception(e) else: f.set_result(None) iopub_thread.io_loop.add_callback(run_gc) # wait for call to finish in iopub thread f.result() assert iopub_thread._event_pipes == {} def subprocess_test_echo_watch(): # handshake Pub subscription session = Session(key=b"abc") # use PUSH socket to avoid subscription issues with zmq.Context() as ctx, ctx.socket(zmq.PUSH) as pub: pub.connect(os.environ["IOPUB_URL"]) iopub_thread = IOPubThread(pub) iopub_thread.start() stdout_fd = sys.stdout.fileno() sys.stdout.flush() stream = OutStream( session, iopub_thread, "stdout", isatty=True, echo=sys.stdout, watchfd="force", ) save_stdout = sys.stdout with stream, mock.patch.object(sys, "stdout", stream): # write to low-level FD os.write(stdout_fd, b"fd\n") # print (writes to stream) print("print\n", end="") sys.stdout.flush() # write to unwrapped __stdout__ (should also go to original FD) sys.__stdout__.write("__stdout__\n") sys.__stdout__.flush() # write to original sys.stdout (should be the same as __stdout__) save_stdout.write("stdout\n") 
save_stdout.flush() # is there another way to flush on the FD? fd_file = os.fdopen(stdout_fd, "w") fd_file.flush() # we don't have a sync flush on _reading_ from the watched pipe time.sleep(1) stream.flush() iopub_thread.stop() iopub_thread.close() @pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows") def test_echo_watch(ctx): """Test echo on underlying FD while capturing the same FD Test runs in a subprocess to avoid messing with pytest output capturing. """ s = ctx.socket(zmq.PULL) port = s.bind_to_random_port("tcp://127.0.0.1") url = f"tcp://127.0.0.1:{port}" session = Session(key=b"abc") stdout_chunks = [] with s: env = dict(os.environ) env["IOPUB_URL"] = url env["PYTHONUNBUFFERED"] = "1" env.pop("PYTEST_CURRENT_TEST", None) p = subprocess.run( [ sys.executable, "-c", f"import {__name__}; {__name__}.subprocess_test_echo_watch()", ], env=env, capture_output=True, check=True, text=True, timeout=10, ) print(f"{p.stdout=}") print(f"{p.stderr}=", file=sys.stderr) assert p.returncode == 0 while s.poll(timeout=100): ident, msg = session.recv(s) assert msg is not None # for type narrowing if msg["header"]["msg_type"] == "stream" and msg["content"]["name"] == "stdout": stdout_chunks.append(msg["content"]["text"]) # check outputs # use sets of lines to ignore ordering issues with # async flush and watchfd thread # Check the stream output forwarded over zmq zmq_stdout = "".join(stdout_chunks) assert set(zmq_stdout.strip().splitlines()) == { "fd", "print", "stdout", "__stdout__", } # Check what was written to the process stdout (kernel terminal) # just check that each output source went to the terminal assert set(p.stdout.strip().splitlines()) == { "fd", "print", "stdout", "__stdout__", } ipykernel-6.29.5/tests/test_ipkernel_direct.py000066400000000000000000000166641464053401500215540ustar00rootroot00000000000000"""Test IPythonKernel directly""" import asyncio import os import pytest import zmq from IPython.core.history import DummyDB from 
ipykernel.comm.comm import BaseComm from ipykernel.ipkernel import IPythonKernel, _create_comm from .conftest import MockIPyKernel if os.name == "nt": pytest.skip("skipping tests on windows", allow_module_level=True) class user_mod: __dict__ = {} async def test_properties(ipkernel: IPythonKernel) -> None: ipkernel.user_module = user_mod() ipkernel.user_ns = {} async def test_direct_kernel_info_request(ipkernel): reply = await ipkernel.test_shell_message("kernel_info_request", {}) assert reply["header"]["msg_type"] == "kernel_info_reply" async def test_direct_execute_request(ipkernel: MockIPyKernel) -> None: reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) assert reply["header"]["msg_type"] == "execute_reply" reply = await ipkernel.test_shell_message( "execute_request", dict(code="trigger_error", silent=False) ) assert reply["content"]["status"] == "aborted" reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) assert reply["header"]["msg_type"] == "execute_reply" async def test_direct_execute_request_aborting(ipkernel): ipkernel._aborting = True reply = await ipkernel.test_shell_message("execute_request", dict(code="hello", silent=False)) assert reply["header"]["msg_type"] == "execute_reply" assert reply["content"]["status"] == "aborted" async def test_complete_request(ipkernel): reply = await ipkernel.test_shell_message("complete_request", dict(code="hello", cursor_pos=0)) assert reply["header"]["msg_type"] == "complete_reply" ipkernel.use_experimental_completions = False reply = await ipkernel.test_shell_message( "complete_request", dict(code="hello", cursor_pos=None) ) assert reply["header"]["msg_type"] == "complete_reply" async def test_inspect_request(ipkernel): reply = await ipkernel.test_shell_message("inspect_request", dict(code="hello", cursor_pos=0)) assert reply["header"]["msg_type"] == "inspect_reply" async def test_history_request(ipkernel): 
ipkernel.shell.history_manager.db = DummyDB() reply = await ipkernel.test_shell_message( "history_request", dict(hist_access_type="", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await ipkernel.test_shell_message( "history_request", dict(hist_access_type="tail", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await ipkernel.test_shell_message( "history_request", dict(hist_access_type="range", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await ipkernel.test_shell_message( "history_request", dict(hist_access_type="search", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" async def test_comm_info_request(ipkernel): reply = await ipkernel.test_shell_message("comm_info_request") assert reply["header"]["msg_type"] == "comm_info_reply" async def test_direct_interrupt_request(ipkernel): reply = await ipkernel.test_control_message("interrupt_request", {}) assert reply["header"]["msg_type"] == "interrupt_reply" assert reply["content"] == {"status": "ok"} # test failure on interrupt request def raiseOSError(): msg = "evalue" raise OSError(msg) ipkernel._send_interrupt_children = raiseOSError reply = await ipkernel.test_control_message("interrupt_request", {}) assert reply["header"]["msg_type"] == "interrupt_reply" assert reply["content"]["status"] == "error" assert reply["content"]["ename"] == "OSError" assert reply["content"]["evalue"] == "evalue" assert len(reply["content"]["traceback"]) > 0 # TODO: this causes deadlock # async def test_direct_shutdown_request(ipkernel): # reply = await ipkernel.test_shell_message("shutdown_request", dict(restart=False)) # assert reply["header"]["msg_type"] == "shutdown_reply" # reply = await ipkernel.test_shell_message("shutdown_request", dict(restart=True)) # assert reply["header"]["msg_type"] == "shutdown_reply" # TODO: this causes deadlock # async def test_direct_usage_request(kernel): # reply = await 
kernel.test_control_message("usage_request", {}) # assert reply['header']['msg_type'] == 'usage_reply' async def test_is_complete_request(ipkernel: MockIPyKernel) -> None: reply = await ipkernel.test_shell_message("is_complete_request", dict(code="hello")) assert reply["header"]["msg_type"] == "is_complete_reply" setattr(ipkernel, "shell.input_transformer_manager", None) reply = await ipkernel.test_shell_message("is_complete_request", dict(code="hello")) assert reply["header"]["msg_type"] == "is_complete_reply" def test_do_apply(ipkernel: MockIPyKernel) -> None: from ipyparallel import pack_apply_message def hello(): pass msg = pack_apply_message(hello, (), {}) ipkernel.do_apply(None, msg, "1", {}) ipkernel.do_apply(None, [], "1", {}) async def test_direct_debug_request(ipkernel): reply = await ipkernel.test_control_message("debug_request", {}) assert reply["header"]["msg_type"] == "debug_reply" async def test_direct_clear(ipkernel): ipkernel.do_clear() async def test_cancel_on_sigint(ipkernel: IPythonKernel) -> None: future: asyncio.Future = asyncio.Future() with ipkernel._cancel_on_sigint(future): pass future.set_result(None) def test_dispatch_debugpy(ipkernel: IPythonKernel) -> None: msg = ipkernel.session.msg("debug_request", {}) msg_list = ipkernel.session.serialize(msg) ipkernel.dispatch_debugpy([zmq.Message(m) for m in msg_list]) async def test_start(ipkernel: IPythonKernel) -> None: shell_future: asyncio.Future = asyncio.Future() control_future: asyncio.Future = asyncio.Future() async def fake_dispatch_queue(): shell_future.set_result(None) async def fake_poll_control_queue(): control_future.set_result(None) ipkernel.dispatch_queue = fake_dispatch_queue # type:ignore ipkernel.poll_control_queue = fake_poll_control_queue # type:ignore ipkernel.start() ipkernel.debugpy_stream = None ipkernel.start() await ipkernel.process_one(False) await shell_future await control_future async def test_start_no_debugpy(ipkernel: IPythonKernel) -> None: shell_future: 
asyncio.Future = asyncio.Future() control_future: asyncio.Future = asyncio.Future() async def fake_dispatch_queue(): shell_future.set_result(None) async def fake_poll_control_queue(): control_future.set_result(None) ipkernel.dispatch_queue = fake_dispatch_queue # type:ignore ipkernel.poll_control_queue = fake_poll_control_queue # type:ignore ipkernel.debugpy_stream = None ipkernel.start() await shell_future await control_future def test_create_comm(): assert isinstance(_create_comm(), BaseComm) def test_finish_metadata(ipkernel: IPythonKernel) -> None: reply_content = dict(status="error", ename="UnmetDependency") metadata = ipkernel.finish_metadata({}, {}, reply_content) assert metadata["dependencies_met"] is False async def test_do_debug_request(ipkernel: IPythonKernel) -> None: msg = ipkernel.session.msg("debug_request", {}) ipkernel.session.serialize(msg) await ipkernel.do_debug_request(msg) ipykernel-6.29.5/tests/test_jsonutil.py000066400000000000000000000067241464053401500202540ustar00rootroot00000000000000"""Test suite for our JSON utilities.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json import numbers from binascii import a2b_base64 from datetime import date, datetime import pytest from jupyter_client._version import version_info as jupyter_client_version from ipykernel import jsonutil from ipykernel.jsonutil import encode_images, json_clean JUPYTER_CLIENT_MAJOR_VERSION: int = jupyter_client_version[0] # type:ignore class MyInt: def __int__(self): return 389 numbers.Integral.register(MyInt) class MyFloat: def __float__(self): return 3.14 numbers.Real.register(MyFloat) @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") def test(): # list of input/expected output. Use None for the expected output if it # can be the same as the input. 
pairs = [ (1, None), # start with scalars (1.0, None), ("a", None), (True, None), (False, None), (None, None), # Containers ([1, 2], None), ((1, 2), [1, 2]), ({1, 2}, [1, 2]), (dict(x=1), None), ({"x": 1, "y": [1, 2, 3], "1": "int"}, None), # More exotic objects ((x for x in range(3)), [0, 1, 2]), (iter([1, 2]), [1, 2]), (datetime(1991, 7, 3, 12, 00), "1991-07-03T12:00:00.000000"), (date(1991, 7, 3), "1991-07-03T00:00:00.000000"), (MyFloat(), 3.14), (MyInt(), 389), ] for val, jval in pairs: if jval is None: jval = val # type:ignore out = json_clean(val) # validate our cleanup assert out == jval # and ensure that what we return, indeed encodes cleanly json.loads(json.dumps(out)) @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") def test_encode_images(): # invalid data, but the header and footer are from real files pngdata = b"\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82" jpegdata = b"\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9" pdfdata = b"%PDF-1.\ntrailer<>]>>>>>>" bindata = b"\xff\xff\xff\xff" fmt = { "image/png": pngdata, "image/jpeg": jpegdata, "application/pdf": pdfdata, "application/unrecognized": bindata, } encoded = json_clean(encode_images(fmt)) for key, value in fmt.items(): # encoded has unicode, want bytes decoded = a2b_base64(encoded[key]) assert decoded == value encoded2 = json_clean(encode_images(encoded)) assert encoded == encoded2 for key, value in fmt.items(): decoded = a2b_base64(encoded[key]) assert decoded == value @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") def test_lambda(): with pytest.raises(ValueError): # noqa: PT011 json_clean(lambda: 1) @pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") def test_exception(): bad_dicts = [ {1: "number", "1": "string"}, {True: "bool", "True": "string"}, ] for d in bad_dicts: with pytest.raises(ValueError): # noqa: PT011 json_clean(d) 
@pytest.mark.skipif(JUPYTER_CLIENT_MAJOR_VERSION >= 7, reason="json_clean is a no-op") def test_unicode_dict(): data = {"üniço∂e": "üniço∂e"} clean = jsonutil.json_clean(data) assert data == clean ipykernel-6.29.5/tests/test_kernel.py000066400000000000000000000535351464053401500176670ustar00rootroot00000000000000"""test the IPython Kernel""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import ast import os.path import platform import signal import subprocess import sys import time from subprocess import Popen from tempfile import TemporaryDirectory import IPython import psutil import pytest from flaky import flaky from IPython.paths import locate_profile from .utils import ( TIMEOUT, assemble_output, execute, flush_channels, get_reply, kernel, new_kernel, wait_for_idle, ) def _check_master(kc, expected=True, stream="stdout"): execute(kc=kc, code="import sys") flush_channels(kc) msg_id, content = execute(kc=kc, code="print(sys.%s._is_master_process())" % stream) stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout.strip() == repr(expected) def _check_status(content): """If status=error, show the traceback""" if content["status"] == "error": raise AssertionError("".join(["\n"] + content["traceback"])) # printing tests def test_simple_print(): """simple print statement in kernel""" with kernel() as kc: msg_id, content = execute(kc=kc, code="print('hi')") stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout == "hi\n" assert stderr == "" _check_master(kc, expected=True) def test_print_to_correct_cell_from_thread(): """should print to the cell that spawned the thread, not a subsequently run cell""" iterations = 5 interval = 0.25 code = f"""\ from threading import Thread from time import sleep def thread_target(): for i in range({iterations}): print(i, end='', flush=True) sleep({interval}) Thread(target=thread_target).start() """ with kernel() as kc: thread_msg_id = kc.execute(code) _ = 
kc.execute("pass") received = 0 while received < iterations: msg = kc.get_iopub_msg(timeout=interval * 2) if msg["msg_type"] != "stream": continue content = msg["content"] assert content["name"] == "stdout" assert content["text"] == str(received) # this is crucial as the parent header decides to which cell the output goes assert msg["parent_header"]["msg_id"] == thread_msg_id received += 1 def test_print_to_correct_cell_from_child_thread(): """should print to the cell that spawned the thread, not a subsequently run cell""" iterations = 5 interval = 0.25 code = f"""\ from threading import Thread from time import sleep def child_target(): for i in range({iterations}): print(i, end='', flush=True) sleep({interval}) def parent_target(): sleep({interval}) Thread(target=child_target).start() Thread(target=parent_target).start() """ with kernel() as kc: thread_msg_id = kc.execute(code) _ = kc.execute("pass") received = 0 while received < iterations: msg = kc.get_iopub_msg(timeout=interval * 2) if msg["msg_type"] != "stream": continue content = msg["content"] assert content["name"] == "stdout" assert content["text"] == str(received) # this is crucial as the parent header decides to which cell the output goes assert msg["parent_header"]["msg_id"] == thread_msg_id received += 1 def test_print_to_correct_cell_from_asyncio(): """should print to the cell that scheduled the task, not a subsequently run cell""" iterations = 5 interval = 0.25 code = f"""\ import asyncio async def async_task(): for i in range({iterations}): print(i, end='', flush=True) await asyncio.sleep({interval}) loop = asyncio.get_event_loop() loop.create_task(async_task()); """ with kernel() as kc: thread_msg_id = kc.execute(code) _ = kc.execute("pass") received = 0 while received < iterations: msg = kc.get_iopub_msg(timeout=interval * 2) if msg["msg_type"] != "stream": continue content = msg["content"] assert content["name"] == "stdout" assert content["text"] == str(received) # this is crucial as the parent 
header decides to which cell the output goes assert msg["parent_header"]["msg_id"] == thread_msg_id received += 1 @pytest.mark.skip(reason="Currently don't capture during test as pytest does its own capturing") def test_capture_fd(): """simple print statement in kernel""" with kernel() as kc: iopub = kc.iopub_channel msg_id, content = execute(kc=kc, code="import os; os.system('echo capsys')") stdout, stderr = assemble_output(iopub) assert stdout == "capsys\n" assert stderr == "" _check_master(kc, expected=True) @pytest.mark.skip(reason="Currently don't capture during test as pytest does its own capturing") def test_subprocess_peek_at_stream_fileno(): with kernel() as kc: iopub = kc.iopub_channel msg_id, content = execute( kc=kc, code="import subprocess, sys; subprocess.run(['python', '-c', 'import os; os.system(\"echo CAP1\"); print(\"CAP2\")'], stderr=sys.stderr)", ) stdout, stderr = assemble_output(iopub) assert stdout == "CAP1\nCAP2\n" assert stderr == "" _check_master(kc, expected=True) def test_sys_path(): """test that sys.path doesn't get messed up by default""" with kernel() as kc: msg_id, content = execute(kc=kc, code="import sys; print(repr(sys.path))") stdout, stderr = assemble_output(kc.get_iopub_msg) # for error-output on failure sys.stderr.write(stderr) sys_path = ast.literal_eval(stdout.strip()) assert "" in sys_path def test_sys_path_profile_dir(): """test that sys.path doesn't get messed up when `--profile-dir` is specified""" with new_kernel(["--profile-dir", locate_profile("default")]) as kc: msg_id, content = execute(kc=kc, code="import sys; print(repr(sys.path))") stdout, stderr = assemble_output(kc.get_iopub_msg) # for error-output on failure sys.stderr.write(stderr) sys_path = ast.literal_eval(stdout.strip()) assert "" in sys_path @flaky(max_runs=3) @pytest.mark.skipif( sys.platform == "win32" or (sys.platform == "darwin" and sys.version_info >= (3, 8)), reason="subprocess prints fail on Windows and MacOS Python 3.8+", ) def 
test_subprocess_print(): """printing from forked mp.Process""" with new_kernel() as kc: _check_master(kc, expected=True) flush_channels(kc) np = 5 code = "\n".join( [ "import time", "import multiprocessing as mp", "pool = [mp.Process(target=print, args=('hello', i,)) for i in range(%i)]" % np, "for p in pool: p.start()", "for p in pool: p.join()", "time.sleep(0.5),", ] ) msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout.count("hello") == np, stdout for n in range(np): assert stdout.count(str(n)) == 1, stdout assert stderr == "" _check_master(kc, expected=True) _check_master(kc, expected=True, stream="stderr") @flaky(max_runs=3) def test_subprocess_noprint(): """mp.Process without print doesn't trigger iostream mp_mode""" with kernel() as kc: np = 5 code = "\n".join( [ "import multiprocessing as mp", "pool = [mp.Process(target=range, args=(i,)) for i in range(%i)]" % np, "for p in pool: p.start()", "for p in pool: p.join()", ] ) msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout == "" assert stderr == "" _check_master(kc, expected=True) _check_master(kc, expected=True, stream="stderr") @flaky(max_runs=3) @pytest.mark.skipif( sys.platform == "win32" or (sys.platform == "darwin" and sys.version_info >= (3, 8)), reason="subprocess prints fail on Windows and MacOS Python 3.8+", ) def test_subprocess_error(): """error in mp.Process doesn't crash""" with new_kernel() as kc: code = "\n".join( [ "import multiprocessing as mp", "p = mp.Process(target=int, args=('hi',))", "p.start()", "p.join()", ] ) msg_id, content = execute(kc=kc, code=code) stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout == "" assert "ValueError" in stderr _check_master(kc, expected=True) _check_master(kc, expected=True, stream="stderr") # raw_input tests def test_raw_input(): """test input""" with kernel() as kc: input_f = "input" theprompt = "prompt> " code = 
f'print({input_f}("{theprompt}"))' kc.execute(code, allow_stdin=True) msg = kc.get_stdin_msg(timeout=TIMEOUT) assert msg["header"]["msg_type"] == "input_request" content = msg["content"] assert content["prompt"] == theprompt text = "some text" kc.input(text) reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "ok" stdout, stderr = assemble_output(kc.get_iopub_msg) assert stdout == text + "\n" def test_save_history(): # Saving history from the kernel with %hist -f was failing because of # unicode problems on Python 2. with kernel() as kc, TemporaryDirectory() as td: file = os.path.join(td, "hist.out") execute("a=1", kc=kc) wait_for_idle(kc) execute('b="abcþ"', kc=kc) wait_for_idle(kc) _, reply = execute("%hist -f " + file, kc=kc) assert reply["status"] == "ok" with open(file, encoding="utf-8") as f: content = f.read() assert "a=1" in content assert 'b="abcþ"' in content def test_smoke_faulthandler(): pytest.importorskip("faulthandler", reason="this test needs faulthandler") with kernel() as kc: # Note: faulthandler.register is not available on windows. code = "\n".join( [ "import sys", "import faulthandler", "import signal", "faulthandler.enable()", 'if not sys.platform.startswith("win32"):', " faulthandler.register(signal.SIGTERM)", ] ) _, reply = execute(code, kc=kc) assert reply["status"] == "ok", reply.get("traceback", "") def test_help_output(): """ipython kernel --help-all works""" cmd = [sys.executable, "-m", "IPython", "kernel", "--help-all"] proc = subprocess.run(cmd, timeout=30, capture_output=True, check=True) assert proc.returncode == 0, proc.stderr assert b"Traceback" not in proc.stderr assert b"Options" in proc.stdout assert b"Class" in proc.stdout def test_is_complete(): with kernel() as kc: # There are more test cases for this in core - here we just check # that the kernel exposes the interface correctly. 
kc.is_complete("2+2") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "complete" # SyntaxError kc.is_complete("raise = 2") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "invalid" kc.is_complete("a = [1,\n2,") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "incomplete" assert reply["content"]["indent"] == "" # Cell magic ends on two blank lines for console UIs kc.is_complete("%%timeit\na\n\n") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "complete" @pytest.mark.skipif(sys.platform != "win32", reason="only run on Windows") def test_complete(): with kernel() as kc: execute("a = 1", kc=kc) wait_for_idle(kc) cell = "import IPython\nb = a." kc.complete(cell) reply = kc.get_shell_msg(timeout=TIMEOUT) c = reply["content"] assert c["status"] == "ok" start = cell.find("a.") end = start + 2 assert c["cursor_end"] == cell.find("a.") + 2 assert c["cursor_start"] <= end # there are many right answers for cursor_start, # so verify application of the completion # rather than the value of cursor_start matches = c["matches"] assert matches for m in matches: completed = cell[: c["cursor_start"]] + m assert completed.startswith(cell) def test_matplotlib_inline_on_import(): pytest.importorskip("matplotlib", reason="this test requires matplotlib") with kernel() as kc: cell = "\n".join( ["import matplotlib, matplotlib.pyplot as plt", "backend = matplotlib.get_backend()"] ) _, reply = execute(cell, user_expressions={"backend": "backend"}, kc=kc) _check_status(reply) backend_bundle = reply["user_expressions"]["backend"] _check_status(backend_bundle) assert "backend_inline" in backend_bundle["data"]["text/plain"] def test_message_order(): N = 100 # number of messages to test with kernel() as kc: _, reply = execute("a = 1", kc=kc) _check_status(reply) offset = reply["execution_count"] + 1 cell = "a += 1\na" msg_ids = [] # submit N executions as fast as we can for 
_ in range(N): msg_ids.append(kc.execute(cell)) # check message-handling order for i, msg_id in enumerate(msg_ids, offset): reply = kc.get_shell_msg(timeout=TIMEOUT) _check_status(reply["content"]) assert reply["content"]["execution_count"] == i assert reply["parent_header"]["msg_id"] == msg_id @pytest.mark.skipif( sys.platform.startswith("linux") or sys.platform.startswith("darwin"), reason="test only on windows", ) def test_unc_paths(): with kernel() as kc, TemporaryDirectory() as td: drive_file_path = os.path.join(td, "unc.txt") with open(drive_file_path, "w+") as f: f.write("# UNC test") unc_root = "\\\\localhost\\C$" file_path = os.path.splitdrive(os.path.dirname(drive_file_path))[1] unc_file_path = os.path.join(unc_root, file_path[1:]) kc.execute(f"cd {unc_file_path:s}") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "ok" out, err = assemble_output(kc.get_iopub_msg) assert unc_file_path in out flush_channels(kc) kc.execute(code="ls") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "ok" out, err = assemble_output(kc.get_iopub_msg) assert "unc.txt" in out kc.execute(code="cd") reply = kc.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "ok" @pytest.mark.skipif( platform.python_implementation() == "PyPy", reason="does not work on PyPy", ) def test_shutdown(): """Kernel exits after polite shutdown_request""" with new_kernel() as kc: km = kc.parent execute("a = 1", kc=kc) wait_for_idle(kc) kc.shutdown() for _ in range(300): # 30s timeout if km.is_alive(): time.sleep(0.1) else: break assert not km.is_alive() def test_interrupt_during_input(): """ The kernel exits after being interrupted while waiting in input(). input() appears to have issues other functions don't, and it needs to be interruptible in order for pdb to be interruptible. """ with new_kernel() as kc: km = kc.parent msg_id = kc.execute("input()") time.sleep(1) # Make sure it's actually waiting for input. 
km.interrupt_kernel() from .test_message_spec import validate_message # If we failed to interrupt interrupt, this will timeout: reply = get_reply(kc, msg_id, TIMEOUT) validate_message(reply, "execute_reply", msg_id) @pytest.mark.skipif(os.name == "nt", reason="Message based interrupt not supported on Windows") def test_interrupt_with_message(): with new_kernel() as kc: km = kc.parent km.kernel_spec.interrupt_mode = "message" msg_id = kc.execute("input()") time.sleep(1) # Make sure it's actually waiting for input. km.interrupt_kernel() from .test_message_spec import validate_message # If we failed to interrupt interrupt, this will timeout: reply = get_reply(kc, msg_id, TIMEOUT) validate_message(reply, "execute_reply", msg_id) @pytest.mark.skipif( "__pypy__" in sys.builtin_module_names, reason="fails on pypy", ) def test_interrupt_during_pdb_set_trace(): """ The kernel exits after being interrupted while waiting in pdb.set_trace(). Merely testing input() isn't enough, pdb has its own issues that need to be handled in addition. This test will fail with versions of IPython < 7.14.0. """ with new_kernel() as kc: km = kc.parent msg_id = kc.execute("import pdb; pdb.set_trace()") msg_id2 = kc.execute("3 + 4") time.sleep(1) # Make sure it's actually waiting for input. 
km.interrupt_kernel() from .test_message_spec import validate_message # If we failed to interrupt interrupt, this will timeout: reply = get_reply(kc, msg_id, TIMEOUT) validate_message(reply, "execute_reply", msg_id) # If we failed to interrupt interrupt, this will timeout: reply = get_reply(kc, msg_id2, TIMEOUT) validate_message(reply, "execute_reply", msg_id2) def test_control_thread_priority(): N = 5 with new_kernel() as kc: msg_id = kc.execute("pass") get_reply(kc, msg_id) sleep_msg_id = kc.execute("import asyncio; await asyncio.sleep(2)") # submit N shell messages shell_msg_ids = [] for i in range(N): shell_msg_ids.append(kc.execute(f"i = {i}")) # ensure all shell messages have arrived at the kernel before any control messages time.sleep(0.5) # at this point, shell messages should be waiting in msg_queue, # rather than zmq while the kernel is still in the middle of processing # the first execution # now send N control messages control_msg_ids = [] for _ in range(N): msg = kc.session.msg("kernel_info_request", {}) kc.control_channel.send(msg) control_msg_ids.append(msg["header"]["msg_id"]) # finally, collect the replies on both channels for comparison get_reply(kc, sleep_msg_id) shell_replies = [] for msg_id in shell_msg_ids: shell_replies.append(get_reply(kc, msg_id)) control_replies = [] for msg_id in control_msg_ids: control_replies.append(get_reply(kc, msg_id, channel="control")) # verify that all control messages were handled before all shell messages shell_dates = [msg["header"]["date"] for msg in shell_replies] control_dates = [msg["header"]["date"] for msg in control_replies] # comparing first to last ought to be enough, since queues preserve order # use <= in case of very-fast handling and/or low resolution timers assert control_dates[-1] <= shell_dates[0] def _child(): print("in child", os.getpid()) def _print_and_exit(sig, frame): print(f"Received signal {sig}") # take some time so retries are triggered time.sleep(0.5) sys.exit(-sig) 
signal.signal(signal.SIGTERM, _print_and_exit) time.sleep(30) def _start_children(): ip = IPython.get_ipython() # type:ignore[attr-defined] ns = ip.user_ns cmd = [sys.executable, "-c", f"from {__name__} import _child; _child()"] child_pg = Popen(cmd, start_new_session=False) child_newpg = Popen(cmd, start_new_session=True) ns["pid"] = os.getpid() ns["child_pg"] = child_pg.pid ns["child_newpg"] = child_newpg.pid # give them time to start up and register signal handlers time.sleep(1) @pytest.mark.skipif( platform.python_implementation() == "PyPy", reason="does not work on PyPy", ) @pytest.mark.skipif( sys.platform.lower() == "linux", reason="Stalls on linux", ) def test_shutdown_subprocesses(): """Kernel exits after polite shutdown_request""" with new_kernel() as kc: km = kc.parent msg_id, reply = execute( f"from {__name__} import _start_children\n_start_children()", kc=kc, user_expressions={ "pid": "pid", "child_pg": "child_pg", "child_newpg": "child_newpg", }, ) print(reply) expressions = reply["user_expressions"] kernel_process = psutil.Process(int(expressions["pid"]["data"]["text/plain"])) child_pg = psutil.Process(int(expressions["child_pg"]["data"]["text/plain"])) child_newpg = psutil.Process(int(expressions["child_newpg"]["data"]["text/plain"])) wait_for_idle(kc) kc.shutdown() for _ in range(300): # 30s timeout if km.is_alive(): time.sleep(0.1) else: break assert not km.is_alive() assert not kernel_process.is_running() # child in the process group shut down assert not child_pg.is_running() # child outside the process group was not shut down (unix only) if os.name != "nt": assert child_newpg.is_running() try: child_newpg.terminate() except psutil.NoSuchProcess: pass ipykernel-6.29.5/tests/test_kernel_direct.py000066400000000000000000000131711464053401500212110ustar00rootroot00000000000000"""test the IPython Kernel""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import asyncio import os import warnings import pytest if os.name == "nt": pytest.skip("skipping tests on windows", allow_module_level=True) async def test_direct_kernel_info_request(kernel): reply = await kernel.test_shell_message("kernel_info_request", {}) assert reply["header"]["msg_type"] == "kernel_info_reply" async def test_direct_execute_request(kernel): reply = await kernel.test_shell_message("execute_request", dict(code="hello", silent=False)) assert reply["header"]["msg_type"] == "execute_reply" async def test_direct_execute_request_aborting(kernel): kernel._aborting = True reply = await kernel.test_shell_message("execute_request", dict(code="hello", silent=False)) assert reply["header"]["msg_type"] == "execute_reply" assert reply["content"]["status"] == "aborted" async def test_direct_execute_request_error(kernel): await kernel.execute_request(None, None, None) async def test_complete_request(kernel): reply = await kernel.test_shell_message("complete_request", dict(code="hello", cursor_pos=0)) assert reply["header"]["msg_type"] == "complete_reply" async def test_inspect_request(kernel): reply = await kernel.test_shell_message("inspect_request", dict(code="hello", cursor_pos=0)) assert reply["header"]["msg_type"] == "inspect_reply" async def test_history_request(kernel): reply = await kernel.test_shell_message( "history_request", dict(hist_access_type="", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await kernel.test_shell_message( "history_request", dict(hist_access_type="tail", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await kernel.test_shell_message( "history_request", dict(hist_access_type="range", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" reply = await kernel.test_shell_message( "history_request", dict(hist_access_type="search", output="", raw="") ) assert reply["header"]["msg_type"] == "history_reply" async def 
test_comm_info_request(kernel): reply = await kernel.test_shell_message("comm_info_request") assert reply["header"]["msg_type"] == "comm_info_reply" async def test_direct_interrupt_request(kernel): reply = await kernel.test_control_message("interrupt_request", {}) assert reply["header"]["msg_type"] == "interrupt_reply" assert reply["content"] == {"status": "ok"} # test failure on interrupt request def raiseOSError(): msg = "evalue" raise OSError(msg) kernel._send_interrupt_children = raiseOSError reply = await kernel.test_control_message("interrupt_request", {}) assert reply["header"]["msg_type"] == "interrupt_reply" assert reply["content"]["status"] == "error" assert reply["content"]["ename"] == "OSError" assert reply["content"]["evalue"] == "evalue" assert len(reply["content"]["traceback"]) > 0 async def test_direct_shutdown_request(kernel): reply = await kernel.test_shell_message("shutdown_request", dict(restart=False)) assert reply["header"]["msg_type"] == "shutdown_reply" reply = await kernel.test_shell_message("shutdown_request", dict(restart=True)) assert reply["header"]["msg_type"] == "shutdown_reply" async def test_is_complete_request(kernel): reply = await kernel.test_shell_message("is_complete_request", dict(code="hello")) assert reply["header"]["msg_type"] == "is_complete_reply" async def test_direct_debug_request(kernel): reply = await kernel.test_control_message("debug_request", {}) assert reply["header"]["msg_type"] == "debug_reply" async def test_deprecated_features(kernel): with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) header = kernel._parent_header assert isinstance(header, dict) shell_streams = kernel.shell_streams assert len(shell_streams) == 1 assert shell_streams[0] == kernel.shell_stream warnings.simplefilter("ignore", RuntimeWarning) kernel.shell_streams = [kernel.shell_stream, kernel.shell_stream] async def test_process_control(kernel): from jupyter_client.session import DELIM class FakeMsg: def 
__init__(self, bytes): self.bytes = bytes await kernel.process_control([FakeMsg(DELIM), 1]) msg = kernel._prep_msg("does_not_exist") await kernel.process_control(msg) def test_should_handle(kernel): msg = kernel.session.msg("debug_request", {}) kernel.aborted.add(msg["header"]["msg_id"]) assert not kernel.should_handle(kernel.control_stream, msg, []) async def test_dispatch_shell(kernel): from jupyter_client.session import DELIM class FakeMsg: def __init__(self, bytes): self.bytes = bytes await kernel.dispatch_shell([FakeMsg(DELIM), 1]) msg = kernel._prep_msg("does_not_exist") await kernel.dispatch_shell(msg) async def test_do_one_iteration(kernel): kernel.msg_queue = asyncio.Queue() await kernel.do_one_iteration() async def test_publish_debug_event(kernel): kernel._publish_debug_event({}) async def test_connect_request(kernel): await kernel.connect_request(kernel.shell_stream, "foo", {}) async def test_send_interrupt_children(kernel): kernel._send_interrupt_children() # TODO: this causes deadlock # async def test_direct_usage_request(kernel): # reply = await kernel.test_control_message("usage_request", {}) # assert reply['header']['msg_type'] == 'usage_reply' ipykernel-6.29.5/tests/test_kernelapp.py000066400000000000000000000076241464053401500203660ustar00rootroot00000000000000import json import os import threading import time from unittest.mock import patch import pytest from jupyter_core.paths import secure_write from traitlets.config.loader import Config from ipykernel.kernelapp import IPKernelApp from .conftest import MockKernel from .utils import TemporaryWorkingDirectory try: import trio except ImportError: trio = None @pytest.mark.skipif(os.name == "nt", reason="requires ipc") def test_init_ipc_socket(): app = IPKernelApp(transport="ipc") app.init_sockets() app.cleanup_connection_file() app.close() def test_blackhole(): app = IPKernelApp() app.no_stderr = True app.no_stdout = True app.init_blackhole() def test_start_app(): app = IPKernelApp() app.kernel = 
MockKernel() def trigger_stop(): time.sleep(1) app.io_loop.add_callback(app.io_loop.stop) thread = threading.Thread(target=trigger_stop) thread.start() app.init_sockets() app.start() app.cleanup_connection_file() app.kernel.destroy() app.close() @pytest.mark.skipif(os.name == "nt", reason="permission errors on windows") def test_merge_connection_file(): cfg = Config() with TemporaryWorkingDirectory() as d: cfg.ProfileDir.location = d cf = os.path.join(d, "kernel.json") initial_connection_info = { "ip": "*", "transport": "tcp", "shell_port": 0, "hb_port": 0, "iopub_port": 0, "stdin_port": 0, "control_port": 53555, "key": "abc123", "signature_scheme": "hmac-sha256", "kernel_name": "My Kernel", } # We cannot use connect.write_connection_file since # it replaces port number 0 with a random port # and we want IPKernelApp to do that replacement. with secure_write(cf) as f: json.dump(initial_connection_info, f) assert os.path.exists(cf) app = IPKernelApp(config=cfg, connection_file=cf) # Calling app.initialize() does not work in the test, so we call the relevant functions that initialize() calls # We must pass in an empty argv, otherwise the default is to try to parse the test runner's argv super(IPKernelApp, app).initialize(argv=[""]) app.init_connection_file() app.init_sockets() app.init_heartbeat() app.write_connection_file() # Initialize should have merged the actual connection info # with the connection info in the file assert cf == app.abs_connection_file assert os.path.exists(cf) with open(cf) as f: new_connection_info = json.load(f) # ports originally set as 0 have been replaced for port in ("shell", "hb", "iopub", "stdin"): key = f"{port}_port" # We initially had the port as 0 assert initial_connection_info[key] == 0 # the port is not 0 now assert new_connection_info[key] > 0 # the port matches the port the kernel actually used assert new_connection_info[key] == getattr(app, key), f"{key}" del new_connection_info[key] del initial_connection_info[key] # The 
wildcard ip address was also replaced assert new_connection_info["ip"] != "*" del new_connection_info["ip"] del initial_connection_info["ip"] # everything else in the connection file is the same assert initial_connection_info == new_connection_info app.close() os.remove(cf) @pytest.mark.skipif(trio is None, reason="requires trio") def test_trio_loop(): app = IPKernelApp(trio_loop=True) app.kernel = MockKernel() app.init_sockets() with patch("ipykernel.trio_runner.TrioRunner.run", lambda _: None): app.start() app.cleanup_connection_file() app.io_loop.add_callback(app.io_loop.stop) app.kernel.destroy() app.close() ipykernel-6.29.5/tests/test_kernelspec.py000066400000000000000000000136131464053401500205330ustar00rootroot00000000000000# Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import json import os import platform import shutil import sys import tempfile from unittest import mock import pytest from jupyter_core.paths import jupyter_data_dir from ipykernel.kernelspec import ( KERNEL_NAME, RESOURCES, InstallIPythonKernelSpecApp, get_kernel_dict, install, make_ipkernel_cmd, write_kernel_spec, ) pjoin = os.path.join is_cpython = platform.python_implementation() == "CPython" def test_make_ipkernel_cmd(): cmd = make_ipkernel_cmd() assert cmd == [sys.executable, "-m", "ipykernel_launcher", "-f", "{connection_file}"] def assert_kernel_dict(d): assert d["argv"] == make_ipkernel_cmd() assert d["display_name"] == "Python %i (ipykernel)" % sys.version_info[0] assert d["language"] == "python" def test_get_kernel_dict(): d = get_kernel_dict() assert_kernel_dict(d) def assert_kernel_dict_with_profile(d): assert d["argv"] == make_ipkernel_cmd(extra_arguments=["--profile", "test"]) assert d["display_name"] == "Python %i (ipykernel)" % sys.version_info[0] assert d["language"] == "python" def test_get_kernel_dict_with_profile(): d = get_kernel_dict(["--profile", "test"]) assert_kernel_dict_with_profile(d) def assert_is_spec(path): 
for fname in os.listdir(RESOURCES): dst = pjoin(path, fname) assert os.path.exists(dst) kernel_json = pjoin(path, "kernel.json") assert os.path.exists(kernel_json) with open(kernel_json, encoding="utf8") as f: json.load(f) def test_write_kernel_spec(): path = write_kernel_spec() assert_is_spec(path) shutil.rmtree(path) def test_write_kernel_spec_path(): path = os.path.join(tempfile.mkdtemp(), KERNEL_NAME) path2 = write_kernel_spec(path) assert path == path2 assert_is_spec(path) shutil.rmtree(path) def test_install_kernelspec(): path = tempfile.mkdtemp() try: InstallIPythonKernelSpecApp.launch_instance(argv=["--prefix", path]) assert_is_spec(os.path.join(path, "share", "jupyter", "kernels", KERNEL_NAME)) finally: shutil.rmtree(path) def test_install_user(): tmp = tempfile.mkdtemp() with mock.patch.dict(os.environ, {"HOME": tmp}): install(user=True) data_dir = jupyter_data_dir() assert_is_spec(os.path.join(data_dir, "kernels", KERNEL_NAME)) def test_install(): system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): install() assert_is_spec(os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME)) def test_install_profile(): system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): install(profile="Test") spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json") with open(spec_file) as f: spec = json.load(f) assert spec["display_name"].endswith(" [profile=Test]") assert spec["argv"][-2:] == ["--profile", "Test"] def test_install_display_name_overrides_profile(): system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): install(display_name="Display", profile="Test") spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json") with open(spec_file) as f: spec = json.load(f) assert spec["display_name"] == "Display" 
@pytest.mark.parametrize("env", [None, dict(spam="spam"), dict(spam="spam", foo="bar")]) def test_install_env(tmp_path, env): # python 3.5 // tmp_path must be converted to str with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [str(tmp_path)]): install(env=env) spec = tmp_path / "kernels" / KERNEL_NAME / "kernel.json" with spec.open() as f: spec = json.load(f) if env: assert len(env) == len(spec["env"]) for k, v in env.items(): assert spec["env"][k] == v else: assert "env" not in spec @pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11") def test_install_frozen_modules_on(): system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): install(frozen_modules=True) spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json") with open(spec_file) as f: spec = json.load(f) assert spec["env"]["PYDEVD_DISABLE_FILE_VALIDATION"] == "1" assert "-Xfrozen_modules=off" not in spec["argv"] @pytest.mark.skipif(sys.version_info < (3, 11) or not is_cpython, reason="requires cPython 3.11") def test_install_frozen_modules_off(): system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): install(frozen_modules=False) spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json") with open(spec_file) as f: spec = json.load(f) assert "env" not in spec assert spec["argv"][1] == "-Xfrozen_modules=off" @pytest.mark.skipif( sys.version_info >= (3, 11) or is_cpython, reason="checks versions older than 3.11 and other Python implementations", ) def test_install_frozen_modules_no_op(): # ensure we do not add add Xfrozen_modules on older Python versions # (although cPython does not error out on unknown X options as of 3.8) system_jupyter_dir = tempfile.mkdtemp() with mock.patch("jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH", [system_jupyter_dir]): 
install(frozen_modules=False) spec_file = os.path.join(system_jupyter_dir, "kernels", KERNEL_NAME, "kernel.json") with open(spec_file) as f: spec = json.load(f) assert "-Xfrozen_modules=off" not in spec["argv"] ipykernel-6.29.5/tests/test_message_spec.py000066400000000000000000000400511464053401500210320ustar00rootroot00000000000000"""Test suite for our zeromq-based message specification.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import re import sys from queue import Empty import pytest from jupyter_client._version import version_info from jupyter_client.blocking.client import BlockingKernelClient from packaging.version import Version as V from traitlets import Bool, Dict, Enum, HasTraits, Integer, List, TraitError, Unicode, observe from .utils import TIMEOUT, execute, flush_channels, get_reply, start_global_kernel # ----------------------------------------------------------------------------- # Globals # ----------------------------------------------------------------------------- KC: BlockingKernelClient = None # type:ignore def setup(): global KC KC = start_global_kernel() # ----------------------------------------------------------------------------- # Message Spec References # ----------------------------------------------------------------------------- class Reference(HasTraits): """ Base class for message spec specification testing. This class is the core of the message specification test. The idea is that child classes implement trait attributes for each message keys, so that message keys can be tested against these traits using :meth:`check` method. 
""" def check(self, d): """validate a dict against our traits""" for key in self.trait_names(): assert key in d # FIXME: always allow None, probably not a good idea if d[key] is None: continue try: setattr(self, key, d[key]) except TraitError as e: raise AssertionError(str(e)) from None class Version(Unicode): def __init__(self, *args, **kwargs): self.min = kwargs.pop("min", None) self.max = kwargs.pop("max", None) kwargs["default_value"] = self.min super().__init__(*args, **kwargs) def validate(self, obj, value): if self.min and V(value) < V(self.min): msg = f"bad version: {value} < {self.min}" raise TraitError(msg) if self.max and (V(value) > V(self.max)): msg = f"bad version: {value} > {self.max}" raise TraitError(msg) class RMessage(Reference): msg_id = Unicode() msg_type = Unicode() header = Dict() parent_header = Dict() content = Dict() def check(self, d): super().check(d) RHeader().check(self.header) if self.parent_header: RHeader().check(self.parent_header) class RHeader(Reference): msg_id = Unicode() msg_type = Unicode() session = Unicode() username = Unicode() version = Version(min="5.0") mime_pat = re.compile(r"^[\w\-\+\.]+/[\w\-\+\.]+$") class MimeBundle(Reference): metadata = Dict() data = Dict() @observe("data") def _on_data_changed(self, change): for k, v in change["new"].items(): assert mime_pat.match(k) assert isinstance(v, str) # shell replies class Reply(Reference): status = Enum(("ok", "error"), default_value="ok") class ExecuteReply(Reply): execution_count = Integer() def check(self, d): Reference.check(self, d) if d["status"] == "ok": ExecuteReplyOkay().check(d) elif d["status"] == "error": ExecuteReplyError().check(d) elif d["status"] == "aborted": ExecuteReplyAborted().check(d) class ExecuteReplyOkay(Reply): status = Enum(("ok",)) user_expressions = Dict() class ExecuteReplyError(Reply): status = Enum(("error",)) ename = Unicode() evalue = Unicode() traceback = List(Unicode()) class ExecuteReplyAborted(Reply): status = Enum(("aborted",)) 
class InspectReply(Reply, MimeBundle): found = Bool() class ArgSpec(Reference): args = List(Unicode()) varargs = Unicode() varkw = Unicode() defaults = List() class Status(Reference): execution_state = Enum(("busy", "idle", "starting"), default_value="busy") class CompleteReply(Reply): matches = List(Unicode()) cursor_start = Integer() cursor_end = Integer() status = Unicode() # type:ignore class LanguageInfo(Reference): name = Unicode("python") version = Unicode(sys.version.split()[0]) class KernelInfoReply(Reply): protocol_version = Version(min="5.0") implementation = Unicode("ipython") implementation_version = Version(min="2.1") language_info = Dict() banner = Unicode() def check(self, d): Reference.check(self, d) LanguageInfo().check(d["language_info"]) class ConnectReply(Reference): shell_port = Integer() control_port = Integer() stdin_port = Integer() iopub_port = Integer() hb_port = Integer() class CommInfoReply(Reply): comms = Dict() class IsCompleteReply(Reference): status = Enum(("complete", "incomplete", "invalid", "unknown"), default_value="complete") def check(self, d): Reference.check(self, d) if d["status"] == "incomplete": IsCompleteReplyIncomplete().check(d) class IsCompleteReplyIncomplete(Reference): indent = Unicode() # IOPub messages class ExecuteInput(Reference): code = Unicode() execution_count = Integer() class Error(ExecuteReplyError): """Errors are the same as ExecuteReply, but without status""" status = None # type:ignore # no status field class Stream(Reference): name = Enum(("stdout", "stderr"), default_value="stdout") text = Unicode() class DisplayData(MimeBundle): pass class ExecuteResult(MimeBundle): execution_count = Integer() class HistoryReply(Reply): history = List(List()) references = { "execute_reply": ExecuteReply(), "inspect_reply": InspectReply(), "status": Status(), "complete_reply": CompleteReply(), "kernel_info_reply": KernelInfoReply(), "connect_reply": ConnectReply(), "comm_info_reply": CommInfoReply(), 
"is_complete_reply": IsCompleteReply(), "execute_input": ExecuteInput(), "execute_result": ExecuteResult(), "history_reply": HistoryReply(), "error": Error(), "stream": Stream(), "display_data": DisplayData(), "header": RHeader(), } # ----------------------------------------------------------------------------- # Specifications of `content` part of the reply messages. # ----------------------------------------------------------------------------- def validate_message(msg, msg_type=None, parent=None): """validate a message This is a generator, and must be iterated through to actually trigger each test. If msg_type and/or parent are given, the msg_type and/or parent msg_id are compared with the given values. """ RMessage().check(msg) if msg_type: assert msg["msg_type"] == msg_type if parent: assert msg["parent_header"]["msg_id"] == parent content = msg["content"] ref = references[msg["msg_type"]] ref.check(content) # ----------------------------------------------------------------------------- # Tests # ----------------------------------------------------------------------------- # Shell channel def test_execute(): flush_channels() msg_id = KC.execute(code="x=1") reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "execute_reply", msg_id) def test_execute_silent(): flush_channels() msg_id, reply = execute(code="x=1", silent=True) # flush status=idle status = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(status, "status", msg_id) assert status["content"]["execution_state"] == "idle" with pytest.raises(Empty): KC.get_iopub_msg(timeout=0.1) count = reply["execution_count"] msg_id, reply = execute(code="x=2", silent=True) # flush status=idle status = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(status, "status", msg_id) assert status["content"]["execution_state"] == "idle" with pytest.raises(Empty): KC.get_iopub_msg(timeout=0.1) count_2 = reply["execution_count"] assert count_2 == count def test_execute_error(): flush_channels() msg_id, reply = 
execute(code="1/0") assert reply["status"] == "error" assert reply["ename"] == "ZeroDivisionError" error = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(error, "error", msg_id) def test_execute_inc(): """execute request should increment execution_count""" flush_channels() _, reply = execute(code="x=1") count = reply["execution_count"] flush_channels() _, reply = execute(code="x=2") count_2 = reply["execution_count"] assert count_2 == count + 1 def test_execute_stop_on_error(): """execute request should not abort execution queue with stop_on_error False""" flush_channels() fail = "\n".join( [ # sleep to ensure subsequent message is waiting in the queue to be aborted # async sleep to ensure coroutines are processing while this happens "import asyncio", "await asyncio.sleep(1)", "raise ValueError()", ] ) KC.execute(code=fail) KC.execute(code='print("Hello")') KC.execute(code='print("world")') reply = KC.get_shell_msg(timeout=TIMEOUT) print(reply) reply = KC.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "aborted" # second message, too reply = KC.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "aborted" flush_channels() KC.execute(code=fail, stop_on_error=False) KC.execute(code='print("Hello")') KC.get_shell_msg(timeout=TIMEOUT) reply = KC.get_shell_msg(timeout=TIMEOUT) assert reply["content"]["status"] == "ok" def test_non_execute_stop_on_error(): """test that non-execute_request's are not aborted after an error""" flush_channels() fail = "\n".join( [ # sleep to ensure subsequent message is waiting in the queue to be aborted "import time", "time.sleep(0.5)", "raise ValueError", ] ) KC.execute(code=fail) KC.kernel_info() KC.comm_info() KC.inspect(code="print") reply = KC.get_shell_msg(timeout=TIMEOUT) # execute assert reply["content"]["status"] == "error" reply = KC.get_shell_msg(timeout=TIMEOUT) # kernel_info assert reply["content"]["status"] == "ok" reply = KC.get_shell_msg(timeout=TIMEOUT) # comm_info assert 
reply["content"]["status"] == "ok" reply = KC.get_shell_msg(timeout=TIMEOUT) # inspect assert reply["content"]["status"] == "ok" def test_user_expressions(): flush_channels() msg_id, reply = execute(code="x=1", user_expressions=dict(foo="x+1")) user_expressions = reply["user_expressions"] assert user_expressions == { "foo": { "status": "ok", "data": {"text/plain": "2"}, "metadata": {}, } } def test_user_expressions_fail(): flush_channels() msg_id, reply = execute(code="x=0", user_expressions=dict(foo="nosuchname")) user_expressions = reply["user_expressions"] foo = user_expressions["foo"] assert foo["status"] == "error" assert foo["ename"] == "NameError" def test_oinfo(): flush_channels() msg_id = KC.inspect("a") reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "inspect_reply", msg_id) def test_oinfo_found(): flush_channels() msg_id, reply = execute(code="a=5") msg_id = KC.inspect("a") reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "inspect_reply", msg_id) content = reply["content"] assert content["found"] text = content["data"]["text/plain"] assert "Type:" in text assert "Docstring:" in text def test_oinfo_detail(): flush_channels() msg_id, reply = execute(code="ip=get_ipython()") msg_id = KC.inspect("ip.object_inspect", cursor_pos=10, detail_level=1) reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "inspect_reply", msg_id) content = reply["content"] assert content["found"] text = content["data"]["text/plain"] assert "Signature:" in text assert "Source:" in text def test_oinfo_not_found(): flush_channels() msg_id = KC.inspect("does_not_exist") reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "inspect_reply", msg_id) content = reply["content"] assert not content["found"] def test_complete(): flush_channels() msg_id, reply = execute(code="alpha = albert = 5") msg_id = KC.complete("al", 2) reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "complete_reply", msg_id) matches = 
reply["content"]["matches"] for name in ("alpha", "albert"): assert name in matches def test_kernel_info_request(): flush_channels() msg_id = KC.kernel_info() reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "kernel_info_reply", msg_id) def test_connect_request(): flush_channels() msg = KC.session.msg("connect_request") KC.shell_channel.send(msg) msg_id = msg["header"]["msg_id"] reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "connect_reply", msg_id) @pytest.mark.skipif( version_info < (5, 0), reason="earlier Jupyter Client don't have comm_info", ) def test_comm_info_request(): flush_channels() msg_id = KC.comm_info() reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "comm_info_reply", msg_id) def test_single_payload(): """ We want to test the set_next_input is not triggered several time per cell. This is (was ?) mostly due to the fact that `?` in a loop would trigger several set_next_input. I'm tempted to thing that we actually want to _allow_ multiple set_next_input (that's users' choice). But that `?` itself (and ?'s transform) should avoid setting multiple set_next_input). 
""" flush_channels() msg_id, reply = execute( code="ip = get_ipython()\nfor i in range(3):\n ip.set_next_input('Hello There')\n" ) payload = reply["payload"] next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"] assert len(next_input_pls) == 1 def test_is_complete(): flush_channels() msg_id = KC.is_complete("a = 1") reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "is_complete_reply", msg_id) def test_history_range(): flush_channels() KC.execute(code="x=1", store_history=True) KC.get_shell_msg(timeout=TIMEOUT) msg_id = KC.history(hist_access_type="range", raw=True, output=True, start=1, stop=2, session=0) reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "history_reply", msg_id) content = reply["content"] assert len(content["history"]) == 1 def test_history_tail(): flush_channels() KC.execute(code="x=1", store_history=True) KC.get_shell_msg(timeout=TIMEOUT) msg_id = KC.history(hist_access_type="tail", raw=True, output=True, n=1, session=0) reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "history_reply", msg_id) content = reply["content"] assert len(content["history"]) == 1 def test_history_search(): flush_channels() KC.execute(code="x=1", store_history=True) KC.get_shell_msg(timeout=TIMEOUT) msg_id = KC.history( hist_access_type="search", raw=True, output=True, n=1, pattern="*", session=0 ) reply = get_reply(KC, msg_id, TIMEOUT) validate_message(reply, "history_reply", msg_id) content = reply["content"] assert len(content["history"]) == 1 # IOPub channel def test_stream(): flush_channels() msg_id, reply = execute("print('hi')") stdout = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(stdout, "stream", msg_id) content = stdout["content"] assert content["text"] == "hi\n" def test_display_data(): flush_channels() msg_id, reply = execute("from IPython.display import display; display(1)") display = KC.get_iopub_msg(timeout=TIMEOUT) validate_message(display, "display_data", parent=msg_id) data = 
display["content"]["data"] assert data["text/plain"] == "1" ipykernel-6.29.5/tests/test_parentpoller.py000066400000000000000000000021141464053401500211010ustar00rootroot00000000000000import os import sys import warnings from unittest import mock import pytest from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows @pytest.mark.skipif(os.name == "nt", reason="only works on posix") def test_parent_poller_unix(): poller = ParentPollerUnix() with mock.patch("os.getppid", lambda: 1): # noqa: PT008 def exit_mock(*args): sys.exit(1) with mock.patch("os._exit", exit_mock), pytest.raises(SystemExit): poller.run() def mock_getppid(): msg = "hi" raise ValueError(msg) with mock.patch("os.getppid", mock_getppid), pytest.raises(ValueError): # noqa: PT011 poller.run() @pytest.mark.skipif(os.name != "nt", reason="only works on windows") def test_parent_poller_windows(): poller = ParentPollerWindows(interrupt_handle=1) def mock_wait(*args, **kwargs): return -1 with mock.patch("ctypes.windll.kernel32.WaitForMultipleObjects", mock_wait): # noqa with warnings.catch_warnings(): warnings.simplefilter("ignore") poller.run() ipykernel-6.29.5/tests/test_pickleutil.py000066400000000000000000000023701464053401500205430ustar00rootroot00000000000000import pickle import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore") from ipykernel.pickleutil import can, uncan def interactive(f): f.__module__ = "__main__" return f def dumps(obj): return pickle.dumps(can(obj)) def loads(obj): return uncan(pickle.loads(obj)) def test_no_closure(): @interactive def foo(): a = 5 return a pfoo = dumps(foo) bar = loads(pfoo) assert foo() == bar() def test_generator_closure(): # this only creates a closure on Python 3 @interactive def foo(): i = "i" r = [i for j in (1, 2)] return r pfoo = dumps(foo) bar = loads(pfoo) assert foo() == bar() def test_nested_closure(): @interactive def foo(): i = "i" def g(): return i return g() pfoo = dumps(foo) bar = loads(pfoo) assert foo() == 
bar() def test_closure(): i = "i" @interactive def foo(): return i pfoo = dumps(foo) bar = loads(pfoo) assert foo() == bar() def test_uncan_bytes_buffer(): data = b"data" canned = can(data) canned.buffers = [memoryview(buf) for buf in canned.buffers] out = uncan(canned) assert out == data ipykernel-6.29.5/tests/test_start_kernel.py000066400000000000000000000037141464053401500210760ustar00rootroot00000000000000import os from textwrap import dedent import pytest from flaky import flaky from .test_embed_kernel import setup_kernel TIMEOUT = 15 if os.name == "nt": pytest.skip("skipping tests on windows", allow_module_level=True) @flaky(max_runs=3) def test_ipython_start_kernel_userns(): cmd = dedent( """ from ipykernel.kernelapp import launch_new_instance ns = {"custom": 123} launch_new_instance(user_ns=ns) """ ) with setup_kernel(cmd) as client: client.inspect("custom") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "123" in text # user_module should be an instance of DummyMod client.execute("usermod = get_ipython().user_module") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["status"] == "ok" client.inspect("usermod") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["found"] text = content["data"]["text/plain"] assert "DummyMod" in text @flaky(max_runs=3) def test_ipython_start_kernel_no_userns(): # Issue #4188 - user_ns should be passed to shell as None, not {} cmd = dedent( """ from ipykernel.kernelapp import launch_new_instance launch_new_instance() """ ) with setup_kernel(cmd) as client: # user_module should not be an instance of DummyMod client.execute("usermod = get_ipython().user_module") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert content["status"] == "ok" client.inspect("usermod") msg = client.get_shell_msg(timeout=TIMEOUT) content = msg["content"] assert 
content["found"] text = content["data"]["text/plain"] assert "DummyMod" not in text ipykernel-6.29.5/tests/test_zmq_shell.py000066400000000000000000000155021464053401500203750ustar00rootroot00000000000000""" Tests for zmq shell / display publisher. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import os import unittest import warnings from queue import Queue from threading import Thread from unittest.mock import MagicMock import pytest import zmq from jupyter_client.session import Session from traitlets import Int from ipykernel.zmqshell import ( # type:ignore InteractiveShell, KernelMagics, ZMQDisplayPublisher, ZMQInteractiveShell, ) class NoReturnDisplayHook: """ A dummy DisplayHook which allows us to monitor the number of times an object is called, but which does *not* return a message when it is called. """ call_count = 0 def __call__(self, obj): self.call_count += 1 class ReturnDisplayHook(NoReturnDisplayHook): """ A dummy DisplayHook with the same counting ability as its base class, but which also returns the same message when it is called. """ def __call__(self, obj): super().__call__(obj) return obj class CounterSession(Session): """ This is a simple subclass to allow us to count the calls made to the session object by the display publisher. """ send_count = Int(0) def send(self, *args, **kwargs): """ A trivial override to just augment the existing call with an increment to the send counter. """ self.send_count += 1 super().send(*args, **kwargs) class ZMQDisplayPublisherTests(unittest.TestCase): """ Tests the ZMQDisplayPublisher in zmqshell.py """ def setUp(self): self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) self.session = CounterSession() self.disp_pub = ZMQDisplayPublisher(session=self.session, pub_socket=self.socket) def tearDown(self): """ We need to close the socket in order to proceed with the tests. 
TODO - There is still an open file handler to '/dev/null', presumably created by zmq. """ self.disp_pub.clear_output() self.socket.close() self.context.term() def test_display_publisher_creation(self): """ Since there's no explicit constructor, here we confirm that keyword args get assigned correctly, and override the defaults. """ assert self.disp_pub.session == self.session assert self.disp_pub.pub_socket == self.socket def test_thread_local_hooks(self): """ Confirms that the thread_local attribute is correctly initialised with an empty list for the display hooks """ assert self.disp_pub._hooks == [] def hook(msg): return msg self.disp_pub.register_hook(hook) assert self.disp_pub._hooks == [hook] q: Queue = Queue() def set_thread_hooks(): q.put(self.disp_pub._hooks) t = Thread(target=set_thread_hooks) t.start() thread_hooks = q.get(timeout=10) assert thread_hooks == [] def test_publish(self): """ Publish should prepare the message and eventually call `send` by default. """ data = dict(a=1) assert self.session.send_count == 0 self.disp_pub.publish(data) assert self.session.send_count == 1 def test_display_hook_halts_send(self): """ If a hook is installed, and on calling the object it does *not* return a message, then we assume that the message has been consumed, and should not be processed (`sent`) in the normal manner. """ data = dict(a=1) hook = NoReturnDisplayHook() self.disp_pub.register_hook(hook) assert hook.call_count == 0 assert self.session.send_count == 0 self.disp_pub.publish(data) assert hook.call_count == 1 assert self.session.send_count == 0 def test_display_hook_return_calls_send(self): """ If a hook is installed and on calling the object it returns a new message, then we assume that this is just a message transformation, and the message should be sent in the usual manner. 
""" data = dict(a=1) hook = ReturnDisplayHook() self.disp_pub.register_hook(hook) assert hook.call_count == 0 assert self.session.send_count == 0 self.disp_pub.publish(data) assert hook.call_count == 1 assert self.session.send_count == 1 def test_unregister_hook(self): """ Once a hook is unregistered, it should not be called during `publish`. """ data = dict(a=1) hook = NoReturnDisplayHook() self.disp_pub.register_hook(hook) assert hook.call_count == 0 assert self.session.send_count == 0 self.disp_pub.publish(data) assert hook.call_count == 1 assert self.session.send_count == 0 # # After unregistering the `NoReturn` hook, any calls # to publish should *not* got through the DisplayHook, # but should instead hit the usual `session.send` call # at the end. # # As a result, the hook call count should *not* increase, # but the session send count *should* increase. # first = self.disp_pub.unregister_hook(hook) self.disp_pub.publish(data) assert bool(first) assert hook.call_count == 1 assert self.session.send_count == 1 # # If a hook is not installed, `unregister_hook` # should return false. 
# second = self.disp_pub.unregister_hook(hook) assert not bool(second) def test_magics(tmp_path): context = zmq.Context() socket = context.socket(zmq.PUB) shell = InteractiveShell() shell.user_ns["hi"] = 1 magics = KernelMagics(shell) tmp_file = tmp_path / "test.txt" tmp_file.write_text("hi", "utf8") magics.edit(str(tmp_file)) payload = shell.payload_manager.read_payload()[0] assert payload["filename"] == str(tmp_file) magics.clear([]) magics.less(str(tmp_file)) if os.name == "posix": magics.man("ls") magics.autosave("10") socket.close() context.destroy() def test_zmq_interactive_shell(kernel): shell = ZMQInteractiveShell() with pytest.raises(RuntimeError): shell.enable_gui("tk") with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) shell.data_pub_class = MagicMock() # type:ignore shell.data_pub shell.kernel = kernel shell.set_next_input("hi") assert shell.get_parent() is None if os.name == "posix": shell.system_piped("ls") else: shell.system_piped("dir") shell.ask_exit() if __name__ == "__main__": unittest.main() ipykernel-6.29.5/tests/utils.py000066400000000000000000000133221464053401500164760ustar00rootroot00000000000000"""utilities for testing IPython kernels""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import atexit import os import sys from contextlib import contextmanager from queue import Empty from subprocess import STDOUT from tempfile import TemporaryDirectory from time import time from jupyter_client import manager from jupyter_client.blocking.client import BlockingKernelClient STARTUP_TIMEOUT = 60 TIMEOUT = 100 KM: manager.KernelManager = None # type:ignore KC: BlockingKernelClient = None # type:ignore def start_new_kernel(**kwargs): """start a new kernel, and return its Manager and Client Integrates with our output capturing for tests. 
""" kwargs["stderr"] = STDOUT try: import nose kwargs["stdout"] = nose.iptest_stdstreams_fileno() except (ImportError, AttributeError): pass return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs) def flush_channels(kc=None): """flush any messages waiting on the queue""" from .test_message_spec import validate_message if kc is None: kc = KC for get_msg in (kc.get_shell_msg, kc.get_iopub_msg): while True: try: msg = get_msg(timeout=0.1) except Empty: break else: validate_message(msg) def get_reply(kc, msg_id, timeout=TIMEOUT, channel="shell"): t0 = time() while True: get_msg = getattr(kc, f"get_{channel}_msg") reply = get_msg(timeout=timeout) if reply["parent_header"]["msg_id"] == msg_id: break # Allow debugging ignored replies print(f"Ignoring reply not to {msg_id}: {reply}") t1 = time() timeout -= t1 - t0 t0 = t1 return reply def execute(code="", kc=None, **kwargs): """wrapper for doing common steps for validating an execution request""" from .test_message_spec import validate_message if kc is None: kc = KC msg_id = kc.execute(code=code, **kwargs) reply = get_reply(kc, msg_id, TIMEOUT) validate_message(reply, "execute_reply", msg_id) busy = kc.get_iopub_msg(timeout=TIMEOUT) validate_message(busy, "status", msg_id) assert busy["content"]["execution_state"] == "busy" if not kwargs.get("silent"): execute_input = kc.get_iopub_msg(timeout=TIMEOUT) validate_message(execute_input, "execute_input", msg_id) assert execute_input["content"]["code"] == code # show tracebacks if present for debugging if reply["content"].get("traceback"): print("\n".join(reply["content"]["traceback"]), file=sys.stderr) return msg_id, reply["content"] def start_global_kernel(): """start the global kernel (if it isn't running) and return its client""" global KM, KC if KM is None: KM, KC = start_new_kernel() atexit.register(stop_global_kernel) else: flush_channels(KC) return KC @contextmanager def kernel(): """Context manager for the global kernel instance Should be used for 
most kernel tests Returns ------- kernel_client: connected KernelClient instance """ yield start_global_kernel() def uses_kernel(test_f): """Decorator for tests that use the global kernel""" def wrapped_test(): with kernel() as kc: test_f(kc) wrapped_test.__doc__ = test_f.__doc__ wrapped_test.__name__ = test_f.__name__ return wrapped_test def stop_global_kernel(): """Stop the global shared kernel instance, if it exists""" global KM, KC KC.stop_channels() KC = None # type:ignore if KM is None: return KM.shutdown_kernel(now=True) KM = None # type:ignore def new_kernel(argv=None): """Context manager for a new kernel in a subprocess Should only be used for tests where the kernel must not be re-used. Returns ------- kernel_client: connected KernelClient instance """ kwargs = {"stderr": STDOUT} try: import nose kwargs["stdout"] = nose.iptest_stdstreams_fileno() except (ImportError, AttributeError): pass if argv is not None: kwargs["extra_arguments"] = argv return manager.run_kernel(**kwargs) def assemble_output(get_msg): """assemble stdout/err from an execution""" stdout = "" stderr = "" while True: msg = get_msg(timeout=1) msg_type = msg["msg_type"] content = msg["content"] if msg_type == "status" and content["execution_state"] == "idle": # idle message signals end of output break elif msg["msg_type"] == "stream": if content["name"] == "stdout": stdout += content["text"] elif content["name"] == "stderr": stderr += content["text"] else: raise KeyError("bad stream: %r" % content["name"]) else: # other output, ignored pass return stdout, stderr def wait_for_idle(kc): while True: msg = kc.get_iopub_msg(timeout=1) msg_type = msg["msg_type"] content = msg["content"] if msg_type == "status" and content["execution_state"] == "idle": break class TemporaryWorkingDirectory(TemporaryDirectory): """ Creates a temporary directory and sets the cwd to that directory. Automatically reverts to previous cwd upon cleanup. Usage example: with TemporaryWorkingDirectory() as tmpdir: ... 
""" def __enter__(self): self.old_wd = os.getcwd() os.chdir(self.name) return super().__enter__() def __exit__(self, exc, value, tb): os.chdir(self.old_wd) return super().__exit__(exc, value, tb)