pax_global_header00006660000000000000000000000064141434212730014513gustar00rootroot0000000000000052 comment=9810bb1e5bac844d4692176d8c5ea7d449881b7f nbclient-0.5.6/000077500000000000000000000000001414342127300133215ustar00rootroot00000000000000nbclient-0.5.6/.bumpversion.cfg000066400000000000000000000002011414342127300164220ustar00rootroot00000000000000[bumpversion] current_version = 0.5.6 commit = True tag = True tag_name = {new_version} [bumpversion:file:nbclient/_version.py] nbclient-0.5.6/.circleci/000077500000000000000000000000001414342127300151545ustar00rootroot00000000000000nbclient-0.5.6/.circleci/config.yml000066400000000000000000000013041414342127300171420ustar00rootroot00000000000000version: 2 jobs: build_docs: docker: - image: circleci/python:3.6-stretch steps: # Get our data and merge with upstream - run: sudo apt-get update - checkout - restore_cache: keys: - cache-pip - run: | pip install --user .[sphinx] - save_cache: key: cache-pip paths: - ~/.cache/pip # Build the docs - run: name: Build docs to store command: | sphinx-build -W -b html docs/ docs/_build/html - store_artifacts: path: docs/_build/html/ destination: html workflows: version: 2 default: jobs: - build_docs nbclient-0.5.6/.github/000077500000000000000000000000001414342127300146615ustar00rootroot00000000000000nbclient-0.5.6/.github/workflows/000077500000000000000000000000001414342127300167165ustar00rootroot00000000000000nbclient-0.5.6/.github/workflows/main.yml000066400000000000000000000027311414342127300203700ustar00rootroot00000000000000name: CI on: push: branches: '*' pull_request: branches: '*' jobs: pre-commit: name: Run pre-commit formatters and linters runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v1 with: python-version: 3.8 - uses: pre-commit/action@v2.0.0 build-n-test-n-coverage: name: Build, test and code coverage runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, 
windows-latest] python-version: [ 3.6, 3.7, 3.8, 3.9 ] exclude: - os: windows-latest python-version: 3.8 - os: windows-latest python-version: 3.9 env: OS: ${{ matrix.os }} PYTHON: '3.9' steps: - name: Checkout uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install -e .[test] pip install tox coverage codecov tox-gh-actions - name: Run the tests run: tox - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: file: ./coverage.xml flags: unittests env_vars: OS,PYTHON name: codecov-umbrella fail_ci_if_error: false nbclient-0.5.6/.gitignore000066400000000000000000000034701414342127300153150ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
# However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. Pipfile Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # Pycharm stuff .idea/ # VSCode .vscode nbclient-0.5.6/.pre-commit-config.yaml000066400000000000000000000014231414342127300176020ustar00rootroot00000000000000ci: autoupdate_schedule: monthly autofix_prs: true repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace # - id: check-json - repo: https://github.com/mgedmin/check-manifest rev: "0.46" hooks: - id: check-manifest additional_dependencies: [setuptools>=46.4.0] - repo: https://github.com/pycqa/isort rev: 5.9.3 hooks: - id: isort - repo: https://github.com/asottile/pyupgrade rev: v2.25.0 hooks: - id: pyupgrade args: [--py36-plus] - repo: https://github.com/psf/black rev: 21.7b0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 rev: 3.9.1 hooks: - id: flake8 nbclient-0.5.6/.readthedocs.yml000066400000000000000000000005241414342127300164100ustar00rootroot00000000000000# .readthedocs.yml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 sphinx: configuration: docs/conf.py formats: all build: image: latest python: version: 3.7 install: - method: pip path: . 
extra_requirements: - sphinx nbclient-0.5.6/CHANGELOG.md000066400000000000000000000001741414342127300151340ustar00rootroot00000000000000# Change Log See the Change Log in the [nbclient documentation](https://nbclient.readthedocs.io/en/latest/changelog.html). nbclient-0.5.6/CONTRIBUTING.md000066400000000000000000000023751414342127300155610ustar00rootroot00000000000000# Contributing We follow the [Jupyter Contribution Workflow](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html) and the [IPython Contributing Guide](https://github.com/ipython/ipython/blob/master/CONTRIBUTING.md). ## Code formatting Use the [pre-commit](https://pre-commit.com/) tool to format and lint the codebase: ```console # to apply to only staged files $ pre-commit run # to run against all files $ pre-commit run --all-files # to install so that it is run before commits $ pre-commit install ``` ## Testing In your environment `pip install -e '.[test]'` will be needed to be able to run all of the tests. The recommended way to do this is using [tox](https://tox.readthedocs.io/en/latest/): ```console # to list all environments $ tox -av # to run all tests for a specific environment $ tox -e py38 ``` ## Documentation NbClient needs some PRs to copy over documentation! ## Releasing If you are going to release a version of `nbclient` you should also be capable of testing it and building the docs. Please follow the instructions in [Testing](#testing) and [Documentation](#documentation) if you are unfamiliar with how to do so. The rest of the release process can be found in [these release instructions](./RELEASING.md). nbclient-0.5.6/LICENSE000066400000000000000000000054241414342127300143330ustar00rootroot00000000000000# Licensing terms This project is licensed under the terms of the Modified BSD License (also known as New or Revised or 3-Clause BSD), as follows: - Copyright (c) 2020-, Jupyter Development Team All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Jupyter Development Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## About the Jupyter Development Team The Jupyter Development Team is the set of all contributors to the Jupyter project. This includes all of the Jupyter subprojects. The core team that coordinates development on GitHub can be found here: https://github.com/jupyter/. ## Our Copyright Policy Jupyter uses a shared copyright model. Each contributor maintains copyright over their contributions to Jupyter. But, it is important to note that these contributions are typically only changes to the repositories. 
Thus, the Jupyter source code, in its entirety is not the copyright of any single person or institution. Instead, it is the collective copyright of the entire Jupyter Development Team. If individual contributors want to maintain a record of what changes/contributions they have specific copyright on, they should indicate their copyright in the commit message of the change, when they commit the change to one of the Jupyter repositories. With this in mind, the following banner should be used in any source code file to indicate the copyright and license terms: # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. nbclient-0.5.6/MANIFEST.in000066400000000000000000000014751414342127300150660ustar00rootroot00000000000000include LICENSE include MANIFEST.in include requirements.txt include requirements-dev.txt include *.md include .bumpversion.cfg include tox.ini include pyproject.toml include .pre-commit-config.yaml # Code and test files recursive-include nbclient *.ipynb recursive-include nbclient *.png recursive-include nbclient *.py # Documentation graft docs exclude docs/\#* exclude .readthedocs.yml exclude codecov.yml # Examples graft examples # docs subdirs we want to skip prune docs/build prune docs/gh-pages prune docs/dist # Patterns to exclude from any directory global-exclude *~ global-exclude *.pyc global-exclude *.pyo global-exclude .git global-exclude .ipynb_checkpoints # Binder files to be excluded exclude binder recursive-exclude binder *.ipynb recursive-exclude binder *.yml # Exclude CI/CD files prune .circleci nbclient-0.5.6/README.md000066400000000000000000000052241414342127300146030ustar00rootroot00000000000000[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb) [![Build Status](https://github.com/jupyter/nbclient/workflows/CI/badge.svg)](https://github.com/jupyter/nbclient/actions) [![Documentation 
Status](https://readthedocs.org/projects/nbclient/badge/?version=latest)](https://nbclient.readthedocs.io/en/latest/?badge=latest) [![image](https://codecov.io/github/jupyter/nbclient/coverage.svg?branch=master)](https://codecov.io/github/jupyter/nbclient?branch=master) [![Python 3.6](https://img.shields.io/badge/python-3.6-blue.svg)](https://www.python.org/downloads/release/python-360/) [![Python 3.7](https://img.shields.io/badge/python-3.7-blue.svg)](https://www.python.org/downloads/release/python-370/) [![Python 3.8](https://img.shields.io/badge/python-3.8-blue.svg)](https://www.python.org/downloads/release/python-380/) [![Python 3.9](https://img.shields.io/badge/python-3.9-blue.svg)](https://www.python.org/downloads/release/python-390/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) # nbclient **NBClient** lets you **execute** notebooks. A client library for programmatic notebook execution, **NBClient** is a tool for running Jupyter Notebooks in different execution contexts, including the command line. ## Interactive Demo To demo **NBClient** interactively, click this Binder badge to start the demo: [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb) ## Installation In a terminal, run: python3 -m pip install nbclient ## Documentation See [ReadTheDocs](https://nbclient.readthedocs.io/en/latest/) for more in-depth details about the project and the [API Reference](https://nbclient.readthedocs.io/en/latest/reference/index.html). ## Python Version Support This library currently supports Python 3.6+ versions. As minor Python versions are officially sunset by the Python org, nbclient will similarly drop support in the future. ## Origins This library used to be part of the [nbconvert](https://nbconvert.readthedocs.io/en/latest/) project. 
NBClient extracted nbconvert's `ExecutePreprocessor`into its own library for easier updating and importing by downstream libraries and applications. ## Relationship to JupyterClient NBClient and JupyterClient are distinct projects. `jupyter_client` is a client library for the jupyter protocol. Specifically, `jupyter_client` provides the Python API for starting, managing and communicating with Jupyter kernels. While, nbclient allows notebooks to be run in different execution contexts. nbclient-0.5.6/RELEASING.md000066400000000000000000000007521414342127300151600ustar00rootroot00000000000000# Releasing ## Prerequisites - First check that the CHANGELOG (see `docs/changelog.md`) is up to date for the next release version - Ensure dev requirements are installed `pip install -r requirements-dev.txt` ## Push to GitHub Change from patch to minor or major for appropriate version updates. ```bash bumpversion patch git push upstream && git push upstream --tags ``` ## Push to PyPI ```bash rm -rf dist/* rm -rf build/* python setup.py sdist bdist_wheel twine upload dist/* ``` nbclient-0.5.6/binder/000077500000000000000000000000001414342127300145645ustar00rootroot00000000000000nbclient-0.5.6/binder/empty_notebook.ipynb000066400000000000000000000034651414342127300206750ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Show a pandas dataframe" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import numpy as np\n", "import scrapbook as sb" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "data = pd.DataFrame(np.random.randn(20, 2), columns=['a', 'b'])\n", "data" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Use scrapbook to store this data in the notebook\n", "sb.glue('dataframe', data.to_dict())" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Make 
a matplotlib plot" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import matplotlib.pyplot as plt" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Make and display a plot\n", "fig, ax = plt.subplots()\n", "ax.scatter(data['a'], data['b'])\n", "sb.glue('plot', fig, 'display')" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": {}, "version_major": 2, "version_minor": 0 } } }, "nbformat": 4, "nbformat_minor": 4 } nbclient-0.5.6/binder/environment.yml000066400000000000000000000002131414342127300176470ustar00rootroot00000000000000name: nbclient channels: - conda-forge dependencies: - numpy - pandas - matplotlib - nteract-scrapbook - nbformat - nbclient nbclient-0.5.6/binder/run_nbclient.ipynb000066400000000000000000000053731414342127300203210ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import nbclient\n", "import nbformat as nbf\n", "import pandas as pd\n", "import scrapbook as sb" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Background\n", "\n", "This notebook uses `nbclient` to read and execute an *empty* notebook.\n", "The empty notebook generates some fake data, makes a plot, and stores\n", "both the data and the plot inside the notebook using the\n", "[scrapbook package](https://github.com/nteract/scrapbook). 
We will\n", "then be able to access the generated contents of the notebook here.\n", "\n", "You can see the empty notebook by clicking this button:\n", "\n", "" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Read and execute the empty notebook" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# We use nbformat to represent our empty notebook in-memory\n", "nb = nbf.read('./empty_notebook.ipynb', nbf.NO_CONVERT)\n", "\n", "# Execute our in-memory notebook, which will now have outputs\n", "nb = nbclient.execute(nb)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Inspect the new notebook for its contents" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# First we'll convert our nbformat NotebokNote into a *scrapbook* NotebookNode\n", "nb = sb.read_notebook(nb)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# We can access the dataframe that was created and glued into the empty notebook\n", "pd.DataFrame.from_dict(nb.scraps.get('dataframe').data).head()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# We can also access the generated plot by \"re-gluing\" the notebook here\n", "nb.reglue('plot')" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": {}, "version_major": 2, "version_minor": 0 } } }, "nbformat": 4, "nbformat_minor": 4 } 
nbclient-0.5.6/docs/000077500000000000000000000000001414342127300142515ustar00rootroot00000000000000nbclient-0.5.6/docs/Makefile000066400000000000000000000011361414342127300157120ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = nbclient SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) nbclient-0.5.6/docs/UPDATE.md000066400000000000000000000001541414342127300155550ustar00rootroot00000000000000TODO: Figure out make options needed for non-api changes ``` sphinx-apidoc -f -o reference ../nbclient ``` nbclient-0.5.6/docs/_static/000077500000000000000000000000001414342127300156775ustar00rootroot00000000000000nbclient-0.5.6/docs/_static/custom.css000066400000000000000000000007041414342127300177240ustar00rootroot00000000000000img.logo { width:100% } .right-next { float: right; max-width: 45%; overflow: auto; text-overflow: ellipsis; white-space: nowrap; } .right-next::after{ content: ' »'; } .left-prev { float: left; max-width: 45%; overflow: auto; text-overflow: ellipsis; white-space: nowrap; } .left-prev::before{ content: '« '; } .prev-next-bottom { margin-top: 3em; } .prev-next-top { margin-bottom: 1em; } nbclient-0.5.6/docs/changelog.md000066400000000000000000000167651414342127300165410ustar00rootroot00000000000000# Changelog ## 0.5.6 - Changed `jupyter execute` to `jupyter run` [#173](https://github.com/jupyter/nbclient/pull/173) ([@palewire](https://github.com/palewire)) - Move IPYKERNEL_CELL_NAME from tox to pytest [#172](https://github.com/jupyter/nbclient/pull/172) 
([@frenzymadness](https://github.com/frenzymadness)) ## 0.5.5 - Added CLI to README [#170](https://github.com/jupyter/nbclient/pull/170) ([@palewire](https://github.com/palewire)) - Add "jupyter execute" command-line interface [#165](https://github.com/jupyter/nbclient/pull/165) ([@palewire](https://github.com/palewire)) - Fix: updating buffers overwrote previous buffers [#169](https://github.com/jupyter/nbclient/pull/169) ([@maartenbreddels](https://github.com/maartenbreddels)) - Fix tests for ipykernel without debugpy [#166](https://github.com/jupyter/nbclient/pull/166) ([@frenzymadness](https://github.com/frenzymadness)) - gitignore Pipfile [#164](https://github.com/jupyter/nbclient/pull/164) ([@palewire](https://github.com/palewire)) - Fixed CONTRIBUTING.md link [#163](https://github.com/jupyter/nbclient/pull/163) ([@palewire](https://github.com/palewire)) - Fix typo [#162](https://github.com/jupyter/nbclient/pull/162) ([@The-Compiler](https://github.com/The-Compiler)) - Move format & lint to pre-commit [#161](https://github.com/jupyter/nbclient/pull/161) ([@chrisjsewell](https://github.com/chrisjsewell)) - Add `skip-execution` cell tag functionality [#151](https://github.com/jupyter/nbclient/pull/151) ([@chrisjsewell](https://github.com/chrisjsewell)) ## 0.5.4 - Replace `km.cleanup` with `km.cleanup_resources` [#152](https://github.com/jupyter/nbclient/pull/152) ([@davidbrochart](https://github.com/davidbrochart)) - Use async generator backport only on old python [#154](https://github.com/jupyter/nbclient/pull/154) ([@mkoeppe](https://github.com/mkoeppe)) - Support parsing of IPython dev version [#150](https://github.com/jupyter/nbclient/pull/150) ([@cphyc](https://github.com/cphyc)) - Set `IPYKERNEL_CELL_NAME = ` [#147](https://github.com/jupyter/nbclient/pull/147) ([@davidbrochart](https://github.com/davidbrochart)) - Print useful error message on exception [#142](https://github.com/jupyter/nbclient/pull/142) ([@certik](https://github.com/certik)) ## 0.5.3 - 
Fix ipykernel's `stop_on_error` value to take into account `raises-exception` tag and `force_raise_errors` [#137](https://github.com/jupyter/nbclient/pull/137) ## 0.5.2 - Set minimum python version supported to 3.6.1 to avoid 3.6.0 issues - CellExecutionError is now unpickleable - Added testing for python 3.9 - Changed travis tests to github actions - Documentation referencing an old model instead of NotebookClient was fixed - `allow_error_names` option was added for a more specific scope of `allow_errors` to be applied ## 0.5.1 - Update kernel client class JIT if it's the synchronous version - Several documentation fixes / improvements ## 0.5.0 - Move `language_info` retrieval before cell execution [#102](https://github.com/jupyter/nbclient/pull/102) - HistoryManager setting for ipython kernels no longer applies twice (fix for 5.0 trailets release) - Improved error handling around language_info missing - `(async_)start_new_kernel_client` is now split into `(async_)start_new_kernel` and `(async_)start_new_kernel_client` ## 0.4.2 - 0.4.3 These patch releases were removed due to backwards incompatible changes that should have been a minor release. If you were using these versions for the couple days they were up, move to 0.5.0 and you shouldn't have any issues. ## 0.4.1 - Python type hinting added to most interfaces! 
[#83](https://github.com/jupyter/nbclient/pull/83) - Several documentation fixes and improvements were made [#86](https://github.com/jupyter/nbclient/pull/86) - An asynchronous heart beat check was added to correctly raise a DeadKernelError when kernels die unexpectantly [#90](https://github.com/jupyter/nbclient/pull/90) ## 0.4.0 ### Major Changes - Use KernelManager's graceful shutdown rather than KILLing kernels [#64](https://github.com/jupyter/nbclient/pull/64) - Mimic an Output widget at the frontend so that the Output widget behaves correctly [#68](https://github.com/jupyter/nbclient/pull/68) - Nested asyncio is automatic, and works with Tornado [#71](https://github.com/jupyter/nbclient/pull/71) - `async_execute` now has a `reset_kc` argument to control if the client is reset upon execution request [#53](https://github.com/jupyter/nbclient/pull/53) ### Fixes - Fix `OSError: [WinError 6] The handle is invalid` for windows/python<3.7 [#77](https://github.com/jupyter/nbclient/pull/77) - Async wapper Exceptions no longer loose thier caused exception information [#65](https://github.com/jupyter/nbclient/pull/65) - `extra_arguments` are now configurable by config settings [#66](https://github.com/jupyter/nbclient/pull/66) ### Operational - Cross-OS testing now run on PRs via Github Actions [#63](https://github.com/jupyter/nbclient/pull/63) ## 0.3.1 ### Fixes - Check that a kernel manager exists before cleaning up the kernel [#61](https://github.com/jupyter/nbclient/pull/61) - Force client class to be async when kernel manager is MultiKernelManager [#55](https://github.com/jupyter/nbclient/pull/55) - Replace pip install with conda install in Binder [#54](https://github.com/jupyter/nbclient/pull/54) ## 0.3.0 ### Major Changes - The `(async_)start_new_kernel_client` method now supports starting a new client when its kernel manager (`self.km`) is a `MultiKernelManager`. The method now returns the kernel id in addition to the kernel client. 
If the kernel manager was a `KernelManager`, the returned kernel id is `None`. [#51](https://github.com/jupyter/nbclient/pull/51) - Added sphinx-book-theme for documentation. Added a CircleCI job to let us preview the built documentation in a PR. [#50](https://github.com/jupyter/nbclient/pull/50) - Added `reset_kc` option to `reset_execution_trackers`, so that the kernel client can be reset and a new one created in calls to `(async_)execute` [#44](https://github.com/jupyter/nbclient/pull/44) ### Docs - Fixed documentation [#46](https://github.com/jupyter/nbclient/pull/46) [#47](https://github.com/jupyter/nbclient/pull/47) - Added documentation status badge to the README - Removed conda from documentation build ## 0.2.0 ### Major Changes - Async support is now available on the client. Methods that support async have an `async_` prefix and can be awaited [#10](https://github.com/jupyter/nbclient/pull/10) [#35](https://github.com/jupyter/nbclient/pull/35) [#37](https://github.com/jupyter/nbclient/pull/37) [#38](https://github.com/jupyter/nbclient/pull/38) - Dropped support for Python 3.5 due to async compatability issues [#34](https://github.com/jupyter/nbclient/pull/34) - Notebook documents now include the [new kernel timing fields](https://github.com/jupyter/nbformat/pull/144) [#32](https://github.com/jupyter/nbclient/pull/32) ### Fixes - Memory and process leaks from nbclient should now be fixed [#34](https://github.com/jupyter/nbclient/pull/34) - Notebook execution exceptions now include error information in addition to the message [#41](https://github.com/jupyter/nbclient/pull/41) ### Docs - Added [binder examples](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb) / tests [#7](https://github.com/jupyter/nbclient/pull/7) - Added changelog to docs [#22](https://github.com/jupyter/nbclient/pull/22) - Doc typo fixes [#27](https://github.com/jupyter/nbclient/pull/27) [#30](https://github.com/jupyter/nbclient/pull/30) ## 0.1.0 - 
Initial release -- moved out of nbconvert 6.0.0-a0 nbclient-0.5.6/docs/client.rst000066400000000000000000000217541414342127300162720ustar00rootroot00000000000000Executing notebooks =================== .. module:: nbclient.client.guide Jupyter notebooks are often saved with output cells that have been cleared. NBClient provides a convenient way to execute the input cells of an .ipynb notebook file and save the results, both input and output cells, as a .ipynb file. In this section we show how to execute a ``.ipynb`` notebook document saving the result in notebook format. If you need to export notebooks to other formats, such as reStructured Text or Markdown (optionally executing them) see `nbconvert `_. Executing notebooks can be very helpful, for example, to run all notebooks in Python library in one step, or as a way to automate the data analysis in projects involving more than one notebook. Using the Python API interface ------------------------------ This section will illustrate the Python API interface. Example ~~~~~~~ Let's start with a complete quick example, leaving detailed explanations to the following sections. **Import**: First we import nbformat and the :class:`NotebookClient` class:: import nbformat from nbclient import NotebookClient **Load**: Assuming that ``notebook_filename`` contains the path to a notebook, we can load it with:: nb = nbformat.read(notebook_filename, as_version=4) **Configure**: Next, we configure the notebook execution mode:: client = NotebookClient(nb, timeout=600, kernel_name='python3', resources={'metadata': {'path': 'notebooks/'}}) We specified two (optional) arguments ``timeout`` and ``kernel_name``, which define respectively the cell execution timeout and the execution kernel. Usually you don't need to set these options, but these and other options are available to control execution context. Note that ``path`` specifies in which folder to execute the notebook. 
**Execute/Run**: To actually run the notebook we call the method ``execute``:: client.execute() Hopefully, we will not get any errors during the notebook execution (see the last section for error handling). This notebook will now have its cell outputs populated with the result of running each cell. **Save**: Finally, save the resulting notebook with:: nbformat.write(nb, 'executed_notebook.ipynb') That's all. Your executed notebook will be saved in the current folder in the file ``executed_notebook.ipynb``. Execution arguments (traitlets) ------------------------------- The arguments passed to :class:`NotebookClient` are configuration options called `traitlets `_. There are many cool things about traitlets. For example, they enforce the input type, and they can be accessed/modified as class attributes. Let's now discuss in more detail the two traitlets we used. The ``timeout`` traitlet defines the maximum time (in seconds) each notebook cell is allowed to run, if the execution takes longer an exception will be raised. The default is 30 s, so in cases of long-running cells you may want to specify an higher value. The ``timeout`` option can also be set to ``None`` or ``-1`` to remove any restriction on execution time. The second traitlet, ``kernel_name``, allows specifying the name of the kernel to be used for the execution. By default, the kernel name is obtained from the notebook metadata. The traitlet ``kernel_name`` allows specifying a user-defined kernel, overriding the value in the notebook metadata. A common use case is that of a Python 2/3 library which includes documentation/testing notebooks. These notebooks will specify either a python2 or python3 kernel in their metadata (depending on the kernel used the last time the notebook was saved). In reality, these notebooks will work on both Python 2 and Python 3, and, for testing, it is important to be able to execute them programmatically on both versions. 
Here the traitlet ``kernel_name`` helps simplify and maintain consistency: we can just run a notebook twice, specifying first "python2" and then "python3" as the kernel name. Handling errors and exceptions ------------------------------ In the previous sections we saw how to save an executed notebook, assuming there are no execution errors. But, what if there are errors? Execution until first error ~~~~~~~~~~~~~~~~~~~~~~~~~~~ An error during the notebook execution, by default, will stop the execution and raise a ``CellExecutionError``. Conveniently, the source cell causing the error and the original error name and message are also printed. After an error, we can still save the notebook as before:: nbformat.write(nb, 'executed_notebook.ipynb') The saved notebook contains the output up until the failing cell, and includes a full stack-trace and error (which can help debugging). Handling errors ~~~~~~~~~~~~~~~ A useful pattern to execute notebooks while handling errors is the following:: from nbclient.exceptions import CellExecutionError try: client.execute() except CellExecutionError: msg = 'Error executing the notebook "%s".\n\n' % notebook_filename msg += 'See notebook "%s" for the traceback.' % notebook_filename_out print(msg) raise finally: nbformat.write(nb, notebook_filename_out) This will save the executed notebook regardless of execution errors. In case of errors, however, an additional message is printed and the ``CellExecutionError`` is raised. The message directs the user to the saved notebook for further inspection. Execute and save all errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~ As a last scenario, it is sometimes useful to execute notebooks which raise exceptions, for example to show an error condition. In this case, instead of stopping the execution on the first error, we can keep executing the notebook using the traitlet ``allow_errors`` (default is False). 
With ``allow_errors=True``, the notebook is executed until the end, regardless of any error encountered during the execution. The output notebook, will contain the stack-traces and error messages for **all** the cells raising exceptions. Widget state ------------ If your notebook contains any `Jupyter Widgets `_, the state of all the widgets can be stored in the notebook's metadata. This allows rendering of the live widgets on for instance nbviewer, or when converting to html. We can tell nbclient to not store the state using the `store_widget_state` argument:: client = NotebookClient(nb, store_widget_state=False) This widget rendering is not performed against a browser during execution, so only widget default states or states manipulated via user code will be calculated during execution. ``%%javascript`` cells will execute upon notebook rendering, enabling complex interactions to function as expected when viewed by a UI. If you can't view widget results after execution, you may need to select :menuselection:`Trust Notebook` under the :menuselection:`File` menu. Using a command-line interface ------------------------------ This section will illustrate how to run notebooks from your terminal. It supports the most basic use case. For more sophisticated execution options, consider the `papermill `_ library. This library's command line tool is available by running `jupyter run`. It expects notebooks as input arguments and accepts optional flags to modify the default behavior. Running a notebook is this easy.:: jupyter run notebook.ipynb You can pass more than one notebook as well.:: jupyter run notebook.ipynb notebook2.ipynb By default, notebook errors will be raised and printed into the terminal. You can suppress them by passing the ``--allow-errors`` flag.:: jupyter run notebook.ipynb --allow-errors Other options allow you to modify the timeout length and dictate the kernel in use. 
A full set of options is available via the help command.:: jupyter run --help An application used to execute notebook files (*.ipynb) Options ======= The options below are convenience aliases to configurable class-options, as listed in the "Equivalent to" description-line of the aliases. To see all configurable class-options for some , use: --help-all --allow-errors Errors are ignored and execution is continued until the end of the notebook. Equivalent to: [--NbClientApp.allow_errors=True] --timeout= The time to wait (in seconds) for output from executions. If a cell execution takes longer, a TimeoutError is raised. ``-1`` will disable the timeout. Default: None Equivalent to: [--NbClientApp.timeout] --startup_timeout= The time to wait (in seconds) for the kernel to start. If kernel startup takes longer, a RuntimeError is raised. Default: 60 Equivalent to: [--NbClientApp.startup_timeout] --kernel_name= Name of kernel to use to execute the cells. If not set, use the kernel_spec embedded in the notebook. Default: '' Equivalent to: [--NbClientApp.kernel_name] To see all available configurables, use `--help-all`. nbclient-0.5.6/docs/conf.py000066400000000000000000000125061414342127300155540ustar00rootroot00000000000000#!/usr/bin/env python3 # # nbclient documentation build configuration file, created by # sphinx-quickstart on Mon Jan 26 16:00:00 2020. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# import os import sys import nbclient sys.path.insert(0, os.path.abspath('..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.mathjax', 'sphinx.ext.napoleon', 'myst_parser', ] autodoc_mock_imports = ['pytest', 'nbconvert', 'testpath'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] # The master toctree document. master_doc = 'index' # General information about the project. project = 'nbclient' copyright = '2020, Project Jupyter' author = 'Project Jupyter' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '.'.join(nbclient.__version__.split('.')[0:2]) # The full version, including alpha/beta/rc tags. release = nbclient.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line foexitr these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'UPDATE.md'] # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False default_role = 'any' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_book_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { "path_to_docs": "docs", "repository_url": "https://github.com/jupyter/nbclient", "repository_branch": "master", "use_edit_page_button": True, } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # html_sidebars = {} html_title = "nbclient" # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'nclientdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [(master_doc, 'nbclient.tex', 'nbclient Documentation', 'jupyter team', 'manual')] # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, 'nbclient', 'nbclient Documentation', [author], 1)] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, 'nbclient', 'nbclient Documentation', author, 'nbclient', 'One line description of project.', 'Miscellaneous', ) ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None} nbclient-0.5.6/docs/index.rst000066400000000000000000000040251414342127300161130ustar00rootroot00000000000000Welcome to nbclient =================== .. image:: https://img.shields.io/github/stars/jupyter/nbclient?label=stars&style=social :alt: GitHub stars :target: https://github.com/jupyter/nbclient .. image:: https://github.com/jupyter/nbclient/workflows/CI/badge.svg :alt: GitHub Actions :target: https://github.com/jupyter/nbclient/actions .. image:: https://codecov.io/github/jupyter/nbclient/coverage.svg?branch=master :alt: CodeCov :target: https://codecov.io/github/jupyter/nbclient --- **NBClient** lets you **execute** notebooks. A client library for programmatic notebook execution, **NBClient** is a tool for running Jupyter Notebooks in different execution contexts, including the command line. NBClient was spun out of `nbconvert `_'s former ``ExecutePreprocessor``. Demo ---- To demo **NBClient** interactively, click the Binder link below: .. image:: https://mybinder.org/badge_logo.svg :target: https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb Origins ------- This library used to be part of `nbconvert `_ and was extracted into its ownlibrary for easier updating and importing by downstream libraries and applications. 
Python Version Support ---------------------- This library currently supports python 3.6+ versions. As minor python versions are officially sunset by the python org, nbclient will similarly drop support in the future. Documentation ------------- These pages guide you through the installation and usage of nbclient. .. toctree:: :maxdepth: 1 :caption: Documentation installation client changelog API Reference ------------- If you are looking for information about a specific function, class, or method, this documentation section will help you. .. toctree:: :maxdepth: 3 :caption: Table of Contents reference/index.rst reference/nbclient.tests.rst Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` nbclient-0.5.6/docs/installation.rst000066400000000000000000000004471414342127300175110ustar00rootroot00000000000000Installation ============ Installing nbclient ------------------- From the command line: .. code-block:: bash python3 -m pip install nbclient .. seealso:: `Installing Jupyter `__ NBClient is part of the Jupyter ecosystem. nbclient-0.5.6/docs/make.bat000066400000000000000000000014101414342127300156520ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build set SPHINXPROJ=nbclient if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd nbclient-0.5.6/docs/reference/000077500000000000000000000000001414342127300162075ustar00rootroot00000000000000nbclient-0.5.6/docs/reference/index.rst000066400000000000000000000002541414342127300200510ustar00rootroot00000000000000Reference ========= This part of the documentation lists the full API reference of all public classes and functions. .. toctree:: :maxdepth: 2 nbclient modules nbclient-0.5.6/docs/reference/modules.rst000066400000000000000000000000751414342127300204130ustar00rootroot00000000000000nbclient ======== .. toctree:: :maxdepth: 4 nbclient nbclient-0.5.6/docs/reference/nbclient.rst000066400000000000000000000007751414342127300205500ustar00rootroot00000000000000nbclient package ================ Subpackages ----------- .. toctree:: nbclient.tests Submodules ---------- nbclient.client module ---------------------- .. automodule:: nbclient.client :members: :undoc-members: :show-inheritance: nbclient.exceptions module -------------------------- .. automodule:: nbclient.exceptions :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: nbclient :members: :undoc-members: :show-inheritance: nbclient-0.5.6/docs/reference/nbclient.tests.rst000066400000000000000000000012661414342127300217050ustar00rootroot00000000000000nbclient.tests package ====================== Submodules ---------- nbclient.tests.base module -------------------------- .. automodule:: nbclient.tests.base :members: :undoc-members: :show-inheritance: nbclient.tests.fake\_kernelmanager module ----------------------------------------- .. automodule:: nbclient.tests.fake_kernelmanager :members: :undoc-members: :show-inheritance: nbclient.tests.test\_client module ---------------------------------- .. 
automodule:: nbclient.tests.test_client :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: nbclient.tests :members: :undoc-members: :show-inheritance: nbclient-0.5.6/docs/requirements-doc.txt000066400000000000000000000000641414342127300203000ustar00rootroot00000000000000Sphinx>=1.7 sphinx-book-theme mock moto myst-parser nbclient-0.5.6/nbclient/000077500000000000000000000000001414342127300151175ustar00rootroot00000000000000nbclient-0.5.6/nbclient/__init__.py000066400000000000000000000007411414342127300172320ustar00rootroot00000000000000import subprocess import sys from ._version import version as __version__ # noqa: F401 from .client import NotebookClient, execute # noqa: F401 def _cleanup() -> None: pass # patch subprocess on Windows for python<3.7 # see https://bugs.python.org/issue37380 # the fix for python3.7: https://github.com/python/cpython/pull/15706/files if sys.platform == 'win32': if sys.version_info < (3, 7): subprocess._cleanup = _cleanup subprocess._active = None nbclient-0.5.6/nbclient/_version.py000066400000000000000000000000221414342127300173070ustar00rootroot00000000000000version = '0.5.6' nbclient-0.5.6/nbclient/cli.py000066400000000000000000000113631414342127300162440ustar00rootroot00000000000000import logging import pathlib import sys from textwrap import dedent import nbformat from jupyter_core.application import JupyterApp from traitlets import Bool, Integer, List, Unicode, default from traitlets.config import catch_config_error from nbclient import __version__ from .client import NotebookClient nbclient_aliases = { 'timeout': 'NbClientApp.timeout', 'startup_timeout': 'NbClientApp.startup_timeout', 'kernel_name': 'NbClientApp.kernel_name', } nbclient_flags = { 'allow-errors': ( { 'NbClientApp': { 'allow_errors': True, }, }, "Errors are ignored and execution is continued until the end of the notebook.", ), } class NbClientApp(JupyterApp): """ An application used to execute notebook files (``*.ipynb``) 
""" version = __version__ name = 'jupyter-run' aliases = nbclient_aliases flags = nbclient_flags description = Unicode("An application used to execute notebook files (*.ipynb)") notebooks = List([], help="Path of notebooks to convert").tag(config=True) timeout: int = Integer( None, allow_none=True, help=dedent( """ The time to wait (in seconds) for output from executions. If a cell execution takes longer, a TimeoutError is raised. ``-1`` will disable the timeout. """ ), ).tag(config=True) startup_timeout: int = Integer( 60, help=dedent( """ The time to wait (in seconds) for the kernel to start. If kernel startup takes longer, a RuntimeError is raised. """ ), ).tag(config=True) allow_errors: bool = Bool( False, help=dedent( """ When a cell raises an error the default behavior is that execution is stopped and a `CellExecutionError` is raised. If this flag is provided, errors are ignored and execution is continued until the end of the notebook. """ ), ).tag(config=True) skip_cells_with_tag: str = Unicode( 'skip-execution', help=dedent( """ Name of the cell tag to use to denote a cell that should be skipped. """ ), ).tag(config=True) kernel_name: str = Unicode( '', help=dedent( """ Name of kernel to use to execute the cells. If not set, use the kernel_spec embedded in the notebook. """ ), ).tag(config=True) @default('log_level') def _log_level_default(self): return logging.INFO @catch_config_error def initialize(self, argv=None): super().initialize(argv) # Get notebooks to run self.notebooks = self.get_notebooks() # If there are none, throw an error if not self.notebooks: print(f"{self.name}: error: expected path to notebook") sys.exit(-1) # Loop and run them one by one [self.run_notebook(path) for path in self.notebooks] def get_notebooks(self): # If notebooks were provided from the command line, use those if self.extra_args: notebooks = self.extra_args # If not, look to the class attribute else: notebooks = self.notebooks # Return what we got. 
return notebooks def run_notebook(self, notebook_path): # Log it self.log.info(f"Executing {notebook_path}") name = notebook_path.replace(".ipynb", "") # Get its parent directory so we can add it to the $PATH path = pathlib.Path(notebook_path).parent.absolute() # Set the intput file paths input_path = f"{name}.ipynb" # Open up the notebook we're going to run with open(input_path) as f: nb = nbformat.read(f, as_version=4) # Configure nbclient to run the notebook client = NotebookClient( nb, timeout=self.timeout, startup_timeout=self.startup_timeout, skip_cells_with_tag=self.skip_cells_with_tag, allow_errors=self.allow_errors, kernel_name=self.kernel_name, resources={'metadata': {'path': path}}, ) # Run it client.execute() class NbClientAlias(NbClientApp): """ An alias to the run command. """ name = 'jupyter-execute' @catch_config_error def initialize(self, argv=None): print( "This alias to `jupyter run` may be deprecated in the future. " "Please switch to using `run`." ) super().initialize(argv) run = NbClientApp.launch_instance execute = NbClientAlias.launch_instance nbclient-0.5.6/nbclient/client.py000066400000000000000000001204071414342127300167530ustar00rootroot00000000000000import atexit import base64 import collections import datetime import signal from textwrap import dedent try: from contextlib import asynccontextmanager except ImportError: # Use the backport package async-generator for Python < 3.7. 
# This should be removed when nbclient drops support for Python 3.6 from async_generator import asynccontextmanager # type: ignore import asyncio import typing as t from contextlib import contextmanager from queue import Empty from time import monotonic from jupyter_client import KernelManager from jupyter_client.client import KernelClient from nbformat import NotebookNode from nbformat.v4 import output_from_msg from traitlets import Any, Bool, Dict, Enum, Integer, List, Type, Unicode, default from traitlets.config.configurable import LoggingConfigurable from .exceptions import ( CellControlSignal, CellExecutionComplete, CellExecutionError, CellTimeoutError, DeadKernelError, ) from .output_widget import OutputWidget from .util import ensure_async, run_sync def timestamp() -> str: return datetime.datetime.utcnow().isoformat() + 'Z' class NotebookClient(LoggingConfigurable): """ Encompasses a Client for executing cells in a notebook """ timeout: int = Integer( None, allow_none=True, help=dedent( """ The time to wait (in seconds) for output from executions. If a cell execution takes longer, a TimeoutError is raised. ``None`` or ``-1`` will disable the timeout. If ``timeout_func`` is set, it overrides ``timeout``. """ ), ).tag(config=True) timeout_func: t.Any = Any( default_value=None, allow_none=True, help=dedent( """ A callable which, when given the cell source as input, returns the time to wait (in seconds) for output from cell executions. If a cell execution takes longer, a TimeoutError is raised. Returning ``None`` or ``-1`` will disable the timeout for the cell. Not setting ``timeout_func`` will cause the client to default to using the ``timeout`` trait for all cells. The ``timeout_func`` trait overrides ``timeout`` if it is not ``None``. """ ), ).tag(config=True) interrupt_on_timeout: bool = Bool( False, help=dedent( """ If execution of a cell times out, interrupt the kernel and continue executing other cells rather than throwing an error and stopping. 
""" ), ).tag(config=True) startup_timeout: int = Integer( 60, help=dedent( """ The time to wait (in seconds) for the kernel to start. If kernel startup takes longer, a RuntimeError is raised. """ ), ).tag(config=True) allow_errors: bool = Bool( False, help=dedent( """ If ``False`` (default), when a cell raises an error the execution is stopped and a `CellExecutionError` is raised, except if the error name is in ``allow_error_names``. If ``True``, execution errors are ignored and the execution is continued until the end of the notebook. Output from exceptions is included in the cell output in both cases. """ ), ).tag(config=True) allow_error_names: t.List[str] = List( Unicode(), help=dedent( """ List of error names which won't stop the execution. Use this if the ``allow_errors`` option it too general and you want to allow only specific kinds of errors. """ ), ).tag(config=True) force_raise_errors: bool = Bool( False, help=dedent( """ If False (default), errors from executing the notebook can be allowed with a ``raises-exception`` tag on a single cell, or the ``allow_errors`` or ``allow_error_names`` configurable options for all cells. An allowed error will be recorded in notebook output, and execution will continue. If an error occurs when it is not explicitly allowed, a `CellExecutionError` will be raised. If True, `CellExecutionError` will be raised for any error that occurs while executing the notebook. This overrides the ``allow_errors`` and ``allow_error_names`` options and the ``raises-exception`` cell tag. """ ), ).tag(config=True) skip_cells_with_tag: str = Unicode( 'skip-execution', help=dedent( """ Name of the cell tag to use to denote a cell that should be skipped. """ ), ).tag(config=True) extra_arguments: t.List = List(Unicode()).tag(config=True) kernel_name: str = Unicode( '', help=dedent( """ Name of kernel to use to execute the cells. If not set, use the kernel_spec embedded in the notebook. 
""" ), ).tag(config=True) raise_on_iopub_timeout: bool = Bool( False, help=dedent( """ If ``False`` (default), then the kernel will continue waiting for iopub messages until it receives a kernel idle message, or until a timeout occurs, at which point the currently executing cell will be skipped. If ``True``, then an error will be raised after the first timeout. This option generally does not need to be used, but may be useful in contexts where there is the possibility of executing notebooks with memory-consuming infinite loops. """ ), ).tag(config=True) store_widget_state: bool = Bool( True, help=dedent( """ If ``True`` (default), then the state of the Jupyter widgets created at the kernel will be stored in the metadata of the notebook. """ ), ).tag(config=True) record_timing: bool = Bool( True, help=dedent( """ If ``True`` (default), then the execution timings of each cell will be stored in the metadata of the notebook. """ ), ).tag(config=True) iopub_timeout: int = Integer( 4, allow_none=False, help=dedent( """ The time to wait (in seconds) for IOPub output. This generally doesn't need to be set, but on some slow networks (such as CI systems) the default timeout might not be long enough to get all messages. """ ), ).tag(config=True) shell_timeout_interval: int = Integer( 5, allow_none=False, help=dedent( """ The time to wait (in seconds) for Shell output before retrying. This generally doesn't need to be set, but if one needs to check for dead kernels at a faster rate this can help. """ ), ).tag(config=True) shutdown_kernel = Enum( ['graceful', 'immediate'], default_value='graceful', help=dedent( """ If ``graceful`` (default), then the kernel is given time to clean up after executing all cells, e.g., to execute its ``atexit`` hooks. If ``immediate``, then the kernel is signaled to immediately terminate. 
""" ), ).tag(config=True) ipython_hist_file: str = Unicode( default_value=':memory:', help="""Path to file to use for SQLite history database for an IPython kernel. The specific value ``:memory:`` (including the colon at both end but not the back ticks), avoids creating a history file. Otherwise, IPython will create a history file for each kernel. When running kernels simultaneously (e.g. via multiprocessing) saving history a single SQLite file can result in database errors, so using ``:memory:`` is recommended in non-interactive contexts. """, ).tag(config=True) kernel_manager_class: KernelManager = Type(config=True, help='The kernel manager class to use.') @default('kernel_manager_class') def _kernel_manager_class_default(self) -> KernelManager: """Use a dynamic default to avoid importing jupyter_client at startup""" from jupyter_client import AsyncKernelManager return AsyncKernelManager _display_id_map: t.Dict[str, t.Dict] = Dict( help=dedent( """ mapping of locations of outputs with a given display_id tracks cell index and output index within cell.outputs for each appearance of the display_id { 'display_id': { cell_idx: [output_idx,] } } """ ) ) display_data_priority: t.List = List( [ 'text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/markdown', 'text/plain', ], help=""" An ordered list of preferred output type, the first encountered will usually be used when converting discarding the others. """, ).tag(config=True) resources: t.Dict = Dict( help=dedent( """ Additional resources used in the conversion process. For example, passing ``{'metadata': {'path': run_path}}`` sets the execution path to ``run_path``. """ ) ) def __init__(self, nb: NotebookNode, km: t.Optional[KernelManager] = None, **kw) -> None: """Initializes the execution manager. Parameters ---------- nb : NotebookNode Notebook being executed. km : KernelManager (optional) Optional kernel manager. If none is provided, a kernel manager will be created. 
""" super().__init__(**kw) self.nb: NotebookNode = nb self.km: t.Optional[KernelManager] = km self.owns_km: bool = km is None # whether the NotebookClient owns the kernel manager self.kc: t.Optional[KernelClient] = None self.reset_execution_trackers() self.widget_registry: t.Dict[str, t.Dict] = { '@jupyter-widgets/output': {'OutputModel': OutputWidget} } # comm_open_handlers should return an object with a .handle_msg(msg) method or None self.comm_open_handlers: t.Dict[str, t.Any] = { 'jupyter.widget': self.on_comm_open_jupyter_widget } def reset_execution_trackers(self) -> None: """Resets any per-execution trackers.""" self.task_poll_for_reply: t.Optional[asyncio.Future] = None self.code_cells_executed = 0 self._display_id_map = {} self.widget_state: t.Dict[str, t.Dict] = {} self.widget_buffers: t.Dict[str, t.Dict[t.Tuple[str, ...], t.Dict[str, str]]] = {} # maps to list of hooks, where the last is used, this is used # to support nested use of output widgets. self.output_hook_stack: t.Any = collections.defaultdict(list) # our front-end mimicing Output widgets self.comm_objects: t.Dict[str, t.Any] = {} def create_kernel_manager(self) -> KernelManager: """Creates a new kernel manager. Returns ------- km : KernelManager Kernel manager whose client class is asynchronous. """ if not self.kernel_name: kn = self.nb.metadata.get('kernelspec', {}).get('name') if kn is not None: self.kernel_name = kn if not self.kernel_name: self.km = self.kernel_manager_class(config=self.config) else: self.km = self.kernel_manager_class(kernel_name=self.kernel_name, config=self.config) # If the current kernel manager is still using the default (synchronous) KernelClient class, # switch to the async version since that's what NBClient prefers. 
if self.km.client_class == 'jupyter_client.client.KernelClient': self.km.client_class = 'jupyter_client.asynchronous.AsyncKernelClient' return self.km async def _async_cleanup_kernel(self) -> None: assert self.km is not None now = self.shutdown_kernel == "immediate" try: # Queue the manager to kill the process, and recover gracefully if it's already dead. if await ensure_async(self.km.is_alive()): await ensure_async(self.km.shutdown_kernel(now=now)) except RuntimeError as e: # The error isn't specialized, so we have to check the message if 'No kernel is running!' not in str(e): raise finally: # Remove any state left over even if we failed to stop the kernel await ensure_async(self.km.cleanup_resources()) if getattr(self, "kc") and self.kc is not None: await ensure_async(self.kc.stop_channels()) self.kc = None self.km = None _cleanup_kernel = run_sync(_async_cleanup_kernel) async def async_start_new_kernel(self, **kwargs) -> None: """Creates a new kernel. Parameters ---------- kwargs : Any options for ``self.kernel_manager_class.start_kernel()``. Because that defaults to AsyncKernelManager, this will likely include options accepted by ``AsyncKernelManager.start_kernel()``, which includes ``cwd``. """ assert self.km is not None resource_path = self.resources.get('metadata', {}).get('path') or None if resource_path and 'cwd' not in kwargs: kwargs["cwd"] = resource_path has_history_manager_arg = any( arg.startswith('--HistoryManager.hist_file') for arg in self.extra_arguments ) if ( hasattr(self.km, 'ipykernel') and self.km.ipykernel and self.ipython_hist_file and not has_history_manager_arg ): self.extra_arguments += [f'--HistoryManager.hist_file={self.ipython_hist_file}'] await ensure_async(self.km.start_kernel(extra_arguments=self.extra_arguments, **kwargs)) start_new_kernel = run_sync(async_start_new_kernel) async def async_start_new_kernel_client(self) -> KernelClient: """Creates a new kernel client. 
Returns
        -------
        kc : KernelClient
            Kernel client as created by the kernel manager ``km``.
        """
        assert self.km is not None
        self.kc = self.km.client()
        await ensure_async(self.kc.start_channels())
        try:
            await ensure_async(self.kc.wait_for_ready(timeout=self.startup_timeout))
        except RuntimeError:
            # Kernel never became ready: tear everything down before re-raising.
            await self._async_cleanup_kernel()
            raise
        # Executed notebooks cannot answer stdin prompts.
        self.kc.allow_stdin = False
        return self.kc

    start_new_kernel_client = run_sync(async_start_new_kernel_client)

    @contextmanager
    def setup_kernel(self, **kwargs) -> t.Generator:
        """
        Context manager for setting up the kernel to execute a notebook.

        This assigns the Kernel Manager (``self.km``) if missing and Kernel Client(``self.kc``).

        When control returns from the yield it stops the client's zmq channels,
        and shuts down the kernel.
        """
        # by default, cleanup the kernel client if we own the kernel manager
        # and keep it alive if we don't
        cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km)

        # Can't use run_until_complete on an asynccontextmanager function :(
        if self.km is None:
            self.km = self.create_kernel_manager()

        if not self.km.has_kernel:
            self.start_new_kernel(**kwargs)
            self.start_new_kernel_client()
        try:
            yield
        finally:
            if cleanup_kc:
                self._cleanup_kernel()

    @asynccontextmanager
    async def async_setup_kernel(self, **kwargs) -> t.AsyncGenerator:
        """
        Context manager for setting up the kernel to execute a notebook.

        This assigns the Kernel Manager (``self.km``) if missing and Kernel Client(``self.kc``).

        When control returns from the yield it stops the client's zmq channels,
        and shuts down the kernel.

        Handlers for SIGINT and SIGTERM are also added to cleanup in case of unexpected shutdown.
        """
        # by default, cleanup the kernel client if we own the kernel manager
        # and keep it alive if we don't
        cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km)

        if self.km is None:
            self.km = self.create_kernel_manager()

        # self._cleanup_kernel uses run_async, which ensures the ioloop is running again.
        # This is necessary as the ioloop has stopped once atexit fires.
atexit.register(self._cleanup_kernel)

        def on_signal():
            # Best-effort async cleanup on SIGINT/SIGTERM; once scheduled, the
            # atexit hook is redundant and must not run a second time.
            asyncio.ensure_future(self._async_cleanup_kernel())
            atexit.unregister(self._cleanup_kernel)

        loop = asyncio.get_event_loop()
        try:
            loop.add_signal_handler(signal.SIGINT, on_signal)
            loop.add_signal_handler(signal.SIGTERM, on_signal)
        except (NotImplementedError, RuntimeError):
            # NotImplementedError: Windows does not support signals.
            # RuntimeError: Raised when add_signal_handler is called outside the main thread
            pass

        if not self.km.has_kernel:
            await self.async_start_new_kernel(**kwargs)
            await self.async_start_new_kernel_client()

        try:
            yield
        finally:
            if cleanup_kc:
                await self._async_cleanup_kernel()
            # Undo the atexit/signal wiring installed above.
            atexit.unregister(self._cleanup_kernel)
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except (NotImplementedError, RuntimeError):
                pass

    async def async_execute(self, reset_kc: bool = False, **kwargs) -> NotebookNode:
        """
        Executes each code cell.

        Parameters
        ----------
        reset_kc : bool
            If True, the kernel client will be reset and a new one will be
            created (default: False).
        kwargs :
            Any option for ``self.kernel_manager_class.start_kernel()``. Because
            that defaults to AsyncKernelManager, this will likely include options
            accepted by ``jupyter_client.AsyncKernelManager.start_kernel()``,
            which includes ``cwd``.

        Returns
        -------
        nb : NotebookNode
            The executed notebook.
        """
        if reset_kc and self.owns_km:
            await self._async_cleanup_kernel()
        self.reset_execution_trackers()

        async with self.async_setup_kernel(**kwargs):
            assert self.kc is not None
            self.log.info("Executing notebook with kernel: %s" % self.kernel_name)
            # Record the kernel's language_info in the notebook metadata so the
            # executed notebook is self-describing.
            msg_id = await ensure_async(self.kc.kernel_info())
            info_msg = await self.async_wait_for_reply(msg_id)
            if info_msg is not None:
                if 'language_info' in info_msg['content']:
                    self.nb.metadata['language_info'] = info_msg['content']['language_info']
                else:
                    raise RuntimeError(
                        'Kernel info received message content has no "language_info" key. 
' 'Content is:\n' + str(info_msg['content'])
                    )

            for index, cell in enumerate(self.nb.cells):
                # Ignore `'execution_count' in content` as it's always 1
                # when store_history is False
                await self.async_execute_cell(
                    cell, index, execution_count=self.code_cells_executed + 1
                )
            self.set_widgets_metadata()

        return self.nb

    execute = run_sync(async_execute)

    def set_widgets_metadata(self) -> None:
        """Store collected widget state/buffers in the notebook metadata
        under the @jupyter-widgets/schema mime key."""
        if self.widget_state:
            self.nb.metadata.widgets = {
                'application/vnd.jupyter.widget-state+json': {
                    'state': {
                        model_id: self._serialize_widget_state(state)
                        for model_id, state in self.widget_state.items()
                        if '_model_name' in state
                    },
                    'version_major': 2,
                    'version_minor': 0,
                }
            }
            # Attach any binary buffers recorded for each widget model.
            for key, widget in self.nb.metadata.widgets[
                'application/vnd.jupyter.widget-state+json'
            ]['state'].items():
                buffers = self.widget_buffers.get(key)
                if buffers:
                    widget['buffers'] = list(buffers.values())

    def _update_display_id(self, display_id: str, msg: t.Dict) -> None:
        """Update outputs with a given display_id"""
        if display_id not in self._display_id_map:
            self.log.debug("display id %r not in %s", display_id, self._display_id_map)
            return

        # Treat the update as if it were the original display message.
        if msg['header']['msg_type'] == 'update_display_data':
            msg['header']['msg_type'] = 'display_data'

        try:
            out = output_from_msg(msg)
        except ValueError:
            self.log.error("unhandled iopub msg: " + msg['msg_type'])
            return

        # Patch every previously recorded output slot carrying this display_id.
        for cell_idx, output_indices in self._display_id_map[display_id].items():
            cell = self.nb['cells'][cell_idx]
            outputs = cell['outputs']
            for output_idx in output_indices:
                outputs[output_idx]['data'] = out['data']
                outputs[output_idx]['metadata'] = out['metadata']

    async def _async_poll_for_reply(
        self,
        msg_id: str,
        cell: NotebookNode,
        timeout: t.Optional[int],
        task_poll_output_msg: asyncio.Future,
        task_poll_kernel_alive: asyncio.Future,
    ) -> t.Dict:
        # Wait on the shell channel for the execute_reply matching msg_id,
        # shrinking the remaining timeout after each unrelated message.
        assert self.kc is not None
        new_timeout: t.Optional[float] = None
        if timeout is not None:
            deadline = monotonic() + timeout
            new_timeout = float(timeout)
        while True:
            try:
                msg = await
ensure_async(self.kc.shell_channel.get_msg(timeout=new_timeout))
                if msg['parent_header'].get('msg_id') == msg_id:
                    if self.record_timing:
                        cell['metadata']['execution']['shell.execute_reply'] = timestamp()
                    # The reply arrived; give IOPub a bounded grace period to drain.
                    try:
                        await asyncio.wait_for(task_poll_output_msg, self.iopub_timeout)
                    except (asyncio.TimeoutError, Empty):
                        if self.raise_on_iopub_timeout:
                            task_poll_kernel_alive.cancel()
                            raise CellTimeoutError.error_from_timeout_and_cell(
                                "Timeout waiting for IOPub output", self.iopub_timeout, cell
                            )
                        else:
                            self.log.warning("Timeout waiting for IOPub output")
                    task_poll_kernel_alive.cancel()
                    return msg
                else:
                    # Unrelated reply: shrink the remaining timeout and keep waiting.
                    if new_timeout is not None:
                        new_timeout = max(0, deadline - monotonic())
            except Empty:
                # received no message, check if kernel is still alive
                assert timeout is not None
                task_poll_kernel_alive.cancel()
                await self._async_check_alive()
                await self._async_handle_timeout(timeout, cell)

    async def _async_poll_output_msg(
        self, parent_msg_id: str, cell: NotebookNode, cell_index: int
    ) -> None:
        """Consume IOPub messages for one execution until completion is signalled."""
        assert self.kc is not None
        while True:
            msg = await ensure_async(self.kc.iopub_channel.get_msg(timeout=None))
            if msg['parent_header'].get('msg_id') == parent_msg_id:
                try:
                    # Will raise CellExecutionComplete when completed
                    self.process_message(msg, cell, cell_index)
                except CellExecutionComplete:
                    return

    async def _async_poll_kernel_alive(self) -> None:
        """Background task: once a second, cancel the reply wait if the kernel died."""
        while True:
            await asyncio.sleep(1)
            try:
                await self._async_check_alive()
            except DeadKernelError:
                assert self.task_poll_for_reply is not None
                self.task_poll_for_reply.cancel()
                return

    def _get_timeout(self, cell: t.Optional[NotebookNode]) -> int:
        """Resolve the effective timeout for a cell; None means wait forever."""
        if self.timeout_func is not None and cell is not None:
            timeout = self.timeout_func(cell)
        else:
            timeout = self.timeout

        # Zero/negative/falsy timeouts disable the limit entirely.
        if not timeout or timeout < 0:
            timeout = None

        return timeout

    async def _async_handle_timeout(
        self, timeout: int, cell: t.Optional[NotebookNode] = None
    ) -> None:
        """Either interrupt the kernel or raise CellTimeoutError, per configuration."""
        self.log.error("Timeout waiting for execute reply (%is)."
% timeout) if self.interrupt_on_timeout: self.log.error("Interrupting kernel") assert self.km is not None await ensure_async(self.km.interrupt_kernel()) else: raise CellTimeoutError.error_from_timeout_and_cell( "Cell execution timed out", timeout, cell ) async def _async_check_alive(self) -> None: assert self.kc is not None if not await ensure_async(self.kc.is_alive()): self.log.error("Kernel died while waiting for execute reply.") raise DeadKernelError("Kernel died") async def async_wait_for_reply( self, msg_id: str, cell: t.Optional[NotebookNode] = None ) -> t.Optional[t.Dict]: assert self.kc is not None # wait for finish, with timeout timeout = self._get_timeout(cell) cummulative_time = 0 while True: try: msg = await ensure_async( self.kc.shell_channel.get_msg(timeout=self.shell_timeout_interval) ) except Empty: await self._async_check_alive() cummulative_time += self.shell_timeout_interval if timeout and cummulative_time > timeout: await self._async_async_handle_timeout(timeout, cell) break else: if msg['parent_header'].get('msg_id') == msg_id: return msg return None wait_for_reply = run_sync(async_wait_for_reply) # Backwards compatability naming for papermill _wait_for_reply = wait_for_reply def _passed_deadline(self, deadline: int) -> bool: if deadline is not None and deadline - monotonic() <= 0: return True return False def _check_raise_for_error(self, cell: NotebookNode, exec_reply: t.Optional[t.Dict]) -> None: if exec_reply is None: return None exec_reply_content = exec_reply['content'] if exec_reply_content['status'] != 'error': return None cell_allows_errors = (not self.force_raise_errors) and ( self.allow_errors or exec_reply_content.get('ename') in self.allow_error_names or "raises-exception" in cell.metadata.get("tags", []) ) if not cell_allows_errors: raise CellExecutionError.from_cell_and_msg(cell, exec_reply_content) async def async_execute_cell( self, cell: NotebookNode, cell_index: int, execution_count: t.Optional[int] = None, store_history: bool 
= True,
    ) -> NotebookNode:
        """
        Executes a single code cell.

        To execute all cells see :meth:`execute`.

        Parameters
        ----------
        cell : nbformat.NotebookNode
            The cell which is currently being processed.
        cell_index : int
            The position of the cell within the notebook object.
        execution_count : int
            The execution count to be assigned to the cell (default: Use kernel response)
        store_history : bool
            Determines if history should be stored in the kernel (default: True).
            Specific to ipython kernels, which can store command histories.

        Raises
        ------
        CellExecutionError
            If execution failed and should raise an exception, this will be raised
            with defaults about the failure.

        Returns
        -------
        cell : NotebookNode
            The cell which was just processed.
        """
        assert self.kc is not None
        # Only non-empty code cells are sent to the kernel.
        if cell.cell_type != 'code' or not cell.source.strip():
            self.log.debug("Skipping non-executing cell %s", cell_index)
            return cell

        if self.skip_cells_with_tag in cell.metadata.get("tags", []):
            self.log.debug("Skipping tagged cell %s", cell_index)
            return cell

        if self.record_timing and 'execution' not in cell['metadata']:
            cell['metadata']['execution'] = {}

        self.log.debug("Executing cell:\n%s", cell.source)

        cell_allows_errors = (not self.force_raise_errors) and (
            self.allow_errors or "raises-exception" in cell.metadata.get("tags", [])
        )

        parent_msg_id = await ensure_async(
            self.kc.execute(
                cell.source, store_history=store_history, stop_on_error=not cell_allows_errors
            )
        )
        # We launched a code cell to execute
        self.code_cells_executed += 1
        exec_timeout = self._get_timeout(cell)

        cell.outputs = []
        self.clear_before_next_output = False

        # Three concurrent tasks: kernel liveness watchdog, IOPub output
        # consumer, and the shell-reply wait that coordinates the other two.
        task_poll_kernel_alive = asyncio.ensure_future(self._async_poll_kernel_alive())
        task_poll_output_msg = asyncio.ensure_future(
            self._async_poll_output_msg(parent_msg_id, cell, cell_index)
        )
        self.task_poll_for_reply = asyncio.ensure_future(
            self._async_poll_for_reply(
                parent_msg_id, cell, exec_timeout,
task_poll_output_msg, task_poll_kernel_alive
            )
        )
        try:
            exec_reply = await self.task_poll_for_reply
        except asyncio.CancelledError:
            # can only be cancelled by task_poll_kernel_alive when the kernel is dead
            task_poll_output_msg.cancel()
            raise DeadKernelError("Kernel died")
        except Exception as e:
            # Best effort to cancel request if it hasn't been resolved
            try:
                # Check if the task_poll_output is doing the raising for us
                if not isinstance(e, CellControlSignal):
                    task_poll_output_msg.cancel()
            finally:
                raise

        if execution_count:
            cell['execution_count'] = execution_count
        self._check_raise_for_error(cell, exec_reply)
        self.nb['cells'][cell_index] = cell
        return cell

    execute_cell = run_sync(async_execute_cell)

    def process_message(
        self, msg: t.Dict, cell: NotebookNode, cell_index: int
    ) -> t.Optional[t.List]:
        """
        Processes a kernel message, updates cell state, and returns the
        resulting output object that was appended to cell.outputs.

        The input argument *cell* is modified in-place.

        Parameters
        ----------
        msg : dict
            The kernel message being processed.
        cell : nbformat.NotebookNode
            The cell which is currently being processed.
        cell_index : int
            The position of the cell within the notebook object.

        Returns
        -------
        output : dict
            The execution output payload (or None for no output).

        Raises
        ------
        CellExecutionComplete
          Once a message arrives which indicates computation completeness.
""" msg_type = msg['msg_type'] self.log.debug("msg_type: %s", msg_type) content = msg['content'] self.log.debug("content: %s", content) display_id = content.get('transient', {}).get('display_id', None) if display_id and msg_type in {'execute_result', 'display_data', 'update_display_data'}: self._update_display_id(display_id, msg) # set the prompt number for the input and the output if 'execution_count' in content: cell['execution_count'] = content['execution_count'] if self.record_timing: if msg_type == 'status': if content['execution_state'] == 'idle': cell['metadata']['execution']['iopub.status.idle'] = timestamp() elif content['execution_state'] == 'busy': cell['metadata']['execution']['iopub.status.busy'] = timestamp() elif msg_type == 'execute_input': cell['metadata']['execution']['iopub.execute_input'] = timestamp() if msg_type == 'status': if content['execution_state'] == 'idle': raise CellExecutionComplete() elif msg_type == 'clear_output': self.clear_output(cell.outputs, msg, cell_index) elif msg_type.startswith('comm'): self.handle_comm_msg(cell.outputs, msg, cell_index) # Check for remaining messages we don't process elif msg_type not in ['execute_input', 'update_display_data']: # Assign output as our processed "result" return self.output(cell.outputs, msg, display_id, cell_index) return None def output( self, outs: t.List, msg: t.Dict, display_id: str, cell_index: int ) -> t.Optional[t.List]: msg_type = msg['msg_type'] parent_msg_id = msg['parent_header'].get('msg_id') if self.output_hook_stack[parent_msg_id]: # if we have a hook registered, it will overrride our # default output behaviour (e.g. 
OutputWidget)
            hook = self.output_hook_stack[parent_msg_id][-1]
            hook.output(outs, msg, display_id, cell_index)
            return None

        try:
            out = output_from_msg(msg)
        except ValueError:
            self.log.error("unhandled iopub msg: " + msg_type)
            return None

        # A pending clear_output(wait=True) is applied just before new output.
        if self.clear_before_next_output:
            self.log.debug('Executing delayed clear_output')
            outs[:] = []
            self.clear_display_id_mapping(cell_index)
            self.clear_before_next_output = False

        if display_id:
            # record output index in:
            #   _display_id_map[display_id][cell_idx]
            cell_map = self._display_id_map.setdefault(display_id, {})
            output_idx_list = cell_map.setdefault(cell_index, [])
            output_idx_list.append(len(outs))

        outs.append(out)

        return out

    def clear_output(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
        """Handle a clear_output message: clear now, or defer until the next output."""
        content = msg['content']

        parent_msg_id = msg['parent_header'].get('msg_id')
        if self.output_hook_stack[parent_msg_id]:
            # if we have a hook registered, it will override our
            # default clear_output behaviour (e.g. OutputWidget)
            hook = self.output_hook_stack[parent_msg_id][-1]
            hook.clear_output(outs, msg, cell_index)
            return

        if content.get('wait'):
            self.log.debug('Wait to clear output')
            self.clear_before_next_output = True
        else:
            self.log.debug('Immediate clear output')
            outs[:] = []
            self.clear_display_id_mapping(cell_index)

    def clear_display_id_mapping(self, cell_index: int) -> None:
        """Forget recorded display-output indices for a cell whose outputs were cleared."""
        for display_id, cell_map in self._display_id_map.items():
            if cell_index in cell_map:
                cell_map[cell_index] = []

    def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
        """Record widget state/buffers and dispatch comm_open/comm_msg to handlers."""
        content = msg['content']
        data = content['data']
        if self.store_widget_state and 'state' in data:  # ignore custom msg'es
            self.widget_state.setdefault(content['comm_id'], {}).update(data['state'])
            if 'buffer_paths' in data and data['buffer_paths']:
                comm_id = content['comm_id']
                if comm_id not in self.widget_buffers:
                    self.widget_buffers[comm_id] = {}
                # for each comm, the path uniquely identifies a buffer
                new_buffers: t.Dict[t.Tuple[str, ...], t.Dict[str, str]] = {
tuple(k["path"]): k for k in self._get_buffer_data(msg) } self.widget_buffers[comm_id].update(new_buffers) # There are cases where we need to mimic a frontend, to get similar behaviour as # when using the Output widget from Jupyter lab/notebook if msg['msg_type'] == 'comm_open': target = msg['content'].get('target_name') handler = self.comm_open_handlers.get(target) if handler: comm_id = msg['content']['comm_id'] comm_object = handler(msg) if comm_object: self.comm_objects[comm_id] = comm_object else: self.log.warning(f'No handler found for comm target {target!r}') elif msg['msg_type'] == 'comm_msg': content = msg['content'] comm_id = msg['content']['comm_id'] if comm_id in self.comm_objects: self.comm_objects[comm_id].handle_msg(msg) def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]: """Serialize a widget state, following format in @jupyter-widgets/schema.""" return { 'model_name': state.get('_model_name'), 'model_module': state.get('_model_module'), 'model_module_version': state.get('_model_module_version'), 'state': state, } def _get_buffer_data(self, msg: t.Dict) -> t.List[t.Dict[str, str]]: encoded_buffers = [] paths = msg['content']['data']['buffer_paths'] buffers = msg['buffers'] for path, buffer in zip(paths, buffers): encoded_buffers.append( { 'data': base64.b64encode(buffer).decode('utf-8'), 'encoding': 'base64', 'path': path, } ) return encoded_buffers def register_output_hook(self, msg_id: str, hook: OutputWidget) -> None: """Registers an override object that handles output/clear_output instead. 
Multiple hooks can be registered, where the last one will be used (stack based)
        """
        # mimics
        # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#registermessagehook
        self.output_hook_stack[msg_id].append(hook)

    def remove_output_hook(self, msg_id: str, hook: OutputWidget) -> None:
        """Unregisters an override object that handles output/clear_output instead"""
        # mimics
        # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#removemessagehook
        # Stack discipline: the hook being removed must be the most recent one.
        removed_hook = self.output_hook_stack[msg_id].pop()
        assert removed_hook == hook

    def on_comm_open_jupyter_widget(self, msg: t.Dict):
        """comm_open handler: instantiate a registered widget mimic, or return None."""
        content = msg['content']
        data = content['data']
        state = data['state']
        comm_id = msg['content']['comm_id']
        module = self.widget_registry.get(state['_model_module'])
        if module:
            widget_class = module.get(state['_model_name'])
            if widget_class:
                return widget_class(comm_id, state, self.kc, self)


def execute(
    nb: NotebookNode,
    cwd: t.Optional[str] = None,
    km: t.Optional[KernelManager] = None,
    **kwargs
) -> NotebookClient:
    """Execute a notebook's code, updating outputs within the notebook object.

    This is a convenient wrapper around NotebookClient. It returns the modified notebook object.

    Parameters
    ----------
    nb : NotebookNode
        The notebook object to be executed
    cwd : str, optional
        If supplied, the kernel will run in this directory
    km : AsyncKernelManager, optional
        If supplied, the specified kernel manager will be used for code execution.
    kwargs :
        Any other options for NotebookClient, e.g.
timeout, kernel_name """ resources = {} if cwd is not None: resources['metadata'] = {'path': cwd} return NotebookClient(nb=nb, resources=resources, km=km, **kwargs).execute() nbclient-0.5.6/nbclient/exceptions.py000066400000000000000000000061061414342127300176550ustar00rootroot00000000000000from typing import Dict from nbformat import NotebookNode class CellControlSignal(Exception): """ A custom exception used to indicate that the exception is used for cell control actions (not the best model, but it's needed to cover existing behavior without major refactors). """ pass class CellTimeoutError(TimeoutError, CellControlSignal): """ A custom exception to capture when a cell has timed out during execution. """ @classmethod def error_from_timeout_and_cell(cls, msg: str, timeout: int, cell: NotebookNode): if cell and cell.source: src_by_lines = cell.source.strip().split("\n") src = ( cell.source if len(src_by_lines) < 11 else f"{src_by_lines[:5]}\n...\n{src_by_lines[-5:]}" ) else: src = "Cell contents not found." return cls(timeout_err_msg.format(timeout=timeout, msg=msg, cell_contents=src)) class DeadKernelError(RuntimeError): pass class CellExecutionComplete(CellControlSignal): """ Used as a control signal for cell execution across execute_cell and process_message function calls. Raised when all execution requests are completed and no further messages are expected from the kernel over zeromq channels. """ pass class CellExecutionError(CellControlSignal): """ Custom exception to propagate exceptions that are raised during notebook execution to the caller. This is mostly useful when using nbconvert as a library, since it allows to deal with failures gracefully. 
""" def __init__(self, traceback: str, ename: str, evalue: str) -> None: super().__init__(traceback) self.traceback = traceback self.ename = ename self.evalue = evalue def __reduce__(self) -> tuple: return type(self), (self.traceback, self.ename, self.evalue) def __str__(self) -> str: s = self.__unicode__() if not isinstance(s, str): s = s.encode('utf8', 'replace') return s def __unicode__(self) -> str: return self.traceback @classmethod def from_cell_and_msg(cls, cell: NotebookNode, msg: Dict): """Instantiate from a code cell object and a message contents (message is either execute_reply or error) """ tb = '\n'.join(msg.get('traceback', []) or []) return cls( exec_err_msg.format( cell=cell, traceback=tb, ename=msg.get('ename', ''), evalue=msg.get('evalue', ''), ), ename=msg.get('ename', ''), evalue=msg.get('evalue', ''), ) exec_err_msg: str = """\ An error occurred while executing the following cell: ------------------ {cell.source} ------------------ {traceback} {ename}: {evalue} """ timeout_err_msg: str = """\ A cell timed out while it was being executed, after {timeout} seconds. The message was: {msg}. Here is a preview of the cell contents: ------------------- {cell_contents} ------------------- """ nbclient-0.5.6/nbclient/jsonutil.py000066400000000000000000000147061414342127300173500ustar00rootroot00000000000000"""Utilities to manipulate JSON objects.""" # NOTE: this is a copy of ipykernel/jsonutils.py (+blackified) # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
import math
import numbers
import re
import types
from binascii import b2a_base64
from datetime import datetime
from typing import Dict

from ipython_genutils import py3compat
from ipython_genutils.py3compat import iteritems, unicode_type

# Name of the iterator-advance method ('__next__' on py3, 'next' on py2);
# used below to duck-type iterables.
next_attr_name = '__next__' if py3compat.PY3 else 'next'

# -----------------------------------------------------------------------------
# Globals and constants
# -----------------------------------------------------------------------------

# timestamp formats
ISO8601 = "%Y-%m-%dT%H:%M:%S.%f"
ISO8601_PAT = re.compile(
    r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(\.\d{1,6})?Z?([\+\-]\d{2}:?\d{2})?$"
)

# holy crap, strptime is not threadsafe.
# Calling it once at import seems to help.
datetime.strptime("1", "%d")

# -----------------------------------------------------------------------------
# Classes and functions
# -----------------------------------------------------------------------------

# constants for identifying png/jpeg data
PNG = b'\x89PNG\r\n\x1a\n'
# front of PNG base64-encoded
PNG64 = b'iVBORw0KG'
JPEG = b'\xff\xd8'
# front of JPEG base64-encoded
JPEG64 = b'/9'
# constants for identifying gif data
GIF_64 = b'R0lGODdh'
GIF89_64 = b'R0lGODlh'
# front of PDF base64-encoded
PDF64 = b'JVBER'


def encode_images(format_dict: Dict) -> Dict[str, str]:
    """b64-encodes images in a displaypub format dict

    Perhaps this should be handled in json_clean itself?

    Parameters
    ----------
    format_dict : dict
        A dictionary of display data keyed by mime-type

    Returns
    -------
    format_dict : dict
        A copy of the same dictionary,
        but binary image data ('image/png', 'image/jpeg' or 'application/pdf')
        is base64-encoded.
    """

    # no need for handling of ambiguous bytestrings on Python 3,
    # where bytes objects always represent binary data and thus
    # base64-encoded.
    if py3compat.PY3:
        return format_dict
    # Python 2 only from here on: copy and encode each known binary mime-type.
    encoded = format_dict.copy()

    pngdata = format_dict.get('image/png')
    if isinstance(pngdata, bytes):
        # make sure we don't double-encode
        if not pngdata.startswith(PNG64):
            pngdata = b2a_base64(pngdata)
        encoded['image/png'] = pngdata.decode('ascii')

    jpegdata = format_dict.get('image/jpeg')
    if isinstance(jpegdata, bytes):
        # make sure we don't double-encode
        if not jpegdata.startswith(JPEG64):
            jpegdata = b2a_base64(jpegdata)
        encoded['image/jpeg'] = jpegdata.decode('ascii')

    gifdata = format_dict.get('image/gif')
    if isinstance(gifdata, bytes):
        # make sure we don't double-encode
        if not gifdata.startswith((GIF_64, GIF89_64)):
            gifdata = b2a_base64(gifdata)
        encoded['image/gif'] = gifdata.decode('ascii')

    pdfdata = format_dict.get('application/pdf')
    if isinstance(pdfdata, bytes):
        # make sure we don't double-encode
        if not pdfdata.startswith(PDF64):
            pdfdata = b2a_base64(pdfdata)
        encoded['application/pdf'] = pdfdata.decode('ascii')

    return encoded


def json_clean(obj):
    """Clean an object to ensure it's safe to encode in JSON.

    Atomic, immutable objects are returned unmodified. Sets and tuples are
    converted to lists, lists are copied and dicts are also copied.

    Note: dicts whose keys could cause collisions upon encoding (such as a dict
    with both the number 1 and the string '1' as keys) will cause a ValueError
    to be raised.

    Parameters
    ----------
    obj : any python object

    Returns
    -------
    out : object
        A version of the input which will not cause an encoding error when
        encoded as JSON. Note that this function does not *encode* its inputs,
        it simply sanitizes it so that there will be no encoding errors later.
    """
    # types that are 'atomic' and ok in json as-is.
    atomic_ok = (unicode_type, type(None))

    # containers that we need to convert into lists
    container_to_list = (tuple, set, types.GeneratorType)

    # Since bools are a subtype of Integrals, which are a subtype of Reals,
    # we have to check them in that order.
    if isinstance(obj, bool):
        return obj

    if isinstance(obj, numbers.Integral):
        # cast int to int, in case subclasses override __str__ (e.g. boost enum, #4598)
        return int(obj)

    if isinstance(obj, numbers.Real):
        # cast out-of-range floats to their reprs
        if math.isnan(obj) or math.isinf(obj):
            return repr(obj)
        return float(obj)

    if isinstance(obj, atomic_ok):
        return obj

    if isinstance(obj, bytes):
        if py3compat.PY3:
            # unanmbiguous binary data is base64-encoded
            # (this probably should have happened upstream)
            return b2a_base64(obj).decode('ascii')
        else:
            # Python 2 bytestr is ambiguous,
            # needs special handling for possible binary bytestrings.
            # imperfect workaround: if ascii, assume text.
            # otherwise assume binary, base64-encode (py3 behavior).
            try:
                return obj.decode('ascii')
            except UnicodeDecodeError:
                return b2a_base64(obj).decode('ascii')

    # Duck-typed iterables (anything with __iter__ and a next method) are
    # materialized into lists before the list branch below.
    if isinstance(obj, container_to_list) or (
        hasattr(obj, '__iter__') and hasattr(obj, next_attr_name)
    ):
        obj = list(obj)

    if isinstance(obj, list):
        return [json_clean(x) for x in obj]

    if isinstance(obj, dict):
        # First, validate that the dict won't lose data in conversion due to
        # key collisions after stringification. This can happen with keys like
        # True and 'true' or 1 and '1', which collide in JSON.
nkeys = len(obj)
        nkeys_collapsed = len(set(map(unicode_type, obj)))
        if nkeys != nkeys_collapsed:
            raise ValueError(
                'dict cannot be safely converted to JSON: '
                'key collision would lead to dropped values'
            )
        # If all OK, proceed by making the new dict that will be json-safe
        out = {}
        for k, v in iteritems(obj):
            out[unicode_type(k)] = json_clean(v)
        return out
    if isinstance(obj, datetime):
        return obj.strftime(ISO8601)

    # we don't understand it, it's probably an unserializable object
    raise ValueError("Can't clean for JSON: %r" % obj)
nbclient-0.5.6/nbclient/output_widget.py000066400000000000000000000076561414342127300204010ustar00rootroot00000000000000from typing import Any, Dict, List, Optional

from jupyter_client.client import KernelClient
from nbformat.v4 import output_from_msg

from .jsonutil import json_clean


class OutputWidget:
    """This class mimics a front end output widget"""

    def __init__(
        self, comm_id: str, state: Dict[str, Any], kernel_client: KernelClient, executor
    ) -> None:
        self.comm_id: str = comm_id
        self.state: Dict[str, Any] = state
        self.kernel_client: KernelClient = kernel_client
        # The NotebookClient driving execution (used for hook (un)registration
        # and to mirror state into executor.widget_state for the tests).
        self.executor = executor
        self.topic: bytes = ('comm-%s' % self.comm_id).encode('ascii')
        self.outputs: List = self.state['outputs']
        self.clear_before_next_output: bool = False

    def clear_output(self, outs: List, msg: Dict, cell_index: int) -> None:
        """Handle a clear_output directed at this widget: clear now or defer."""
        self.parent_header = msg['parent_header']
        content = msg['content']
        if content.get('wait'):
            self.clear_before_next_output = True
        else:
            self.outputs = []
            # sync back the state to the kernel
            self.sync_state()
            if hasattr(self.executor, 'widget_state'):
                # sync the state to the nbconvert state as well, since that is used for testing
                self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

    def sync_state(self) -> None:
        # Push the current outputs back to the kernel-side widget model.
        state = {'outputs': self.outputs}
        msg = {'method': 'update', 'state': state, 'buffer_paths': []}
        self.send(msg)

    def _publish_msg(
        self,
        msg_type: str,
        data: Optional[Dict] = None,
        metadata: Optional[Dict] = None,
        buffers: Optional[List] = None,
        **keys
    ) -> None:
        """Helper for sending a comm message on IOPub"""
        # NOTE(review): despite the docstring, the message is sent on the
        # shell channel below; also `buffers` is accepted but never forwarded
        # -- confirm whether either is intentional.
        data = {} if data is None else data
        metadata = {} if metadata is None else metadata
        content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
        msg = self.kernel_client.session.msg(
            msg_type, content=content, parent=self.parent_header, metadata=metadata
        )
        self.kernel_client.shell_channel.send(msg)

    def send(
        self,
        data: Optional[Dict] = None,
        metadata: Optional[Dict] = None,
        buffers: Optional[List] = None,
    ) -> None:
        # Thin wrapper: everything goes out as a 'comm_msg'.
        self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)

    def output(self, outs: List, msg: Dict, display_id: str, cell_index: int) -> None:
        """Append (or merge) an output routed to this widget and sync state."""
        if self.clear_before_next_output:
            self.outputs = []
            self.clear_before_next_output = False
        self.parent_header = msg['parent_header']
        output = output_from_msg(msg)

        if self.outputs:
            # try to coalesce/merge output text
            last_output = self.outputs[-1]
            if (
                last_output['output_type'] == 'stream'
                and output['output_type'] == 'stream'
                and last_output['name'] == output['name']
            ):
                last_output['text'] += output['text']
            else:
                self.outputs.append(output)
        else:
            self.outputs.append(output)
        self.sync_state()
        if hasattr(self.executor, 'widget_state'):
            # sync the state to the nbconvert state as well, since that is used for testing
            self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

    def set_state(self, state: Dict) -> None:
        # A state update carrying 'msg_id' (un)registers this widget as the
        # output hook for that execution: truthy registers, falsy removes.
        if 'msg_id' in state:
            msg_id = state.get('msg_id')
            if msg_id:
                self.executor.register_output_hook(msg_id, self)
                self.msg_id = msg_id
            else:
                self.executor.remove_output_hook(self.msg_id, self)
                self.msg_id = msg_id

    def handle_msg(self, msg: Dict) -> None:
        """Entry point for comm_msg messages targeted at this widget."""
        content = msg['content']
        comm_id = content['comm_id']
        assert comm_id == self.comm_id
        data = content['data']
        if 'state' in data:
            self.set_state(data['state'])
nbclient-0.5.6/nbclient/tests/000077500000000000000000000000001414342127300162615ustar00rootroot00000000000000nbclient-0.5.6/nbclient/tests/__init__.py000066400000000000000000000000001414342127300203600ustar00rootroot00000000000000nbclient-0.5.6/nbclient/tests/base.py000066400000000000000000000036361414342127300175550ustar00rootroot00000000000000import unittest

from nbformat import v4 as nbformat


class NBClientTestsBase(unittest.TestCase):
    """Shared helpers for NotebookClient test cases."""

    def build_notebook(self, with_json_outputs=False):
        """Build a notebook in memory for use with NotebookClient tests"""
        # One output of every flavour the client must handle; trailing letter
        # comments identify individual outputs in the tests that consume them.
        outputs = [
            nbformat.new_output("stream", name="stdout", text="a"),
            nbformat.new_output("display_data", data={'text/plain': 'b'}),
            nbformat.new_output("stream", name="stdout", text="c"),
            nbformat.new_output("stream", name="stdout", text="d"),
            nbformat.new_output("stream", name="stderr", text="e"),
            nbformat.new_output("stream", name="stderr", text="f"),
            nbformat.new_output("display_data", data={'image/png': 'Zw=='}),  # g
            nbformat.new_output("display_data", data={'application/pdf': 'aA=='}),  # h
        ]
        if with_json_outputs:
            outputs.extend(
                [
                    nbformat.new_output("display_data", data={'application/json': [1, 2, 3]}),  # j
                    nbformat.new_output(
                        "display_data", data={'application/json': {'a': 1, 'c': {'b': 2}}}
                    ),  # k
                    nbformat.new_output("display_data", data={'application/json': 'abc'}),  # l
                    nbformat.new_output("display_data", data={'application/json': 15.03}),  # m
                ]
            )

        cells = [
            nbformat.new_code_cell(source="$ e $", execution_count=1, outputs=outputs),
            nbformat.new_markdown_cell(source="$ e $"),
        ]

        return nbformat.new_notebook(cells=cells)

    def build_resources(self):
        """Build an empty resources dictionary."""
        return {'metadata': {}}

    @classmethod
    def merge_dicts(cls, *dict_args):
        # Because this is annoying to do inline
        outcome = {}
        for d in dict_args:
            outcome.update(d)
        return outcome
nbclient-0.5.6/nbclient/tests/conftest.py000066400000000000000000000004061414342127300204600ustar00rootroot00000000000000import os

# This is
important for ipykernel to show the same string # instead of randomly generated file names in outputs. # See: https://github.com/ipython/ipykernel/blob/360685c6/ipykernel/compiler.py#L50-L55 os.environ["IPYKERNEL_CELL_NAME"] = "" nbclient-0.5.6/nbclient/tests/fake_kernelmanager.py000066400000000000000000000014341414342127300224360ustar00rootroot00000000000000from jupyter_client.manager import AsyncKernelManager class FakeCustomKernelManager(AsyncKernelManager): expected_methods = {'__init__': 0, 'client': 0, 'start_kernel': 0} def __init__(self, *args, **kwargs): self.log.info('FakeCustomKernelManager initialized') self.expected_methods['__init__'] += 1 super().__init__(*args, **kwargs) async def start_kernel(self, *args, **kwargs): self.log.info('FakeCustomKernelManager started a kernel') self.expected_methods['start_kernel'] += 1 return await super().start_kernel(*args, **kwargs) def client(self, *args, **kwargs): self.log.info('FakeCustomKernelManager created a client') self.expected_methods['client'] += 1 return super().client(*args, **kwargs) nbclient-0.5.6/nbclient/tests/files/000077500000000000000000000000001414342127300173635ustar00rootroot00000000000000nbclient-0.5.6/nbclient/tests/files/Autokill.ipynb000066400000000000000000000012271414342127300222140ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import os\n", "import signal\n", "pid = os.getpid()\n", "os.kill(pid, signal.SIGTERM)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.3" } }, "nbformat": 4, "nbformat_minor": 4 } nbclient-0.5.6/nbclient/tests/files/Check History in 
Memory.ipynb000066400000000000000000000007021414342127300247040ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "from IPython import get_ipython" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "scrolled": true }, "outputs": [], "source": [ "ip = get_ipython()\n", "assert ip.history_manager.hist_file == ':memory:'" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Clear Output.ipynb000066400000000000000000000072221414342127300227400ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "from __future__ import print_function\n", "from IPython.display import clear_output" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "9\n" ] } ], "source": [ "for i in range(10):\n", " clear_output()\n", " print(i)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "print(\"Hello world\")\n", "clear_output()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Hello world" ] } ], "source": [ "print(\"Hello world\", end='')\n", "clear_output(wait=True) # no output after this" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "world" ] } ], "source": [ "print(\"Hello\", end='')\n", "clear_output(wait=True) # here we have new output after wait=True\n", "print(\"world\", end='')" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'Hello world'" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "handle0 = display(\"Hello world\", display_id=\"id0\")" ] }, { "cell_type": "code", "execution_count": 7, 
"metadata": {}, "outputs": [ { "data": { "text/plain": [ "'world'" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "handle1 = display(\"Hello\", display_id=\"id1\")" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "handle1.update('world')" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "handle2 = display(\"Hello world\", display_id=\"id2\")\n", "clear_output() # clears all output, also with display_ids" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'Hello world'" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "handle3 = display(\"Hello world\", display_id=\"id3\")\n", "clear_output(wait=True)" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "world" ] } ], "source": [ "handle4 = display(\"Hello\", display_id=\"id4\")\n", "clear_output(wait=True)\n", "print('world', end='')" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [], "source": [ "handle4.update('Hello world') # it is cleared, so it should not show up in the above cell" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" } }, "nbformat": 4, "nbformat_minor": 1 } nbclient-0.5.6/nbclient/tests/files/Disable Stdin.ipynb000066400000000000000000000005231414342127300230330ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "try:\n", " input = raw_input\n", "except:\n", " pass\n", "\n", "name = input(\"name: \")" ] } ], "metadata": 
{}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/Empty Cell.ipynb000066400000000000000000000024131414342127300223640ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "Test that executing skips over an empty cell." ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'Code 1'" ] }, "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "\"Code 1\"" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'Code 2'" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "\"Code 2\"" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.2" } }, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Error.ipynb000066400000000000000000000024131414342127300215170ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "d200673b", "metadata": {}, "outputs": [ { "ename": "ZeroDivisionError", "evalue": "division by zero", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mZeroDivisionError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m/tmp/ipykernel_1277493/182040962.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;36m0\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", 
"\u001b[0;31mZeroDivisionError\u001b[0m: division by zero" ] } ], "source": [ "0/0" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.5" } }, "nbformat": 4, "nbformat_minor": 5 } nbclient-0.5.6/nbclient/tests/files/Factorials.ipynb000066400000000000000000000012471414342127300225210ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [], "source": [ "i, j = 1, 1" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2\n", "3\n", "5\n", "8\n", "13\n", "21\n", "34\n", "55\n", "89\n", "144\n" ] } ], "source": [ "for m in range(10):\n", " i, j = j, i + j\n", " print(j)" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/HelloWorld.ipynb000066400000000000000000000005451414342127300225050ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Hello World\n" ] } ], "source": [ "print(\"Hello World\")" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/Inline Image.ipynb000066400000000000000000000337751414342127300226660ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [], "source": [ "from IPython.display import Image" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "data": { "image/png": [ 
"iVBORw0KGgoAAAANSUhEUgAAAMgAAABQCAYAAABcbTqwAAAABHNCSVQICAgIfAhkiAAAABl0RVh0\n", "U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAACAASURBVHic7Z15eJNV2vDvJ0/2vUmapk3b\n", "dG9pKYWyFARkd6GojOI2vs6HfjqKI17i+LnNfM68LpfMq4KjM6/44jbgqKCfKLIoUFrWUigglO5N\n", "1yxttmZfnzzP90dImrZpmqY75nddVZJnO0nOfc597u0gBEFUQ4wYMUJCmuwGxIgxlYkJSIwYYYgJ\n", "SIwYYYgJSIwYYYgJSIxh8Xg8SHV1Na+srEw42W2ZaMiT3YAYU5vLly9zdu7cmeXxeEh5eXnGVatW\n", "6Se7TRPJlBGQbqONcr5Rzdaa7BSDxUExWl1ko81JNtmcFLPNRTbZnWTMi5PYdCrGYVAxLouK8ehU\n", "jMemYVwmDeOz6Vg8j+lZXJBqEXAY2GR/nhsFq9VK8Xg8v1pNY1IF5Gy9in2yVsG72NLD69QYGQAI\n", "ABAABMD1/wx4TYDR6qQYrQ4KaAGACDoHfK9JCEIUyESWFUXpvQ+sKNSjKImAGDGiZFIExGBxkl/b\n", "U5laWa+O6xMEJMwVkfdxnCCQmnYNt6ZNwz14vkn0xsaVbRlJQteoGnyDg+M4kEi/2kkiLBP+rdR1\n", "GRgPvnMov7JBHTfez5KrDKyN/7Uv/0h1M2+8nzXdwDAMOX/+PO/9999P37lzp2yy2zNVmXAB+eDH\n", "y1KjzUUZ+D4SbgIZBS7MS9r69ck0lc486Jm/ZiorK+N27tyZVVNTI3C5XOhkt2eqMqECclmuYV5q\n", "1fQbzedlJ8C7jy6DY2/eB6/cVwLpCWM/2NscHvKrn5el4fiY3zrGDc6ECkhVcw8n+DWXQYV3//dy\n", "WFKQDGw6Fe5amA2v/ceScXl2TVsPt6FTwxiXm8e4YZlQATFYHP3UnNIFGUCn9LcT5EgFUJgWPy7P\n", "r2nvZo7LjWPcsEyoFavX6pzUdUB9h5YFAJPq6HI6nSStVkvV6XQUp9OJZmZm2sVisTvS6+VyOUMm\n", "kznJZPKUMV/jOA4EQSAoio64TV6vF+ns7KSr1Wp6YmKiMzU11RnNfQB8hge1Wk1Tq9U0j8dD4vF4\n", "nszMTDuDwYhauZ5QAbG7sH4z1o9VcnjytllAp/Y1o1FhgJp27bg8X2uyT4qAWiwW9OTJk8LKykpR\n", "T0/PIDWPy+W6c3JyzBs3buyi0Wghf8y9e/cmXrp0SaDX6+nbtm37hcPheMM986233spWKpUsAIBN\n", "mza1FBQUWAEADh8+HH/w4EGp1+sNmEVqa2v5Tz/99Gz/661bt9aw2ewh749hGFJWViZsbGzkajQa\n", "ul6vpxEEARKJxDF//nzD7bffrhnObFxTU8P+/vvvpSqViolhff2CTCbjSUlJ9vXr1ysLCwutYW8C\n", "PgE7fvy4sLy8PEGn09EIguhn7mEymdiaNWvUt956q5ZCoYxY8CbVUWh1euCZ/zkO9y/Ng7mZYjhy\n", "uQP2nqqfzCaNOfv27ZP8/PPPiV6vN9AJ2Gy2RywWOx0OB6pWq5lms5laXV0t0mq19GeffbYlVOc8\n", "fvx4YnCnHg6Xy4X6rVPB1+E4jmAYRiKIvr7ify+C2yLHjx8XHj58ONFoNNIGHlQqlSylUsmqra3l\n", "bdmypWWoDrl///6EAwcOSP2dmU6nYxKJxKlUKpkej4fU2dnJ/uCDD3LWrl2rXL9+fU+oe+A4DqdO\n", "nRIcOnQoyWAw0FAUJSQSiUMsFjvtdjtZrVYzrFYrxW63k3/44YeUioqKhOeff75RIpFEPFsDTIFQ\n", 
"kyttWrjSph3sFZ/m4DgOO3bsSLt8+bIQAEAsFjvWrVunKiwstAQLgF6vp/z973/PUqvVzI6ODvbb\n", "b7+d8+qrrzZEq2YMR2lpqaa0tFRz6tQpwa5du9IBAAoLC3ufeeaZ1uGubWho4DU0NPAQBCHmzp2r\n", "z8/PN+Xl5dnsdjt69uxZQXl5uQQAoLm5mfvDDz9INmzYoB54j48//ji1qqoqHgBAIBC4Hnnkkbac\n", "nBwbiUQCj8eD1NTUcHbv3p1mtVopBw8eTO7u7mY8+eST7cH3aG5uZn7++edpGo2GQSaT8aVLl/bc\n", "dddd3TweLxBihGEYcuDAAfGhQ4ekBEEgJpOJumvXrtQXXnihZSTf16QLyI2Kx+Mh+YUDAOD111+v\n", "C6V2CIVCz3PPPdf82muv5VssFopKpWKePn06btmyZYYJbXCEpKenWx588MGu9PR0R/D7aWlpSgRB\n", "iOPHjycCAJSVlUnWrl2rYTKZgcGgvr6e5ReO+Ph4x4svvtgU3KkpFApRXFxs5vP5zdu3b891Op3o\n", "xYsXhTU1NfrCwkKL/7yGhga2RuOzSBYXF+t/97vfKQa2k0wmE+vXr+9xOByov00tLS1cl8tFGkqN\n", "DUUsvmCCCKeT8/l8bN68eQHjQXl5uXhCGjVCMjMzza+88krTQOHwc9NNNwWEGsMwUkdHB93/Gsdx\n", "2LNnT4r/9e23395vxA8mIyPDUVpaqvS/3rt3bzIepRPrjjvuCKhoBEEgKpVqkGoYjpiATBEWLlwY\n", "6FxKpZLV2Ng45UzSFAolbC+VyWRODofj8b9WqVQBAampqeH4jQZsNtuzaNGi3nD3WrJkiYFMJuMA\n", "AN3d3cyLFy9G5UFms9leNpsdaBOO4yOK2ZhQFeuvDy7ssLuwrtBHh1e5d5VdTThwrmVKjq6jJSMj\n", "w4GiKO5fzHd1dTFyc3Ptk92ukSKTyazXrl2LA/Cpmf73Ozs7A9a7pKQk+3Bmajab7U1LS7O2tLRw\n", "AXzfx/z5803RtIlGo3mtVmtUFswJFZB4HnNUeRpsBjWsaXO6w2QyMYvFQgUAMBgM1MluTzRQqdSQ\n", "s4xarQ4ICI/H84Q6ZyBcLjdwXnd3Nz3cuePFmAiIXG2inWpQczu1FnqnzkLXmx3UgBmRGJjXAX2v\n", "g/8/6Lygk68fs9idYyPQ5vNs6Ngm6/9sAgAQAGqCC5jZdmDNsIFgpRmQ8bEmhYLFYgUExGg0TksB\n", "GQqtVhvo4JEKSLC6ptFopp+AdGgt1A+P1CZV1CgFfQ6aAR18yI4fyXkhjo0FXjsJ3N30wQICAC4V\n", "HSyXfPpu91dWyHqzFWiJEf2goyXYb2CxWG4oCyOFQgnM/h6PJ6J1QLCKNtTMNN5EvUjfc0YufHD7\n", "sZnlNUohQYTNdpq+2BrYcG1jPlivTUiQo9VqDQhFsHpxIyASiQJJa2ZzZKkHwYOEUDg5SW9RCciR\n", "Kwreeweuyrw4cWMKRjBeCxla30gDInIvdrQELyRFItGIPL5TnYSEhEAHj1R9DP4+EhISnOPRruEY\n", "sYBUy7Ws17+5mIHfqLNGKJwdTFB8PK7WM41GQw1WKeLj44ccMV0u17Qzz6empgZ8J11dXSyHwxH2\n", "M3i9XiR43ZKSkhLS9zLejPiL/vDnOqnbi0+7H2jUdO9JBNw9boPCyZMnA153CoWCFxUVmYOP0+n0\n", "gAUw2GQaLURwMNYEUFhYaJFIJHYAnxOxurqaH+78y5cvc/0ziEQiccyZM8cc7vzxYkQdvUllol/r\n", "6uUMf+ZomYLxWLgdhd4T3KgvD+MJtlqt6Llz50T+1yUlJdqB0bopKSkBn8iJEyeGTZgZGNU6EIFA\n", 
"EFjjGI3GCYlyXrt2bSA269ChQ4lWq3XIVN+KiorA97Fu3TrVZBWVGNFT95yRj08m0wTBpJF9nc5r\n", "jy4HW3806kITr7zySkF5eblgYESu3W5Ht23blmUymagAvtmjtLR0UATr7NmzA57nuro6/q5du5JD\n", "qSk1NTXsN998M1elUoX1xMtksoDKolarmd3d3eNuVi4pKTGmpqZaAQB0Oh39ww8/TB84cOA4Dp9+\n", "+mlKY2MjHwAgIyPDUlJSYhzvtg1FxKZEHAcor1MJxrMxoyKCSSdbKvJ1CnuUYRymaj4QRFQVJvR6\n", "Pf3LL79M379/f7JUKrXHx8c7tVotXS6Xc/yh5kwmE9u0aVOLSCQaZMFatWqVXi6Xsy9cuCACADh1\n", "6lTCpUuXBOnp6VaBQOA2Go0UhULBMhgMg2KNEAQZ9O1wOBzvihUrusvLyyUYhpG2b9+eM2fOHENB\n", "QYG5oKDAOh4jNolEghdeeKF5x44dadeuXYtramrivfzyyzOzs7PN6enpNpVKxWhubuao1WomgC8Q\n", "8bHHHusY84aMgIgFpF1rptmc2LS2zRekiX1qir2JFdUNcDsKjlYaMDNHbHLcsmVL4+HDhxMaGhr4\n", "jY2NvMbGxn6xRUKh0Ll58+YWqVQ65L0fffTRToFA4C4rK5NgGEay2WwUf1iHH6lUaisuLu5tbGzk\n", "NDU18QAAhsqou//++1Verxc5c+ZMvMFgoJWVlSWWlZUlRpKQFS00Gg3fvHlz6zfffJNUUVGRYDAY\n", "aFVVVfH+KF8AACqV6l2zZo16qFyQiSTiDn+1XR9dp5oisBhkrChTYgOCAHC2Rx8IaL3GjEZA8vPz\n", "rfn5+VaFQkFvaGhgaTQautVqJctkMltBQYE1OTl5WDMmmUwmNmzYoF6xYoWutraW7Q+/4PP5nri4\n", "OI9MJnP403ffeeedwFoxOOQ8GBRFiYcfflixdu3anqamJlZPTw/N5XKhdDo9IFCFhYXmzZs3NwEA\n", "cDicYUOF1q1b17148WI9AEBSUlLIz0QikeD+++9XbdiwQd3W1sZobm5mdXd305OSkhw5OTk2mUzm\n", "CDeDLVy40OhXEYPXUkOxcePGdrfbTQrXpqGIWEDqlMYpF106Ev5w16IuNoOGg6GMB7gj+jpQtnoW\n", "wF1hI1HDkZyc7IxEGMIhFAo9N998c9g26PX6gKrFYrHCzgZCodCzaNGikHo+n8/H+Hy+JdSxUKSk\n", "pDhTUlIi+nwoihJZWVn2rKysEQVlxsfHu+Pj4yP2E+Xl5dlGcv9gIlY0WzWWaVsyZ26u1HjP0gID\n", "eIwoKD4YXRVBe/OUHyhwHIfe3t6AgITLL48RnohnkF7r4GqIE8IoLb4rZqfr//TbZb4Q+863UwEz\n", "je5zuDVTPohQp9NR/dYyiURiH6/03V8DEQuI1emZVgt0cRzLtemO+Yq1C3J8qoP6Xwlgqhy9FQ4z\n", "T/nvoaurK+CBzs/PjyqHIoaPUQjI2AxKCAAkCdjO7CS+LUPCc6AoQvTdeuAzCICQ9hhfBDAZJREZ\n", "SXHOWWliO5/D8KkVLjUFOramga0maidfP3AnCl4nCVD6lCxkajQayV9//XUqgM+8u2DBgqjXSzEi\n", "FBCvF0cw79gGJgo5DPczpbO7FhckWTgM2tjryI52GpgrudD9b+moFuWh8OjIgCZPqWBCj8eDlJWV\n", "iY4ePSoxm81UAIC77767KzMzc1JimG4UIhIQFCURdArqdXq8Y9LRVhel6l7aME8REAyXggb2eia4\n", "2hm+qNng5KoBs0hw7sagxCovAq4OBthbmYDbyb773Jjqt8vlIp0+fTquo6ODqVQqmWq1mukPdqTT\n", 
"6VhpaanqtttuG58KfL8iIlax2HTKqAUEAYA/318iXzc/w7cuMB6LA83uFPBaKP06/sCkqUGJVUP9\n", "23/tOAsFRTSsPwBFUUIoFDr1ev24ZMKRyWRi3759Kf7icAiCEGKx2JGXl2dev369erwcfb82RiIg\n", "mM7iHJUF586SjJ518zOMgJlR6P5nKliqBVMyMDEcJBoeyfqDTCYTW7durW1paWFWVo6BcWAAKIoS\n", "t9xyixpFUSI9Pd2WmZnpGEm9pxiREbGACDk0d7vWErUPQCpkOZ9bX6wCAADl2+ngqJ+euz6hI8v0\n", "i8YRFil33nnnpIdi3OhE7CjMSuSNarH3yr0L2ukUMg6GA6JpKxwAAIzQRdNiTF3MNgf69ZEq4b8O\n", "nhENf3Z/Ip5B8qT866PgyFUiLpPqmZeZYAOvBQXt18kjvsFUgjsn4rCLGFODl//5rez0L81xty6a\n", "qQMA3UiujXgGmZnCj1pNyE6K88XC2BuYQEzz/fC482MCMs3AvL5qimTSyLcEj1hAUkUcN4dOGb7w\n", "W4gmzEiO8wmXs2VaRwQDysaAPTOmYk0zsOthNyg6jgICALA4LyEqr2xO0vXZxzX6XOpJRbDCMG7b\n", "8cYYN7z+GSSKmLQRCci6ualRbV9GRq9ntBHTrxpHPxLuiTnepiF+FQsdTxULAGB+ltgm4TMmpT7R\n", "pMPMsQIr79f52ac5/hkERUc+Po/4is23zxy0WcmvguTfqya7CTGiw4v7VayRzyAjDt1ePSvZVHFN\n", "pTt6VTFim/K0hX+zHuKWjMp6pek1k89caeGotL3Ubr2JKhHy3HNnpFnn5MrsNEr4rQA0vWbyxfp2\n", "Vk2LkuXBMKQwK9k2Ny/NJhXHDXJa2hwu0uGzV/kpCUJXycyMkJl0ap2Rsq/ikuC3ty7U8TmD03Fr\n", "W5WMK01dzLuWzellMfp7501WB1p1Tc6ub1MzUhOFrtnZKfZ06dBF7vxgXhyplSsYV5q7mLWtKpbJ\n", "aifftqjQsH558aB1bV2bil51Tc7xYF6kIENqn52Tah/YjoGYrA70fG0rq7q+neP2YIhUHOdaUJBu\n", "nZWV4sD6ZpCJ2cTzpbvndCn0Vnq90siO5vppBTXBBWl/jHrWvNTQwfz4h5OSc9fkfP9UH2DfCUhL\n", "FNl3/nljsziOG9JCuOvgGdH7e46leLC+TUD3HrsACAKwcd0S5TMPrOkmBRkOHv7Lzly5QsNcNX+G\n", "PpSAlF2o4/7xva+zCQIgns/x3Lt6/qCt3vaf/EXw9ZEqybLiXHNwx3z3i58Sdx+uTCQIAiGREAK/\n", "Xnr2jqWzNa8+dqeCGkLQPZgX+eLwWdGXP51L0PRaAlmOCIIQW357qzL43Dallrbpb7uy1DoT3X8O\n", "QRCIkMd2/23zva3z89NDCvyFujbWs9u+zLLaXWQAgOC2yRKFDr3JV4AuGjNvVALCplPwT/6wvPHj\n", "soaEXRWNSRh2g1Za5C3shcy/dgCZG1Xg3+5DZ0XbvzqSCgBw28KZupuLc015skSHWMDFTv/SxHnr\n", "84OydrWO+ch/fpK7+7XfNwi4/XPHX/zgG9lPlTUiBo3i/cO9KztXzJ1hwrxe5Oj5Ov6n+08lffbj\n", "aWldm4q14+X/JfcLybwZaWa5QsOsqm3lYV4cGahWlJ2v4/tjOY9X1/NDCUjl1RZehjTeHjxD/eWj\n", "71O+P3FJnJUstr36+F0dBRlSZ61cwfjTh9+l/XjqF7HZ5iC///xDbcH3MVrs6Oa3v8i82qLg0Chk\n", "fMOqeT2Fmck2iYjndro8pFyZJLCmq2tT0Z/auivHZHOQn/jNcsUDt5boUBIJ/v1Tpeij7yqSn3jr\n", 
"X7m7/vp4/cxMaT8z+4lLjZz/8/c9WS4PRpqTm2p+fP0y9YKCDFtnt556/EI978ufz0n8gjNhMwgA\n", "AEoiwRNr8nvWFct6q+UaVpPayGzrNjMcLn992b4oWx6LPjaRpeQ4N5CFrpDh7kNG84aLEIaga/37\n", "g8S7gZFlB3a+HXglw+7THY4vDldKvF4ceeLu5YqnNqzsFzd1y8KZJomI1/y7v+zMV2h66V/+dE70\n", "9H2rAuccOnOV91NljYhCRvGdf3qksTArOdAxslISetKTRM6X/vFtZtW1Vv63ZRcE961eYAAAWL0g\n", "37jn6HmJ1e4iV15tYS+dk9NPNTx3rTUQ5nOxoZ3ncLlJDFrf1gKd3XpqR7ee8fDamwJrrjNXmtnf\n", "n7gkBgDY/tyDrakSoRsAoCgn1f7PF/6j5c4/vl948nJTXJtSqwpWt9776kji1RYFR8BluXf/5+ON\n", "yQmCIXNo3vrsQGqvxU5ZvSBf/9S9fd/VUxtW9sgVGsax83XCz348lfDusw+0+495MC/yxqc/ylwe\n", "jJSeJLL/44WH5ezrM15mstiVmSzWPFx6k7b02e0zdUYrNRoz76jTR6VCllsqTHcDwPhnrvFu1kPi\n", "49Nmsex3UNEo5JD686ysFMeMtCRrXZuKvf/k5fin7l3ZQ0IQcHkw5L2vjqQAAKycN8MQLBx+bltU\n", "aNpXccl0rkbO/+i7Culdy4p7aRQyMS8/3cZnMzxGq4NSdqGOFywgV5o6mXqTlbpiXp6+vLpB6HJj\n", "pIqLDZzbb5oVSMs9WlXLAwBYNX9G4L0vfz4nBgDIShHb/MLhR5YocicIuK4eg5m25+h54UsbSwO/\n", "j9vjK4gXx2Vh4YSjrk1Fv9qi4AAArF6QP6gfLSjIMB87XyesuNggsNidnRymL5r627JqgcZgpgEA\n", "PH3faiU7xDqFTqUQpOuF88bdUTjpYEYy2OqZfX91Q/zV9v1Zg/6cnSPa4XS0BBxU5KFHrvkF6WYA\n", "gB6DmdbYrqYDAFxt7mL2XP/h1y6eNeR20LctKjQAAOiMVmpDm4oBAEBCECgpzDQBAJy52tKvQHRZ\n", "tS9I9Pe/Wd6dKOI5AQCOX6jvd87pK808AZflmZMrswMAuD0YUnlVzgcAWDlvRshBMD8jyQoA0KUx\n", "9Mt9CYR4DNMxD56+IgAAoFHJ+Ip5MwYVqfavPTAvjnSo+8oZHTpzRQjgK3Q51PqkXzuiqBY55QsQ\n", "9MN0Ih6MFfGRJ0wN+De72AjZ78gnqrkB82KYxaGQywro+Wq9iTojPclZf72zAwAMHLGDSUmICxxr\n", "VeloRTmpdgCAlfNmGH+uvCbSGMy0a3Ilw6+3n7nSzEsQcF356UnOJUU5xm/KLviMBzjegZJIYHW4\n", "SDUtCs7qBfkBh3C7Wkfzf45DZ66KTlxqjAMAgoQggFxf92iNvm3j9Mb+G2X6rxvOQdfZ3Vcu9eFX\n", "/ycXQRACQRBAAALPCHxHOiPF/3kUGl9po1SJ0MFjM4ZU4wO/Q5iBaiiml4BMM7AIHFRsZt/6rFvv\n", "K0nUptIFRuL4OM6Q+Sc8Vt+mqEpNbyCZbVlxroVGIeMuD0Y6WlXLn5kpdXTrTZSWLg3r7hVzewAA\n", "Vi3IN31TdkFitjnJ52rk7MVF2daKiw1cD+YlLZ+bF1Cv2pS6QOdNlQgdDNrgraATRTwXAICQ17+t\n", "fhVzuBlEpfV1dC6LgaUkCEI6Y/3P8FvVbA4XyWC2UQEAZmYmh10r+o1I0XjSYwIyjvhVLEqYkcvt\n", "6TPfErjvtGChaVVqaUXZoTeP6bXYAr+fWNDXORk0Kl6cJzNX1sj5p39p4m357S3qY+fr/GsLIwDA\n", 
"goJ0K4/N8JisDsrRqlr+4qJs68lLjTwqhYwvK84NrFs8QdXon75vlbogQxpxsCYWYQyUf4SXJQod\n", "27Y82B7JvYPXE8N1fAyPTFBDMb3WIJGATJ26blgEKoa2t2+/Pr8FKFHEC6hOjR3qIQM8e/R912an\n", "9N+i7ObiXCMAQItCw1JpeymnfmnisZk0bGFhlu16m2DhzOtrlSvNfJwgoKq2lTcnJ9UcbNXKShYH\n", "7mu1jyyWzhuhgy5VInQCADicnojvT6dSCA7Tt6lQU2d32EzXSNsRihtHQOgZAKkvAczcB5D1DgC3\n", "ZLJbFBRFOvQPE6xOZaf6OnlwZ2/u7BlSQJoVvmM0ChnPSZX0E5BbSmaa/Nse7D/5S9zlhg7ugvwM\n", "U3BbVl6fTTS9Ftq/D1eKjBY7ZemcnH6F5jKk8S4SyXef2lbliFKuI12kpyfFOwEAunr0DJcHizhc\n", "2q92tSm1TGygEzYI/wxFGe9o3tEzjqHishcB4lYAkCgA7CKAtFcB0LGpFRcNHqxPNRlqcWi2OdAz\n", "V5vjAADm5qWZ4vm+6unz89Nt2SkJNgCAps6ekJ0SJwg4fLZGCABwz8p5PQNDMUR8NjYjLdEGALD7\n", "8NlElwfrt7YA6FurAADs+K5cCgCwekFBv3OoFDKRIY23AwB8V35xRBsoRTpyF+fJrAAAZpuT/H3F\n", "pYg3KVq7eJYeAMDlwUi1rcqQ1WO8OB6w00z9GYQyfMxOVLBmAdDT+r9HogII1/Z/jyqZsK2EgwXk\n", "5OUmntHSf1crL47DXz76PsXl9vkKnrxnuTr4+KN3LlUDAPzS1Mn95tiFQVVRdnxbnqAxmGlsJg17\n", "8p4VIYs3LJ3jK7tqtbvIZJRErAzybQD0rVX852SliG2JIt4go8BrT/ymg0JG8a4eA2Pr5weTQj3r\n", "aksX40pzV7/ZLtJF+rLiXMuakgIdAMCO/1curWtTDersJqsDPXjmCh8PcgLfu3qBnkmnegEA3vhk\n", "vyzU7FNe3RAYJf3+k5EwsYt0Rq4NjGVjf198iGzgge+zCqIugz9SggXk4Okr8ccv1AtuLs7pzUoW\n", "O5xujHTyUiO/uauHBQDwxN3LFQsK+sdNrV08y9TRrVd89F158t92HZI1dfYwlszONlvsTrTiYgPv\n", "aFWtSMRnu7c9+4B8KBPnLSUFxo++q0gGACjKTjWH6iDL5+YZK2t8fo7FRdkh6/gWZEgdT9+7SrH9\n", "qyOpXx2pSmzq7GHeVJRliudzsHM1ck5VbStPb7JS581IM33yfx9t8V/nvW50iMSD/dffr++6Jlew\n", "1ToT/bHXP8srXVKknZkptat0RurZK8282lYV24vjiDiO2+D3ebAZNPzdZx9o2bLtq6ymzh7WY69/\n", "mn3nzXN0yQkCd0O7inHkXK2grk3FnpGWaH3uoVsHfceRMMECUjCq0I0hcbQA2OoBWDP63vPaAQxH\n", "+5/HmTM+zw9BsIBsvn91Z0O7mnmsqk74c+W1wPsiPtv91IaVintWzgvpgNt0z4oemUTo/O9vj0u/\n", "KTsv2XvsvAQAgEGjektmZhhff/LujgRB6CBHAICslARXsjjOaXe60Q2r5oVM9lpTUmD6265DRLJY\n", "4Fy3pGjIaIiNdyzRshg0747vyqUXG9p5FxvaAyErSSK+87mHbu144JaSfgl1I0l1ZTNo+Cd/frTp\n", "7S8OS8urG4R7j52X7D3mO0Yho/jNxTmGJ36zvHtGev8NcG6alWV9//mHmv9r96GUGrmS7ffIC7gs\n", "T4Y03v7mprvl65bOjnqPQ4QgiOpoL46K7o+lYPxJAgBDV0mMprIiWQggvAMgbhWA9TKA9juf4Pin\n", 
"5LhVWkh7pXNcP1sQap2Jctsz784CAHj/+YealhXnWowWOypXaGiaXjOlIEPqCOcEHIjZ5kCvNncx\n", "4uO4WHZqgpMUYepvZ7eeKhXHudEw4OU8RgAAAbxJREFUXuR2lY6aliSKqC1eHAdFj4HaqtTSeGym\n", "NytF7OKyQs9gLg+G4DiOkFGUCGfqHojZ5kDlCg2t12InZyTFO1MkgrDt92O02NH6djUjK0Xs9K/n\n", "RsvECwjhRaDjT9ngbOGMqYCE86QzMm2Q849GIFEnrIxjV4+Bum7Le4UAAP/94sONi4uyJ2z2ijF2\n", "TLyZF0EJSH5ZDqy5Q8YYjSmsmWbIeEM+kcIB0F/FGsnoGWNqMTmedDLPCykvt4HpRC/0fJYKXvPY\n", "716FsjFIfLQLRHdMjCAOwB1kUYkJyPRlckNNeMuMwL3JBKbTPDAeE4G9jje6YtYIADPfDHGr9CBY\n", "bQTS5G1yEzyDRGN/jzE1mPxYLIRCAH+FEfgrjOAxkMF2lQ32OjY4mlng6mKGLRWEUHGgp9mAmWMD\n", "Zr4V2EU2oAjGZHE2WrCgGKZoPLgxpgaTLyDBUAQY8Jcbgb+8zyzntZHAo6cAZiAD7iYByvICyvEC\n", "mesFlIsBMjWjZThMulfEZ7t1Ris1pmJNXybeivUrAicIOHulmV2YlRI2XyHG1CUmIDFihGFq6icx\n", "YkwRYgISI0YYYgISI0YY/j+SFgT3yDrlYgAAAABJRU5ErkJggg==\n" ], "text/plain": [ "" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "Image('python.png')" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/Interrupt.ipynb000066400000000000000000000022401414342127300224200ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mwhile\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "while True: continue" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "done\n" ] } ], "source": [ 
"print(\"done\")" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/JupyterWidgets.ipynb000066400000000000000000000041221414342127300234160ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "f46f26da84b54255bccc3a69d7eb08de", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Label(value='Hello World')" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets\n", "label = ipywidgets.Label('Hello World')\n", "label" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "# it should also handle custom msg'es\n", "label.send({'msg': 'Hello'})" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.4" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": { "8273e8fe9d9941a4a63c062158e0a630": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.4.0", "model_name": "DescriptionStyleModel", "state": { "description_width": "" } }, "a72770a4f541425f8fe85833a3dc2a8e": { "model_module": "@jupyter-widgets/controls", "model_module_version": "1.4.0", "model_name": "LabelModel", "state": { "context_menu": null, "layout": "IPY_MODEL_dec20f599109458ca607b1df5959469b", "style": "IPY_MODEL_8273e8fe9d9941a4a63c062158e0a630", "value": "Hello World" } }, "dec20f599109458ca607b1df5959469b": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.1.0", "model_name": "LayoutModel", "state": {} } }, "version_major": 2, "version_minor": 0 } } }, "nbformat": 4, "nbformat_minor": 2 } 
nbclient-0.5.6/nbclient/tests/files/Other Comms.ipynb000066400000000000000000000024471414342127300225550ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "ExecuteTime": { "end_time": "2020-05-29T11:16:26.365338Z", "start_time": "2020-05-29T11:16:26.362047Z" } }, "outputs": [], "source": [ "from ipykernel.comm import Comm" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2020-05-29T11:16:26.377700Z", "start_time": "2020-05-29T11:16:26.371603Z" } }, "outputs": [], "source": [ "comm = Comm('this-comm-tests-a-missing-handler', data={'id': 'foo'})" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "ExecuteTime": { "end_time": "2020-05-29T11:16:26.584520Z", "start_time": "2020-05-29T11:16:26.581213Z" } }, "outputs": [], "source": [ "comm.send(data={'id': 'bar'})" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" } }, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Output.ipynb000066400000000000000000000475611414342127300217430ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "e152547dd69d46fcbcb602cf9f92e50b", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "from IPython.display import clear_output\n", "output1 = widgets.Output()\n", "output1" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "hi\n" ] } ], "source": [ 
"print(\"hi\")\n", "with output1:\n", " print(\"in output\")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "with output1:\n", " raise ValueError(\"trigger msg_type=error\")" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "44dc393cd7c6461a8c4901f85becfc0e", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "output2 = widgets.Output()\n", "output2" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "hi2\n" ] } ], "source": [ "print(\"hi2\")\n", "with output2:\n", " print(\"in output2\")\n", " clear_output(wait=True)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "d6cd7a1de3494d2daff23c6d4ffe42ee", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "output3 = widgets.Output()\n", "output3" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "hi3\n" ] } ], "source": [ "print(\"hi3\")\n", "with output3:\n", " print(\"hello\")\n", " clear_output(wait=True)\n", " print(\"world\")" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "10517a9d5b1d4ea386945642894dd898", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "output4 = widgets.Output()\n", "output4" ] }, { "cell_type": "code", "execution_count": 9, 
"metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "hi4\n" ] } ], "source": [ "print(\"hi4\")\n", "with output4:\n", " print(\"hello world\")\n", " clear_output()" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "37f7ba6a9ecc4c19b519e718cd12aafe", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "output5 = widgets.Output()\n", "output5" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "print(\"hi5\")\n", "with output5:\n", " display(\"hello world\") # this is not a stream but plain text\n", "clear_output()" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "4fb0ee7e557440109c08547514f03c7b", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import ipywidgets as widgets\n", "output_outer = widgets.Output()\n", "output_inner = widgets.Output()\n", "output_inner" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "01ea355e26484c13b1caaaf6d29ac0f2", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Output()" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "output_outer" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "with output_inner:\n", " print('in inner')\n", " with output_outer:\n", " print('in outer')\n", " print('also in inner')" ] } ], "metadata": { "kernelspec": { "language": "python" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": 
"text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.3" }, "widgets": { "application/vnd.jupyter.widget-state+json": { "state": { "01ea355e26484c13b1caaaf6d29ac0f2": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_7213e178683c4d0682b3c848a2452cf1", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in outer\n" } ] } }, "025929abe8a143a08ad23de9e99c610f": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "03c04d8645a74c4dac2e08e2142122a6": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "091f6e59c48442b1bdb13320b4f6605d": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "10517a9d5b1d4ea386945642894dd898": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_2c67de94f62d4887866d22abca7f6f13" } }, "106de0ded502439c873de5449248b00c": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "1b9529b98aaf40ccbbf38e178796be88": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "22592f3cb7674cb79cc60def5e8bc060": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "2468aac6020349139ee6236b5dde0310": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_d5e88b6a26114d6da0b7af215aa2c3bb" } }, "2955dc9c531c4c6b80086da240d0df13": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", 
"state": { "layout": "IPY_MODEL_1b9529b98aaf40ccbbf38e178796be88", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "world\n" } ] } }, "2c67de94f62d4887866d22abca7f6f13": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "37f7ba6a9ecc4c19b519e718cd12aafe": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_03c04d8645a74c4dac2e08e2142122a6", "outputs": [ { "data": { "text/plain": "'hello world'" }, "metadata": {}, "output_type": "display_data" } ] } }, "3945ce528fbf40dc830767281892ea56": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "3c6bb7a6fd4f4f8786d30ef7b2c7c050": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "3e0e8f5d18fe4992b11e1d5c13faecdf": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "44dc393cd7c6461a8c4901f85becfc0e": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_3c6bb7a6fd4f4f8786d30ef7b2c7c050", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output2\n" } ] } }, "45823daa739447a6ba5393e45204ec8e": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_3e0e8f5d18fe4992b11e1d5c13faecdf", "outputs": [ { "data": { "text/plain": "'hello world'" }, "metadata": {}, "output_type": "display_data" } ] } }, "4fa2d1a41bd64017a20e358526ad9cf3": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_6490daaa1d2e42a0aef909e7b8c8eff4", "outputs": [ { "data": { "text/plain": "'hello world'" }, 
"metadata": {}, "output_type": "display_data" } ] } }, "4fb0ee7e557440109c08547514f03c7b": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_dbf140d66ba247b7847c0f5642b7f607", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in inner\nalso in inner\n" } ] } }, "55aff5c4b53f440a868919f042cf9c14": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_a14653416772496aabed04b4719268ef", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in inner\nalso in inner\n" } ] } }, "5747ce87279c44519b9df62799e25e6f": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_6ef78dc31eec422ab2afce4be129836f", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output2\n" } ] } }, "6490daaa1d2e42a0aef909e7b8c8eff4": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "6ef78dc31eec422ab2afce4be129836f": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "7134e81fdb364a738c1e58b26ec0d008": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_025929abe8a143a08ad23de9e99c610f", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in inner\nalso in inner\n" } ] } }, "7213e178683c4d0682b3c848a2452cf1": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "804b6628ca0a48dfbad930615626b1fb": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "a14653416772496aabed04b4719268ef": { "model_module": "@jupyter-widgets/base", 
"model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "a32671b19b814cf5bd964c36368f9f79": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_c843c22ff72e4983984ca4d62ce68e2b", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in outer\n" } ] } }, "aaf673ac9c774aaba4f751db2f3dd6c5": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_106de0ded502439c873de5449248b00c", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output2\n" } ] } }, "bc3d9af2591e4a52af73921f46d79efa": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_22592f3cb7674cb79cc60def5e8bc060" } }, "c843c22ff72e4983984ca4d62ce68e2b": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "cc022dc8b5584570a04facf68f9bdf0b": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_3945ce528fbf40dc830767281892ea56", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in outer\n" } ] } }, "d0cb56db68f2485480da1b2a43ad3c02": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_df4468e2240a430599a01e731472c319", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output\n" }, { "ename": "ValueError", "evalue": "trigger msg_type=error", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m 
\u001b[0;32mwith\u001b[0m \u001b[0moutput1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"trigger msg_type=error\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mValueError\u001b[0m: trigger msg_type=error" ] } ] } }, "d314a6ef74d947f3a2149bdf9b8b57a3": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_804b6628ca0a48dfbad930615626b1fb", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output\n" } ] } }, "d5e88b6a26114d6da0b7af215aa2c3bb": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "d6cd7a1de3494d2daff23c6d4ffe42ee": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_091f6e59c48442b1bdb13320b4f6605d", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "world\n" } ] } }, "dbf140d66ba247b7847c0f5642b7f607": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "de7ba4c0eed941a3b52fa940387d1415": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "df4468e2240a430599a01e731472c319": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "e152547dd69d46fcbcb602cf9f92e50b": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_de7ba4c0eed941a3b52fa940387d1415", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "in output\n" }, { "ename": "ValueError", "evalue": "trigger 
msg_type=error", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0moutput1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"trigger msg_type=error\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mValueError\u001b[0m: trigger msg_type=error" ] } ] } }, "e27795e5a4f14450b8c9590cac51cb6b": { "model_module": "@jupyter-widgets/base", "model_module_version": "1.2.0", "model_name": "LayoutModel", "state": {} }, "e3e20af587534a9bb3fa413951ceb28d": { "model_module": "@jupyter-widgets/output", "model_module_version": "1.0.0", "model_name": "OutputModel", "state": { "layout": "IPY_MODEL_e27795e5a4f14450b8c9590cac51cb6b", "outputs": [ { "name": "stdout", "output_type": "stream", "text": "world\n" } ] } } }, "version_major": 2, "version_minor": 0 } } }, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Parallel Execute A.ipynb000066400000000000000000000046651414342127300237210ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Ensure notebooks can execute in parallel\n", "\n", "This notebook uses a file system based \"lock\" to assert that two instances of the notebook kernel will run in parallel. Each instance writes to a file in a temporary directory, and then tries to read the other file from\n", "the temporary directory, so that running them in sequence will fail, but running them in parallel will succed.\n", "\n", "Two notebooks are launched, each which sets the `this_notebook` variable. 
One notebook is set to `this_notebook = 'A'` and the other `this_notebook = 'B'`." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import os\n", "import os.path\n", "import tempfile\n", "import time" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# the variable this_notebook is injectected in a cell above by the test framework.\n", "this_notebook = 'A'\n", "other_notebook = 'B'\n", "directory = os.environ['NBEXECUTE_TEST_PARALLEL_TMPDIR']\n", "with open(os.path.join(directory, 'test_file_{}.txt'.format(this_notebook)), 'w') as f:\n", " f.write('Hello from {}'.format(this_notebook))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "start = time.time()\n", "timeout = 5\n", "end = start + timeout\n", "target_file = os.path.join(directory, 'test_file_{}.txt'.format(other_notebook))\n", "while time.time() < end:\n", " time.sleep(0.1)\n", " if os.path.exists(target_file):\n", " with open(target_file, 'r') as f:\n", " text = f.read()\n", " if text == 'Hello from {}'.format(other_notebook):\n", " break\n", "else:\n", " assert False, \"Timed out – didn't get a message from {}\".format(other_notebook)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" } }, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Parallel Execute B.ipynb000066400000000000000000000046651414342127300237220ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Ensure notebooks can execute in parallel\n", "\n", "This notebook uses a file system based \"lock\" to assert that two instances of the notebook kernel 
will run in parallel. Each instance writes to a file in a temporary directory, and then tries to read the other file from\n", "the temporary directory, so that running them in sequence will fail, but running them in parallel will succed.\n", "\n", "Two notebooks are launched, each which sets the `this_notebook` variable. One notebook is set to `this_notebook = 'A'` and the other `this_notebook = 'B'`." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import os\n", "import os.path\n", "import tempfile\n", "import time" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# the variable this_notebook is injectected in a cell above by the test framework.\n", "this_notebook = 'B'\n", "other_notebook = 'A'\n", "directory = os.environ['NBEXECUTE_TEST_PARALLEL_TMPDIR']\n", "with open(os.path.join(directory, 'test_file_{}.txt'.format(this_notebook)), 'w') as f:\n", " f.write('Hello from {}'.format(this_notebook))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "start = time.time()\n", "timeout = 5\n", "end = start + timeout\n", "target_file = os.path.join(directory, 'test_file_{}.txt'.format(other_notebook))\n", "while time.time() < end:\n", " time.sleep(0.1)\n", " if os.path.exists(target_file):\n", " with open(target_file, 'r') as f:\n", " text = f.read()\n", " if text == 'Hello from {}'.format(other_notebook):\n", " break\n", "else:\n", " assert False, \"Timed out – didn't get a message from {}\".format(other_notebook)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" } }, "nbformat": 4, "nbformat_minor": 2 } 
nbclient-0.5.6/nbclient/tests/files/SVG.ipynb000066400000000000000000000017221414342127300210670ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [], "source": [ "from IPython.display import SVG" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "data": { "image/svg+xml": [ "\n", " \n", "" ], "text/plain": [ "" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "SVG(data='''\n", "\n", " \n", "''')" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/Skip Exceptions with Cell Tags.ipynb000066400000000000000000000025071414342127300261150ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "tags": [ "raises-exception" ] }, "outputs": [ { "ename": "Exception", "evalue": "message", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# üñîçø∂é\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"message\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mException\u001b[0m: message" ] } ], "source": [ "# üñîçø∂é\n", "raise Exception(\"message\")" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "ok\n" ] } ], "source": [ "print('ok')" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 1 } nbclient-0.5.6/nbclient/tests/files/Skip 
Exceptions.ipynb000066400000000000000000000025151414342127300234410ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "ename": "Exception", "evalue": "message", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mException\u001b[0m Traceback (most recent call last)", "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;31m# üñîçø∂é\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"message\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[1;31mException\u001b[0m: message" ] } ], "source": [ "# üñîçø∂é\n", "raise Exception(\"message\")" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "ok\n" ] } ], "source": [ "print('ok')" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/Skip Execution with Cell Tag.ipynb000066400000000000000000000007771414342127300255630ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": null, "source": [ "print(\"a long running cell\")" ], "outputs": [], "metadata": { "tags": [ "skip-execution" ] } }, { "cell_type": "code", "execution_count": 1, "source": [ "print('ok')" ], "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "ok\n" ] } ], "metadata": {} } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 1 } nbclient-0.5.6/nbclient/tests/files/Sleep1s.ipynb000066400000000000000000000022551414342127300217460ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import time\n", "import 
datetime" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "t0 = datetime.datetime.utcnow()\n", "time.sleep(1)\n", "t1 = datetime.datetime.utcnow()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "time_format = '%Y-%m-%dT%H:%M:%S.%fZ'\n", "print(t0.strftime(time_format), end='')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "print(t1.strftime(time_format), end='')" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.1" } }, "nbformat": 4, "nbformat_minor": 2 } nbclient-0.5.6/nbclient/tests/files/Unicode.ipynb000066400000000000000000000005311414342127300220130ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u2603\n" ] } ], "source": [ "print('\u2603')" ] } ], "metadata": {}, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/UnicodePy3.ipynb000066400000000000000000000006761414342127300224210ustar00rootroot00000000000000{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u2603\n" ] } ], "source": [ "print('\u2603')" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" } }, "nbformat": 4, "nbformat_minor": 0 } nbclient-0.5.6/nbclient/tests/files/python.png000066400000000000000000000210661414342127300214170ustar00rootroot00000000000000PNG  IHDRP\m:sBIT|dtEXtSoftwarewww.inkscape.org< IDATxyxU'OIMtoi)dw6·~:#^|μ.̫36ਠ((PZR( 
M,mf_n.K4<虿f*++vܙUSS#p\dg2rYa^je'.coW c?򫟗: ΄ HUs'5AwrXR l:Z ǒqyvM[SǸaP1XԜ@HP?.ϯifˍcܰLu@}r:$VKtӉfffb;r9C&9d1_8A (M^줫jzbb355}|ZMS4CxL;ZP~3֏UrxY@5Qav<_kOZ,ɓJQOO 5srr7nh!̽{&^tI۶mx=V*,M6X>A EjkkO?l[ְ!aRVV&lllj4^1|߮l\SS*a}L&III+ ao>;~|8h4T*,RɪmٲeHNcĩT*r֮]\~}O{8N:t(`0P%$C,;v;YV3V+nᇔQ"D<[LP+mZҦ8;vH|@,;֭[*,, ^R̎oꫯ6Df GiiTs)] {ymhh544!Ν7v;zYAyy?H6lؠx?8*@ y䑶DǃpvޝfZ)Lf}:nٲe mp[|tGiiiJAǏ'I֮]a2xNj/ܩ) Q\\l۷ou:ŋ555BFH)L&ׯq8M---\EJ E,`|l޼yAyyxB5B233ͯJ@sM70RGGqسgOo&##QZZ޻wo2;hA *jjL.\\J8L %l/dNVTl6۳hѢpZdL&̋/FAf^6h#٘P.찻GWw]M8peJ%##Á(_wuu1rssݮ"ɬ׮];;;ֻ$pfj6MKKp|MѴFyVkT xsTyl5isd21B0 nO4PԐZ:g \.7p^ww7=ܹŘ\mjPs;zBכԀ}?輠؝c#l&lj v`Ͱ`&bh4NK V Hhth-&U(}|Ȏy!^; .,|nWVzh%o`Xn( #B 'u@64DHsF.|p5J!Avpmc>XMHj Ezq# IkfsdP89IoQ ȑ+ {ʼ8qc F0^ ZH"r/v/$E"шѮT˵׿ߨF(LP|<3FC V)1].״3ϧ|']]],3x^$xݒ2ތ:ۋOhtI=nɓ'^w 0d-Dp0PXXhH$v˗/s3D"q̙3bDIe_ hXܨ/ ZsD%%%ځѺ)))ȉ'M:@X r^vm 6СCVuTߊn:dSO&}k.[4BJAyy``DnGmۖe2٣tPٳ纺:]C)5557|3WRdʢVnV.))1Zt:?L8p8~iJcc# ##RRRb EĦD(S Ƴ1"I'[*u {aj>DT&z=/L߿T*;Z-].sL&۴iSH$dZj^./\ 8uT¥KV@6B2 bp8+%oߞ3gCAA:#6D^xyǎi׮]kjj36Jhnnj&/󆌀]kٜش}j p; V03GlrܲeKÇ~EBйyT:}N@.++`FlXTj+..mll455ʨU^9sL`%%F-4 ߼ys7|TQQ`0hUUU(_*]fz\$]]"d(SbpGhƌF@VBAohh`i4j%d2[AA599yX3&L&6lؠ^b8L&swyZ18<E~Xvڞ&VOOrt:= P͛77p8aC֭[׽xb=@RRRD"WmذAhnnfuwwӓ9996L7-\WRCqvM צX@)]:pע.6#:Pz]a#QÑD! 
=7|s6bBгhѢz>|%ԱP8SRR"|(YYYeǻ#Fr`"V4[5i[2gnxx((>]A{(pހ/g^j(-+feηS3s5S>PQ2Db_ V tq˵ r|_ `3O+Ϗ*"Q J$ $-Cs(Bz3_0%IqYib;.5:&j'_?p' ^' P,dj4_u*ϼ`K1"G0& 9 3$Y8 Ȏv+oȀ&O`Bǃ=*1Tᄏ+33sRbn"%t uzcV^0O z&HpƠ*/[ɾܘ">}:T*jvXiin |""Vtʨ?_"_7?÷.0Z(:AUC8 E4?EQB(:zd‘db߾})pbؑg^~z}6F" Βu3Qg*XS201$L&[nmiiaVVq`(rEQ"==ݖI94w@*d9[_ਟ>#)wy礇bD(Jjʽ 2i+EbL]6* v"AU".ꙗ`#T;'ⰋS/q.HxZMN@Lc2jdȷX@RE7N[&H eZGƀ=3bM3a7(:8/!*lN5\IE0n7$ ȺQm_FFgӯG?9ަ!~ O `~&3&>Ѥ̱+٧9EG>>ͷYɯ߫& 1ËU #^=+TqM;zU1b▌z5\iᨴn*sgY4J4fvVM00+67/& rZ.W) BW̌tj෷.9qk[+M]̻e1{MVZuMήoS3R)tE`^+W*j'߶а~yum]^uM`^ Cjj؎|m+`TZPnft)VzȎi5izּs|T` HKwyc8B{xM@q3&\B\5>]񽯳  ܻzE*ɲ\sp|wL$!^z5>vB=Yї?KZY[~{26o:AvmC 6ֳ۾̲]dW.3oT¦SO㲆]IvVZ-̿vUCgEۿ: p™sMyDXNyv~7s_O5"ý+;W̝a¼^:O%}ii]% ɼifBìma^Vc9WC H^4687CFkĻevx%/WJ^y劧67u™&Whz_tN}:sSeBFz0+91RzғDΗmfյVe^`X ߸y"W^ma/O5'JG(r'yKK+eaᄣMEڢ^?-(0;_' ؝/۲j`<}j%;:NQ8`F2}uCY#tTGfخ\mb\.5vз-*4VjC@B()4ү@tY/HYޝ(9_w+<噓+=RyUX9oFA0?# Х1} x 1"Qy3=0/t:sE+t9_;9 t"'L 7'bš j:#=Y II kUhE9vf&̴kr%ïKp]I%E9o.8ށH`uH5- pZGCgN\j \_hmeAW.W'AA@|G:#y_iTc3TCj(L3Tlf[+IԦF8ΐ'(Q N!<{}fߢ\#@BRi{)~iⱙ4laaz`k+| 7''lJk,.U"t8ORӷPSgwLHG@/d[E:Ne:ypgoR@c4 I[Jf?K Sp[V^M4ڿW;e霜~2.wVR#]';z 8\گv)Ll6 Eh3V("Wб O5jqh93W楙m) 6Ξ'8|Fpy=C1D|66#-D뷶[\ zAAs2!|W~qD(E:rɬf}ť7)ZxHʐc8L2|NTfG*&W+/_>> g:w.U؅AUQv|[1il& {!7,+jdD mUdm" kOBFcB=jKJsW.E\˚W.kS &=x r߻zIzd,S^%0tF ec_|l .?R+/ n.J;ntR# M]<ѭW|]yv5u06[NbhUHg=|(-%ƏH(N5 +k|~E!dHO߻J#_JlaTess5rNUm+OoRH3}m_ntă߯&W:?+]R)tF+ͼVۋ8y4ghٲc}st wCq\MŞh}[}Ǒ0R0Ѝ!qX3 GǙ3>AlugCyNs">Ԇ{V tϊDoK);/{AzKff_A GW8iw LZSR`ۮCDX\hhw,Ѳ4ʥy!+I"nx~ u#Iue3h'~/K˫{=;F!9'~{Fz pne}k ,O4榻ΎzC h/`ICWI"Y  n2;UZH{s\?[jr3xZ;*Whh^3 C@6w1Xvjaog*ǹ0_cj19t2 )/D/| ^^1H| DwL dQ erCMxˌ<07b3 q Xmmr') current_dir = os.path.dirname(__file__) class AsyncMock(Mock): pass def make_async(mock_value): async def _(): return mock_value return _() def normalize_base64(b64_text): # if it's base64, pass it through b64 decode/encode to avoid # equivalent values from being considered unequal try: return b64encode(b64decode(b64_text.encode('ascii'))).decode('ascii') except (ValueError, TypeError): return 
b64_text def run_notebook(filename, opts, resources=None): """Loads and runs a notebook, returning both the version prior to running it and the version after running it. """ with open(filename) as f: input_nb = nbformat.read(f, 4) cleaned_input_nb = copy.deepcopy(input_nb) for cell in cleaned_input_nb.cells: if 'execution_count' in cell: del cell['execution_count'] cell['outputs'] = [] if resources: opts = {'resources': resources, **opts} executor = NotebookClient(cleaned_input_nb, **opts) with warnings.catch_warnings(): # suppress warning from jupyter_client's deprecated cleanup() warnings.simplefilter(action='ignore', category=FutureWarning) # Override terminal size to standardise traceback format with modified_env({'COLUMNS': '80', 'LINES': '24'}): output_nb = executor.execute() return input_nb, output_nb def run_notebook_wrapper(args): # since concurrent.futures.ProcessPoolExecutor doesn't have starmap, # we need to unpack the arguments return run_notebook(*args) async def async_run_notebook(filename, opts, resources=None): """Loads and runs a notebook, returning both the version prior to running it and the version after running it. """ with open(filename) as f: input_nb = nbformat.read(f, 4) cleaned_input_nb = copy.deepcopy(input_nb) for cell in cleaned_input_nb.cells: if 'execution_count' in cell: del cell['execution_count'] cell['outputs'] = [] if resources: opts = {'resources': resources, **opts} executor = NotebookClient(cleaned_input_nb, **opts) # Override terminal size to standardise traceback format with modified_env({'COLUMNS': '80', 'LINES': '24'}): output_nb = await executor.async_execute() return input_nb, output_nb def prepare_cell_mocks(*messages, reply_msg=None): """ This function prepares a executor object which has a fake kernel client to mock the messages sent over zeromq. The mock kernel client will return the messages passed into this wrapper back from ``preproc.kc.iopub_channel.get_msg`` callbacks. 
It also appends a kernel idle message to the end of messages. """ parent_id = 'fake_id' messages = list(messages) # Always terminate messages with an idle to exit the loop messages.append({'msg_type': 'status', 'content': {'execution_state': 'idle'}}) def shell_channel_message_mock(): # Return the message generator for # self.kc.shell_channel.get_msg => {'parent_header': {'msg_id': parent_id}} return AsyncMock( return_value=make_async( NBClientTestsBase.merge_dicts( { 'parent_header': {'msg_id': parent_id}, 'content': {'status': 'ok', 'execution_count': 1}, }, reply_msg or {}, ) ) ) def iopub_messages_mock(): # Return the message generator for # self.kc.iopub_channel.get_msg => messages[i] return AsyncMock( side_effect=[ # Default the parent_header so mocks don't need to include this make_async( NBClientTestsBase.merge_dicts({'parent_header': {'msg_id': parent_id}}, msg) ) for msg in messages ] ) def prepared_wrapper(func): @functools.wraps(func) def test_mock_wrapper(self): """ This inner function wrapper populates the executor object with the fake kernel client. This client has its iopub and shell channels mocked so as to fake the setup handshake and return the messages passed into prepare_cell_mocks as the execute_cell loop processes them. """ cell_mock = NotebookNode( source='"foo" = "bar"', metadata={}, cell_type='code', outputs=[] ) executor = NotebookClient({}) executor.nb = {'cells': [cell_mock]} # self.kc.iopub_channel.get_msg => message_mock.side_effect[i] message_mock = iopub_messages_mock() executor.kc = MagicMock( iopub_channel=MagicMock(get_msg=message_mock), shell_channel=MagicMock(get_msg=shell_channel_message_mock()), execute=MagicMock(return_value=parent_id), is_alive=MagicMock(return_value=make_async(True)), ) executor.parent_id = parent_id return func(self, executor, cell_mock, message_mock) return test_mock_wrapper return prepared_wrapper def normalize_output(output): """ Normalizes outputs for comparison. 
""" output = dict(output) if 'metadata' in output: del output['metadata'] if 'text' in output: output['text'] = re.sub(addr_pat, '', output['text']) if 'text/plain' in output.get('data', {}): output['data']['text/plain'] = re.sub(addr_pat, '', output['data']['text/plain']) if 'application/vnd.jupyter.widget-view+json' in output.get('data', {}): output['data']['application/vnd.jupyter.widget-view+json']['model_id'] = '' if 'image/svg+xml' in output.get('data', {}): output['data']['image/svg+xml'] = xmltodict.parse(output['data']['image/svg+xml']) for key, value in output.get('data', {}).items(): if isinstance(value, string_types): output['data'][key] = normalize_base64(value) if 'traceback' in output: tb = [ re.sub(ipython_input_pat, '', strip_ansi(line)) for line in output['traceback'] ] output['traceback'] = tb return output def assert_notebooks_equal(expected, actual): expected_cells = expected['cells'] actual_cells = actual['cells'] assert len(expected_cells) == len(actual_cells) for expected_cell, actual_cell in zip(expected_cells, actual_cells): # Uncomment these to help debug test failures better # from pprint import pprint # pprint(expected_cell) # pprint(actual_cell) expected_outputs = expected_cell.get('outputs', []) actual_outputs = actual_cell.get('outputs', []) normalized_expected_outputs = list(map(normalize_output, expected_outputs)) normalized_actual_outputs = list(map(normalize_output, actual_outputs)) assert normalized_expected_outputs == normalized_actual_outputs expected_execution_count = expected_cell.get('execution_count', None) actual_execution_count = actual_cell.get('execution_count', None) assert expected_execution_count == actual_execution_count def notebook_resources(): """ Prepare a notebook resources dictionary for executing test notebooks in the ``files`` folder. 
""" return {'metadata': {'path': os.path.join(current_dir, 'files')}} def filter_messages_on_error_output(err_output): allowed_lines = [ # ipykernel migh be installed without debugpy extension "[IPKernelApp] WARNING | debugpy_stream undefined, debugging will not be enabled", ] filtered_result = [line for line in err_output.splitlines() if line not in allowed_lines] return os.linesep.join(filtered_result) @pytest.mark.parametrize( ["input_name", "opts"], [ ("Other Comms.ipynb", dict(kernel_name="python")), ("Clear Output.ipynb", dict(kernel_name="python")), ("Empty Cell.ipynb", dict(kernel_name="python")), ("Factorials.ipynb", dict(kernel_name="python")), ("HelloWorld.ipynb", dict(kernel_name="python")), ("Inline Image.ipynb", dict(kernel_name="python")), ( "Interrupt.ipynb", dict(kernel_name="python", timeout=1, interrupt_on_timeout=True, allow_errors=True), ), ("JupyterWidgets.ipynb", dict(kernel_name="python")), ("Skip Exceptions with Cell Tags.ipynb", dict(kernel_name="python")), ("Skip Exceptions.ipynb", dict(kernel_name="python", allow_errors=True)), ("Skip Execution with Cell Tag.ipynb", dict(kernel_name="python")), ("SVG.ipynb", dict(kernel_name="python")), ("Unicode.ipynb", dict(kernel_name="python")), ("UnicodePy3.ipynb", dict(kernel_name="python")), ("update-display-id.ipynb", dict(kernel_name="python")), ("Check History in Memory.ipynb", dict(kernel_name="python")), ], ) def test_run_all_notebooks(input_name, opts): """Runs a series of test notebooks and compares them to their actual output""" input_file = os.path.join(current_dir, 'files', input_name) input_nb, output_nb = run_notebook(input_file, opts, notebook_resources()) assert_notebooks_equal(input_nb, output_nb) def test_parallel_notebooks(capfd, tmpdir): """Two notebooks should be able to be run simultaneously without problems. 
The two notebooks spawned here use the filesystem to check that the other notebook wrote to the filesystem.""" opts = dict(kernel_name="python") input_name = "Parallel Execute {label}.ipynb" input_file = os.path.join(current_dir, "files", input_name) res = notebook_resources() with modified_env({"NBEXECUTE_TEST_PARALLEL_TMPDIR": str(tmpdir)}): threads = [ threading.Thread(target=run_notebook, args=(input_file.format(label=label), opts, res)) for label in ("A", "B") ] [t.start() for t in threads] [t.join(timeout=2) for t in threads] captured = capfd.readouterr() assert filter_messages_on_error_output(captured.err) == "" def test_many_parallel_notebooks(capfd): """Ensure that when many IPython kernels are run in parallel, nothing awful happens. Specifically, many IPython kernels when run simultaneously would encounter errors due to using the same SQLite history database. """ opts = dict(kernel_name="python", timeout=5) input_name = "HelloWorld.ipynb" input_file = os.path.join(current_dir, "files", input_name) res = NBClientTestsBase().build_resources() res["metadata"]["path"] = os.path.join(current_dir, "files") with warnings.catch_warnings(): # suppress warning from jupyter_client's deprecated cleanup() warnings.simplefilter(action='ignore', category=FutureWarning) # run once, to trigger creating the original context run_notebook(input_file, opts, res) with concurrent.futures.ProcessPoolExecutor(max_workers=2) as executor: executor.map(run_notebook_wrapper, [(input_file, opts, res) for i in range(8)]) captured = capfd.readouterr() assert filter_messages_on_error_output(captured.err) == "" def test_async_parallel_notebooks(capfd, tmpdir): """Two notebooks should be able to be run simultaneously without problems. 
The two notebooks spawned here use the filesystem to check that the other notebook wrote to the filesystem.""" opts = dict(kernel_name="python") input_name = "Parallel Execute {label}.ipynb" input_file = os.path.join(current_dir, "files", input_name) res = notebook_resources() with modified_env({"NBEXECUTE_TEST_PARALLEL_TMPDIR": str(tmpdir)}): tasks = [ async_run_notebook(input_file.format(label=label), opts, res) for label in ("A", "B") ] loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.gather(*tasks)) captured = capfd.readouterr() assert filter_messages_on_error_output(captured.err) == "" def test_many_async_parallel_notebooks(capfd): """Ensure that when many IPython kernels are run in parallel, nothing awful happens. Specifically, many IPython kernels when run simultaneously would encounter errors due to using the same SQLite history database. """ opts = dict(kernel_name="python", timeout=5) input_name = "HelloWorld.ipynb" input_file = os.path.join(current_dir, "files", input_name) res = NBClientTestsBase().build_resources() res["metadata"]["path"] = os.path.join(current_dir, "files") # run once, to trigger creating the original context run_notebook(input_file, opts, res) tasks = [async_run_notebook(input_file, opts, res) for i in range(4)] loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.gather(*tasks)) captured = capfd.readouterr() assert filter_messages_on_error_output(captured.err) == "" def test_execution_timing(): """Compare the execution timing information stored in the cell with the actual time it took to run the cell. 
Also check for the cell timing string format.""" opts = dict(kernel_name="python") input_name = "Sleep1s.ipynb" input_file = os.path.join(current_dir, "files", input_name) res = notebook_resources() input_nb, output_nb = run_notebook(input_file, opts, res) def get_time_from_str(s): time_format = '%Y-%m-%dT%H:%M:%S.%fZ' return datetime.datetime.strptime(s, time_format) execution_timing = output_nb['cells'][1]['metadata']['execution'] status_busy = get_time_from_str(execution_timing['iopub.status.busy']) execute_input = get_time_from_str(execution_timing['iopub.execute_input']) execute_reply = get_time_from_str(execution_timing['shell.execute_reply']) status_idle = get_time_from_str(execution_timing['iopub.status.idle']) cell_start = get_time_from_str(output_nb['cells'][2]['outputs'][0]['text']) cell_end = get_time_from_str(output_nb['cells'][3]['outputs'][0]['text']) delta = datetime.timedelta(milliseconds=100) assert status_busy - cell_start < delta assert execute_input - cell_start < delta assert execute_reply - cell_end < delta assert status_idle - cell_end < delta def test_synchronous_setup_kernel(): nb = nbformat.v4.new_notebook() executor = NotebookClient(nb) with executor.setup_kernel(): # Prove it initalized client assert executor.kc is not None # Prove it removed the client (and hopefully cleaned up) assert executor.kc is None def test_startnewkernel_with_kernelmanager(): nb = nbformat.v4.new_notebook() km = KernelManager() executor = NotebookClient(nb, km=km) executor.start_new_kernel() kc = executor.start_new_kernel_client() # prove it initalized client assert kc is not None # since we are not using the setup_kernel context manager, # cleanup has to be done manually kc.shutdown() km.cleanup_resources() kc.stop_channels() def test_start_new_kernel_history_file_setting(): nb = nbformat.v4.new_notebook() km = KernelManager() executor = NotebookClient(nb, km=km) kc = km.client() # Should start empty assert executor.extra_arguments == [] # Should assign memory 
setting for ipykernel executor.start_new_kernel() assert executor.extra_arguments == ['--HistoryManager.hist_file=:memory:'] # Should not add a second hist_file assignment executor.start_new_kernel() assert executor.extra_arguments == ['--HistoryManager.hist_file=:memory:'] # since we are not using the setup_kernel context manager, # cleanup has to be done manually kc.shutdown() km.cleanup_resources() kc.stop_channels() class TestExecute(NBClientTestsBase): """Contains test functions for execute.py""" maxDiff = None def test_constructor(self): NotebookClient({}) def test_populate_language_info(self): nb = nbformat.v4.new_notebook() # Certainly has no language_info. executor = NotebookClient(nb, kernel_name="python") nb = executor.execute() assert 'language_info' in nb.metadata def test_empty_path(self): """Can the kernel be started when the path is empty?""" filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') res = self.build_resources() res['metadata']['path'] = '' input_nb, output_nb = run_notebook(filename, {}, res) assert_notebooks_equal(input_nb, output_nb) @pytest.mark.xfail( "python3" not in KernelSpecManager().find_kernel_specs(), reason="requires a python3 kernelspec", ) def test_empty_kernel_name(self): """Can kernel in nb metadata be found when an empty string is passed? Note: this pattern should be discouraged in practice. Passing in no kernel_name to NotebookClient is recommended instead. 
""" filename = os.path.join(current_dir, 'files', 'UnicodePy3.ipynb') res = self.build_resources() input_nb, output_nb = run_notebook(filename, {"kernel_name": ""}, res) assert_notebooks_equal(input_nb, output_nb) with pytest.raises(TraitError): input_nb, output_nb = run_notebook(filename, {"kernel_name": None}, res) def test_disable_stdin(self): """Test disabling standard input""" filename = os.path.join(current_dir, 'files', 'Disable Stdin.ipynb') res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) input_nb, output_nb = run_notebook(filename, dict(allow_errors=True), res) # We need to special-case this particular notebook, because the # traceback contains machine-specific stuff like where IPython # is installed. It is sufficient here to just check that an error # was thrown, and that it was a StdinNotImplementedError self.assertEqual(len(output_nb['cells']), 1) self.assertEqual(len(output_nb['cells'][0]['outputs']), 1) output = output_nb['cells'][0]['outputs'][0] self.assertEqual(output['output_type'], 'error') self.assertEqual(output['ename'], 'StdinNotImplementedError') self.assertEqual( output['evalue'], 'raw_input was called, but this frontend does not support input requests.', ) def test_timeout(self): """Check that an error is raised when a computation times out""" filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb') res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) with pytest.raises(TimeoutError) as err: run_notebook(filename, dict(timeout=1), res) self.assertEqual( str(err.value.args[0]), """A cell timed out while it was being executed, after 1 seconds. The message was: Cell execution timed out. 
Here is a preview of the cell contents: ------------------- while True: continue ------------------- """, ) def test_timeout_func(self): """Check that an error is raised when a computation times out""" filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb') res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) def timeout_func(source): return 10 with pytest.raises(TimeoutError): run_notebook(filename, dict(timeout_func=timeout_func), res) def test_kernel_death_after_timeout(self): """Check that an error is raised when the kernel is_alive is false after a cell timed out""" filename = os.path.join(current_dir, 'files', 'Interrupt.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) executor = NotebookClient(input_nb, timeout=1) with pytest.raises(TimeoutError): executor.execute() km = executor.create_kernel_manager() async def is_alive(): return False km.is_alive = is_alive # Will be a RuntimeError or subclass DeadKernelError depending # on if jupyter_client or nbconvert catches the dead client first with pytest.raises(RuntimeError): input_nb, output_nb = executor.execute() def test_kernel_death_during_execution(self): """Check that an error is raised when the kernel is_alive is false during a cell execution. """ filename = os.path.join(current_dir, 'files', 'Autokill.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) executor = NotebookClient(input_nb) with pytest.raises(RuntimeError): executor.execute() def test_allow_errors(self): """ Check that conversion halts if ``allow_errors`` is False. 
""" filename = os.path.join(current_dir, 'files', 'Skip Exceptions.ipynb') res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) with pytest.raises(CellExecutionError) as exc: run_notebook(filename, dict(allow_errors=False), res) self.assertIsInstance(str(exc.value), str) assert "# üñîçø∂é" in str(exc.value) def test_force_raise_errors(self): """ Check that conversion halts if the ``force_raise_errors`` traitlet on NotebookClient is set to True. """ filename = os.path.join(current_dir, 'files', 'Skip Exceptions with Cell Tags.ipynb') res = self.build_resources() res['metadata']['path'] = os.path.dirname(filename) with pytest.raises(CellExecutionError) as exc: run_notebook(filename, dict(force_raise_errors=True), res) self.assertIsInstance(str(exc.value), str) assert "# üñîçø∂é" in str(exc.value) def test_reset_kernel_client(self): filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) executor = NotebookClient( input_nb, resources=self.build_resources(), ) executor.execute(cleanup_kc=False) # we didn't ask to reset the kernel client, a new one must have been created kc = executor.kc assert kc is not None executor.execute(cleanup_kc=False) # we didn't ask to reset the kernel client, the previously created one must have been reused assert kc == executor.kc executor.execute(reset_kc=True, cleanup_kc=False) # we asked to reset the kernel client, the previous one must have been cleaned up, # a new one must have been created assert kc != executor.kc def test_cleanup_kernel_client(self): filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) executor = NotebookClient( input_nb, resources=self.build_resources(), ) executor.execute() # we asked to cleanup the kernel client (default is True) assert executor.kc is None executor.execute(cleanup_kc=False) # we didn't ask to reset the kernel client # a new one must 
have been created and should still be available assert executor.kc is not None def test_custom_kernel_manager(self): from .fake_kernelmanager import FakeCustomKernelManager filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) cleaned_input_nb = copy.deepcopy(input_nb) for cell in cleaned_input_nb.cells: if 'execution_count' in cell: del cell['execution_count'] cell['outputs'] = [] executor = NotebookClient( cleaned_input_nb, resources=self.build_resources(), kernel_manager_class=FakeCustomKernelManager, ) # Override terminal size to standardise traceback format with modified_env({'COLUMNS': '80', 'LINES': '24'}): executor.execute() expected = FakeCustomKernelManager.expected_methods.items() for method, call_count in expected: self.assertNotEqual(call_count, 0, f'{method} was called') def test_process_message_wrapper(self): outputs = [] class WrappedPreProc(NotebookClient): def process_message(self, msg, cell, cell_index): result = super().process_message(msg, cell, cell_index) if result: outputs.append(result) return result current_dir = os.path.dirname(__file__) filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) original = copy.deepcopy(input_nb) wpp = WrappedPreProc(input_nb) executed = wpp.execute() assert outputs == [{'name': 'stdout', 'output_type': 'stream', 'text': 'Hello World\n'}] assert_notebooks_equal(original, executed) def test_execute_function(self): # Test the execute() convenience API filename = os.path.join(current_dir, 'files', 'HelloWorld.ipynb') with open(filename) as f: input_nb = nbformat.read(f, 4) original = copy.deepcopy(input_nb) executed = execute(original, os.path.dirname(filename)) assert_notebooks_equal(original, executed) def test_widgets(self): """Runs a test notebook with widgets and checks the widget state is saved.""" input_file = os.path.join(current_dir, 'files', 'JupyterWidgets.ipynb') 
opts = dict(kernel_name="python") res = self.build_resources() res['metadata']['path'] = os.path.dirname(input_file) input_nb, output_nb = run_notebook(input_file, opts, res) output_data = [ output.get('data', {}) for cell in output_nb['cells'] for output in cell['outputs'] ] model_ids = [ data['application/vnd.jupyter.widget-view+json']['model_id'] for data in output_data if 'application/vnd.jupyter.widget-view+json' in data ] wdata = output_nb['metadata']['widgets']['application/vnd.jupyter.widget-state+json'] for k in model_ids: d = wdata['state'][k] assert 'model_name' in d assert 'model_module' in d assert 'state' in d assert 'version_major' in wdata assert 'version_minor' in wdata class TestRunCell(NBClientTestsBase): """Contains test functions for NotebookClient.execute_cell""" @prepare_cell_mocks() def test_idle_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # Just the exit message should be fetched assert message_mock.call_count == 1 # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'execute_reply'}, 'parent_header': {'msg_id': 'wrong_parent'}, 'content': {'name': 'stdout', 'text': 'foo'}, } ) def test_message_for_wrong_parent(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An ignored stream followed by an idle assert message_mock.call_count == 2 # Ensure no output was written assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'status', 'header': {'msg_type': 'status'}, 'content': {'execution_state': 'busy'}, } ) def test_busy_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # One busy message, followed by an idle assert message_mock.call_count == 2 # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 
'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stderr', 'text': 'bar'}, }, ) def test_deadline_exec_reply(self, executor, cell_mock, message_mock): # exec_reply is never received, so we expect to hit the timeout. async def get_msg(timeout): await asyncio.sleep(timeout) raise Empty executor.kc.shell_channel.get_msg = get_msg executor.timeout = 1 with pytest.raises(TimeoutError): executor.execute_cell(cell_mock, 0) assert message_mock.call_count == 3 # Ensure the output was captured self.assertListEqual( cell_mock.outputs, [ {'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}, {'output_type': 'stream', 'name': 'stderr', 'text': 'bar'}, ], ) @prepare_cell_mocks() def test_deadline_iopub(self, executor, cell_mock, message_mock): # The shell_channel will complete, so we expect only to hit the iopub timeout. message_mock.side_effect = Empty() executor.raise_on_iopub_timeout = True with pytest.raises(TimeoutError): executor.execute_cell(cell_mock, 0) @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stderr', 'text': 'bar'}, }, ) def test_eventual_deadline_iopub(self, executor, cell_mock, message_mock): # Process a few messages before raising a timeout from iopub def message_seq(messages): yield from messages while True: yield Empty() message_mock.side_effect = message_seq(list(message_mock.side_effect)[:-1]) executor.kc.shell_channel.get_msg = Mock( return_value=make_async({'parent_header': {'msg_id': executor.parent_id}}) ) executor.raise_on_iopub_timeout = True with pytest.raises(TimeoutError): executor.execute_cell(cell_mock, 0) assert message_mock.call_count >= 3 # Ensure the output was captured self.assertListEqual( cell_mock.outputs, [ {'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}, {'output_type': 'stream', 'name': 'stderr', 'text': 'bar'}, ], ) @prepare_cell_mocks( 
{'msg_type': 'execute_input', 'header': {'msg_type': 'execute_input'}, 'content': {}} ) def test_execute_input_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # One ignored execute_input, followed by an idle assert message_mock.call_count == 2 # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stderr', 'text': 'bar'}, }, ) def test_stream_messages(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An stdout then stderr stream followed by an idle assert message_mock.call_count == 3 # Ensure the output was captured self.assertListEqual( cell_mock.outputs, [ {'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}, {'output_type': 'stream', 'name': 'stderr', 'text': 'bar'}, ], ) @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'execute_reply'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, {'msg_type': 'clear_output', 'header': {'msg_type': 'clear_output'}, 'content': {}}, ) def test_clear_output_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A stream, followed by a clear, and then an idle assert message_mock.call_count == 3 # Ensure the output was cleared assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 'clear_output', 'header': {'msg_type': 'clear_output'}, 'content': {'wait': True}, }, ) def test_clear_output_wait_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A stream, followed by a clear, and then an idle assert message_mock.call_count == 3 # Should be true without another message to trigger the clear self.assertTrue(executor.clear_before_next_output) # 
Ensure the output wasn't cleared yet assert cell_mock.outputs == [{'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 'clear_output', 'header': {'msg_type': 'clear_output'}, 'content': {'wait': True}, }, { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stderr', 'text': 'bar'}, }, ) def test_clear_output_wait_then_message_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An stdout stream, followed by a wait clear, an stderr stream, and then an idle assert message_mock.call_count == 4 # Should be false after the stderr message assert not executor.clear_before_next_output # Ensure the output wasn't cleared yet assert cell_mock.outputs == [{'output_type': 'stream', 'name': 'stderr', 'text': 'bar'}] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'name': 'stdout', 'text': 'foo'}, }, { 'msg_type': 'clear_output', 'header': {'msg_type': 'clear_output'}, 'content': {'wait': True}, }, { 'msg_type': 'update_display_data', 'header': {'msg_type': 'update_display_data'}, 'content': {'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}}, }, ) def test_clear_output_wait_then_update_display_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An stdout stream, followed by a wait clear, an stderr stream, and then an idle assert message_mock.call_count == 4 # Should be false after the stderr message assert executor.clear_before_next_output # Ensure the output wasn't cleared yet because update_display doesn't add outputs assert cell_mock.outputs == [{'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}] @prepare_cell_mocks( { 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, 'content': {'execution_count': 42}, } ) def test_execution_count_message(self, executor, 
cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An execution count followed by an idle assert message_mock.call_count == 2 assert cell_mock.execution_count == 42 # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, 'content': {'execution_count': 42}, } ) def test_execution_count_message_ignored_on_override(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0, execution_count=21) # An execution count followed by an idle assert message_mock.call_count == 2 assert cell_mock.execution_count == 21 # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'stream', 'header': {'msg_type': 'stream'}, 'content': {'execution_count': 42, 'name': 'stdout', 'text': 'foo'}, } ) def test_execution_count_with_stream_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An execution count followed by an idle assert message_mock.call_count == 2 assert cell_mock.execution_count == 42 # Should also consume the message stream assert cell_mock.outputs == [{'output_type': 'stream', 'name': 'stdout', 'text': 'foo'}] @prepare_cell_mocks( { 'msg_type': 'comm', 'header': {'msg_type': 'comm'}, 'content': {'comm_id': 'foobar', 'data': {'state': {'foo': 'bar'}}}, } ) def test_widget_comm_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A comm message without buffer info followed by an idle assert message_mock.call_count == 2 self.assertEqual(executor.widget_state, {'foobar': {'foo': 'bar'}}) # Buffers should still be empty assert not executor.widget_buffers # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'comm', 'header': {'msg_type': 'comm'}, 'buffers': [b'123'], 'content': { 'comm_id': 'foobar', 'data': {'state': {'foo': 'bar'}, 'buffer_paths': [['path']]}, }, } ) def 
test_widget_comm_buffer_message_single(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A comm message with buffer info followed by an idle assert message_mock.call_count == 2 assert executor.widget_state == {'foobar': {'foo': 'bar'}} assert executor.widget_buffers == { 'foobar': {('path',): {'data': 'MTIz', 'encoding': 'base64', 'path': ['path']}} } # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'comm', 'header': {'msg_type': 'comm'}, 'buffers': [b'123'], 'content': { 'comm_id': 'foobar', 'data': {'state': {'foo': 'bar'}, 'buffer_paths': [['path']]}, }, }, { 'msg_type': 'comm', 'header': {'msg_type': 'comm'}, 'buffers': [b'123'], 'content': { 'comm_id': 'foobar', 'data': {'state': {'foo2': 'bar2'}, 'buffer_paths': [['path2']]}, }, }, ) def test_widget_comm_buffer_messages(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A comm message with buffer info followed by an idle assert message_mock.call_count == 3 assert executor.widget_state == {'foobar': {'foo': 'bar', 'foo2': 'bar2'}} assert executor.widget_buffers == { 'foobar': { ('path',): {'data': 'MTIz', 'encoding': 'base64', 'path': ['path']}, ('path2',): {'data': 'MTIz', 'encoding': 'base64', 'path': ['path2']}, } } # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'comm', 'header': {'msg_type': 'comm'}, 'content': { 'comm_id': 'foobar', # No 'state' 'data': {'foo': 'bar'}, }, } ) def test_unknown_comm_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An unknown comm message followed by an idle assert message_mock.call_count == 2 # Widget states should be empty as the message has the wrong shape assert not executor.widget_state assert not executor.widget_buffers # Ensure no outputs were generated assert cell_mock.outputs == [] @prepare_cell_mocks( { 'msg_type': 'execute_result', 'header': {'msg_type': 
'execute_result'}, 'content': { 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, 'execution_count': 42, }, } ) def test_execute_result_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An execute followed by an idle assert message_mock.call_count == 2 assert cell_mock.execution_count == 42 # Should generate an associated message assert cell_mock.outputs == [ { 'output_type': 'execute_result', 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, 'execution_count': 42, } ] # No display id was provided assert not executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'execute_result', 'header': {'msg_type': 'execute_result'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, 'execution_count': 42, }, } ) def test_execute_result_with_display_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An execute followed by an idle assert message_mock.call_count == 2 assert cell_mock.execution_count == 42 # Should generate an associated message assert cell_mock.outputs == [ { 'output_type': 'execute_result', 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, 'execution_count': 42, } ] assert 'foobar' in executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': {'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}}, } ) def test_display_data_without_id_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A display followed by an idle assert message_mock.call_count == 2 # Should generate an associated message assert cell_mock.outputs == [ { 'output_type': 'display_data', 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, } ] # No display id was provided assert not executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 
'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, }, } ) def test_display_data_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A display followed by an idle assert message_mock.call_count == 2 # Should generate an associated message assert cell_mock.outputs == [ { 'output_type': 'display_data', 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, } ] assert 'foobar' in executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, }, }, { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 'transient': {'display_id': 'foobar_other'}, 'metadata': {'metafoo_other': 'metabar_other'}, 'data': {'foo': 'bar_other'}, }, }, { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, }, ) def test_display_data_same_id_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A display followed by an idle assert message_mock.call_count == 4 # Original output should be manipulated and a copy of the second now assert cell_mock.outputs == [ { 'output_type': 'display_data', 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, { 'output_type': 'display_data', 'metadata': {'metafoo_other': 'metabar_other'}, 'data': {'foo': 'bar_other'}, }, { 'output_type': 'display_data', 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, ] assert 'foobar' in executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'update_display_data', 'header': {'msg_type': 'update_display_data'}, 'content': {'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}}, } ) def 
test_update_display_data_without_id_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An update followed by an idle assert message_mock.call_count == 2 # Display updates don't create any outputs assert cell_mock.outputs == [] # No display id was provided assert not executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, }, { 'msg_type': 'update_display_data', 'header': {'msg_type': 'update_display_data'}, 'content': { 'transient': {'display_id': 'foobar2'}, 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, }, ) def test_update_display_data_mismatch_id_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An update followed by an idle assert message_mock.call_count == 3 # Display updates don't create any outputs assert cell_mock.outputs == [ { 'output_type': 'display_data', 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, } ] assert 'foobar' in executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'display_data', 'header': {'msg_type': 'display_data'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo': 'metabar'}, 'data': {'foo': 'bar'}, }, }, { 'msg_type': 'update_display_data', 'header': {'msg_type': 'update_display_data'}, 'content': { 'transient': {'display_id': 'foobar'}, 'metadata': {'metafoo2': 'metabar2'}, 'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, }, }, ) def test_update_display_data_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # A display followed by an update then an idle assert message_mock.call_count == 3 # Original output should be manipulated assert cell_mock.outputs == [ { 'output_type': 'display_data', 'metadata': {'metafoo2': 'metabar2'}, 
'data': {'foo': 'bar2', 'baz': 'foobarbaz'}, } ] assert 'foobar' in executor._display_id_map @prepare_cell_mocks( { 'msg_type': 'error', 'header': {'msg_type': 'error'}, 'content': {'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']}, } ) def test_error_message(self, executor, cell_mock, message_mock): executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 2 # Should also consume the message stream assert cell_mock.outputs == [ {'output_type': 'error', 'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']} ] @prepare_cell_mocks( { 'msg_type': 'error', 'header': {'msg_type': 'error'}, 'content': {'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']}, }, reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error'}, }, ) def test_error_and_error_status_messages(self, executor, cell_mock, message_mock): with self.assertRaises(CellExecutionError): executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 2 # Cell outputs should still be copied assert cell_mock.outputs == [ {'output_type': 'error', 'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']} ] @prepare_cell_mocks( { 'msg_type': 'error', 'header': {'msg_type': 'error'}, 'content': {'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']}, }, reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # OK 'content': {'status': 'ok'}, }, ) def test_error_message_only(self, executor, cell_mock, message_mock): # Should NOT raise executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 2 # Should also consume the message stream assert cell_mock.outputs == [ {'output_type': 'error', 'ename': 'foo', 'evalue': 'bar', 'traceback': ['Boom']} ] @prepare_cell_mocks( reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error'}, } ) def 
test_allow_errors(self, executor, cell_mock, message_mock): executor.allow_errors = True # Should NOT raise executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 1 # Should also consume the message stream assert cell_mock.outputs == [] @prepare_cell_mocks( reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error', 'ename': 'NotImplementedError'}, } ) def test_allow_error_names(self, executor, cell_mock, message_mock): executor.allow_error_names = ['NotImplementedError'] # Should NOT raise executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 1 # Should also consume the message stream assert cell_mock.outputs == [] @prepare_cell_mocks( reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error'}, } ) def test_raises_exception_tag(self, executor, cell_mock, message_mock): cell_mock.metadata['tags'] = ['raises-exception'] # Should NOT raise executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 1 # Should also consume the message stream assert cell_mock.outputs == [] @prepare_cell_mocks( reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error'}, } ) def test_non_code_cell(self, executor, cell_mock, message_mock): cell_mock = NotebookNode(source='"foo" = "bar"', metadata={}, cell_type='raw', outputs=[]) # Should NOT raise nor execute any code executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 0 # Should also consume the message stream assert cell_mock.outputs == [] @prepare_cell_mocks( reply_msg={ 'msg_type': 'execute_reply', 'header': {'msg_type': 'execute_reply'}, # ERROR 'content': {'status': 'error'}, } ) def test_no_source(self, executor, cell_mock, message_mock): cell_mock = NotebookNode( # 
Stripped source is empty source=' ', metadata={}, cell_type='code', outputs=[], ) # Should NOT raise nor execute any code executor.execute_cell(cell_mock, 0) # An error followed by an idle assert message_mock.call_count == 0 # Should also consume the message stream assert cell_mock.outputs == [] nbclient-0.5.6/nbclient/tests/util.py000066400000000000000000000033441414342127300176140ustar00rootroot00000000000000import asyncio import tornado from nbclient.util import run_sync @run_sync async def some_async_function(): await asyncio.sleep(0.01) return 42 def test_nested_asyncio_with_existing_ioloop(): ioloop = asyncio.new_event_loop() try: asyncio.set_event_loop(ioloop) assert some_async_function() == 42 assert asyncio.get_event_loop() is ioloop finally: asyncio._set_running_loop(None) # it seems nest_asyncio doesn't reset this def test_nested_asyncio_with_no_ioloop(): asyncio.set_event_loop(None) try: assert some_async_function() == 42 finally: asyncio._set_running_loop(None) # it seems nest_asyncio doesn't reset this def test_nested_asyncio_with_tornado(): # This tests if tornado accepts the pure-Python Futures, see # https://github.com/tornadoweb/tornado/issues/2753 # https://github.com/erdewit/nest_asyncio/issues/23 asyncio.set_event_loop(asyncio.new_event_loop()) ioloop = tornado.ioloop.IOLoop.current() async def some_async_function(): future = asyncio.ensure_future(asyncio.sleep(0.1)) # this future is a different future after nested-asyncio has patched # the asyncio module, check if tornado likes it: ioloop.add_future(future, lambda f: f.result()) await future return 42 def some_sync_function(): return run_sync(some_async_function)() async def run(): # calling some_async_function directly should work assert await some_async_function() == 42 # but via a sync function (using nested-asyncio) can lead to issues: # https://github.com/tornadoweb/tornado/issues/2753 assert some_sync_function() == 42 ioloop.run_sync(run) 
nbclient-0.5.6/nbclient/util.py000066400000000000000000000062431414342127300164530ustar00rootroot00000000000000"""General utility methods""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import asyncio import inspect import sys from typing import Any, Awaitable, Callable, Union def check_ipython() -> None: # original from vaex/asyncio.py IPython = sys.modules.get('IPython') if IPython: version_str = IPython.__version__ # type: ignore # We get rid of any trailing ".dev" version_str = version_str.replace(".dev", "") IPython_version = tuple(map(int, version_str.split('.'))) if IPython_version < (7, 0, 0): raise RuntimeError( f'You are using IPython {IPython.__version__} ' # type: ignore 'while we require 7.0.0+, please update IPython' ) def check_patch_tornado() -> None: """If tornado is imported, add the patched asyncio.Future to its tuple of acceptable Futures""" # original from vaex/asyncio.py if 'tornado' in sys.modules: import tornado.concurrent # type: ignore if asyncio.Future not in tornado.concurrent.FUTURES: tornado.concurrent.FUTURES = tornado.concurrent.FUTURES + ( asyncio.Future, ) # type: ignore def just_run(coro: Awaitable) -> Any: """Make the coroutine run, even if there is an event loop running (using nest_asyncio)""" # original from vaex/asyncio.py loop = asyncio._get_running_loop() if loop is None: had_running_loop = False try: loop = asyncio.get_event_loop() except RuntimeError: # we can still get 'There is no current event loop in ...' loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) else: had_running_loop = True if had_running_loop: # if there is a running loop, we patch using nest_asyncio # to have reentrant event loops check_ipython() import nest_asyncio nest_asyncio.apply() check_patch_tornado() return loop.run_until_complete(coro) def run_sync(coro: Callable) -> Callable: """Runs a coroutine and blocks until it has executed. An event loop is created if no one already exists. 
If an event loop is already running, this event loop execution is nested into the already running one if `nest_asyncio` is set to True. Parameters ---------- coro : coroutine The coroutine to be executed. Returns ------- result : Whatever the coroutine returns. """ def wrapped(*args, **kwargs): return just_run(coro(*args, **kwargs)) wrapped.__doc__ = coro.__doc__ return wrapped async def ensure_async(obj: Union[Awaitable, Any]) -> Any: """Convert a non-awaitable object to a coroutine if needed, and await it if it was not already awaited. """ if inspect.isawaitable(obj): try: result = await obj except RuntimeError as e: if str(e) == 'cannot reuse already awaited coroutine': # obj is already the coroutine's result return obj raise return result # obj doesn't need to be awaited return obj nbclient-0.5.6/pyproject.toml000066400000000000000000000016201414342127300162340ustar00rootroot00000000000000# Example configuration for Black. # NOTE: you have to use single-quoted strings in TOML for regular expressions. # It's the equivalent of r-strings in Python. Multiline strings are treated as # verbose regular expressions by Black. Use [ ] to denote a significant space # character. [tool.black] line-length = 100 include = '\.pyi?$' exclude = ''' /( \.git | \.hg | \.mypy_cache | \.tox | \.venv | _build | buck-out | build | dist # The following are specific to Black, you probably don't want those. 
| blib2to3 | tests/data | profiling )/ ''' skip-string-normalization = true [tool.isort] profile = "black" known_first_party = ["nbclient"] [tool.mypy] python_version = 3.9 [[tool.mypy.overrides]] module = [ "ipython_genutils.*", "nbformat.*", "nest_asyncio.*", "async_generator.*", "traitlets.*", "jupyter_client.*", ] ignore_missing_imports = true nbclient-0.5.6/requirements-dev.txt000066400000000000000000000003311414342127300173560ustar00rootroot00000000000000codecov coverage ipython ipykernel ipywidgets pytest>=4.1 pytest-cov>=2.6.1 check-manifest flake8 mypy tox bumpversion xmltodict black; python_version >= '3.6' pip>=18.1 wheel>=0.31.0 setuptools>=38.6.0 twine>=1.11.0 nbclient-0.5.6/requirements.txt000066400000000000000000000001461414342127300166060ustar00rootroot00000000000000traitlets>=4.2 jupyter_client>=6.1.5 nbformat>=5.0 async_generator; python_version<'3.7' nest_asyncio nbclient-0.5.6/setup.cfg000066400000000000000000000002641414342127300151440ustar00rootroot00000000000000[bdist_wheel] universal=0 [metadata] license_file = LICENSE [check-manifest] ignore = .circleci* [flake8] ignore = E203,E731,F811,W503 max-complexity=23 max-line-length=100 nbclient-0.5.6/setup.py000066400000000000000000000054601414342127300150400ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import os from setuptools import setup # the name of the package name = 'nbclient' local_path = os.path.dirname(__file__) # Fix for tox which manipulates execution pathing if not local_path: local_path = '.' 
here = os.path.abspath(local_path) def version(): with open(here + '/nbclient/_version.py') as ver: for line in ver.readlines(): if line.startswith('version ='): return line.split(' = ')[-1].strip()[1:-1] raise ValueError('No version found in nbclient/version.py') def read(path): with open(path) as fhandle: return fhandle.read() def read_reqs(fname): req_path = os.path.join(here, fname) return [req.strip() for req in read(req_path).splitlines() if req.strip()] long_description = read(os.path.join(os.path.dirname(__file__), "README.md")) requirements = read(os.path.join(os.path.dirname(__file__), "requirements.txt")) dev_reqs = read_reqs(os.path.join(os.path.dirname(__file__), 'requirements-dev.txt')) doc_reqs = read_reqs(os.path.join(os.path.dirname(__file__), 'docs/requirements-doc.txt')) extras_require = {"test": dev_reqs, "dev": dev_reqs, "sphinx": doc_reqs} setup( name=name, version=version(), author='Jupyter Development Team', author_email='jupyter@googlegroups.com', url='https://jupyter.org', description=( "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
), long_description=long_description, long_description_content_type='text/markdown', packages=['nbclient'], include_package_data=True, python_requires=">=3.6.1", install_requires=requirements, extras_require=extras_require, entry_points={ 'console_scripts': [ 'jupyter-run = nbclient.cli:run', 'jupyter-execute = nbclient.cli:execute', ], }, project_urls={ 'Documentation': 'https://nbclient.readthedocs.io', 'Funding': 'https://numfocus.org/', 'Source': 'https://github.com/jupyter/nbclient', 'Tracker': 'https://github.com/jupyter/nbclient/issues', }, license='BSD', platforms="Linux, Mac OS X, Windows", keywords=['jupyter', 'pipeline', 'notebook', 'executor'], classifiers=[ 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ], ) nbclient-0.5.6/tox.ini000066400000000000000000000036211414342127300146360ustar00rootroot00000000000000[tox] skipsdist = true envlist = py{36,37,38, 39}, mypy, dist, docs [gh-actions] python = 3.6: py36 3.7: py37 3.8: py38 3.9: py39, mypy, dist # Type check [testenv:mypy] skip_install = true deps = mypy commands = mypy nbclient/client.py nbclient/exceptions.py nbclient/__init__.py nbclient/jsonutil.py nbclient/output_widget.py nbclient/util.py nbclient/_version.py # Docs [testenv:docs] description = invoke sphinx-build to build the HTML docs deps = .[dev] -r docs/requirements-doc.txt extras = docs commands = sphinx-build -d "{toxworkdir}/docs_doctree" docs "{toxworkdir}/docs_out" --color -W -bhtml {posargs} python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))' # Distro [testenv:dist] skip_install = true # Have 
to use /bin/bash or the `*` will cause that argument to get quoted by the tox command line... commands = python setup.py sdist --dist-dir={distdir} bdist_wheel --dist-dir={distdir} /bin/bash -c 'python -m pip install -U --force-reinstall {distdir}/nbclient*.whl' /bin/bash -c 'python -m pip install -U --force-reinstall --no-deps {distdir}/nbclient*.tar.gz' [testenv] # disable Python's hash randomization for tests that stringify dicts, etc setenv = PYTHONHASHSEED = 0 passenv = * basepython = py36: python3.6 py37: python3.7 py38: python3.8 py39: python3.9 mypy: python3.9 binder: python3.9 dist: python3.9 docs: python3.9 deps = .[dev] commands = pytest -vv --maxfail=2 --cov=nbclient --cov-report=xml -W always {posargs} # Binder [testenv:binder] description = ensure /binder/*ipynb are runnable deps = -r binder/requirements.txt commands = python -c "from glob import glob; from nbclient import execute; import nbformat as nbf; [execute(nbf.read(input, nbf.NO_CONVERT), cwd='./binder') for input in glob('binder/**/*.ipynb')]"